From aab1c198bce751ad6fbcf489055ce5f701486bb5 Mon Sep 17 00:00:00 2001
From: Arthur Passos
Date: Thu, 28 Apr 2022 10:07:47 -0300
Subject: [PATCH 001/130] Make builds and tests possible in Altinity's
 infrastructure

add comment and rename github robot token
add clickhouse instance password parameter
use Altinity's s3 bucket
Use altinityinfra dockerhub images and minor adjustments (#135)
Allow CI to be triggered on PR
Proper error reporting during docker pull and lowercase version name (sketched below)
allow `altinitystable` git tags
Download specific MinIO version instead of latest - same as upstream master
remove stale chmod
More stable CI/CD builds:
- Rebuilding all docker images
- Reduced number of docker images
- Rerunning functional tests even if those were already executed in previous run
Added missing dependencies for stateful tests
Re-generating _pb2 files on each test run (sketched below)
Changed hardcoded docker images name prefixes from `clickhouse/` to `altinityinfra/`
Pushing images as :latest too to avoid some test failures
reverted back to use clickhouse/jdbc-bridge
Fixed how version is generated: taking into account VERSION_TWEAK and VERSION_FLAVOUR (sketched below)
Pushing checks events data to "gh-data" instead of "default" (sketched below)
etc.
---
 .github/workflows/cherry_pick.yml | 48 -
 .github/workflows/docs_check.yml | 175 -
 .github/workflows/docs_release.yml | 119 -
 .github/workflows/jepsen.yml | 41 -
 .github/workflows/nightly.yml | 118 -
 .github/workflows/pull_request.yml | 3329 -----------------
 .github/workflows/release.yml | 43 -
 .github/workflows/release_branches.yml | 1999 ++--------
 .github/workflows/tags_stable.yml | 72 -
 .github/workflows/woboq.yml | 42 -
 cmake/autogenerated_versions.txt | 11 +-
 cmake/version.cmake | 2 +-
 docker/images.json | 75 +-
 docker/packager/binary/Dockerfile | 2 +-
 docker/packager/binary/build.sh | 4 +
 docker/packager/packager | 2 +-
 docker/test/base/Dockerfile | 4 +-
 docker/test/codebrowser/Dockerfile | 2 +-
 docker/test/fasttest/Dockerfile | 2 +-
 docker/test/fuzzer/Dockerfile | 2 +-
 docker/test/integration/base/Dockerfile | 2 +-
 .../compose/docker_compose_dotnet_client.yml | 2 +-
 .../runner/compose/docker_compose_keeper.yml | 6 +-
 .../docker_compose_kerberized_hdfs.yml | 4 +-
 .../docker_compose_kerberized_kafka.yml | 2 +-
 .../runner/compose/docker_compose_minio.yml | 6 +-
 .../docker_compose_mysql_golang_client.yml | 2 +-
 .../docker_compose_mysql_java_client.yml | 2 +-
 .../docker_compose_mysql_js_client.yml | 2 +-
 .../docker_compose_mysql_php_client.yml | 2 +-
 .../docker_compose_postgresql_java_client.yml | 2 +-
 docker/test/keeper-jepsen/Dockerfile | 2 +-
 docker/test/split_build_smoke_test/Dockerfile | 4 +-
 docker/test/stateful/Dockerfile | 5 +-
 docker/test/stateful/setup_minio.sh | 92 +-
 ...sts possible in Altinity's infrastructure) | 77 +
 docker/test/stateless/Dockerfile | 4 +-
 docker/test/stateless/setup_minio.sh | 1 +
 docker/test/stateless_pytest/Dockerfile | 33 +
 docker/test/stress/Dockerfile | 2 +-
 docker/test/unit/Dockerfile | 2 +-
 packages/clickhouse-client.yaml | 4 +-
 packages/clickhouse-common-static-dbg.yaml | 4 +-
 packages/clickhouse-common-static.yaml | 4 +-
 packages/clickhouse-keeper-dbg.yaml | 4 +-
 packages/clickhouse-keeper.yaml | 4 +-
 packages/clickhouse-server.yaml | 4 +-
 tests/ci/ast_fuzzer_check.py | 4 +-
 tests/ci/build_check.py | 19 +-
 tests/ci/ccache_utils.py | 1 +
 tests/ci/ci_config.py | 10 +-
 tests/ci/clickhouse_helper.py | 10 +-
 tests/ci/codebrowser_check.py | 2 +-
 tests/ci/compatibility_check.py | 6 +-
 tests/ci/docker_images_check.py | 55 +-
 tests/ci/docker_manifests_merge.py | 8 +-
 tests/ci/docker_pull_helper.py | 23 +-
 tests/ci/docker_server.py | 10 +-
 tests/ci/docker_test.py | 64 +-
 tests/ci/docs_check.py | 4 +-
 tests/ci/docs_release.py | 2 +-
 tests/ci/env_helper.py | 8 +-
 tests/ci/fast_test_check.py | 4 +-
 tests/ci/functional_test_check.py | 16 +-
 tests/ci/get_robot_token.py | 9 +-
 tests/ci/git_helper.py | 2 +-
 tests/ci/git_test.py | 6 +
 tests/ci/integration_test_check.py | 34 +-
 tests/ci/keeper_jepsen_check.py | 4 +-
 tests/ci/performance_comparison_check.py | 2 +-
 tests/ci/run_check.py | 1 +
 tests/ci/split_build_smoke_check.py | 4 +-
 tests/ci/stress_check.py | 4 +-
 tests/ci/style_check.py | 4 +-
 tests/ci/tests/docker_images.json | 100 +-
 tests/ci/unit_tests_check.py | 4 +-
 tests/ci/version_helper.py | 26 +-
 tests/integration/ci-runner.py | 24 +-
 tests/integration/helpers/cluster.py | 7 +-
 tests/integration/helpers/network.py | 10 +-
 tests/integration/runner | 20 +-
 tests/integration/test_storage_kafka/test.py | 12 +
 .../aes_encryption_env/clickhouse-service.yml | 28 +
 .../clickhouse-service.yml | 28 +
 .../example_env/clickhouse-service.yml | 28 +
 .../clickhouse-service.yml | 27 +
 .../kerberos_env/clickhouse-service.yml | 32 +
 .../authentication_env/clickhouse-service.yml | 29 +
 .../clickhouse-service.yml | 28 +
 .../clickhouse-service.yml | 29 +
 .../clickhouse-service.yml | 28 +
 .../clickhouse-service.yml | 28 +
 .../role_mapping_env/clickhouse-service.yml | 37 +
 .../map_type_env/clickhouse-service.yml | 27 +
 .../rbac/rbac_env/clickhouse-service.yml | 29 +
 tests/testflows/runner | 132 +
 .../clickhouse-service.yml | 27 +
 utils/clickhouse-docker | 4 +-
 98 files changed, 1409 insertions(+), 6021 deletions(-)
 delete mode 100644 .github/workflows/cherry_pick.yml
 delete mode 100644 .github/workflows/docs_check.yml
 delete mode 100644 .github/workflows/docs_release.yml
 delete mode 100644 .github/workflows/jepsen.yml
 delete mode 100644 .github/workflows/nightly.yml
 delete mode 100644 .github/workflows/pull_request.yml
 delete mode 100644 .github/workflows/release.yml
 delete mode 100644 .github/workflows/tags_stable.yml
 delete mode 100644 .github/workflows/woboq.yml
 mode change 120000 => 100755 docker/test/stateful/setup_minio.sh
 create mode 100755 docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure)
 create mode 100644 docker/test/stateless_pytest/Dockerfile
 create mode 100644 tests/testflows/aes_encryption/aes_encryption_env/clickhouse-service.yml
 create mode 100644 tests/testflows/datetime64_extended_range/datetime64_extended_range_env/clickhouse-service.yml
 create mode 100644 tests/testflows/example/example_env/clickhouse-service.yml
 create mode 100644 tests/testflows/extended_precision_data_types/extended-precision-data-type_env/clickhouse-service.yml
 create mode 100644 tests/testflows/kerberos/kerberos_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/authentication/authentication_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/authentication/ldap_authentication_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/external_user_directory/external_user_directory_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/external_user_directory/ldap_external_user_directory_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/role_mapping/ldap_role_mapping_env/clickhouse-service.yml
 create mode 100644 tests/testflows/ldap/role_mapping/role_mapping_env/clickhouse-service.yml
 create mode 100755 tests/testflows/map_type/map_type_env/clickhouse-service.yml
 create mode 100755 tests/testflows/rbac/rbac_env/clickhouse-service.yml
 create mode 100755 tests/testflows/runner
 create mode 100755 tests/testflows/window_functions/window_functions_env/clickhouse-service.yml
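The docker-pull item above refers to tests/ci/docker_pull_helper.py surfacing pull failures instead of hiding them, and lowercasing the version used in the image tag. A minimal sketch of the pattern in Python, assuming a plain `docker pull` subprocess; the helper's real retry counts, logging, and return type differ:

# Hypothetical sketch: pull an image with retries and real error reporting.
import subprocess
import time

def docker_pull(image, version, retries=5, sleep_seconds=10):
    tag = f"{image}:{version.lower()}"  # docker tags must be lowercase
    last_output = ""
    for attempt in range(1, retries + 1):
        result = subprocess.run(
            ["docker", "pull", tag],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        if result.returncode == 0:
            return tag
        last_output = result.stdout
        print(f"docker pull {tag} failed (attempt {attempt}): {last_output}")
        time.sleep(sleep_seconds)
    # Fail loudly with the captured output instead of returning a
    # half-initialized image name.
    raise RuntimeError(f"Cannot pull {tag}: {last_output}")

Failing loudly here is what lets a CI run report a broken or missing image up front rather than time out later in the test stage.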
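Re-generating _pb2 files on each test run keeps the generated protobuf/gRPC stubs in sync with the .proto sources instead of trusting stubs baked into the test image. A sketch using grpc_tools; the proto file name and directories are illustrative assumptions:

# Hypothetical sketch: regenerate *_pb2.py stubs before tests start.
import sys
from grpc_tools import protoc

def regenerate_stubs(proto_dir, out_dir, proto_file="clickhouse_grpc.proto"):
    # Equivalent to: python3 -m grpc_tools.protoc -I<proto_dir> \
    #   --python_out=<out_dir> --grpc_python_out=<out_dir> <proto_file>
    rc = protoc.main([
        "grpc_tools.protoc",
        f"-I{proto_dir}",
        f"--python_out={out_dir}",
        f"--grpc_python_out={out_dir}",
        proto_file,
    ])
    if rc != 0:
        sys.exit(f"protoc failed with code {rc}")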
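The version fix concerns tests/ci/version_helper.py and cmake/autogenerated_versions.txt: the tweak and flavour components (for example an `altinitystable` flavour) were previously dropped when composing the version string. A rough sketch of the idea; the regex and key names are assumptions, not the helper's exact code:

# Hypothetical sketch: derive a full version string from
# cmake/autogenerated_versions.txt, including TWEAK and FLAVOUR.
import re

def read_versions(path="cmake/autogenerated_versions.txt"):
    # Lines in the file look like: SET(VERSION_MAJOR 22)
    versions = {}
    pattern = re.compile(r"SET\(VERSION_(\w+)\s+([^)\s]+)\)", re.IGNORECASE)
    with open(path, encoding="utf-8") as fd:
        for line in fd:
            match = pattern.search(line)
            if match:
                versions[match.group(1).lower()] = match.group(2)
    return versions

def full_version(versions):
    # MAJOR.MINOR.PATCH.TWEAK, plus the flavour when present -
    # previously TWEAK and FLAVOUR were dropped.
    base = ".".join(
        versions.get(key, "0") for key in ("major", "minor", "patch", "tweak")
    )
    flavour = versions.get("flavour", "")
    return f"{base}.{flavour}" if flavour else base

With a flavour set, this yields strings like 22.4.2.1.altinitystable (illustrative).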
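Pushing checks events data to "gh-data" instead of "default" changes where tests/ci/clickhouse_helper.py writes CI telemetry. The helper inserts rows over ClickHouse's HTTP interface; a minimal sketch of such an insert, with the URL, credentials, and table schema assumed for illustration:

# Hypothetical sketch: insert check results into the gh-data database
# over ClickHouse's HTTP interface.
import json
import requests  # assumption: requests is available in the CI environment

def insert_events(url, user, password, rows, db="gh-data", table="checks"):
    # One JSON object per row, matching the JSONEachRow input format.
    payload = "\n".join(json.dumps(row) for row in rows)
    response = requests.post(
        url,
        params={"query": f"INSERT INTO {db}.{table} FORMAT JSONEachRow"},
        data=payload,
        headers={"X-ClickHouse-User": user, "X-ClickHouse-Key": password},
        timeout=30,
    )
    response.raise_for_status()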
diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml
deleted file mode 100644
index 92be7d1f859a..000000000000
--- a/.github/workflows/cherry_pick.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-name: CherryPick
-
-env:
-  # Force the stdout and stderr streams to be unbuffered
-  PYTHONUNBUFFERED: 1
-
-concurrency:
-  group: cherry-pick
-on: # yamllint disable-line rule:truthy
-  schedule:
-    - cron: '0 * * * *'
-  workflow_dispatch:
-
-jobs:
-  CherryPick:
-    runs-on: [self-hosted, style-checker-aarch64]
-    steps:
-      - name: Set envs
-        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/cherry_pick
-          ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{ runner.temp }}/style_check
-          ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/docs_check
-          REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.TEMP_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Docs Check
-        run: |
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 docs_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FinishCheck:
-    needs:
-      - StyleCheck
-      - DockerHubPush
-      - DocsCheck
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Finish label
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py
-          python3 merge_pr.py --check-approved
diff --git a/.github/workflows/docs_release.yml b/.github/workflows/docs_release.yml
deleted file mode 100644
index e58d5383fe7b..000000000000
--- a/.github/workflows/docs_release.yml
+++ /dev/null
@@ -1,119 +0,0 @@
-name: DocsReleaseChecks
-
-env:
-  # Force the stdout and stderr streams to be unbuffered
-  PYTHONUNBUFFERED: 1
-
-concurrency:
-  group: master-release
-  cancel-in-progress: true
-'on':
-  push:
-    branches:
-      - master
-    paths:
-      - '.github/**'
-      - 'docker/docs/release/**'
-      - 'docs/**'
-      - 'utils/list-versions/version_date.tsv'
-      - 'website/**'
-  workflow_dispatch:
-jobs:
-  DockerHubPushAarch64:
-    runs-on: [self-hosted, style-checker-aarch64]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_images_check.py --suffix aarch64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
-  DockerHubPushAmd64:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_images_check.py --suffix amd64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
-  DockerHubPush:
-    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Download changed aarch64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}
-      - name: Download changed amd64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ runner.temp }}/changed_images.json
-  DocsRelease:
-    needs: DockerHubPush
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/docs_release
-          REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
-          CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
-          ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/keeper_jepsen
-          REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
-          EOF
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          fetch-depth: 0
-      - name: Jepsen Test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 keeper_jepsen_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
deleted file mode 100644
index c449d814f267..000000000000
--- a/.github/workflows/nightly.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-name: NightlyBuilds
-
-env:
-  # Force the stdout and stderr streams to be unbuffered
-  PYTHONUNBUFFERED: 1
-
-"on":
-  schedule:
-    - cron: '13 3 * * *'
-  workflow_dispatch:
-
-jobs:
-  DockerHubPushAarch64:
-    runs-on: [self-hosted, style-checker-aarch64]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_images_check.py --suffix aarch64 --all
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
-  DockerHubPushAmd64:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_images_check.py --suffix amd64 --all
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
-  DockerHubPush:
-    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Download changed aarch64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}
-      - name: Download changed amd64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ runner.temp }}/changed_images.json
-  BuilderCoverity:
-    needs: DockerHubPush
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          BUILD_NAME=coverity
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          TEMP_PATH=${{runner.temp}}/build_check
-          EOF
-          echo "COVERITY_TOKEN=${{ secrets.COVERITY_TOKEN }}" >> "$GITHUB_ENV"
-      - name: Download changed images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          submodules: true
-      - name: Build
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-      - name: Upload Coverity Analysis
-        if: ${{ success() || failure() }}
-        run: |
-          curl --form token="${COVERITY_TOKEN}" \
-            --form email='security+coverity@clickhouse.com' \
-            --form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
-            --form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
-            --form description="Nighly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
-            https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
deleted file mode 100644
index eb99bf1cf1e0..000000000000
--- a/.github/workflows/pull_request.yml
+++ /dev/null
@@ -1,3329 +0,0 @@
-name: PullRequestCI
-
-env:
-  # Force the stdout and stderr streams to be unbuffered
-  PYTHONUNBUFFERED: 1
-
-on: # yamllint disable-line rule:truthy
-  pull_request:
-    types:
-      - synchronize
-      - reopened
-      - opened
-    branches:
-      - master
-    paths-ignore:
-      - 'docker/docs/**'
-      - 'docs/**'
-      - 'website/**'
-##########################################################################################
-##################################### SMALL CHECKS #######################################
-##########################################################################################
-jobs:
-  CheckLabels:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Labels check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 run_check.py
-  PythonUnitTests:
-    needs: CheckLabels
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Python unit tests
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 -m unittest discover -s .
-p '*_test.py' - DockerHubPushAarch64: - needs: CheckLabels - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix aarch64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json - DockerHubPushAmd64: - needs: CheckLabels - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix amd64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_amd64 - path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json - DockerHubPush: - needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests] - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download changed aarch64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }} - - name: Download changed amd64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_amd64 - path: ${{ runner.temp }} - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/changed_images.json - StyleCheck: - needs: DockerHubPush - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() || always() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/fasttest - REPO_COPY=${{runner.temp}}/fasttest/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Fast Test - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 fast_test_check.py - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - CompatibilityCheck: - needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheck - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py - - 
name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - SharedBuildSmokeTest: - needs: [BuilderDebShared] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/split_build_check - REPO_COPY=${{runner.temp}}/split_build_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Shared build check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 split_build_smoke_check.py - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -######################################################################################### -#################################### ORDINARY BUILDS #################################### -######################################################################################### - BuilderDebRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # for performance artifact - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - BuilderBinRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() 
}} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/images_path - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # for performance artifact - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebAsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebUBsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr 
"$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebTsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebMsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebDebug: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - 
- name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -########################################################################################## -##################################### SPECIAL BUILDS ##################################### -########################################################################################## - BuilderDebShared: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_shared - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinClangTidy: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_tidy - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwin: - needs: [DockerHubPush, FastTest, StyleCheck] - 
runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinFreeBSD: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_freebsd - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # 
shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinPPC64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_ppc64le - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -############################################################################################ -##################################### Docker images ####################################### -############################################################################################ - DockerServerImages: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push - python3 docker_server.py --release-type head --no-push --no-ubuntu \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - # shellcheck 
disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################ -##################################### BUILD REPORTER ####################################### -############################################################################################ - BuilderReport: - needs: - - BuilderBinRelease - - BuilderDebAarch64 - - BuilderDebAsan - - BuilderDebDebug - - BuilderDebMsan - - BuilderDebRelease - - BuilderDebTsan - - BuilderDebUBsan - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - BuilderSpecialReport: - needs: - - BuilderBinAarch64 - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - - BuilderBinFreeBSD - # - BuilderBinGCC - - BuilderBinPPC64 - - BuilderBinClangTidy - - BuilderDebShared - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -########################### FUNCTIONAl STATELESS TESTS ####################################### -############################################################################################## - FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out 
repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseWideParts: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_wide_parts - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, wide parts enabled) - REPO_COPY=${{runner.temp}}/stateless_wide_parts/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (address) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (address)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=2
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestTsan0:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (thread)
-          REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestTsan1:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (thread)
-          REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestTsan2:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (thread)
-          REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestUBsan:
-    needs: [BuilderDebUBsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_ubsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (ubsan)
-          REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse
-          KILL_TIMEOUT=10800
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestMsan0:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_memory
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (memory)
-          REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestMsan1:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_memory
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (memory)
-          REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestMsan2:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_memory
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (memory)
-          REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestDebug0:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (debug)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestDebug1:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (debug)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestDebug2:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (debug)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestFlakyCheck:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_flaky_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests flaky check (address)
-          REPO_COPY=${{runner.temp}}/stateless_flaky_asan/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  TestsBugfixCheck:
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/tests_bugfix_check
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=tests bugfix validate check
-          KILL_TIMEOUT=3600
-          REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Bugfix test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-
-          TEMP_PATH="${TEMP_PATH}/integration" \
-          REPORTS_PATH="${REPORTS_PATH}/integration" \
-          python3 integration_test_check.py "Integration $CHECK_NAME" \
-            --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
-
-          TEMP_PATH="${TEMP_PATH}/stateless" \
-          REPORTS_PATH="${REPORTS_PATH}/stateless" \
-          python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \
-            --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
-
-          python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/post_commit_status.tsv" "${TEMP_PATH}/integration/post_commit_status.tsv"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
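NOTE: the checks above are split into fixed shards via RUN_BY_HASH_NUM / RUN_BY_HASH_TOTAL. A minimal sketch of the bucketing convention these variables imply, assuming the check script hashes test names into shards (identifiers below are illustrative, not taken from functional_test_check.py):

    import os
    import zlib

    def shard_owns(test_name: str) -> bool:
        """True when this shard is responsible for the given test."""
        num = int(os.environ.get("RUN_BY_HASH_NUM", "0"))
        total = int(os.environ.get("RUN_BY_HASH_TOTAL", "1"))
        # crc32 is stable across runs and machines, unlike Python's salted hash().
        return zlib.crc32(test_name.encode()) % total == num

    tests = ["00001_select_1", "00002_log_and_exception_messages_formatting"]
    print([t for t in tests if shard_owns(t)])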
-##############################################################################################
-############################ FUNCTIONAl STATEFUL TESTS #######################################
-##############################################################################################
-  FunctionalStatefulTestRelease:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_release
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (release)
-          REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestAarch64:
-    needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_release
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (aarch64)
-          REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestAsan:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (address)
-          REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestTsan:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (thread)
-          REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestMsan:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_msan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (memory)
-          REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestUBsan:
-    needs: [BuilderDebUBsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_ubsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (ubsan)
-          REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  FunctionalStatefulTestDebug:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (debug)
-          REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
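NOTE: every job above repeats the same skeleton: a "Set envs" step appends KEY=value lines to the file named by $GITHUB_ENV, and the runner exports those pairs into all later steps of the job. The quoted 'EOF' delimiter stops the shell from expanding anything in the block; the ${{ ... }} expressions are substituted by the workflow engine before the script even runs. A sketch of the same contract in Python (the fallback path is hypothetical):

    import os

    # Appending NAME=value lines to $GITHUB_ENV is the documented way to pass
    # environment variables between steps of one GitHub Actions job.
    env_file = os.environ.get("GITHUB_ENV", "/tmp/github_env.txt")
    with open(env_file, "a", encoding="utf-8") as f:
        f.write("TEMP_PATH=/home/runner/_temp/stateful_release\n")
        f.write("KILL_TIMEOUT=3600\n")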
-##############################################################################################
-######################################### STRESS TESTS #######################################
-##############################################################################################
-  StressTestAsan:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_thread
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (address)
-          REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  StressTestTsan:
-    needs: [BuilderDebTsan]
-    # func testers have 16 cores + 128 GB memory
-    # while stress testers have 36 cores + 72 memory
-    # It would be better to have something like 32 + 128,
-    # but such servers almost unavailable as spot instances.
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_thread
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (thread)
-          REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  StressTestMsan:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_memory
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (memory)
-          REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  StressTestUBsan:
-    needs: [BuilderDebUBsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_undefined
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (undefined)
-          REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  StressTestDebug:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (debug)
-          REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-##############################################################################################
-##################################### AST FUZZERS ############################################
-##############################################################################################
-  ASTFuzzerTestAsan:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=AST fuzzer (ASan)
-          REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Fuzzer
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 ast_fuzzer_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  ASTFuzzerTestTsan:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=AST fuzzer (TSan)
-          REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Fuzzer
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 ast_fuzzer_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  ASTFuzzerTestUBSan:
-    needs: [BuilderDebUBsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=AST fuzzer (UBSan)
-          REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Fuzzer
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 ast_fuzzer_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  ASTFuzzerTestMSan:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=AST fuzzer (MSan)
-          REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Fuzzer
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 ast_fuzzer_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  ASTFuzzerTestDebug:
-    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=AST fuzzer (debug)
-          REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Fuzzer
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 ast_fuzzer_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
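NOTE: each Cleanup step relies on the shell idiom `... ||:` ("or run the no-op builtin `:`"), so a cleanup command that finds nothing to kill cannot fail the job. An equivalent best-effort sequence in Python, for illustration only (the real steps are plain bash):

    import subprocess

    def best_effort(*cmd: str) -> None:
        # check=False mirrors `||:`: failures are ignored on purpose.
        subprocess.run(cmd, check=False)

    running = subprocess.run(["docker", "ps", "-q"], capture_output=True,
                             text=True, check=False).stdout.split()
    everything = subprocess.run(["docker", "ps", "-a", "-q"], capture_output=True,
                                text=True, check=False).stdout.split()
    if running:
        best_effort("docker", "kill", *running)
    if everything:
        best_effort("docker", "rm", "-f", *everything)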
-#############################################################################################
-############################# INTEGRATION TESTS #############################################
-#############################################################################################
-  IntegrationTestsAsan0:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (asan)
-          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsAsan1:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (asan)
-          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsAsan2:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (asan)
-          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=3
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsTsan0:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (thread)
-          REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsTsan1:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (thread)
-          REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsTsan2:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (thread)
-          REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsTsan3:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (thread)
-          REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse
-          RUN_BY_HASH_NUM=3
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsRelease0:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_release
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (release)
-          REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=2
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsRelease1:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_release
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (release)
-          REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=2
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsFlakyCheck:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_asan_flaky_check
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests flaky check (asan)
-          REPO_COPY=${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-#############################################################################################
-#################################### UNIT TESTS #############################################
-#############################################################################################
-  UnitTestsAsan:
-    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/unit_tests_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Unit tests (asan)
-          REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Unit test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 unit_tests_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  UnitTestsReleaseClang:
-    needs: [BuilderBinRelease]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/unit_tests_asan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Unit tests (release-clang)
-          REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Unit test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 unit_tests_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  UnitTestsTsan:
-    needs: [BuilderDebTsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/unit_tests_tsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Unit tests (tsan)
-          REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Unit test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 unit_tests_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  UnitTestsMsan:
-    needs: [BuilderDebMsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/unit_tests_msan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Unit tests (msan)
-          REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Unit test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 unit_tests_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  UnitTestsUBsan:
-    needs: [BuilderDebUBsan]
-    runs-on: [self-hosted, fuzzer-unit-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/unit_tests_ubsan
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Unit tests (ubsan)
-          REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Unit test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 unit_tests_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
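NOTE: none of the test jobs rebuilds ClickHouse; each one `needs` a builder job and downloads every uploaded report artifact into $REPORTS_PATH (download-artifact with only `path:` fetches all artifacts), and the check script then locates the build it needs by CHECK_NAME. A sketch of that lookup, under the assumption that builders upload build_urls_*.json files (the real logic lives in tests/ci helpers and differs in detail):

    import json
    import os
    from pathlib import Path

    reports = Path(os.environ.get("REPORTS_PATH", "reports_dir"))
    urls = []
    for report in reports.rglob("build_urls_*.json"):  # assumed artifact naming
        urls.extend(json.loads(report.read_text()))
    packages = [u for u in urls if u.endswith(".deb")]
    print(f"{len(packages)} packages available to this check")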
-#############################################################################################
-#################################### PERFORMANCE TESTS ######################################
-#############################################################################################
-  PerformanceComparisonX86-0:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonX86-1:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonX86-2:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonX86-3:
-    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=3
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonAarch-0:
-    needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison Aarch64
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=0
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonAarch-1:
-    needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison Aarch64
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=1
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonAarch-2:
-    needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison Aarch64
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=2
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-  PerformanceComparisonAarch-3:
-    needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/performance_comparison
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison Aarch64
-          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
-          RUN_BY_HASH_NUM=3
-          RUN_BY_HASH_TOTAL=4
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Performance Comparison
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 performance_comparison_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
-#############################################################################################
-###################################### JEPSEN TESTS #########################################
-#############################################################################################
-  Jepsen:
-    # This is special test NOT INCLUDED in FinishCheck
-    # When it's skipped, all dependent tasks will be skipped too.
-    # DO NOT add it there
-    if: contains(github.event.pull_request.labels.*.name, 'jepsen-test')
-    needs: [BuilderBinRelease]
-    uses: ./.github/workflows/jepsen.yml
-
-  FinishCheck:
-    needs:
-      - StyleCheck
-      - DockerHubPush
-      - DockerServerImages
-      - CheckLabels
-      - BuilderReport
-      - BuilderSpecialReport
-      - FastTest
-      - FunctionalStatelessTestDebug0
-      - FunctionalStatelessTestDebug1
-      - FunctionalStatelessTestDebug2
-      - FunctionalStatelessTestRelease
-      - FunctionalStatelessTestReleaseDatabaseReplicated0
-      - FunctionalStatelessTestReleaseDatabaseReplicated1
-      - FunctionalStatelessTestReleaseWideParts
-      - FunctionalStatelessTestAarch64
-      - FunctionalStatelessTestAsan0
-      - FunctionalStatelessTestAsan1
-      - FunctionalStatelessTestTsan0
-      - FunctionalStatelessTestTsan1
-      - FunctionalStatelessTestTsan2
-      - FunctionalStatelessTestMsan0
-      - FunctionalStatelessTestMsan1
-      - FunctionalStatelessTestMsan2
-      - FunctionalStatelessTestUBsan
-      - FunctionalStatefulTestDebug
-      - FunctionalStatefulTestRelease
-      - FunctionalStatefulTestAarch64
-      - FunctionalStatefulTestAsan
-      - FunctionalStatefulTestTsan
-      - FunctionalStatefulTestMsan
-      - FunctionalStatefulTestUBsan
-      - FunctionalStatelessTestReleaseS3
-      - StressTestDebug
-      - StressTestAsan
-      - StressTestTsan
-      - StressTestMsan
-      - StressTestUBsan
-      - ASTFuzzerTestDebug
-      - ASTFuzzerTestAsan
-      - ASTFuzzerTestTsan
-      - ASTFuzzerTestMSan
-      - ASTFuzzerTestUBSan
-      - IntegrationTestsAsan0
-      - IntegrationTestsAsan1
-      - IntegrationTestsAsan2
-      - IntegrationTestsRelease0
-      - IntegrationTestsRelease1
-      - IntegrationTestsTsan0
-      - IntegrationTestsTsan1
-      - IntegrationTestsTsan2
-      - IntegrationTestsTsan3
-      - PerformanceComparisonX86-0
-      - PerformanceComparisonX86-1
-      - PerformanceComparisonX86-2
-      - PerformanceComparisonX86-3
-      - PerformanceComparisonAarch-0
-      - PerformanceComparisonAarch-1
-      - PerformanceComparisonAarch-2
-      - PerformanceComparisonAarch-3
-      - UnitTestsAsan
-      - UnitTestsTsan
-      - UnitTestsMsan
-      - UnitTestsUBsan
-      - UnitTestsReleaseClang
-      - SharedBuildSmokeTest
-      - CompatibilityCheck
-      - IntegrationTestsFlakyCheck
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Finish label
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py
-          python3 merge_pr.py --check-approved
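NOTE: FinishCheck fans in on every job listed in `needs`; finish_check.py then flips the pending commit status and merge_pr.py --check-approved decides whether the PR may be auto-merged. At bottom this is a commit-status update against GitHub's documented statuses endpoint; a hedged sketch (the context string is invented, and finish_check.py uses its own helpers rather than raw HTTP):

    import json
    import os
    import urllib.request

    sha = os.environ["GITHUB_SHA"]
    repo = os.environ["GITHUB_REPOSITORY"]
    req = urllib.request.Request(
        f"https://api.github.com/repos/{repo}/statuses/{sha}",
        data=json.dumps({"state": "success", "context": "CI running"}).encode(),
        headers={"Authorization": f"token {os.environ['GITHUB_TOKEN']}"},
    )
    urllib.request.urlopen(req)  # illustration only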
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index a6468bad801d..000000000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: PublishedReleaseCI
-# - Gets artifacts from S3
-# - Sends it to JFROG Artifactory
-# - Adds them to the release assets
-
-on:  # yamllint disable-line rule:truthy
-  release:
-    types:
-      - published
-
-jobs:
-  ReleasePublish:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Deploy packages and assets
-        run: |
-          GITHUB_TAG="${GITHUB_REF#refs/tags/}"
-          curl '${{ secrets.PACKAGES_RELEASE_URL }}/release/'"${GITHUB_TAG}"'?binary=binary_darwin&binary=binary_darwin_aarch64&sync=true' -d ''
-############################################################################################
-##################################### Docker images #######################################
-############################################################################################
-  DockerServerImages:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          fetch-depth: 0  # otherwise we will have no version info
-      - name: Check docker clickhouse/clickhouse-server building
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_server.py --release-type auto --version "${{ github.ref }}"
-          python3 docker_server.py --release-type auto --version "${{ github.ref }}" --no-ubuntu \
-            --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
-      - name: Cleanup
-        if: always()
-        run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH"
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 5d775cd0c995..d210c8ac7103 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -5,30 +5,39 @@ env:
   PYTHONUNBUFFERED: 1
 
 on:  # yamllint disable-line rule:truthy
+  pull_request:
+    types:
+      - synchronize
+      - reopened
+      - opened
+    branches:
+      # Anything/22.8 (e.g customizations/22.8)
+      - '**/22.8*'
+  release:
+    types:
+      - published
+      - prereleased
   push:
     branches:
-      # 22.1 and 22.10
-      - '2[1-9].[1-9][0-9]'
-      - '2[1-9].[1-9]'
+      - 'releases/22.8**'
 
 jobs:
-  DockerHubPushAarch64:
-    runs-on: [self-hosted, style-checker-aarch64]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Images check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_images_check.py --suffix aarch64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
-  DockerHubPushAmd64:
+  # DockerHubPushAarch64:
+  #   runs-on: [self-hosted, style-checker-aarch64]
+  #   steps:
+  #     - name: Check out repository code
+  #       uses: ClickHouse/checkout@v1
+  #     - name: Images check
+  #       run: |
+  #         cd "$GITHUB_WORKSPACE/tests/ci"
+  #         python3 docker_images_check.py --suffix aarch64
+  #     - name: Upload images files to artifacts
+  #       uses: actions/upload-artifact@v2
+  #       with:
+  #         name: changed_images_aarch64
+  #         path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  # Former DockerHubPushAmd64
+  DockerHubPush:
     runs-on: [self-hosted, style-checker]
     steps:
       - name: Check out repository code
@@ -39,38 +48,41 @@ jobs:
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_images_check.py --suffix amd64
-      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
-  DockerHubPush:
-    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Download changed aarch64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_aarch64
-          path: ${{ runner.temp }}
-      - name: Download changed amd64 images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images_amd64
-          path: ${{ runner.temp }}
-      - name: Images check
+      # TODO(vnemkov): remove this step if you uncomment DockerHubPushAarch64 and DockerHubPush below.
+      # The rest of the pipeline expects changed_images.json, which was generated by previous version of DockerHubPush.
+      - name: Rename artifact
         run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
+          mv ${{ runner.temp }}/docker_images_check/changed_images_amd64.json ${{ runner.temp }}/docker_images_check/changed_images.json
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v3
         with:
           name: changed_images
-          path: ${{ runner.temp }}/changed_images.json
+          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+  # DockerHubPush:
+  #   needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+  #   runs-on: [self-hosted, style-checker]
+  #   steps:
+  #     - name: Check out repository code
+  #       uses: ClickHouse/checkout@v1
+  #     - name: Download changed aarch64 images
+  #       uses: actions/download-artifact@v2
+  #       with:
+  #         name: changed_images_aarch64
+  #         path: ${{ runner.temp }}
+  #     - name: Download changed amd64 images
+  #       uses: actions/download-artifact@v2
+  #       with:
+  #         name: changed_images_amd64
+  #         path: ${{ runner.temp }}
+  #     - name: Images check
+  #       run: |
+  #         cd "$GITHUB_WORKSPACE/tests/ci"
+  #         python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
+  #     - name: Upload images files to artifacts
+  #       uses: actions/upload-artifact@v2
+  #       with:
+  #         name: changed_images
+  #         path: ${{ runner.temp }}/changed_images.json
   CompatibilityCheck:
     needs: [BuilderDebRelease]
     runs-on: [self-hosted, style-checker]
@@ -119,12 +131,15 @@ jobs:
           REPO_COPY=${{runner.temp}}/build_check/ClickHouse
           CACHES_PATH=${{runner.temp}}/../ccaches
           BUILD_NAME=package_release
+          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable
           EOF
       - name: Download changed images
         uses: actions/download-artifact@v3
         with:
           name: changed_images
           path: ${{ env.IMAGES_PATH }}
+      - name: Trust My Directory
+        run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133
       - name: Check out repository code
        uses: ClickHouse/checkout@v1
         with:
@@ -151,41 +166,69 @@ jobs:
           # shellcheck disable=SC2046
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
-  BuilderDebAarch64:
-    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
+  # BuilderDebAarch64:
+  #   needs: [DockerHubPush]
+  #   runs-on: [self-hosted, builder]
+  #   steps:
+  #     - name: Set envs
+  #       run: |
+  #         cat >> "$GITHUB_ENV" << 'EOF'
+  #         TEMP_PATH=${{runner.temp}}/build_check
+  #         IMAGES_PATH=${{runner.temp}}/images_path
+  #         REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+  #         CACHES_PATH=${{runner.temp}}/../ccaches
+  #         BUILD_NAME=package_aarch64
+  #         EOF
+  #     - name: Download changed images
+  #       uses: actions/download-artifact@v2
+  #       with:
+  #         name: changed_images
+  #         path: ${{ runner.temp }}/images_path
+  #     - name: Check out repository code
+  #       uses: ClickHouse/checkout@v1
+  #       with:
+  #         fetch-depth: 0 # otherwise we will have no info about contributors
+  #     - name: Build
+  #       run: |
+  #         git -C "$GITHUB_WORKSPACE" submodule sync
+  #         git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
+  #         sudo rm -fr "$TEMP_PATH"
+  #         mkdir -p "$TEMP_PATH"
+  #         cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+  #         cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+  #     - name: Upload build URLs to artifacts
+  #       uses: actions/upload-artifact@v2
+  #       with:
+  #         name: ${{ env.BUILD_URLS }}
+  #         path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json
+  #     - name: Cleanup
+  #       if: always()
+  #       run: |
+  #         # shellcheck disable=SC2046
+  #         docker kill $(docker ps -q) ||:
+  #         # shellcheck disable=SC2046
+  #         docker rm -f $(docker ps -a -q) ||:
+  #         sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
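NOTE: the "Trust My Directory" step added above works around git's safe.directory ownership check (introduced in response to CVE-2022-24765): on self-hosted runners the checkout may be owned by a different uid, and without it every git command fails with "detected dubious ownership". The equivalent call, shown in Python only for consistency with the other sketches:

    import subprocess

    # Same effect as the workflow step:
    #   git config --global --add safe.directory *
    subprocess.run(
        ["git", "config", "--global", "--add", "safe.directory", "*"],
        check=True,
    )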
+############################################################################################
+##################################### Docker images #######################################
+############################################################################################
+  DockerServerImages:
+    needs:
+      - BuilderDebRelease
+      # - BuilderDebAarch64
+    runs-on: [self-hosted, style-checker]
     steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          BUILD_NAME=package_aarch64
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ runner.temp }}/images_path
       - name: Check out repository code
         uses: ClickHouse/checkout@v1
         with:
           clear-repository: true
-          submodules: true
-          fetch-depth: 0 # For a proper version and performance artifacts
-      - name: Build
+          fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
+      - name: Check docker clickhouse/clickhouse-server building
         run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        uses: actions/upload-artifact@v3
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 docker_server.py --release-type head --no-push
+          python3 docker_server.py --release-type head --no-push --no-ubuntu \
+            --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup
         if: always()
         run: |
@@ -193,42 +236,42 @@ jobs:
           docker kill $(docker ps -q) ||:
           # shellcheck disable=SC2046
           docker rm -f $(docker ps -a -q) ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
-  BuilderDebAsan:
-    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
+          sudo rm -fr "$TEMP_PATH"
+############################################################################################
+##################################### BUILD REPORTER #######################################
+############################################################################################
+  BuilderReport:
+    needs:
+      - BuilderDebRelease
+      # - BuilderDebAarch64
+    runs-on: [self-hosted, style-checker]
+    if: ${{ success() || failure() }}
    steps:
       - name: Set envs
         run: |
           cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          BUILD_NAME=package_asan
+          CHECK_NAME=ClickHouse build check
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          TEMP_PATH=${{runner.temp}}/report_check
+          NEEDS_DATA_PATH=${{runner.temp}}/needs.json
           EOF
-      - name: Download changed images
+      - name: Download json reports
         uses: actions/download-artifact@v3
         with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
+          path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
         uses: ClickHouse/checkout@v1
         with:
           clear-repository: true
-          submodules: true
-      - name: Build
+      - name: Report Builder
         run: |
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: 
actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cat > "$NEEDS_DATA_PATH" << 'EOF' + ${{ toJSON(needs) }} + EOF + cd "$GITHUB_WORKSPACE/tests/ci" + python3 build_report_check.py "$CHECK_NAME" - name: Cleanup if: always() run: | @@ -236,85 +280,94 @@ jobs: docker kill $(docker ps -q) ||: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebUBsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + # BuilderSpecialReport: + # needs: + # # - BuilderBinDarwin + # - BuilderBinDarwinAarch64 + # runs-on: [self-hosted, style-checker] + # if: ${{ success() || failure() }} + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/report_check + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=ClickHouse special build check + # NEEDS_DATA_PATH=${{runner.temp}}/needs.json + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: ClickHouse/checkout@v1 + # with: + # clear-repository: true + # - name: Report Builder + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cat > "$NEEDS_DATA_PATH" << 'EOF' + # ${{ toJSON(needs) }} + # EOF + # cd "$GITHUB_WORKSPACE/tests/ci" + # python3 build_report_check.py "$CHECK_NAME" + # - name: Cleanup + # if: always() + # run: | + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: + # sudo rm -fr "$TEMP_PATH" + MarkReleaseReady: + needs: + # - BuilderBinDarwin + # - BuilderBinDarwinAarch64 + - BuilderDebRelease + # - BuilderDebAarch64 + runs-on: [self-hosted, style-checker] steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - name: Check out repository code uses: ClickHouse/checkout@v1 with: clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() + - name: Mark Commit Release Ready run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebTsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + cd "$GITHUB_WORKSPACE/tests/ci" + python3 mark_release_ready.py +############################################################################################## +########################### FUNCTIONAl STATELESS TESTS ####################################### +############################################################################################## + FunctionalStatelessTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, 
func-tester] steps: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan + TEMP_PATH=${{runner.temp}}/stateless_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateless tests (release) + REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse + KILL_TIMEOUT=10800 EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code uses: ClickHouse/checkout@v1 with: clear-repository: true - submodules: true - - name: Build + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | @@ -322,1272 +375,74 @@ jobs: docker kill $(docker ps -q) ||: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebMsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + # FunctionalStatelessTestAarch64: + # needs: [BuilderDebAarch64] + # runs-on: [self-hosted, func-tester-aarch64] + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/stateless_release + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=Stateless tests (aarch64) + # REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse + # KILL_TIMEOUT=10800 + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: ClickHouse/checkout@v1 + # with: + # clear-repository: true + # - name: Functional test + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + # cd "$REPO_COPY/tests/ci" + # python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + # - name: Cleanup + # if: always() + # run: | + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: + # sudo rm -fr "$TEMP_PATH" +############################################################################################## +############################ FUNCTIONAl STATEFUL TESTS ####################################### +############################################################################################## + FunctionalStatefulTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, func-tester] steps: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan + TEMP_PATH=${{runner.temp}}/stateful_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateful tests (release) + REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse + KILL_TIMEOUT=3600 EOF - - 
name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code uses: ClickHouse/checkout@v1 with: clear-repository: true - submodules: true - - name: Build + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebDebug: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwin: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwinAarch64: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat 
>> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -############################################################################################ -##################################### Docker images ####################################### -############################################################################################ - DockerServerImages: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push - python3 docker_server.py --release-type head --no-push --no-ubuntu \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################ -##################################### BUILD REPORTER ####################################### -############################################################################################ - BuilderReport: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - - BuilderDebAsan - - BuilderDebTsan - - BuilderDebUBsan - - BuilderDebMsan - - BuilderDebDebug - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - 
# shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - BuilderSpecialReport: - needs: - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - MarkReleaseReady: - needs: - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Mark Commit Release Ready - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 mark_release_ready.py -############################################################################################## -########################### FUNCTIONAl STATELESS TESTS ####################################### -############################################################################################## - FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - 
mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (address) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (address) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (thread) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - 
runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (thread) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (thread) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (memory) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ 
env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (memory) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (memory) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 
- docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -############################ FUNCTIONAl STATEFUL TESTS ####################################### -############################################################################################## - FunctionalStatefulTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAarch64: - 
needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (address) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestTsan: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (thread) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (memory) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - 
- name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -######################################### STRESS TESTS ####################################### -############################################################################################## - StressTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (address) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # 
shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - StressTestTsan: - needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (thread) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - StressTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (memory) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - StressTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (undefined) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - StressTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH 
}} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | @@ -1596,268 +451,45 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" + # FunctionalStatefulTestAarch64: + # needs: [BuilderDebAarch64] + # runs-on: [self-hosted, func-tester-aarch64] + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/stateful_release + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=Stateful tests (aarch64) + # REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse + # KILL_TIMEOUT=3600 + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: ClickHouse/checkout@v1 + # with: + # clear-repository: true + # - name: Functional test + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + # cd "$REPO_COPY/tests/ci" + # python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + # - name: Cleanup + # if: always() + # run: | + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: + # sudo rm -fr "$TEMP_PATH" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: 
Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (thread) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (thread) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - 
steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (thread) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (thread) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease0: needs: [BuilderDebRelease] runs-on: [self-hosted, stress-tester] @@ -1937,43 +569,14 @@ jobs: - DockerHubPush - DockerServerImages - BuilderReport - - BuilderSpecialReport + # - BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - FunctionalStatelessTestRelease - - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestUBsan - - FunctionalStatefulTestDebug + # - FunctionalStatelessTestAarch64 - FunctionalStatefulTestRelease - - FunctionalStatefulTestAarch64 - - FunctionalStatefulTestAsan - - FunctionalStatefulTestTsan - - FunctionalStatefulTestMsan - - FunctionalStatefulTestUBsan - - StressTestDebug - - StressTestAsan - - StressTestTsan - - StressTestMsan - - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 + # - FunctionalStatefulTestAarch64 - IntegrationTestsRelease0 - IntegrationTestsRelease1 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - CompatibilityCheck runs-on: [self-hosted, style-checker] steps: diff --git a/.github/workflows/tags_stable.yml b/.github/workflows/tags_stable.yml deleted file mode 100644 index f5b42e9c882a..000000000000 --- a/.github/workflows/tags_stable.yml +++ /dev/null @@ 
-1,72 +0,0 @@ -name: TagsStableWorkflow -# - Gets artifacts from S3 -# - Sends it to JFROG Artifactory -# - Adds them to the release assets - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -on: # yamllint disable-line rule:truthy - push: - tags: - - 'v*-prestable' - - 'v*-stable' - - 'v*-lts' - workflow_dispatch: - inputs: - tag: - description: 'Test tag' - required: true - type: string - - -jobs: - UpdateVersions: - runs-on: [self-hosted, style-checker] - steps: - - name: Set test tag - if: github.event_name == 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV" - - name: Get tag name - if: github.event_name != 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV" - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - ref: master - fetch-depth: 0 - - name: Update versions, docker version, changelog, security - env: - GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - run: | - ./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv - ./utils/list-versions/update-docker-version.sh - GID=$(id -g "${UID}") - docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \ - --volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \ - /ClickHouse/utils/changelog/changelog.py -v --debug-helpers \ - --gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \ - --output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}" - git add "./docs/changelogs/${GITHUB_TAG}.md" - python3 ./utils/security-generator/generate_security.py > SECURITY.md - git diff HEAD - - name: Create Pull Request - uses: peter-evans/create-pull-request@v3 - with: - author: "robot-clickhouse " - token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - committer: "robot-clickhouse " - commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }} - branch: auto/${{ env.GITHUB_TAG }} - assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher - delete-branch: true - title: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }} - labels: do not test - body: | - Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }} - - ### Changelog category (leave one): - - Not for changelog (changelog entry is not required) diff --git a/.github/workflows/woboq.yml b/.github/workflows/woboq.yml deleted file mode 100644 index 326d19ac5d3d..000000000000 --- a/.github/workflows/woboq.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: WoboqBuilder -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -concurrency: - group: woboq -on: # yamllint disable-line rule:truthy - schedule: - - cron: '0 */18 * * *' - workflow_dispatch: -jobs: - # don't use dockerhub push because this image updates so rarely - WoboqCodebrowser: - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/codebrowser - REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse - IMAGES_PATH=${{runner.temp}}/images_path - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: 'true' - - name: Codebrowser - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - 
docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index f6be1e0967bd..8e49f83f13c1 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -2,11 +2,16 @@ # NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION, # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes. -SET(VERSION_REVISION 54465) +SET(VERSION_REVISION 54466) SET(VERSION_MAJOR 22) SET(VERSION_MINOR 8) SET(VERSION_PATCH 13) + SET(VERSION_GITHASH 86b0ecd5d513d6f32ad01b7046ab761d4f2f177c) -SET(VERSION_DESCRIBE v22.8.13.1-lts) -SET(VERSION_STRING 22.8.13.1) + +SET(VERSION_TWEAK 21) +SET(VERSION_FLAVOUR altinitystable) + +SET(VERSION_DESCRIBE v22.8.13.21.altinitystable) +SET(VERSION_STRING 22.8.13.21.altinitystable) # end of autochange diff --git a/cmake/version.cmake b/cmake/version.cmake index acaa772ff2ff..d785da5fe9b1 100644 --- a/cmake/version.cmake +++ b/cmake/version.cmake @@ -19,5 +19,5 @@ set (VERSION_STRING_SHORT "${VERSION_MAJOR}.${VERSION_MINOR}") math (EXPR VERSION_INTEGER "${VERSION_PATCH} + ${VERSION_MINOR}*1000 + ${VERSION_MAJOR}*1000000") if(CLICKHOUSE_OFFICIAL_BUILD) - set(VERSION_OFFICIAL " (official build)") + set(VERSION_OFFICIAL " (altinity build)") endif() diff --git a/docker/images.json b/docker/images.json index 8339205b52f4..3e5f28aca883 100644 --- a/docker/images.json +++ b/docker/images.json @@ -1,33 +1,33 @@ { "docker/packager/binary": { - "name": "clickhouse/binary-builder", + "name": "altinityinfra/binary-builder", "dependent": [ "docker/test/split_build_smoke_test", "docker/test/codebrowser" ] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", + "name": "altinityinfra/fuzzer", "dependent": [] }, "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", + "name": "altinityinfra/performance-comparison", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/packager/binary", "docker/test/base", @@ -35,121 +35,112 @@ ] }, "docker/test/stateless": { - "name": "clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/stress": { - "name": "clickhouse/stress-test", + "name": "altinityinfra/stress-test", "dependent": [] }, "docker/test/split_build_smoke_test": { - "name": "clickhouse/split-build-smoke-test", + "name": "altinityinfra/split-build-smoke-test", "dependent": [] }, "docker/test/codebrowser": { - "name": "clickhouse/codebrowser", + "name": "altinityinfra/codebrowser", "dependent": [] }, "docker/test/integration/runner": { "only_amd64": true, - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, 
"docker/test/testflows/runner": { - "name": "clickhouse/testflows-runner", + "name": "altinityinfra/testflows-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", + "name": "altinityinfra/fasttest", "dependent": [] }, "docker/test/style": { - "name": "clickhouse/style-test", + "name": "altinityinfra/style-test", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { "only_amd64": true, - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", - "dependent": [ + "name": "altinityinfra/test-base", + "dependent": [ "docker/test/stateless", "docker/test/integration/base", "docker/test/fuzzer", "docker/test/keeper-jepsen" - ] + ] }, "docker/test/integration/kerberized_hadoop": { "only_amd64": true, - "name": "clickhouse/kerberized-hadoop", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", + "name": "altinityinfra/sqlancer-test", "dependent": [] }, "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", - "dependent": [] - }, - "docker/docs/builder": { - "name": "clickhouse/docs-builder", - "dependent": [ - ] - }, - "docker/docs/release": { - "name": "clickhouse/docs-release", + "name": "altinityinfra/keeper-jepsen-test", "dependent": [] } } diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index d8d7e8cb97b1..c0e67a29dd16 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/binary-builder . 
ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} diff --git a/docker/packager/binary/build.sh b/docker/packager/binary/build.sh index c2de0e33d82b..7f119bd1504f 100755 --- a/docker/packager/binary/build.sh +++ b/docker/packager/binary/build.sh @@ -19,9 +19,13 @@ ln -sf darwin-x86_64 /build/cmake/toolchain/darwin-aarch64 # export CCACHE_LOGFILE=/build/ccache.log # export CCACHE_DEBUG=1 +# TODO(vnemkov): this might not be needed anymore, but let's keep it for the reference. Maybe remove or un-comment on next build attempt? +# https://stackoverflow.com/a/71940133 +# git config --global --add safe.directory '*' mkdir -p /build/build_docker cd /build/build_docker + rm -f CMakeCache.txt # Read cmake arguments into array (possibly empty) read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}" diff --git a/docker/packager/packager b/docker/packager/packager index 66eb568d4606..f1c1ee6eda7a 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -379,7 +379,7 @@ if __name__ == "__main__": args = parser.parse_args() - image_name = f"clickhouse/{IMAGE_TYPE}-builder" + image_name = f"altinityinfra/{IMAGE_TYPE}-builder" ch_root = args.clickhouse_repo_path diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 43cfca1fdfcf..5b09a9dceeda 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/test-base . +# docker build -t altinityinfra/test-base . ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/codebrowser/Dockerfile b/docker/test/codebrowser/Dockerfile index c7aed618f6a1..6fd99e9efbf9 100644 --- a/docker/test/codebrowser/Dockerfile +++ b/docker/test/codebrowser/Dockerfile @@ -2,7 +2,7 @@ # docker build --network=host -t clickhouse/codebrowser . # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser ARG FROM_TAG=latest -FROM clickhouse/binary-builder:$FROM_TAG +FROM altinityinfra/binary-builder:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index 7f7a8008d4ed..641c62d68d0a 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/fasttest . ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index eb4b09c173f6..2aec54bd1719 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/fuzzer . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 92dea21b1a3a..8d6321eb5d68 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/integration-test . 
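A note on the pattern repeated across these Dockerfile hunks: only the image name is swapped to altinityinfra/*, while the FROM_TAG build argument stays intact, so dependent images can still be rebuilt against a specific base tag. A minimal local sketch (the tag value "test-tag" is hypothetical, and it assumes altinityinfra/test-util:test-tag already exists locally or on the hub):

# Build the base image first, then a dependent image pinned to the same tag.
docker build --build-arg FROM_TAG=test-tag -t altinityinfra/test-base:test-tag docker/test/base
docker build --build-arg FROM_TAG=test-tag -t altinityinfra/stateless-test:test-tag docker/test/stateless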
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG SHELL ["/bin/bash", "-c"] diff --git a/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml b/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml index b63dac51522c..e5746fa209fb 100644 --- a/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: dotnet1: - image: clickhouse/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} + image: altinityinfra/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_keeper.yml b/docker/test/integration/runner/compose/docker_compose_keeper.yml index 811bbdd800d1..babdef38e37c 100644 --- a/docker/test/integration/runner/compose/docker_compose_keeper.yml +++ b/docker/test/integration/runner/compose/docker_compose_keeper.yml @@ -1,7 +1,7 @@ version: '2.3' services: zoo1: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -31,7 +31,7 @@ services: - inet6 - rotate zoo2: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -61,7 +61,7 @@ services: - inet6 - rotate zoo3: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml index e1b4d393169a..365821b3f5ea 100644 --- a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml +++ b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml @@ -4,7 +4,7 @@ services: kerberizedhdfs1: cap_add: - DAC_READ_SEARCH - image: clickhouse/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} + image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} hostname: kerberizedhdfs1 restart: always volumes: @@ -22,7 +22,7 @@ services: entrypoint: /etc/bootstrap.sh -d hdfskerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: hdfskerberos volumes: - ${KERBERIZED_HDFS_DIR}/secrets:/tmp/keytab diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml index d57e4e4d5bea..8dbdd9c74c0c 100644 --- a/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml +++ b/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml @@ -50,7 +50,7 @@ services: - label:disable kafka_kerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: kafka_kerberos volumes: - ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab diff --git a/docker/test/integration/runner/compose/docker_compose_minio.yml b/docker/test/integration/runner/compose/docker_compose_minio.yml index 6e8c826b2346..438f3486e177 100644 --- a/docker/test/integration/runner/compose/docker_compose_minio.yml +++ b/docker/test/integration/runner/compose/docker_compose_minio.yml @@ -21,14 +21,14 @@ services: # HTTP 
proxies for Minio. proxy1: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" # Redirect proxy port - "80" # Reverse proxy port - "443" # Reverse proxy port (secure) proxy2: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" - "80" @@ -36,7 +36,7 @@ services: # Empty container to run proxy resolver. resolver: - image: clickhouse/python-bottle + image: altinityinfra/python-bottle expose: - "8080" tty: true diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml index 56cc04105740..09154b584244 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: golang1: - image: clickhouse/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} + image: altinityinfra/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml index eb5ffb01baa2..a84cef915df2 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: java1: - image: clickhouse/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} + image: altinityinfra/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml index 90939449c5f3..b46eb2706c47 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: mysqljs1: - image: clickhouse/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} + image: altinityinfra/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml index 408b8ff089a9..662783a00a1f 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: php1: - image: clickhouse/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} + image: altinityinfra/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml b/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml index 904bfffdfd5b..5c8673ae3eeb 100644 --- a/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml @@ -1,6 +1,6 @@ version: '2.2' services: java: - image: clickhouse/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} + image: 
altinityinfra/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/keeper-jepsen/Dockerfile b/docker/test/keeper-jepsen/Dockerfile index a794e076ec02..b93b07189012 100644 --- a/docker/test/keeper-jepsen/Dockerfile +++ b/docker/test/keeper-jepsen/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/keeper-jepsen-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV DEBIAN_FRONTEND=noninteractive ENV CLOJURE_VERSION=1.10.3.814 diff --git a/docker/test/split_build_smoke_test/Dockerfile b/docker/test/split_build_smoke_test/Dockerfile index 5f84eb42216c..cb41859fb118 100644 --- a/docker/test/split_build_smoke_test/Dockerfile +++ b/docker/test/split_build_smoke_test/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/split-build-smoke-test . +# docker build -t altinityinfra/split-build-smoke-test . ARG FROM_TAG=latest -FROM clickhouse/binary-builder:$FROM_TAG +FROM altinityinfra/binary-builder:$FROM_TAG COPY run.sh /run.sh COPY process_split_build_smoke_test_result.py / diff --git a/docker/test/stateful/Dockerfile b/docker/test/stateful/Dockerfile index 234d0861f8bb..3076f9fb6d5d 100644 --- a/docker/test/stateful/Dockerfile +++ b/docker/test/stateful/Dockerfile @@ -1,7 +1,8 @@ # rebuild in #33610 # docker build -t clickhouse/stateful-test . ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +# TODO consider replacing clickhouse with altinityinfra dockerhub account +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ @@ -9,6 +10,8 @@ RUN apt-get update -y \ python3-requests \ nodejs \ npm \ + rpm2cpio \ + cpio \ && apt-get clean COPY s3downloader /s3downloader diff --git a/docker/test/stateful/setup_minio.sh b/docker/test/stateful/setup_minio.sh deleted file mode 120000 index 0d539f72cb34..000000000000 --- a/docker/test/stateful/setup_minio.sh +++ /dev/null @@ -1 +0,0 @@ -../stateless/setup_minio.sh \ No newline at end of file diff --git a/docker/test/stateful/setup_minio.sh b/docker/test/stateful/setup_minio.sh new file mode 100755 index 000000000000..c0deb46a9602 --- /dev/null +++ b/docker/test/stateful/setup_minio.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +USAGE='Usage for local run: + +./docker/test/stateless/setup_minio.sh { stateful | stateless } ./tests/ + +' + +set -e -x -a -u + +TEST_TYPE="$1" +shift + +case $TEST_TYPE in + stateless) QUERY_DIR=0_stateless ;; + stateful) QUERY_DIR=1_stateful ;; + *) echo "unknown test type $TEST_TYPE"; echo "${USAGE}"; exit 1 ;; +esac + +ls -lha + +mkdir -p ./minio_data + +if [ ! -f ./minio ]; then + MINIO_SERVER_VERSION=${MINIO_SERVER_VERSION:-2022-01-03T18-22-58Z} + MINIO_CLIENT_VERSION=${MINIO_CLIENT_VERSION:-2022-01-05T23-52-51Z} + case $(uname -m) in + x86_64) BIN_ARCH=amd64 ;; + aarch64) BIN_ARCH=arm64 ;; + *) echo "unknown architecture $(uname -m)"; exit 1 ;; + esac + echo 'MinIO binary not found, downloading...' 
+ + BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]') + + wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-${BIN_ARCH}/archive/minio.RELEASE.${MINIO_SERVER_VERSION}" -O ./minio \ + && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-${BIN_ARCH}/archive/mc.RELEASE.${MINIO_CLIENT_VERSION}" -O ./mc \ + && chmod +x ./mc ./minio +fi + +MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse} +MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse} + +./minio --version + +./minio server --address ":11111" ./minio_data & + +i=0 +while ! curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied +do + if [[ $i == 60 ]]; then + echo "Failed to setup minio" + exit 0 + fi + echo "Trying to connect to minio" + sleep 1 + i=$((i + 1)) +done + +lsof -i :11111 + +sleep 5 + +./mc alias set clickminio http://localhost:11111 clickhouse clickhouse +./mc admin user add clickminio test testtest +./mc admin policy set clickminio readwrite user=test +./mc mb clickminio/test +if [ "$TEST_TYPE" = "stateless" ]; then + ./mc policy set public clickminio/test +fi + + +# Upload data to Minio. By default after unpacking all tests will be in +# /usr/share/clickhouse-test/queries + +TEST_PATH=${1:-/usr/share/clickhouse-test} +MINIO_DATA_PATH=${TEST_PATH}/queries/${QUERY_DIR}/data_minio + +# Iterating over globs will cause redundant FILE variable to be a path to a file, not a filename +# shellcheck disable=SC2045 +for FILE in $(ls "${MINIO_DATA_PATH}"); do + echo "$FILE"; + ./mc cp "${MINIO_DATA_PATH}"/"$FILE" clickminio/test/"$FILE"; +done + +mkdir -p ~/.aws +cat <<EOT >> ~/.aws/credentials +[default] +aws_access_key_id=${MINIO_ROOT_USER} +aws_secret_access_key=${MINIO_ROOT_PASSWORD} +EOT diff --git a/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) b/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) new file mode 100755 index 000000000000..d077dea920c6 --- /dev/null +++ b/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) @@ -0,0 +1,77 @@ +#!/bin/bash + +# TODO: Make this file shared with stateless tests +# +# Usage for local run: +# +# ./docker/test/stateful/setup_minio.sh ./tests/ +# + +set -e -x -a -u + +rpm2cpio ./minio-20220103182258.0.0.x86_64.rpm | cpio -i --make-directories +find -name minio +cp ./usr/local/bin/minio ./ + +ls -lha + +mkdir -p ./minio_data + +if [ ! -f ./minio ]; then + echo 'MinIO binary not found, downloading...' + + BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]') + + wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-amd64/minio" \ + && chmod +x ./minio \ + && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-amd64/mc" \ + && chmod +x ./mc +fi + +MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse} +MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse} + +./minio --version +./minio server --address ":11111" ./minio_data & + +i=0 +while ! curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied +do + if [[ $i == 60 ]]; then + echo "Failed to setup minio" + exit 0 + fi + echo "Trying to connect to minio" + sleep 1 + i=$((i + 1)) +done + +lsof -i :11111 + +sleep 5 + +./mc alias set clickminio http://localhost:11111 clickhouse clickhouse +./mc admin user add clickminio test testtest +./mc admin policy set clickminio readwrite user=test +./mc mb clickminio/test + + +# Upload data to Minio.
By default after unpacking all tests will be in +# /usr/share/clickhouse-test/queries + +TEST_PATH=${1:-/usr/share/clickhouse-test} +MINIO_DATA_PATH=${TEST_PATH}/queries/1_stateful/data_minio + +# Iterating over globs will cause redundant FILE variable to be a path to a file, not a filename +# shellcheck disable=SC2045 +for FILE in $(ls "${MINIO_DATA_PATH}"); do + echo "$FILE"; + ./mc cp "${MINIO_DATA_PATH}"/"$FILE" clickminio/test/"$FILE"; +done + +mkdir -p ~/.aws +cat <<EOT >> ~/.aws/credentials +[default] +aws_access_key_id=clickhouse +aws_secret_access_key=clickhouse +EOT diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 9a31c5bbb4c1..58ac030e46c1 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/stateless-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz" @@ -38,6 +38,8 @@ RUN apt-get update -y \ zstd \ file \ pv \ + rpm2cpio \ + cpio \ && apt-get clean diff --git a/docker/test/stateless/setup_minio.sh b/docker/test/stateless/setup_minio.sh index e4625bfba752..c0deb46a9602 100755 --- a/docker/test/stateless/setup_minio.sh +++ b/docker/test/stateless/setup_minio.sh @@ -42,6 +42,7 @@ MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse} MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse} ./minio --version + ./minio server --address ":11111" ./minio_data & i=0 diff --git a/docker/test/stateless_pytest/Dockerfile b/docker/test/stateless_pytest/Dockerfile new file mode 100644 index 000000000000..c148b6212417 --- /dev/null +++ b/docker/test/stateless_pytest/Dockerfile @@ -0,0 +1,33 @@ +# rebuild in #33610 +# docker build -t clickhouse/stateless-pytest . +ARG FROM_TAG=latest +FROM altinityinfra/test-base:$FROM_TAG + +RUN apt-get update -y && \ + apt-get install -y --no-install-recommends \ + python3-pip \ + python3-setuptools \ + python3-wheel \ + brotli \ + netcat-openbsd \ + postgresql-client \ + zstd + +RUN python3 -m pip install \ + wheel \ + pytest \ + pytest-html \ + pytest-json \ + pytest-randomly \ + pytest-rerunfailures \ + pytest-timeout \ + pytest-xdist \ + pandas \ + numpy \ + scipy + +CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \ + dpkg -i package_folder/clickhouse-common-static-dbg_*.deb; \ + dpkg -i package_folder/clickhouse-server_*.deb; \ + dpkg -i package_folder/clickhouse-client_*.deb; \ + python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --reruns=1 --timeout=600 --json=test_output/report.json --html=test_output/report.html --self-contained-html diff --git a/docker/test/stress/Dockerfile b/docker/test/stress/Dockerfile index 393508fd551b..4f6834fff737 100644 --- a/docker/test/stress/Dockerfile +++ b/docker/test/stress/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/stress-test . ARG FROM_TAG=latest -FROM clickhouse/stateful-test:$FROM_TAG +FROM altinityinfra/stateful-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/unit/Dockerfile b/docker/test/unit/Dockerfile index b75bfb6661cc..378341ab8b69 100644 --- a/docker/test/unit/Dockerfile +++ b/docker/test/unit/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/unit-test .
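An aside on the two setup_minio.sh scripts above: they disable shellcheck SC2045 in order to iterate over `ls` output. A glob-based loop, sketched below and not part of this patch, would avoid the warning and handle unusual filenames; `${FILE##*/}` strips the directory prefix to recover the object name.

# Equivalent upload loop without parsing ls output (sketch only):
for FILE in "${MINIO_DATA_PATH}"/*; do
    echo "${FILE##*/}"
    ./mc cp "$FILE" clickminio/test/"${FILE##*/}"
done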
ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get install gdb diff --git a/packages/clickhouse-client.yaml b/packages/clickhouse-client.yaml index d4fd9300208e..10a8c6962004 100644 --- a/packages/clickhouse-client.yaml +++ b/packages/clickhouse-client.yaml @@ -4,8 +4,8 @@ name: "clickhouse-client" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-common-static-dbg.yaml b/packages/clickhouse-common-static-dbg.yaml index b2d2b3aaf262..2e6b446c4493 100644 --- a/packages/clickhouse-common-static-dbg.yaml +++ b/packages/clickhouse-common-static-dbg.yaml @@ -4,8 +4,8 @@ name: "clickhouse-common-static-dbg" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-common-static.yaml b/packages/clickhouse-common-static.yaml index c77914d0d692..3167e78dbc3e 100644 --- a/packages/clickhouse-common-static.yaml +++ b/packages/clickhouse-common-static.yaml @@ -4,8 +4,8 @@ name: "clickhouse-common-static" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-keeper-dbg.yaml b/packages/clickhouse-keeper-dbg.yaml index a6be9ec9e971..6041e47306f3 100644 --- a/packages/clickhouse-keeper-dbg.yaml +++ b/packages/clickhouse-keeper-dbg.yaml @@ -4,8 +4,8 @@ name: "clickhouse-keeper-dbg" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-keeper.yaml b/packages/clickhouse-keeper.yaml index f2095dda02a3..f98f4eb97d51 100644 --- a/packages/clickhouse-keeper.yaml +++ b/packages/clickhouse-keeper.yaml @@ -4,8 +4,8 @@ name: "clickhouse-keeper" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-server.yaml b/packages/clickhouse-server.yaml index fe59828ca437..90a0eaff9b47 100644 --- a/packages/clickhouse-server.yaml +++ b/packages/clickhouse-server.yaml @@ -4,8 +4,8 @@ name: "clickhouse-server" arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." 
+homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py index f3939dc89adb..11a136a372cf 100644 --- a/tests/ci/ast_fuzzer_check.py +++ b/tests/ci/ast_fuzzer_check.py @@ -24,7 +24,7 @@ from stopwatch import Stopwatch from rerun_helper import RerunHelper -IMAGE_NAME = "clickhouse/fuzzer" +IMAGE_NAME = "altinityinfra/fuzzer" def get_run_command(pr_number, sha, download_url, workspace_path, image): @@ -169,7 +169,7 @@ def get_commit(gh, commit_sha): check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) logging.info("Result: '%s', '%s', '%s'", status, description, report_url) print(f"::notice ::Report url: {report_url}") diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 4926cd72c713..f544d3955760 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -17,6 +17,7 @@ S3_BUILDS_BUCKET, S3_DOWNLOAD, TEMP_PATH, + CLICKHOUSE_STABLE_VERSION_SUFFIX, ) from s3_helper import S3Helper from pr_info import PRInfo @@ -31,7 +32,7 @@ from docker_pull_helper import get_image_with_version from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/binary-builder" +IMAGE_NAME = "altinityinfra/binary-builder" BUILD_LOG_NAME = "build_log.log" @@ -270,18 +271,22 @@ def main(): logging.info("Got version from repo %s", version.string) - official_flag = pr_info.number == 0 if "official" in build_config: official_flag = build_config["official"] - version_type = "testing" - if "release" in pr_info.labels or "release-lts" in pr_info.labels: - version_type = "stable" - official_flag = True + official_flag = True + version._flavour = version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX + # TODO (vnemkov): right now we'll use simplified version management: + # only update git hash and explicitly set stable version suffix. 
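To make the effect of the flavour concrete: with the values from cmake/autogenerated_versions.txt earlier in this patch, the version parts compose as sketched below (illustrative Python, not the actual version_helper API):

# How the version parts combine into the final string (sketch):
major, minor, patch, tweak = 22, 8, 13, 21
flavour = "altinitystable"  # VERSION_FLAVOUR, i.e. CLICKHOUSE_STABLE_VERSION_SUFFIX at build time
version_string = f"{major}.{minor}.{patch}.{tweak}.{flavour}"
assert version_string == "22.8.13.21.altinitystable"   # matches VERSION_STRING
assert f"v{version_string}" == "v22.8.13.21.altinitystable"  # matches VERSION_DESCRIBE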
+ # official_flag = pr_info.number == 0 + # version_type = "testing" + # if "release" in pr_info.labels or "release-lts" in pr_info.labels: + # version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX + # official_flag = True update_version_local(version, version_type) - logging.info("Updated local files with version") + logging.info(f"Updated local files with version : {version.string} / {version.describe}") logging.info("Build short name %s", build_name) diff --git a/tests/ci/ccache_utils.py b/tests/ci/ccache_utils.py index 864b3a8f9b64..2452b851863c 100644 --- a/tests/ci/ccache_utils.py +++ b/tests/ci/ccache_utils.py @@ -5,6 +5,7 @@ import sys import os import shutil +from env_helper import S3_BUILDS_BUCKET from pathlib import Path import requests # type: ignore diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 4b2cf4df743d..a0fafebb79f1 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,15 +180,7 @@ }, "builds_report_config": { "ClickHouse build check": [ - "package_release", - "coverity", - "package_aarch64", - "package_asan", - "package_ubsan", - "package_tsan", - "package_msan", - "package_debug", - "binary_release", + "package_release" ], "ClickHouse special build check": [ "binary_tidy", diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py index a81334860d10..4bc7ad1e6fc7 100644 --- a/tests/ci/clickhouse_helper.py +++ b/tests/ci/clickhouse_helper.py @@ -37,12 +37,8 @@ def _insert_json_str_info_impl(url, auth, db, table, json_str): url, params=params, data=json_str, headers=auth ) except Exception as e: - logging.warning( - "Received exception while sending data to %s on %s attempt: %s", - url, - i, - e, - ) + error = f"Received exception while sending data to {url} on {i} attempt: {e}" + logging.warning(error) continue logging.info("Response content '%s'", response.content) @@ -142,7 +138,7 @@ def prepare_tests_results_for_clickhouse( check_name, ): - pull_request_url = "https://github.com/ClickHouse/ClickHouse/commits/master" + pull_request_url = "https://github.com/Altinity/ClickHouse/commits/master" base_ref = "master" head_ref = "master" base_repo = pr_info.repo_full_name diff --git a/tests/ci/codebrowser_check.py b/tests/ci/codebrowser_check.py index 97036c6fc7bd..6d23a95a519d 100644 --- a/tests/ci/codebrowser_check.py +++ b/tests/ci/codebrowser_check.py @@ -40,7 +40,7 @@ def get_run_command(repo_path, output_path, image): if not os.path.exists(temp_path): os.makedirs(temp_path) - docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser") + docker_image = get_image_with_version(IMAGES_PATH, "altinityinfra/codebrowser") s3_helper = S3Helper() result_path = os.path.join(temp_path, "result_path") diff --git a/tests/ci/compatibility_check.py b/tests/ci/compatibility_check.py index 39d027ad3c0c..ac89349aeab8 100644 --- a/tests/ci/compatibility_check.py +++ b/tests/ci/compatibility_check.py @@ -24,8 +24,8 @@ from stopwatch import Stopwatch from rerun_helper import RerunHelper -IMAGE_UBUNTU = "clickhouse/test-old-ubuntu" -IMAGE_CENTOS = "clickhouse/test-old-centos" +IMAGE_UBUNTU = "altinityinfra/test-old-ubuntu" +IMAGE_CENTOS = "altinityinfra/test-old-centos" MAX_GLIBC_VERSION = "2.4" DOWNLOAD_RETRIES_COUNT = 5 CHECK_NAME = "Compatibility check" @@ -198,7 +198,7 @@ def url_filter(url): CHECK_NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "error": sys.exit(1) diff --git 
a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 7585efe035e4..a2482f6e7750 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -84,7 +84,7 @@ def get_images_dict(repo_path: str, image_file_path: str) -> ImagesDict: images_dict = json.load(dict_file) else: logging.info( - "Image file %s doesnt exists in repo %s", image_file_path, repo_path + "Image file %s doesn't exist in repo %s", image_file_path, repo_path ) return images_dict @@ -106,22 +106,23 @@ def get_changed_docker_images( str(files_changed), ) - changed_images = [] - - for dockerfile_dir, image_description in images_dict.items(): - for f in files_changed: - if f.startswith(dockerfile_dir): - name = image_description["name"] - only_amd64 = image_description.get("only_amd64", False) - logging.info( - "Found changed file '%s' which affects " - "docker image '%s' with path '%s'", - f, - name, - dockerfile_dir, - ) - changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) - break + # Rebuild all images + changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()] + + # for dockerfile_dir, image_description in images_dict.items(): + # for f in files_changed: + # if f.startswith(dockerfile_dir): + # name = image_description["name"] + # only_amd64 = image_description.get("only_amd64", False) + # logging.info( + # "Found changed file '%s' which affects " + # "docker image '%s' with path '%s'", + # f, + # name, + # dockerfile_dir, + # ) + # changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) + # break # The order is important: dependents should go later than bases, so that # they are built with updated base versions. @@ -253,6 +254,19 @@ def build_and_push_one_image( f"--tag {image.repo}:{version_string} " f"{cache_from} " f"--cache-to type=inline,mode=max " + # FIXME: many tests utilize packages without specifying version, hence docker pulls :latest + # this will fail when multiple jobs are executed on different machines and + # push different images as latest. + # To fix it we may: + # - require jobs to be executed on the same machine the images were built on (no parallelism) + # - change all the test's code (mostly docker-compose files in integration tests) + # that depend on said images and push version somehow into docker-compose. + # (and that is lots of work and many potential conflicts with upstream) + # - tag and push all images as :latest and then just pray that collisions are infrequent, + # and even if a collision happens, the image is not that different and would still work properly. + # (^^^ CURRENT SOLUTION ^^^) But this is just a numbers game, it will blow up at some point.
+ # - do something crazy + f"--tag {image.repo}:latest " f"{push_arg}" f"--progress plain {image.full_path}" ) @@ -261,6 +275,7 @@ def build_and_push_one_image( retcode = proc.wait() if retcode != 0: + logging.error("Building image {} failed with error: {}\n{}".format(image, retcode, ''.join(list(open(build_log, 'rt'))))) return False, build_log logging.info("Processing of %s successfully finished", image.repo) @@ -407,8 +422,8 @@ def main(): if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -493,7 +508,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == "error": sys.exit(1) diff --git a/tests/ci/docker_manifests_merge.py b/tests/ci/docker_manifests_merge.py index 246793f8f661..fd72320dc2ed 100644 --- a/tests/ci/docker_manifests_merge.py +++ b/tests/ci/docker_manifests_merge.py @@ -26,7 +26,7 @@ def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, - description="The program gets images from changed_images_*.json, merges imeges " + description="The program gets images from changed_images_*.json, merges images " "with different architectures into one manifest and pushes back to docker hub", ) @@ -173,8 +173,8 @@ def main(): args = parse_args() if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -233,7 +233,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if __name__ == "__main__": diff --git a/tests/ci/docker_pull_helper.py b/tests/ci/docker_pull_helper.py index 04817ed7de37..1154cf9b8dd3 100644 --- a/tests/ci/docker_pull_helper.py +++ b/tests/ci/docker_pull_helper.py @@ -5,6 +5,7 @@ import time import subprocess import logging +import traceback from typing import Optional @@ -48,10 +49,27 @@ def get_images_with_versions( for image_name in required_image: docker_image = DockerImage(image_name, version) if image_name in images: - docker_image.version = images[image_name] + image_version = images[image_name] + # NOTE(vnemkov): For some reason we can get version as list of versions, + # in this case choose one that has commit hash and hence is the longest string. + # E.g. from ['latest-amd64', '0-amd64', '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64'] + # choose '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64' since it 100% points to proper commit. 
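The NOTE above describes picking, out of several returned tags, the one that contains the commit hash; the loop that follows implements this by taking the longest string, which is equivalent to this one-liner (sketch only, using the example list from the comment):

image_version = ['latest-amd64', '0-amd64', '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64']
# The commit-hash tag is the longest entry, so selecting by length picks it.
assert max(image_version, key=len) == '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64'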
+ if isinstance(image_version, list): + max_len = 0 + max_len_version = '' + for version in image_version: + if len(version) > max_len: + max_len = len(version) + max_len_version = version + logging.debug(f"selected version {max_len_version} from {image_version}") + image_version = max_len_version + + docker_image.version = image_version + docker_images.append(docker_image) if pull: + latest_error = None for docker_image in docker_images: for i in range(10): try: @@ -65,7 +83,8 @@ def get_images_with_versions( except Exception as ex: latest_error = ex time.sleep(i * 3) - logging.info("Got execption pulling docker %s", ex) + logging.info("Got exception pulling docker %s", ex) + latest_error = traceback.format_exc() else: raise Exception( "Cannot pull dockerhub for image docker pull " diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index fd28e5a18908..515a0fd118f8 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -53,7 +53,7 @@ def parse_args() -> argparse.Namespace: "--version", type=version_arg, default=get_version_from_repo(git=git).string, - help="a version to build, automaticaly got from version_helper, accepts either " + help="a version to build, automatically got from version_helper, accepts either " "tag ('refs/tags/' is removed automatically) or a normal 22.2.2.2 format", ) parser.add_argument( @@ -74,7 +74,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--image-repo", type=str, - default="clickhouse/clickhouse-server", + default="altinityinfra/clickhouse-server", help="image name on docker hub", ) parser.add_argument( @@ -314,8 +314,8 @@ def main(): if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -362,7 +362,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status != "success": sys.exit(1) diff --git a/tests/ci/docker_test.py b/tests/ci/docker_test.py index 1848300e2f61..527460b7068f 100644 --- a/tests/ci/docker_test.py +++ b/tests/ci/docker_test.py @@ -37,61 +37,61 @@ def test_get_changed_docker_images(self): self.maxDiff = None expected = sorted( [ - di.DockerImage("docker/test/base", "clickhouse/test-base", False), - di.DockerImage("docker/docs/builder", "clickhouse/docs-builder", True), + di.DockerImage("docker/test/base", "altinityinfra/test-base", False), + di.DockerImage("docker/docs/builder", "altinityinfra/docs-builder", True), di.DockerImage( "docker/test/stateless", - "clickhouse/stateless-test", + "altinityinfra/stateless-test", False, - "clickhouse/test-base", + "altinityinfra/test-base", ), di.DockerImage( "docker/test/integration/base", - "clickhouse/integration-test", + "altinityinfra/integration-test", False, - "clickhouse/test-base", - ), - di.DockerImage( - "docker/test/fuzzer", - "clickhouse/fuzzer", - False, - "clickhouse/test-base", + "altinityinfra/test-base", ), + # di.DockerImage( + # "docker/test/fuzzer", + # "altinityinfra/fuzzer", + # False, + # "altinityinfra/test-base", + # ), di.DockerImage( "docker/test/keeper-jepsen", - "clickhouse/keeper-jepsen-test", - False, - "clickhouse/test-base", - ), - di.DockerImage( - 
"docker/docs/check", - "clickhouse/docs-check", - False, - "clickhouse/docs-builder", - ), - di.DockerImage( - "docker/docs/release", - "clickhouse/docs-release", + "altinityinfra/keeper-jepsen-test", False, - "clickhouse/docs-builder", + "altinityinfra/test-base", ), + # di.DockerImage( + # "docker/docs/check", + # "altinityinfra/docs-check", + # False, + # "altinityinfra/docs-builder", + # ), + # di.DockerImage( + # "docker/docs/release", + # "altinityinfra/docs-release", + # False, + # "altinityinfra/docs-builder", + # ), di.DockerImage( "docker/test/stateful", - "clickhouse/stateful-test", + "altinityinfra/stateful-test", False, - "clickhouse/stateless-test", + "altinityinfra/stateless-test", ), di.DockerImage( "docker/test/unit", - "clickhouse/unit-test", + "altinityinfra/unit-test", False, - "clickhouse/stateless-test", + "altinityinfra/stateless-test", ), di.DockerImage( "docker/test/stress", - "clickhouse/stress-test", + "altinityinfra/stress-test", False, - "clickhouse/stateful-test", + "altinityinfra/stateful-test", ), ] ) diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py index c95770b646df..0248ffcc1d6e 100644 --- a/tests/ci/docs_check.py +++ b/tests/ci/docs_check.py @@ -70,7 +70,7 @@ if not os.path.exists(temp_path): os.makedirs(temp_path) - docker_image = get_image_with_version(temp_path, "clickhouse/docs-builder") + docker_image = get_image_with_version(temp_path, "altinityinfra/docs-builder") test_output = os.path.join(temp_path, "docs_check_log") if not os.path.exists(test_output): @@ -139,6 +139,6 @@ NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == "error": sys.exit(1) diff --git a/tests/ci/docs_release.py b/tests/ci/docs_release.py index 355e4af7426b..cd8829485628 100644 --- a/tests/ci/docs_release.py +++ b/tests/ci/docs_release.py @@ -49,7 +49,7 @@ def parse_args() -> argparse.Namespace: if not os.path.exists(temp_path): os.makedirs(temp_path) - docker_image = get_image_with_version(temp_path, "clickhouse/docs-release") + docker_image = get_image_with_version(temp_path, "altinityinfra/docs-release") test_output = os.path.join(temp_path, "docs_release_log") if not os.path.exists(test_output): diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index a5a4913be0b4..13a7a1ffd7e5 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -13,7 +13,7 @@ CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN") GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "") GITHUB_JOB = os.getenv("GITHUB_JOB", "local") -GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse") +GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "Altinity/ClickHouse") GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "0") GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com") GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root) @@ -22,9 +22,11 @@ REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports"))) REPO_COPY = os.getenv("REPO_COPY", git_root) RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp"))) -S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds") -S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports") +S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts") +S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts") S3_URL = os.getenv("S3_URL", 
"https://s3.amazonaws.com") +CLICKHOUSE_STABLE_VERSION_SUFFIX = os.getenv("CLICKHOUSE_STABLE_VERSION_SUFFIX", "stable") + S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL) S3_ARTIFACT_DOWNLOAD_TEMPLATE = ( f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/" diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py index 03e427268083..eececee52963 100644 --- a/tests/ci/fast_test_check.py +++ b/tests/ci/fast_test_check.py @@ -106,7 +106,7 @@ def process_results(result_folder): logging.info("Check is already finished according to github status, exiting") sys.exit(0) - docker_image = get_image_with_version(temp_path, "clickhouse/fasttest") + docker_image = get_image_with_version(temp_path, "altinityinfra/fasttest") s3_helper = S3Helper() @@ -221,7 +221,7 @@ def process_results(result_folder): report_url, NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) # Refuse other checks to run if fast test failed if state != "success": diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py index 388f93f34ece..bb0b17a59aa0 100644 --- a/tests/ci/functional_test_check.py +++ b/tests/ci/functional_test_check.py @@ -58,9 +58,9 @@ def get_additional_envs(check_name, run_by_hash_num, run_by_hash_total): def get_image_name(check_name): if "stateless" in check_name.lower(): - return "clickhouse/stateless-test" + return "altinityinfra/stateless-test" if "stateful" in check_name.lower(): - return "clickhouse/stateful-test" + return "altinityinfra/stateful-test" else: raise Exception(f"Cannot deduce image name based on check name {check_name}") @@ -239,10 +239,12 @@ def parse_args(): run_by_hash_total = 0 check_name_with_group = check_name - rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) - if rerun_helper.is_already_finished_by_status(): - logging.info("Check is already finished according to github status, exiting") - sys.exit(0) + # Always re-run, even if it finished in previous run. + # gh = Github(get_best_robot_token()) + # rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) + # if rerun_helper.is_already_finished_by_status(): + # logging.info("Check is already finished according to github status, exiting") + # sys.exit(0) tests_to_run = [] if run_changed_tests: @@ -360,7 +362,7 @@ def parse_args(): report_url, check_name_with_group, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state != "success": if FORCE_TESTS_LABEL in pr_info.labels: diff --git a/tests/ci/get_robot_token.py b/tests/ci/get_robot_token.py index 163e1ce071e4..df241d6239b9 100644 --- a/tests/ci/get_robot_token.py +++ b/tests/ci/get_robot_token.py @@ -11,7 +11,14 @@ def get_parameter_from_ssm(name, decrypt=True, client=None): return client.get_parameter(Name=name, WithDecryption=decrypt)["Parameter"]["Value"] -def get_best_robot_token(token_prefix_env_name="github_robot_token_"): +# Original CI code uses the "_original" version of this method. Each robot token is rate limited +# and the original implementation selects the "best one". To make it simpler and iterate faster, +# we are using only one robot and keeping the method signature. 
In the future we might reconsider +# having multiple robot tokens +def get_best_robot_token(token_prefix_env_name="github_robot_token", total_tokens=4): + return get_parameter_from_ssm(token_prefix_env_name) + +def get_best_robot_token_original(token_prefix_env_name="github_robot_token_", total_tokens=4): client = boto3.client("ssm", region_name="us-east-1") parameters = client.describe_parameters( ParameterFilters=[ diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index 77c2fc9cf05f..10f17f083889 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -12,7 +12,7 @@ # \A and \Z match only start and end of the whole string RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z" TAG_REGEXP = ( - r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts)\Z" + r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts|altinitystable)\Z" ) SHA_REGEXP = r"\A([0-9]|[a-f]){40}\Z" diff --git a/tests/ci/git_test.py b/tests/ci/git_test.py index 3aedd8a8dea1..0c28c8d38421 100644 --- a/tests/ci/git_test.py +++ b/tests/ci/git_test.py @@ -70,6 +70,9 @@ def test_tags(self): with self.assertRaises(Exception): setattr(self.git, tag_attr, tag) + def check_tag(self): + self.git.check_tag("v21.12.333.4567-altinitystable") + def test_tweak(self): self.git.commits_since_tag = 0 self.assertEqual(self.git.tweak, 1) @@ -79,3 +82,6 @@ def test_tweak(self): self.assertEqual(self.git.tweak, 22224) self.git.commits_since_tag = 0 self.assertEqual(self.git.tweak, 22222) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index 3709a7271d7e..bd1513f87f22 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -36,17 +36,17 @@ # When update, update # integration/ci-runner.py:ClickhouseIntegrationTestsRunner.get_images_names too IMAGES = [ - "clickhouse/integration-tests-runner", - "clickhouse/mysql-golang-client", - "clickhouse/mysql-java-client", - "clickhouse/mysql-js-client", - "clickhouse/mysql-php-client", - "clickhouse/postgresql-java-client", - "clickhouse/integration-test", - "clickhouse/kerberos-kdc", - "clickhouse/kerberized-hadoop", - "clickhouse/integration-helper", - "clickhouse/dotnet-client", + "altinityinfra/integration-tests-runner", + "altinityinfra/mysql-golang-client", + "altinityinfra/mysql-java-client", + "altinityinfra/mysql-js-client", + "altinityinfra/mysql-php-client", + "altinityinfra/postgresql-java-client", + "altinityinfra/integration-test", + "altinityinfra/kerberos-kdc", + "altinityinfra/kerberized-hadoop", + "altinityinfra/integration-helper", + "altinityinfra/dotnet-client", ] @@ -182,10 +182,12 @@ def parse_args(): gh = Github(get_best_robot_token(), per_page=100) - rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) - if rerun_helper.is_already_finished_by_status(): - logging.info("Check is already finished according to github status, exiting") - sys.exit(0) + # Always re-run, even if it finished in previous run. 
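For the TAG_REGEXP change above, a quick sanity check that the new suffix is accepted; the tag in the first assert is the same one git_test.py's check_tag uses, while the tag in the second is a made-up counterexample:

import re

TAG_REGEXP = (
    r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts|altinitystable)\Z"
)
assert re.match(TAG_REGEXP, "v21.12.333.4567-altinitystable")
assert not re.match(TAG_REGEXP, "v21.12.333.4567-altinityedge")  # hypothetical suffix, must not match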
+ # gh = Github(get_best_robot_token()) + # rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) + # if rerun_helper.is_already_finished_by_status(): + # logging.info("Check is already finished according to github status, exiting") + # sys.exit(0) images = get_images_with_versions(reports_path, IMAGES) images_with_versions = {i.name: i.version for i in images} @@ -287,7 +289,7 @@ def parse_args(): check_name_with_group, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "error": sys.exit(1) diff --git a/tests/ci/keeper_jepsen_check.py b/tests/ci/keeper_jepsen_check.py index a0695d3283a1..7490b8de7222 100644 --- a/tests/ci/keeper_jepsen_check.py +++ b/tests/ci/keeper_jepsen_check.py @@ -26,7 +26,7 @@ JEPSEN_GROUP_NAME = "jepsen_group" DESIRED_INSTANCE_COUNT = 3 -IMAGE_NAME = "clickhouse/keeper-jepsen-test" +IMAGE_NAME = "altinityinfra/keeper-jepsen-test" CHECK_NAME = "ClickHouse Keeper Jepsen" @@ -271,5 +271,5 @@ def get_run_command( report_url, CHECK_NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) clear_autoscaling_group() diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py index 40befc78de20..bade93548b4f 100644 --- a/tests/ci/performance_comparison_check.py +++ b/tests/ci/performance_comparison_check.py @@ -22,7 +22,7 @@ from s3_helper import S3Helper from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/performance-comparison" +IMAGE_NAME = "altinityinfra/performance-comparison" def get_run_command( diff --git a/tests/ci/run_check.py b/tests/ci/run_check.py index 5e6542f6e4c6..ea66c845d374 100644 --- a/tests/ci/run_check.py +++ b/tests/ci/run_check.py @@ -57,6 +57,7 @@ "Not for changelog", ], "pr-performance": ["Performance Improvement"], + "arthurpassos" # Altinity } CATEGORY_TO_LABEL = {c: lb for lb, categories in LABELS.items() for c in categories} diff --git a/tests/ci/split_build_smoke_check.py b/tests/ci/split_build_smoke_check.py index c6bf1051c879..5ccf2b3dd467 100644 --- a/tests/ci/split_build_smoke_check.py +++ b/tests/ci/split_build_smoke_check.py @@ -20,7 +20,7 @@ from rerun_helper import RerunHelper -DOCKER_IMAGE = "clickhouse/split-build-smoke-test" +DOCKER_IMAGE = "altinityinfra/split-build-smoke-test" DOWNLOAD_RETRIES_COUNT = 5 RESULT_LOG_NAME = "run.log" CHECK_NAME = "Split build smoke test" @@ -148,7 +148,7 @@ def get_run_command(build_path, result_folder, server_log_folder, docker_image): CHECK_NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "error": sys.exit(1) diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index 8c5889108ad0..9484a905ff63 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -119,7 +119,7 @@ def process_results(result_folder, server_log_path, run_log_path): logging.info("Check is already finished according to github status, exiting") sys.exit(0) - docker_image = get_image_with_version(reports_path, "clickhouse/stress-test") + docker_image = get_image_with_version(reports_path, "altinityinfra/stress-test") packages_path = os.path.join(temp_path, "packages") if not os.path.exists(packages_path): @@ -179,7 +179,7 @@ def process_results(result_folder, 
server_log_path, run_log_path): report_url, check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "error": sys.exit(1) diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index 23a1dd467d72..aa848eb7ab62 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -165,7 +165,7 @@ def commit_push_staged(pr_info: PRInfo): if not os.path.exists(temp_path): os.makedirs(temp_path) - docker_image = get_image_with_version(temp_path, "clickhouse/style-test") + docker_image = get_image_with_version(temp_path, "altinityinfra/style-test") s3_helper = S3Helper() cmd = ( @@ -202,7 +202,7 @@ def commit_push_staged(pr_info: PRInfo): report_url, NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state in ["error", "failure"]: sys.exit(1) diff --git a/tests/ci/tests/docker_images.json b/tests/ci/tests/docker_images.json index ca5c516bccba..53ad258f6ec9 100644 --- a/tests/ci/tests/docker_images.json +++ b/tests/ci/tests/docker_images.json @@ -1,10 +1,10 @@ { "docker/packager/deb": { - "name": "clickhouse/deb-builder", + "name": "altinityinfra/deb-builder", "dependent": [] }, "docker/packager/binary": { - "name": "clickhouse/binary-builder", + "name": "altinityinfra/binary-builder", "dependent": [ "docker/test/split_build_smoke_test", "docker/test/pvs", @@ -12,156 +12,112 @@ ] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", - "dependent": [] - }, - "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", - "dependent": [] - }, - "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", - "dependent": [] - }, - "docker/test/pvs": { - "name": "clickhouse/pvs-test", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/test/base", "docker/test/fasttest" ] }, "docker/test/stateless": { - "name": "clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", - "dependent": [] - }, - "docker/test/stress": { - "name": "clickhouse/stress-test", - "dependent": [] - }, - "docker/test/split_build_smoke_test": { - "name": "clickhouse/split-build-smoke-test", - "dependent": [] - }, - "docker/test/codebrowser": { - "name": "clickhouse/codebrowser", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/integration/runner": { - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, "docker/test/testflows/runner": { - "name": "clickhouse/testflows-runner", + "name": "altinityinfra/testflows-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", - "dependent": [] - }, - "docker/test/style": { - "name": 
"clickhouse/style-test", + "name": "altinityinfra/fasttest", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", - "dependent": [ + "name": "altinityinfra/test-base", + "dependent": [ "docker/test/stateless", "docker/test/integration/base", "docker/test/fuzzer", "docker/test/keeper-jepsen" - ] + ] }, "docker/test/integration/kerberized_hadoop": { - "name": "clickhouse/kerberized-hadoop", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", + "name": "altinityinfra/sqlancer-test", "dependent": [] }, "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", - "dependent": [] - }, - "docker/docs/builder": { - "name": "clickhouse/docs-builder", - "only_amd64": true, - "dependent": [ - "docker/docs/check", - "docker/docs/release" - ] - }, - "docker/docs/check": { - "name": "clickhouse/docs-check", - "dependent": [] - }, - "docker/docs/release": { - "name": "clickhouse/docs-release", + "name": "altinityinfra/keeper-jepsen-test", "dependent": [] } } diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py index c2dfab9dddcb..e5718a0306b2 100644 --- a/tests/ci/unit_tests_check.py +++ b/tests/ci/unit_tests_check.py @@ -26,7 +26,7 @@ from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/unit-test" +IMAGE_NAME = "altinityinfra/unit-test" def get_test_name(line): @@ -177,7 +177,7 @@ def process_result(result_folder): check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "error": sys.exit(1) diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index 966858c0747a..b30d6c4b4add 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -46,6 +46,7 @@ def __init__( revision: Union[int, str], git: Optional[Git], tweak: str = None, + flavour: str = None, ): self._major = int(major) self._minor = int(minor) @@ -58,6 +59,7 @@ def __init__( elif self._git is not 
None: self._tweak = self._git.tweak self._describe = "" + self._flavour = flavour def update(self, part: str) -> "ClickHouseVersion": """If part is valid, returns a new version""" @@ -115,9 +117,12 @@ def describe(self): @property def string(self): - return ".".join( + version_as_string = ".".join( (str(self.major), str(self.minor), str(self.patch), str(self.tweak)) ) + if self._flavour: + version_as_string = f"{version_as_string}.{self._flavour}" + return version_as_string def as_dict(self) -> VERSIONS: return { @@ -137,7 +142,10 @@ def as_tuple(self) -> Tuple[int, int, int, int]: def with_description(self, version_type): if version_type not in VersionType.VALID: raise ValueError(f"version type {version_type} not in {VersionType.VALID}") - self._describe = f"v{self.string}-{version_type}" + if version_type == self._flavour: + self._describe = f"v{self.string}" + else: + self._describe = f"v{self.string}-{version_type}" def __eq__(self, other) -> bool: if not isinstance(self, type(other)): @@ -165,16 +173,17 @@ def __le__(self, other: "ClickHouseVersion") -> bool: class VersionType: LTS = "lts" PRESTABLE = "prestable" - STABLE = "stable" + STABLE = "altinitystable" TESTING = "testing" VALID = (TESTING, PRESTABLE, STABLE, LTS) def validate_version(version: str): + # NOTE(vnemkov): minor but important fixes, so versions with 'flavour' are treated as valid (e.g. 22.8.8.4.altinitystable) parts = version.split(".") - if len(parts) != 4: + if len(parts) < 4: raise ValueError(f"{version} does not contain at least 4 parts") - for part in parts: + for part in parts[:4]: int(part) @@ -214,6 +223,9 @@ def get_version_from_repo( versions["patch"], versions["revision"], git, + # Explicitly use tweak value from version file + tweak=versions.get("tweak", versions["revision"]), + flavour=versions["flavour"] ) @@ -222,7 +234,7 @@ def get_version_from_string( ) -> ClickHouseVersion: validate_version(version) parts = version.split(".") - return ClickHouseVersion(parts[0], parts[1], parts[2], -1, git, parts[3]) + return ClickHouseVersion(parts[0], parts[1], parts[2], -1, git, parts[3], parts[4] if len(parts) >= 5 else None) def get_version_from_tag(tag: str) -> ClickHouseVersion: @@ -294,7 +306,7 @@ def update_contributors( cfd.write(content) -def update_version_local(version, version_type="testing"): +def update_version_local(version: ClickHouseVersion, version_type="testing"): update_contributors() version.with_description(version_type) update_cmake_version(version) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index d5819351ce45..5f9b0619deca 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -278,17 +278,17 @@ def shuffle_test_groups(self): @staticmethod def get_images_names(): return [ - "clickhouse/dotnet-client", - "clickhouse/integration-helper", - "clickhouse/integration-test", - "clickhouse/integration-tests-runner", - "clickhouse/kerberized-hadoop", - "clickhouse/kerberos-kdc", - "clickhouse/mysql-golang-client", - "clickhouse/mysql-java-client", - "clickhouse/mysql-js-client", - "clickhouse/mysql-php-client", - "clickhouse/postgresql-java-client", + "altinityinfra/dotnet-client", + "altinityinfra/integration-helper", + "altinityinfra/integration-test", + "altinityinfra/integration-tests-runner", + "altinityinfra/kerberized-hadoop", + "altinityinfra/kerberos-kdc", + "altinityinfra/mysql-golang-client", + "altinityinfra/mysql-java-client", + "altinityinfra/mysql-js-client", + "altinityinfra/mysql-php-client", +
"altinityinfra/postgresql-java-client", ] def _can_run_with(self, path, opt): @@ -494,7 +494,7 @@ def _get_runner_image_cmd(self, repo_path): "--docker-image-version", ): for img in self.get_images_names(): - if img == "clickhouse/integration-tests-runner": + if img == "altinityinfra/integration-tests-runner": runner_version = self.get_image_version(img) logging.info( "Can run with custom docker image version %s", runner_version diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index ad1cefe60553..9f8ddcd7f0d3 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -38,6 +38,7 @@ except Exception as e: logging.warning(f"Cannot import some modules, some tests may not work: {e}") + from dict2xml import dict2xml from kazoo.client import KazooClient from kazoo.exceptions import KazooException @@ -802,7 +803,7 @@ def setup_keeper_cmd(self, instance, env_variables, docker_compose_yml_dir): env_variables["keeper_binary"] = binary_path env_variables["keeper_cmd_prefix"] = keeper_cmd_prefix - env_variables["image"] = "clickhouse/integration-test:" + self.docker_base_tag + env_variables["image"] = "altinityinfra/integration-test:" + self.docker_base_tag env_variables["user"] = str(os.getuid()) env_variables["keeper_fs"] = "bind" for i in range(1, 4): @@ -1349,7 +1350,7 @@ def add_instance( with_coredns=False, hostname=None, env_variables=None, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag=None, stay_alive=False, ipv4_address=None, @@ -2893,7 +2894,7 @@ def __init__( copy_common_configs=True, hostname=None, env_variables=None, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag="latest", stay_alive=False, ipv4_address=None, diff --git a/tests/integration/helpers/network.py b/tests/integration/helpers/network.py index 7fdd3b79bd6a..d689f4b47ab2 100644 --- a/tests/integration/helpers/network.py +++ b/tests/integration/helpers/network.py @@ -249,7 +249,7 @@ def _ensure_container(self): time.sleep(i) image = subprocess.check_output( - "docker images -q clickhouse/integration-helper 2>/dev/null", shell=True + "docker images -q altinityinfra/integration-helper 2>/dev/null", shell=True ) if not image.strip(): print("No network image helper, will try download") @@ -258,16 +258,16 @@ def _ensure_container(self): for i in range(5): try: subprocess.check_call( # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL - "docker pull clickhouse/integration-helper", shell=True + "docker pull altinityinfra/integration-helper", shell=True ) break except: time.sleep(i) else: - raise Exception("Cannot pull clickhouse/integration-helper image") + raise Exception("Cannot pull altinityinfra/integration-helper image") self._container = self._docker_client.containers.run( - "clickhouse/integration-helper", + "altinityinfra/integration-helper", auto_remove=True, command=("sleep %s" % self.container_exit_timeout), # /run/xtables.lock passed inside for correct iptables --wait @@ -307,7 +307,7 @@ def _exec_run(self, cmd, **kwargs): return output -# Approximately mesure network I/O speed for interface +# Approximately measure network I/O speed for interface class NetThroughput(object): def __init__(self, node): self.node = node diff --git a/tests/integration/runner b/tests/integration/runner index f0d87b23a83d..12aff6ea8b75 100755 --- a/tests/integration/runner +++ b/tests/integration/runner @@ -28,7 +28,7 @@ CONFIG_DIR_IN_REPO = "programs/server" INTEGRATION_DIR_IN_REPO = "tests/integration" 
SRC_DIR_IN_REPO = "src" -DIND_INTEGRATION_TESTS_IMAGE_NAME = "clickhouse/integration-tests-runner" +DIND_INTEGRATION_TESTS_IMAGE_NAME = "altinityinfra/integration-tests-runner" def check_args_and_update_paths(args): @@ -305,23 +305,23 @@ if __name__ == "__main__": if args.docker_compose_images_tags is not None: for img_tag in args.docker_compose_images_tags: [image, tag] = img_tag.split(":") - if image == "clickhouse/mysql-golang-client": + if image == "altinityinfra/mysql-golang-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_GOLANG_CLIENT_TAG", tag) - elif image == "clickhouse/dotnet-client": + elif image == "altinityinfra/dotnet-client": env_tags += "-e {}={} ".format("DOCKER_DOTNET_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-java-client": + elif image == "altinityinfra/mysql-java-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_JAVA_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-js-client": + elif image == "altinityinfra/mysql-js-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_JS_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-php-client": + elif image == "altinityinfra/mysql-php-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_PHP_CLIENT_TAG", tag) - elif image == "clickhouse/postgresql-java-client": + elif image == "altinityinfra/postgresql-java-client": env_tags += "-e {}={} ".format("DOCKER_POSTGRESQL_JAVA_CLIENT_TAG", tag) - elif image == "clickhouse/integration-test": + elif image == "altinityinfra/integration-test": env_tags += "-e {}={} ".format("DOCKER_BASE_TAG", tag) - elif image == "clickhouse/kerberized-hadoop": + elif image == "altinityinfra/kerberized-hadoop": env_tags += "-e {}={} ".format("DOCKER_KERBERIZED_HADOOP_TAG", tag) - elif image == "clickhouse/kerberos-kdc": + elif image == "altinityinfra/kerberos-kdc": env_tags += "-e {}={} ".format("DOCKER_KERBEROS_KDC_TAG", tag) else: logging.info("Unknown image %s" % (image)) diff --git a/tests/integration/test_storage_kafka/test.py b/tests/integration/test_storage_kafka/test.py index 46bf7b0b3a0d..c039ad2e1b82 100644 --- a/tests/integration/test_storage_kafka/test.py +++ b/tests/integration/test_storage_kafka/test.py @@ -30,12 +30,24 @@ from kafka.protocol.group import MemberAssignment from kafka.admin import NewTopic +from pathlib import Path +from helpers.cluster import run_and_check # protoc --version # libprotoc 3.0.0 # # to create kafka_pb2.py # protoc --python_out=. kafka.proto +# Regenerate _pb2 files on each run, to make sure the test doesn't depend on the installed protobuf version +proto_dir = Path(__file__).parent / "clickhouse_path/format_schemas" +gen_dir = Path(__file__).parent +gen_dir.mkdir(exist_ok=True) +run_and_check( + f"python3 -m grpc_tools.protoc -I{proto_dir!s} --python_out={gen_dir!s} --grpc_python_out={gen_dir!s} \ + {proto_dir!s}/kafka.proto", + shell=True, +) + from . import kafka_pb2 from . import social_pb2 from . 
import message_with_repeated_pb2 diff --git a/tests/testflows/aes_encryption/aes_encryption_env/clickhouse-service.yml b/tests/testflows/aes_encryption/aes_encryption_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/aes_encryption/aes_encryption_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/datetime64_extended_range/datetime64_extended_range_env/clickhouse-service.yml b/tests/testflows/datetime64_extended_range/datetime64_extended_range_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/datetime64_extended_range/datetime64_extended_range_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/example/example_env/clickhouse-service.yml b/tests/testflows/example/example_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/example/example_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - 
"${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/extended_precision_data_types/extended-precision-data-type_env/clickhouse-service.yml b/tests/testflows/extended_precision_data_types/extended-precision-data-type_env/clickhouse-service.yml new file mode 100644 index 000000000000..9162d06bf27d --- /dev/null +++ b/tests/testflows/extended_precision_data_types/extended-precision-data-type_env/clickhouse-service.yml @@ -0,0 +1,27 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/kerberos/kerberos_env/clickhouse-service.yml b/tests/testflows/kerberos/kerberos_env/clickhouse-service.yml new file mode 100644 index 000000000000..7671684f6ee0 --- /dev/null +++ b/tests/testflows/kerberos/kerberos_env/clickhouse-service.yml @@ -0,0 +1,32 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + - "${CLICKHOUSE_TESTS_DIR}/configs/kerberos/etc/krb5.conf:/etc/krb5.conf" + entrypoint: bash -c "clickhouse 
server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + + environment: + KRB5_CLIENT_KTNAME: /etc/krb5.keytab + KRB5_KTNAME: /etc/krb5.keytab + + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/authentication/authentication_env/clickhouse-service.yml b/tests/testflows/ldap/authentication/authentication_env/clickhouse-service.yml new file mode 100644 index 000000000000..f8cc0a62c67c --- /dev/null +++ b/tests/testflows/ldap/authentication/authentication_env/clickhouse-service.yml @@ -0,0 +1,29 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + init: true + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "tail -f /dev/null" + healthcheck: + test: echo 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/authentication/ldap_authentication_env/clickhouse-service.yml b/tests/testflows/ldap/authentication/ldap_authentication_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/ldap/authentication/ldap_authentication_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/external_user_directory/external_user_directory_env/clickhouse-service.yml b/tests/testflows/ldap/external_user_directory/external_user_directory_env/clickhouse-service.yml new file mode 100644 
index 000000000000..f8cc0a62c67c --- /dev/null +++ b/tests/testflows/ldap/external_user_directory/external_user_directory_env/clickhouse-service.yml @@ -0,0 +1,29 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + init: true + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "tail -f /dev/null" + healthcheck: + test: echo 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/external_user_directory/ldap_external_user_directory_env/clickhouse-service.yml b/tests/testflows/ldap/external_user_directory/ldap_external_user_directory_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/ldap/external_user_directory/ldap_external_user_directory_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/role_mapping/ldap_role_mapping_env/clickhouse-service.yml b/tests/testflows/ldap/role_mapping/ldap_role_mapping_env/clickhouse-service.yml new file mode 100644 index 000000000000..74a56b63aabc --- /dev/null +++ b/tests/testflows/ldap/role_mapping/ldap_role_mapping_env/clickhouse-service.yml @@ -0,0 +1,28 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - 
"${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 10 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/ldap/role_mapping/role_mapping_env/clickhouse-service.yml b/tests/testflows/ldap/role_mapping/role_mapping_env/clickhouse-service.yml new file mode 100644 index 000000000000..3fe80bfce343 --- /dev/null +++ b/tests/testflows/ldap/role_mapping/role_mapping_env/clickhouse-service.yml @@ -0,0 +1,37 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + init: true + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/common.xml:/etc/clickhouse-server/users.d/common.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/common.xml:/etc/clickhouse-server/config.d/common.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/logs.xml:/etc/clickhouse-server/config.d/logs.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/ports.xml:/etc/clickhouse-server/config.d/ports.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/remote.xml:/etc/clickhouse-server/config.d/remote.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/ssl.xml:/etc/clickhouse-server/config.d/ssl.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/storage.xml:/etc/clickhouse-server/config.d/storage.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d/zookeeper.xml:/etc/clickhouse-server/config.d/zookeeper.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl/dhparam.pem:/etc/clickhouse-server/ssl/dhparam.pem" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl/server.crt:/etc/clickhouse-server/ssl/server.crt" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl/server.key:/etc/clickhouse-server/ssl/server.key" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "tail -f /dev/null" + healthcheck: + test: echo 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/map_type/map_type_env/clickhouse-service.yml b/tests/testflows/map_type/map_type_env/clickhouse-service.yml new file mode 100755 index 000000000000..9162d06bf27d --- /dev/null +++ b/tests/testflows/map_type/map_type_env/clickhouse-service.yml @@ -0,0 +1,27 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - 
"${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/rbac/rbac_env/clickhouse-service.yml b/tests/testflows/rbac/rbac_env/clickhouse-service.yml new file mode 100755 index 000000000000..4634f3b8721f --- /dev/null +++ b/tests/testflows/rbac/rbac_env/clickhouse-service.yml @@ -0,0 +1,29 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + init: true + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "tail -f /dev/null" + healthcheck: + test: echo 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/tests/testflows/runner b/tests/testflows/runner new file mode 100755 index 000000000000..1cf2a784ca0e --- /dev/null +++ b/tests/testflows/runner @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +#-*- coding: utf-8 -*- +import subprocess +import os +import getpass +import argparse +import logging +import signal +import subprocess +import sys + +CUR_FILE_DIR = os.path.dirname(os.path.realpath(__file__)) +DEFAULT_CLICKHOUSE_ROOT = os.path.abspath(os.path.join(CUR_FILE_DIR, "../../")) +CURRENT_WORK_DIR = os.getcwd() +CONTAINER_NAME = "clickhouse_testflows_tests" + +DIND_TESTFLOWS_TESTS_IMAGE_NAME = "altinityinfra/testflows-runner" + +def check_args_and_update_paths(args): + if not os.path.isabs(args.binary): + args.binary = os.path.abspath(os.path.join(CURRENT_WORK_DIR, args.binary)) + + if not args.bridge_binary: + args.bridge_binary = os.path.join(os.path.dirname(args.binary), 'clickhouse-odbc-bridge') + elif not os.path.isabs(args.bridge_binary): + args.bridge_binary = os.path.abspath(os.path.join(CURRENT_WORK_DIR, args.bridge_binary)) + + if not os.path.isabs(args.configs_dir): + args.configs_dir = os.path.abspath(os.path.join(CURRENT_WORK_DIR, args.configs_dir)) + + if not os.path.isabs(args.clickhouse_root): + args.clickhouse_root = os.path.abspath(os.path.join(CURRENT_WORK_DIR, args.clickhouse_root)) + + for path in [args.binary, args.configs_dir, args.clickhouse_root]: + if not os.path.exists(path): + 
raise Exception("Path {} doesn't exist".format(path)) + +def docker_kill_handler_handler(signum, frame): + subprocess.check_call('docker kill $(docker ps -a -q --filter name={name} --format="{{{{.ID}}}}")'.format(name=CONTAINER_NAME), shell=True) + raise KeyboardInterrupt("Killed by Ctrl+C") + +signal.signal(signal.SIGINT, docker_kill_handler_handler) + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s') + parser = argparse.ArgumentParser(description="ClickHouse testflows runner") + + parser.add_argument( + "--binary", + default=os.environ.get("CLICKHOUSE_TESTS_SERVER_BIN_PATH", os.environ.get("CLICKHOUSE_TESTS_CLIENT_BIN_PATH", "/usr/bin/clickhouse")), + help="Path to clickhouse binary") + + parser.add_argument( + "--bridge-binary", + default=os.environ.get("CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH", ""), + help="Path to clickhouse-odbc-bridge binary. Defaults to clickhouse-odbc-bridge in the same dir as clickhouse.") + + parser.add_argument( + "--configs-dir", + default=os.environ.get("CLICKHOUSE_TESTS_BASE_CONFIG_DIR", os.path.join(DEFAULT_CLICKHOUSE_ROOT, "programs/server")), + help="Path to clickhouse configs directory") + + parser.add_argument( + "--clickhouse-root", + default=DEFAULT_CLICKHOUSE_ROOT, + help="Path to repository root folder") + + parser.add_argument( + "--command", + default='', + help="Set it to run some other command in container (for example bash)") + + parser.add_argument( + "--disable-net-host", + action='store_true', + default=False, + help="Don't use net host in parent docker container") + + parser.add_argument( + "--docker-image-version", + default="latest", + help="Version of docker image which runner will use to run tests") + + + parser.add_argument('testflows_args', nargs='*', help="args for testflows command") + + args = parser.parse_args() + + check_args_and_update_paths(args) + + net = "" + if not args.disable_net_host: + net = "--net=host" + + # create named volume which will be used inside to store images and other docker related files, + # to avoid redownloading it every time + # + # should be removed manually when not needed + subprocess.check_call('docker volume create {name}_volume'.format(name=CONTAINER_NAME), shell=True) + + # enable tty mode & interactive for docker if we have real tty + tty = "" + if sys.stdout.isatty() and sys.stdin.isatty(): + tty = "-it" + + cmd = "docker run {net} {tty} --rm --name {name} --privileged --volume={bridge_bin}:/clickhouse-odbc-bridge --volume={bin}:/clickhouse \ + --volume={cfg}:/clickhouse-config --volume={pth}:/ClickHouse --volume={name}_volume:/var/lib/docker -e TESTFLOWS_OPTS='{opts}' {img} {command}".format( + net=net, + tty=tty, + bin=args.binary, + bridge_bin=args.bridge_binary, + cfg=args.configs_dir, + pth=args.clickhouse_root, + opts=' '.join(args.testflows_args), + img=DIND_TESTFLOWS_TESTS_IMAGE_NAME + ":" + args.docker_image_version, + name=CONTAINER_NAME, + command=args.command + ) + + print("Running testflows container as: '" + cmd + "'.") + # testflows returns a non-zero exit code on failed tests + subprocess.call(cmd, shell=True) + + result_path = os.environ.get("CLICKHOUSE_TESTS_RESULT_PATH", None) + if result_path is not None: + move_from = os.path.join(args.clickhouse_root, 'tests/testflows') + status = os.path.join(move_from, 'check_status.tsv') + results = os.path.join(move_from, 'test_results.tsv') + clickhouse_logs = os.path.join(move_from, 'clickhouse_logs.tar.gz') + subprocess.call("mv {} {}".format(status, result_path), shell=True) + 
subprocess.call("mv {} {}".format(results, result_path), shell=True) + subprocess.call("mv {} {}".format(clickhouse_logs, result_path), shell=True) diff --git a/tests/testflows/window_functions/window_functions_env/clickhouse-service.yml b/tests/testflows/window_functions/window_functions_env/clickhouse-service.yml new file mode 100755 index 000000000000..9162d06bf27d --- /dev/null +++ b/tests/testflows/window_functions/window_functions_env/clickhouse-service.yml @@ -0,0 +1,27 @@ +version: '2.3' + +services: + clickhouse: + image: altinityinfra/integration-test + expose: + - "9000" + - "9009" + - "8123" + volumes: + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d:/etc/clickhouse-server/users.d" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml" + - "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml" + - "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse" + - "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge" + entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log" + healthcheck: + test: clickhouse client --query='select 1' + interval: 10s + timeout: 10s + retries: 3 + start_period: 300s + cap_add: + - SYS_PTRACE + security_opt: + - label:disable diff --git a/utils/clickhouse-docker b/utils/clickhouse-docker index cfe515f1de54..34b637f0eaad 100755 --- a/utils/clickhouse-docker +++ b/utils/clickhouse-docker @@ -26,11 +26,11 @@ then # https://stackoverflow.com/a/39454426/1555175 wget -nv https://registry.hub.docker.com/v1/repositories/clickhouse/clickhouse-server/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}' else - docker pull clickhouse/clickhouse-server:${param} + docker pull altinityinfra/clickhouse-server:${param} tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX) # older version require /nonexistent folder to exist to run clickhouse client :D chmod 777 ${tmp_dir} set -e - containerid=`docker run -v${tmp_dir}:/nonexistent -d clickhouse/clickhouse-server:${param}` + containerid=`docker run -v${tmp_dir}:/nonexistent -d altinityinfra/clickhouse-server:${param}` set +e while : do From 2e2219ffcc96f97dd8ae81338130b30c7c0abbe6 Mon Sep 17 00:00:00 2001 From: Kseniia Sumarokova <54203879+kssenii@users.noreply.github.com> Date: Sun, 16 Oct 2022 14:03:31 +0200 Subject: [PATCH 002/130] Merge pull request #42321 from ClickHouse/filimonov-request_timeout_ms Increase request_timeout_ms for s3 disks. 
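(An aside, illustrative only and not part of the patch: the one-line C++ change below raises the default S3 request timeout from 5000 ms to 30000 ms, while an explicit request_timeout_ms in the disk configuration still takes precedence. A Python sketch of the lookup, with a hypothetical flat config dict standing in for the real Poco configuration:)

def s3_request_timeout_ms(config: dict, config_prefix: str) -> int:
    # Default raised from 5000 to 30000; an explicit setting always wins.
    return int(config.get(config_prefix + ".request_timeout_ms", 30000))

assert s3_request_timeout_ms({}, "s3") == 30000
assert s3_request_timeout_ms({"s3.request_timeout_ms": 5000}, "s3") == 5000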
--- src/Disks/ObjectStorages/S3/diskSettings.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Disks/ObjectStorages/S3/diskSettings.cpp b/src/Disks/ObjectStorages/S3/diskSettings.cpp index 11f7b2e8ad77..9c232a586bb5 100644 --- a/src/Disks/ObjectStorages/S3/diskSettings.cpp +++ b/src/Disks/ObjectStorages/S3/diskSettings.cpp @@ -124,7 +124,7 @@ std::unique_ptr getClient(const Poco::Util::AbstractConfigura throw Exception("S3 path must ends with '/', but '" + uri.key + "' doesn't.", ErrorCodes::BAD_ARGUMENTS); client_configuration.connectTimeoutMs = config.getUInt(config_prefix + ".connect_timeout_ms", 10000); - client_configuration.requestTimeoutMs = config.getUInt(config_prefix + ".request_timeout_ms", 5000); + client_configuration.requestTimeoutMs = config.getUInt(config_prefix + ".request_timeout_ms", 30000); client_configuration.maxConnections = config.getUInt(config_prefix + ".max_connections", 100); client_configuration.endpointOverride = uri.endpoint; From 3cf7376bcfdd17473a02db1c8715890a2d3a5b6b Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Fri, 26 Aug 2022 10:18:26 +0200 Subject: [PATCH 003/130] Merge pull request #40620 from zvonand/zvonand-b58 Base58 fix handling leading 0 / '1' --- src/Common/base58.h | 45 +++++++++++++++---- src/Functions/FunctionBase58Conversion.h | 4 +- .../0_stateless/02337_base58.reference | 3 ++ tests/queries/0_stateless/02337_base58.sql | 5 ++- 4 files changed, 46 insertions(+), 11 deletions(-) diff --git a/src/Common/base58.h b/src/Common/base58.h index 3d4b55a1fba8..bc3c3c7aee88 100644 --- a/src/Common/base58.h +++ b/src/Common/base58.h @@ -5,12 +5,22 @@ namespace DB { -inline size_t encodeBase58(const char8_t * src, char8_t * dst) +inline size_t encodeBase58(const char8_t * src, size_t srclen, char8_t * dst) { const char * base58_encoding_alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; + size_t processed = 0; + size_t zeros = 0; + for (;*src == '\0' && processed < srclen-1; ++src) + { + ++processed; + ++zeros; + *dst++ = '1'; + } + size_t idx = 0; - for (; *src; ++src) + + while (processed < srclen-1) { unsigned int carry = static_cast(*src); for (size_t j = 0; j < idx; ++j) @@ -24,6 +34,8 @@ inline size_t encodeBase58(const char8_t * src, char8_t * dst) dst[idx++] = static_cast(carry % 58); carry /= 58; } + ++src; + ++processed; } size_t c_idx = idx >> 1; @@ -37,23 +49,38 @@ inline size_t encodeBase58(const char8_t * src, char8_t * dst) { dst[c_idx] = base58_encoding_alphabet[static_cast(dst[c_idx])]; } + dst[idx] = '\0'; - return idx + 1; + return zeros + idx + 1; } -inline size_t decodeBase58(const char8_t * src, char8_t * dst) +inline size_t decodeBase58(const char8_t * src, size_t srclen, char8_t * dst) { const signed char uint_max = UINT_MAX; const signed char map_digits[128] = {uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, - uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, 0, 1, 2, 3, 4, 5, 6, 7, 8, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, 9, 10, 11, 12, 13, 14, 15, 16, uint_max, 17, 18, 19, 20, 21, uint_max, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - uint_max, uint_max, 
uint_max, uint_max, uint_max, uint_max, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, uint_max, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, uint_max, uint_max, uint_max, uint_max, uint_max}; + uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, 0, 1, 2, + 3, 4, 5, 6, 7, 8, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, + 9, 10, 11, 12, 13, 14, 15, 16, uint_max, 17, 18, 19, 20, + 21, uint_max, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, + uint_max, uint_max, uint_max, uint_max, uint_max, uint_max, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, uint_max, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, uint_max, uint_max, uint_max, uint_max, uint_max}; + + size_t processed = 0; + size_t zeros = 0; + for (;*src == '1' && processed < srclen-1; ++src) + { + ++processed; + ++zeros; + *dst++ = '\0'; + } size_t idx = 0; - for (; *src; ++src) + while (processed < srclen-1) { unsigned int carry = map_digits[*src]; if (unlikely(carry == UINT_MAX)) @@ -71,6 +98,8 @@ inline size_t decodeBase58(const char8_t * src, char8_t * dst) dst[idx++] = static_cast(carry & 0xff); carry >>= 8; } + ++src; + ++processed; } size_t c_idx = idx >> 1; @@ -81,7 +110,7 @@ inline size_t decodeBase58(const char8_t * src, char8_t * dst) dst[idx - (i + 1)] = s; } dst[idx] = '\0'; - return idx + 1; + return zeros + idx + 1; } } diff --git a/src/Functions/FunctionBase58Conversion.h b/src/Functions/FunctionBase58Conversion.h index 82e2a2caac04..bc166f2c9f5a 100644 --- a/src/Functions/FunctionBase58Conversion.h +++ b/src/Functions/FunctionBase58Conversion.h @@ -48,7 +48,7 @@ struct Base58Encode for (size_t row = 0; row < input_rows_count; ++row) { size_t srclen = src_offsets[row] - src_offset_prev; - auto encoded_size = encodeBase58(src, dst_pos); + auto encoded_size = encodeBase58(src, srclen, dst_pos); src += srclen; dst_pos += encoded_size; @@ -90,7 +90,7 @@ struct Base58Decode { size_t srclen = src_offsets[row] - src_offset_prev; - auto decoded_size = decodeBase58(src, dst_pos); + auto decoded_size = decodeBase58(src, srclen, dst_pos); if (!decoded_size) throw Exception("Invalid Base58 value, cannot be decoded", ErrorCodes::BAD_ARGUMENTS); diff --git a/tests/queries/0_stateless/02337_base58.reference b/tests/queries/0_stateless/02337_base58.reference index bc666044388f..20b9124c1502 100644 --- a/tests/queries/0_stateless/02337_base58.reference +++ b/tests/queries/0_stateless/02337_base58.reference @@ -21,3 +21,6 @@ foo foob fooba foobar + +1 +1 diff --git a/tests/queries/0_stateless/02337_base58.sql b/tests/queries/0_stateless/02337_base58.sql index 9c9379a2854f..42b032c7601b 100644 --- a/tests/queries/0_stateless/02337_base58.sql +++ b/tests/queries/0_stateless/02337_base58.sql @@ -9,4 +9,7 @@ SELECT base58Decode('Hold my beer...'); -- { serverError 36 } SELECT base58Decode(encoded) FROM (SELECT base58Encode(val) as encoded FROM (select arrayJoin(['', 'f', 'fo', 'foo', 'foob', 'fooba', 'foobar', 'Hello world!']) val)); SELECT base58Encode(val) FROM (select arrayJoin(['', 'f', 'fo', 'foo', 'foob', 'fooba', 'foobar']) val); -SELECT base58Decode(val) FROM (select arrayJoin(['', '2m', '8o8', 'bQbp', '3csAg9', 'CZJRhmz', 't1Zv2yaZ']) val); +SELECT base58Decode(val) FROM (select arrayJoin(['', '2m', '8o8', 'bQbp', '3csAg9', 'CZJRhmz', 't1Zv2yaZ', '']) val); + +SELECT base58Encode(base58Decode('1BWutmTvYPwDtmw9abTkS4Ssr8no61spGAvW1X6NDix')) == '1BWutmTvYPwDtmw9abTkS4Ssr8no61spGAvW1X6NDix'; +select 
base58Encode('\x00\x0b\xe3\xe1\xeb\xa1\x7a\x47\x3f\x89\xb0\xf7\xe8\xe2\x49\x40\xf2\x0a\xeb\x8e\xbc\xa7\x1a\x88\xfd\xe9\x5d\x4b\x83\xb7\x1a\x09') == '1BWutmTvYPwDtmw9abTkS4Ssr8no61spGAvW1X6NDix'; From 111ca9ce05538c0187736ffbb8273958b998b9f4 Mon Sep 17 00:00:00 2001 From: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> Date: Thu, 1 Sep 2022 19:40:40 +0200 Subject: [PATCH 004/130] Merge pull request #40485 from arthurpassos/fix-parquet-chunked-array-deserialization Add support for extended (chunked) arrays for Parquet format --- .../Formats/Impl/ParquetBlockInputFormat.cpp | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index 12fa9710c42f..427c159314b3 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -55,7 +55,16 @@ Chunk ParquetBlockInputFormat::generate() return res; std::shared_ptr table; - arrow::Status read_status = file_reader->ReadRowGroup(row_group_current, column_indices, &table); + + std::unique_ptr<::arrow::RecordBatchReader> rbr; + std::vector row_group_indices { row_group_current }; + arrow::Status get_batch_reader_status = file_reader->GetRecordBatchReader(row_group_indices, column_indices, &rbr); + + if (!get_batch_reader_status.ok()) + throw ParsingException{"Error while reading Parquet data: " + get_batch_reader_status.ToString(), ErrorCodes::CANNOT_READ_ALL_DATA}; + + arrow::Status read_status = rbr->ReadAll(&table); + if (!read_status.ok()) throw ParsingException{"Error while reading Parquet data: " + read_status.ToString(), ErrorCodes::CANNOT_READ_ALL_DATA}; From c4ad8a02184593b6a23c2bed1431f1d1119e5959 Mon Sep 17 00:00:00 2001 From: Alexander Tokmakov Date: Mon, 3 Oct 2022 13:25:50 +0300 Subject: [PATCH 005/130] Merge pull request #41945 from ClickHouse/enable_more_s3_tests Enable some disabled S3 tests --- src/Interpreters/InterpreterOptimizeQuery.cpp | 4 +++ .../MergeTree/MergeFromLogEntryTask.cpp | 2 +- src/Storages/MergeTree/MergeTreeData.cpp | 15 ++++++++++- src/Storages/MergeTree/MergeTreeData.h | 7 ++++- .../MergeTree/MutateFromLogEntryTask.cpp | 2 +- .../ReplicatedMergeTreePartCheckThread.cpp | 4 +-- .../ReplicatedMergeTreeRestartingThread.cpp | 2 +- src/Storages/StorageReplicatedMergeTree.cpp | 4 +-- .../test_broken_part_during_merge/test.py | 3 ++- tests/integration/test_lost_part/test.py | 26 ++++++++++++------- .../00502_custom_partitioning_local.reference | 2 +- .../00502_custom_partitioning_local.sql | 3 +-- ...731_long_merge_tree_select_opened_files.sh | 1 + .../01070_mutations_with_dependencies.sql | 1 + .../01509_parallel_quorum_and_merge_long.sh | 3 +-- .../0_stateless/01533_multiple_nested.sql | 2 +- .../02343_read_from_s3_compressed_blocks.sql | 2 +- 17 files changed, 56 insertions(+), 27 deletions(-) diff --git a/src/Interpreters/InterpreterOptimizeQuery.cpp b/src/Interpreters/InterpreterOptimizeQuery.cpp index 239e15f996ec..47ffb6c0cf5e 100644 --- a/src/Interpreters/InterpreterOptimizeQuery.cpp +++ b/src/Interpreters/InterpreterOptimizeQuery.cpp @@ -6,6 +6,7 @@ #include #include #include +#include #include @@ -75,6 +76,9 @@ BlockIO InterpreterOptimizeQuery::execute() } } + if (auto * snapshot_data = dynamic_cast(storage_snapshot->data.get())) + snapshot_data->parts = {}; + table->optimize(query_ptr, metadata_snapshot, ast.partition, ast.final, ast.deduplicate, column_names, getContext()); return {}; diff --git 
a/src/Storages/MergeTree/MergeFromLogEntryTask.cpp b/src/Storages/MergeTree/MergeFromLogEntryTask.cpp index d51cd6aa07d8..b73c055d7bcf 100644 --- a/src/Storages/MergeTree/MergeFromLogEntryTask.cpp +++ b/src/Storages/MergeTree/MergeFromLogEntryTask.cpp @@ -309,7 +309,7 @@ bool MergeFromLogEntryTask::finalize(ReplicatedMergeMutateTaskBase::PartLogWrite write_part_log(ExecutionStatus::fromCurrentException()); if (storage.getSettings()->detach_not_byte_identical_parts) - storage.forgetPartAndMoveToDetached(std::move(part), "merge-not-byte-identical"); + storage.forcefullyMovePartToDetachedAndRemoveFromMemory(std::move(part), "merge-not-byte-identical"); else storage.tryRemovePartImmediately(std::move(part)); diff --git a/src/Storages/MergeTree/MergeTreeData.cpp b/src/Storages/MergeTree/MergeTreeData.cpp index 643ab9b1bbfc..c6a0f85fbb41 100644 --- a/src/Storages/MergeTree/MergeTreeData.cpp +++ b/src/Storages/MergeTree/MergeTreeData.cpp @@ -3136,7 +3136,20 @@ void MergeTreeData::restoreAndActivatePart(const DataPartPtr & part, DataPartsLo modifyPartState(part, DataPartState::Active); } -void MergeTreeData::forgetPartAndMoveToDetached(const MergeTreeData::DataPartPtr & part_to_detach, const String & prefix, bool restore_covered) + +void MergeTreeData::outdateBrokenPartAndCloneToDetached(const DataPartPtr & part_to_detach, const String & prefix) +{ + auto metadata_snapshot = getInMemoryMetadataPtr(); + if (prefix.empty()) + LOG_INFO(log, "Cloning part {} to {} and making it obsolete.", part_to_detach->data_part_storage->getPartDirectory(), part_to_detach->name); + else + LOG_INFO(log, "Cloning part {} to {}_{} and making it obsolete.", part_to_detach->data_part_storage->getPartDirectory(), prefix, part_to_detach->name); + + part_to_detach->makeCloneInDetached(prefix, metadata_snapshot); + removePartsFromWorkingSet(NO_TRANSACTION_RAW, {part_to_detach}, true); +} + +void MergeTreeData::forcefullyMovePartToDetachedAndRemoveFromMemory(const MergeTreeData::DataPartPtr & part_to_detach, const String & prefix, bool restore_covered) { if (prefix.empty()) LOG_INFO(log, "Renaming {} to {} and forgetting it.", part_to_detach->data_part_storage->getPartDirectory(), part_to_detach->name); diff --git a/src/Storages/MergeTree/MergeTreeData.h b/src/Storages/MergeTree/MergeTreeData.h index 96c47c48f964..e928ce4e239b 100644 --- a/src/Storages/MergeTree/MergeTreeData.h +++ b/src/Storages/MergeTree/MergeTreeData.h @@ -607,7 +607,12 @@ class MergeTreeData : public IStorage, public WithMutableContext /// Renames the part to detached/_ and removes it from data_parts, //// so it will not be deleted in clearOldParts. /// If restore_covered is true, adds to the working set inactive parts, which were merged into the deleted part. - void forgetPartAndMoveToDetached(const DataPartPtr & part, const String & prefix = "", bool restore_covered = false); + /// NOTE: This method is safe to use only for parts which nobody else holds (like on server start or for parts which was not committed). + /// For active parts it's unsafe because this method modifies fields of part (rename) while some other thread can try to read it. + void forcefullyMovePartToDetachedAndRemoveFromMemory(const DataPartPtr & part, const String & prefix = "", bool restore_covered = false); + + /// Outdate broken part, set remove time to zero (remove as fast as possible) and make clone in detached directory. 
+ void outdateBrokenPartAndCloneToDetached(const DataPartPtr & part, const String & prefix); /// If the part is Obsolete and not used by anybody else, immediately delete it from filesystem and remove from memory. void tryRemovePartImmediately(DataPartPtr && part); diff --git a/src/Storages/MergeTree/MutateFromLogEntryTask.cpp b/src/Storages/MergeTree/MutateFromLogEntryTask.cpp index a51eb7854ab7..312a9a9013a7 100644 --- a/src/Storages/MergeTree/MutateFromLogEntryTask.cpp +++ b/src/Storages/MergeTree/MutateFromLogEntryTask.cpp @@ -195,7 +195,7 @@ bool MutateFromLogEntryTask::finalize(ReplicatedMergeMutateTaskBase::PartLogWrit write_part_log(ExecutionStatus::fromCurrentException()); if (storage.getSettings()->detach_not_byte_identical_parts) - storage.forgetPartAndMoveToDetached(std::move(new_part), "mutate-not-byte-identical"); + storage.forcefullyMovePartToDetachedAndRemoveFromMemory(std::move(new_part), "mutate-not-byte-identical"); else storage.tryRemovePartImmediately(std::move(new_part)); diff --git a/src/Storages/MergeTree/ReplicatedMergeTreePartCheckThread.cpp b/src/Storages/MergeTree/ReplicatedMergeTreePartCheckThread.cpp index 610139b0c2fe..0dd482aa255d 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreePartCheckThread.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreePartCheckThread.cpp @@ -375,7 +375,7 @@ CheckResult ReplicatedMergeTreePartCheckThread::checkPart(const String & part_na LOG_ERROR(log, fmt::runtime(message)); /// Delete part locally. - storage.forgetPartAndMoveToDetached(part, "broken"); + storage.outdateBrokenPartAndCloneToDetached(part, "broken"); /// Part is broken, let's try to find it and fetch. searchForMissingPartAndFetchIfPossible(part_name, exists_in_zookeeper); @@ -392,7 +392,7 @@ CheckResult ReplicatedMergeTreePartCheckThread::checkPart(const String & part_na String message = "Unexpected part " + part_name + " in filesystem. Removing."; LOG_ERROR(log, fmt::runtime(message)); - storage.forgetPartAndMoveToDetached(part, "unexpected"); + storage.outdateBrokenPartAndCloneToDetached(part, "unexpected"); return {part_name, false, message}; } else diff --git a/src/Storages/MergeTree/ReplicatedMergeTreeRestartingThread.cpp b/src/Storages/MergeTree/ReplicatedMergeTreeRestartingThread.cpp index e466e511a8ea..96990770fdf0 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreeRestartingThread.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreeRestartingThread.cpp @@ -250,7 +250,7 @@ void ReplicatedMergeTreeRestartingThread::removeFailedQuorumParts() if (part) { LOG_DEBUG(log, "Found part {} with failed quorum. Moving to detached. 
This shouldn't happen often.", part_name); - storage.forgetPartAndMoveToDetached(part, "noquorum"); + storage.forcefullyMovePartToDetachedAndRemoveFromMemory(part, "noquorum"); storage.queue.removeFailedQuorumPart(part->info); } } diff --git a/src/Storages/StorageReplicatedMergeTree.cpp b/src/Storages/StorageReplicatedMergeTree.cpp index 46a81f306768..a59eec2b5099 100644 --- a/src/Storages/StorageReplicatedMergeTree.cpp +++ b/src/Storages/StorageReplicatedMergeTree.cpp @@ -1282,7 +1282,7 @@ void StorageReplicatedMergeTree::checkParts(bool skip_sanity_checks) for (const DataPartPtr & part : unexpected_parts) { LOG_ERROR(log, "Renaming unexpected part {} to ignored_{}", part->name, part->name); - forgetPartAndMoveToDetached(part, "ignored", true); + forcefullyMovePartToDetachedAndRemoveFromMemory(part, "ignored", true); } } @@ -5128,7 +5128,7 @@ void StorageReplicatedMergeTree::restoreMetadataInZooKeeper() if (part->getState() == DataPartState::Active) active_parts_names.push_back(part->name); - forgetPartAndMoveToDetached(part); + forcefullyMovePartToDetachedAndRemoveFromMemory(part); } LOG_INFO(log, "Moved all parts to detached/"); diff --git a/tests/integration/test_broken_part_during_merge/test.py b/tests/integration/test_broken_part_during_merge/test.py index 2171e33a02a0..f4110844466d 100644 --- a/tests/integration/test_broken_part_during_merge/test.py +++ b/tests/integration/test_broken_part_during_merge/test.py @@ -24,7 +24,8 @@ def test_merge_and_part_corruption(started_cluster): node1.query( """ CREATE TABLE replicated_mt(date Date, id UInt32, value Int32) - ENGINE = ReplicatedMergeTree('/clickhouse/tables/replicated_mt', '{replica}') ORDER BY id; + ENGINE = ReplicatedMergeTree('/clickhouse/tables/replicated_mt', '{replica}') ORDER BY id + SETTINGS cleanup_delay_period=1, cleanup_delay_period_random_add=1; """.format( replica=node1.name ) diff --git a/tests/integration/test_lost_part/test.py b/tests/integration/test_lost_part/test.py index 7640c961d12c..7a71044e5db3 100644 --- a/tests/integration/test_lost_part/test.py +++ b/tests/integration/test_lost_part/test.py @@ -40,7 +40,8 @@ def remove_part_from_disk(node, table, part_name): def test_lost_part_same_replica(start_cluster): for node in [node1, node2]: node.query( - "CREATE TABLE mt0 (id UInt64, date Date) ENGINE ReplicatedMergeTree('/clickhouse/tables/t', '{}') ORDER BY tuple() PARTITION BY date".format( + "CREATE TABLE mt0 (id UInt64, date Date) ENGINE ReplicatedMergeTree('/clickhouse/tables/t', '{}') ORDER BY tuple() PARTITION BY date " + "SETTINGS cleanup_delay_period=1, cleanup_delay_period_random_add=1".format( node.name ) ) @@ -73,7 +74,7 @@ def test_lost_part_same_replica(start_cluster): node1.query("ATTACH TABLE mt0") node1.query("SYSTEM START MERGES mt0") - res, err = node1.http_query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt0") + res, err = node1.query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt0") print("result: ", res) print("error: ", res) @@ -104,7 +105,8 @@ def test_lost_part_same_replica(start_cluster): def test_lost_part_other_replica(start_cluster): for node in [node1, node2]: node.query( - "CREATE TABLE mt1 (id UInt64) ENGINE ReplicatedMergeTree('/clickhouse/tables/t1', '{}') ORDER BY tuple()".format( + "CREATE TABLE mt1 (id UInt64) ENGINE ReplicatedMergeTree('/clickhouse/tables/t1', '{}') ORDER BY tuple() " + "SETTINGS cleanup_delay_period=1, cleanup_delay_period_random_add=1".format( node.name ) ) @@ -136,7 +138,7 @@ def test_lost_part_other_replica(start_cluster): node1.query("CHECK TABLE 
mt1") node2.query("SYSTEM START REPLICATION QUEUES") - res, err = node1.http_query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt1") + res, err = node1.query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt1") print("result: ", res) print("error: ", res) @@ -168,7 +170,8 @@ def test_lost_part_other_replica(start_cluster): def test_lost_part_mutation(start_cluster): for node in [node1, node2]: node.query( - "CREATE TABLE mt2 (id UInt64) ENGINE ReplicatedMergeTree('/clickhouse/tables/t2', '{}') ORDER BY tuple()".format( + "CREATE TABLE mt2 (id UInt64) ENGINE ReplicatedMergeTree('/clickhouse/tables/t2', '{}') ORDER BY tuple() " + "SETTINGS cleanup_delay_period=1, cleanup_delay_period_random_add=1".format( node.name ) ) @@ -196,7 +199,7 @@ def test_lost_part_mutation(start_cluster): node1.query("CHECK TABLE mt2") node1.query("SYSTEM START MERGES mt2") - res, err = node1.http_query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt2") + res, err = node1.query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt2") print("result: ", res) print("error: ", res) @@ -225,7 +228,9 @@ def test_lost_last_part(start_cluster): for node in [node1, node2]: node.query( "CREATE TABLE mt3 (id UInt64, p String) ENGINE ReplicatedMergeTree('/clickhouse/tables/t3', '{}') " - "ORDER BY tuple() PARTITION BY p".format(node.name) + "ORDER BY tuple() PARTITION BY p SETTINGS cleanup_delay_period=1, cleanup_delay_period_random_add=1".format( + node.name + ) ) node1.query("SYSTEM STOP MERGES mt3") @@ -246,9 +251,6 @@ def test_lost_last_part(start_cluster): node1.query("CHECK TABLE mt3") node1.query("SYSTEM START MERGES mt3") - res, err = node1.http_query_and_get_answer_with_error("SYSTEM SYNC REPLICA mt3") - print("result: ", res) - print("error: ", res) for i in range(10): result = node1.query("SELECT count() FROM system.replication_queue") @@ -259,6 +261,10 @@ def test_lost_last_part(start_cluster): "DROP/DETACH PARTITION" ): break + if node1.contains_in_log( + "Created empty part 8b8f0fede53df97513a9fb4cb19dc1e4_0_0_0 " + ): + break time.sleep(1) else: assert False, "Don't have required messages in node1 log" diff --git a/tests/queries/0_stateless/00502_custom_partitioning_local.reference b/tests/queries/0_stateless/00502_custom_partitioning_local.reference index 7b14a2d4edcf..fff28819e74e 100644 --- a/tests/queries/0_stateless/00502_custom_partitioning_local.reference +++ b/tests/queries/0_stateless/00502_custom_partitioning_local.reference @@ -9,7 +9,7 @@ Sum before DETACH PARTITION: Sum after DETACH PARTITION: 0 system.detached_parts after DETACH PARTITION: -default not_partitioned all all_1_2_1 default 1 2 1 +default not_partitioned all all_1_2_1 1 2 1 *** Partitioned by week *** Parts before OPTIMIZE: 1999-12-27 19991227_1_1_0 diff --git a/tests/queries/0_stateless/00502_custom_partitioning_local.sql b/tests/queries/0_stateless/00502_custom_partitioning_local.sql index b7eb08c919ec..c85a978af689 100644 --- a/tests/queries/0_stateless/00502_custom_partitioning_local.sql +++ b/tests/queries/0_stateless/00502_custom_partitioning_local.sql @@ -1,4 +1,3 @@ --- Tags: no-s3-storage SELECT '*** Not partitioned ***'; DROP TABLE IF EXISTS not_partitioned; @@ -19,7 +18,7 @@ ALTER TABLE not_partitioned DETACH PARTITION ID 'all'; SELECT 'Sum after DETACH PARTITION:'; SELECT sum(x) FROM not_partitioned; SELECT 'system.detached_parts after DETACH PARTITION:'; -SELECT * FROM system.detached_parts WHERE database = currentDatabase() AND table = 'not_partitioned'; +SELECT system.detached_parts.* EXCEPT disk FROM 
system.detached_parts WHERE database = currentDatabase() AND table = 'not_partitioned'; DROP TABLE not_partitioned; diff --git a/tests/queries/0_stateless/00731_long_merge_tree_select_opened_files.sh b/tests/queries/0_stateless/00731_long_merge_tree_select_opened_files.sh index 2510517a7404..11396dd34eb4 100755 --- a/tests/queries/0_stateless/00731_long_merge_tree_select_opened_files.sh +++ b/tests/queries/0_stateless/00731_long_merge_tree_select_opened_files.sh @@ -1,5 +1,6 @@ #!/usr/bin/env bash # Tags: long, no-s3-storage +# no-s3 because read FileOpen metric set -e diff --git a/tests/queries/0_stateless/01070_mutations_with_dependencies.sql b/tests/queries/0_stateless/01070_mutations_with_dependencies.sql index 506fd23904fe..566bb16b10c8 100644 --- a/tests/queries/0_stateless/01070_mutations_with_dependencies.sql +++ b/tests/queries/0_stateless/01070_mutations_with_dependencies.sql @@ -1,4 +1,5 @@ -- Tags: no-parallel, no-s3-storage +-- With s3 policy TTL TO DISK 'default' doesn't work (because we have no default, only 's3') drop table if exists ttl; set mutations_sync = 2; diff --git a/tests/queries/0_stateless/01509_parallel_quorum_and_merge_long.sh b/tests/queries/0_stateless/01509_parallel_quorum_and_merge_long.sh index 55b6110918b7..9325cac0ae66 100755 --- a/tests/queries/0_stateless/01509_parallel_quorum_and_merge_long.sh +++ b/tests/queries/0_stateless/01509_parallel_quorum_and_merge_long.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -# Tags: long, no-replicated-database, no-s3-storage +# Tags: long, no-replicated-database # Tag no-replicated-database: Fails due to additional replicas or shards -# Tag no-s3-storage: Merge assigned to replica 2, but replication queues are stopped for it set -e diff --git a/tests/queries/0_stateless/01533_multiple_nested.sql b/tests/queries/0_stateless/01533_multiple_nested.sql index a61f13fc8073..94d81c110cb6 100644 --- a/tests/queries/0_stateless/01533_multiple_nested.sql +++ b/tests/queries/0_stateless/01533_multiple_nested.sql @@ -1,5 +1,5 @@ -- Tags: no-s3-storage --- Temporary supressed +-- no-s3 because read FileOpen metric DROP TABLE IF EXISTS nested; SET flatten_nested = 0; diff --git a/tests/queries/0_stateless/02343_read_from_s3_compressed_blocks.sql b/tests/queries/0_stateless/02343_read_from_s3_compressed_blocks.sql index 03e32d324976..4049cb7b3821 100644 --- a/tests/queries/0_stateless/02343_read_from_s3_compressed_blocks.sql +++ b/tests/queries/0_stateless/02343_read_from_s3_compressed_blocks.sql @@ -1,4 +1,4 @@ --- Tags: no-parallel, no-fasttest, no-s3-storage +-- Tags: no-parallel, no-fasttest DROP TABLE IF EXISTS t_s3_compressed_blocks; From 76ecd6ba604dc2ed7dc94044b3a572e499a9e2d8 Mon Sep 17 00:00:00 2001 From: Alexander Tokmakov Date: Thu, 20 Oct 2022 15:55:37 +0300 Subject: [PATCH 006/130] Merge pull request #42134 from ClickHouse/improve_replica_recovery Improve lost replica recovery (ReplicatedMergeTree) --- src/Interpreters/DDLWorker.cpp | 2 +- src/Storages/MergeTree/MergeTreeData.cpp | 5 +- src/Storages/StorageReplicatedMergeTree.cpp | 42 +++-- tests/clickhouse-test | 4 +- .../02448_clone_replica_lost_part.reference | 11 ++ .../02448_clone_replica_lost_part.sql | 147 ++++++++++++++++++ 6 files changed, 195 insertions(+), 16 deletions(-) create mode 100644 tests/queries/0_stateless/02448_clone_replica_lost_part.reference create mode 100644 tests/queries/0_stateless/02448_clone_replica_lost_part.sql diff --git a/src/Interpreters/DDLWorker.cpp b/src/Interpreters/DDLWorker.cpp index 6ec20ab5f5fc..beecd2f6ec05 100644 --- 
a/src/Interpreters/DDLWorker.cpp +++ b/src/Interpreters/DDLWorker.cpp @@ -113,7 +113,7 @@ DDLWorker::DDLWorker( void DDLWorker::startup() { [[maybe_unused]] bool prev_stop_flag = stop_flag.exchange(false); - chassert(true); + chassert(prev_stop_flag); main_thread = ThreadFromGlobalPool(&DDLWorker::runMainThread, this); cleanup_thread = ThreadFromGlobalPool(&DDLWorker::runCleanupThread, this); } diff --git a/src/Storages/MergeTree/MergeTreeData.cpp b/src/Storages/MergeTree/MergeTreeData.cpp index c6a0f85fbb41..bb90babbac21 100644 --- a/src/Storages/MergeTree/MergeTreeData.cpp +++ b/src/Storages/MergeTree/MergeTreeData.cpp @@ -3146,7 +3146,10 @@ void MergeTreeData::outdateBrokenPartAndCloneToDetached(const DataPartPtr & part LOG_INFO(log, "Cloning part {} to {}_{} and making it obsolete.", part_to_detach->data_part_storage->getPartDirectory(), prefix, part_to_detach->name); part_to_detach->makeCloneInDetached(prefix, metadata_snapshot); - removePartsFromWorkingSet(NO_TRANSACTION_RAW, {part_to_detach}, true); + + DataPartsLock lock = lockParts(); + if (part_to_detach->getState() == DataPartState::Active) + removePartsFromWorkingSet(NO_TRANSACTION_RAW, {part_to_detach}, true, &lock); } void MergeTreeData::forcefullyMovePartToDetachedAndRemoveFromMemory(const MergeTreeData::DataPartPtr & part_to_detach, const String & prefix, bool restore_covered) diff --git a/src/Storages/StorageReplicatedMergeTree.cpp b/src/Storages/StorageReplicatedMergeTree.cpp index a59eec2b5099..c08bd5c8f007 100644 --- a/src/Storages/StorageReplicatedMergeTree.cpp +++ b/src/Storages/StorageReplicatedMergeTree.cpp @@ -2452,6 +2452,7 @@ void StorageReplicatedMergeTree::cloneReplica(const String & source_replica, Coo std::vector<QueueEntryInfo> source_queue; ActiveDataPartSet get_part_set{format_version}; ActiveDataPartSet drop_range_set{format_version}; + std::unordered_set<String> exact_part_names; { std::vector<std::future<Coordination::GetResponse>> queue_get_futures; @@ -2489,14 +2490,22 @@ void StorageReplicatedMergeTree::cloneReplica(const String & source_replica, Coo info.parsed_entry->znode_name = source_queue_names[i]; if (info.parsed_entry->type == LogEntry::DROP_RANGE) + { drop_range_set.add(info.parsed_entry->new_part_name); - - if (info.parsed_entry->type == LogEntry::GET_PART) + } + else if (info.parsed_entry->type == LogEntry::GET_PART) { String maybe_covering_drop_range = drop_range_set.getContainingPart(info.parsed_entry->new_part_name); if (maybe_covering_drop_range.empty()) get_part_set.add(info.parsed_entry->new_part_name); } + else + { + /// We should keep local parts if they are present in the queue of source replica. + /// There's a chance that we are the only replica that has these parts. + Strings entry_virtual_parts = info.parsed_entry->getVirtualPartNames(format_version); + std::move(entry_virtual_parts.begin(), entry_virtual_parts.end(), std::inserter(exact_part_names, exact_part_names.end())); + } } } @@ -2516,11 +2525,17 @@ void StorageReplicatedMergeTree::cloneReplica(const String & source_replica, Coo for (const auto & part : local_parts_in_zk) { - if (get_part_set.getContainingPart(part).empty()) - { - parts_to_remove_from_zk.emplace_back(part); - LOG_WARNING(log, "Source replica does not have part {}. Removing it from ZooKeeper.", part); - } + /// We look for exact match (and not for any covering part) + /// because our part might be dropped and covering part might be merged through gap. 
+ /// (avoid resurrection of data that was removed a long time ago) + if (get_part_set.getContainingPart(part) == part) + continue; + + if (exact_part_names.contains(part)) + continue; + + parts_to_remove_from_zk.emplace_back(part); + LOG_WARNING(log, "Source replica does not have part {}. Removing it from ZooKeeper.", part); } { @@ -2542,11 +2557,14 @@ void StorageReplicatedMergeTree::cloneReplica(const String & source_replica, Coo for (const auto & part : local_active_parts) { - if (get_part_set.getContainingPart(part->name).empty()) - { - parts_to_remove_from_working_set.emplace_back(part); - LOG_WARNING(log, "Source replica does not have part {}. Removing it from working set.", part->name); - } + if (get_part_set.getContainingPart(part->name) == part->name) + continue; + + if (exact_part_names.contains(part->name)) + continue; + + parts_to_remove_from_working_set.emplace_back(part); + LOG_WARNING(log, "Source replica does not have part {}. Removing it from working set.", part->name); } if (getSettings()->detach_old_local_parts_when_cloning_replica) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index 3d62746dc48c..7f0b1175f36c 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -981,7 +981,7 @@ class TestCase: and (proc.stderr is None) and (proc.stdout is None or "Exception" not in proc.stdout) ) - need_drop_database = not maybe_passed + need_drop_database = maybe_passed debug_log = "" if os.path.exists(self.testcase_args.debug_log_file): @@ -1971,7 +1971,7 @@ if __name__ == "__main__": parser.add_argument( "--no-drop-if-fail", action="store_true", - help="Do not drop database for test if test has failed", + help="Do not drop database for test if test has failed (does not work if reference file mismatch)", ) parser.add_argument( "--hide-db-name", diff --git a/tests/queries/0_stateless/02448_clone_replica_lost_part.reference b/tests/queries/0_stateless/02448_clone_replica_lost_part.reference new file mode 100644 index 000000000000..26c6cbf438b1 --- /dev/null +++ b/tests/queries/0_stateless/02448_clone_replica_lost_part.reference @@ -0,0 +1,11 @@ +1 [2,3,4,5] +2 [1,2,3,4,5] +3 [1,2,3,4,5] +4 [3,4,5] +5 [1,2,3,4,5] +6 [1,2,3,4,5] +7 [1,2,3,4,5,20,30,40,50] +8 [1,2,3,4,5,10,20,30,40,50] +9 [1,2,3,4,5,10,20,30,40,50] +11 [1,2,3,4,5,10,20,30,40,50,100,300,400,500,600] +12 [1,2,3,4,5,10,20,30,40,50,100,300,400,500,600] diff --git a/tests/queries/0_stateless/02448_clone_replica_lost_part.sql b/tests/queries/0_stateless/02448_clone_replica_lost_part.sql new file mode 100644 index 000000000000..371f7389837f --- /dev/null +++ b/tests/queries/0_stateless/02448_clone_replica_lost_part.sql @@ -0,0 +1,147 @@ +-- Tags: long + +drop table if exists rmt1; +drop table if exists rmt2; +create table rmt1 (n int) engine=ReplicatedMergeTree('/test/02448/{database}/rmt', '1') order by tuple() + settings min_replicated_logs_to_keep=1, max_replicated_logs_to_keep=2, cleanup_delay_period=0, cleanup_delay_period_random_add=1, old_parts_lifetime=0, max_parts_to_merge_at_once=5; +create table rmt2 (n int) engine=ReplicatedMergeTree('/test/02448/{database}/rmt', '2') order by tuple() + settings min_replicated_logs_to_keep=1, max_replicated_logs_to_keep=2, cleanup_delay_period=0, cleanup_delay_period_random_add=1, old_parts_lifetime=0, max_parts_to_merge_at_once=5; + +-- insert part only on one replica +system stop replicated sends rmt1; +insert into rmt1 values (1); +detach table rmt1; -- make replica inactive +system start replicated sends rmt1; + +-- trigger log rotation, rmt1 will be 
lost +insert into rmt2 values (2); +insert into rmt2 values (3); +insert into rmt2 values (4); +insert into rmt2 values (5); +-- check that entry was not removed from the queue (part is not lost) +set receive_timeout=5; +system sync replica rmt2; -- {serverError TIMEOUT_EXCEEDED} +set receive_timeout=300; + +select 1, arraySort(groupArray(n)) from rmt2; + +-- rmt1 will mimic rmt2 +attach table rmt1; +system sync replica rmt1; +system sync replica rmt2; + +-- check that no parts are lost +select 2, arraySort(groupArray(n)) from rmt1; +select 3, arraySort(groupArray(n)) from rmt2; + + +truncate table rmt1; +truncate table rmt2; + + +-- insert parts only on one replica and merge them +system stop replicated sends rmt2; +insert into rmt2 values (1); +insert into rmt2 values (2); +system sync replica rmt2; +optimize table rmt2 final; +system sync replica rmt2; +-- give it a chance to remove source parts +select sleep(2) format Null; -- increases probability of reproducing the issue +detach table rmt2; +system start replicated sends rmt2; + + +-- trigger log rotation, rmt2 will be lost +insert into rmt1 values (3); +insert into rmt1 values (4); +insert into rmt1 values (5); +set receive_timeout=5; +-- check that entry was not removed from the queue (part is not lost) +system sync replica rmt1; -- {serverError TIMEOUT_EXCEEDED} +set receive_timeout=300; + +select 4, arraySort(groupArray(n)) from rmt1; + +-- rmt1 will mimic rmt2 +system stop fetches rmt1; +attach table rmt2; +system sync replica rmt2; +-- give rmt2 a chance to remove merged part (but it should not do it) +select sleep(2) format Null; -- increases probability of reproducing the issue +system start fetches rmt1; +system sync replica rmt1; + +-- check that no parts are lost +select 5, arraySort(groupArray(n)) from rmt1; +select 6, arraySort(groupArray(n)) from rmt2; + + +-- insert part only on one replica +system stop replicated sends rmt1; +insert into rmt1 values (123); +alter table rmt1 update n=10 where n=123 settings mutations_sync=1; +-- give it a chance to remove source part +select sleep(2) format Null; -- increases probability of reproducing the issue +detach table rmt1; -- make replica inactive +system start replicated sends rmt1; + +-- trigger log rotation, rmt1 will be lost +insert into rmt2 values (20); +insert into rmt2 values (30); +insert into rmt2 values (40); +insert into rmt2 values (50); +-- check that entry was not removed from the queue (part is not lost) +set receive_timeout=5; +system sync replica rmt2; -- {serverError TIMEOUT_EXCEEDED} +set receive_timeout=300; + +select 7, arraySort(groupArray(n)) from rmt2; + +-- rmt1 will mimic rmt2 +system stop fetches rmt2; +attach table rmt1; +system sync replica rmt1; +-- give rmt1 a chance to remove mutated part (but it should not do it) +select sleep(2) format Null; -- increases probability of reproducing the issue +system start fetches rmt2; +system sync replica rmt2; + +-- check that no parts are lost +select 8, arraySort(groupArray(n)) from rmt1; +select 9, arraySort(groupArray(n)) from rmt2; + +-- avoid arbitrary merges after inserting +optimize table rmt2 final; +-- insert parts (all_18_18_0, all_19_19_0) on both replicas (will be deduplicated, but it does not matter) +insert into rmt1 values (100); +insert into rmt2 values (100); +insert into rmt1 values (200); +insert into rmt2 values (200); +detach table rmt1; + +-- create a gap in block numbers by dropping part +insert 
into rmt2 values (400); +insert into rmt2 values (500); +insert into rmt2 values (600); +system sync replica rmt2; +-- merge through gap +optimize table rmt2; +-- give it a chance to cleanup log +select sleep(2) format Null; -- increases probability of reproducing the issue + +-- rmt1 will mimic rmt2, but will not be able to fetch parts for a while +system stop replicated sends rmt2; +attach table rmt1; +-- rmt1 should not show the value (200) from dropped part +select throwIf(n = 200) from rmt1 format Null; +select 11, arraySort(groupArray(n)) from rmt2; + +system start replicated sends rmt2; +system sync replica rmt1; +select 12, arraySort(groupArray(n)) from rmt1; + +drop table rmt1; +drop table rmt2; From d0c6650c5c35334aa790fa90f940d72d286d6a33 Mon Sep 17 00:00:00 2001 From: Alexander Tokmakov Date: Thu, 9 Feb 2023 02:29:29 +0300 Subject: [PATCH 007/130] Merge pull request #45629 from arthurpassos/fix_cares_crash Cancel c-ares failed requests and retry on system interrupts to prevent callbacks with dangling references and premature resolution failures --- .../functions/ip-address-functions.md | 38 +++++ src/Common/CaresPTRResolver.cpp | 53 ++++++- src/Common/CaresPTRResolver.h | 7 +- .../tests/gtest_dns_reverse_resolve.cpp | 57 +++++++ src/Functions/reverseDNSQuery.cpp | 142 ++++++++++++++++++ .../config.d/reverse_dns_query_function.xml | 3 + tests/config/install.sh | 1 + .../test_reverse_dns_query/__init__.py | 0 .../test_reverse_dns_query/configs/config.xml | 3 + .../configs/listen_host.xml | 5 + .../configs/reverse_dns_function.xml | 3 + .../coredns_config/Corefile | 4 + .../test_reverse_dns_query/test.py | 50 ++++++ ...2483_test_reverse_dns_resolution.reference | 14 ++ .../02483_test_reverse_dns_resolution.sql | 14 ++ 15 files changed, 385 insertions(+), 9 deletions(-) create mode 100644 src/Common/tests/gtest_dns_reverse_resolve.cpp create mode 100644 src/Functions/reverseDNSQuery.cpp create mode 100644 tests/config/config.d/reverse_dns_query_function.xml create mode 100644 tests/integration/test_reverse_dns_query/__init__.py create mode 100644 tests/integration/test_reverse_dns_query/configs/config.xml create mode 100644 tests/integration/test_reverse_dns_query/configs/listen_host.xml create mode 100644 tests/integration/test_reverse_dns_query/configs/reverse_dns_function.xml create mode 100644 tests/integration/test_reverse_dns_query/coredns_config/Corefile create mode 100644 tests/integration/test_reverse_dns_query/test.py create mode 100644 tests/queries/0_stateless/02483_test_reverse_dns_resolution.reference create mode 100644 tests/queries/0_stateless/02483_test_reverse_dns_resolution.sql diff --git a/docs/en/sql-reference/functions/ip-address-functions.md b/docs/en/sql-reference/functions/ip-address-functions.md index 9b34a4db4408..d8f14b2305ad 100644 --- a/docs/en/sql-reference/functions/ip-address-functions.md +++ b/docs/en/sql-reference/functions/ip-address-functions.md @@ -492,3 +492,41 @@ Result: │ 0 │ └────────────────────────────────────────────────────────────────────┘ ``` + +## reverseDNSQuery + +Performs a reverse DNS query to get the PTR records associated with the IP address. + +**Syntax** + +``` sql +reverseDNSQuery(address) +``` + +This function performs reverse DNS resolutions on both IPv4 and IPv6. + +**Arguments** + +- `address` — An IPv4 or IPv6 address. [String](../../sql-reference/data-types/string.md). + +**Returned value** + +- Associated domains (PTR records). + +Type: Type: [Array(String)](../../sql-reference/data-types/array.md). 
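Note that the function is gated by a server-level switch: the implementation added later in this patch only runs when `allow_reverse_dns_query_function` is enabled in the server config. A minimal sketch of that config file (the tag name comes from `allow_function_config_name` in reverseDNSQuery.cpp; the exact XML layout is an assumption, since the tags were stripped from the test configs quoted in this patch):

``` xml
<clickhouse>
    <!-- assumed layout: enables the reverseDNSQuery() function -->
    <allow_reverse_dns_query_function>1</allow_reverse_dns_query_function>
</clickhouse>
```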
+ +**Example** + +Query: + +``` sql +SELECT reverseDNSQuery('192.168.0.2'); +``` + +Result: + +``` text +┌─reverseDNSQuery('192.168.0.2')────────────┐ +│ ['test2.example.com','test3.example.com'] │ +└───────────────────────────────────────────┘ +``` diff --git a/src/Common/CaresPTRResolver.cpp b/src/Common/CaresPTRResolver.cpp index c6fe70a09fa4..ded8fdd62b6a 100644 --- a/src/Common/CaresPTRResolver.cpp +++ b/src/Common/CaresPTRResolver.cpp @@ -16,9 +16,9 @@ namespace DB static void callback(void * arg, int status, int, struct hostent * host) { - auto * ptr_records = static_cast*>(arg); - if (ptr_records && status == ARES_SUCCESS) + if (status == ARES_SUCCESS) { + auto * ptr_records = static_cast*>(arg); /* * In some cases (e.g /etc/hosts), hostent::h_name is filled and hostent::h_aliases is empty. * Thus, we can't rely solely on hostent::h_aliases. More info on: @@ -81,7 +81,12 @@ namespace DB std::unordered_set ptr_records; resolve(ip, ptr_records); - wait(); + + if (!wait_and_process()) + { + cancel_requests(); + throw DB::Exception(DB::ErrorCodes::DNS_ERROR, "Failed to complete reverse DNS query for IP {}", ip); + } return ptr_records; } @@ -93,7 +98,12 @@ namespace DB std::unordered_set ptr_records; resolve_v6(ip, ptr_records); - wait(); + + if (!wait_and_process()) + { + cancel_requests(); + throw DB::Exception(DB::ErrorCodes::DNS_ERROR, "Failed to complete reverse DNS query for IP {}", ip); + } return ptr_records; } @@ -115,7 +125,7 @@ namespace DB ares_gethostbyaddr(channel, reinterpret_cast(&addr), sizeof(addr), AF_INET6, callback, &response); } - void CaresPTRResolver::wait() + bool CaresPTRResolver::wait_and_process() { int sockets[ARES_GETSOCK_MAXNUM]; pollfd pollfd[ARES_GETSOCK_MAXNUM]; @@ -129,6 +139,21 @@ namespace DB if (!readable_sockets.empty()) { number_of_fds_ready = poll(readable_sockets.data(), readable_sockets.size(), timeout); + + bool poll_error = number_of_fds_ready < 0; + bool is_poll_error_an_interrupt = poll_error && errno == EINTR; + + /* + * Retry in case of interrupts and return false in case of actual errors. + * */ + if (is_poll_error_an_interrupt) + { + continue; + } + else if (poll_error) + { + return false; + } } if (number_of_fds_ready > 0) @@ -141,6 +166,13 @@ namespace DB break; } } + + return true; + } + + void CaresPTRResolver::cancel_requests() + { + ares_cancel(channel); } std::span CaresPTRResolver::get_readable_sockets(int * sockets, pollfd * pollfd) @@ -149,7 +181,7 @@ namespace DB int number_of_sockets_to_poll = 0; - for (int i = 0; i < ARES_GETSOCK_MAXNUM; i++, number_of_sockets_to_poll++) + for (int i = 0; i < ARES_GETSOCK_MAXNUM; i++) { pollfd[i].events = 0; pollfd[i].revents = 0; @@ -157,7 +189,12 @@ namespace DB if (ARES_GETSOCK_READABLE(sockets_bitmask, i)) { pollfd[i].fd = sockets[i]; - pollfd[i].events = POLLIN; + pollfd[i].events = C_ARES_POLL_EVENTS; + } + + if (pollfd[i].events) + { + number_of_sockets_to_poll++; } else { @@ -192,7 +229,7 @@ namespace DB { for (auto readable_socket : readable_sockets) { - auto fd = readable_socket.revents & POLLIN ? readable_socket.fd : ARES_SOCKET_BAD; + auto fd = readable_socket.revents & C_ARES_POLL_EVENTS ? 
readable_socket.fd : ARES_SOCKET_BAD; ares_process_fd(channel, fd, ARES_SOCKET_BAD); } } diff --git a/src/Common/CaresPTRResolver.h b/src/Common/CaresPTRResolver.h index 9df6d7aeb72d..454509ae43c3 100644 --- a/src/Common/CaresPTRResolver.h +++ b/src/Common/CaresPTRResolver.h @@ -23,6 +23,9 @@ namespace DB * Allow only DNSPTRProvider to instantiate this class * */ struct provider_token {}; + + static constexpr auto C_ARES_POLL_EVENTS = POLLRDNORM | POLLIN; + public: explicit CaresPTRResolver(provider_token); ~CaresPTRResolver() override; @@ -32,7 +35,9 @@ namespace DB std::unordered_set resolve_v6(const std::string & ip) override; private: - void wait(); + bool wait_and_process(); + + void cancel_requests(); void resolve(const std::string & ip, std::unordered_set & response); diff --git a/src/Common/tests/gtest_dns_reverse_resolve.cpp b/src/Common/tests/gtest_dns_reverse_resolve.cpp new file mode 100644 index 000000000000..08351564eaf4 --- /dev/null +++ b/src/Common/tests/gtest_dns_reverse_resolve.cpp @@ -0,0 +1,57 @@ +#include +#include +#include +#include +#include +#include + +namespace DB +{ +TEST(Common, ReverseDNS) +{ + auto addresses = std::vector({ + "8.8.8.8", "2001:4860:4860::8888", // dns.google + "142.250.219.35", // google.com + "157.240.12.35", // facebook + "208.84.244.116", "2600:1419:c400::214:c410", //www.terra.com.br, + "127.0.0.1", "::1" + }); + + auto func = [&]() + { + // Good random seed, good engine + auto rnd1 = std::mt19937(std::random_device{}()); + + for (int i = 0; i < 50; ++i) + { + auto & dns_resolver_instance = DNSResolver::instance(); +// unfortunately, DNS cache can't be disabled because we might end up causing a DDoS attack +// dns_resolver_instance.setDisableCacheFlag(); + + auto addr_index = rnd1() % addresses.size(); + + [[maybe_unused]] auto result = dns_resolver_instance.reverseResolve(Poco::Net::IPAddress{ addresses[addr_index] }); + +// will not assert either because some of the IP addresses might change in the future and +// this test will become flaky +// ASSERT_TRUE(!result.empty()); + } + + }; + + auto number_of_threads = 200u; + std::vector threads; + threads.reserve(number_of_threads); + + for (auto i = 0u; i < number_of_threads; i++) + { + threads.emplace_back(func); + } + + for (auto & thread : threads) + { + thread.join(); + } + +} +} diff --git a/src/Functions/reverseDNSQuery.cpp b/src/Functions/reverseDNSQuery.cpp new file mode 100644 index 000000000000..65ce5e859c53 --- /dev/null +++ b/src/Functions/reverseDNSQuery.cpp @@ -0,0 +1,142 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace DB +{ + +namespace ErrorCodes +{ + extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH; + extern const int BAD_ARGUMENTS; + extern const int FUNCTION_NOT_ALLOWED; +} + +class ReverseDNSQuery : public IFunction +{ +public: + static constexpr auto name = "reverseDNSQuery"; + static constexpr auto allow_function_config_name = "allow_reverse_dns_query_function"; + + static FunctionPtr create(ContextPtr) + { + return std::make_shared(); + } + + String getName() const override + { + return name; + } + + ColumnPtr executeImpl(const ColumnsWithTypeAndName & arguments, const DataTypePtr & data_type, size_t input_rows_count) const override + { + if (!Context::getGlobalContextInstance()->getConfigRef().getBool(allow_function_config_name, false)) + { + throw Exception(ErrorCodes::FUNCTION_NOT_ALLOWED, "Function {} is not allowed because {} is not set", name, allow_function_config_name); + } + + if 
(arguments.empty()) + { + throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH, "Function {} requires at least one argument", name); + } + + auto res_type = getReturnTypeImpl({data_type}); + + if (input_rows_count == 0u) + { + return res_type->createColumnConstWithDefaultValue(input_rows_count); + } + + if (!isString(arguments[0].type)) + { + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Function {} requires the input column to be of type String", name); + } + + auto input_column = arguments[0].column; + + auto ip_address = Poco::Net::IPAddress(input_column->getDataAt(0).toString()); + + auto ptr_records = DNSResolver::instance().reverseResolve(ip_address); + + if (ptr_records.empty()) + return res_type->createColumnConstWithDefaultValue(input_rows_count); + + Array res; + + for (const auto & ptr_record : ptr_records) + { + res.push_back(ptr_record); + } + + return res_type->createColumnConst(input_rows_count, res); + } + + bool isSuitableForShortCircuitArgumentsExecution(const DataTypesWithConstInfo & /*arguments*/) const override + { + return false; + } + + size_t getNumberOfArguments() const override + { + return 1u; + } + + DataTypePtr getReturnTypeImpl(const DataTypes & /*arguments*/) const override + { + return std::make_shared<DataTypeArray>(std::make_shared<DataTypeString>()); + } + +}; + + +REGISTER_FUNCTION(ReverseDNSQuery) +{ + factory.registerFunction<ReverseDNSQuery>( + Documentation( + R"(Performs a reverse DNS query to get the PTR records associated with the IP address. + + **Syntax** + + ``` sql + reverseDNSQuery(address) + ``` + + This function performs reverse DNS resolutions on both IPv4 and IPv6. + + **Arguments** + + - `address` — An IPv4 or IPv6 address. [String](../../sql-reference/data-types/string.md). + + **Returned value** + + - Associated domains (PTR records). + + Type: Type: [Array(String)](../../sql-reference/data-types/array.md). 
+ + **Example** + + Query: + + ``` sql + SELECT reverseDNSQuery('192.168.0.2'); + ``` + + Result: + + ``` text + ┌─reverseDNSQuery('192.168.0.2')────────────┐ + │ ['test2.example.com','test3.example.com'] │ + └───────────────────────────────────────────┘ + ``` + )") + ); +} + +} diff --git a/tests/config/config.d/reverse_dns_query_function.xml b/tests/config/config.d/reverse_dns_query_function.xml new file mode 100644 index 000000000000..8cd7695b4d33 --- /dev/null +++ b/tests/config/config.d/reverse_dns_query_function.xml @@ -0,0 +1,3 @@ + + 1 + \ No newline at end of file diff --git a/tests/config/install.sh b/tests/config/install.sh index b9d51a7f133f..7dca38b66cd4 100755 --- a/tests/config/install.sh +++ b/tests/config/install.sh @@ -49,6 +49,7 @@ ln -sf $SRC_PATH/config.d/ssl_certs.xml $DEST_SERVER_PATH/config.d/ ln -sf $SRC_PATH/config.d/filesystem_cache_log.xml $DEST_SERVER_PATH/config.d/ ln -sf $SRC_PATH/config.d/session_log.xml $DEST_SERVER_PATH/config.d/ ln -sf $SRC_PATH/config.d/system_unfreeze.xml $DEST_SERVER_PATH/config.d/ +ln -sf $SRC_PATH/config.d/reverse_dns_query_function.xml $DEST_SERVER_PATH/config.d/ ln -sf $SRC_PATH/users.d/log_queries.xml $DEST_SERVER_PATH/users.d/ ln -sf $SRC_PATH/users.d/readonly.xml $DEST_SERVER_PATH/users.d/ diff --git a/tests/integration/test_reverse_dns_query/__init__.py b/tests/integration/test_reverse_dns_query/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/integration/test_reverse_dns_query/configs/config.xml b/tests/integration/test_reverse_dns_query/configs/config.xml new file mode 100644 index 000000000000..5ce55afa2a7e --- /dev/null +++ b/tests/integration/test_reverse_dns_query/configs/config.xml @@ -0,0 +1,3 @@ + + 1 + diff --git a/tests/integration/test_reverse_dns_query/configs/listen_host.xml b/tests/integration/test_reverse_dns_query/configs/listen_host.xml new file mode 100644 index 000000000000..58ef55cd3f35 --- /dev/null +++ b/tests/integration/test_reverse_dns_query/configs/listen_host.xml @@ -0,0 +1,5 @@ + + :: + 0.0.0.0 + 1 + diff --git a/tests/integration/test_reverse_dns_query/configs/reverse_dns_function.xml b/tests/integration/test_reverse_dns_query/configs/reverse_dns_function.xml new file mode 100644 index 000000000000..35d0a07c6a6c --- /dev/null +++ b/tests/integration/test_reverse_dns_query/configs/reverse_dns_function.xml @@ -0,0 +1,3 @@ + + 1 + diff --git a/tests/integration/test_reverse_dns_query/coredns_config/Corefile b/tests/integration/test_reverse_dns_query/coredns_config/Corefile new file mode 100644 index 000000000000..84d297f7cdfa --- /dev/null +++ b/tests/integration/test_reverse_dns_query/coredns_config/Corefile @@ -0,0 +1,4 @@ +. { + forward . 
127.0.0.11 + log +} diff --git a/tests/integration/test_reverse_dns_query/test.py b/tests/integration/test_reverse_dns_query/test.py new file mode 100644 index 000000000000..c5d4304a5d2e --- /dev/null +++ b/tests/integration/test_reverse_dns_query/test.py @@ -0,0 +1,50 @@ +import pytest +from helpers.cluster import ClickHouseCluster, get_docker_compose_path, run_and_check +from time import sleep +import os + +DOCKER_COMPOSE_PATH = get_docker_compose_path() +SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) + +cluster = ClickHouseCluster(__file__) + +ch_server = cluster.add_instance( + "clickhouse-server", + with_coredns=True, + main_configs=[ + "configs/config.xml", + "configs/reverse_dns_function.xml", + "configs/listen_host.xml", + ], +) + + +@pytest.fixture(scope="module") +def started_cluster(): + global cluster + try: + cluster.start() + yield cluster + + finally: + cluster.shutdown() + + +def setup_ch_server(dns_server_ip): + ch_server.exec_in_container( + (["bash", "-c", f"echo 'nameserver {dns_server_ip}' > /etc/resolv.conf"]) + ) + ch_server.exec_in_container( + (["bash", "-c", "echo 'options ndots:0' >> /etc/resolv.conf"]) + ) + ch_server.query("SYSTEM DROP DNS CACHE") + + +def test_reverse_dns_query(started_cluster): + dns_server_ip = cluster.get_instance_ip(cluster.coredns_host) + + setup_ch_server(dns_server_ip) + + for _ in range(0, 200): + response = ch_server.query("select reverseDNSQuery('2001:4860:4860::8888')") + assert response == "['dns.google']\n" diff --git a/tests/queries/0_stateless/02483_test_reverse_dns_resolution.reference b/tests/queries/0_stateless/02483_test_reverse_dns_resolution.reference new file mode 100644 index 000000000000..2bae467069f8 --- /dev/null +++ b/tests/queries/0_stateless/02483_test_reverse_dns_resolution.reference @@ -0,0 +1,14 @@ +-- { echoOn } +-- Expect dns.google on both queries +select reverseDNSQuery('8.8.8.8'); +['dns.google'] +select reverseDNSQuery('2001:4860:4860::8888'); +['dns.google'] +-- Expect empty response +select reverseDNSQuery(''); +[] +-- Expect error, invalid column type +select reverseDNSQuery(1); -- {serverError 36} +-- Expect error, wrong number of arguments +select reverseDNSQuery(); -- {serverError 42} +select reverseDNSQuery(1, 2); -- {serverError 42} diff --git a/tests/queries/0_stateless/02483_test_reverse_dns_resolution.sql b/tests/queries/0_stateless/02483_test_reverse_dns_resolution.sql new file mode 100644 index 000000000000..d9576c0641a9 --- /dev/null +++ b/tests/queries/0_stateless/02483_test_reverse_dns_resolution.sql @@ -0,0 +1,14 @@ +-- { echoOn } +-- Expect dns.google on both queries +select reverseDNSQuery('8.8.8.8'); +select reverseDNSQuery('2001:4860:4860::8888'); + +-- Expect empty response +select reverseDNSQuery(''); + +-- Expect error, invalid column type +select reverseDNSQuery(1); -- {serverError 36} + +-- Expect error, wrong number of arguments +select reverseDNSQuery(); -- {serverError 42} +select reverseDNSQuery(1, 2); -- {serverError 42} From d96d063f6b854c493b8c05596a0ffc714c7146e9 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Thu, 9 Feb 2023 17:15:58 -0300 Subject: [PATCH 008/130] Remove documentation instance --- src/Functions/reverseDNSQuery.cpp | 40 +------------------------------ 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/src/Functions/reverseDNSQuery.cpp b/src/Functions/reverseDNSQuery.cpp index 65ce5e859c53..00b1b3dac229 100644 --- a/src/Functions/reverseDNSQuery.cpp +++ b/src/Functions/reverseDNSQuery.cpp @@ -98,45 +98,7 @@ class ReverseDNSQuery : 
public IFunction REGISTER_FUNCTION(ReverseDNSQuery) { - factory.registerFunction( - Documentation( - R"(Performs a reverse DNS query to get the PTR records associated with the IP address. - - **Syntax** - - ``` sql - reverseDNSQuery(address) - ``` - - This function performs reverse DNS resolutions on both IPv4 and IPv6. - - **Arguments** - - - `address` — An IPv4 or IPv6 address. [String](../../sql-reference/data-types/string.md). - - **Returned value** - - - Associated domains (PTR records). - - Type: Type: [Array(String)](../../sql-reference/data-types/array.md). - - **Example** - - Query: - - ``` sql - SELECT reverseDNSQuery('192.168.0.2'); - ``` - - Result: - - ``` text - ┌─reverseDNSQuery('192.168.0.2')────────────┐ - │ ['test2.example.com','test3.example.com'] │ - └───────────────────────────────────────────┘ - ``` - )") - ); + factory.registerFunction(); } } From 2ecca4e00ee9accd2b8612a11474ba31ad6a4f32 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 15 Feb 2023 19:15:37 +0400 Subject: [PATCH 009/130] Scheduled CI/CD run 00:00 every Sunday --- .github/workflows/release_branches.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d210c8ac7103..1a101bc064d6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -20,6 +20,8 @@ on: # yamllint disable-line rule:truthy push: branches: - 'releases/22.8**' + schedule: + - cron: '0 0 * * 0' jobs: # DockerHubPushAarch64: From ca174c79dc9e88b17503d871b8818dd648df2561 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 28 Feb 2023 17:19:07 +0100 Subject: [PATCH 010/130] Using secrets for managing AWS access --- .github/workflows/release_branches.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d210c8ac7103..2f89e33b6d2d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -21,6 +21,11 @@ on: # yamllint disable-line rule:truthy branches: - 'releases/22.8**' +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + jobs: # DockerHubPushAarch64: # runs-on: [self-hosted, style-checker-aarch64] From df30db829ff57a2362d36e8681fe94623af42a69 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 28 Feb 2023 17:29:52 +0100 Subject: [PATCH 011/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 2f89e33b6d2d..a06264449dfc 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -3,6 +3,10 @@ name: ReleaseBranchCI env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + on: # yamllint disable-line rule:truthy pull_request: @@ -21,11 +25,6 @@ on: # yamllint disable-line rule:truthy branches: - 'releases/22.8**' -env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - jobs: # DockerHubPushAarch64: # runs-on: 
[self-hosted, style-checker-aarch64] From 0e2ba9b4e58a38397d44b80de5a2004a0da08c31 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 3 Mar 2023 00:37:24 +0100 Subject: [PATCH 012/130] Fixed tag checking code --- tests/ci/git_helper.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index 10f17f083889..dce9a0e993d5 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -10,10 +10,13 @@ # ^ and $ match subline in `multiple\nlines` # \A and \Z match only start and end of the whole string +# NOTE (vnemkov): support both upstream tag style: v22.x.y.z-lts and Altinity tag style: v22.x.y.z.altinitystable +# Because at early release stages there could be no Altinity tag set on commit, only upstream one. RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z" TAG_REGEXP = ( - r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts|altinitystable)\Z" + r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*[-\.](testing|prestable|stable|lts|altinitystable)\Z" ) + SHA_REGEXP = r"\A([0-9]|[a-f]){40}\Z" CWD = p.dirname(p.realpath(__file__)) From edd1b038bdd967dae4c55d89083bae63f0ea4d22 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Fri, 28 Oct 2022 16:18:00 +0200 Subject: [PATCH 013/130] Merge pull request #42469 from amosbird/issue_42456 Fix misbehavior of key analysis --- src/Interpreters/convertFieldToType.cpp | 5 +++-- src/Storages/MergeTree/KeyCondition.cpp | 3 ++- ...ondition_with_types_that_cannot_be_nullable.reference | 1 + ..._key_condition_with_types_that_cannot_be_nullable.sql | 9 +++++++++ 4 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.reference create mode 100644 tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.sql diff --git a/src/Interpreters/convertFieldToType.cpp b/src/Interpreters/convertFieldToType.cpp index 4e7562ef451e..fdbae838ab41 100644 --- a/src/Interpreters/convertFieldToType.cpp +++ b/src/Interpreters/convertFieldToType.cpp @@ -218,10 +218,11 @@ Field convertFieldToTypeImpl(const Field & src, const IDataType & type, const ID } if (which_type.isDateTime64() - && (which_from_type.isNativeInt() || which_from_type.isNativeUInt() || which_from_type.isDate() || which_from_type.isDate32() || which_from_type.isDateTime() || which_from_type.isDateTime64())) + && (src.getType() == Field::Types::UInt64 || src.getType() == Field::Types::Int64 || src.getType() == Field::Types::Decimal64)) { const auto scale = static_cast(type).getScale(); - const auto decimal_value = DecimalUtils::decimalFromComponents(applyVisitor(FieldVisitorConvertToNumber(), src), 0, scale); + const auto decimal_value + = DecimalUtils::decimalFromComponents(applyVisitor(FieldVisitorConvertToNumber(), src), 0, scale); return Field(DecimalField(decimal_value, scale)); } } diff --git a/src/Storages/MergeTree/KeyCondition.cpp b/src/Storages/MergeTree/KeyCondition.cpp index caddb85263df..27664753bad8 100644 --- a/src/Storages/MergeTree/KeyCondition.cpp +++ b/src/Storages/MergeTree/KeyCondition.cpp @@ -1172,7 +1172,8 @@ bool KeyCondition::transformConstantWithValidFunctions( if (is_valid_chain) { - auto const_type = cur_node->result_type; + out_type = removeLowCardinality(out_type); + auto const_type = removeLowCardinality(cur_node->result_type); auto const_column = out_type->createColumnConst(1, out_value); auto const_value = (*castColumnAccurateOrNull({const_column, out_type, ""}, const_type))[0]; diff 
--git a/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.reference b/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.reference new file mode 100644 index 000000000000..13b65c29f05d --- /dev/null +++ b/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.reference @@ -0,0 +1 @@ +printer1 diff --git a/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.sql b/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.sql new file mode 100644 index 000000000000..690ec6c70e0b --- /dev/null +++ b/tests/queries/0_stateless/02457_key_condition_with_types_that_cannot_be_nullable.sql @@ -0,0 +1,9 @@ +drop table if exists test; + +create table test (Printer LowCardinality(String), IntervalStart DateTime) engine MergeTree partition by (hiveHash(Printer), toYear(IntervalStart)) order by (Printer, IntervalStart); + +insert into test values ('printer1', '2006-02-07 06:28:15'); + +select Printer from test where Printer='printer1'; + +drop table test; From 0c3acbbf91bfa562dcb7e9cac56f5dfb5f102b9f Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 20 Sep 2022 06:08:14 +0300 Subject: [PATCH 014/130] Merge pull request #41483 from amosbird/jbod-fix1 Fix nullptr dereference in DB::VolumeJBOD::reserve --- src/Disks/IDisk.h | 1 + src/Disks/VolumeJBOD.h | 3 +++ 2 files changed, 4 insertions(+) diff --git a/src/Disks/IDisk.h b/src/Disks/IDisk.h index b2552ec331f0..4d522ff52c57 100644 --- a/src/Disks/IDisk.h +++ b/src/Disks/IDisk.h @@ -71,6 +71,7 @@ class Space : public std::enable_shared_from_this<Space> virtual const String & getName() const = 0; /// Reserve the specified number of bytes. + /// Returns a valid reservation, or nullptr on failure. virtual ReservationPtr reserve(UInt64 bytes) = 0; virtual ~Space() = default; diff --git a/src/Disks/VolumeJBOD.h b/src/Disks/VolumeJBOD.h index 21d61e6dd8d0..81da64c488d1 100644 --- a/src/Disks/VolumeJBOD.h +++ b/src/Disks/VolumeJBOD.h @@ -82,6 +82,9 @@ class VolumeJBOD : public IVolume ReservationPtr reserve(uint64_t bytes) { ReservationPtr reservation = disk->reserve(bytes); + if (!reservation) + return {}; + /// Not just subtract bytes, but update the value, /// since some reservations may be done directly via IDisk, or not by ClickHouse. 
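/// A hedged caller-side sketch (illustrative names, not from this patch): with this fix,
/// code that consumes Space::reserve() must treat an empty ReservationPtr as "no space":
///     if (ReservationPtr res = volume->reserve(bytes))
///         writeTo(res->getDisk());   /// safe: reservation succeeded
///     else
///         throw Exception("Cannot reserve space", ErrorCodes::NOT_ENOUGH_SPACE);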
free_size = reservation->getUnreservedSpace(); From 6b004a61ae53083acac1a3f05b5e37c7a13e44cf Mon Sep 17 00:00:00 2001 From: avogar Date: Fri, 20 Jan 2023 16:40:33 +0000 Subject: [PATCH 015/130] Fix aborts in arrow lib --- contrib/arrow | 2 +- src/Processors/Formats/Impl/ArrowBufferedStreams.cpp | 7 +++++++ src/Processors/Formats/Impl/ArrowBufferedStreams.h | 3 +++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/contrib/arrow b/contrib/arrow index 450a56387043..d03245f801f7 160000 --- a/contrib/arrow +++ b/contrib/arrow @@ -1 +1 @@ -Subproject commit 450a5638704386356f8e520080468fc9bc8bcaf8 +Subproject commit d03245f801f798c63ee9a7d2b8914a9e5c5cd666 diff --git a/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp b/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp index ebd9783b4fd4..96ed2a7021e0 100644 --- a/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp +++ b/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp @@ -11,6 +11,7 @@ #include #include #include +#include #include #include #include @@ -95,6 +96,12 @@ arrow::Result<std::shared_ptr<arrow::Buffer>> RandomAccessFileFromSeekableReadBu return buffer; } +arrow::Future<std::shared_ptr<arrow::Buffer>> RandomAccessFileFromSeekableReadBuffer::ReadAsync(const arrow::io::IOContext &, int64_t position, int64_t nbytes) +{ + /// Just a stub to avoid using internal arrow thread pool + return arrow::Future<std::shared_ptr<arrow::Buffer>>::MakeFinished(ReadAt(position, nbytes)); +} + arrow::Status RandomAccessFileFromSeekableReadBuffer::Seek(int64_t position) { seekable_in.seek(position, SEEK_SET); diff --git a/src/Processors/Formats/Impl/ArrowBufferedStreams.h b/src/Processors/Formats/Impl/ArrowBufferedStreams.h index da0382837313..b1494d853fe2 100644 --- a/src/Processors/Formats/Impl/ArrowBufferedStreams.h +++ b/src/Processors/Formats/Impl/ArrowBufferedStreams.h @@ -62,6 +62,9 @@ class RandomAccessFileFromSeekableReadBuffer : public arrow::io::RandomAccessFil arrow::Result<std::shared_ptr<arrow::Buffer>> Read(int64_t nbytes) override; + /// Override async reading to not use internal arrow thread pool. + arrow::Future<std::shared_ptr<arrow::Buffer>> ReadAsync(const arrow::io::IOContext&, int64_t position, int64_t nbytes) override; + arrow::Status Seek(int64_t position) override; private: From 2a6d87564fee59c5dc5c3021c7a1a0226f2c53f9 Mon Sep 17 00:00:00 2001 From: avogar Date: Fri, 20 Jan 2023 16:41:59 +0000 Subject: [PATCH 016/130] Better comment --- src/Processors/Formats/Impl/ArrowBufferedStreams.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Processors/Formats/Impl/ArrowBufferedStreams.h b/src/Processors/Formats/Impl/ArrowBufferedStreams.h index b1494d853fe2..cae871bb8a68 100644 --- a/src/Processors/Formats/Impl/ArrowBufferedStreams.h +++ b/src/Processors/Formats/Impl/ArrowBufferedStreams.h @@ -62,7 +62,7 @@ class RandomAccessFileFromSeekableReadBuffer : public arrow::io::RandomAccessFil arrow::Result<std::shared_ptr<arrow::Buffer>> Read(int64_t nbytes) override; - /// Override async reading to not use internal arrow thread pool. + /// Override async reading to avoid using internal arrow thread pool. 
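+ /// A hedged sketch of what a caller observes (illustrative, not from this commit): the
+ /// returned future is already completed, so waiting on it never touches arrow's IO pool:
+ ///     arrow::Future<std::shared_ptr<arrow::Buffer>> fut = file->ReadAsync({}, /*position=*/0, /*nbytes=*/16);
+ ///     auto buffer = fut.result();   /// finished future produced by MakeFinished(ReadAt(...))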
arrow::Future<std::shared_ptr<arrow::Buffer>> ReadAsync(const arrow::io::IOContext&, int64_t position, int64_t nbytes) override; arrow::Status Seek(int64_t position) override; private: From a02b8ab9c9e5c049f2af601384b676bad5173067 Mon Sep 17 00:00:00 2001 From: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> Date: Mon, 23 Jan 2023 15:31:07 +0100 Subject: [PATCH 017/130] Better comment --- src/Processors/Formats/Impl/ArrowBufferedStreams.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Processors/Formats/Impl/ArrowBufferedStreams.h b/src/Processors/Formats/Impl/ArrowBufferedStreams.h index cae871bb8a68..3be31e91f0be 100644 --- a/src/Processors/Formats/Impl/ArrowBufferedStreams.h +++ b/src/Processors/Formats/Impl/ArrowBufferedStreams.h @@ -63,6 +63,8 @@ class RandomAccessFileFromSeekableReadBuffer : public arrow::io::RandomAccessFil arrow::Result<std::shared_ptr<arrow::Buffer>> Read(int64_t nbytes) override; /// Override async reading to avoid using internal arrow thread pool. + /// In our code we don't use async reading, so implementation is sync, + /// we just call ReadAt and return future with rady value. arrow::Future<std::shared_ptr<arrow::Buffer>> ReadAsync(const arrow::io::IOContext&, int64_t position, int64_t nbytes) override; arrow::Status Seek(int64_t position) override; From c28462f5cb206da492af50ba05033e621ef535c5 Mon Sep 17 00:00:00 2001 From: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> Date: Mon, 23 Jan 2023 17:13:16 +0100 Subject: [PATCH 018/130] Fix typo --- src/Processors/Formats/Impl/ArrowBufferedStreams.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Processors/Formats/Impl/ArrowBufferedStreams.h b/src/Processors/Formats/Impl/ArrowBufferedStreams.h index 3be31e91f0be..4169ce8fc80e 100644 --- a/src/Processors/Formats/Impl/ArrowBufferedStreams.h +++ b/src/Processors/Formats/Impl/ArrowBufferedStreams.h @@ -64,7 +64,7 @@ class RandomAccessFileFromSeekableReadBuffer : public arrow::io::RandomAccessFil /// Override async reading to avoid using internal arrow thread pool. /// In our code we don't use async reading, so implementation is sync, - /// we just call ReadAt and return future with rady value. + /// we just call ReadAt and return future with ready value. 
arrow::Future> ReadAsync(const arrow::io::IOContext&, int64_t position, int64_t nbytes) override; arrow::Status Seek(int64_t position) override; From 217b88eca8626483388ad5c7dd3b1cf672085ef1 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 23 Mar 2023 17:57:04 +0100 Subject: [PATCH 019/130] Removed scheduled run --- .github/workflows/release_branches.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 8ef94e1582af..a06264449dfc 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -24,8 +24,6 @@ on: # yamllint disable-line rule:truthy push: branches: - 'releases/22.8**' - schedule: - - cron: '0 0 * * 0' jobs: # DockerHubPushAarch64: From a7a80338895d12a6c31e8d3684486b74c51cb868 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 24 Mar 2023 10:33:43 -0300 Subject: [PATCH 020/130] backport upstream 47958 --- src/CMakeLists.txt | 1 + .../gtest_assert_arrow_log_does_not_abort.cpp | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 src/Processors/tests/gtest_assert_arrow_log_does_not_abort.cpp diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 3ece5fd410b3..8493b306067b 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -602,6 +602,7 @@ if (ENABLE_TESTS) dbms clickhouse_common_config clickhouse_common_zookeeper + ch_contrib::parquet string_utils) if (TARGET ch_contrib::simdjson) diff --git a/src/Processors/tests/gtest_assert_arrow_log_does_not_abort.cpp b/src/Processors/tests/gtest_assert_arrow_log_does_not_abort.cpp new file mode 100644 index 000000000000..fb13f4664d9d --- /dev/null +++ b/src/Processors/tests/gtest_assert_arrow_log_does_not_abort.cpp @@ -0,0 +1,18 @@ +#include +#include +#include +#include + +using namespace DB; + +TEST(ChunkedArray, ChunkedArrayWithZeroChunksShouldNotAbort) +{ + std::vector> empty_chunks_vector; + + EXPECT_ANY_THROW(::arrow::ChunkedArray{empty_chunks_vector}); +} + +TEST(ArrowLog, FatalLogShouldThrow) +{ + EXPECT_ANY_THROW(::arrow::util::ArrowLog(__FILE__, __LINE__, ::arrow::util::ArrowLogLevel::ARROW_FATAL)); +} From 729969c2f4984812de3bd8a83288c128ee3ca9f0 Mon Sep 17 00:00:00 2001 From: avogar Date: Mon, 19 Dec 2022 20:30:54 +0000 Subject: [PATCH 021/130] Fix reading columns that are not presented in input data in Parquet/ORC formats --- .../Formats/Impl/ArrowBlockInputFormat.cpp | 2 +- .../Formats/Impl/ArrowColumnToCHColumn.cpp | 10 +++------- .../Formats/Impl/ArrowColumnToCHColumn.h | 4 ++-- .../Formats/Impl/ORCBlockInputFormat.cpp | 9 +++++++-- .../Formats/Impl/ParquetBlockInputFormat.cpp | 2 +- ...02511_parquet_orc_missing_columns.reference | 8 ++++++++ .../02511_parquet_orc_missing_columns.sh | 18 ++++++++++++++++++ 7 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 tests/queries/0_stateless/02511_parquet_orc_missing_columns.reference create mode 100755 tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh diff --git a/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp b/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp index 05fc3b8ca2a1..552c971a26e5 100644 --- a/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp @@ -71,7 +71,7 @@ Chunk ArrowBlockInputFormat::generate() ++record_batch_current; - arrow_column_to_ch_column->arrowTableToCHChunk(res, *table_result); + arrow_column_to_ch_column->arrowTableToCHChunk(res, *table_result, (*table_result)->num_rows()); /// If 
From 729969c2f4984812de3bd8a83288c128ee3ca9f0 Mon Sep 17 00:00:00 2001 From: avogar Date: Mon, 19 Dec 2022 20:30:54 +0000 Subject: [PATCH 021/130] Fix reading columns that are not present in input data in Parquet/ORC formats --- .../Formats/Impl/ArrowBlockInputFormat.cpp | 2 +- .../Formats/Impl/ArrowColumnToCHColumn.cpp | 10 +++------- .../Formats/Impl/ArrowColumnToCHColumn.h | 4 ++-- .../Formats/Impl/ORCBlockInputFormat.cpp | 9 +++++++-- .../Formats/Impl/ParquetBlockInputFormat.cpp | 2 +- ...02511_parquet_orc_missing_columns.reference | 8 ++++++++ .../02511_parquet_orc_missing_columns.sh | 18 ++++++++++++++++++ 7 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 tests/queries/0_stateless/02511_parquet_orc_missing_columns.reference create mode 100755 tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh diff --git a/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp b/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp index 05fc3b8ca2a1..552c971a26e5 100644 --- a/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp @@ -71,7 +71,7 @@ Chunk ArrowBlockInputFormat::generate() ++record_batch_current; - arrow_column_to_ch_column->arrowTableToCHChunk(res, *table_result); + arrow_column_to_ch_column->arrowTableToCHChunk(res, *table_result, (*table_result)->num_rows()); /// If defaults_for_omitted_fields is true, calculate the default values from default expression for omitted fields. /// Otherwise fill the missing columns with zero values of its type. diff --git a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp index 4863ebfb2f59..93814ee3e752 100644 --- a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp +++ b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp @@ -767,7 +767,7 @@ ArrowColumnToCHColumn::ArrowColumnToCHColumn( { } -void ArrowColumnToCHColumn::arrowTableToCHChunk(Chunk & res, std::shared_ptr<arrow::Table> & table) +void ArrowColumnToCHColumn::arrowTableToCHChunk(Chunk & res, std::shared_ptr<arrow::Table> & table, size_t num_rows) { NameToColumnPtr name_to_column_ptr; for (auto column_name : table->ColumnNames()) @@ -781,16 +781,12 @@ void ArrowColumnToCHColumn::arrowTableToCHChunk(Chunk & res, std::shared_ptr - arrowColumnsToCHChunk(res, name_to_column_ptr); + arrowColumnsToCHChunk(res, name_to_column_ptr, num_rows); } -void ArrowColumnToCHColumn::arrowColumnsToCHChunk(Chunk & res, NameToColumnPtr & name_to_column_ptr) +void ArrowColumnToCHColumn::arrowColumnsToCHChunk(Chunk & res, NameToColumnPtr & name_to_column_ptr, size_t num_rows) { - if (name_to_column_ptr.empty()) - throw Exception(ErrorCodes::INCORRECT_NUMBER_OF_COLUMNS, "Columns is empty"); - Columns columns_list; - UInt64 num_rows = name_to_column_ptr.begin()->second->length(); columns_list.reserve(header.columns()); std::unordered_map<String, std::pair<BlockPtr, std::shared_ptr<NestedColumnExtractHelper>>> nested_tables; bool skipped = false; diff --git a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.h b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.h index 092ed65d61a3..5630597517d2 100644 --- a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.h +++ b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.h @@ -28,9 +28,9 @@ class ArrowColumnToCHColumn bool allow_missing_columns_, bool case_insensitive_matching_ = false); - void arrowTableToCHChunk(Chunk & res, std::shared_ptr<arrow::Table> & table); + void arrowTableToCHChunk(Chunk & res, std::shared_ptr<arrow::Table> & table, size_t num_rows); - void arrowColumnsToCHChunk(Chunk & res, NameToColumnPtr & name_to_column_ptr); + void arrowColumnsToCHChunk(Chunk & res, NameToColumnPtr & name_to_column_ptr, size_t num_rows); /// Get missing columns that exist in header but not in arrow::Schema std::vector<size_t> getMissingColumns(const arrow::Schema & schema) const; diff --git a/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp index 36126c21bf1e..42b5a1342add 100644 --- a/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp @@ -54,14 +54,19 @@ Chunk ORCBlockInputFormat::generate() throw ParsingException( ErrorCodes::CANNOT_READ_ALL_DATA, "Error while reading batch of ORC data: {}", table_result.status().ToString()); + /// We should extract the number of rows directly from the stripe, because in case when + /// record batch contains 0 columns (for example if we requested only columns that + /// are not present in the data) the number of rows in record batch will be 0. + size_t num_rows = file_reader->GetRawORCReader()->getStripe(stripe_current)->getNumberOfRows(); + auto table = table_result.ValueOrDie(); - if (!table || !table->num_rows()) + if (!table || !num_rows) return {}; ++stripe_current; Chunk res; - arrow_column_to_ch_column->arrowTableToCHChunk(res, table); + arrow_column_to_ch_column->arrowTableToCHChunk(res, table, num_rows); /// If defaults_for_omitted_fields is true, calculate the default values from default expression for omitted fields. /// Otherwise fill the missing columns with zero values of its type.
if (format_settings.defaults_for_omitted_fields) diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index 427c159314b3..f2328749e929 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -70,7 +70,7 @@ Chunk ParquetBlockInputFormat::generate() ++row_group_current; - arrow_column_to_ch_column->arrowTableToCHChunk(res, table); + arrow_column_to_ch_column->arrowTableToCHChunk(res, table, table->num_rows()); /// If defaults_for_omitted_fields is true, calculate the default values from default expression for omitted fields. /// Otherwise fill the missing columns with zero values of its type. diff --git a/tests/queries/0_stateless/02511_parquet_orc_missing_columns.reference b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.reference new file mode 100644 index 000000000000..d5318a96f1ac --- /dev/null +++ b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.reference @@ -0,0 +1,8 @@ +Hello +Hello +Hello +6 6 +Hello +Hello +Hello +6 6 diff --git a/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh new file mode 100755 index 000000000000..780504b88077 --- /dev/null +++ b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. "$CUR_DIR"/../shell_config.sh + +$CLICKHOUSE_LOCAL -q "select number as x from numbers(3) format Parquet" > 02511_data1.parquet +$CLICKHOUSE_LOCAL -q "select y from file(02511_data1.parquet, auto, 'x UInt64, y String default \'Hello\'') settings input_format_parquet_allow_missing_columns=1" +$CLICKHOUSE_LOCAL -q "select number as x, 'Hello' as y from numbers(3) format Parquet" > 02511_data2.parquet +$CLICKHOUSE_LOCAL -q "select count(*), count(y) from file('02511_data*.parquet', auto, 'x UInt64, y String') settings input_format_parquet_allow_missing_columns=1" + +$CLICKHOUSE_LOCAL -q "select number as x from numbers(3) format ORC" > 02511_data1.orc +$CLICKHOUSE_LOCAL -q "select y from file(02511_data1.orc, auto, 'x UInt64, y String default \'Hello\'') settings input_format_orc_allow_missing_columns=1" +$CLICKHOUSE_LOCAL -q "select number as x, 'Hello' as y from numbers(3) format ORC" > 02511_data2.orc +$CLICKHOUSE_LOCAL -q "select count(*), count(y) from file('02511_data*.orc', auto, 'x UInt64, y String') settings input_format_orc_allow_missing_columns=1" + +rm 02511_data* + From 0e73a84a2b941476d68b2400928dc7099fdc44f3 Mon Sep 17 00:00:00 2001 From: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> Date: Tue, 20 Dec 2022 14:49:39 +0100 Subject: [PATCH 022/130] Fix style --- src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp index 93814ee3e752..eacfce48c2d1 100644 --- a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp +++ b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp @@ -69,7 +69,6 @@ namespace ErrorCodes extern const int DUPLICATE_COLUMN; extern const int THERE_IS_NO_COLUMN; extern const int UNKNOWN_EXCEPTION; - extern const int INCORRECT_NUMBER_OF_COLUMNS; extern const int INCORRECT_DATA; } From ae51a1447c2f6886f0d387f9623b3b7fe487d73a Mon Sep 17 00:00:00 2001 From: Kruglov Pavel 
<48961922+Avogar@users.noreply.github.com> Date: Tue, 20 Dec 2022 14:50:06 +0100 Subject: [PATCH 023/130] Skip fasttest --- tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh index 780504b88077..455dccafbb9b 100755 --- a/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh +++ b/tests/queries/0_stateless/02511_parquet_orc_missing_columns.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +#Tags: no-fasttest, no-parallel CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) # shellcheck source=../shell_config.sh From 124d25dfebff24c449839805f8343d9262506b3f Mon Sep 17 00:00:00 2001 From: liuneng <1398775315@qq.com> Date: Wed, 1 Feb 2023 15:54:10 +0800 Subject: [PATCH 024/130] optimize parquet reader --- .../Formats/Impl/ParquetBlockInputFormat.cpp | 49 ++++++++++--------- .../Formats/Impl/ParquetBlockInputFormat.h | 3 +- 2 files changed, 28 insertions(+), 24 deletions(-) diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index f2328749e929..550a66a2b6ee 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -15,6 +15,7 @@ #include "ArrowBufferedStreams.h" #include "ArrowColumnToCHColumn.h" #include +#include namespace DB { @@ -43,34 +44,36 @@ Chunk ParquetBlockInputFormat::generate() block_missing_values.clear(); if (!file_reader) + { prepareReader(); + /// It may be necessary to add a parameter + file_reader->set_batch_size(8192); + std::vector<int> row_group_indices; + for (int i = 0; i < file_reader->num_row_groups(); ++i) + { + if (!skip_row_groups.contains(i)) + row_group_indices.emplace_back(i); + } + auto read_status = file_reader->GetRecordBatchReader(row_group_indices, column_indices, &current_record_batch_reader); + if (!read_status.ok()) + throw DB::Exception(ErrorCodes::CANNOT_READ_ALL_DATA, "Error while reading Parquet data: {}", read_status.ToString()); + } if (is_stopped) return {}; - for (; row_group_current < row_group_total && skip_row_groups.contains(row_group_current); ++row_group_current) ; - - if (row_group_current >= row_group_total) - return res; - - std::shared_ptr<arrow::Table> table; - - std::unique_ptr<::arrow::RecordBatchReader> rbr; - std::vector<int> row_group_indices { row_group_current }; - arrow::Status get_batch_reader_status = file_reader->GetRecordBatchReader(row_group_indices, column_indices, &rbr); - - if (!get_batch_reader_status.ok()) - throw ParsingException{"Error while reading Parquet data: " + get_batch_reader_status.ToString(), ErrorCodes::CANNOT_READ_ALL_DATA}; - - arrow::Status read_status = rbr->ReadAll(&table); - - if (!read_status.ok()) - throw ParsingException{"Error while reading Parquet data: " + read_status.ToString(), ErrorCodes::CANNOT_READ_ALL_DATA}; - - ++row_group_current; - - arrow_column_to_ch_column->arrowTableToCHChunk(res, table, table->num_rows()); + auto batch = current_record_batch_reader->Next(); + if (*batch) + { + auto tmp_table = arrow::Table::FromRecordBatches({*batch}); + arrow_column_to_ch_column->arrowTableToCHChunk(res, *tmp_table, (*tmp_table)->num_rows()); + } + else + { + current_record_batch_reader.reset(); + file_reader.reset(); + return {}; + } /// If defaults_for_omitted_fields is true, calculate the default values from default expression for omitted fields. /// Otherwise fill the missing columns with zero values of its type.
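The change above replaces the per-row-group GetRecordBatchReader/ReadAll pair with a single streaming RecordBatchReader over all row groups. A compact sketch of the pattern, assuming an already prepared parquet::arrow::FileReader plus row-group/column index vectors (the names and the fixed batch size are illustrative, not the exact ClickHouse code):

    #include <parquet/arrow/reader.h>
    #include <arrow/record_batch.h>
    #include <arrow/status.h>

    // Stream fixed-size record batches instead of materializing whole row groups.
    arrow::Status readAllBatches(parquet::arrow::FileReader & file_reader,
                                 const std::vector<int> & row_groups,
                                 const std::vector<int> & columns)
    {
        file_reader.set_batch_size(8192); // rows per batch
        std::shared_ptr<arrow::RecordBatchReader> reader;
        ARROW_RETURN_NOT_OK(file_reader.GetRecordBatchReader(row_groups, columns, &reader));
        while (true)
        {
            std::shared_ptr<arrow::RecordBatch> batch;
            ARROW_RETURN_NOT_OK(reader->ReadNext(&batch));
            if (!batch)
                break; // stream exhausted
            // convert `batch` into a ClickHouse Chunk here
        }
        return arrow::Status::OK();
    }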
diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.h b/src/Processors/Formats/Impl/ParquetBlockInputFormat.h index 76803bb5b89a..080933e05bde 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.h +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.h @@ -8,7 +8,7 @@ namespace parquet::arrow { class FileReader; } -namespace arrow { class Buffer; } +namespace arrow { class Buffer; class RecordBatchReader;} namespace DB { @@ -46,6 +46,7 @@ class ParquetBlockInputFormat : public IInputFormat BlockMissingValues block_missing_values; const FormatSettings format_settings; const std::unordered_set<int> & skip_row_groups; + std::shared_ptr<arrow::RecordBatchReader> current_record_batch_reader; std::atomic<int> is_stopped{0}; }; From f3c6e96c2cdaee8c858e6ef45e80bcd6e48fbb92 Mon Sep 17 00:00:00 2001 From: liuneng <1398775315@qq.com> Date: Wed, 1 Feb 2023 18:29:20 +0800 Subject: [PATCH 025/130] add parquet max_block_size setting --- src/Core/Settings.h | 1 + src/Formats/FormatFactory.cpp | 1 + src/Formats/FormatSettings.h | 1 + .../Formats/Impl/ParquetBlockInputFormat.cpp | 12 ++++++++---- 4 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/Core/Settings.h b/src/Core/Settings.h index 82793242e92f..b68d1b3e68a4 100644 --- a/src/Core/Settings.h +++ b/src/Core/Settings.h @@ -712,6 +712,7 @@ static constexpr UInt64 operator""_GiB(unsigned long long value) M(Bool, input_format_csv_use_best_effort_in_schema_inference, true, "Use some tweaks and heuristics to infer schema in CSV format", 0) \ M(Bool, input_format_tsv_use_best_effort_in_schema_inference, true, "Use some tweaks and heuristics to infer schema in TSV format", 0) \ M(Bool, input_format_parquet_skip_columns_with_unsupported_types_in_schema_inference, false, "Skip columns with unsupported types while schema inference for format Parquet", 0) \ + M(UInt64, input_format_parquet_max_block_size, 8192, "Max block size for parquet reader.", 0) \ M(Bool, input_format_protobuf_skip_fields_with_unsupported_types_in_schema_inference, false, "Skip fields with unsupported types while schema inference for format Protobuf", 0) \ M(Bool, input_format_capn_proto_skip_fields_with_unsupported_types_in_schema_inference, false, "Skip columns with unsupported types while schema inference for format CapnProto", 0) \ M(Bool, input_format_orc_skip_columns_with_unsupported_types_in_schema_inference, false, "Skip columns with unsupported types while schema inference for format ORC", 0) \ diff --git a/src/Formats/FormatFactory.cpp b/src/Formats/FormatFactory.cpp index d37f578d947b..f689deadc043 100644 --- a/src/Formats/FormatFactory.cpp +++ b/src/Formats/FormatFactory.cpp @@ -104,6 +104,7 @@ FormatSettings getFormatSettings(ContextPtr context, const Settings & settings) format_settings.parquet.allow_missing_columns = settings.input_format_parquet_allow_missing_columns; format_settings.parquet.skip_columns_with_unsupported_types_in_schema_inference = settings.input_format_parquet_skip_columns_with_unsupported_types_in_schema_inference; format_settings.parquet.output_string_as_string = settings.output_format_parquet_string_as_string; + format_settings.parquet.max_block_size = settings.input_format_parquet_max_block_size; format_settings.pretty.charset = settings.output_format_pretty_grid_charset.toString() == "ASCII" ?
FormatSettings::Pretty::Charset::ASCII : FormatSettings::Pretty::Charset::UTF8; format_settings.pretty.color = settings.output_format_pretty_color; format_settings.pretty.max_column_pad_width = settings.output_format_pretty_max_column_pad_width; diff --git a/src/Formats/FormatSettings.h b/src/Formats/FormatSettings.h index 27ad954adaf1..dfa72fc3e573 100644 --- a/src/Formats/FormatSettings.h +++ b/src/Formats/FormatSettings.h @@ -160,6 +160,7 @@ struct FormatSettings bool case_insensitive_column_matching = false; std::unordered_set<int> skip_row_groups = {}; bool output_string_as_string = false; + UInt64 max_block_size = 8192; } parquet; struct Pretty diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index 550a66a2b6ee..743f31a4ed68 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -46,23 +46,27 @@ Chunk ParquetBlockInputFormat::generate() if (!file_reader) { prepareReader(); - /// It may be necessary to add a parameter - file_reader->set_batch_size(8192); + file_reader->set_batch_size(format_settings.parquet.max_block_size); std::vector<int> row_group_indices; - for (int i = 0; i < file_reader->num_row_groups(); ++i) + for (int i = 0; i < row_group_total; ++i) { if (!skip_row_groups.contains(i)) row_group_indices.emplace_back(i); } auto read_status = file_reader->GetRecordBatchReader(row_group_indices, column_indices, &current_record_batch_reader); if (!read_status.ok()) - throw DB::Exception(ErrorCodes::CANNOT_READ_ALL_DATA, "Error while reading Parquet data: {}", read_status.ToString()); + throw DB::ParsingException(ErrorCodes::CANNOT_READ_ALL_DATA, "Error while reading Parquet data: {}", read_status.ToString()); } if (is_stopped) return {}; auto batch = current_record_batch_reader->Next(); + if (!batch.ok()) + { + throw ParsingException(ErrorCodes::CANNOT_READ_ALL_DATA, "Error while reading Parquet data: {}", + batch.status().ToString()); + } if (*batch) { auto tmp_table = arrow::Table::FromRecordBatches({*batch}); From 96a87dc519ea61b45cd1772692b7558f37be76de Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 2 Feb 2023 02:42:21 +0300 Subject: [PATCH 026/130] Update ParquetBlockInputFormat.cpp --- src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index 743f31a4ed68..ad7b6d8c2a15 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -15,7 +15,7 @@ #include "ArrowBufferedStreams.h" #include "ArrowColumnToCHColumn.h" #include -#include + namespace DB {
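Taken together, patches 024-026 make the Parquet reader stream batches of input_format_parquet_max_block_size rows (8192 by default), which a query can override in the usual way, e.g. with SETTINGS input_format_parquet_max_block_size = 16384 on a SELECT that reads Parquet. One side effect of streaming batches through a shared reader is that the order in which chunks surface can vary with the number of reading threads; the patch that follows (027) pins max_threads=1 in a test whose md5-based comparison depends on row order.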
From 252fa660df608d6bafd53b4eedb813882cedf59f Mon Sep 17 00:00:00 2001 From: liuneng <1398775315@qq.com> Date: Thu, 2 Feb 2023 11:24:34 +0800 Subject: [PATCH 027/130] Under multi-threading, result sets may be generated in a different order --- .../0_stateless/02481_parquet_int_list_multiple_chunks.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/queries/0_stateless/02481_parquet_int_list_multiple_chunks.sh b/tests/queries/0_stateless/02481_parquet_int_list_multiple_chunks.sh index c2c6f6898510..5c7c9701a675 100755 --- a/tests/queries/0_stateless/02481_parquet_int_list_multiple_chunks.sh +++ b/tests/queries/0_stateless/02481_parquet_int_list_multiple_chunks.sh @@ -37,6 +37,6 @@ DATA_FILE=$CUR_DIR/data_parquet/int-list-zero-based-chunked-array.parquet ${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS parquet_load" ${CLICKHOUSE_CLIENT} --query="CREATE TABLE parquet_load (arr Array(Int64)) ENGINE = Memory" cat "$DATA_FILE" | ${CLICKHOUSE_CLIENT} -q "INSERT INTO parquet_load FORMAT Parquet" -${CLICKHOUSE_CLIENT} --query="SELECT * FROM parquet_load" | md5sum +${CLICKHOUSE_CLIENT} --query="SELECT * FROM parquet_load SETTINGS max_threads=1" | md5sum ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM parquet_load" ${CLICKHOUSE_CLIENT} --query="drop table parquet_load" \ No newline at end of file From eada70e81e3348d2f5aeaaa8462ea3d63829cc9c Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Tue, 28 Mar 2023 13:05:08 -0300 Subject: [PATCH 028/130] partially backport upstream #47538 --- src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp | 4 ++-- src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp index eacfce48c2d1..e82bbae91982 100644 --- a/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp +++ b/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp @@ -89,7 +89,7 @@ static ColumnWithTypeAndName readColumnWithNumericData(std::shared_ptr std::shared_ptr<arrow::Buffer> buffer = chunk->data()->buffers[1]; - const auto * raw_data = reinterpret_cast<const NumericType *>(buffer->data()); + const auto * raw_data = reinterpret_cast<const NumericType *>(buffer->data()) + chunk->offset(); column_data.insert_assume_reserved(raw_data, raw_data + chunk->length()); } return {std::move(internal_column), std::move(internal_type), column_name}; @@ -346,7 +346,7 @@ static ColumnWithTypeAndName readColumnWithIndexesDataImpl(std::shared_ptr std::shared_ptr<arrow::Buffer> buffer = chunk->data()->buffers[1]; - const auto * data = reinterpret_cast<const NumericType *>(buffer->data()); + const auto * data = reinterpret_cast<const NumericType *>(buffer->data()) + chunk->offset(); /// Check that indexes are correct (protection against corrupted files) for (int64_t i = 0; i != chunk->length(); ++i) diff --git a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp index ad7b6d8c2a15..ffd12a5ff673 100644 --- a/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp @@ -74,8 +74,6 @@ Chunk ParquetBlockInputFormat::generate() } else { - current_record_batch_reader.reset(); - file_reader.reset(); return {}; } @@ -92,6 +90,7 @@ void ParquetBlockInputFormat::resetParser() IInputFormat::resetParser(); file_reader.reset(); + current_record_batch_reader.reset(); column_indices.clear(); row_group_current = 0; block_missing_values.clear(); From 4842ef9bd0af6c016a4fa4fb9210f33bd84174b3 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 4 Apr 2023 18:41:08 -0700 Subject: [PATCH 029/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 1873 +++++++++++++++++++++++- 1 file changed, 1870 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0bb0a8748d13..f8015ba2fefd 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -24,6 +24,8 @@ on: # yamllint disable-line rule:truthy push: branches: - 'releases/22.8**' + schedule: + - cron: '0 0 * * 0' jobs: # DockerHubPushAarch64: @@ -135,7 +137,7 @@ jobs: REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches BUILD_NAME=package_release - CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinityfips EOF - name: Download changed images uses: actions/download-artifact@v3 @@ -230,8 +232,7 @@ jobs: - name: Check docker clickhouse/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + python3 docker_server.py --release-type head --no-push python3 docker_server.py --release-type head --no-push --no-ubuntu \ --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - name: Cleanup @@ -569,6 +570,1839 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" +############################################################################################# +##################################### REGRESSION TESTS ###################################### +############################################################################################# + aes_encryption: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=aes_encryption + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output nice + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + aggregate_functions: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=aggregate_functions + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + 
path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + atomic_insert: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=atomic_insert + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + base_58: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=base_58 + artifacts=public + EOF + - name: Download 
json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + benchmark_minio: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ontime_benchmark + STORAGE=/minio + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage minio + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-minio-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + benchmark_aws: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 
'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ontime_benchmark + STORAGE=/aws + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --storage aws_s3 + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-aws-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + benchmark_gcs: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ontime_benchmark + STORAGE=/gcs + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --storage gcs + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-gcs-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + 
./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + clickhouse_keeper: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + datetime64_extended_range: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=datetime64_extended_range + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + 
with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + disk_level_encryption: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=disk_level_encryption + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + dns: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=dns + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: 
.github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + example: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=example + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + extended_precision_data_types: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=extended_precision_data_types + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" 
job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + kafka: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=kafka + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + kerberos: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=kerberos + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + ldap_authentication: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ldap/authentication + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-authentication-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + ldap_external_user_directory: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ldap/external_user_directory + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" 
version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-external-user-directory-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + ldap_role_mapping: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ldap/role_mapping + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-role-mapping-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + lightweight_delete: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=lightweight_delete + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr 
project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + map_type: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=map_type + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + parquet: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=parquet + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + 
--collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --storage minio + --storage aws_s3 + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --storage gcs + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + part_moves_between_shards: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=part_moves_between_shards + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + rbac: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 
'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=rbac + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + selects: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=selects + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + s3_minio: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: 
Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=s3 + STORAGE=/minio + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --storage minio + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-minio-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + s3_aws: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=s3 + STORAGE=/aws + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --storage aws_s3 + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-aws-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + s3_gcs: + needs: [BuilderDebRelease] 
+ runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=s3 + STORAGE=/gcs + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --storage gcs + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-gcs-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + ssl_server: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ssl_server + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + 
name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + tiered_storage: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=tiered_storage + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + tiered_storage_minio: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=tiered_storage + STORAGE=/minio + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --with_minio + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: 
always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-minio-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + tiered_storage_aws: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=tiered_storage + STORAGE=/aws + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --with-s3amazon + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-aws-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + tiered_storage_gcs: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=tiered_storage + STORAGE=/gcs + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" 
project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + --with-s3gcs + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-gcs-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + window_functions: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=window_functions + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log FinishCheck: needs: - DockerHubPush @@ -583,6 +2417,39 @@ jobs: - IntegrationTestsRelease0 - IntegrationTestsRelease1 - CompatibilityCheck + - aes_encryption + - aggregate_functions + - atomic_insert + - base_58 + - benchmark_minio + - benchmark_aws + - benchmark_gcs + - clickhouse_keeper + - datetime64_extended_range + - disk_level_encryption + - dns + - example + - extended_precision_data_types + - kafka + - kerberos + - ldap_authentication + - ldap_external_user_directory + - ldap_role_mapping + - lightweight_delete + - map_type + - parquet + - part_moves_between_shards + - rbac + - selects + - s3_minio + - s3_aws + - s3_gcs + - ssl_server + - tiered_storage + - tiered_storage_minio + - tiered_storage_aws + - tiered_storage_gcs + - window_functions runs-on: [self-hosted, style-checker] steps: - name: Check out repository 
code

From f7a62a9271aecbebb4b7daa211e513ecb3127666 Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Tue, 4 Apr 2023 18:44:25 -0700
Subject: [PATCH 030/130] Update release_branches.yml

---
 .github/workflows/release_branches.yml | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index f8015ba2fefd..f94ac4d5ff88 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -24,8 +24,6 @@ on: # yamllint disable-line rule:truthy
   push:
     branches:
       - 'releases/22.8**'
-  schedule:
-    - cron: '0 0 * * 0'
 
 jobs:
 #  DockerHubPushAarch64:
@@ -137,7 +135,7 @@ jobs:
           REPO_COPY=${{runner.temp}}/build_check/ClickHouse
           CACHES_PATH=${{runner.temp}}/../ccaches
           BUILD_NAME=package_release
-          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinityfips
+          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable
          EOF
       - name: Download changed images
         uses: actions/download-artifact@v3
@@ -232,7 +230,8 @@ jobs:
       - name: Check docker clickhouse/clickhouse-server building
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_server.py --release-type head --no-push
+          python3 docker_server.py --release-type head --no-push \
+            --image-repo clickhouse/clickhouse-server --image-path docker/server
           python3 docker_server.py --release-type head --no-push --no-ubuntu \
             --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup

From 84bc4c24811d82296d1055643d0ff8d6ee1cd092 Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Wed, 5 Apr 2023 08:23:51 -0700
Subject: [PATCH 031/130] Update release_branches.yml

---
 .github/workflows/release_branches.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index f94ac4d5ff88..de4cf84d8ab5 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -2206,7 +2206,7 @@ jobs:
         run: python3
               -u ${{ env.SUITE }}/regression.py
               --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-              --with_minio
+              --with-minio
               --test-to-end
               --local
               --collect-service-logs
@@ -2271,7 +2271,7 @@ jobs:
               --log raw.log
               --with-s3amazon
               --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
-              --aws-s3-key-id ${{ secrets.AWS_S3_KEY_ID }}
+              --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
               --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/
       - name: Create and upload logs
         if: always()
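Patch 031 is a flag-spelling fix: the suite's option is declared with a dash, so the underscore form --with_minio introduced in patch 029 is rejected on the command line. A minimal sketch of the failure mode, assuming the suite parses its options with argparse (an assumption; the actual parser in clickhouse-regression may differ):

    # Hypothetical sketch (not from clickhouse-regression): an option declared
    # as --with-minio only matches the dashed spelling on the command line.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--with-minio", action="store_true")

    args = parser.parse_args(["--with-minio"])
    print(args.with_minio)  # True: argparse maps the dash to an underscore dest
    # parser.parse_args(["--with_minio"]) would exit with
    # "error: unrecognized arguments: --with_minio"

The second hunk is the matching secret-name fix, pointing --aws-s3-key-id at the REGRESSION_AWS_S3_KEY_ID secret used by the rest of the REGRESSION_* parameters.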
From a0a080ef7c90022032c2b26caac89cdeaf7b153b Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Wed, 5 Apr 2023 10:26:54 -0700
Subject: [PATCH 032/130] Update release_branches.yml

---
 .github/workflows/release_branches.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index de4cf84d8ab5..5544cd0172e0 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -935,7 +935,7 @@ jobs:
       - name: Run ${{ env.SUITE }} suite
         run: python3
               -u ${{ env.SUITE }}/benchmark.py
-              --storage gcs
+              --storage s3_gcs
               --gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
               --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
               --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}

From 225c4b0b455d4311cc998bcd0ff17b01176bce2b Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Wed, 5 Apr 2023 20:51:44 -0700
Subject: [PATCH 033/130] Update release_branches.yml

---
 .github/workflows/release_branches.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 5544cd0172e0..de4cf84d8ab5 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -935,7 +935,7 @@ jobs:
       - name: Run ${{ env.SUITE }} suite
         run: python3
               -u ${{ env.SUITE }}/benchmark.py
-              --storage s3_gcs
+              --storage gcs
               --gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
               --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
               --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}

From 90f69126829bc6aae630af5e44c82cc5a93a6e07 Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Tue, 11 Apr 2023 16:22:53 +0200
Subject: [PATCH 034/130] Bumped Go version to get some CVE fixes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Lots of high-severity CVEs were fixed in 1.19.8:

pkg:golang/stdlib@1.19.5

✗ HIGH CVE-2022-41725 [Uncontrolled Resource Consumption]
  https://dso.docker.com/cve/CVE-2022-41725
  Affected range : <1.19.6
  Fixed version  : 1.19.6
  CVSS Score     : 7.5
  CVSS Vector    : CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H

✗ HIGH CVE-2022-41724 [Uncontrolled Resource Consumption]
  https://dso.docker.com/cve/CVE-2022-41724
  Affected range : <1.19.6
  Fixed version  : 1.19.6
  CVSS Score     : 7.5
  CVSS Vector    : CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H

✗ HIGH CVE-2022-41723 [Uncontrolled Resource Consumption]
  https://dso.docker.com/cve/CVE-2022-41723
  Affected range : <1.19.6
  Fixed version  : 1.19.6
  CVSS Score     : 7.5
  CVSS Vector    : CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H

✗ HIGH CVE-2022-41722 [Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')]
  https://dso.docker.com/cve/CVE-2022-41722
  Affected range : <1.19.6
  Fixed version  : 1.19.6
  CVSS Score     : 7.5
  CVSS Vector    : CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N

✗ MEDIUM CVE-2023-24532 [Incorrect Calculation]
  https://dso.docker.com/cve/CVE-2023-24532
  Affected range : <1.19.7
  Fixed version  : 1.19.7
  CVSS Score     : 5.3
  CVSS Vector    : CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:N

✗ UNSPECIFIED CVE-2023-24538 [Improper Control of Generation of Code ('Code Injection')]
  https://dso.docker.com/cve/CVE-2023-24538
  Affected range : <1.19.8
  Fixed version  : 1.19.8

✗ UNSPECIFIED CVE-2023-24537 [Loop with Unreachable Exit Condition ('Infinite Loop')]
  https://dso.docker.com/cve/CVE-2023-24537
  Affected range : <1.19.8
  Fixed version  : 1.19.8

✗ UNSPECIFIED CVE-2023-24536 [Uncontrolled Resource Consumption]
  https://dso.docker.com/cve/CVE-2023-24536
  Affected range : <1.19.8
  Fixed version  : 1.19.8

✗ UNSPECIFIED CVE-2023-24534 [Uncontrolled Resource Consumption]
  https://dso.docker.com/cve/CVE-2023-24534
  Affected range : <1.19.8
  Fixed version  : 1.19.8
---
 docker/packager/binary/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile
index 818472a52d55..edad920313c4 100644
--- a/docker/packager/binary/Dockerfile
+++ b/docker/packager/binary/Dockerfile
@@ -53,7 +53,7 @@ RUN arch=${TARGETARCH:-amd64} \
     && dpkg -i /tmp/nfpm.deb \
     && rm /tmp/nfpm.deb
 
-ARG GO_VERSION=1.18.3
+ARG GO_VERSION=1.19.8
 # We need go for clickhouse-diagnostics
 RUN arch=${TARGETARCH:-amd64} \
     && curl -Lo /tmp/go.tgz "https://go.dev/dl/go${GO_VERSION}.linux-${arch}.tar.gz" \
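The advisories quoted in patch 034 list fixed versions ranging from 1.19.6 to 1.19.8, so moving GO_VERSION to 1.19.8 clears every finding at once. A hedged sketch of that threshold check (this helper is hypothetical and not part of the patch series):

    # Hypothetical guard: confirm a Go toolchain version is at least the
    # first release carrying all of the CVE fixes listed above.
    MIN_PATCHED = (1, 19, 8)

    def parse_go_version(version: str) -> tuple:
        # Accept both "1.19.8" and "go1.19.8" spellings.
        if version.startswith("go"):
            version = version[2:]
        return tuple(int(part) for part in version.split("."))

    def is_patched(version: str) -> bool:
        return parse_go_version(version) >= MIN_PATCHED

    assert is_patched("1.19.8")        # the version this patch installs
    assert not is_patched("go1.18.3")  # the version it replaces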
From fb8124dfeabf81fed4e06da9656ccf4a3b5afa07 Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Wed, 12 Apr 2023 12:38:24 +0200
Subject: [PATCH 035/130] Do not install clickhouse-diagnostics

Due to the large number of CVEs that pop up in the golang runtime.
---
 packages/clickhouse-common-static.yaml | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/packages/clickhouse-common-static.yaml b/packages/clickhouse-common-static.yaml
index 3167e78dbc3e..429d3fbcb678 100644
--- a/packages/clickhouse-common-static.yaml
+++ b/packages/clickhouse-common-static.yaml
@@ -33,8 +33,9 @@ deb:
 contents:
 - src: root/usr/bin/clickhouse
   dst: /usr/bin/clickhouse
-- src: root/usr/bin/clickhouse-diagnostics
-  dst: /usr/bin/clickhouse-diagnostics
+# Excluded due to CVEs in the go runtime that pop up constantly
+# - src: root/usr/bin/clickhouse-diagnostics
+#   dst: /usr/bin/clickhouse-diagnostics
 - src: root/usr/bin/clickhouse-extract-from-config
   dst: /usr/bin/clickhouse-extract-from-config
 - src: root/usr/bin/clickhouse-library-bridge

From ab55b6a84935dcb220056f095bac94798f1b3dec Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Thu, 13 Apr 2023 10:43:16 +0200
Subject: [PATCH 036/130] Updated version to v22.8.15.25.altinitystable

---
 cmake/autogenerated_versions.txt | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt
index 0f85d434350c..b79e759db3e7 100644
--- a/cmake/autogenerated_versions.txt
+++ b/cmake/autogenerated_versions.txt
@@ -7,9 +7,9 @@
 SET(VERSION_MAJOR 22)
 SET(VERSION_MINOR 8)
 SET(VERSION_PATCH 15)
-SET(VERSION_TWEAK 24)
+SET(VERSION_TWEAK 25)
 SET(VERSION_FLAVOUR altinitystable)
-SET(VERSION_DESCRIBE v22.8.15.24.altinitystable)
-SET(VERSION_STRING 22.8.15.24.altinitystable)
-# end of autochange
\ No newline at end of file
+SET(VERSION_DESCRIBE v22.8.15.25.altinitystable)
+SET(VERSION_STRING 22.8.15.25.altinitystable)
+# end of autochange

From 3485b16514603512034256d69e03c2f10f87c3ae Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Wed, 19 Apr 2023 22:50:02 +0200
Subject: [PATCH 037/130] Updated CH db name for flaky test

"gh-data" instead of "default", just like everywhere else
---
 tests/ci/clickhouse_helper.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py
index 4bc7ad1e6fc7..f2ace1d5614e 100644
--- a/tests/ci/clickhouse_helper.py
+++ b/tests/ci/clickhouse_helper.py
@@ -197,7 +197,7 @@ def mark_flaky_tests(clickhouse_helper, check_name, test_results):
           AND pull_request_number = 0
         """
 
-    tests_data = clickhouse_helper.select_json_each_row("default", query)
+    tests_data = clickhouse_helper.select_json_each_row("gh-data", query)
     master_failed_tests = {row["test_name"] for row in tests_data}
     logging.info("Found flaky tests: %s", ", ".join(master_failed_tests))

From 94667d6e0092c33dfc6ebdbb628bdcc4402a0928 Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Wed, 19 Apr 2023 23:01:39 +0200
Subject: [PATCH 038/130] Starting regression tests after stateless, stateful
 and integration

---
 .github/workflows/release_branches.yml | 210 +++++++++++++------------
 1 file changed, 108 insertions(+), 102 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index de4cf84d8ab5..d5a74caa244a 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -572,8 +572,14 @@ jobs:
############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# + regression_start: + needs: [FunctionalStatelessTestRelease, FunctionalStatefulTestRelease, IntegrationTestsRelease0, IntegrationTestsRelease1] + runs-on: ubuntu-latest + steps: + - run: true + aes_encryption: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -600,12 +606,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output nice --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -627,7 +633,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log aggregate_functions: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -654,12 +660,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -681,7 +687,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log atomic_insert: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -708,12 +714,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -735,7 +741,7 @@ jobs: 
./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log base_58: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -762,12 +768,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -789,7 +795,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log benchmark_minio: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -817,13 +823,13 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/benchmark.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --storage minio --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -845,7 +851,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log benchmark_aws: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -873,7 +879,7 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/benchmark.py --storage aws_s3 --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} @@ -883,7 +889,7 @@ jobs: --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -903,9 +909,9 @@ jobs: ./*/_instances/*.log ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + ./*/*/_instances/*.log benchmark_gcs: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -933,7 +939,7 @@ jobs: - name: Get 
deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/benchmark.py --storage gcs --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} @@ -942,7 +948,7 @@ jobs: --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -964,7 +970,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log clickhouse_keeper: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -991,12 +997,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1018,7 +1024,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log datetime64_extended_range: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1045,12 +1051,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1072,7 +1078,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log disk_level_encryption: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1099,12 +1105,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + 
--collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1126,7 +1132,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log dns: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1153,12 +1159,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1180,7 +1186,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log example: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1207,12 +1213,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1232,9 +1238,9 @@ jobs: ./*/_instances/*.log ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + ./*/*/_instances/*.log extended_precision_data_types: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1261,12 +1267,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" 
job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1288,7 +1294,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log kafka: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1315,12 +1321,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1342,7 +1348,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log kerberos: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1369,12 +1375,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1396,7 +1402,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log ldap_authentication: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1423,12 +1429,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1450,7 +1456,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log ldap_external_user_directory: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1477,12 +1483,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py 
--reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1504,7 +1510,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log ldap_role_mapping: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1531,12 +1537,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1558,7 +1564,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log lightweight_delete: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1585,12 +1591,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1612,7 +1618,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log map_type: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1639,12 +1645,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ 
env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1666,7 +1672,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log parquet: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1693,12 +1699,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1730,7 +1736,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log part_moves_between_shards: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1757,12 +1763,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1784,7 +1790,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log rbac: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1811,12 +1817,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1838,7 +1844,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log selects: - needs: [BuilderDebRelease] + needs: 
[regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1865,12 +1871,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1892,7 +1898,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log s3_minio: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1920,12 +1926,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -1948,7 +1954,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log s3_aws: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -1976,12 +1982,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2008,7 +2014,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log s3_gcs: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2036,12 +2042,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - 
--collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2067,7 +2073,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log ssl_server: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2094,12 +2100,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2121,7 +2127,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log tiered_storage: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2148,12 +2154,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2175,7 +2181,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log tiered_storage_minio: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2203,13 +2209,13 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --with-minio --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" 
job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2231,7 +2237,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log tiered_storage_aws: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2259,12 +2265,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" @@ -2290,7 +2296,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log tiered_storage_gcs: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2318,17 +2324,17 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log - --with-s3gcs + --with-s3gcs --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} @@ -2349,7 +2355,7 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log window_functions: - needs: [BuilderDebRelease] + needs: [regression_start] runs-on: [self-hosted, stress-tester] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} @@ -2376,12 +2382,12 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --test-to-end --local - --collect-service-logs + --collect-service-logs --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" From 0b01a6def1d74b4c12c2aee8df9179a9c0fb5c6a Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: 
Wed, 19 Apr 2023 18:46:28 -0700
Subject: [PATCH 039/130] Collapsing regression tests

---
 .github/workflows/release_branches.yml | 1620 ++----------------------
 1 file changed, 77 insertions(+), 1543 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index d5a74caa244a..b32457e77a7b 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -578,7 +578,11 @@ jobs:
     steps:
       - run: true
 
-  aes_encryption:
+  regression_common:
+    strategy:
+      fail-fast: false
+      matrix:
+        SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, map_type, rbac, selects, ssl_server, tiered_storage, window_functions]
     needs: [regression_start]
     runs-on: [self-hosted, stress-tester]
     env:
@@ -590,11 +594,12 @@
         uses: actions/checkout@v3
         with:
           repository: Altinity/clickhouse-regression
+          ref: releases
       - name: Set envs
         run: |
           cat >> "$GITHUB_ENV" << 'EOF'
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=aes_encryption
+          SUITE=${{ matrix.SUITE }}
           artifacts=public
           EOF
       - name: Download json reports
@@ -632,1272 +637,12 @@
            ./*/_instances/*/logs/*.log
            ./*/*/_instances/*/logs/*.log
            ./*/*/_instances/*.log
-  aggregate_functions:
-    needs: [regression_start]
-    runs-on: [self-hosted, stress-tester]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=aggregate_functions
-          artifacts=public
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-  atomic_insert:
-    needs: [regression_start]
-    runs-on: [self-hosted, stress-tester]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository:
Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=atomic_insert - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - base_58: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=base_58 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - benchmark_minio: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout 
regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/minio - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage minio - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - benchmark_aws: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/aws - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aws-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - 
./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - benchmark_gcs: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/gcs - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --storage gcs - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-gcs-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - clickhouse_keeper: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" 
arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - datetime64_extended_range: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=datetime64_extended_range - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - disk_level_encryption: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=disk_level_encryption - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse 
HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - dns: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=dns - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - example: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=example - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" 
repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - extended_precision_data_types: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=extended_precision_data_types - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - kafka: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=kafka - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ 
env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - kerberos: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=kerberos - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - ldap_authentication: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/authentication - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr 
project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-authentication-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - ldap_external_user_directory: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/external_user_directory - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-external-user-directory-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - ldap_role_mapping: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/role_mapping - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ 
env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-role-mapping-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - lightweight_delete: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=lightweight_delete - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - map_type: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=map_type - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: 
python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - parquet: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - --storage minio - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --storage gcs - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - part_moves_between_shards: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: 
actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=part_moves_between_shards - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - rbac: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=rbac - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - selects: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION 
}} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=selects - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - s3_minio: + + benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -1913,8 +658,8 @@ jobs: run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/minio + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} artifacts=public EOF - name: Download json reports @@ -1927,8 +672,16 @@ jobs: run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite run: python3 - -u ${{ env.SUITE }}/regression.py + -u ${{ env.SUITE }}/benchmark.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --test-to-end --local --collect-service-logs @@ -1936,7 +689,6 @@ jobs: --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log - --storage minio - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -1953,7 +705,12 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - s3_aws: + + ldap: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] needs: [regression_start] runs-on: 
[self-hosted, stress-tester] env: @@ -1965,12 +722,12 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + ref: releases - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/aws + SUITE=ldap/${{ matrix.SUITE }} artifacts=public EOF - name: Download json reports @@ -1992,18 +749,13 @@ jobs: --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-aws-artifacts + name: ldap-authentication-artifacts path: | ./report.html ./*.log.txt @@ -2013,7 +765,8 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - s3_gcs: + + parquet: needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -2025,12 +778,12 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + ref: releases - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/gcs + SUITE=parquet artifacts=public EOF - name: Download json reports @@ -2052,6 +805,12 @@ jobs: --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log + --storage minio + --storage aws_s3 + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --storage gcs --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} @@ -2059,114 +818,6 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-gcs-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - ssl_server: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ssl_server - 
artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - tiered_storage: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() with: @@ -2180,7 +831,12 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - tiered_storage_minio: + + s3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -2192,12 +848,13 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + ref: releases - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/minio + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} artifacts=public EOF - name: Download json reports @@ -2212,7 +869,6 @@ jobs: run: 
python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --with-minio --test-to-end --local --collect-service-logs @@ -2220,13 +876,21 @@ jobs: --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-minio-artifacts + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts path: | ./report.html ./*.log.txt @@ -2236,7 +900,12 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - tiered_storage_aws: + + tiered_storage_s3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -2248,12 +917,13 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + ref: releases - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=tiered_storage - STORAGE=/aws + STORAGE=/${{ matrix.STORAGE }} artifacts=public EOF - name: Download json reports @@ -2275,130 +945,20 @@ jobs: --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log - --with-s3amazon --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aws-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - tiered_storage_gcs: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - 
STORAGE=/gcs - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - --with-s3gcs --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-gcs-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - window_functions: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=window_functions - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts path: | ./report.html ./*.log.txt @@ -2408,6 +968,7 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log + FinishCheck: needs: - DockerHubPush @@ -2422,39 +983,12 @@ jobs: - IntegrationTestsRelease0 - IntegrationTestsRelease1 - CompatibilityCheck - - aes_encryption - - aggregate_functions - - atomic_insert - - base_58 - 
- benchmark_minio - - benchmark_aws - - benchmark_gcs - - clickhouse_keeper - - datetime64_extended_range - - disk_level_encryption - - dns - - example - - extended_precision_data_types - - kafka - - kerberos - - ldap_authentication - - ldap_external_user_directory - - ldap_role_mapping - - lightweight_delete - - map_type + - regression_common + - benchmark + - ldap - parquet - - part_moves_between_shards - - rbac - - selects - - s3_minio - - s3_aws - - s3_gcs - - ssl_server - - tiered_storage - - tiered_storage_minio - - tiered_storage_aws - - tiered_storage_gcs - - window_functions + - s3 + - tiered_storage_s3 runs-on: [self-hosted, style-checker] steps: - name: Check out repository code From 84098f5539a5a1d3e253e16ee5fcf2acd9021e0b Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 21 Apr 2023 08:30:23 -0700 Subject: [PATCH 040/130] output nice -> classic --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index b32457e77a7b..d14e0f616674 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -617,7 +617,7 @@ jobs: --test-to-end --local --collect-service-logs - --output nice + --output classic --parallel 1 --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" --log raw.log From 59f78b66826a82fbd5ef0d5b880a8fd7b3f1ab60 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 26 Apr 2023 21:43:49 -0700 Subject: [PATCH 041/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d14e0f616674..593e5810cc4d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -624,6 +624,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -692,6 +694,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -752,6 +756,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -818,6 +824,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -887,6 +895,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -955,6 +965,8 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 + env: + 
artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: From d92aedea998300768d2c1405b9eb16f930ede18d Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 22 May 2023 21:25:50 +0200 Subject: [PATCH 042/130] Attempt to fix integration tests - `--tls=false` from upstream/master - printing out dockerd logs on failure - increased timeout to 30 seconds total (just in case) --- .../test/integration/runner/dockerd-entrypoint.sh | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/docker/test/integration/runner/dockerd-entrypoint.sh b/docker/test/integration/runner/dockerd-entrypoint.sh index bcaa064fe4f5..0cf6de52f023 100755 --- a/docker/test/integration/runner/dockerd-entrypoint.sh +++ b/docker/test/integration/runner/dockerd-entrypoint.sh @@ -12,7 +12,14 @@ echo '{ "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] }' | dd of=/etc/docker/daemon.json 2>/dev/null -dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --default-address-pool base=172.17.0.0/12,size=24 &>/ClickHouse/tests/integration/dockerd.log & + +# In case of test hung it is convenient to use pytest --pdb to debug it, +# and on hung you can simply press Ctrl-C and it will spawn a python pdb, +# but on SIGINT dockerd will exit, so ignore it to preserve the daemon. +trap '' INT +# Binding to an IP address without --tlsverify is deprecated. Startup is intentionally being slowed +# unless --tls=false or --tlsverify=false is set +dockerd --host=unix:///var/run/docker.sock --tls=false --host=tcp://0.0.0.0:2375 --default-address-pool base=172.17.0.0/12,size=24 &>/ClickHouse/tests/integration/dockerd.log & set +e reties=0 @@ -21,9 +28,11 @@ while true; do reties=$((reties+1)) if [[ $reties -ge 100 ]]; then # 10 sec max echo "Can't start docker daemon, timeout exceeded." >&2 + cat /ClickHouse/tests/integration/dockerd.log > &2 exit 1; fi - sleep 0.1 + # For whatever reason docker seems to be unable to start in 10 seconds, so effectivly increeaing timeout to 30 seconds + sleep 0.3 done set -e From 2021b75e385f64133c1d665183df70316a046231 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 22 May 2023 23:44:54 +0200 Subject: [PATCH 043/130] Fixed typo --- docker/test/integration/runner/dockerd-entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/test/integration/runner/dockerd-entrypoint.sh b/docker/test/integration/runner/dockerd-entrypoint.sh index 0cf6de52f023..2bfb6b5bba7b 100755 --- a/docker/test/integration/runner/dockerd-entrypoint.sh +++ b/docker/test/integration/runner/dockerd-entrypoint.sh @@ -28,7 +28,7 @@ while true; do reties=$((reties+1)) if [[ $reties -ge 100 ]]; then # 10 sec max echo "Can't start docker daemon, timeout exceeded." 
>&2 - cat /ClickHouse/tests/integration/dockerd.log > &2 + cat /ClickHouse/tests/integration/dockerd.log >&2 exit 1; fi # For whatever reason docker seems to be unable to start in 10 seconds, so effectivly increeaing timeout to 30 seconds From 774588ac6330c062557ee43a389fe4dccb010559 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 23 May 2023 14:51:19 +0200 Subject: [PATCH 044/130] Trigger regression_start only if BuilderDebRelease passed --- .github/workflows/release_branches.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 593e5810cc4d..bb494ca66d08 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -573,7 +573,11 @@ jobs: ##################################### REGRESSION TESTS ###################################### ############################################################################################# regression_start: - needs: [FunctionalStatelessTestRelease, FunctionalStatefulTestRelease, IntegrationTestsRelease0, IntegrationTestsRelease1] + needs: [BuilderDebRelease, FunctionalStatelessTestRelease, FunctionalStatefulTestRelease, IntegrationTestsRelease0, IntegrationTestsRelease1] + # Run AFTER any of tests, but only if `BuilderDebRelease` succeed. + # Since we can't reference jobs directly and get their status, + # check if any of jobs succeeds. + if: ${{ always() && contains(needs.*.result, 'success') }} runs-on: ubuntu-latest steps: - run: true From dfdf795223c2a6b92d1381031e44a7f450e0d304 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 24 May 2023 10:18:57 +0200 Subject: [PATCH 045/130] Simplified dependencies of regression_start --- .github/workflows/release_branches.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index bb494ca66d08..a92f536797e0 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -573,11 +573,8 @@ jobs: ##################################### REGRESSION TESTS ###################################### ############################################################################################# regression_start: - needs: [BuilderDebRelease, FunctionalStatelessTestRelease, FunctionalStatefulTestRelease, IntegrationTestsRelease0, IntegrationTestsRelease1] - # Run AFTER any of tests, but only if `BuilderDebRelease` succeed. - # Since we can't reference jobs directly and get their status, - # check if any of jobs succeeds. - if: ${{ always() && contains(needs.*.result, 'success') }} + ## Not depending on the tests above since they can fail at any given moment. 
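+    ## Every regression suite below still declares `needs: [regression_start]`,
+    ## so gating this single no-op job (`- run: true`) on the package build is
+    ## enough to hold every suite back until a deb exists, without serializing
+    ## them behind the functional and integration tests.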
+ needs: [BuilderDebRelease] runs-on: ubuntu-latest steps: - run: true From e074fef230099ec5014c7a2d2cbd6d60bccaa8fc Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 12:06:01 -0300 Subject: [PATCH 046/130] first test --- .github/workflows/release_branches.yml | 35 +++++++++++++++++++++++++- tests/ci/sign_release.py | 8 ++++++ 2 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 tests/ci/sign_release.py diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a92f536797e0..2699be279807 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -981,7 +981,39 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - + + SignRelease: + runs-on: [ self-hosted ] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/integration_tests_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Integration tests (release) + REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse + RUN_BY_HASH_NUM=0 + RUN_BY_HASH_TOTAL=2 + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Sign release + with: + gpg_key: ${{ secrets.AKEY }} + gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} + run: | + python3 sign_release.py + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" + ########################################################################################### + ################################ FINISH CHECK ############################################# + ########################################################################################### FinishCheck: needs: - DockerHubPush @@ -996,6 +1028,7 @@ jobs: - IntegrationTestsRelease0 - IntegrationTestsRelease1 - CompatibilityCheck + - SignRelease - regression_common - benchmark - ldap diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py new file mode 100644 index 000000000000..dd8baf82317e --- /dev/null +++ b/tests/ci/sign_release.py @@ -0,0 +1,8 @@ +def main(): + logging.basicConfig(level=logging.INFO) + stopwatch = Stopwatch() + + print("Hi, testing\n") + +if __name__ == "__main__": + main() From 7b48c67b857ef1c38f103eb441b67e7110f418a1 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 12:15:22 -0300 Subject: [PATCH 047/130] . --- .github/workflows/release_branches.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 2699be279807..c9afb9869c11 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1000,9 +1000,6 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Sign release - with: - gpg_key: ${{ secrets.AKEY }} - gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} run: | python3 sign_release.py - name: Cleanup From ea12951187c0e23e92956ae50f6c6cb6c00e00ff Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 12:17:31 -0300 Subject: [PATCH 048/130] . 
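Pass the signing secrets to the step through `env:` rather than `with:`;
`with:` inputs are only valid on steps that invoke an action via `uses:`,
so GitHub Actions rejects them on a plain `run:` step.

Note the f-string below still interpolates a Python variable that is never
defined, so the script will raise NameError at this stage. A minimal sketch
of reading the value from the step environment instead (variable name
assumed from the `env:` block; not what this patch does yet):

    import os

    gpg_key = os.environ.get("gpg_key", "")
    # Avoid echoing the secret itself into the CI log.
    print(f"gpg_key is set: {bool(gpg_key)}")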
--- .github/workflows/release_branches.yml | 3 +++ tests/ci/sign_release.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index c9afb9869c11..faf54d90b7f9 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1000,6 +1000,9 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Sign release + env: + gpg_key: ${{ secrets.AKEY }} + gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} run: | python3 sign_release.py - name: Cleanup diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index dd8baf82317e..1db2b54128d5 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -2,7 +2,7 @@ def main(): logging.basicConfig(level=logging.INFO) stopwatch = Stopwatch() - print("Hi, testing\n") + print(f"hello ${gpg_key}") if __name__ == "__main__": main() From a57f27cf5ae2c1422c6fd2b15f05678d553b5149 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 12:38:17 -0300 Subject: [PATCH 049/130] aa --- .github/workflows/release_branches.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index faf54d90b7f9..bd0aabe033e6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -999,11 +999,18 @@ jobs: uses: actions/download-artifact@v2 with: path: ${{ env.REPORTS_PATH }} + - uses: actions/download-artifact@v3 + with: + name: ${{ env.BUILD_URLS }} - name: Sign release env: gpg_key: ${{ secrets.AKEY }} gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" python3 sign_release.py - name: Cleanup if: always() From f3dc5c89921e5d068636fecbc2463d3fd1e1723a Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 12:46:57 -0300 Subject: [PATCH 050/130] cancel dhpa64 for now From a28f44e938170c7a9f6e92f385fc0c09a2421953 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 13:01:32 -0300 Subject: [PATCH 051/130] . 
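Trim the SignRelease env block down to what the job actually uses and give
it its own TEMP_PATH; CHECK_NAME and the RUN_BY_HASH_* values were left
over from the integration-tests job this one was copied from.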
--- .github/workflows/release_branches.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index bd0aabe033e6..1657c796f8ce 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -988,12 +988,8 @@ jobs: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 + TEMP_PATH=${{runner.temp}}/sign_release + REPO_COPY=${{runner.temp}}/build_check/ClickHouse EOF - name: Download json reports uses: actions/download-artifact@v2 From 031988e6efe96acce36cfc17419378f8351cdab9 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 14:57:00 -0300 Subject: [PATCH 052/130] , --- .github/workflows/release_branches.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 1657c796f8ce..7777802e1277 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -998,6 +998,11 @@ jobs: - uses: actions/download-artifact@v3 with: name: ${{ env.BUILD_URLS }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 - name: Sign release env: gpg_key: ${{ secrets.AKEY }} From a27efb8c7d6cdd6620faa8e84bdaea1ad1dcd743 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 15:00:14 -0300 Subject: [PATCH 053/130] , --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 7777802e1277..7e45314fb3a8 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -989,7 +989,7 @@ jobs: run: | cat >> "$GITHUB_ENV" << 'EOF' TEMP_PATH=${{runner.temp}}/sign_release - REPO_COPY=${{runner.temp}}/build_check/ClickHouse + REPO_COPY=${{runner.temp}}/sign_release/ClickHouse EOF - name: Download json reports uses: actions/download-artifact@v2 @@ -1011,7 +1011,7 @@ jobs: sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" + cd "$TEMP_PATH/tests/ci" python3 sign_release.py - name: Cleanup if: always() From 1228d1a02f31ab9a0eb757e314b9f738cd581e93 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 15:04:48 -0300 Subject: [PATCH 054/130] , --- .github/workflows/release_branches.yml | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 7e45314fb3a8..1f784ee187be 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -988,30 +988,21 @@ jobs: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sign_release - REPO_COPY=${{runner.temp}}/sign_release/ClickHouse EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - uses: actions/download-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - name: Clear repository run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out 
repository code uses: actions/checkout@v2 + - uses: actions/download-artifact@v3 + with: + name: ${{ env.BUILD_URLS }} - name: Sign release env: gpg_key: ${{ secrets.AKEY }} gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$TEMP_PATH/tests/ci" + cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Cleanup if: always() From f7932ac6faa5eb9f750c7f64ad1987fecf6a021a Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 15:10:00 -0300 Subject: [PATCH 055/130] , --- tests/ci/sign_release.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 1db2b54128d5..045c04ca94ef 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -1,3 +1,6 @@ +import sys +import logging + def main(): logging.basicConfig(level=logging.INFO) stopwatch = Stopwatch() From e32addbf3be1e6f181f19d3456715fd5043d5df2 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 16 Dec 2022 15:14:42 -0300 Subject: [PATCH 056/130] , --- tests/ci/sign_release.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 045c04ca94ef..6d78867c21f9 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -3,7 +3,6 @@ def main(): logging.basicConfig(level=logging.INFO) - stopwatch = Stopwatch() print(f"hello ${gpg_key}") From 8ac8587ed0e2d035adb881e327d65c2694bdd677 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Sun, 18 Dec 2022 17:50:52 -0300 Subject: [PATCH 057/130] a --- tests/ci/sign_release.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 6d78867c21f9..e2a660e8e78e 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -1,10 +1,12 @@ +#!/usr/bin/env python3 import sys import logging +from env_helper import GPG_KEY def main(): logging.basicConfig(level=logging.INFO) - print(f"hello ${gpg_key}") + print(f"hello ${GPG_KEY}") if __name__ == "__main__": main() From 530ccbb951cf0c5510fe7afebd0d949ee77069e5 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Sun, 18 Dec 2022 17:58:51 -0300 Subject: [PATCH 058/130] a --- .github/workflows/release_branches.yml | 6 ++++-- tests/ci/sign_release.py | 4 +--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 1f784ee187be..dd3ea1a3aeb3 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -997,10 +997,12 @@ jobs: - uses: actions/download-artifact@v3 with: name: ${{ env.BUILD_URLS }} + - name: Display structure of downloaded files + run: ls -R - name: Sign release env: - gpg_key: ${{ secrets.AKEY }} - gpg_passphrase: ${{ secrets.GPGPASSPHRASE }} + GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} + GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index e2a660e8e78e..0dcbb4aae307 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -1,12 +1,10 @@ #!/usr/bin/env python3 import sys import logging -from env_helper import GPG_KEY +from env_helper import GPG_BINARY_SIGNING_KEY def main(): logging.basicConfig(level=logging.INFO) - print(f"hello ${GPG_KEY}") - if __name__ == "__main__": main() From 
d7e2171db734ec6816d8916127d8d4491848992f Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Sun, 18 Dec 2022 18:00:52 -0300 Subject: [PATCH 059/130] a --- tests/ci/sign_release.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 0dcbb4aae307..8eda0f9e6f1b 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -6,5 +6,7 @@ def main(): logging.basicConfig(level=logging.INFO) + print("hello") + if __name__ == "__main__": main() From e34475e3642c8267d8429cfa6f437f244314d68c Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Sun, 18 Dec 2022 18:06:14 -0300 Subject: [PATCH 060/130] a --- .github/workflows/release_branches.yml | 293 +++++++++++++++++++++++++ 1 file changed, 293 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index dd3ea1a3aeb3..27bbc9239ba6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -170,6 +170,7 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" +<<<<<<< HEAD # BuilderDebAarch64: # needs: [DockerHubPush] # runs-on: [self-hosted, builder] @@ -982,7 +983,298 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log +======= + + ############################################################################################ + ##################################### Docker images ####################################### + ############################################################################################ +# DockerServerImages: +# needs: +# - BuilderDebRelease +# # - BuilderDebAarch64 - currently we do not build aarch images +# runs-on: [self-hosted, style-checker] +# steps: +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# with: +# fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself +# - name: Check docker clickhouse/clickhouse-server building +# run: | +# cd "$GITHUB_WORKSPACE/tests/ci" +# python3 docker_server.py --release-type head --no-push +# python3 docker_server.py --release-type head --no-push --no-ubuntu \ +# --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# ############################################################################################ +# ##################################### BUILD REPORTER ####################################### +# ############################################################################################ +# BuilderReport: +# needs: +# - BuilderDebRelease +# runs-on: [self-hosted, style-checker] +# if: ${{ success() || failure() }} +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# CHECK_NAME=ClickHouse build check (actions) +# REPORTS_PATH=${{runner.temp}}/reports_dir +# REPORTS_PATH=${{runner.temp}}/reports_dir +# TEMP_PATH=${{runner.temp}}/report_check +# NEEDS_DATA_PATH=${{runner.temp}}/needs.json +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - 
name: Report Builder +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cat > "$NEEDS_DATA_PATH" << 'EOF' +# ${{ toJSON(needs) }} +# EOF +# cd "$GITHUB_WORKSPACE/tests/ci" +# python3 build_report_check.py "$CHECK_NAME" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# MarkReleaseReady: +# needs: +# - BuilderDebRelease +# # - BuilderDebAarch64 +# runs-on: [self-hosted, style-checker] +# steps: +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: Mark Commit Release Ready +# run: | +# cd "$GITHUB_WORKSPACE/tests/ci" +# python3 mark_release_ready.py +# ############################################################################################## +# ########################### FUNCTIONAl STATELESS TESTS ####################################### +# ############################################################################################## +# FunctionalStatelessTestRelease: +# needs: [BuilderDebRelease] +# runs-on: [self-hosted, func-tester] +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# TEMP_PATH=${{runner.temp}}/stateless_debug +# REPORTS_PATH=${{runner.temp}}/reports_dir +# CHECK_NAME=Stateless tests (release, actions) +# REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse +# KILL_TIMEOUT=10800 +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: Functional test +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" +# cd "$REPO_COPY/tests/ci" +# python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# ############################################################################################## +# ############################ FUNCTIONAl STATEFUL TESTS ####################################### +# ############################################################################################## +# FunctionalStatefulTestRelease: +# needs: [BuilderDebRelease] +# runs-on: [self-hosted, func-tester] +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# TEMP_PATH=${{runner.temp}}/stateful_debug +# REPORTS_PATH=${{runner.temp}}/reports_dir +# CHECK_NAME=Stateful tests (release, actions) +# REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse +# KILL_TIMEOUT=3600 +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: Functional test +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" +# cd "$REPO_COPY/tests/ci" +# python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker 
kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# ############################################################################################# +# ############################# INTEGRATION TESTS ############################################# +# ############################################################################################# +# IntegrationTestsRelease0: +# needs: [BuilderDebRelease] +# runs-on: [self-hosted, stress-tester] +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# TEMP_PATH=${{runner.temp}}/integration_tests_release +# REPORTS_PATH=${{runner.temp}}/reports_dir +# CHECK_NAME=Integration tests (release, actions) +# REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse +# RUN_BY_HASH_NUM=0 +# RUN_BY_HASH_TOTAL=2 +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: Integration test +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" +# cd "$REPO_COPY/tests/ci" +# python3 integration_test_check.py "$CHECK_NAME" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# IntegrationTestsRelease1: +# needs: [BuilderDebRelease] +# runs-on: [self-hosted, stress-tester] +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# TEMP_PATH=${{runner.temp}}/integration_tests_release +# REPORTS_PATH=${{runner.temp}}/reports_dir +# CHECK_NAME=Integration tests (release, actions) +# REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse +# RUN_BY_HASH_NUM=1 +# RUN_BY_HASH_TOTAL=2 +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: Integration test +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" +# cd "$REPO_COPY/tests/ci" +# python3 integration_test_check.py "$CHECK_NAME" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +# ############################################################################################# +# ############################### TESTFLOWS TESTS ############################################# +# ############################################################################################# +# TestFlowsTestsRelease: +# needs: [BuilderDebRelease] +# runs-on: [self-hosted, stress-tester] +# steps: +# - name: Set envs +# run: | +# cat >> "$GITHUB_ENV" << 'EOF' +# TEMP_PATH=${{runner.temp}}/testflows_tests_release +# REPORTS_PATH=${{runner.temp}}/reports_dir +# CHECK_NAME=TestFlows tests (release, actions) +# REPO_COPY=${{runner.temp}}/testflows_tests_release/ClickHouse +# EOF +# - name: Download json reports +# uses: actions/download-artifact@v2 +# with: +# path: ${{ env.REPORTS_PATH }} +# - name: Clear repository +# run: | +# sudo rm -fr 
"$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" +# - name: Check out repository code +# uses: actions/checkout@v2 +# - name: TestFlows test +# run: | +# sudo rm -fr "$TEMP_PATH" +# mkdir -p "$TEMP_PATH" +# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" +# cd "$REPO_COPY/tests/ci" +# python3 testflows_test_check.py "$CHECK_NAME" +# - name: Cleanup +# if: always() +# run: | +# # shellcheck disable=SC2046 +# docker kill $(docker ps -q) ||: +# # shellcheck disable=SC2046 +# docker rm -f $(docker ps -a -q) ||: +# sudo rm -fr "$TEMP_PATH" +>>>>>>> 708f7c5093 (a) SignRelease: + needs: [BuilderDebRelease] runs-on: [ self-hosted ] steps: - name: Set envs @@ -997,6 +1289,7 @@ jobs: - uses: actions/download-artifact@v3 with: name: ${{ env.BUILD_URLS }} + path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - name: Display structure of downloaded files run: ls -R - name: Sign release From ac5f1a0f318d04eb0e3ef4effff09d05cf488aa0 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Mon, 19 Dec 2022 08:26:38 -0300 Subject: [PATCH 061/130] a --- .github/workflows/release_branches.yml | 291 ------------------------- 1 file changed, 291 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 27bbc9239ba6..ef291bebc284 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -170,7 +170,6 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -<<<<<<< HEAD # BuilderDebAarch64: # needs: [DockerHubPush] # runs-on: [self-hosted, builder] @@ -983,296 +982,6 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log -======= - - ############################################################################################ - ##################################### Docker images ####################################### - ############################################################################################ -# DockerServerImages: -# needs: -# - BuilderDebRelease -# # - BuilderDebAarch64 - currently we do not build aarch images -# runs-on: [self-hosted, style-checker] -# steps: -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# with: -# fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself -# - name: Check docker clickhouse/clickhouse-server building -# run: | -# cd "$GITHUB_WORKSPACE/tests/ci" -# python3 docker_server.py --release-type head --no-push -# python3 docker_server.py --release-type head --no-push --no-ubuntu \ -# --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# ############################################################################################ -# ##################################### BUILD REPORTER ####################################### -# ############################################################################################ -# BuilderReport: -# needs: -# - BuilderDebRelease -# runs-on: [self-hosted, style-checker] -# if: ${{ success() || failure() }} -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" << 'EOF' -# CHECK_NAME=ClickHouse build check (actions) -# REPORTS_PATH=${{runner.temp}}/reports_dir -# REPORTS_PATH=${{runner.temp}}/reports_dir -# 
TEMP_PATH=${{runner.temp}}/report_check -# NEEDS_DATA_PATH=${{runner.temp}}/needs.json -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Report Builder -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cat > "$NEEDS_DATA_PATH" << 'EOF' -# ${{ toJSON(needs) }} -# EOF -# cd "$GITHUB_WORKSPACE/tests/ci" -# python3 build_report_check.py "$CHECK_NAME" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# MarkReleaseReady: -# needs: -# - BuilderDebRelease -# # - BuilderDebAarch64 -# runs-on: [self-hosted, style-checker] -# steps: -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Mark Commit Release Ready -# run: | -# cd "$GITHUB_WORKSPACE/tests/ci" -# python3 mark_release_ready.py -# ############################################################################################## -# ########################### FUNCTIONAl STATELESS TESTS ####################################### -# ############################################################################################## -# FunctionalStatelessTestRelease: -# needs: [BuilderDebRelease] -# runs-on: [self-hosted, func-tester] -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" << 'EOF' -# TEMP_PATH=${{runner.temp}}/stateless_debug -# REPORTS_PATH=${{runner.temp}}/reports_dir -# CHECK_NAME=Stateless tests (release, actions) -# REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse -# KILL_TIMEOUT=10800 -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Functional test -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" -# cd "$REPO_COPY/tests/ci" -# python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# ############################################################################################## -# ############################ FUNCTIONAl STATEFUL TESTS ####################################### -# ############################################################################################## -# FunctionalStatefulTestRelease: -# needs: [BuilderDebRelease] -# runs-on: [self-hosted, func-tester] -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" << 'EOF' -# TEMP_PATH=${{runner.temp}}/stateful_debug -# REPORTS_PATH=${{runner.temp}}/reports_dir -# CHECK_NAME=Stateful tests (release, actions) -# REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse -# KILL_TIMEOUT=3600 -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir 
"$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Functional test -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" -# cd "$REPO_COPY/tests/ci" -# python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# ############################################################################################# -# ############################# INTEGRATION TESTS ############################################# -# ############################################################################################# -# IntegrationTestsRelease0: -# needs: [BuilderDebRelease] -# runs-on: [self-hosted, stress-tester] -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" << 'EOF' -# TEMP_PATH=${{runner.temp}}/integration_tests_release -# REPORTS_PATH=${{runner.temp}}/reports_dir -# CHECK_NAME=Integration tests (release, actions) -# REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse -# RUN_BY_HASH_NUM=0 -# RUN_BY_HASH_TOTAL=2 -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Integration test -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" -# cd "$REPO_COPY/tests/ci" -# python3 integration_test_check.py "$CHECK_NAME" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# IntegrationTestsRelease1: -# needs: [BuilderDebRelease] -# runs-on: [self-hosted, stress-tester] -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" << 'EOF' -# TEMP_PATH=${{runner.temp}}/integration_tests_release -# REPORTS_PATH=${{runner.temp}}/reports_dir -# CHECK_NAME=Integration tests (release, actions) -# REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse -# RUN_BY_HASH_NUM=1 -# RUN_BY_HASH_TOTAL=2 -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: Integration test -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" -# cd "$REPO_COPY/tests/ci" -# python3 integration_test_check.py "$CHECK_NAME" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" -# ############################################################################################# -# ############################### TESTFLOWS TESTS ############################################# -# ############################################################################################# -# TestFlowsTestsRelease: -# needs: [BuilderDebRelease] -# runs-on: [self-hosted, stress-tester] -# steps: -# - name: Set envs -# run: | -# cat >> "$GITHUB_ENV" 
<< 'EOF' -# TEMP_PATH=${{runner.temp}}/testflows_tests_release -# REPORTS_PATH=${{runner.temp}}/reports_dir -# CHECK_NAME=TestFlows tests (release, actions) -# REPO_COPY=${{runner.temp}}/testflows_tests_release/ClickHouse -# EOF -# - name: Download json reports -# uses: actions/download-artifact@v2 -# with: -# path: ${{ env.REPORTS_PATH }} -# - name: Clear repository -# run: | -# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" -# - name: Check out repository code -# uses: actions/checkout@v2 -# - name: TestFlows test -# run: | -# sudo rm -fr "$TEMP_PATH" -# mkdir -p "$TEMP_PATH" -# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" -# cd "$REPO_COPY/tests/ci" -# python3 testflows_test_check.py "$CHECK_NAME" -# - name: Cleanup -# if: always() -# run: | -# # shellcheck disable=SC2046 -# docker kill $(docker ps -q) ||: -# # shellcheck disable=SC2046 -# docker rm -f $(docker ps -a -q) ||: -# sudo rm -fr "$TEMP_PATH" ->>>>>>> 708f7c5093 (a) SignRelease: needs: [BuilderDebRelease] runs-on: [ self-hosted ] From 05d7de21aee7776fea319b6c6cfdb033ddd304fa Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Mon, 19 Dec 2022 10:49:51 -0300 Subject: [PATCH 062/130] a --- .github/workflows/release_branches.yml | 4 +-- tests/ci/sign_release.py | 43 ++++++++++++++++++++++++-- 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ef291bebc284..92d24d87e69d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -995,10 +995,10 @@ jobs: sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code uses: actions/checkout@v2 - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v2 with: name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + path: $TEMP_PATH/build_urls.json - name: Display structure of downloaded files run: ls -R - name: Sign release diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 8eda0f9e6f1b..d5ec1e41c060 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -1,12 +1,49 @@ #!/usr/bin/env python3 import sys +import os import logging -from env_helper import GPG_BINARY_SIGNING_KEY +from env_helper import GPG_BINARY_SIGNING_KEY, TEMP_PATH, REPO_COPY, REPORTS_PATH +from github import Github +import subprocess +from s3_helper import S3Helper +from get_robot_token import get_best_robot_token +from pr_info import PRInfo +from build_download_helper import download_builds +from rerun_helper import RerunHelper +from docker_pull_helper import get_images_with_versions + +CHECK_NAME = "Sign release (actions)" def main(): - logging.basicConfig(level=logging.INFO) + temp_path = TEMP_PATH + repo_path = REPO_COPY + reports_path = REPORTS_PATH + + pr_info = PRInfo() + + gh = Github(get_best_robot_token()) + + rerun_helper = RerunHelper(gh, pr_info, CHECK_NAME) + if rerun_helper.is_already_finished_by_status(): + logging.info("Check is already finished according to github status, exiting") + sys.exit(0) + + packages_path = os.path.join(temp_path, "packages") + if not os.path.exists(packages_path): + os.makedirs(packages_path) + + # def url_filter(url): + # return url.endswith(".deb") and ( + # "clickhouse-common-static_" in url or "clickhouse-server_" in url + # ) + + download_builds(CHECK_NAME, reports_path, packages_path) - print("hello") + for f in os.listdir(packages_path): + full_path = os.path.join(packages_path, f) + subprocess.check_call( + 
f"dpkg -x {full_path} {packages_path} && rm {full_path}", shell=True + ) if __name__ == "__main__": main() From e6f24eca1126a9c49b1f072c5d0122294b4b8d2d Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Mon, 19 Dec 2022 10:57:47 -0300 Subject: [PATCH 063/130] a --- .github/workflows/release_branches.yml | 6 ------ tests/ci/sign_release.py | 1 + 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 92d24d87e69d..e0b2882d5228 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -995,12 +995,6 @@ jobs: sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code uses: actions/checkout@v2 - - uses: actions/download-artifact@v2 - with: - name: ${{ env.BUILD_URLS }} - path: $TEMP_PATH/build_urls.json - - name: Display structure of downloaded files - run: ls -R - name: Sign release env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index d5ec1e41c060..0fffb3a2c370 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -44,6 +44,7 @@ def main(): subprocess.check_call( f"dpkg -x {full_path} {packages_path} && rm {full_path}", shell=True ) + print(f"aaa: {full_path}") if __name__ == "__main__": main() From 8955cd8c903f566523a724a21620fb2a9225e07a Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Mon, 19 Dec 2022 12:50:42 -0300 Subject: [PATCH 064/130] a --- tests/ci/sign_release.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 0fffb3a2c370..1b063cbb05f1 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -8,7 +8,7 @@ from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo -from build_download_helper import download_builds +from build_download_helper import download_builds_filter from rerun_helper import RerunHelper from docker_pull_helper import get_images_with_versions @@ -37,7 +37,7 @@ def main(): # "clickhouse-common-static_" in url or "clickhouse-server_" in url # ) - download_builds(CHECK_NAME, reports_path, packages_path) + download_builds_filter(CHECK_NAME, reports_path, packages_path) for f in os.listdir(packages_path): full_path = os.path.join(packages_path, f) From 04ca68f809174a155adace7753d7cb98e6c2ec1a Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Tue, 3 Jan 2023 06:50:44 -0300 Subject: [PATCH 065/130] a --- tests/ci/ci_config.py | 3 +++ tests/ci/sign_release.py | 1 + 2 files changed, 4 insertions(+) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index a0fafebb79f1..c13d06ef7f43 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -335,6 +335,9 @@ "required_build": "package_aarch64", "test_grep_exclude_filter": "constant_column_search", }, + "Sign release (actions)": { + "required_build": "package_release" + } }, } # type: dict diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 1b063cbb05f1..dd27fc7224cc 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -5,6 +5,7 @@ from env_helper import GPG_BINARY_SIGNING_KEY, TEMP_PATH, REPO_COPY, REPORTS_PATH from github import Github import subprocess +import hashlib from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo From 13c071e59bc01b375a5b5a88cb236086b1df3f39 Mon Sep 17 00:00:00 2001 From: Arthur 
Passos Date: Tue, 3 Jan 2023 08:12:44 -0300 Subject: [PATCH 066/130] a --- .github/workflows/release_branches.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index e0b2882d5228..4eb90ef6f145 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -988,6 +988,7 @@ jobs: steps: - name: Set envs run: | + REPORTS_PATH=${{runner.temp}}/reports_dir cat >> "$GITHUB_ENV" << 'EOF' EOF - name: Clear repository @@ -995,6 +996,10 @@ jobs: sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code uses: actions/checkout@v2 + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} - name: Sign release env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} From a5334acc42541d183d28f709e4f5c5ed251b72d0 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Tue, 3 Jan 2023 09:18:14 -0300 Subject: [PATCH 067/130] a --- tests/ci/build_download_helper.py | 1 + tests/ci/sign_release.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py index 58997bed2536..01c8fe9003c7 100644 --- a/tests/ci/build_download_helper.py +++ b/tests/ci/build_download_helper.py @@ -48,6 +48,7 @@ def get_build_name_for_check(check_name) -> str: def read_build_urls(build_name, reports_path) -> List[str]: for root, _, files in os.walk(reports_path): for f in files: + logging.info("Found file: %s", f) if build_name in f: logging.info("Found build report json %s", f) with open(os.path.join(root, f), "r", encoding="utf-8") as file_handler: diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index dd27fc7224cc..579464fb18a2 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -42,9 +42,6 @@ def main(): for f in os.listdir(packages_path): full_path = os.path.join(packages_path, f) - subprocess.check_call( - f"dpkg -x {full_path} {packages_path} && rm {full_path}", shell=True - ) print(f"aaa: {full_path}") if __name__ == "__main__": From 7cb68a8b503c3ce1113cbe6673d7cb76d25da80c Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Tue, 3 Jan 2023 10:29:42 -0300 Subject: [PATCH 068/130] a From bef2fd2a2bd1b54c12800501bd73d992bf189717 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Wed, 4 Jan 2023 08:52:39 -0300 Subject: [PATCH 069/130] a --- .github/workflows/release_branches.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4eb90ef6f145..2dabc7ed2121 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -988,8 +988,8 @@ jobs: steps: - name: Set envs run: | - REPORTS_PATH=${{runner.temp}}/reports_dir cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Clear repository run: | @@ -1004,6 +1004,7 @@ jobs: env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} + REPORTS_PATH: ${{ env.REPORTS_PATH }} run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py From 24d919e0897eba955a1e8bf3700c926f2849dc95 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Wed, 4 Jan 2023 17:32:58 -0300 Subject: [PATCH 070/130] a --- .github/workflows/release_branches.yml | 5 ++++ tests/ci/sign_release.py | 36 ++++++++++++++++++++++++-- 2 files 
changed, 39 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 2dabc7ed2121..9dc2f4eb9b65 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1008,6 +1008,11 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py + - name: Upload signed hashes + uses: actions/upload-artifact@v2 + with: + name: signed-hashes + path: ${{ runner.temp }}/*.json - name: Cleanup if: always() run: | diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 579464fb18a2..cb801c66783c 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -2,7 +2,7 @@ import sys import os import logging -from env_helper import GPG_BINARY_SIGNING_KEY, TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import GPG_BINARY_SIGNING_KEY, GPG_BINARY_SIGNING_PASSPHRASE, TEMP_PATH, REPO_COPY, REPORTS_PATH from github import Github import subprocess import hashlib @@ -12,9 +12,40 @@ from build_download_helper import download_builds_filter from rerun_helper import RerunHelper from docker_pull_helper import get_images_with_versions +import hashlib +import gnupg + CHECK_NAME = "Sign release (actions)" +def hash_file(file_path): + BLOCK_SIZE = 65536 # The size of each read from the file + + file_hash = hashlib.sha512() # Create the hash object, can use something other than `.sha256()` if you wish + with open(file_path, 'rb') as f: # Open the file to read it's bytes + fb = f.read(BLOCK_SIZE) # Read from the file. Take in the amount declared above + while len(fb) > 0: # While there is still data being read from the file + file_hash.update(fb) # Update the hash + fb = f.read(BLOCK_SIZE) # Read the next block from the file + + hash_file_path = file_path + '.sha512' + with open(hash_file_path, 'x') as f: + digest = file_hash.hexdigest() + f.write(digest) + print(f'Hashed {file_path}: {digest}') + + return hash_file_path + +def sign_file(file_path): + priv_key_file_path = 'priv.key' + with open(priv_key_file_path, 'x') as f: + f.write(GPG_BINARY_SIGNING_KEY) + + os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --bash --import {priv_key_file_path}') + os.system(f'gpg --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') + print(f"Signed {file_path}") + os.remove(priv_key_file_path) + def main(): temp_path = TEMP_PATH repo_path = REPO_COPY @@ -42,7 +73,8 @@ def main(): for f in os.listdir(packages_path): full_path = os.path.join(packages_path, f) - print(f"aaa: {full_path}") + hashed_file_path = hash_file(full_path) + sign_file(hashed_file_path) if __name__ == "__main__": main() From cd8af6a42007a846a2b8e6816e015d0906c1fd3b Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Wed, 4 Jan 2023 18:59:53 -0300 Subject: [PATCH 071/130] a --- tests/ci/worker/ubuntu_ami_for_ci.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/worker/ubuntu_ami_for_ci.sh b/tests/ci/worker/ubuntu_ami_for_ci.sh index c5bc090d8d83..027099580509 100644 --- a/tests/ci/worker/ubuntu_ami_for_ci.sh +++ b/tests/ci/worker/ubuntu_ami_for_ci.sh @@ -73,7 +73,7 @@ systemctl restart docker sudo -u ubuntu docker buildx version sudo -u ubuntu docker buildx create --use --name default-builder -pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory +pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory gnupg mkdir -p $RUNNER_HOME && cd $RUNNER_HOME From df3523cdbbb5673d1d9081de963d451d68a505e4 Mon Sep 17 00:00:00 
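Editor's note: patch 070 above introduces the `hash_file`/`sign_file` pair, which at this point writes `<pkg>.sha512` with the hex digest and wraps it into `<pkg>.sha512.gpg` via `gpg --sign`. A minimal consumer-side sketch of how such artifacts could be verified is shown below, assuming the signing public key is already imported; the package file name is illustrative, and note that later patches in this series fix the `--bash` typo and switch the digest to SHA-256.

```python
import hashlib
import subprocess

def verify(package_path: str) -> bool:
    # `gpg --sign` produces a wrapped signed message (not a detached
    # signature), so --decrypt both checks the signature and recovers
    # the original digest text.
    recovered = subprocess.run(
        ["gpg", "--decrypt", f"{package_path}.sha512.gpg"],
        check=True, capture_output=True, text=True,
    ).stdout.strip()

    # Recompute the digest block-wise, as hash_file in the hunk above does.
    file_hash = hashlib.sha512()
    with open(package_path, "rb") as f:
        for block in iter(lambda: f.read(65536), b""):
            file_hash.update(block)
    return file_hash.hexdigest() == recovered

# Illustrative file name only:
print(verify("clickhouse-server_22.3.15.2.altinitystable_amd64.deb"))
```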
2001 From: Arthur Passos Date: Wed, 4 Jan 2023 21:11:37 -0300 Subject: [PATCH 072/130] a --- tests/ci/sign_release.py | 1 - tests/ci/worker/ubuntu_ami_for_ci.sh | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index cb801c66783c..e8fb62b37a3d 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -13,7 +13,6 @@ from rerun_helper import RerunHelper from docker_pull_helper import get_images_with_versions import hashlib -import gnupg CHECK_NAME = "Sign release (actions)" diff --git a/tests/ci/worker/ubuntu_ami_for_ci.sh b/tests/ci/worker/ubuntu_ami_for_ci.sh index 027099580509..c5bc090d8d83 100644 --- a/tests/ci/worker/ubuntu_ami_for_ci.sh +++ b/tests/ci/worker/ubuntu_ami_for_ci.sh @@ -73,7 +73,7 @@ systemctl restart docker sudo -u ubuntu docker buildx version sudo -u ubuntu docker buildx create --use --name default-builder -pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory gnupg +pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory mkdir -p $RUNNER_HOME && cd $RUNNER_HOME From 40c209b3ac60242e7fef3075520aa893302ca0d0 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Wed, 4 Jan 2023 22:27:35 -0300 Subject: [PATCH 073/130] a --- .github/workflows/release_branches.yml | 3 ++- tests/ci/sign_release.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9dc2f4eb9b65..ab387c81d701 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1012,7 +1012,8 @@ jobs: uses: actions/upload-artifact@v2 with: name: signed-hashes - path: ${{ runner.temp }}/*.json + path: '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' +# path: ${{ runner.temp }}/*.gpg - name: Cleanup if: always() run: | diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index e8fb62b37a3d..4ba8531ea8b4 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -40,7 +40,7 @@ def sign_file(file_path): with open(priv_key_file_path, 'x') as f: f.write(GPG_BINARY_SIGNING_KEY) - os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --bash --import {priv_key_file_path}') + os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}') os.system(f'gpg --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') print(f"Signed {file_path}") os.remove(priv_key_file_path) From 075c70ef76d7fa21144d2899373bcf5c99d7ea86 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Thu, 5 Jan 2023 09:43:36 -0300 Subject: [PATCH 074/130] a --- tests/ci/sign_release.py | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 4ba8531ea8b4..3881f8eac526 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -4,15 +4,13 @@ import logging from env_helper import GPG_BINARY_SIGNING_KEY, GPG_BINARY_SIGNING_PASSPHRASE, TEMP_PATH, REPO_COPY, REPORTS_PATH from github import Github -import subprocess -import hashlib from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo -from build_download_helper import download_builds_filter +from build_download_helper import download_builds_filter, get_build_name_for_check from rerun_helper import RerunHelper -from docker_pull_helper import get_images_with_versions 
import hashlib +from version_helper import get_version_from_repo CHECK_NAME = "Sign release (actions)" @@ -45,14 +43,29 @@ def sign_file(file_path): print(f"Signed {file_path}") os.remove(priv_key_file_path) + return f'{file_path}.gpg' + def main(): temp_path = TEMP_PATH - repo_path = REPO_COPY reports_path = REPORTS_PATH + gh = Github(get_best_robot_token()) + + build_name = get_build_name_for_check(CHECK_NAME) + + if not os.path.exists(TEMP_PATH): + os.makedirs(TEMP_PATH) + pr_info = PRInfo() - gh = Github(get_best_robot_token()) + logging.info("Repo copy path %s", REPO_COPY) + + s3_helper = S3Helper("https://s3.amazonaws.com") + + version = get_version_from_repo() + version_str = f"{version.major}.{version.minor}" + + s3_path_prefix = "/".join((version_str, pr_info.sha, build_name)) rerun_helper = RerunHelper(gh, pr_info, CHECK_NAME) if rerun_helper.is_already_finished_by_status(): @@ -63,17 +76,15 @@ def main(): if not os.path.exists(packages_path): os.makedirs(packages_path) - # def url_filter(url): - # return url.endswith(".deb") and ( - # "clickhouse-common-static_" in url or "clickhouse-server_" in url - # ) - download_builds_filter(CHECK_NAME, reports_path, packages_path) for f in os.listdir(packages_path): full_path = os.path.join(packages_path, f) hashed_file_path = hash_file(full_path) - sign_file(hashed_file_path) + signed_file_path = sign_file(hashed_file_path) + s3_helper.upload_build_file_to_s3(signed_file_path, s3_path_prefix) + + sys.exit(0) if __name__ == "__main__": main() From 3d6553a4dda3d6320ee9b8e912485d59c91a5df6 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Thu, 5 Jan 2023 14:04:37 -0300 Subject: [PATCH 075/130] a --- tests/ci/sign_release.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 3881f8eac526..7558542f654f 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -10,7 +10,6 @@ from build_download_helper import download_builds_filter, get_build_name_for_check from rerun_helper import RerunHelper import hashlib -from version_helper import get_version_from_repo CHECK_NAME = "Sign release (actions)" @@ -49,8 +48,6 @@ def main(): temp_path = TEMP_PATH reports_path = REPORTS_PATH - gh = Github(get_best_robot_token()) - build_name = get_build_name_for_check(CHECK_NAME) if not os.path.exists(TEMP_PATH): @@ -62,15 +59,7 @@ def main(): s3_helper = S3Helper("https://s3.amazonaws.com") - version = get_version_from_repo() - version_str = f"{version.major}.{version.minor}" - - s3_path_prefix = "/".join((version_str, pr_info.sha, build_name)) - - rerun_helper = RerunHelper(gh, pr_info, CHECK_NAME) - if rerun_helper.is_already_finished_by_status(): - logging.info("Check is already finished according to github status, exiting") - sys.exit(0) + s3_path_prefix = "/".join((pr_info.head_ref, pr_info.sha, build_name)) packages_path = os.path.join(temp_path, "packages") if not os.path.exists(packages_path): @@ -83,6 +72,7 @@ def main(): hashed_file_path = hash_file(full_path) signed_file_path = sign_file(hashed_file_path) s3_helper.upload_build_file_to_s3(signed_file_path, s3_path_prefix) + print(f'Uploaded file {signed_file_path} to {s3_path_prefix}') sys.exit(0) From 2c038f14fa3a8cd03cc0fa96d776c28dc6329d58 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 09:29:28 -0300 Subject: [PATCH 076/130] a --- tests/ci/sign_release.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/ci/sign_release.py 
b/tests/ci/sign_release.py index 7558542f654f..76539ca7b2be 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -59,7 +59,9 @@ def main(): s3_helper = S3Helper("https://s3.amazonaws.com") - s3_path_prefix = "/".join((pr_info.head_ref, pr_info.sha, build_name)) + s3_path_prefix = f"{pr_info.number}/{pr_info.sha}/" + CHECK_NAME.lower().replace( + " ", "_" + ).replace("(", "_").replace(")", "_").replace(",", "_") packages_path = os.path.join(temp_path, "packages") if not os.path.exists(packages_path): @@ -71,8 +73,9 @@ def main(): full_path = os.path.join(packages_path, f) hashed_file_path = hash_file(full_path) signed_file_path = sign_file(hashed_file_path) - s3_helper.upload_build_file_to_s3(signed_file_path, s3_path_prefix) - print(f'Uploaded file {signed_file_path} to {s3_path_prefix}') + s3_path = f'{s3_path_prefix}/{os.path.basename(signed_file_path)}' + s3_helper.upload_build_file_to_s3(signed_file_path, s3_path) + print(f'Uploaded file {signed_file_path} to {s3_path}') sys.exit(0) From 605d0e1b91fafe30f8e50e7144215389cc668dcd Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 15:32:45 -0300 Subject: [PATCH 077/130] a --- .github/workflows/release_branches.yml | 5 +++-- aform | 1 + tests/ci/sign_release.py | 24 ++++++++---------------- 3 files changed, 12 insertions(+), 18 deletions(-) create mode 100644 aform diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ab387c81d701..f89779eeed4e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -989,6 +989,7 @@ jobs: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/signed REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Clear repository @@ -1012,8 +1013,8 @@ jobs: uses: actions/upload-artifact@v2 with: name: signed-hashes - path: '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' -# path: ${{ runner.temp }}/*.gpg + path: $TEMP_PATH/*.gpg +# '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' - name: Cleanup if: always() run: | diff --git a/aform b/aform new file mode 100644 index 000000000000..5f2ec319ac09 --- /dev/null +++ b/aform @@ -0,0 +1 @@ +skodn asmdap skldma diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 76539ca7b2be..bd72fb45078c 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -3,12 +3,9 @@ import os import logging from env_helper import GPG_BINARY_SIGNING_KEY, GPG_BINARY_SIGNING_PASSPHRASE, TEMP_PATH, REPO_COPY, REPORTS_PATH -from github import Github from s3_helper import S3Helper -from get_robot_token import get_best_robot_token from pr_info import PRInfo -from build_download_helper import download_builds_filter, get_build_name_for_check -from rerun_helper import RerunHelper +from build_download_helper import download_builds_filter import hashlib @@ -37,19 +34,18 @@ def sign_file(file_path): with open(priv_key_file_path, 'x') as f: f.write(GPG_BINARY_SIGNING_KEY) + out_file_path = f'{file_path}.gpg' + os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}') - os.system(f'gpg --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') + os.system(f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') print(f"Signed {file_path}") os.remove(priv_key_file_path) - return f'{file_path}.gpg' + return 
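Editor's note: the chained `.replace()` calls added to `s3_path_prefix` in the hunk above lowercase the check name and substitute spaces, parentheses, and commas with underscores. The snippet below shows what that produces for this check name, plus an equivalent single regex; the PR number and SHA are placeholders.

```python
import re

CHECK_NAME = "Sign release (actions)"
number, sha = 135, "deadbeef"

prefix = f"{number}/{sha}/" + CHECK_NAME.lower().replace(" ", "_").replace(
    "(", "_").replace(")", "_").replace(",", "_")
# -> "135/deadbeef/sign_release__actions_"

assert prefix == f"{number}/{sha}/" + re.sub(r"[ (),]", "_", CHECK_NAME.lower())
```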
out_file_path def main(): - temp_path = TEMP_PATH reports_path = REPORTS_PATH - build_name = get_build_name_for_check(CHECK_NAME) - if not os.path.exists(TEMP_PATH): os.makedirs(TEMP_PATH) @@ -63,14 +59,10 @@ def main(): " ", "_" ).replace("(", "_").replace(")", "_").replace(",", "_") - packages_path = os.path.join(temp_path, "packages") - if not os.path.exists(packages_path): - os.makedirs(packages_path) - - download_builds_filter(CHECK_NAME, reports_path, packages_path) + download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) - for f in os.listdir(packages_path): - full_path = os.path.join(packages_path, f) + for f in os.listdir(TEMP_PATH): + full_path = os.path.join(TEMP_PATH, f) hashed_file_path = hash_file(full_path) signed_file_path = sign_file(hashed_file_path) s3_path = f'{s3_path_prefix}/{os.path.basename(signed_file_path)}' From 6a0e2e0d9dc15308d83908333cca1be59b986240 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 16:50:05 -0300 Subject: [PATCH 078/130] a --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f89779eeed4e..c960df749c15 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1013,7 +1013,7 @@ jobs: uses: actions/upload-artifact@v2 with: name: signed-hashes - path: $TEMP_PATH/*.gpg + path: ${{TEMP_PATH}}/*.gpg # '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' - name: Cleanup if: always() From 452e6421458e44201bd7fc9f6feb866dd44c5a20 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 16:59:42 -0300 Subject: [PATCH 079/130] a --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index c960df749c15..f6e572dc1556 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1013,7 +1013,7 @@ jobs: uses: actions/upload-artifact@v2 with: name: signed-hashes - path: ${{TEMP_PATH}}/*.gpg + path: ${{ TEMP_PATH }}/*.gpg # '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' - name: Cleanup if: always() From d6de6862a7073992ed25a7491b8fcb96c0a3ec85 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 17:02:52 -0300 Subject: [PATCH 080/130] a --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f6e572dc1556..70c5dd85b2ab 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1013,7 +1013,7 @@ jobs: uses: actions/upload-artifact@v2 with: name: signed-hashes - path: ${{ TEMP_PATH }}/*.gpg + path: ${{ env.TEMP_PATH }}/*.gpg # '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' - name: Cleanup if: always() From 9b4a34295a18ecb134ee4a54bb90290957d4e0f0 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Fri, 6 Jan 2023 19:50:41 -0300 Subject: [PATCH 081/130] working as epxected, just a cleanup --- .github/workflows/release_branches.yml | 300 +++++++++++++++++++++++++ aform | 1 - tests/ci/build_download_helper.py | 1 - 3 files changed, 300 insertions(+), 2 deletions(-) delete mode 100644 aform diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml 
index 70c5dd85b2ab..b6dff3a833fd 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -982,6 +982,296 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log +<<<<<<< HEAD +======= + ############################################################################################ + ##################################### Docker images ####################################### + ############################################################################################ + DockerServerImages: + needs: + - BuilderDebRelease + # - BuilderDebAarch64 - currently we do not build aarch images + runs-on: [self-hosted, style-checker] + steps: + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + with: + fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself + - name: Check docker clickhouse/clickhouse-server building + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_server.py --release-type head --no-push + python3 docker_server.py --release-type head --no-push --no-ubuntu \ + --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + ############################################################################################ + ##################################### BUILD REPORTER ####################################### + ############################################################################################ + BuilderReport: + needs: + - BuilderDebRelease + runs-on: [self-hosted, style-checker] + if: ${{ success() || failure() }} + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + CHECK_NAME=ClickHouse build check (actions) + REPORTS_PATH=${{runner.temp}}/reports_dir + REPORTS_PATH=${{runner.temp}}/reports_dir + TEMP_PATH=${{runner.temp}}/report_check + NEEDS_DATA_PATH=${{runner.temp}}/needs.json + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Report Builder + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cat > "$NEEDS_DATA_PATH" << 'EOF' + ${{ toJSON(needs) }} + EOF + cd "$GITHUB_WORKSPACE/tests/ci" + python3 build_report_check.py "$CHECK_NAME" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + MarkReleaseReady: + needs: + - BuilderDebRelease + # - BuilderDebAarch64 + runs-on: [self-hosted, style-checker] + steps: + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Mark Commit Release Ready + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 mark_release_ready.py + ############################################################################################## + ########################### FUNCTIONAl STATELESS TESTS ####################################### + ############################################################################################## + 
FunctionalStatelessTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, func-tester] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/stateless_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateless tests (release, actions) + REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse + KILL_TIMEOUT=10800 + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Functional test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + ############################################################################################## + ############################ FUNCTIONAl STATEFUL TESTS ####################################### + ############################################################################################## + FunctionalStatefulTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, func-tester] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/stateful_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateful tests (release, actions) + REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse + KILL_TIMEOUT=3600 + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Functional test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + ############################################################################################# + ############################# INTEGRATION TESTS ############################################# + ############################################################################################# + IntegrationTestsRelease0: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/integration_tests_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Integration tests (release, actions) + REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse + RUN_BY_HASH_NUM=0 + RUN_BY_HASH_TOTAL=2 + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Integration test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + IntegrationTestsRelease1: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/integration_tests_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Integration tests (release, actions) + REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse + RUN_BY_HASH_NUM=1 + RUN_BY_HASH_TOTAL=2 + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Integration test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + ############################################################################################# + ############################### TESTFLOWS TESTS ############################################# + ############################################################################################# + TestFlowsTestsRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/testflows_tests_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=TestFlows tests (release, actions) + REPO_COPY=${{runner.temp}}/testflows_tests_release/ClickHouse + EOF + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: TestFlows test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 testflows_test_check.py "$CHECK_NAME" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" +>>>>>>> 983e7addba (working as epxected, just a cleanup) SignRelease: needs: [BuilderDebRelease] runs-on: [ self-hosted ] @@ -1029,6 +1319,7 @@ jobs: - DockerHubPush - DockerServerImages - BuilderReport +<<<<<<< HEAD # - BuilderSpecialReport - MarkReleaseReady - FunctionalStatelessTestRelease @@ -1037,6 +1328,15 @@ jobs: # - FunctionalStatefulTestAarch64 - IntegrationTestsRelease0 - IntegrationTestsRelease1 +======= + - MarkReleaseReady + - FunctionalStatelessTestRelease + - FunctionalStatefulTestRelease + - IntegrationTestsRelease0 + - IntegrationTestsRelease1 + - TestFlowsTestsRelease + - SignRelease +>>>>>>> 983e7addba (working as epxected, just a cleanup) - CompatibilityCheck - SignRelease - regression_common diff --git a/aform b/aform deleted file mode 100644 index 5f2ec319ac09..000000000000 --- 
a/aform +++ /dev/null @@ -1 +0,0 @@ -skodn asmdap skldma diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py index 01c8fe9003c7..58997bed2536 100644 --- a/tests/ci/build_download_helper.py +++ b/tests/ci/build_download_helper.py @@ -48,7 +48,6 @@ def get_build_name_for_check(check_name) -> str: def read_build_urls(build_name, reports_path) -> List[str]: for root, _, files in os.walk(reports_path): for f in files: - logging.info("Found file: %s", f) if build_name in f: logging.info("Found build report json %s", f) with open(os.path.join(root, f), "r", encoding="utf-8") as file_handler: From 64d5b55ef7ea473538fead9ee800b7035fa5a64c Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Wed, 11 Jan 2023 12:20:08 -0300 Subject: [PATCH 082/130] sha256 instead of 512 --- tests/ci/sign_release.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index bd72fb45078c..da27af72e0ef 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -14,7 +14,7 @@ def hash_file(file_path): BLOCK_SIZE = 65536 # The size of each read from the file - file_hash = hashlib.sha512() # Create the hash object, can use something other than `.sha256()` if you wish + file_hash = hashlib.sha256() # Create the hash object, can use something other than `.sha256()` if you wish with open(file_path, 'rb') as f: # Open the file to read it's bytes fb = f.read(BLOCK_SIZE) # Read from the file. Take in the amount declared above while len(fb) > 0: # While there is still data being read from the file From 42d46b736b598d468f1ef754c40e0f3a25cab8f0 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Thu, 12 Jan 2023 11:13:32 -0300 Subject: [PATCH 083/130] remove comment --- .github/workflows/release_branches.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index b6dff3a833fd..042995566e37 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1304,7 +1304,6 @@ jobs: with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg -# '/home/ubuntu/actions-runner/_work/ClickHouse/ClickHouse/tests/ci/tmp/packages/*.gpg' - name: Cleanup if: always() run: | From bbf76c94dd8d156a9c14cbf9c3389c9504b08e65 Mon Sep 17 00:00:00 2001 From: Arthur Passos Date: Thu, 19 Jan 2023 13:33:55 -0300 Subject: [PATCH 084/130] Add comment specifying files --- tests/ci/sign_release.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index da27af72e0ef..18efa057aa15 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -59,6 +59,7 @@ def main(): " ", "_" ).replace("(", "_").replace(")", "_").replace(",", "_") + # downloads `package_release` artifacts generated download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) for f in os.listdir(TEMP_PATH): @@ -69,6 +70,22 @@ def main(): s3_helper.upload_build_file_to_s3(signed_file_path, s3_path) print(f'Uploaded file {signed_file_path} to {s3_path}') + # Signed hashes are: + # clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-client-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-keeper-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-client_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # 
clickhouse-client-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-library-bridge.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-odbc-bridge.sha512.gpg + # clickhouse-common-static-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-server-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # clickhouse-common-static-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-keeper_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-server-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-keeper-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse.sha512.gpg + sys.exit(0) if __name__ == "__main__": From ae171d4b18ad538b6678dcd55600f6a8e2bdf1dc Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 31 May 2023 09:30:33 -0700 Subject: [PATCH 085/130] changing 512 to 256 --- tests/ci/sign_release.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 18efa057aa15..29aba4196250 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -21,7 +21,7 @@ def hash_file(file_path): file_hash.update(fb) # Update the hash fb = f.read(BLOCK_SIZE) # Read the next block from the file - hash_file_path = file_path + '.sha512' + hash_file_path = file_path + '.sha256' with open(hash_file_path, 'x') as f: digest = file_hash.hexdigest() f.write(digest) From 10c29f1db22f4213dd989816f9281a359cac005a Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 31 May 2023 09:57:28 -0700 Subject: [PATCH 086/130] merge fix --- .github/workflows/release_branches.yml | 300 ------------------------- 1 file changed, 300 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 042995566e37..32e8faaea2ed 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -982,296 +982,6 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log -<<<<<<< HEAD -======= - ############################################################################################ - ##################################### Docker images ####################################### - ############################################################################################ - DockerServerImages: - needs: - - BuilderDebRelease - # - BuilderDebAarch64 - currently we do not build aarch images - runs-on: [self-hosted, style-checker] - steps: - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - with: - fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building - run: | - cd 
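Editor's note: patches 082 and 085 above complete the SHA-512 to SHA-256 switch in two steps; patch 082 changes the algorithm and patch 085 the file suffix, so between the two the `.sha512` files briefly carried SHA-256 digests (the comment added in patch 084 still lists `.sha512.gpg` names). As a hedged aside, on Python 3.11+ the block-wise loop could be replaced by `hashlib.file_digest`; the runners' Python version is not stated in these patches, so this is a sketch only.

```python
import hashlib

def hash_file(file_path: str) -> str:
    # Equivalent to the manual 65536-byte read loop in sign_release.py.
    with open(file_path, "rb") as f:
        digest = hashlib.file_digest(f, "sha256").hexdigest()
    hash_file_path = file_path + ".sha256"
    with open(hash_file_path, "x") as out:  # "x" refuses to overwrite, as in the original
        out.write(digest)
    return hash_file_path
```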
"$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push - python3 docker_server.py --release-type head --no-push --no-ubuntu \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################ - ##################################### BUILD REPORTER ####################################### - ############################################################################################ - BuilderReport: - needs: - - BuilderDebRelease - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check (actions) - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - MarkReleaseReady: - needs: - - BuilderDebRelease - # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Mark Commit Release Ready - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 mark_release_ready.py - ############################################################################################## - ########################### FUNCTIONAl STATELESS TESTS ####################################### - ############################################################################################## - FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, actions) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker 
rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################## - ############################ FUNCTIONAl STATEFUL TESTS ####################################### - ############################################################################################## - FunctionalStatefulTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release, actions) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################# - ############################# INTEGRATION TESTS ############################################# - ############################################################################################# - IntegrationTestsRelease0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release, actions) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release, actions) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Integration test - run: 
| - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################# - ############################### TESTFLOWS TESTS ############################################# - ############################################################################################# - TestFlowsTestsRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/testflows_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=TestFlows tests (release, actions) - REPO_COPY=${{runner.temp}}/testflows_tests_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: TestFlows test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 testflows_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: - sudo rm -fr "$TEMP_PATH" ->>>>>>> 983e7addba (working as epxected, just a cleanup) SignRelease: needs: [BuilderDebRelease] runs-on: [ self-hosted ] @@ -1318,7 +1028,6 @@ jobs: - DockerHubPush - DockerServerImages - BuilderReport -<<<<<<< HEAD # - BuilderSpecialReport - MarkReleaseReady - FunctionalStatelessTestRelease @@ -1327,15 +1036,6 @@ jobs: # - FunctionalStatefulTestAarch64 - IntegrationTestsRelease0 - IntegrationTestsRelease1 -======= - - MarkReleaseReady - - FunctionalStatelessTestRelease - - FunctionalStatefulTestRelease - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - TestFlowsTestsRelease - - SignRelease ->>>>>>> 983e7addba (working as epxected, just a cleanup) - CompatibilityCheck - SignRelease - regression_common From dba9adf77ef74cb81a5491af1a369fd7b78b678a Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 2 Jun 2023 09:11:14 -0700 Subject: [PATCH 087/130] Update env_helper.py --- tests/ci/env_helper.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 13a7a1ffd7e5..8c87ffa07186 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -11,6 +11,8 @@ CACHES_PATH = os.getenv("CACHES_PATH", TEMP_PATH) CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN") +GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") +GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "") GITHUB_JOB = os.getenv("GITHUB_JOB", "local") GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "Altinity/ClickHouse") From cf062bdc74cf3e8e3a750489cdc88e4569ea155c Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Mon, 5 Jun 2023 12:33:06 -0700 Subject: [PATCH 088/130] Update env_helper.py --- tests/ci/env_helper.py | 2 -- 1 file 
changed, 2 deletions(-) diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 8c87ffa07186..13a7a1ffd7e5 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -11,8 +11,6 @@ CACHES_PATH = os.getenv("CACHES_PATH", TEMP_PATH) CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN") -GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") -GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "") GITHUB_JOB = os.getenv("GITHUB_JOB", "local") GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "Altinity/ClickHouse") From b860787a376bd73f178c0aaf1de010b2eb410306 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Mon, 5 Jun 2023 12:33:58 -0700 Subject: [PATCH 089/130] Update sign_release.py --- tests/ci/sign_release.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 29aba4196250..2c44e0a928f2 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -2,12 +2,14 @@ import sys import os import logging -from env_helper import GPG_BINARY_SIGNING_KEY, GPG_BINARY_SIGNING_PASSPHRASE, TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH from s3_helper import S3Helper from pr_info import PRInfo from build_download_helper import download_builds_filter import hashlib +GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") +GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") CHECK_NAME = "Sign release (actions)" From 06271fb1cc1337383191177f67f363d29be00ff3 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 6 Jun 2023 07:34:04 -0700 Subject: [PATCH 090/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 32e8faaea2ed..35f8afa40dc6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,6 +6,7 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + REGRESSION_RESULTS_URL: altinity-build-artifacts/${{github.event.number}}/$GITHUB_SHA on: # yamllint disable-line rule:truthy @@ -153,8 +154,9 @@ jobs: - name: Build run: | sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + mkdir -p "$TEMP_PATH/build_check/package_release" + cd .. 
&& tar czf $TEMP_PATH/build_source.src.tar.gz ClickHouse/ + cd $TEMP_PATH && tar xvzf $TEMP_PATH/build_source.src.tar.gz cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} @@ -583,7 +585,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, example, extended_precision_datatypes, kafka, kerberos, lightweight_delete, map_type, rbac, selects, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, map_type, part_moves_between_shards, rbac, selects, ssl_server, tiered_storage, window_functions] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: From fac3bc6ee162be92641dc9cbdcb9e89988582c27 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 8 Jun 2023 07:30:35 -0700 Subject: [PATCH 091/130] Update build_check.py --- tests/ci/build_check.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index f544d3955760..a0315776421e 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -375,6 +375,18 @@ def main(): print(f"::notice ::Log URL: {log_url}") + src_path = os.path.join(TEMP_PATH, "build_source.src.tar.gz") + + if os.path.exists(src_path): + src_url = s3_helper.upload_build_file_to_s3( + src_path, s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz" + ) + logging.info("Source tar %s", src_url) + else: + logging.info("Source tar doesn't exist") + + print(f"::notice ::Source tar URL: {src_url}") + create_json_artifact( TEMP_PATH, build_name, log_url, build_urls, build_config, elapsed, success ) From 230448488b444516a546c7595fe5bc7f0d8e5205 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 14 Jun 2023 19:12:28 +0200 Subject: [PATCH 092/130] Updated DockerServerImages --- .github/workflows/backport_branches.yml | 6 +++--- .github/workflows/master.yml | 6 +++--- .github/workflows/release_branches.yml | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 4dc1b0eb1c21..e39a14db0cb0 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -450,13 +450,13 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + --image-repo altinity/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 4bec636dfd63..fa5f20cbeb58 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -895,13 +895,13 @@ jobs: with: 
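Editor's note: patch 091 above uploads the source tarball created by patch 090's `tar czf` step, but the `print(f"::notice ::Source tar URL: {src_url}")` sits outside the `if os.path.exists(src_path)` branch, so a missing tarball would raise NameError on `src_url` before the notice is printed. A defensive variant is sketched below; the helper call and names are taken from the hunk above, the function wrapper is added for illustration.

```python
import logging
import os
from typing import Optional

def upload_source_tarball(temp_path: str, s3_helper, s3_path_prefix: str,
                          version_string: str) -> Optional[str]:
    src_path = os.path.join(temp_path, "build_source.src.tar.gz")
    if not os.path.exists(src_path):
        logging.info("Source tar doesn't exist")
        return None
    src_url = s3_helper.upload_build_file_to_s3(
        src_path, f"{s3_path_prefix}/clickhouse-{version_string}.src.tar.gz"
    )
    logging.info("Source tar %s", src_url)
    print(f"::notice ::Source tar URL: {src_url}")  # only reached when src_url is bound
    return src_url
```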
clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + --image-repo altinity/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 2ef7251b9fe8..7c391a537a8e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -227,13 +227,13 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + --image-repo altinity/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | From e8e4192829063abde81424d9c374549f53c608be Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 8 Jun 2023 17:49:47 +0200 Subject: [PATCH 093/130] Using ccache instead of sccache and fixed ClickHouseVersion.get_version_from_string --- docker/packager/packager | 4 ++-- tests/ci/build_check.py | 35 ++++++++++++++++++++++++++++++++++- tests/ci/version_helper.py | 15 ++++++++++++--- 3 files changed, 48 insertions(+), 6 deletions(-) diff --git a/docker/packager/packager b/docker/packager/packager index 7e9898ce515b..d3348f739e77 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -97,13 +97,13 @@ def run_docker_image_with_env( else: user = f"{os.geteuid()}:{os.getegid()}" - ccache_mount = f"--volume={ccache_dir}:/ccache" + ccache_mount = f" --volume={ccache_dir}:/ccache" if ccache_dir is None: ccache_mount = "" cmd = ( f"docker run --network=host --user={user} --rm {ccache_mount}" - f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} " + f" --volume={output_dir}:/output --volume={ch_root}:/build {env_part} " f"{interactive} {image_name}" ) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index e55b5e406466..fec68341cd85 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -6,11 +6,13 @@ import os import sys import time +from shutil import rmtree from typing import List, Tuple from ci_config import CI_CONFIG, BuildConfig from docker_pull_helper import get_image_with_version from env_helper import ( + CACHES_PATH, GITHUB_JOB, IMAGES_PATH, REPO_COPY, @@ -28,6 +30,10 @@ get_version_from_repo, update_version_local, ) +from ccache_utils import get_ccache_if_not_exists, upload_ccache +from ci_config import CI_CONFIG, BuildConfig +from docker_pull_helper import get_image_with_version +from tee_popen import TeePopen IMAGE_NAME = "altinityinfra/binary-builder" BUILD_LOG_NAME = "build_log.log" @@ -53,6 +59,7 @@ def get_packager_cmd( output_path: 
str, build_version: str, image_version: str, + ccache_path: str, official: bool, ) -> str: package_type = build_config["package_type"] @@ -71,7 +78,9 @@ def get_packager_cmd( if build_config["tidy"] == "enable": cmd += " --clang-tidy" - cmd += " --cache=sccache" + # NOTE(vnemkov): we are going to continue to use ccache for now + cmd += " --cache=ccache" + cmd += f" --ccache-dir={ccache_path}" cmd += " --s3-rw-access" cmd += f" --s3-bucket={S3_BUILDS_BUCKET}" @@ -293,12 +302,31 @@ def main(): if not os.path.exists(build_output_path): os.makedirs(build_output_path) + # NOTE(vnemkov): since we still want to use CCACHE over SCCACHE, unlike upstream, + # So we need to create local directory for that, just as with 22.8 + ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache") + + logging.info("Will try to fetch cache for our build") + try: + get_ccache_if_not_exists( + ccache_path, s3_helper, pr_info.number, TEMP_PATH, pr_info.release_pr + ) + except Exception as e: + # In case there are issues with ccache, remove the path and do not fail a build + logging.info("Failed to get ccache, building without it. Error: %s", e) + rmtree(ccache_path, ignore_errors=True) + + if not os.path.exists(ccache_path): + logging.info("cache was not fetched, will create empty dir") + os.makedirs(ccache_path) + packager_cmd = get_packager_cmd( build_config, os.path.join(REPO_COPY, "docker/packager"), build_output_path, version.string, image_version, + ccache_path, official_flag, ) @@ -314,6 +342,7 @@ def main(): subprocess.check_call( f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True ) + subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True) logging.info("Build finished with %s, log path %s", success, log_path) if not success: # We check if docker works, because if it's down, it's infrastructure @@ -325,6 +354,10 @@ def main(): ) sys.exit(1) + # Upload the ccache first to have the least build time in case of problems + logging.info("Will upload cache") + upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH) + # FIXME performance performance_urls = [] performance_path = os.path.join(build_output_path, "performance.tgz") diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index b30d6c4b4add..fe2fee70fd51 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -46,7 +46,7 @@ def __init__( revision: Union[int, str], git: Optional[Git], tweak: str = None, - flavour: str = None, + flavour: Optional[str] = None, ): self._major = int(major) self._minor = int(minor) @@ -233,8 +233,17 @@ def get_version_from_string( version: str, git: Optional[Git] = None ) -> ClickHouseVersion: validate_version(version) - parts = version.split(".") - return ClickHouseVersion(parts[0], parts[1], parts[2], -1, git, parts[3], parts[4] if len(parts) >= 4 else None) + # dict for simple handling of missing parts with parts.get(index, default) + parts = dict(enumerate(version.split("."))) + return ClickHouseVersion( + parts[0], + parts[1], + parts[2], + -1, + git, + parts.get(3, None), + parts.get(4, None) + ) def get_version_from_tag(tag: str) -> ClickHouseVersion: From 0713b495d10c1252fb51d54400354bbd40a67c0a Mon Sep 17 00:00:00 2001 From: Alexander Gololobov <440544+davenger@users.noreply.github.com> Date: Sat, 1 Apr 2023 11:39:34 +0200 Subject: [PATCH 094/130] Merge pull request #48303 from ClickHouse/grouping-sets-repeated-keys Do not remove inputs from maybe compiled DAG. 
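The failure needs the expression JIT to kick in: compileExpressions() used to call removeUnusedActions() with input removal allowed, so a DAG whose grouping sets repeat equivalent keys could lose an input column that the compiled function still referenced. A minimal sketch of the failing pattern, in the spirit of the test added below; the two settings are an assumption about how to force compilation early on a JIT-enabled build (USE_EMBEDDED_COMPILER) and are not part of this patch:

    -- Assumed knobs to make compileExpressions() run right away on a JIT build.
    SET compile_expressions = 1, min_count_to_compile_expression = 0;

    -- The same key appears in several equivalent spellings inside one grouping
    -- set; before this fix the compiled DAG could drop the shared `number` input.
    SELECT count()
    FROM numbers(2)
    GROUP BY GROUPING SETS (
        (number, number + 0),
        (number / 2, number / 2));

Each grouping set still yields two groups over numbers(2), so the query should return four rows of 1.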
--- src/Interpreters/ActionsDAG.cpp | 2 +- .../0_stateless/02705_grouping_keys_equal_keys.reference | 6 ++++++ .../queries/0_stateless/02705_grouping_keys_equal_keys.sql | 7 +++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 tests/queries/0_stateless/02705_grouping_keys_equal_keys.reference create mode 100644 tests/queries/0_stateless/02705_grouping_keys_equal_keys.sql diff --git a/src/Interpreters/ActionsDAG.cpp b/src/Interpreters/ActionsDAG.cpp index d5e1f332b50a..ba950890439e 100644 --- a/src/Interpreters/ActionsDAG.cpp +++ b/src/Interpreters/ActionsDAG.cpp @@ -886,7 +886,7 @@ ActionsDAGPtr ActionsDAG::clone() const void ActionsDAG::compileExpressions(size_t min_count_to_compile_expression, const std::unordered_set & lazy_executed_nodes) { compileFunctions(min_count_to_compile_expression, lazy_executed_nodes); - removeUnusedActions(); + removeUnusedActions(/*allow_remove_inputs = */ false); } #endif diff --git a/tests/queries/0_stateless/02705_grouping_keys_equal_keys.reference b/tests/queries/0_stateless/02705_grouping_keys_equal_keys.reference new file mode 100644 index 000000000000..a9e2f17562ae --- /dev/null +++ b/tests/queries/0_stateless/02705_grouping_keys_equal_keys.reference @@ -0,0 +1,6 @@ +1 +1 +1 +1 +1 +1 diff --git a/tests/queries/0_stateless/02705_grouping_keys_equal_keys.sql b/tests/queries/0_stateless/02705_grouping_keys_equal_keys.sql new file mode 100644 index 000000000000..fcf5b4d2ce53 --- /dev/null +++ b/tests/queries/0_stateless/02705_grouping_keys_equal_keys.sql @@ -0,0 +1,7 @@ +SELECT count() +FROM numbers(2) +GROUP BY +GROUPING SETS ( + (number, number + 0, number + 1), + (number % 1048576, number % -9223372036854775808), + (number / 2, number / 2)); From a3428f04905099a28bbeb6de2758086e6e665f2d Mon Sep 17 00:00:00 2001 From: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> Date: Tue, 27 Dec 2022 16:59:42 +0100 Subject: [PATCH 095/130] Merge pull request #44469 from Avogar/low-card-if Don't execute and/or/if/multiIf on LowCardinality dictionary --- src/Functions/FunctionsLogical.h | 1 + src/Functions/if.cpp | 1 + src/Functions/multiIf.cpp | 1 + .../0_stateless/02514_if_with_lazy_low_cardinality.reference | 1 + .../0_stateless/02514_if_with_lazy_low_cardinality.sql | 5 +++++ .../02515_and_or_if_multiif_not_return_lc.reference | 4 ++++ .../0_stateless/02515_and_or_if_multiif_not_return_lc.sql | 5 +++++ 7 files changed, 18 insertions(+) create mode 100644 tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.reference create mode 100644 tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.sql create mode 100644 tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.reference create mode 100644 tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.sql diff --git a/src/Functions/FunctionsLogical.h b/src/Functions/FunctionsLogical.h index 140981faf9f3..e8e0494c54fc 100644 --- a/src/Functions/FunctionsLogical.h +++ b/src/Functions/FunctionsLogical.h @@ -164,6 +164,7 @@ class FunctionAnyArityLogical : public IFunction ColumnPtr executeShortCircuit(ColumnsWithTypeAndName & arguments, const DataTypePtr & result_type) const; bool isSuitableForShortCircuitArgumentsExecution(const DataTypesWithConstInfo & /*arguments*/) const override { return false; } size_t getNumberOfArguments() const override { return 0; } + bool canBeExecutedOnLowCardinalityDictionary() const override { return false; } bool useDefaultImplementationForNulls() const override { return !Impl::specialImplementationForNulls(); } diff --git 
a/src/Functions/if.cpp b/src/Functions/if.cpp index d7fefb1ad0ec..6203f22f275f 100644 --- a/src/Functions/if.cpp +++ b/src/Functions/if.cpp @@ -1023,6 +1023,7 @@ class FunctionIf : public FunctionIfBase } bool isSuitableForShortCircuitArgumentsExecution(const DataTypesWithConstInfo & /*arguments*/) const override { return false; } ColumnNumbers getArgumentsThatDontImplyNullableReturnType(size_t /*number_of_arguments*/) const override { return {0}; } + bool canBeExecutedOnLowCardinalityDictionary() const override { return false; } /// Get result types by argument types. If the function does not apply to these arguments, throw an exception. DataTypePtr getReturnTypeImpl(const DataTypes & arguments) const override diff --git a/src/Functions/multiIf.cpp b/src/Functions/multiIf.cpp index 6fc722e32f4f..d3c88d9a0f9b 100644 --- a/src/Functions/multiIf.cpp +++ b/src/Functions/multiIf.cpp @@ -50,6 +50,7 @@ class FunctionMultiIf final : public FunctionIfBase bool isSuitableForShortCircuitArgumentsExecution(const DataTypesWithConstInfo & /*arguments*/) const override { return false; } size_t getNumberOfArguments() const override { return 0; } bool useDefaultImplementationForNulls() const override { return false; } + bool canBeExecutedOnLowCardinalityDictionary() const override { return false; } ColumnNumbers getArgumentsThatDontImplyNullableReturnType(size_t number_of_arguments) const override { diff --git a/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.reference b/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.reference new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.reference @@ -0,0 +1 @@ +1 diff --git a/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.sql b/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.sql new file mode 100644 index 000000000000..80e3c0a9ecec --- /dev/null +++ b/tests/queries/0_stateless/02514_if_with_lazy_low_cardinality.sql @@ -0,0 +1,5 @@ +create table if not exists t (`arr.key` Array(LowCardinality(String)), `arr.value` Array(LowCardinality(String))) engine = Memory; +insert into t (`arr.key`, `arr.value`) values (['a'], ['b']); +select if(true, if(lowerUTF8(arr.key) = 'a', 1, 2), 3) as x from t left array join arr; +drop table t; + diff --git a/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.reference b/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.reference new file mode 100644 index 000000000000..805bbdf7a597 --- /dev/null +++ b/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.reference @@ -0,0 +1,4 @@ +UInt8 +UInt8 +UInt8 +UInt8 diff --git a/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.sql b/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.sql new file mode 100644 index 000000000000..0ccccd4d9a71 --- /dev/null +++ b/tests/queries/0_stateless/02515_and_or_if_multiif_not_return_lc.sql @@ -0,0 +1,5 @@ +select toTypeName(if(toLowCardinality(number % 2), 1, 2)) from numbers(1); +select toTypeName(multiIf(toLowCardinality(number % 2), 1, 1, 2, 3)) from numbers(1); +select toTypeName(toLowCardinality(number % 2) and 2) from numbers(1); +select toTypeName(toLowCardinality(number % 2) or 2) from numbers(1); + From 7edcb6bc57537635b077b72be078a4f466498aba Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 1 Aug 2023 14:52:03 +0200 Subject: [PATCH 096/130] Fixed test 02681_final_excessive_reading_bug.sh underscore numeric literals are not supported in 22.8 
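For context, underscore digit separators in numeric literals only appear in ClickHouse releases newer than 22.8, so the backported test has to spell the threshold out. A quick illustrative sketch (the alias name is arbitrary, and the exact 22.8 error text will vary):

    -- Parses on recent ClickHouse releases, but is a syntax error on 22.8:
    SELECT 1_000_000 AS selected_rows_limit;

    -- 22.8-compatible spelling, as used in the fixed test below:
    SELECT 1000000 AS selected_rows_limit;

Both spellings denote the same literal value; only the 22.8 parser rejects the underscores.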
--- tests/queries/0_stateless/02681_final_excessive_reading_bug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/queries/0_stateless/02681_final_excessive_reading_bug.sh b/tests/queries/0_stateless/02681_final_excessive_reading_bug.sh index a795b9ec5a0b..1d89246b242a 100755 --- a/tests/queries/0_stateless/02681_final_excessive_reading_bug.sh +++ b/tests/queries/0_stateless/02681_final_excessive_reading_bug.sh @@ -19,6 +19,6 @@ $CLICKHOUSE_CLIENT --query_id="$query_id" -q "select * from sample_final FINAL S $CLICKHOUSE_CLIENT -q "SYSTEM FLUSH LOGS" $CLICKHOUSE_CLIENT --param_query_id="$query_id" -q " -SELECT ProfileEvents['SelectedRows'] < 1_000_000 +SELECT ProfileEvents['SelectedRows'] < 1000000 FROM system.query_log WHERE event_date >= yesterday() AND type = 'QueryFinish' AND query_id = {query_id:String} AND current_database = currentDatabase()" From 87286488e5bb249da60d74013ffb1229e3f33da3 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:00:13 -0700 Subject: [PATCH 097/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index bed87f627282..4174d56f6bb4 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,7 +6,9 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - REGRESSION_RESULTS_URL: altinity-build-artifacts/${{github.event.number}}/$GITHUB_SHA + REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows + REGRESSION_COMMON_COMMIT: 0bb6dd1f2814069ffae896a1164cf0610553b29b + REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 on: # yamllint disable-line rule:truthy @@ -585,7 +587,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, map_type, part_moves_between_shards, rbac, selects, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, data_types, parquet, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -597,7 +599,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: releases + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -659,6 +661,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -729,7 +732,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: releases + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -787,7 +790,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: releases + ref: ${{ 
env.REGRESSION+PARQUET_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -859,7 +862,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: releases + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -930,7 +933,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: releases + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' From 42445bbe60e2a9b07bc75f0c0b89b262aa1e36d8 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:16:16 -0700 Subject: [PATCH 098/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 420 +++++++++++++++++++------ 1 file changed, 332 insertions(+), 88 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4174d56f6bb4..75d79c5394c9 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -18,15 +18,15 @@ on: # yamllint disable-line rule:truthy - reopened - opened branches: - # Anything/22.8 (e.g customizations/22.8) - - '**/22.8*' + # Anything/23.3 (e.g customizations/23.3) + - '**/23.3*' release: types: - published - prereleased push: branches: - - 'releases/22.8**' + - 'releases/23.3**' jobs: # DockerHubPushAarch64: @@ -109,20 +109,49 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheck + - name: CompatibilityCheckX86 run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py + cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" + # CompatibilityCheckAarch64: + # needs: [BuilderDebAarch64] + # runs-on: [self-hosted, style-checker] + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/compatibility_check + # REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse + # REPORTS_PATH=${{runner.temp}}/reports_dir + # EOF + # - name: Check out repository code + # uses: ClickHouse/checkout@v1 + # with: + # clear-repository: true + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: CompatibilityCheckAarch64 + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + # cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc + # - name: Cleanup + # if: always() + # run: | + # docker ps --quiet | xargs --no-run-if-empty docker kill ||: + # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # sudo rm -fr "$TEMP_PATH" ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### @@ -169,10 +198,8 @@ jobs: - 
name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" # BuilderDebAarch64: # needs: [DockerHubPush] @@ -212,10 +239,7 @@ jobs: # - name: Cleanup # if: always() # run: | - # # shellcheck disable=SC2046 - # docker kill $(docker ps -q) ||: - # # shellcheck disable=SC2046 - # docker rm -f $(docker ps -a -q) ||: + # docker ps --quiet | xargs --no-run-if-empty docker kill ||: # sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" ############################################################################################ ##################################### Docker images ####################################### @@ -231,20 +255,18 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker altinity/clickhouse-server building + - name: Check docker altinityinfra/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head --no-push \ - --image-repo altinity/clickhouse-server --image-path docker/server + --image-repo altinityinfra/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head --no-push \ - --image-repo altinity/clickhouse-keeper --image-path docker/keeper + --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" ############################################################################################ ##################################### BUILD REPORTER ####################################### @@ -285,10 +307,8 @@ jobs: - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" # BuilderSpecialReport: # needs: @@ -325,16 +345,14 @@ jobs: # - name: Cleanup # if: always() # run: | - # # shellcheck disable=SC2046 - # docker kill $(docker ps -q) ||: - # # shellcheck disable=SC2046 - # docker rm -f $(docker ps -a -q) ||: + # docker ps --quiet | xargs --no-run-if-empty docker kill ||: # sudo rm -fr "$TEMP_PATH" MarkReleaseReady: needs: # - BuilderBinDarwin # - BuilderBinDarwinAarch64 - BuilderDebRelease + - SignRelease # - BuilderDebAarch64 runs-on: [self-hosted, style-checker] steps: @@ -346,11 +364,87 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 mark_release_ready.py +############################################################################################ +#################################### INSTALL PACKAGES ###################################### +############################################################################################ + InstallPackagesTestRelease: + needs: [SignRelease] + runs-on: [self-hosted, style-checker] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/test_install + 
REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Install packages (amd64) + REPO_COPY=${{runner.temp}}/test_install/ClickHouse + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Check out repository code + uses: ClickHouse/checkout@v1 + with: + clear-repository: true + - name: Test packages installation + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" + # InstallPackagesTestAarch64: + # needs: [BuilderDebAarch64] + # runs-on: [self-hosted, style-checker-aarch64] + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/test_install + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=Install packages (arm64) + # REPO_COPY=${{runner.temp}}/test_install/ClickHouse + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: ClickHouse/checkout@v1 + # with: + # clear-repository: true + # - name: Test packages installation + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + # cd "$REPO_COPY/tests/ci" + # python3 install_check.py "$CHECK_NAME" + # - name: Cleanup + # if: always() + # run: | + # docker ps --quiet | xargs --no-run-if-empty docker kill ||: + # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # sudo rm -fr "$TEMP_PATH" + + tests_start: + ## Do-nothing stage to trigger tests, makes is easier to + needs: [InstallPackagesTestRelease] + runs-on: ubuntu-latest + steps: + - run: true ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] + needs: [tests_start] runs-on: [self-hosted, func-tester] steps: - name: Set envs @@ -380,10 +474,8 @@ jobs: - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" # FunctionalStatelessTestAarch64: # needs: [BuilderDebAarch64] @@ -425,7 +517,7 @@ jobs: ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: - needs: [BuilderDebRelease] + needs: [tests_start] runs-on: [self-hosted, func-tester] steps: - name: Set envs @@ -455,10 +547,8 @@ jobs: - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" # FunctionalStatefulTestAarch64: # needs: 
[BuilderDebAarch64] @@ -500,7 +590,7 @@ jobs: ############################# INTEGRATION TESTS ############################################# ############################################################################################# IntegrationTestsRelease0: - needs: [BuilderDebRelease] + needs: [tests_start] runs-on: [self-hosted, stress-tester] steps: - name: Set envs @@ -531,13 +621,11 @@ jobs: - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease1: - needs: [BuilderDebRelease] + needs: [tests_start] runs-on: [self-hosted, stress-tester] steps: - name: Set envs @@ -568,17 +656,15 @@ jobs: - name: Cleanup if: always() run: | - # shellcheck disable=SC2046 - docker kill $(docker ps -q) ||: - # shellcheck disable=SC2046 - docker rm -f $(docker ps -a -q) ||: + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" ############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# regression_start: ## Not depending on the tests above since they can fail at any given moment. - needs: [BuilderDebRelease] + needs: [tests_start] runs-on: ubuntu-latest steps: - run: true @@ -587,7 +673,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, data_types, parquet, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -599,13 +685,13 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} + refs: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=${{ matrix.SUITE }} - artifacts=public + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -624,13 +710,11 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" 
version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -661,14 +745,14 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} + refs: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ontime_benchmark STORAGE=/${{ matrix.STORAGE }} - artifacts=public + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -695,13 +779,11 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -716,6 +798,64 @@ jobs: ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log + clickhouse_keeper_ssl: + needs: [regression_start] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + refs: ${{ env.REGRESSION_COMMON_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper_ssl_fips + STORAGE=/${{ matrix.STORAGE }} + artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" 
job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + ldap: strategy: fail-fast: false @@ -738,7 +878,7 @@ jobs: cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ldap/${{ matrix.SUITE }} - artifacts=public + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -757,13 +897,11 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -790,13 +928,71 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION+PARQUET_COMMIT }} + ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=parquet + STORAGE=/no_s3 + artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + parquet_minio: + needs: [regression_start] + runs-on: 
[self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=parquet - artifacts=public + STORAGE=/minio + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -815,27 +1011,78 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log --storage minio + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-minio-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + parquet_aws: + needs: [regression_start] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=parquet + STORAGE=/aws_s3 + artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log --storage aws_s3 --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} --aws-s3-key-id 
${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --storage gcs - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-artifacts + name: ${{ env.SUITE }}-aws_s3-artifacts path: | ./report.html ./*.log.txt @@ -869,7 +1116,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=s3 STORAGE=/${{ matrix.STORAGE }} - artifacts=public + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -888,7 +1135,7 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log --storage ${{ matrix.STORAGE }} --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} @@ -901,8 +1148,6 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -916,7 +1161,7 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - + tiered_storage_s3: strategy: fail-fast: false @@ -940,7 +1185,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=tiered_storage STORAGE=/${{ matrix.STORAGE }} - artifacts=public + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -959,7 +1204,7 @@ jobs: --collect-service-logs --output classic --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="x86_64" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} @@ -971,8 +1216,6 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ 
env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 if: always() with: @@ -1046,7 +1289,8 @@ jobs: - regression_common - benchmark - ldap - - parquet + - parquet_minio + - parquet_aws - s3 - tiered_storage_s3 runs-on: [self-hosted, style-checker] From 82dc9b3195123839677ce277ed3548636ee13068 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:22:20 -0700 Subject: [PATCH 099/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 192 +++++++------------------ 1 file changed, 53 insertions(+), 139 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 75d79c5394c9..43a473cc724d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -18,15 +18,15 @@ on: # yamllint disable-line rule:truthy - reopened - opened branches: - # Anything/23.3 (e.g customizations/23.3) - - '**/23.3*' + # Anything/22.8 (e.g customizations/22.8) + - '**/22.8*' release: types: - published - prereleased push: branches: - - 'releases/23.3**' + - 'releases/22.8**' jobs: # DockerHubPushAarch64: @@ -109,49 +109,20 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 + - name: CompatibilityCheck run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions + cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" - # CompatibilityCheckAarch64: - # needs: [BuilderDebAarch64] - # runs-on: [self-hosted, style-checker] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/compatibility_check - # REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - # REPORTS_PATH=${{runner.temp}}/reports_dir - # EOF - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # - name: Download json reports - # uses: actions/download-artifact@v3 - # with: - # path: ${{ env.REPORTS_PATH }} - # - name: CompatibilityCheckAarch64 - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - # - name: Cleanup - # if: always() - # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: - # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - # sudo rm -fr "$TEMP_PATH" ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### @@ -198,8 +169,10 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 
+ docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" # BuilderDebAarch64: # needs: [DockerHubPush] @@ -239,7 +212,10 @@ jobs: # - name: Cleanup # if: always() # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: # sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" ############################################################################################ ##################################### Docker images ####################################### @@ -255,18 +231,20 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker altinityinfra/clickhouse-server building + - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head --no-push \ - --image-repo altinityinfra/clickhouse-server --image-path docker/server + --image-repo altinity/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head --no-push \ - --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper + --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" ############################################################################################ ##################################### BUILD REPORTER ####################################### @@ -307,8 +285,10 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" # BuilderSpecialReport: # needs: @@ -345,14 +325,16 @@ jobs: # - name: Cleanup # if: always() # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: # sudo rm -fr "$TEMP_PATH" MarkReleaseReady: needs: # - BuilderBinDarwin # - BuilderBinDarwinAarch64 - BuilderDebRelease - - SignRelease # - BuilderDebAarch64 runs-on: [self-hosted, style-checker] steps: @@ -364,87 +346,11 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 mark_release_ready.py -############################################################################################ -#################################### INSTALL PACKAGES ###################################### -############################################################################################ - InstallPackagesTestRelease: - needs: [SignRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out 
repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - # InstallPackagesTestAarch64: - # needs: [BuilderDebAarch64] - # runs-on: [self-hosted, style-checker-aarch64] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/test_install - # REPORTS_PATH=${{runner.temp}}/reports_dir - # CHECK_NAME=Install packages (arm64) - # REPO_COPY=${{runner.temp}}/test_install/ClickHouse - # EOF - # - name: Download json reports - # uses: actions/download-artifact@v3 - # with: - # path: ${{ env.REPORTS_PATH }} - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # - name: Test packages installation - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 install_check.py "$CHECK_NAME" - # - name: Cleanup - # if: always() - # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: - # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - # sudo rm -fr "$TEMP_PATH" - - tests_start: - ## Do-nothing stage to trigger tests, makes is easier to - needs: [InstallPackagesTestRelease] - runs-on: ubuntu-latest - steps: - - run: true ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## FunctionalStatelessTestRelease: - needs: [tests_start] + needs: [BuilderDebRelease] runs-on: [self-hosted, func-tester] steps: - name: Set envs @@ -474,8 +380,10 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" # FunctionalStatelessTestAarch64: # needs: [BuilderDebAarch64] @@ -517,7 +425,7 @@ jobs: ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: - needs: [tests_start] + needs: [BuilderDebRelease] runs-on: [self-hosted, func-tester] steps: - name: Set envs @@ -547,8 +455,10 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" # FunctionalStatefulTestAarch64: # needs: [BuilderDebAarch64] @@ -590,7 +500,7 @@ jobs: ############################# INTEGRATION TESTS ############################################# ############################################################################################# IntegrationTestsRelease0: - needs: 
[tests_start] + needs: [BuilderDebRelease] runs-on: [self-hosted, stress-tester] steps: - name: Set envs @@ -621,11 +531,13 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease1: - needs: [tests_start] + needs: [BuilderDebRelease] runs-on: [self-hosted, stress-tester] steps: - name: Set envs @@ -656,15 +568,17 @@ jobs: - name: Cleanup if: always() run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" ############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# regression_start: ## Not depending on the tests above since they can fail at any given moment. - needs: [tests_start] + needs: [BuilderDebRelease] runs-on: ubuntu-latest steps: - run: true From e0fc3720f0512fdaba6195542f4c51701e627651 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:23:21 -0700 Subject: [PATCH 100/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 43a473cc724d..0297d5261037 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1202,7 +1202,9 @@ jobs: - SignRelease - regression_common - benchmark + - clickhouse_keeper_ssl - ldap + - parquet - parquet_minio - parquet_aws - s3 From 71848dcccb537bed49546f921945c0e85f545fa4 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:06:44 -0700 Subject: [PATCH 101/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0297d5261037..9c70ed770dcc 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -7,7 +7,7 @@ env: AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows - REGRESSION_COMMON_COMMIT: 0bb6dd1f2814069ffae896a1164cf0610553b29b + REGRESSION_COMMON_COMMIT: 1108c52f249af64885c255f39192ba3cc4c145ab REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 @@ -729,8 +729,8 @@ jobs: run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper_ssl_fips - STORAGE=/${{ matrix.STORAGE }} + SUITE=clickhouse_keeper + STORAGE=/ssl artifacts=builds EOF - name: Download json reports @@ -849,7 +849,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=parquet STORAGE=/no_s3 - artifacts=builds + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -876,7 +876,7 @@ 
jobs: - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-artifacts + name: ${{ env.SUITE }}-${{ env.STORAGE }}-artifacts path: | ./report.html ./*.log.txt @@ -906,7 +906,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=parquet STORAGE=/minio - artifacts=builds + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -964,7 +964,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=parquet STORAGE=/aws_s3 - artifacts=builds + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 From f97c1839235f3ad6296be2c6d46fa25a8da43479 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 29 Aug 2023 09:40:36 -0700 Subject: [PATCH 102/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9c70ed770dcc..82fcdcbf3c35 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -599,7 +599,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - refs: ${{ env.REGRESSION_COMMON_COMMIT }} + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -659,7 +659,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - refs: ${{ env.REGRESSION_COMMON_COMMIT }} + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -724,7 +724,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - refs: ${{ env.REGRESSION_COMMON_COMMIT }} + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' @@ -876,7 +876,7 @@ jobs: - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-${{ env.STORAGE }}-artifacts + name: ${{ env.SUITE }}-artifacts path: | ./report.html ./*.log.txt From a5d3a860d838a60433343f1f8f97009c78265cb9 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 30 Aug 2023 11:18:54 -0700 Subject: [PATCH 103/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 58 +++++++++++++++++++++++++- 1 file changed, 57 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 82fcdcbf3c35..ac846e07b9fc 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -9,6 +9,7 @@ env: REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows REGRESSION_COMMON_COMMIT: 1108c52f249af64885c255f39192ba3cc4c145ab REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 + REGRESSION_KEY_VALUE_COMMIT: e072060fba19d3f81a96f4c5cbe9c5d0b1dcfa9d on: # yamllint disable-line rule:truthy @@ -587,7 +588,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, 
extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [regression_start] runs-on: [self-hosted, stress-tester] env: @@ -769,6 +770,61 @@ jobs: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log + key_value: + needs: [regression_start] + runs-on: [self-hosted, stress-tester] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=key_value + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log ldap: strategy: From 754c42cf5fe2d014d058558ddd2ea57bdea693e1 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 30 Aug 2023 11:25:19 -0700 Subject: [PATCH 104/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ac846e07b9fc..ef9ee34ae029 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1259,6 +1259,7 @@ jobs: - regression_common - benchmark - clickhouse_keeper_ssl + - key_value - ldap - parquet - parquet_minio From 51bc97cf5dac881c3bd8f689c51d58e4e6e39fa2 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 30 Aug 2023 21:34:52 -0700 Subject: [PATCH 105/130] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ef9ee34ae029..0c7db644d025 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -782,7 +782,7 @@ jobs: uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression - ref: ${{ 
env.REGRESSION_PARQUET_COMMIT }} + ref: ${{ env.REGRESSION_KEY_VALUE_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' From bdea6d65903dac101b333039fff3a31a5c8f8eba Mon Sep 17 00:00:00 2001 From: filimonov <1549571+filimonov@users.noreply.github.com> Date: Wed, 20 Nov 2024 21:55:30 +0100 Subject: [PATCH 106/130] respect prefer_localhost_replica=0 in parallel_distributed_insert_select

With prefer_localhost_replica=0 the local shard is no longer special-cased into a local InterpreterInsertQuery; its insert goes through the same remote path as any other shard.

--- src/Storages/StorageDistributed.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Storages/StorageDistributed.cpp b/src/Storages/StorageDistributed.cpp index cc55b2ae271e..02b9751ae24b 100644 --- a/src/Storages/StorageDistributed.cpp +++ b/src/Storages/StorageDistributed.cpp @@ -838,7 +838,7 @@ std::optional StorageDistributed::distributedWriteBetweenDistribu for (size_t shard_index : collections::range(0, shards_info.size())) { const auto & shard_info = shards_info[shard_index]; - if (shard_info.isLocal()) + if (shard_info.isLocal() && settings.prefer_localhost_replica) { InterpreterInsertQuery interpreter(new_query, query_context); pipeline.addCompletedPipeline(interpreter.execute().pipeline); From 5980c5ee9eaa9ca29fbaa030a03f4bfa00448d48 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 22 Jan 2025 14:36:11 +0100 Subject: [PATCH 107/130] Updated runner labels for the jobs --- .github/workflows/release_branches.yml | 48 +++++++++++++------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0c7db644d025..661cb410906a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -31,7 +31,7 @@ on: # yamllint disable-line rule:truthy jobs: # DockerHubPushAarch64: - # runs-on: [self-hosted, style-checker-aarch64] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none-aarch64] # steps: # - name: Check out repository code # uses: ClickHouse/checkout@v1 @@ -46,7 +46,7 @@ jobs: # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json # Former DockerHubPushAmd64 DockerHubPush: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -68,7 +68,7 @@ jobs: path: ${{ runner.temp }}/docker_images_check/changed_images.json # DockerHubPush: # needs: [DockerHubPushAmd64, DockerHubPushAarch64] - # runs-on: [self-hosted, style-checker] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # steps: # - name: Check out repository code # uses: ClickHouse/checkout@v1 @@ -93,7 +93,7 @@ jobs: # path: ${{ runner.temp }}/changed_images.json CompatibilityCheck: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -129,7 +129,7 @@ jobs: ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] + runs-on: [self-hosted, altinity-type-ccx53, altinity-on-demand, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, 
altinity-setup-none] steps: - name: Set envs run: | @@ -225,7 +225,7 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -254,7 +254,7 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] if: ${{ success() || failure() }} steps: - name: Set envs @@ -295,7 +295,7 @@ jobs: # needs: # # - BuilderBinDarwin # - BuilderBinDarwinAarch64 - # runs-on: [self-hosted, style-checker] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # if: ${{ success() || failure() }} # steps: # - name: Set envs @@ -337,7 +337,7 @@ jobs: # - BuilderBinDarwinAarch64 - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -352,7 +352,7 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash,altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -427,7 +427,7 @@ jobs: ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash,altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -502,7 +502,7 @@ jobs: ############################################################################################# IntegrationTestsRelease0: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -539,7 +539,7 @@ jobs: sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease1: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -590,7 +590,7 @@ jobs: matrix: SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, 
altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -650,7 +650,7 @@ jobs: matrix: STORAGE: [minio, aws_s3, gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -715,7 +715,7 @@ jobs: clickhouse_keeper_ssl: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -772,7 +772,7 @@ jobs: ./*/*/_instances/*.log key_value: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -832,7 +832,7 @@ jobs: matrix: SUITE: [authentication, external_user_directory, role_mapping] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -888,7 +888,7 @@ jobs: parquet: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -945,7 +945,7 @@ jobs: parquet_minio: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1003,7 +1003,7 @@ jobs: parquet_aws: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1069,7 +1069,7 @@ jobs: matrix: STORAGE: [minio, aws_s3, gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1138,7 +1138,7 @@ jobs: matrix: STORAGE: [minio, s3amazon, s3gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, 
altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1266,7 +1266,7 @@ jobs: - parquet_aws - s3 - tiered_storage_s3 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 From 122a1cd046fbfeb842f1d19b603da905362d3536 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 22 Jan 2025 14:23:57 +0000 Subject: [PATCH 108/130] Attempt to fix binary-builder docker image --- docker/packager/binary/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index de9310732a8b..b51bced31ac5 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -9,6 +9,7 @@ ENV CXX=clang++-${LLVM_VERSION} # libtapi is required to support .tbh format from recent MacOS SDKs RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ && cd apple-libtapi \ + && git checkout 15dfc2a8c9a2a89d06ff227560a69f5265b692f9 \ && INSTALLPREFIX=/cctools ./build.sh \ && ./install.sh \ && cd .. \ @@ -17,6 +18,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ # Build and install tools for cross-linking to Darwin (x86-64) RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ + && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ --target=x86_64-apple-darwin \ && make install -j$(nproc) \ @@ -26,6 +28,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ # Build and install tools for cross-linking to Darwin (aarch64) RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ + && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ --target=aarch64-apple-darwin \ && make install -j$(nproc) \ From b244a6215ac30752985da8e19dece92e1cf7d059 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 22 Jan 2025 11:09:44 -0500 Subject: [PATCH 109/130] attempting to fix binary-builder --- docker/packager/binary/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index b51bced31ac5..9e0b795fcf0e 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -7,7 +7,7 @@ ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} # libtapi is required to support .tbh format from recent MacOS SDKs -RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ +RUN git clone https://github.com/tpoechtrager/apple-libtapi.git \ && cd apple-libtapi \ && git checkout 15dfc2a8c9a2a89d06ff227560a69f5265b692f9 \ && INSTALLPREFIX=/cctools ./build.sh \ @@ -16,7 +16,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ && rm -rf apple-libtapi # Build and install tools for cross-linking to Darwin (x86-64) -RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ +RUN git clone https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ && 
git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ @@ -26,7 +26,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ && rm -rf cctools-port # Build and install tools for cross-linking to Darwin (aarch64) -RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ +RUN git clone https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ From 8529fe5d15ea67e4781351388507e95b83e2b42d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 15:44:48 -0500 Subject: [PATCH 110/130] Fix docker images --- docker/images.json | 5 ----- docker/test/stateless/Dockerfile | 12 ++++++++---- tests/ci/tests/docker_images.json | 4 ---- 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/docker/images.json b/docker/images.json index 3e5f28aca883..0bbabfe9eb1f 100644 --- a/docker/images.json +++ b/docker/images.json @@ -130,11 +130,6 @@ "docker/test/keeper-jepsen" ] }, - "docker/test/integration/kerberized_hadoop": { - "only_amd64": true, - "name": "altinityinfra/kerberized-hadoop", - "dependent": [] - }, "docker/test/sqlancer": { "name": "altinityinfra/sqlancer-test", "dependent": [] diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 58ac030e46c1..3ce249767dc9 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -1,6 +1,6 @@ # rebuild in #33610 # docker build -t clickhouse/stateless-test . -ARG FROM_TAG=latest +ARG FROM_TAG=600-b244a6215ac30752985da8e19dece92e1cf7d059-amd64 FROM altinityinfra/test-base:$FROM_TAG ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz" @@ -52,7 +52,7 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \ && odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \ && rm -rf /tmp/clickhouse-odbc-tmp -ENV TZ=Europe/Moscow +ENV TZ=Europe/Amsterdam RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone ENV NUM_TRIES=1 @@ -71,7 +71,7 @@ RUN arch=${TARGETARCH:-amd64} \ && chmod +x ./mc ./minio -RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \ +RUN wget --no-verbose 'https://archive.apache.org/dist/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \ && tar -xvf hadoop-3.3.1.tar.gz \ && rm -rf hadoop-3.3.1.tar.gz @@ -79,9 +79,13 @@ ENV MINIO_ROOT_USER="clickhouse" ENV MINIO_ROOT_PASSWORD="clickhouse" ENV EXPORT_S3_STORAGE_POLICIES=1 -RUN npm install -g azurite +# TODO: check against 3.29.0, but 23.8.11 had azurite@3.28.0 +RUN npm install -g azurite@3.29.0 \ + && npm install -g tslib@2.6.2 + COPY run.sh / COPY setup_minio.sh / COPY setup_hdfs_minicluster.sh / + CMD ["/bin/bash", "/run.sh"] diff --git a/tests/ci/tests/docker_images.json b/tests/ci/tests/docker_images.json index 53ad258f6ec9..466bae288c56 100644 --- a/tests/ci/tests/docker_images.json +++ b/tests/ci/tests/docker_images.json @@ -108,10 +108,6 @@ "docker/test/keeper-jepsen" ] }, - "docker/test/integration/kerberized_hadoop": { - "name": "altinityinfra/kerberized-hadoop", - "dependent": [] - }, "docker/test/sqlancer": { "name": "altinityinfra/sqlancer-test", "dependent": [] From 4aa52a62309677f8c02c2f9a583d1c1808a93a1b Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 16:03:17 -0500 Subject: [PATCH 111/130] Attempt to build 
ARM, enable ARM tests, and disable regression for now --- .github/workflows/release_branches.yml | 1606 ++++++++++++------------ tests/ci/ci_config.py | 3 +- 2 files changed, 802 insertions(+), 807 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 661cb410906a..a53fe80bf407 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -30,67 +30,61 @@ on: # yamllint disable-line rule:truthy - 'releases/22.8**' jobs: - # DockerHubPushAarch64: - # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none-aarch64] - # steps: - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # - name: Images check - # run: | - # cd "$GITHUB_WORKSPACE/tests/ci" - # python3 docker_images_check.py --suffix aarch64 - # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 - # with: - # name: changed_images_aarch64 - # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json - # Former DockerHubPushAmd64 - DockerHubPush: + DockerHubPushAarch64: + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none] + steps: + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + - name: Images check + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_images_check.py --suffix aarch64 + - name: Upload images files to artifacts + uses: actions/upload-artifact@v4 + with: + name: changed_images_aarch64 + path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json + DockerHubPushAmd64: runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix amd64 - # TODO(vnemkov): remove this step if you uncomment DockerHubPushAarch64 and DockerHubPush below. - # The rest of the pipeline expects changed_images.json, which was generated by previous version of DockerHubPush. 
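With DockerHubPushAarch64 and the DockerHubPush manifest-merge job restored by this patch, the amd64-only rename shim removed just below (explained by the two TODO comment lines above) is obsolete. One behavioural note on the actions/upload-artifact@v3 -> @v4 bumps made in this patch: v4 requires artifact names to be unique within a workflow run and fails on a second upload to the same name, where v3 merged such uploads into one artifact. A minimal sketch of the v4 pattern, with the name and path values borrowed from this workflow for illustration only:

    # v4: one unique artifact name per upload; re-uploading to the same name fails
    - name: Upload images files to artifacts
      uses: actions/upload-artifact@v4
      with:
        name: changed_images_amd64
        path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json

As of this patch, DockerHubPushAmd64 still appears to upload under the name changed_images while the merge job downloads changed_images_amd64; given v4's exact-name matching, that download would fail unless the names are aligned later in the series.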
- - name: Rename artifact - run: | - mv ${{ runner.temp }}/docker_images_check/changed_images_amd64.json ${{ runner.temp }}/docker_images_check/changed_images.json - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: changed_images path: ${{ runner.temp }}/docker_images_check/changed_images.json - # DockerHubPush: - # needs: [DockerHubPushAmd64, DockerHubPushAarch64] - # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - # steps: - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # - name: Download changed aarch64 images - # uses: actions/download-artifact@v2 - # with: - # name: changed_images_aarch64 - # path: ${{ runner.temp }} - # - name: Download changed amd64 images - # uses: actions/download-artifact@v2 - # with: - # name: changed_images_amd64 - # path: ${{ runner.temp }} - # - name: Images check - # run: | - # cd "$GITHUB_WORKSPACE/tests/ci" - # python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 - # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 - # with: - # name: changed_images - # path: ${{ runner.temp }}/changed_images.json + DockerHubPush: + needs: [DockerHubPushAmd64, DockerHubPushAarch64] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + steps: + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + - name: Download changed aarch64 images + uses: actions/download-artifact@v4 + with: + name: changed_images_aarch64 + path: ${{ runner.temp }} + - name: Download changed amd64 images + uses: actions/download-artifact@v4 + with: + name: changed_images_amd64 + path: ${{ runner.temp }} + - name: Images check + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 + - name: Upload images files to artifacts + uses: actions/upload-artifact@v4 + with: + name: changed_images + path: ${{ runner.temp }}/changed_images.json CompatibilityCheck: needs: [BuilderDebRelease] runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] @@ -103,7 +97,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Download json reports @@ -149,7 +143,7 @@ jobs: - name: Trust My Directory run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: true @@ -163,7 +157,7 @@ jobs: cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json @@ -175,60 +169,60 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - # BuilderDebAarch64: - # needs: [DockerHubPush] - # runs-on: [self-hosted, builder] - # steps: - # - name: Set envs - # 
run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/build_check - # IMAGES_PATH=${{runner.temp}}/images_path - # REPO_COPY=${{runner.temp}}/build_check/ClickHouse - # CACHES_PATH=${{runner.temp}}/../ccaches - # BUILD_NAME=package_aarch64 - # EOF - # - name: Download changed images - # uses: actions/download-artifact@v2 - # with: - # name: changed_images - # path: ${{ runner.temp }}/images_path - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # fetch-depth: 0 # otherwise we will have no info about contributors - # - name: Build - # run: | - # git -C "$GITHUB_WORKSPACE" submodule sync - # git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10 - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - # - name: Upload build URLs to artifacts - # uses: actions/upload-artifact@v2 - # with: - # name: ${{ env.BUILD_URLS }} - # path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json - # - name: Cleanup - # if: always() - # run: | - # # shellcheck disable=SC2046 - # docker kill $(docker ps -q) ||: - # # shellcheck disable=SC2046 - # docker rm -f $(docker ps -a -q) ||: - # sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + BuilderDebAarch64: + needs: [DockerHubPush] + runs-on: [self-hosted, altinity-type-ccx53, altinity-on-demand, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/build_check + IMAGES_PATH=${{runner.temp}}/images_path + REPO_COPY=${{runner.temp}}/build_check/ClickHouse + CACHES_PATH=${{runner.temp}}/../ccaches + BUILD_NAME=package_aarch64 + EOF + - name: Download changed images + uses: actions/download-artifact@v4 + with: + name: changed_images + path: ${{ runner.temp }}/images_path + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + with: + fetch-depth: 0 # otherwise we will have no info about contributors + - name: Build + run: | + git -C "$GITHUB_WORKSPACE" submodule sync + git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10 + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" + - name: Upload build URLs to artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ env.BUILD_URLS }} + path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" ############################################################################################ ##################################### Docker images ####################################### ############################################################################################ DockerServerImages: needs: - BuilderDebRelease - # - BuilderDebAarch64 + - BuilderDebAarch64 runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true fetch-depth: 0 # It MUST BE THE 
SAME for all dependencies and the job itself @@ -253,7 +247,7 @@ jobs: BuilderReport: needs: - BuilderDebRelease - # - BuilderDebAarch64 + - BuilderDebAarch64 runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] if: ${{ success() || failure() }} steps: @@ -271,7 +265,7 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Report Builder @@ -311,7 +305,7 @@ jobs: # with: # path: ${{ env.REPORTS_PATH }} # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # with: # clear-repository: true # - name: Report Builder @@ -336,11 +330,11 @@ jobs: # - BuilderBinDarwin # - BuilderBinDarwinAarch64 - BuilderDebRelease - # - BuilderDebAarch64 + - BuilderDebAarch64 runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Mark Commit Release Ready @@ -368,7 +362,43 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + with: + clear-repository: true + - name: Functional test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + FunctionalStatelessTestAarch64: + needs: [BuilderDebAarch64] + runs-on: [altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/stateless_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateless tests (aarch64) + REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse + KILL_TIMEOUT=10800 + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Functional test @@ -386,42 +416,6 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" - # FunctionalStatelessTestAarch64: - # needs: [BuilderDebAarch64] - # runs-on: [self-hosted, func-tester-aarch64] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/stateless_release - # REPORTS_PATH=${{runner.temp}}/reports_dir - # CHECK_NAME=Stateless tests (aarch64) - # REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - # KILL_TIMEOUT=10800 - # EOF - # - name: Download json reports - # uses: actions/download-artifact@v3 - # with: - # path: ${{ env.REPORTS_PATH }} - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # - name: Functional test - # run: | 
- # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - # - name: Cleanup - # if: always() - # run: | - # # shellcheck disable=SC2046 - # docker kill $(docker ps -q) ||: - # # shellcheck disable=SC2046 - # docker rm -f $(docker ps -a -q) ||: - # sudo rm -fr "$TEMP_PATH" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## @@ -443,7 +437,43 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + with: + clear-repository: true + - name: Functional test + run: | + sudo rm -fr "$TEMP_PATH" + mkdir -p "$TEMP_PATH" + cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + - name: Cleanup + if: always() + run: | + # shellcheck disable=SC2046 + docker kill $(docker ps -q) ||: + # shellcheck disable=SC2046 + docker rm -f $(docker ps -a -q) ||: + sudo rm -fr "$TEMP_PATH" + FunctionalStatefulTestAarch64: + needs: [BuilderDebAarch64] + runs-on: [altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none] + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/stateful_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateful tests (aarch64) + REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse + KILL_TIMEOUT=3600 + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Functional test @@ -461,42 +491,6 @@ jobs: # shellcheck disable=SC2046 docker rm -f $(docker ps -a -q) ||: sudo rm -fr "$TEMP_PATH" - # FunctionalStatefulTestAarch64: - # needs: [BuilderDebAarch64] - # runs-on: [self-hosted, func-tester-aarch64] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/stateful_release - # REPORTS_PATH=${{runner.temp}}/reports_dir - # CHECK_NAME=Stateful tests (aarch64) - # REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - # KILL_TIMEOUT=3600 - # EOF - # - name: Download json reports - # uses: actions/download-artifact@v3 - # with: - # path: ${{ env.REPORTS_PATH }} - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # - name: Functional test - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - # - name: Cleanup - # if: always() - # run: | - # # shellcheck disable=SC2046 - # docker kill $(docker ps -q) ||: - # # shellcheck disable=SC2046 - # docker rm -f $(docker ps -a -q) ||: - # sudo rm -fr "$TEMP_PATH" ############################################################################################# ############################# INTEGRATION TESTS ############################################# 
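The IntegrationTestsRelease0/IntegrationTestsRelease1 pair under this banner shards a single integration-test check across two runners. The env blocks that drive the split are elided by the hunks above; in this CI family the sharding is controlled by variables read by tests/ci/integration_test_check.py, roughly as follows (a sketch of the upstream shape, not a copy of this file):

    CHECK_NAME=Integration tests (release)
    RUN_BY_HASH_NUM=0      # 1 in IntegrationTestsRelease1
    RUN_BY_HASH_TOTAL=2

Each job then runs only the test groups whose hash falls into its bucket.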
############################################################################################# @@ -519,7 +513,7 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Integration test @@ -556,7 +550,7 @@ jobs: with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Integration test @@ -577,628 +571,628 @@ jobs: ############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# - regression_start: - ## Not depending on the tests above since they can fail at any given moment. - needs: [BuilderDebRelease] - runs-on: ubuntu-latest - steps: - - run: true + # regression_start: + # ## Not depending on the tests above since they can fail at any given moment. + # needs: [BuilderDebRelease] + # runs-on: ubuntu-latest + # steps: + # - run: true - regression_common: - strategy: - fail-fast: false - matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - 
./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # regression_common: + # strategy: + # fail-fast: false + # matrix: + # SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=${{ matrix.SUITE }} + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - benchmark: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: 
.github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # benchmark: + # strategy: + # fail-fast: false + # matrix: + # STORAGE: [minio, aws_s3, gcs] + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=ontime_benchmark + # STORAGE=/${{ matrix.STORAGE }} + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/benchmark.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --storage ${{ matrix.STORAGE }} + # --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + # --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + # --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + # --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + # --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + # --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + # --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ 
env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-minio-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - clickhouse_keeper_ssl: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - key_value: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_KEY_VALUE_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=key_value - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} 
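For context on the Setup / Get deb url steps that every regression job repeats: a step may append KEY=value lines to the file named by $GITHUB_ENV, and later steps of the same job then see those keys as environment variables. That appears to be how get-deb-url.py hands clickhouse_binary_path and version to the regression.py invocations in these jobs. A minimal sketch of the mechanism, assuming get-deb-url.py writes those two keys:

    - name: Get deb url
      run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
    - name: Run suite
      run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }}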
- - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # clickhouse_keeper_ssl: + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=clickhouse_keeper + # STORAGE=/ssl + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --ssl + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log + # key_value: + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ 
secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_KEY_VALUE_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=key_value + # artifacts=public + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - ldap: - strategy: - fail-fast: false - matrix: - SUITE: [authentication, external_user_directory, role_mapping] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" 
job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-authentication-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # ldap: + # strategy: + # fail-fast: false + # matrix: + # SUITE: [authentication, external_user_directory, role_mapping] + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=ldap/${{ matrix.SUITE }} + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ldap-authentication-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - parquet: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/no_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - 
name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # parquet: + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=parquet + # STORAGE=/no_s3 + # artifacts=public + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - parquet_minio: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ 
secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/minio - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage minio - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # parquet_minio: + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=parquet + # STORAGE=/minio + # artifacts=public + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # --storage minio + # - name: Create and upload logs + # if: always() + # run: 
.github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-minio-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - parquet_aws: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/aws_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aws_s3-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # parquet_aws: + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_PARQUET_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=parquet + # STORAGE=/aws_s3 + # artifacts=public + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: 
python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # --storage aws_s3 + # --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + # --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + # --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + # --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-aws_s3-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ 
secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # s3: + # strategy: + # fail-fast: false + # matrix: + # STORAGE: [minio, aws_s3, gcs] + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=s3 + # STORAGE=/${{ matrix.STORAGE }} + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # --storage ${{ matrix.STORAGE }} + # --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + # --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + # --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + # --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + # --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + # --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + # --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log - tiered_storage_s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION 
}} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + # tiered_storage_s3: + # strategy: + # fail-fast: false + # matrix: + # STORAGE: [minio, s3amazon, s3gcs] + # needs: [regression_start] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] + # env: + # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + # AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + # steps: + # - name: Checkout regression repo + # uses: actions/checkout@v3 + # with: + # repository: Altinity/clickhouse-regression + # ref: ${{ env.REGRESSION_COMMON_COMMIT }} + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # REPORTS_PATH=${{runner.temp}}/reports_dir + # SUITE=tiered_storage + # STORAGE=/${{ matrix.STORAGE }} + # artifacts=builds + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Setup + # run: .github/setup.sh + # - name: Get deb url + # run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + # - name: Run ${{ env.SUITE }} suite + # run: python3 + # -u ${{ env.SUITE }}/regression.py + # --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + # --test-to-end + # --local + # --collect-service-logs + # --output classic + # --parallel 1 + # --attr 
project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + # --log raw.log + # --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + # --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + # --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + # --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + # --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + # --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + # --with-${{ matrix.STORAGE }} + # - name: Create and upload logs + # if: always() + # run: .github/create_and_upload_logs.sh 1 + # - uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts + # path: | + # ./report.html + # ./*.log.txt + # ./*.log + # ./*.html + # ./*/_instances/*.log + # ./*/_instances/*/logs/*.log + # ./*/*/_instances/*/logs/*.log + # ./*/*/_instances/*.log SignRelease: needs: [BuilderDebRelease] @@ -1214,9 +1208,9 @@ jobs: run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Download json reports - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Sign release @@ -1228,7 +1222,7 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg @@ -1249,27 +1243,27 @@ jobs: # - BuilderSpecialReport - MarkReleaseReady - FunctionalStatelessTestRelease - # - FunctionalStatelessTestAarch64 + - FunctionalStatelessTestAarch64 - FunctionalStatefulTestRelease - # - FunctionalStatefulTestAarch64 + - FunctionalStatefulTestAarch64 - IntegrationTestsRelease0 - IntegrationTestsRelease1 - CompatibilityCheck - SignRelease - - regression_common - - benchmark - - clickhouse_keeper_ssl - - key_value - - ldap - - parquet - - parquet_minio - - parquet_aws - - s3 - - tiered_storage_s3 + # - regression_common + # - benchmark + # - clickhouse_keeper_ssl + # - key_value + # - ldap + # - parquet + # - parquet_minio + # - parquet_aws + # - s3 + # - tiered_storage_s3 runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Finish label diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index c13d06ef7f43..b0f96e758fda 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,7 +180,8 @@ }, "builds_report_config": { "ClickHouse build check": [ - "package_release" + "package_release", + "package_aarch64" ], "ClickHouse special build check": [ "binary_tidy", From 20f89b78ee87516b0971d005b341759d4658c564 Mon Sep 17 00:00:00 2001 From: Stuart <146047128+strtgbb@users.noreply.github.com> Date: Wed, 22 Jan 2025 16:21:02 -0500 Subject: [PATCH 112/130] pin altinityinfra/kerberized-hadoop tag --- 
 .../runner/compose/docker_compose_kerberized_hdfs.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
index 365821b3f5ea..b163936460c2 100644
--- a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
+++ b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
@@ -4,7 +4,8 @@ services:
   kerberizedhdfs1:
     cap_add:
       - DAC_READ_SEARCH
-    image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest}
+    # image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest}
+    image: altinityinfra/kerberized-hadoop:0-7fb126d648460c159657a337b2f0cc24fbbce2ee-amd64
     hostname: kerberizedhdfs1
     restart: always
     volumes:

From 50525962a0702546fd03213fe3e18384012d5438 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 17:13:17 -0500
Subject: [PATCH 113/130] artifact fix

---
 .github/workflows/release_branches.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index a53fe80bf407..9c198be86c7f 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -59,7 +59,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
   DockerHubPush:
     needs: [DockerHubPushAmd64, DockerHubPushAarch64]
     runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]

From 24e1bfa995b38a07ce010142769089284202166a Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Wed, 22 Jan 2025 23:39:30 +0100
Subject: [PATCH 114/130] Set version to 22.8.21.1001.altinitystable

---
 cmake/autogenerated_versions.txt | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt
index a2eea47e4ae5..b36c0e945b5a 100644
--- a/cmake/autogenerated_versions.txt
+++ b/cmake/autogenerated_versions.txt
@@ -9,9 +9,10 @@ SET(VERSION_MINOR 8)
 SET(VERSION_PATCH 21)
 SET(VERSION_GITHASH c9ca79e24e8759591494ce8a68306e30269655f2)
-SET(VERSION_TWEAK 39)
+SET(VERSION_TWEAK 1001)
 SET(VERSION_FLAVOUR altinitystable)
-SET(VERSION_DESCRIBE v22.8.21.39.altinitystable)
-SET(VERSION_STRING 22.8.21.39.altinitystable)
+SET(VERSION_DESCRIBE v22.8.21.1001.altinitystable)
+SET(VERSION_STRING 22.8.21.1001.altinitystable)
+
 # end of autochange

From 7b199057d85aa766e271acb39d8216aa320196b5 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 18:28:27 -0500
Subject: [PATCH 115/130] update docker artifact name

---
 .github/workflows/release_branches.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 9c198be86c7f..f36c57c7e968 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -58,7 +58,7 @@ jobs:
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: changed_images
+          name: changed_images_amd64
           path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
   DockerHubPush:
     needs: [DockerHubPushAmd64, DockerHubPushAarch64]

From aca046109300259c46d7c0f7083c9bde5f7ff27a Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 19:02:24 -0500 Subject: [PATCH 116/130] test fixes --- .github/workflows/regression.yml | 623 ++++++++++++++++++++ .github/workflows/release_branches.yml | 775 ++++--------------------- tests/ci/integration_test_check.py | 1 - tests/integration/ci-runner.py | 1 - tests/integration/runner | 2 - 5 files changed, 728 insertions(+), 674 deletions(-) create mode 100644 .github/workflows/regression.yml diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 000000000000..73728451316e --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,623 @@ +name: Regression test workflow - Release +'on': + workflow_call: + inputs: + runner_type: + description: the label of runner to use, can be a simple string or a comma-separated list + required: true + type: string + commit: + description: commit hash of the regression tests. + required: true + type: string + arch: + description: arch to run the tests on. + required: true + type: string + timeout_minutes: + description: Maximum number of minutes to let workflow run before GitHub cancels it. + default: 210 + type: number + build_sha: + description: commit sha of the workflow run for artifact upload. + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true + AWS_REPORT_KEY_ID: + description: aws s3 key id used for regression test reports. + required: true + AWS_REPORT_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression test reports. + required: true + AWS_REPORT_REGION: + description: aws s3 region used for regression test reports. + required: true + DOCKER_USERNAME: + description: username of the docker user. + required: true + DOCKER_PASSWORD: + description: password to the docker user. + required: true + REGRESSION_AWS_S3_BUCKET: + description: aws s3 bucket used for regression tests. + required: true + REGRESSION_AWS_S3_KEY_ID: + description: aws s3 key id used for regression tests. + required: true + REGRESSION_AWS_S3_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression tests. + required: true + REGRESSION_AWS_S3_REGION: + description: aws s3 region used for regression tests. + required: true + REGRESSION_GCS_KEY_ID: + description: gcs key id used for regression tests. + required: true + REGRESSION_GCS_KEY_SECRET: + description: gcs key secret used for regression tests. + required: true + REGRESSION_GCS_URI: + description: gcs uri used for regression tests. 
+ required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CHECKS_DATABASE_USER: ${{ secrets.CHECKS_DATABASE_USER }} + CHECKS_DATABASE_PASSWORD: ${{ secrets.CHECKS_DATABASE_PASSWORD }} + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output classic + --parallel 1 + --log raw.log + --with-analyzer + artifacts: builds + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Common: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || 
EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Alter: + strategy: + fail-fast: false + matrix: + ONLY: [replace, attach, move] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=alter + STORAGE=/${{ matrix.ONLY }}_partition + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u alter/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --only "/alter/${{ matrix.ONLY }} partition/*" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ 
secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ClickHouseKeeperSSL: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + path: ${{ env.artifact_paths }} + + LDAP: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ 
env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + Parquet: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ParquetS3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + 
STORAGE=${{ matrix.STORAGE}} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + S3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs, azure] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }} + --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }} + --azure-container ${{ secrets.AZURE_CONTAINER_NAME }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ 
env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + TieredStorage: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 661cb410906a..9c4c119bca93 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,11 +6,6 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows - REGRESSION_COMMON_COMMIT: 
1108c52f249af64885c255f39192ba3cc4c145ab - REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 - REGRESSION_KEY_VALUE_COMMIT: e072060fba19d3f81a96f4c5cbe9c5d0b1dcfa9d - on: # yamllint disable-line rule:truthy pull_request: @@ -34,7 +29,7 @@ jobs: # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none-aarch64] # steps: # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # - name: Images check # run: | # cd "$GITHUB_WORKSPACE/tests/ci" @@ -49,7 +44,7 @@ jobs: runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check @@ -62,7 +57,7 @@ jobs: run: | mv ${{ runner.temp }}/docker_images_check/changed_images_amd64.json ${{ runner.temp }}/docker_images_check/changed_images.json - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: changed_images path: ${{ runner.temp }}/docker_images_check/changed_images.json @@ -71,14 +66,14 @@ jobs: # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # steps: # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # - name: Download changed aarch64 images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images_aarch64 # path: ${{ runner.temp }} # - name: Download changed amd64 images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images_amd64 # path: ${{ runner.temp }} @@ -103,11 +98,11 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: CompatibilityCheck @@ -142,14 +137,14 @@ jobs: CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable EOF - name: Download changed images - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: changed_images path: ${{ env.IMAGES_PATH }} - name: Trust My Directory run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: true @@ -163,7 +158,7 @@ jobs: cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json @@ -189,12 +184,12 @@ jobs: # BUILD_NAME=package_aarch64 # EOF # - name: Download changed images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images # path: ${{ runner.temp 
}}/images_path # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # with: # fetch-depth: 0 # otherwise we will have no info about contributors # - name: Build @@ -228,7 +223,7 @@ jobs: runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself @@ -267,11 +262,11 @@ jobs: NEEDS_DATA_PATH=${{runner.temp}}/needs.json EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Report Builder @@ -307,11 +302,11 @@ jobs: # NEEDS_DATA_PATH=${{runner.temp}}/needs.json # EOF # - name: Download json reports - # uses: actions/download-artifact@v3 + # uses: actions/download-artifact@v4 # with: # path: ${{ env.REPORTS_PATH }} # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # with: # clear-repository: true # - name: Report Builder @@ -340,7 +335,7 @@ jobs: runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Mark Commit Release Ready @@ -364,13 +359,28 @@ jobs: KILL_TIMEOUT=10800 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker - name: Functional test run: | sudo rm -fr "$TEMP_PATH" @@ -400,11 +410,11 @@ jobs: # KILL_TIMEOUT=10800 # EOF # - name: Download json reports - # uses: actions/download-artifact@v3 + # uses: actions/download-artifact@v4 # with: # path: ${{ env.REPORTS_PATH }} # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # with: # clear-repository: true # - name: Functional test @@ -439,13 +449,28 @@ jobs: KILL_TIMEOUT=3600 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config
+ sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker - name: Functional test run: | sudo rm -fr "$TEMP_PATH" @@ -475,11 +500,11 @@ jobs: # KILL_TIMEOUT=3600 # EOF # - name: Download json reports - # uses: actions/download-artifact@v3 + # uses: actions/download-artifact@v4 # with: # path: ${{ env.REPORTS_PATH }} # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # with: # clear-repository: true # - name: Functional test @@ -515,13 +540,28 @@ jobs: RUN_BY_HASH_TOTAL=2 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:db8:1::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker - name: Integration test run: | sudo rm -fr "$TEMP_PATH" @@ -552,13 +592,28 @@ jobs: RUN_BY_HASH_TOTAL=2 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:db8:1::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker - name: Integration test run: | sudo rm -fr "$TEMP_PATH" @@ -577,628 +632,17 @@ jobs: ############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# - regression_start: - ## Not depending on the tests above since they can fail at any given moment.
+ RegressionTestsRelease: needs: [BuilderDebRelease] - runs-on: ubuntu-latest - steps: - - run: true - - regression_common: - strategy: - fail-fast: false - matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - benchmark: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} 
--github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - clickhouse_keeper_ssl: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - key_value: - needs: [regression_start] - runs-on: 
[self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_KEY_VALUE_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=key_value - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - ldap: - strategy: - fail-fast: false - matrix: - SUITE: [authentication, external_user_directory, role_mapping] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" 
job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-authentication-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/no_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet_minio: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/minio - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - 
--clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage minio - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet_aws: - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/aws_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aws_s3-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - 
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - tiered_storage_s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr 
project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + if: ${{ !failure() && !cancelled() }} + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression + commit: 217a4fde343586d50229ff5e24295a02412d1d98 + arch: release + build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} + timeout_minutes: 300 SignRelease: needs: [BuilderDebRelease] @@ -1214,9 +658,9 @@ jobs: run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Download json reports - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Sign release @@ -1256,20 +700,11 @@ jobs: - IntegrationTestsRelease1 - CompatibilityCheck - SignRelease - - regression_common - - benchmark - - clickhouse_keeper_ssl - - key_value - - ldap - - parquet - - parquet_minio - - parquet_aws - - s3 - - tiered_storage_s3 + - RegressionTestsRelease runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Finish label diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index bd1513f87f22..957a782c779a 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -44,7 +44,6 @@ "altinityinfra/postgresql-java-client", "altinityinfra/integration-test", "altinityinfra/kerberos-kdc", - "altinityinfra/kerberized-hadoop", "altinityinfra/integration-helper", "altinityinfra/dotnet-client", ] diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index 5f9b0619deca..adc3b3b94f49 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -282,7 +282,6 @@ def get_images_names(): "altinityinfra/integration-helper", "altinityinfra/integration-test", "altinityinfra/integration-tests-runner", - "altinityinfra/kerberized-hadoop", "altinityinfra/kerberos-kdc", "altinityinfra/mysql-golang-client", 
"altinityinfra/mysql-java-client", diff --git a/tests/integration/runner b/tests/integration/runner index 00022b511ba9..b26f87646dc2 100755 --- a/tests/integration/runner +++ b/tests/integration/runner @@ -321,8 +321,6 @@ if __name__ == "__main__": env_tags += "-e {}={} ".format("DOCKER_HELPER_TAG", tag) elif image == "altinityinfra/integration-test": env_tags += "-e {}={} ".format("DOCKER_BASE_TAG", tag) - elif image == "altinityinfra/kerberized-hadoop": - env_tags += "-e {}={} ".format("DOCKER_KERBERIZED_HADOOP_TAG", tag) elif image == "altinityinfra/kerberos-kdc": env_tags += "-e {}={} ".format("DOCKER_KERBEROS_KDC_TAG", tag) else: From c042b1fad65130df352f75f5599d96d5b2e9016c Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 23 Jan 2025 01:42:58 +0100 Subject: [PATCH 117/130] Set version to 22.8.21.1001.altinityhotfix --- cmake/autogenerated_versions.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index b36c0e945b5a..c93991c911c6 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -10,9 +10,9 @@ SET(VERSION_PATCH 21) SET(VERSION_GITHASH c9ca79e24e8759591494ce8a68306e30269655f2) SET(VERSION_TWEAK 1001) -SET(VERSION_FLAVOUR altinitystable) +SET(VERSION_FLAVOUR altinityhotfix) -SET(VERSION_DESCRIBE v22.8.21.1001.altinitystable) -SET(VERSION_STRING 22.8.21.1001.altinitystable) +SET(VERSION_DESCRIBE v22.8.21.1001.altinityhotfix) +SET(VERSION_STRING 22.8.21.1001.altinityhotfix) # end of autochange From 582abc8fa9ffeb22a5bf942469b77459e6c561a0 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 19:46:01 -0500 Subject: [PATCH 118/130] push docker images and make them amd only --- .github/workflows/release_branches.yml | 4 ++-- tests/ci/docker_server.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9c4c119bca93..9e6180ca9623 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ + python3 docker_server.py --release-type head push \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --no-push \ + python3 docker_server.py --release-type head push \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index 93d21ebc614d..ac1abd9831d1 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -31,7 +31,7 @@ ) TEMP_PATH = p.join(RUNNER_TEMP, "docker_images_check") -BUCKETS = {"amd64": "package_release", "arm64": "package_aarch64"} +BUCKETS = {"amd64": "package_release"} git = Git(ignore_no_tags=True) From 987cc8c56bb2bd9f0610d45d725367af757c713f Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 19:47:03 -0500 Subject: [PATCH 119/130] typo fix --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9e6180ca9623..d328de3d7ec0 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server 
building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head push \ + python3 docker_server.py --release-type head --push \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head push \ + python3 docker_server.py --release-type head --push \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() From 2b236e50773697a9cc3046b9756bd6404ff09b89 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 20:18:37 -0500 Subject: [PATCH 120/130] add retry script --- .github/retry.sh | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100755 .github/retry.sh diff --git a/.github/retry.sh b/.github/retry.sh new file mode 100755 index 000000000000..566c2cf11315 --- /dev/null +++ b/.github/retry.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Execute command until exitcode is 0 or +# maximum number of retries is reached +# Example: +# ./retry <retries> <delay> <command> +retries=$1 +delay=$2 +command="${@:3}" +exitcode=0 +try=0 +until [ "$try" -ge $retries ] +do + echo "$command" + eval "$command" + exitcode=$? + if [ $exitcode -eq 0 ]; then + break + fi + try=$((try+1)) + sleep $delay +done +exit $exitcode From e40a197b932b90de09b23fd1c8a715a792b276d0 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 21:23:15 -0500 Subject: [PATCH 121/130] skip docker image rebuild --- .github/workflows/release_branches.yml | 12 +++--- tests/ci/docker_images_check.py | 54 ++++++++++++++++++-------- 2 files changed, 43 insertions(+), 23 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d328de3d7ec0..073a191ba583 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -35,7 +35,7 @@ jobs: # cd "$GITHUB_WORKSPACE/tests/ci" # python3 docker_images_check.py --suffix aarch64 # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/upload-artifact@v4 # with: # name: changed_images_aarch64 # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json @@ -82,7 +82,7 @@ jobs: # cd "$GITHUB_WORKSPACE/tests/ci" # python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/upload-artifact@v4 # with: # name: changed_images # path: ${{ runner.temp }}/changed_images.json @@ -201,7 +201,7 @@ jobs: # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" # cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" # - name: Upload build URLs to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/upload-artifact@v4 # with: # name: ${{ env.BUILD_URLS }} # path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --push \ + python3 docker_server.py --release-type head \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --push \ + python3 docker_server.py --release-type head \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() @@ -672,7 +672,7 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg diff
--git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 2f3859380cf7..affe37684555 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -106,23 +106,43 @@ def get_changed_docker_images( str(files_changed), ) - # Rebuild all images - changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()] - - # for dockerfile_dir, image_description in images_dict.items(): - # for f in files_changed: - # if f.startswith(dockerfile_dir): - # name = image_description["name"] - # only_amd64 = image_description.get("only_amd64", False) - # logging.info( - # "Found changed file '%s' which affects " - # "docker image '%s' with path '%s'", - # f, - # name, - # dockerfile_dir, - # ) - # changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) - # break + # Find changed images + all_images = [] + changed_images = [] + for dockerfile_dir, image_description in images_dict.items(): + all_images.append(DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False))) + for f in files_changed: + if f.startswith(dockerfile_dir): + name = image_description["name"] + only_amd64 = image_description.get("only_amd64", False) + logging.info( + "Found changed file '%s' which affects " + "docker image '%s' with path '%s'", + f, + name, + dockerfile_dir, + ) + changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) + break + + # Rebuild all images on push, release, or scheduled run + if pr_info.number in [0,1]: + changed_images = all_images + + else: + # Rebuild all on opened PR + if pr_info.event['action'] in ['opened', 'reopened']: + changed_images = all_images + + # Check that image for the PR exists + elif pr_info.event['action'] == 'synchronize': + unchanged_images = [ + image for image in all_images if image not in changed_images + ] + logging.info(f"Unchanged images: {unchanged_images}") + for image in unchanged_images: + if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}", shell=True).returncode != 0: + changed_images.append(image) # The order is important: dependents should go later than bases, so that # they are built with updated base versions. 
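The crux of the rebuild-skip logic added above is the "docker manifest inspect" probe: the command exits non-zero when the requested tag is absent from the registry, so a failed probe marks the image for rebuilding, while an existing tag lets the job skip it. A minimal standalone sketch of the same check follows; the repository names and the per-PR tag are illustrative assumptions, not values taken from the CI configuration:

    import subprocess

    def image_tag_exists(repo: str, tag: str) -> bool:
        # `docker manifest inspect` exits 0 only when the tag is already
        # present in the registry; the output itself is not needed here.
        result = subprocess.run(
            ["docker", "manifest", "inspect", f"{repo}:{tag}"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return result.returncode == 0

    # Hypothetical image names and per-PR tag, mirroring the
    # "{image.repo}:{pr_info.number}" pattern in docker_images_check.py above.
    for image in ["altinityinfra/fasttest", "altinityinfra/stateless-test"]:
        action = "skip rebuild" if image_tag_exists(image, "12345") else "rebuild"
        print(f"{image}: {action}")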
From edb90737c6094932c67b2ac1b1523490c11551c6 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 22:33:22 -0500 Subject: [PATCH 122/130] remove push-by-digest --- tests/ci/docker_images_check.py | 2 +- tests/ci/docker_server.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index affe37684555..f1e506aa38a1 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -141,7 +141,7 @@ def get_changed_docker_images( ] logging.info(f"Unchanged images: {unchanged_images}") for image in unchanged_images: - if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}", shell=True).returncode != 0: + if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}-amd64", shell=True).returncode != 0: changed_images.append(image) # The order is important: dependents should go later than bases, so that diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index ac1abd9831d1..ecfd030360c4 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -242,7 +242,7 @@ def build_and_push_image( init_args = ["docker", "buildx", "build"] if push: init_args.append("--push") - init_args.append("--output=type=image,push-by-digest=true") + init_args.append("--output=type=image") init_args.append(f"--tag={image.repo}") else: init_args.append("--output=type=docker") From 4f3b2cd63c9fda1f5ecd82d50c5eaeecdf9386ed Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 22:38:58 -0500 Subject: [PATCH 123/130] minor regression fix --- .github/workflows/regression.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 73728451316e..d2b144f5ba09 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -165,7 +165,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -216,7 +216,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -268,7 +268,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -323,7 +323,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -374,7 +374,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH 
}}/build_report_package_${{ inputs.arch }}.json @@ -420,7 +420,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -471,7 +471,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -527,7 +527,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json @@ -589,7 +589,7 @@ jobs: uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} + name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json From b7b0fd92d4aecf4f9829cd9e1f7fe3954979d2c3 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 23:53:11 -0500 Subject: [PATCH 124/130] update build suffix and add hotfix to version helper --- .github/workflows/regression.yml | 18 ++++----- .github/workflows/release_branches.yml | 2 +- tests/ci/docker_images_check.py | 54 ++++++++------------------ tests/ci/version_helper.py | 3 +- 4 files changed, 29 insertions(+), 48 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index d2b144f5ba09..db8d893ec430 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -168,7 +168,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -219,7 +219,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -271,7 +271,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -326,7 +326,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH 
}}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -377,7 +377,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -423,7 +423,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -474,7 +474,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -530,7 +530,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -592,7 +592,7 @@ jobs: name: build_urls_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 073a191ba583..986556a1b3de 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -134,7 +134,7 @@ jobs: REPO_COPY=${{runner.temp}}/build_check/ClickHouse CACHES_PATH=${{runner.temp}}/../ccaches BUILD_NAME=package_release - CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinityhotfix EOF - name: Download changed images uses: actions/download-artifact@v4 diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index f1e506aa38a1..2f3859380cf7 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -106,43 +106,23 @@ def get_changed_docker_images( str(files_changed), ) - # Find changed images - all_images = [] - changed_images = [] - for dockerfile_dir, image_description in images_dict.items(): - all_images.append(DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False))) - for f in files_changed: - if f.startswith(dockerfile_dir): - name = image_description["name"] - only_amd64 = image_description.get("only_amd64", False) - logging.info( - "Found changed file '%s' which affects " - "docker image '%s' with path '%s'", - f, - name, - dockerfile_dir, - ) - changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) - break - - # Rebuild all images on push, 
release, or scheduled run - if pr_info.number in [0,1]: - changed_images = all_images - - else: - # Rebuild all on opened PR - if pr_info.event['action'] in ['opened', 'reopened']: - changed_images = all_images - - # Check that image for the PR exists - elif pr_info.event['action'] == 'synchronize': - unchanged_images = [ - image for image in all_images if image not in changed_images - ] - logging.info(f"Unchanged images: {unchanged_images}") - for image in unchanged_images: - if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}-amd64", shell=True).returncode != 0: - changed_images.append(image) + # Rebuild all images + changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()] + + # for dockerfile_dir, image_description in images_dict.items(): + # for f in files_changed: + # if f.startswith(dockerfile_dir): + # name = image_description["name"] + # only_amd64 = image_description.get("only_amd64", False) + # logging.info( + # "Found changed file '%s' which affects " + # "docker image '%s' with path '%s'", + # f, + # name, + # dockerfile_dir, + # ) + # changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) + # break # The order is important: dependents should go later than bases, so that # they are built with updated base versions. diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index bb9630c99afb..3b8b4bd04cc2 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -175,7 +175,8 @@ class VersionType: PRESTABLE = "prestable" STABLE = "altinitystable" TESTING = "testing" - VALID = (TESTING, PRESTABLE, STABLE, LTS) + HOTFIX = "altinityhotfix" + VALID = (TESTING, PRESTABLE, STABLE, LTS, HOTFIX) def validate_version(version: str): From d2e6a05e986503991ec687ffc13bc80af7259deb Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 23 Jan 2025 01:14:49 -0500 Subject: [PATCH 125/130] docker login --- .github/workflows/release_branches.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 986556a1b3de..e29a7eb92b62 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -229,6 +229,7 @@ jobs: fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - name: Check docker altinity/clickhouse-server building run: | + docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head \ --image-repo altinity/clickhouse-server --image-path docker/server From d6cf5c44b102590c914446c24da681ea54a591ab Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 23 Jan 2025 09:59:07 +0100 Subject: [PATCH 126/130] Fixed pushing docker images of the server Pushing as `altinityinfra/clickhouse-server` --- .github/workflows/release_branches.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index e29a7eb92b62..d23baf2df93e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -227,14 +227,14 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker altinity/clickhouse-server building + - name: Check docker altinityinfra/clickhouse-server building run: | docker login -u 
From d2e6a05e986503991ec687ffc13bc80af7259deb Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Thu, 23 Jan 2025 01:14:49 -0500
Subject: [PATCH 125/130] docker login

---
 .github/workflows/release_branches.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 986556a1b3de..e29a7eb92b62 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -229,6 +229,7 @@ jobs:
           fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
       - name: Check docker altinity/clickhouse-server building
         run: |
+          docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_server.py --release-type head \
             --image-repo altinity/clickhouse-server --image-path docker/server

From d6cf5c44b102590c914446c24da681ea54a591ab Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Thu, 23 Jan 2025 09:59:07 +0100
Subject: [PATCH 126/130] Fixed pushing docker images of the server

Pushing as `altinityinfra/clickhouse-server`
---
 .github/workflows/release_branches.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index e29a7eb92b62..d23baf2df93e 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -227,14 +227,14 @@ jobs:
         with:
           clear-repository: true
           fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
-      - name: Check docker altinity/clickhouse-server building
+      - name: Check docker altinityinfra/clickhouse-server building
         run: |
           docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_server.py --release-type head \
-            --image-repo altinity/clickhouse-server --image-path docker/server
+            --image-repo altinityinfra/clickhouse-server --image-path docker/server
           python3 docker_server.py --release-type head \
-            --image-repo altinity/clickhouse-keeper --image-path docker/keeper
+            --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup
         if: always()
         run: |

From 046b4ff1cf1521628ea47e1c01eac5423e77580b Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Thu, 23 Jan 2025 10:59:31 +0100
Subject: [PATCH 127/130] Pushing docker image with exact version

---
 tests/ci/docker_server.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py
index ecfd030360c4..018f3d232cde 100644
--- a/tests/ci/docker_server.py
+++ b/tests/ci/docker_server.py
@@ -210,6 +210,7 @@ def gen_tags(version: ClickHouseVersion, release_type: str) -> List[str]:
             tags.append(".".join(parts[: i + 1]))
     elif release_type == "head":
         tags.append(release_type)
+        tags.append(version.string)
     else:
         raise ValueError(f"{release_type} is not valid release part")
     return tags
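Illustration (not part of the patches): after the gen_tags change above, a "head" release pushes both a moving tag and a tag pinned to the exact version, so the same build can be pulled either way. A sketch of just that branch, with an illustrative version string:

    from typing import List

    def head_tags(version_string: str) -> List[str]:
        # Mirrors the release_type == "head" branch of gen_tags() above.
        tags = ["head"]
        tags.append(version_string)  # new: exact version pushed as a tag too
        return tags

    # e.g. the image becomes pullable as both :head and :22.8.15.1.altinitystable
    print(head_tags("22.8.15.1.altinitystable"))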
From d64c0859378c726e3fe785c1fe2ab33113535bf8 Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Thu, 23 Jan 2025 11:00:00 -0500
Subject: [PATCH 128/130] Update suffix version

---
 .github/workflows/release_branches.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 718f3a1fbf58..3de0c5941915 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -176,6 +176,7 @@ jobs:
           REPO_COPY=${{runner.temp}}/build_check/ClickHouse
           CACHES_PATH=${{runner.temp}}/../ccaches
           BUILD_NAME=package_aarch64
+          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinityhotfix
           EOF
       - name: Download changed images
         uses: actions/download-artifact@v4

From 444f547311eaea700e72181b980685f3aa2c32be Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Thu, 23 Jan 2025 13:14:40 -0500
Subject: [PATCH 129/130] arm artifacts fix

---
 tests/ci/build_check.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py
index 59da13a24fb0..86f9697b953b 100644
--- a/tests/ci/build_check.py
+++ b/tests/ci/build_check.py
@@ -389,22 +389,25 @@ def main():
             log_path, s3_path_prefix + "/" + os.path.basename(log_path)
         )
         logging.info("Log url %s", log_url)
+        print(f"::notice ::Log URL: {log_url}")
     else:
         logging.info("Build log doesn't exist")
+        print("Build log doesn't exist")

-    print(f"::notice ::Log URL: {log_url}")
     src_path = os.path.join(TEMP_PATH, "build_source.src.tar.gz")
+    s3_path = s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz"
+    logging.info("s3_path %s", s3_path)
     if os.path.exists(src_path):
         src_url = s3_helper.upload_build_file_to_s3(
-            src_path, s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz"
+            src_path, s3_path
         )
         logging.info("Source tar %s", src_url)
+        print(f"::notice ::Source tar URL: {src_url}")
     else:
         logging.info("Source tar doesn't exist")
-
-    print(f"::notice ::Source tar URL: {src_url}")
+        print("Source tar doesn't exist")

     create_json_artifact(
         TEMP_PATH, build_name, log_url, build_urls, build_config, elapsed, success

From 50b8e2062a5210032ca079ee463aefa508a6e32d Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 19:43:53 -0500
Subject: [PATCH 130/130] docker skip rebuild

---
 .github/workflows/release_branches.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 3de0c5941915..e25b4bf05882 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -391,7 +391,7 @@ jobs:
             KILL_TIMEOUT=10800
             EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
@@ -481,7 +481,7 @@ jobs:
             KILL_TIMEOUT=3600
             EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
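Illustration (not part of the patches): the "arm artifacts fix" in patch 129 boils down to printing an artifact URL only after the artifact is known to exist; previously a build that produced no source tarball (presumably the aarch64 packaging job, per the commit subject) reached the print() with src_url never assigned. A sketch of the corrected pattern, with upload_file standing in for s3_helper.upload_build_file_to_s3:

    import logging
    import os

    def report_source_tar(src_path: str, s3_path: str, upload_file) -> None:
        # upload_file is a stand-in; the real code calls the S3 helper.
        if os.path.exists(src_path):
            src_url = upload_file(src_path, s3_path)
            logging.info("Source tar %s", src_url)
            print(f"::notice ::Source tar URL: {src_url}")  # src_url defined here
        else:
            logging.info("Source tar doesn't exist")
            print("Source tar doesn't exist")  # no reference to an unbound name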