@@ -15,6 +15,11 @@ limitations under the License.
 set -e
 # set -x

+error() { echo "ERROR: $*" >&2; }
+warn() { echo "WARN: $*"; }
+success() { echo "SUCCESS: $*"; }
+log() { echo "LOG: $*"; }
+
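+# For example, log "Downloading image" prints "LOG: Downloading image", and error
+# writes to stderr so failures stay visible even when stdout is redirected to a log file.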
 source <(curl -L https://raw.githubusercontent.com/ocp-power-automation/openshift-install-power/92996305e1a8bef69fbe613b912d5561cc753172/openshift-install-powervs 2>/dev/null | sed 's/main "$@"//g')

 function help {
     --cos-access-key string      Cloud Storage access key(optional)
     --cos-secret-key string      Cloud Storage secret key(optional)
     --skip-os-password           Skip the root user password (optional)
+    --sha256 string              Expected SHA256 checksum for the image(optional)
     --help                       help for upload
+Environment Variables:
+  DOWNLOAD_MAX_RETRIES   Maximum number of retry attempts if a download fails or the checksum validation fails (default: 3)
+  DOWNLOAD_RETRY_DELAY   Delay between retries in seconds (default: 5)
+

 EOF
     exit 0
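+# Example invocation (the script name and command shown are placeholders; only --sha256
+# and the environment variables listed above are defined by this change):
+#   DOWNLOAD_MAX_RETRIES=5 ./<script> upload --sha256 "<published-sha256>"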
@@ -62,6 +72,11 @@ PVSADM_VERSION="v0.1.11"
 IMAGE_SIZE="11"
 TARGET_DISK_SIZE="120"

+# Download retry configuration
+DOWNLOAD_MAX_RETRIES=${DOWNLOAD_MAX_RETRIES:-"3"}
+DOWNLOAD_RETRY_DELAY=${DOWNLOAD_RETRY_DELAY:-"5"}
+DOWNLOAD_TIMEOUT=300
+
 # Default Centos image name
 CENTOS_VM_IMAGE_NAME='CentOS-Stream-8'

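+# The ${VAR:-default} expansions above keep any value exported by the caller, so the
+# retry behaviour can be tuned per run without editing the script, e.g. (illustrative
+# values only):
+#   export DOWNLOAD_MAX_RETRIES=5
+#   export DOWNLOAD_RETRY_DELAY=10
+# DOWNLOAD_TIMEOUT is passed to curl --max-time, i.e. the per-attempt budget in seconds.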
@@ -612,16 +627,156 @@ function copy_image_file {
 }

 function download_url() {
-    local url=$1
+    local url="$1"
+    local expected_sha256="$2"
     local image_name=${url##*/}
-    rm -rf $image_name
-    retry "curl -fsSL $url -o ./$image_name"
-    if [[ $? -eq 0 ]]; then
-        # IMAGE_PATH=$(realpath ./$image_name)
-        IMAGE_PATH=./$image_name
-        DOWNLOAD_IMAGE_NAME=$image_name
+    local retry_count=0
+    local download_success=false
+
+    log "========================================="
+    log "Starting download: $(basename "$image_name")"
+    log "Source URL: $url"
+    log "========================================="
+
+    # Validate URL before attempting download
+    validate_url "$url"
+
+    # Remove any existing file
+    rm -f "$image_name"
+
+    # Retry loop: focus purely on download
+    while [ $retry_count -lt $DOWNLOAD_MAX_RETRIES ]; do
+        if [ $retry_count -gt 0 ]; then
+            warn "Retry attempt $retry_count of $DOWNLOAD_MAX_RETRIES"
+            sleep $DOWNLOAD_RETRY_DELAY
+        else
+            log "Download attempt $((retry_count + 1)) of $DOWNLOAD_MAX_RETRIES"
+        fi
+
+        log "Downloading $(basename "$image_name")..."
+        if curl -fLSs --retry 2 --retry-delay 2 --connect-timeout 60 \
+            --max-time $DOWNLOAD_TIMEOUT "$url" -o "./$image_name" 2>&1; then
+            download_success=true
+            break
+        else
+            local curl_exit=$?
+            error "Download failed (curl exit code: $curl_exit)"
+            case $curl_exit in
+                6)  error "Could not resolve host (DNS failure)" ;;
+                7)  error "Failed to connect to host" ;;
+                18) error "Partial file transfer" ;;
+                22) error "HTTP error returned by the server (404/403/etc.)" ;;
+                28) error "Operation timed out" ;;
+                35) error "SSL connect error" ;;
+                *)  error "See the curl manual for exit code $curl_exit" ;;
+            esac
+            rm -f "./$image_name"
+            retry_count=$((retry_count + 1))
+        fi
+    done
+
+    # All retries failed
+    if [ "$download_success" = false ]; then
+        error "========================================="
+        error "✗ Failed to download after $DOWNLOAD_MAX_RETRIES attempts"
+        error "========================================="
+        error "Troubleshooting steps:"
+        error "  1. Check your internet connection"
+        error "  2. Verify the URL is correct and accessible:"
+        error "     $url"
+        error "  3. Ensure special characters in the URL are properly escaped"
+        error "  4. Check if the checksum value is correct"
+        error "  5. Try downloading manually to diagnose:"
+        error "     curl -LO \"$url\""
+        error "  6. Increase retry attempts: export DOWNLOAD_MAX_RETRIES=5"
+        return 1
+    fi
+
+    # Verify file existence and content after the download
+    if [ ! -f "./$image_name" ] || [ ! -s "./$image_name" ]; then
+        error "Downloaded file is missing or empty after $DOWNLOAD_MAX_RETRIES attempts."
+        return 1
+    fi
+
+    # Perform verification once, after a successful download
+    log "Download completed; running one-time verification checks..."
+
+    if ! verify_file_size "./$image_name" "$url"; then
+        warn "File size verification failed; please verify manually."
+    fi
+
+    if ! verify_sha256 "./$image_name" "$expected_sha256"; then
+        error "Checksum verification failed; the downloaded file may be corrupted."
+        return 1
+    fi
+
+    IMAGE_PATH="./$image_name"
+    DOWNLOAD_IMAGE_NAME="$image_name"
+
+    success "========================================="
+    success "✓ Download and verification completed successfully!"
+    success "File: $(basename "$image_name")"
+    success "Location: $IMAGE_PATH"
+    success "========================================="
+    return 0
+}
+
+
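+# Example usage (URL and checksum are placeholders): on success, download_url sets
+# IMAGE_PATH and DOWNLOAD_IMAGE_NAME for the caller, e.g.
+#   download_url "https://example.com/images/rhcos.qcow2" "<published-sha256>" || exit 1
+#   log "Saved $DOWNLOAD_IMAGE_NAME at $IMAGE_PATH"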
+# -------------------------------------------------------------------------
+# Verify file size matches expected size from HTTP headers
+# -------------------------------------------------------------------------
+function verify_file_size() {
+    local file="$1"
+    local url="$2"
+
+    log "Verifying file size for $(basename "$file")..."
+
+    # Get expected size from HTTP headers
+    local expected_size=$(curl -sI "$url" | grep -i "^content-length:" | awk '{print $2}' | tr -d '\r\n')
+
+    if [ -z "$expected_size" ] || [ "$expected_size" = "0" ]; then
+        warn "Unable to determine expected file size from server, skipping size verification"
+        return 0
+    fi
+
+    # Get actual file size
+    local actual_size=$(stat -c%s "$file" 2>/dev/null || stat -f%z "$file" 2>/dev/null)
+
+    log "Expected size: $(numfmt --to=iec-i --suffix=B $expected_size 2>/dev/null || echo "$expected_size bytes")"
+    log "Actual size:   $(numfmt --to=iec-i --suffix=B $actual_size 2>/dev/null || echo "$actual_size bytes")"
+
+    # Allow 1% difference for potential metadata differences
+    local size_diff=$((expected_size - actual_size))
+    local size_diff_abs=${size_diff#-}
+    local threshold=$((expected_size / 100))
+
+    if [ "$size_diff_abs" -le "$threshold" ]; then
+        success "✓ File size verification PASSED"
+        return 0
     else
-        error "Unable to fetch the url"
+        error "✗ File size verification FAILED (difference: $size_diff_abs bytes)"
+        return 1
+    fi
+}
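+# For instance, for a roughly 11 GiB image (about 11,811,160,064 bytes) the 1% threshold
+# works out to roughly 118 MB, which absorbs minor header or metadata rounding differences.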
+
+# -------------------------------------------------------------------------
+# Validate URL for common issues
+# -------------------------------------------------------------------------
+function validate_url() {
+    local url="$1"
+
+    # Check for unescaped ampersands
+    if [[ "$url" =~ [^\\]\&[^\ ] ]]; then
+        warn "⚠ Warning: URL contains unescaped & characters"
+        warn "This may cause download issues. Consider escaping with \\& or using quotes"
+        warn "URL: $url"
+    fi
+
+    # Check if URL is accessible
+    if ! curl -sf --head "$url" >/dev/null 2>&1; then
+        warn "⚠ Warning: Unable to verify URL accessibility"
+        warn "This might indicate network issues or incorrect URL"
     fi
 }

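+# Example (illustrative URL only): pre-signed download links often contain '&', so they
+# must be quoted when passed on the command line, e.g.
+#   validate_url "https://example.com/rhcos.qcow2?user=abc&expires=1700000000"
+# An unquoted '&' would instead background the command at the shell.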
@@ -632,15 +787,15 @@ function download_image {
     if [[ "$1" == "rhel" ]]; then
         if echo $RHEL_URL | grep -q -i 'access.cdn.redhat.com'; then
             log "downloading rhel image"
-            download_url $RHEL_URL
+            download_url "$RHEL_URL" "$IMAGE_SHA256"
             RHEL_IMAGE=$IMAGE_PATH
             RHEL_DOWNLOADED_IMAGE_NAME=$DOWNLOAD_IMAGE_NAME
             RHEL_NEW_IMAGE_PATH=$IMAGE_NEW_PATH
             COPY_RHEL_IMAGE=1
         fi
     elif [[ "$1" == "rhcos" ]]; then
-        download_url $RHCOS_URL
-        RHCOS_IMAGE=IMAGE_PATH
+        download_url "$RHCOS_URL" "$IMAGE_SHA256"
+        RHCOS_IMAGE=$IMAGE_PATH
         RHCOS_DOWNLOAD_IMAGE_NAME=$DOWNLOAD_IMAGE_NAME
         copy_image_file $RHCOS_IMAGE $RHCOS_OBJECT_NAME
         RHCOS_NEW_IMAGE_PATH=$IMAGE_NEW_PATH
@@ -649,14 +804,69 @@ function download_image {
         warn "Unknown image"
     fi
 }
+function calc_sha256() {
+    local f="$1"
+    if command -v sha256sum >/dev/null 2>&1; then
+        sha256sum "$f" | awk '{print $1}'
+    elif command -v shasum >/dev/null 2>&1; then
+        shasum -a 256 "$f" | awk '{print $1}'
+    elif command -v openssl >/dev/null 2>&1; then
+        openssl dgst -sha256 "$f" | awk '{print $NF}'
+    else
+        error "No SHA-256 tool available (need sha256sum, shasum, or openssl)"
+    fi
+}
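+# Example (illustrative filename): calc_sha256 prints only the digest, so it can be
+# compared directly against a published value, e.g.
+#   calc_sha256 ./rhcos-latest.qcow2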
+
+
+function verify_sha256() {
+    local f="$1"
+    local expected="$2"
+
+    if [ -z "$expected" ]; then
+        warn "No checksum provided for $(basename "$f"), skipping verification"
+        return 0
+    fi
+
+    log "Verifying SHA256 checksum for $(basename "$f")..."
+
+    local actual
+    actual="$(calc_sha256 "$f")"
+
+    if [ -z "$actual" ]; then
+        error "Failed to calculate checksum for $f"
+        return 1
+    fi
+
+    local actual_lc=$(echo "$actual" | tr '[:upper:]' '[:lower:]')
+    local expected_lc=$(echo "$expected" | tr '[:upper:]' '[:lower:]')
+
+    log "Expected: $expected_lc"
+    log "Actual:   $actual_lc"
+
+    if [[ "$actual_lc" != "$expected_lc" ]]; then
+        error "SHA-256 checksum mismatch for $(basename "$f")"
+        error "Expected: $expected_lc"
+        error "Actual:   $actual_lc"
+        error "Possible causes:"
+        error "  - Incomplete download (network interruption)"
+        error "  - Corrupted file during transfer"
+        error "  - Incorrect URL (check for unescaped special characters like &)"
+        error "  - Wrong checksum value provided"
+        return 1
+    fi
+
+    success "✓ Checksum verification PASSED for $(basename "$f")"
+    return 0
+}
+

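+# Example usage (the path and variable are placeholders for whatever the caller holds):
+#   verify_sha256 "./rhcos-latest.qcow2" "$IMAGE_SHA256" || exit 1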
 function main {
     mkdir -p ./logs
     vars=""

     # Only use sudo if not running as root
     [ "$(id -u)" -ne 0 ] && SUDO=sudo || SUDO=""
-    platform_checks
+    # platform_checks

     # Parse commands and arguments
     while [[ $# -gt 0 ]]; do
@@ -702,6 +912,10 @@ function main {
         "--skip-os-password")
             SKIP_OS_PASSWORD="--skip-os-password"
             ;;
+        "--sha256")
+            shift
+            IMAGE_SHA256="$1"
+            ;;
         "--help")
             help
             ;;