|
4 | 4 | import requests |
5 | 5 | import tarfile |
6 | 6 | from osgeo import gdal |
| 7 | +from requests.adapters import HTTPAdapter |
| 8 | +from urllib3.util.retry import Retry |
7 | 9 |
|
8 | 10 | from rtc.runconfig import RunConfig, load_parameters |
9 | 11 | from rtc.core import create_logger |
@@ -149,28 +151,35 @@ def test_workflow(): |
149 | 151 | os.makedirs(test_data_directory, exist_ok=True) |
150 | 152 |
|
151 | 153 | dataset_name = 's1b_los_angeles' |
152 | | - dataset_url = ('https://zenodo.org/record/7753472/files/' |
| 154 | + dataset_url = ('https://zenodo.org/records/7753472/files/' |
153 | 155 | 's1b_los_angeles.tar.gz?download=1') |
154 | 156 |
|
155 | 157 | tests_dir = os.path.dirname(__file__) |
156 | 158 | dataset_dir = os.path.join(test_data_directory, dataset_name) |
157 | 159 | if FLAG_ALWAYS_DOWNLOAD or not os.path.isdir(dataset_dir): |
158 | 160 |
|
159 | | - |
160 | | - |
161 | 161 | print(f'Test dataset {dataset_name} not found. Downloading' |
162 | 162 | f' file {dataset_url}.') |
163 | | - response = requests.get(dataset_url) |
164 | | - response.raise_for_status() |
165 | | - |
166 | | - compressed_filename = os.path.join(test_data_directory, |
167 | | - os.path.basename(dataset_url)) |
168 | | - |
169 | | - open(compressed_filename, 'wb').write(response.content) |
170 | | - |
171 | | - print(f'Extracting downloaded file {compressed_filename}') |
172 | | - with tarfile.open(compressed_filename) as compressed_file: |
173 | | - compressed_file.extractall(test_data_directory) |
| 163 | + # Retry transient download failures so flaky network/server errors don't fail the test. |
| 164 | + session = requests.Session() |
| 165 | + retries = Retry( |
| 166 | + total=5, # up to 5 retries after the initial attempt |
| 167 | + backoff_factor=2, # 2 s, 4 s, 8 s, … |
| 168 | + status_forcelist=[502, 503, 504], |
| 169 | + ) |
| 170 | + session.mount("https://", HTTPAdapter(max_retries=retries)) |
| 171 | + |
| 172 | + compressed_path = os.path.join(test_data_directory, |
| 173 | + f"{dataset_name}.tar.gz") |
| 174 | + with session.get(dataset_url, stream=True, timeout=120) as r: |
| 175 | + r.raise_for_status() |
| 176 | + with open(compressed_path, "wb") as f: |
| 177 | + for chunk in r.iter_content(chunk_size=1024 * 1024): # 1 MB |
| 178 | + f.write(chunk) |
| 179 | + |
| 180 | + print(f"Extracting {compressed_path}") |
| 181 | + with tarfile.open(compressed_path, "r:gz") as tf: |
| 182 | + tf.extractall(test_data_directory) |
174 | 183 |
|
175 | 184 | # create logger |
176 | 185 | log_file = os.path.join('data', 'log.txt') |
|
0 commit comments