diff --git a/.gitignore b/.gitignore
index 9d2580c14..1f031da8e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,3 +45,4 @@ docs/_images
 docs/.doctrees
 oggm/version.py
 oggm/ignore
+ignore/
diff --git a/docs/whats-new.rst b/docs/whats-new.rst
index 56f7f0a25..a6d5724e4 100644
--- a/docs/whats-new.rst
+++ b/docs/whats-new.rst
@@ -51,6 +51,11 @@ Enhancements
   ``workflow.merge_gridded_data``. If no grid is provided, the default is
   to merge all grids of the provided gdirs (:pull:`1779`).
   By `Patrick Schmitt `_
+- ``area_min_h`` is now a default diagnostic variable output: the glacier
+  area computed over grid points above a minimum ice thickness, which
+  avoids spikes due to interannual snowfall variability (:pull:`1791`).
+  By `Fabien Maussion `_ and
+  `Patrick Schmitt `_.
 - Flowlines shapefiles output now have more attributes and are easier
   to use (:pull:`1786`).
   By `Fabien Maussion `_
@@ -62,6 +67,7 @@ Enhancements
   ``flux_divergence`` (:pull:`1792`).
   By `Patrick Schmitt `_
 
+
 Bug fixes
 ~~~~~~~~~
 
@@ -72,6 +78,7 @@ Bug fixes
   By `Dan Goldberg `_ and
   `Fabien Maussion `_.
 
+
 v1.6.2 (August 25, 2024)
 ------------------------
diff --git a/oggm/cli/prepro_levels.py b/oggm/cli/prepro_levels.py
index 66c543617..aeafafd47 100644
--- a/oggm/cli/prepro_levels.py
+++ b/oggm/cli/prepro_levels.py
@@ -12,6 +12,7 @@
 import time
 import logging
 import json
+import importlib
 import pandas as pd
 import numpy as np
 import geopandas as gpd
@@ -86,6 +87,8 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
                       add_millan_thickness=False, add_millan_velocity=False,
                       add_hugonnet_dhdt=False, add_bedmachine=False,
                       add_glathida=False,
+                      custom_climate_task=None,
+                      custom_climate_task_kwargs=None,
                       start_level=None, start_base_url=None, max_level=5,
                       logging_level='WORKFLOW',
                       dynamic_spinup=False, err_dmdtda_scaling_factor=0.2,
@@ -169,6 +172,12 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
     add_glathida : bool
         adds (reprojects) the glathida thickness data to the glacier
         directories. Data points are stored as csv files.
+    custom_climate_task : str
+        optional import path to a custom climate task in the form
+        "module_path:function_name". If provided, it is called instead of
+        the default process_climate_data.
+    custom_climate_task_kwargs : dict
+        optional kwargs passed to the custom climate task when it is executed.
     start_level : int
         the pre-processed level to start from (default is to start from
         scratch). If set, you'll need to indicate start_base_url as well.
@@ -629,7 +638,23 @@ def _time_log():
     utils.mkdir(sum_dir)
 
     # Climate
-    workflow.execute_entity_task(tasks.process_climate_data, gdirs)
+    climate_kwargs = custom_climate_task_kwargs or {}
+    if custom_climate_task:
+        try:
+            mod_path, func_name = custom_climate_task.rsplit(':', 1)
+        except ValueError:
+            raise InvalidParamsError('custom_climate_task must be of the form "module:function"')
+        try:
+            mod = importlib.import_module(mod_path)
+        except ModuleNotFoundError as err:
+            raise InvalidParamsError(f'Cannot import module {mod_path}') from err
+        try:
+            custom_task_func = getattr(mod, func_name)
+        except AttributeError as err:
+            raise InvalidParamsError(f'Module {mod_path} has no attribute {func_name}') from err
+        workflow.execute_entity_task(custom_task_func, gdirs, **climate_kwargs)
+    else:
+        workflow.execute_entity_task(tasks.process_climate_data, gdirs)
 
     # Small optim to avoid concurrency
     utils.get_geodetic_mb_dataframe()
@@ -946,6 +971,11 @@ def parse_args(args):
                         help='adds (reprojects) the glathida point thickness '
                              'observations to the glacier directories. '
                              'The data points are stored as csv.')
+    parser.add_argument('--custom-climate-task', type=str, default=None,
+                        help='Custom climate task import path in the form module:function. '
+                             'If provided, it replaces the default process_climate_data.')
+    parser.add_argument('--custom-climate-task-kwargs', type=json.loads, default=None,
+                        help='JSON dict of kwargs passed to the custom climate task.')
    parser.add_argument('--demo', nargs='?', const=True, default=False,
                        help='if you want to run the prepro for the '
                             'list of demo glaciers.')
@@ -1036,6 +1066,8 @@ def parse_args(args):
                 add_hugonnet_dhdt=args.add_hugonnet_dhdt,
                 add_bedmachine=args.add_bedmachine,
                 add_glathida=args.add_glathida,
+                custom_climate_task=args.custom_climate_task,
+                custom_climate_task_kwargs=args.custom_climate_task_kwargs,
                 dynamic_spinup=dynamic_spinup,
                 err_dmdtda_scaling_factor=args.err_dmdtda_scaling_factor,
                 dynamic_spinup_start_year=args.dynamic_spinup_start_year,
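A hedged usage sketch of the new hook: the module name, task name, and kwarg below are hypothetical (only the two CLI flags and the `module:function` convention come from the patch), and the `entity_task` decorator pattern is assumed to apply as for the built-in climate tasks:

```python
# my_climate.py -- hypothetical user module on the PYTHONPATH
import logging

from oggm import entity_task

log = logging.getLogger(__name__)


@entity_task(log, writes=['climate_historical'])
def process_my_climate(gdir, scale_factor=1.0):
    """Stand-in for a custom climate processing task (illustrative only)."""
    # ... read, transform and write the climate file for this gdir ...
    pass


# The prepro CLI would then be invoked along these lines:
#   oggm_prepro ... \
#       --custom-climate-task my_climate:process_my_climate \
#       --custom-climate-task-kwargs '{"scale_factor": 1.1}'
```

Note that `--custom-climate-task-kwargs` is parsed with `json.loads`, so the value must be valid JSON.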
diff --git a/oggm/core/dynamic_spinup.py b/oggm/core/dynamic_spinup.py
index 2243923e6..7c41a4f8a 100644
--- a/oggm/core/dynamic_spinup.py
+++ b/oggm/core/dynamic_spinup.py
@@ -138,7 +138,7 @@ def run_dynamic_spinup(gdir, init_model_filesuffix=None, init_model_yr=None,
         the forward model run. Therefore you could see quite fast changes
         (spikes) in the time-evolution (especially visible in length and
         area). If you set this value to 0 the filtering can be switched off.
-        Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
+        Default is cfg.PARAMS['min_ice_thick_for_area'].
     first_guess_t_spinup : float
         The initial guess for the temperature bias for the spinup
         MassBalanceModel in °C.
@@ -231,7 +231,7 @@
         yr_min = gdir.get_climate_info()['baseline_yr_0']
 
     if min_ice_thickness is None:
-        min_ice_thickness = cfg.PARAMS['dynamic_spinup_min_ice_thick']
+        min_ice_thickness = cfg.PARAMS['min_ice_thick_for_area']
 
     # check provided maximum start year here, and change min_spinup_period
     if spinup_start_yr_max is not None:
@@ -443,7 +443,7 @@
             geom_path=geom_path,
             diag_path=diag_path,
             fl_diag_path=fl_diag_path,
-            dynamic_spinup_min_ice_thick=min_ice_thickness,
+            min_ice_thick_for_area=min_ice_thickness,
             fixed_geometry_spinup_yr=fixed_geometry_spinup_yr,
             store_monthly_step=store_monthly_step,
         )
@@ -453,9 +453,9 @@
            if delete_area_min_h:
                ovars.remove('area_min_h')

-            if type(ds) == tuple:
+            if isinstance(ds, tuple):
                 ds = ds[0]
-            model_area_km2 = ds.area_m2_min_h.loc[target_yr].values * 1e-6
+            model_area_km2 = ds.area_min_h_m2.loc[target_yr].values * 1e-6
             model_volume_km3 = ds.volume_m3.loc[target_yr].values * 1e-9
         else:
             # only run to rgi date and extract values
@@ -1047,7 +1047,7 @@ def dynamic_melt_f_run_with_dynamic_spinup(
         the forward model run. Therefore you could see quite fast changes
         (spikes) in the time-evolution (especially visible in length and
         area). If you set this value to 0 the filtering can be switched off.
-        Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
+        Default is cfg.PARAMS['min_ice_thick_for_area'].
     first_guess_t_spinup : float
         The initial guess for the temperature bias for the spinup
         MassBalanceModel in °C.
@@ -1332,7 +1332,7 @@ def dynamic_melt_f_run_with_dynamic_spinup_fallback(
         the forward model run. Therefore you could see quite fast changes
         (spikes) in the time-evolution (especially visible in length and
         area). If you set this value to 0 the filtering can be switched off.
-        Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
+        Default is cfg.PARAMS['min_ice_thick_for_area'].
     first_guess_t_spinup : float
         The initial guess for the temperature bias for the spinup
         MassBalanceModel in °C.
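With the rename, the spike-filtered area used for matching now lives in `area_min_h_m2` in the model diagnostics. A minimal reading sketch, assuming a run has already been performed (the variable names come from the patch; the workflow around them is illustrative):

```python
import xarray as xr

# `gdir` is an initialized glacier directory with a completed model run
with xr.open_dataset(gdir.get_filepath('model_diagnostics')) as ds:
    ds = ds.load()

area_raw = ds['area_m2']           # total area, sensitive to thin-ice spikes
area_smooth = ds['area_min_h_m2']  # area over grid points above the thickness threshold
```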
diff --git a/oggm/core/flowline.py b/oggm/core/flowline.py
index b66e8b36d..bcd1f1779 100644
--- a/oggm/core/flowline.py
+++ b/oggm/core/flowline.py
@@ -926,7 +926,7 @@ def run_until_and_store(self, y1,
                             store_monthly_step=None,
                             stop_criterion=None,
                             fixed_geometry_spinup_yr=None,
-                            dynamic_spinup_min_ice_thick=None,
+                            min_ice_thick_for_area=None,
                             ):
         """Runs the model and returns intermediate steps in xarray datasets.
 
@@ -978,13 +978,13 @@
             starting from the chosen year. The only output affected are the
             glacier wide diagnostic files - all other outputs are set to
             constants during "spinup"
-        dynamic_spinup_min_ice_thick : float or None
+        min_ice_thick_for_area : float or None
             if set to a float, additional variables are saved which are
             useful in combination with the dynamic spinup. In particular
             only grid points with a minimum ice thickness are considered
             for the total area or the total volume. This is useful to
             smooth out yearly fluctuations when matching to observations.
             The names of this new
-            variables include the suffix _min_h (e.g. 'area_m2_min_h')
+            variables include the suffix _min_h (e.g. 'area_min_h_m2')
         Returns
         -------
@@ -1106,16 +1106,16 @@
             diag_ds['area_m2'].attrs['description'] = 'Total glacier area'
             diag_ds['area_m2'].attrs['unit'] = 'm 2'
 
-        if dynamic_spinup_min_ice_thick is None:
-            dynamic_spinup_min_ice_thick = cfg.PARAMS['dynamic_spinup_min_ice_thick']
+        if min_ice_thick_for_area is None:
+            min_ice_thick_for_area = cfg.PARAMS['min_ice_thick_for_area']
         if 'area_min_h' in ovars:
-            # filled with a value if dynamic_spinup_min_ice_thick is not None
-            diag_ds['area_m2_min_h'] = ('time', np.zeros(nm) * np.nan)
-            diag_ds['area_m2_min_h'].attrs['description'] = \
-                f'Total glacier area of gridpoints with a minimum ice' \
-                f'thickness of {dynamic_spinup_min_ice_thick} m'
-            diag_ds['area_m2_min_h'].attrs['unit'] = 'm 2'
+            # filled with a value if min_ice_thick_for_area is not None
+            diag_ds['area_min_h_m2'] = ('time', np.zeros(nm) * np.nan)
+            diag_ds['area_min_h_m2'].attrs['description'] = \
+                f'Total glacier area of gridpoints with a minimum ice ' \
+                f'thickness of {min_ice_thick_for_area} m'
+            diag_ds['area_min_h_m2'].attrs['unit'] = 'm 2'
 
         if 'length' in ovars:
             diag_ds['length_m'] = ('time', np.zeros(nm) * np.nan)
 
@@ -1381,8 +1381,8 @@
                 if 'volume_bwl' in ovars:
                     diag_ds['volume_bwl_m3'].data[i] = self.volume_bwl_m3
                 if 'area_min_h' in ovars:
-                    diag_ds['area_m2_min_h'].data[i] = np.sum([np.sum(
-                        fl.bin_area_m2[fl.thick > dynamic_spinup_min_ice_thick])
+                    diag_ds['area_min_h_m2'].data[i] = np.sum([np.sum(
+                        fl.bin_area_m2[fl.thick > min_ice_thick_for_area])
                         for fl in self.fls])
                 # Terminus thick is a bit more logic
                 ti = None
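A hedged sketch of the renamed keyword in action; `model` stands for an initialized flowline model and the threshold mirrors the new default:

```python
# `model` is an initialized flowline model (e.g. a SemiImplicitModel)
ds = model.run_until_and_store(
    2100,
    min_ice_thick_for_area=2.0,  # m, mirrors cfg.PARAMS['min_ice_thick_for_area']
)
# Depending on the requested outputs a tuple may be returned,
# as handled in the dynamic spinup code above
if isinstance(ds, tuple):
    ds = ds[0]
print(ds['area_min_h_m2'].attrs['description'])
```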
diff --git a/oggm/core/massbalance.py b/oggm/core/massbalance.py
index e2f654c21..0eafb8c76 100644
--- a/oggm/core/massbalance.py
+++ b/oggm/core/massbalance.py
@@ -1630,10 +1630,15 @@
     temp_bias = 0
     if cfg.PARAMS['use_temp_bias_from_file']:
         climinfo = gdir.get_climate_info()
-        if 'w5e5' not in climinfo['baseline_climate_source'].lower():
-            raise InvalidWorkflowError('use_temp_bias_from_file currently '
-                                       'only available for W5E5 data.')
-        bias_df = get_temp_bias_dataframe()
+        baseline_src = climinfo['baseline_climate_source'].lower()
+        if 'w5e5' in baseline_src:
+            bias_df = get_temp_bias_dataframe()
+        elif 'lmr_mira' in baseline_src:
+            bias_df = get_temp_bias_dataframe(dataset='lmr_mira')
+        else:
+            raise InvalidWorkflowError('use_temp_bias_from_file currently '
+                                       'only available for W5E5 or LMR MIRA data.')
+
         ref_lon = climinfo['baseline_climate_ref_pix_lon']
         ref_lat = climinfo['baseline_climate_ref_pix_lat']
         # Take nearest
diff --git a/oggm/params.cfg b/oggm/params.cfg
index eedc2e433..15ada118d 100644
--- a/oggm/params.cfg
+++ b/oggm/params.cfg
@@ -339,7 +339,7 @@ error_when_glacier_reaches_boundaries = True
 # some options to the user.
 # This option sets an arbitrary limit on how thick (m) a glacier should be
 # to be defined as "glacier" (https://github.com/OGGM/oggm/issues/914)
-min_ice_thick_for_length = 0
+min_ice_thick_for_length = 2
 # How to calculate the length of a glacier?
 # - 'naive' (the default) computes the length by summing the number of
 #   grid points with an ice thickness above min_ice_thick_for_length
@@ -361,8 +361,9 @@ use_inversion_params_for_run = True
 # Defines the minimum ice thickness which is used during the dynamic spinup to
 # match area or volume. Only grid points with a larger thickness are considered
 # to the total area. This is needed to filter out area changes due to climate
-# variability around the rgi year (spikes).
-dynamic_spinup_min_ice_thick = 2.
+# variability (spikes). The resulting diagnostic variable is stored in
+# 'area_min_h_m2' - the recommended area variable for most applications.
+min_ice_thick_for_area = 2.
 
 ### Tidewater glaciers options
 
@@ -430,7 +431,7 @@ store_model_geometry = False
 # melt_residual_off_glacier, melt_residual_on_glacier
 # model_mb, residual_mb, snow_bucket,
 # You need to keep all variables in one line unfortunately
-store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, length, calving, calving_rate, off_area, on_area, melt_off_glacier, melt_on_glacier, liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier
+store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, area_min_h, length, calving, calving_rate, off_area, on_area, melt_off_glacier, melt_on_glacier, liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier
 
 # Whether to store the model flowline diagnostic files during operational runs
 # This can be useful for advanced diagnostics along the flowlines but is
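Users who want the previous behaviour back can override the new defaults at runtime. A sketch, assuming the usual list parsing of `store_diagnostic_variables`:

```python
from oggm import cfg

cfg.initialize()
# Restore the pre-change thresholds if thin ice should still count
cfg.PARAMS['min_ice_thick_for_length'] = 0
cfg.PARAMS['min_ice_thick_for_area'] = 0.
# Or drop the new diagnostic from the stored variables
ovars = cfg.PARAMS['store_diagnostic_variables']
cfg.PARAMS['store_diagnostic_variables'] = [v for v in ovars if v != 'area_min_h']
```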
diff --git a/oggm/shop/gcm_climate.py b/oggm/shop/gcm_climate.py
index 701444979..75dd717d2 100644
--- a/oggm/shop/gcm_climate.py
+++ b/oggm/shop/gcm_climate.py
@@ -534,6 +534,7 @@ def process_lmr_data(gdir, fpath_temp=None, fpath_precip=None,
                      ensemble_member=None,
                      version='v2.1',
                      year_range=('1951', '1980'),
+                     variance_boost=False,
                      filesuffix='',
                      output_filesuffix='',
                      **kwargs):
@@ -560,6 +561,13 @@ def process_lmr_data(gdir, fpath_temp=None, fpath_precip=None,
     year_range : tuple of str
         the year range for which you want to compute the anomalies. Default
         for LMR is `('1951', '1980')`
+    variance_boost : bool
+        LMR data has very low variability, which makes it hard to bias
+        correct properly. This option adds realistic monthly variability by
+        sampling monthly anomalies from the reference period for each year
+        (matched by rank), resulting in a synthetic time series better
+        suited for bias correction. Note that this tends to over-estimate
+        interannual variability.
     ensemble_member : int
         the ensemble member to use (default is the ensemble mean). An
         integer between 0 and 19.
@@ -654,15 +662,83 @@
     t = np.cumsum([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] * len(temp))
     t = cftime.num2date(np.append([0], t[:-1]), units, calendar='noleap')
 
-    temp = xr.DataArray((loc_tmp.data + temp.data[:, np.newaxis]).flatten(),
-                        coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
-                        dims=('time',))
-
-    # For precip the std dev is very small - lets keep it as is for now but
-    # this is a bit ridiculous. We clip to zero here to be sure
-    precip = utils.clip_min((loc_pre.data + precip.data[:, np.newaxis]).flatten(), 0)
-    precip = xr.DataArray(precip, dims=('time',),
-                          coords={'time': t, 'lon': temp.lon, 'lat': temp.lat})
+    if variance_boost:
+        # 1. Compute reference monthly anomalies (subtract climatology from each month in ref period)
+        ref_monthly_anoms = ds_ref.temp.resample(time='YS').map(
+            lambda x: x.groupby('time.month').mean().values - loc_tmp.data
+        ).values  # shape: (N_ref_years * 12)
+
+        # 2. Compute annual means for reference and LMR
+        ref_annual_means = ds_ref.temp.resample(time='YS').mean().values  # (N_ref_years,)
+        lmr_annual_anoms = temp.data  # (N_years,)
+
+        # 3. Get ranks (as percentiles)
+        ref_ranks = np.argsort(np.argsort(ref_annual_means)) / (len(ref_annual_means) - 1)
+        lmr_ranks = np.argsort(np.argsort(lmr_annual_anoms)) / (len(lmr_annual_anoms) - 1)
+
+        # 4. For each LMR year, find the closest reference year in rank
+        monthly_series = []
+        for i, lmr_rank in enumerate(lmr_ranks):
+            ref_idx = np.argmin(np.abs(ref_ranks - lmr_rank))
+            ref_anoms = ref_monthly_anoms[ref_idx*12:ref_idx*12+12]
+            # Optionally demean to preserve the annual mean
+            # For some reason this doesn't have the intended outcome
+            # ref_anoms = ref_anoms - ref_anoms.mean()
+            year_months = loc_tmp.data + temp.data[i] + ref_anoms
+            monthly_series.append(year_months)
+
+        monthly_series = np.array(monthly_series).flatten()
+
+        # 5. Create the new DataArray with the correct time axis
+        temp = xr.DataArray(
+            monthly_series,
+            coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
+            dims=('time',)
+        )
+    else:
+        temp = xr.DataArray((loc_tmp.data + temp.data[:, np.newaxis]).flatten(),
+                            coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
+                            dims=('time',))
+
+    # Now precip
+    if variance_boost:
+        # 1. Compute reference monthly anomalies (here with the precip climatology loc_pre)
+        ref_monthly_anoms = ds_ref.prcp.resample(time='YS').map(
+            lambda x: x.groupby('time.month').mean().values - loc_pre.data
+        ).values  # shape: (N_ref_years * 12)
+
+        # 2. Compute annual means for reference and LMR
+        ref_annual_means = ds_ref.prcp.resample(time='YS').mean().values  # (N_ref_years,)
+        lmr_annual_anoms = precip.data  # (N_years,)
+
+        # 3. Get ranks (as percentiles)
+        ref_ranks = np.argsort(np.argsort(ref_annual_means)) / (len(ref_annual_means) - 1)
+        lmr_ranks = np.argsort(np.argsort(lmr_annual_anoms)) / (len(lmr_annual_anoms) - 1)
+
+        # 4. For each LMR year, find the closest reference year in rank
+        monthly_series = []
+        for i, lmr_rank in enumerate(lmr_ranks):
+            ref_idx = np.argmin(np.abs(ref_ranks - lmr_rank))
+            ref_anoms = ref_monthly_anoms[ref_idx*12:ref_idx*12+12]
+            # Optionally demean to preserve the annual mean
+            # For some reason this doesn't have the intended outcome
+            # ref_anoms = ref_anoms - ref_anoms.mean()
+            year_months = loc_pre.data + precip.data[i] + ref_anoms
+            monthly_series.append(year_months)
+
+        # Clip to zero - additive anomalies can produce negative precip
+        monthly_series = utils.clip_min(np.array(monthly_series).flatten(), 0)
+        precip = xr.DataArray(
+            monthly_series,
+            dims=('time',),
+            coords={'time': t, 'lon': temp.lon, 'lat': temp.lat}
+        )
+    else:
+        # For precip the std dev is very small - lets keep it as is for now but
+        # this is a bit ridiculous. We clip to zero here to be sure
+        precip = utils.clip_min((loc_pre.data + precip.data[:, np.newaxis]).flatten(), 0)
+        precip = xr.DataArray(precip, dims=('time',),
+                              coords={'time': t, 'lon': temp.lon, 'lat': temp.lat})
 
     process_gcm_data(gdir, output_filesuffix=output_filesuffix,
                      prcp=precip, temp=temp,
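The rank matching at the core of `variance_boost` is easiest to see in isolation. A self-contained sketch with synthetic data (all names are local to this example):

```python
import numpy as np

rng = np.random.default_rng(42)
ref_annual = rng.normal(0.0, 1.0, 30)   # e.g. 30 reference years, strong variability
lmr_annual = rng.normal(0.0, 0.3, 200)  # LMR-like series with damped variability

# Ranks as percentiles in [0, 1]
ref_ranks = np.argsort(np.argsort(ref_annual)) / (len(ref_annual) - 1)
lmr_ranks = np.argsort(np.argsort(lmr_annual)) / (len(lmr_annual) - 1)

# Each LMR year borrows the monthly anomalies of the reference year closest
# in rank, so the cold/warm year ordering is preserved while the intra-annual
# variability comes from the (more variable) reference data
matched_ref_idx = np.array([np.argmin(np.abs(ref_ranks - r)) for r in lmr_ranks])
```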
diff --git a/oggm/tests/conftest.py b/oggm/tests/conftest.py
index 9912efa6e..e2662d7af 100644
--- a/oggm/tests/conftest.py
+++ b/oggm/tests/conftest.py
@@ -190,7 +190,8 @@ def secure_url_retrieve(url, *args, **kwargs):
             base_extra_v14.format('L1') in url or
             base_extra_v14.format('L2') in url or
             base_extra_v14l3.format('L3') in url or
-            base_extra_l3 in url
+            base_extra_l3 in url or
+            'lmr' in url
             )
     return oggm_urlretrieve(url, *args, **kwargs)
diff --git a/oggm/tests/test_prepro.py b/oggm/tests/test_prepro.py
index 6668f7cf3..0a84e604e 100644
--- a/oggm/tests/test_prepro.py
+++ b/oggm/tests/test_prepro.py
@@ -1433,6 +1433,9 @@ def test_mb_calibration_from_scalar_mb(self):
         centerlines.fixed_dx_elevation_band_flowline(gdir)
         climate.process_custom_climate_data(gdir)
 
+        with xr.open_dataset(gdir.get_filepath('climate_historical')) as dsc:
+            dsc = dsc.load()
+
         mbdf = gdir.get_ref_mb_data()
         mbdf['ref_mb'] = mbdf['ANNUAL_BALANCE']
         ref_mb = mbdf.ANNUAL_BALANCE.mean()
@@ -3030,6 +3033,7 @@ def test_process_cmip_no_hydromonths(self):
         np.testing.assert_allclose(scru.temp, scesm.temp, rtol=1e-3)
         np.testing.assert_allclose(scru.prcp, scesm.prcp, rtol=1e-3)
 
+    @pytest.mark.slow
     def test_process_lmr(self):
 
         hef_file = get_demo_file('Hintereisferner_RGI5.shp')
@@ -3046,7 +3050,7 @@ def test_process_lmr(self):
         fpath_temp = get_demo_file('air_MCruns_ensemble_mean_LMRv2.1.nc')
         fpath_precip = get_demo_file('prate_MCruns_ensemble_mean_LMRv2.1.nc')
 
-        for ensemble_member in [None, 0]:
+        for ensemble_member, boost in zip([None, 0], [False, True]):
 
             fs = '_CCSM4'
             if ensemble_member is not None:
@@ -3056,6 +3060,7 @@ def test_process_lmr(self):
                                          ensemble_member=ensemble_member,
                                          fpath_temp=fpath_temp,
                                          fpath_precip=fpath_precip,
+                                         variance_boost=boost,
                                          output_filesuffix=fs)
 
             fh = gdir.get_filepath('climate_historical')
@@ -3077,7 +3082,8 @@ def test_process_lmr(self):
             # is preserved over 31 years
             _scru = scru.groupby('time.month').std(dim='time')
             _scesm = scesm.groupby('time.month').std(dim='time')
-            np.testing.assert_allclose(_scru.temp, _scesm.temp, rtol=0.2)
+            rtol = 0.08 if boost else 0.11
+            np.testing.assert_allclose(_scru.temp, _scesm.temp, rtol=rtol)
 
             # And also the annual cycle
             scru = scru.groupby('time.month').mean(dim='time')
diff --git a/oggm/tests/test_utils.py b/oggm/tests/test_utils.py
index 5150d9cf8..861f95506 100644
--- a/oggm/tests/test_utils.py
+++ b/oggm/tests/test_utils.py
@@ -575,13 +575,13 @@ def remove_diag_var(variable):
                                      input_filesuffix=filesuffix)
 
         def check_result(ds):
-            assert 'area_m2_min_h' in ds.data_vars
+            assert 'area_min_h' in ds.data_vars
             assert 'melt_on_glacier' in ds.data_vars
             assert 'melt_on_glacier_monthly' in ds.data_vars
             assert ds_1['melt_on_glacier'].unit == 'kg yr-1'
             assert ds_1['melt_on_glacier_monthly'].unit == 'kg yr-1'
             assert np.all(np.isnan(
-                ds.loc[{'rgi_id': gdirs[0].rgi_id}]['area_m2_min_h'].values))
+                ds.loc[{'rgi_id': gdirs[0].rgi_id}]['area_min_h'].values))
             assert np.all(np.isnan(
                 ds.loc[{'rgi_id': gdirs[0].rgi_id}]['melt_on_glacier'].values))
             assert np.all(np.isnan(
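Mirroring the test above, a hedged sketch of enabling the boost in a regular workflow (`gdir` and the demo file paths are assumed to be available as in the test):

```python
from oggm.shop import gcm_climate

# `gdir` is a glacier directory with a processed baseline climate
gcm_climate.process_lmr_data(
    gdir,
    fpath_temp='air_MCruns_ensemble_mean_LMRv2.1.nc',
    fpath_precip='prate_MCruns_ensemble_mean_LMRv2.1.nc',
    variance_boost=True,
    output_filesuffix='_LMR_boost',
)
```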
diff --git a/oggm/utils/_downloads.py b/oggm/utils/_downloads.py
index 728f7d8f6..4b40af722 100644
--- a/oggm/utils/_downloads.py
+++ b/oggm/utils/_downloads.py
@@ -1333,12 +1333,15 @@ def get_temp_bias_dataframe(dataset='w5e5'):
         a DataFrame with the data.
     """
 
-    if dataset != 'w5e5':
-        raise NotImplementedError(f'No such dataset available yet: {dataset}')
-
     # fetch the file online
-    base_url = ('https://cluster.klima.uni-bremen.de/~oggm/ref_mb_params/oggm_v1.6/'
-                'w5e5_temp_bias_v2023.4.csv')
+    if dataset == 'w5e5':
+        base_url = ('https://cluster.klima.uni-bremen.de/~oggm/ref_mb_params/oggm_v1.6/'
+                    'w5e5_temp_bias_v2023.4.csv')
+    elif dataset == 'lmr_mira':
+        base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/calibration/'
+                    'exps/lmr_mira_alps/mira_temp_bias_v2025.1.csv')
+    else:
+        raise NotImplementedError(f'No such dataset available yet: {dataset}')
 
     file_path = file_downloader(base_url)
 
diff --git a/oggm/utils/_workflow.py b/oggm/utils/_workflow.py
index da92eb9b8..1f225ea77 100644
--- a/oggm/utils/_workflow.py
+++ b/oggm/utils/_workflow.py
@@ -1135,7 +1135,7 @@ def compile_run_output(gdirs, path=True, input_filesuffix='',
     allowed_data_vars = ['volume_m3', 'volume_bsl_m3', 'volume_bwl_m3',
                          'volume_m3_min_h',  # only here for back compatibility
                                              # as it is a variable in gdirs v1.6 2023.1
-                         'area_m2', 'area_m2_min_h', 'length_m', 'calving_m3',
+                         'area_m2', 'area_min_h_m2', 'length_m', 'calving_m3',
                          'calving_rate_myr', 'off_area', 'on_area', 'model_mb',
                          'is_fixed_geometry_spinup']
     for gi in range(10):
@@ -2403,7 +2403,7 @@ def extend_past_climate_run(past_run_file=None,
 
     # New vars
     for vn in ['volume', 'volume_m3_min_h', 'volume_bsl', 'volume_bwl',
-               'area', 'area_m2_min_h', 'length', 'calving', 'calving_rate']:
+               'area', 'area_min_h', 'length', 'calving', 'calving_rate']:
         if vn in ods.data_vars:
             ods[vn + '_ext'] = ods[vn].copy(deep=True)
             ods[vn + '_ext'].attrs['description'] += ' (extended with MB data)'
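A usage sketch of the extended helper, assuming it is importable from `oggm.utils` as elsewhere in the code base:

```python
from oggm.utils import get_temp_bias_dataframe

df_w5e5 = get_temp_bias_dataframe()                   # default, unchanged
df_lmr = get_temp_bias_dataframe(dataset='lmr_mira')  # option added above
```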