1 change: 1 addition & 0 deletions .gitignore
@@ -45,3 +45,4 @@ docs/_images
docs/.doctrees
oggm/version.py
oggm/ignore
ignore/
7 changes: 7 additions & 0 deletions docs/whats-new.rst
@@ -51,6 +51,11 @@ Enhancements
``workflow.merge_gridded_data``. If no grid is provided, the default is to
merge all grids of the provided gdirs (:pull:`1779`).
By `Patrick Schmitt <https://github.com/pat-schmitt>`_
- ``area_min_h`` is now a default diagnostic variable output: this is the
glacier area computed only over grid points above a minimum ice thickness,
which avoids spikes due to interannual variability in snowfall
(:pull:`1791`).
By `Fabien Maussion <https://github.com/fmaussion>`_ and
`Patrick Schmitt <https://github.com/pat-schmitt>`_.
- Flowlines shapefiles output now have more attributes and are easier to
use (:pull:`1786`).
By `Fabien Maussion <https://github.com/fmaussion>`_
@@ -62,6 +67,7 @@ Enhancements
``flux_divergence`` (:pull:`1792`).
By `Patrick Schmitt <https://github.com/pat-schmitt>`_


Bug fixes
~~~~~~~~~

@@ -72,6 +78,7 @@ Bug fixes
By `Dan Goldberg <https://github.com/dngoldberg>`_ and
`Fabien Maussion <https://github.com/fmaussion>`_.


v1.6.2 (August 25, 2024)
------------------------

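
The entry above in context: a short usage sketch (hypothetical - it assumes a
processed glacier directory `gdir` and a finished model run; variable names
follow the diffs below):

    import xarray as xr

    with xr.open_dataset(gdir.get_filepath('model_diagnostics')) as ds:
        ds = ds.load()

    # 'area_m2' counts every grid point with ice; 'area_min_h_m2' counts only
    # those thicker than cfg.PARAMS['min_ice_thick_for_area'] (2 m by default),
    # which smooths out spikes from interannual snowfall variability
    raw_area = ds['area_m2']
    smoothed_area = ds['area_min_h_m2']
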
14 changes: 7 additions & 7 deletions oggm/core/dynamic_spinup.py
@@ -138,7 +138,7 @@ def run_dynamic_spinup(gdir, init_model_filesuffix=None, init_model_yr=None,
the forward model run. Therefore you may see quite rapid changes
(spikes) in the time evolution (especially visible in length and area).
Setting this value to 0 switches the filtering off.
Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
Default is cfg.PARAMS['min_ice_thick_for_area'].
first_guess_t_spinup : float
The initial guess for the temperature bias for the spinup
MassBalanceModel in °C.
@@ -231,7 +231,7 @@ def run_dynamic_spinup(gdir, init_model_filesuffix=None, init_model_yr=None,
yr_min = gdir.get_climate_info()['baseline_yr_0']

if min_ice_thickness is None:
min_ice_thickness = cfg.PARAMS['dynamic_spinup_min_ice_thick']
min_ice_thickness = cfg.PARAMS['min_ice_thick_for_area']

# check provided maximum start year here, and change min_spinup_period
if spinup_start_yr_max is not None:
@@ -443,7 +443,7 @@ def run_model_with_spinup_to_target_year(t_spinup):
geom_path=geom_path,
diag_path=diag_path,
fl_diag_path=fl_diag_path,
dynamic_spinup_min_ice_thick=min_ice_thickness,
min_ice_thick_for_area=min_ice_thickness,
fixed_geometry_spinup_yr=fixed_geometry_spinup_yr,
store_monthly_step=store_monthly_step,
)
@@ -453,9 +453,9 @@ def run_model_with_spinup_to_target_year(t_spinup):
if delete_area_min_h:
ovars.remove('area_min_h')

if type(ds) == tuple:
if isinstance(ds, tuple):
ds = ds[0]
model_area_km2 = ds.area_m2_min_h.loc[target_yr].values * 1e-6
model_area_km2 = ds.area_min_h_m2.loc[target_yr].values * 1e-6
model_volume_km3 = ds.volume_m3.loc[target_yr].values * 1e-9
else:
# only run to rgi date and extract values
@@ -1047,7 +1047,7 @@ def dynamic_melt_f_run_with_dynamic_spinup(
the forward model run. Therefore you may see quite rapid changes
(spikes) in the time evolution (especially visible in length and area).
Setting this value to 0 switches the filtering off.
Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
Default is cfg.PARAMS['min_ice_thick_for_area'].
first_guess_t_spinup : float
The initial guess for the temperature bias for the spinup
MassBalanceModel in °C.
@@ -1332,7 +1332,7 @@ def dynamic_melt_f_run_with_dynamic_spinup_fallback(
the forward model run. Therefore you may see quite rapid changes
(spikes) in the time evolution (especially visible in length and area).
Setting this value to 0 switches the filtering off.
Default is cfg.PARAMS['dynamic_spinup_min_ice_thick'].
Default is cfg.PARAMS['min_ice_thick_for_area'].
first_guess_t_spinup : float
The initial guess for the temperature bias for the spinup
MassBalanceModel in °C.
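
A minimal calling sketch for the renamed default (assumes a pre-processed
`gdir`; leaving `min_ice_thickness=None` now falls back to
cfg.PARAMS['min_ice_thick_for_area']):

    from oggm.core.dynamic_spinup import run_dynamic_spinup

    # Match the RGI area using the thickness-filtered diagnostic;
    # min_ice_thickness=0 would switch the filtering off
    run_dynamic_spinup(gdir, minimise_for='area',
                       output_filesuffix='_spinup_dynamic')
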
26 changes: 13 additions & 13 deletions oggm/core/flowline.py
@@ -926,7 +926,7 @@ def run_until_and_store(self, y1,
store_monthly_step=None,
stop_criterion=None,
fixed_geometry_spinup_yr=None,
dynamic_spinup_min_ice_thick=None,
min_ice_thick_for_area=None,
):
"""Runs the model and returns intermediate steps in xarray datasets.

@@ -978,13 +978,13 @@ def run_until_and_store(self, y1,
starting from the chosen year. The only output affected are the
glacier wide diagnostic files - all other outputs are set
to constants during "spinup"
dynamic_spinup_min_ice_thick : float or None
min_ice_thick_for_area : float or None
if set to a float, additional variables are saved which are useful
in combination with the dynamic spinup. In particular only grid
points with a minimum ice thickness are considered for the total
area or the total volume. This is useful to smooth out yearly
fluctuations when matching to observations. The names of these new
variables include the suffix _min_h (e.g. 'area_m2_min_h')
variables include the suffix _min_h (e.g. 'area_min_h_m2')

Returns
-------
@@ -1106,16 +1106,16 @@ def run_until_and_store(self, y1,
diag_ds['area_m2'].attrs['description'] = 'Total glacier area'
diag_ds['area_m2'].attrs['unit'] = 'm 2'

if dynamic_spinup_min_ice_thick is None:
dynamic_spinup_min_ice_thick = cfg.PARAMS['dynamic_spinup_min_ice_thick']
if min_ice_thick_for_area is None:
min_ice_thick_for_area = cfg.PARAMS['min_ice_thick_for_area']

if 'area_min_h' in ovars:
# filled with a value if dynamic_spinup_min_ice_thick is not None
diag_ds['area_m2_min_h'] = ('time', np.zeros(nm) * np.nan)
diag_ds['area_m2_min_h'].attrs['description'] = \
f'Total glacier area of gridpoints with a minimum ice' \
f'thickness of {dynamic_spinup_min_ice_thick} m'
diag_ds['area_m2_min_h'].attrs['unit'] = 'm 2'
# filled with a value if min_ice_thick_for_area is not None
diag_ds['area_min_h_m2'] = ('time', np.zeros(nm) * np.nan)
diag_ds['area_min_h_m2'].attrs['description'] = \
f'Total glacier area of gridpoints with a minimum ice ' \
f'thickness of {min_ice_thick_for_area} m'
diag_ds['area_min_h_m2'].attrs['unit'] = 'm 2'

if 'length' in ovars:
diag_ds['length_m'] = ('time', np.zeros(nm) * np.nan)
@@ -1381,8 +1381,8 @@ def run_until_and_store(self, y1,
if 'volume_bwl' in ovars:
diag_ds['volume_bwl_m3'].data[i] = self.volume_bwl_m3
if 'area_min_h' in ovars:
diag_ds['area_m2_min_h'].data[i] = np.sum([np.sum(
fl.bin_area_m2[fl.thick > dynamic_spinup_min_ice_thick])
diag_ds['area_min_h_m2'].data[i] = np.sum([np.sum(
fl.bin_area_m2[fl.thick > min_ice_thick_for_area])
for fl in self.fls])
# Terminus thick is a bit more logic
ti = None
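
The new diagnostic boils down to a thickness-filtered sum per flowline,
sketched here in isolation (assumes `fls` is a list of flowline objects with
`bin_area_m2` and `thick` arrays, as in the loop above):

    import numpy as np

    def thickness_filtered_area(fls, min_thick=2.0):
        # Sum bin areas only where the ice is thicker than the threshold,
        # so grid points with thin, often ephemeral ice are ignored
        return np.sum([np.sum(fl.bin_area_m2[fl.thick > min_thick])
                       for fl in fls])
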
6 changes: 3 additions & 3 deletions oggm/core/massbalance.py
@@ -1630,9 +1630,9 @@ def mb_calibration_from_geodetic_mb(gdir, *,
temp_bias = 0
if cfg.PARAMS['use_temp_bias_from_file']:
climinfo = gdir.get_climate_info()
if 'w5e5' not in climinfo['baseline_climate_source'].lower():
raise InvalidWorkflowError('use_temp_bias_from_file currently '
'only available for W5E5 data.')
# if 'w5e5' not in climinfo['baseline_climate_source'].lower():
# raise InvalidWorkflowError('use_temp_bias_from_file currently '
# 'only available for W5E5 data.')
bias_df = get_temp_bias_dataframe()
ref_lon = climinfo['baseline_climate_ref_pix_lon']
ref_lat = climinfo['baseline_climate_ref_pix_lat']
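
With the guard commented out, any baseline climate may now look up a
pre-computed temperature bias. A nearest-pixel lookup in the spirit of the
code above could look like this (the column names `lon_val`, `lat_val` and
`temp_bias` are assumptions for illustration, not a confirmed schema):

    def lookup_temp_bias(bias_df, ref_lon, ref_lat):
        # Pick the row whose grid point is closest to the reference pixel
        dist = ((bias_df.lon_val - ref_lon) ** 2 +
                (bias_df.lat_val - ref_lat) ** 2)
        return bias_df.loc[dist.idxmin(), 'temp_bias']
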
9 changes: 5 additions & 4 deletions oggm/params.cfg
@@ -339,7 +339,7 @@ error_when_glacier_reaches_boundaries = True
# some options to the user.
# This option sets an arbitrary limit on how thick (m) a glacier should be
# to be defined as "glacier" (https://github.com/OGGM/oggm/issues/914)
min_ice_thick_for_length = 0
min_ice_thick_for_length = 2
# How to calculate the length of a glacier?
# - 'naive' (the default) computes the length by summing the number of
# grid points with an ice thickness above min_ice_thick_for_length
@@ -361,8 +361,9 @@ use_inversion_params_for_run = True
# Defines the minimum ice thickness which is used during the dynamic spinup to
# match area or volume. Only grid points with a larger thickness are considered
# to the total area. This is needed to filter out area changes due to climate
# variability around the rgi year (spikes).
dynamic_spinup_min_ice_thick = 2.
# variability (spikes). The resulting diagnostic variable is stored in
# 'area_min_h_m2' - the recommended area variable for most applications.
min_ice_thick_for_area = 2.

### Tidewater glaciers options

@@ -430,7 +431,7 @@ store_model_geometry = False
# melt_residual_off_glacier, melt_residual_on_glacier
# model_mb, residual_mb, snow_bucket,
# You need to keep all variables in one line unfortunately
store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, length, calving, calving_rate, off_area, on_area, melt_off_glacier, melt_on_glacier, liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier
store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, area_min_h, length, calving, calving_rate, off_area, on_area, melt_off_glacier, melt_on_glacier, liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier

# Whether to store the model flowline diagnostic files during operational runs
# This can be useful for advanced diagnostics along the flowlines but is
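
How the renamed parameter is set from Python (a sketch; the cfg calls are
standard OGGM usage, the value is in metres):

    from oggm import cfg

    cfg.initialize()
    # Formerly cfg.PARAMS['dynamic_spinup_min_ice_thick']
    cfg.PARAMS['min_ice_thick_for_area'] = 2.0
    # 'area_min_h' must be listed in store_diagnostic_variables (it now is
    # by default) for 'area_min_h_m2' to appear in the output files
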
94 changes: 85 additions & 9 deletions oggm/shop/gcm_climate.py
@@ -534,6 +534,7 @@ def process_lmr_data(gdir, fpath_temp=None, fpath_precip=None,
ensemble_member=None,
version='v2.1',
year_range=('1951', '1980'),
variance_boost=False,
filesuffix='',
output_filesuffix='',
**kwargs):
@@ -560,6 +561,13 @@ def process_lmr_data(gdir, fpath_temp=None, fpath_precip=None,
year_range : tuple of str
the year range for which you want to compute the anomalies. Default
for LMR is `('1951', '1980')`
variance_boost : bool
LMR data has very low variability, which makes it hard to bias
correct properly. This option adds realistic monthly variability by
sampling monthly anomalies from the reference period for each year,
resulting in a synthetic time series with more realistic intra-annual
variability suitable for bias correction. Note that this tends to
over-estimate interannual variability.
ensemble_member : int
the ensemble member to use (default is the ensemble mean). An
integer between 0 and 19.
@@ -654,15 +662,83 @@ def process_lmr_data(gdir, fpath_temp=None, fpath_precip=None,
t = np.cumsum([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] * len(temp))
t = cftime.num2date(np.append([0], t[:-1]), units, calendar='noleap')

temp = xr.DataArray((loc_tmp.data + temp.data[:, np.newaxis]).flatten(),
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
dims=('time',))

# For precip the std dev is very small - lets keep it as is for now but
# this is a bit ridiculous. We clip to zero here to be sure
precip = utils.clip_min((loc_pre.data + precip.data[:, np.newaxis]).flatten(), 0)
precip = xr.DataArray(precip, dims=('time',),
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat})
if variance_boost:
# 1. Compute reference monthly anomalies (subtract climatology from each month in ref period)
ref_monthly_anoms = ds_ref.temp.resample(time='YS').map(
lambda x: x.groupby('time.month').mean().values - loc_tmp.data
).values # shape: (N_ref_years * 12)

# 2. Compute annual means for reference and LMR
ref_annual_means = ds_ref.temp.resample(time='YS').mean().values # (N_ref_years,)
lmr_annual_anoms = temp.data # (N_years,)

# 3. Get ranks (as percentiles)
ref_ranks = np.argsort(np.argsort(ref_annual_means)) / (len(ref_annual_means) - 1)
lmr_ranks = np.argsort(np.argsort(lmr_annual_anoms)) / (len(lmr_annual_anoms) - 1)

# 4. For each LMR year, find the closest reference year in rank
monthly_series = []
for i, lmr_rank in enumerate(lmr_ranks):
ref_idx = np.argmin(np.abs(ref_ranks - lmr_rank))
ref_anoms = ref_monthly_anoms[ref_idx*12:ref_idx*12+12]
# Optionally demean to preserve annual mean
# For some reason this doesn't have the intended outcome
# ref_anoms = ref_anoms - ref_anoms.mean()
year_months = loc_tmp.data + temp.data[i] + ref_anoms
monthly_series.append(year_months)

monthly_series = np.array(monthly_series).flatten()

# 5. Create the new DataArray with the correct time axis
temp = xr.DataArray(
monthly_series,
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
dims=('time',)
)
else:
temp = xr.DataArray((loc_tmp.data + temp.data[:, np.newaxis]).flatten(),
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat},
dims=('time',))

# Now precip
if variance_boost:
# 1. Compute reference monthly anomalies (subtract climatology from each month in ref period)
ref_monthly_anoms = ds_ref.prcp.resample(time='YS').map(
lambda x: x.groupby('time.month').mean().values - loc_pre.data
).values # shape: (N_ref_years * 12)

# 2. Compute annual means for reference and LMR
ref_annual_means = ds_ref.prcp.resample(time='YS').mean().values # (N_ref_years,)
lmr_annual_anoms = precip.data # (N_years,)

# 3. Get ranks (as percentiles)
ref_ranks = np.argsort(np.argsort(ref_annual_means)) / (len(ref_annual_means) - 1)
lmr_ranks = np.argsort(np.argsort(lmr_annual_anoms)) / (len(lmr_annual_anoms) - 1)

# 4. For each LMR year, find the closest reference year in rank
monthly_series = []
for i, lmr_rank in enumerate(lmr_ranks):
ref_idx = np.argmin(np.abs(ref_ranks - lmr_rank))
ref_anoms = ref_monthly_anoms[ref_idx*12:ref_idx*12+12]
# Optionally demean to preserve annual mean
# For some reason this doesn't have the intended outcome
# ref_anoms = ref_anoms - ref_anoms.mean()
year_months = loc_pre.data + precip.data[i] + ref_anoms
monthly_series.append(year_months)

# Clip to zero as in the non-boosted branch - added anomalies can
# push precipitation below zero
monthly_series = utils.clip_min(np.array(monthly_series).flatten(), 0)

precip = xr.DataArray(
monthly_series,
dims=('time',),
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat}
)
else:
# For precip the std dev is very small - let's keep it as is for now but
# this is a bit ridiculous. We clip to zero here to be sure
precip = utils.clip_min((loc_pre.data + precip.data[:, np.newaxis]).flatten(), 0)
precip = xr.DataArray(precip, dims=('time',),
coords={'time': t, 'lon': temp.lon, 'lat': temp.lat})

process_gcm_data(gdir, output_filesuffix=output_filesuffix,
prcp=precip, temp=temp,
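
The variance boost rests on a rank-matching idea: each reconstructed year
borrows the monthly anomaly shape of the reference year with the closest
rank. A self-contained sketch with synthetic data (illustration only, not
the module's code):

    import numpy as np

    rng = np.random.default_rng(0)
    ref_annual = rng.normal(0, 1, 30)           # 30 reference years
    ref_monthly = rng.normal(0, 2, (30, 12))    # their monthly anomalies
    lmr_annual = rng.normal(0, 0.3, 500)        # low-variance LMR years

    # Ranks scaled to [0, 1] so the two records are comparable
    ref_ranks = np.argsort(np.argsort(ref_annual)) / (len(ref_annual) - 1)
    lmr_ranks = np.argsort(np.argsort(lmr_annual)) / (len(lmr_annual) - 1)

    boosted = []
    for anom, rank in zip(lmr_annual, lmr_ranks):
        ref_idx = np.argmin(np.abs(ref_ranks - rank))
        # Annual LMR anomaly plus the monthly shape of the matched year
        boosted.append(anom + ref_monthly[ref_idx])
    boosted = np.concatenate(boosted)  # monthly series, 500 * 12 values
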
3 changes: 2 additions & 1 deletion oggm/tests/conftest.py
@@ -190,7 +190,8 @@ def secure_url_retrieve(url, *args, **kwargs):
base_extra_v14.format('L1') in url or
base_extra_v14.format('L2') in url or
base_extra_v14l3.format('L3') in url or
base_extra_l3 in url
base_extra_l3 in url or
'lmr' in url
)
return oggm_urlretrieve(url, *args, **kwargs)

16 changes: 14 additions & 2 deletions oggm/tests/test_prepro.py
@@ -1433,6 +1433,15 @@ def test_mb_calibration_from_scalar_mb(self):
centerlines.fixed_dx_elevation_band_flowline(gdir)
climate.process_custom_climate_data(gdir)

from oggm.ignore.lmr_forcing import process_mira_data
process_mira_data(gdir, filesuffix='mira')

with xr.open_dataset(gdir.get_filepath('climate_historical')) as dsc:
dsc = dsc.load()

with xr.open_dataset(gdir.get_filepath('climate_historical', filesuffix='mira')) as dse:
dse = dse.load()

mbdf = gdir.get_ref_mb_data()
mbdf['ref_mb'] = mbdf['ANNUAL_BALANCE']
ref_mb = mbdf.ANNUAL_BALANCE.mean()
@@ -3030,6 +3039,7 @@ def test_process_cmip_no_hydromonths(self):
np.testing.assert_allclose(scru.temp, scesm.temp, rtol=1e-3)
np.testing.assert_allclose(scru.prcp, scesm.prcp, rtol=1e-3)

@pytest.mark.slow
def test_process_lmr(self):

hef_file = get_demo_file('Hintereisferner_RGI5.shp')
@@ -3046,7 +3056,7 @@ def test_process_lmr(self):
fpath_temp = get_demo_file('air_MCruns_ensemble_mean_LMRv2.1.nc')
fpath_precip = get_demo_file('prate_MCruns_ensemble_mean_LMRv2.1.nc')

for ensemble_member in [None, 0]:
for ensemble_member, boost in zip([None, 0], [False, True]):

fs = '_CCSM4'
if ensemble_member is not None:
@@ -3056,6 +3066,7 @@ def test_process_lmr(self):
ensemble_member=ensemble_member,
fpath_temp=fpath_temp,
fpath_precip=fpath_precip,
variance_boost=boost,
output_filesuffix=fs)

fh = gdir.get_filepath('climate_historical')
@@ -3077,7 +3088,8 @@
# is preserved over 31 years
_scru = scru.groupby('time.month').std(dim='time')
_scesm = scesm.groupby('time.month').std(dim='time')
np.testing.assert_allclose(_scru.temp, _scesm.temp, rtol=0.2)
rtol = 0.08 if boost else 0.11
np.testing.assert_allclose(_scru.temp, _scesm.temp, rtol=rtol)

# And also the annual cycle
scru = scru.groupby('time.month').mean(dim='time')
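
Outside the test suite the new option is switched on the same way (a usage
sketch; `gdir` and the file paths are the demo values from the test above):

    from oggm.shop import gcm_climate

    gcm_climate.process_lmr_data(gdir,
                                 fpath_temp=fpath_temp,
                                 fpath_precip=fpath_precip,
                                 variance_boost=True,
                                 output_filesuffix='_CCSM4')
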
4 changes: 2 additions & 2 deletions oggm/tests/test_utils.py
@@ -575,13 +575,13 @@ def remove_diag_var(variable):
input_filesuffix=filesuffix)

def check_result(ds):
assert 'area_m2_min_h' in ds.data_vars
assert 'area_min_h' in ds.data_vars
assert 'melt_on_glacier' in ds.data_vars
assert 'melt_on_glacier_monthly' in ds.data_vars
assert ds_1['melt_on_glacier'].unit == 'kg yr-1'
assert ds_1['melt_on_glacier_monthly'].unit == 'kg yr-1'
assert np.all(np.isnan(
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['area_m2_min_h'].values))
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['area_min_h'].values))
assert np.all(np.isnan(
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['melt_on_glacier'].values))
assert np.all(np.isnan(