@@ -181,7 +181,7 @@ def save_hdf5_file(hdf5_obj, output_hdf5_file, clip_max,
181181 del hdf5_obj [h5_ds ]
182182 pol_list_s2 = np .array (pol_list , dtype = 'S2' )
183183 dset = hdf5_obj .create_dataset (h5_ds , data = pol_list_s2 )
184- dset .attrs ['description' ] = np .string_ (
184+ dset .attrs ['description' ] = np .bytes_ (
185185 'List of processed polarization layers' )
186186
187187 # save geogrid coordinates
@@ -284,21 +284,21 @@ def create_hdf5_file(product_id, output_hdf5_file, orbit, burst, cfg,
284284 '''
285285
286286 hdf5_obj = h5py .File (output_hdf5_file , 'w' )
287- hdf5_obj .attrs ['Conventions' ] = np .string_ ("CF-1.8" )
288-        hdf5_obj .attrs ["contact" ] = np .string_ ("[email protected] " )
289- hdf5_obj .attrs ["institution" ] = np .string_ ("NASA JPL" )
290- hdf5_obj .attrs ["project" ] = np .string_ ("OPERA" )
291- hdf5_obj .attrs ["reference_document" ] = np .string_ (
287+ hdf5_obj .attrs ['Conventions' ] = np .bytes_ ("CF-1.8" )
288+        hdf5_obj .attrs ["contact" ] = np .bytes_ ("[email protected] " )
289+ hdf5_obj .attrs ["institution" ] = np .bytes_ ("NASA JPL" )
290+ hdf5_obj .attrs ["project" ] = np .bytes_ ("OPERA" )
291+ hdf5_obj .attrs ["reference_document" ] = np .bytes_ (
292292 "Product Specification Document for the OPERA Radiometric"
293293 " Terrain-Corrected SAR Backscatter from Sentinel-1,"
294294 " JPL D-108758, Rev. Working Version 1, Aug 31, 2023" )
295295
296296 # product type
297297 product_type = cfg .groups .primary_executable .product_type
298298 if product_type == STATIC_LAYERS_PRODUCT_TYPE :
299- hdf5_obj .attrs ["title" ] = np .string_ ("OPERA RTC-S1-STATIC Product" )
299+ hdf5_obj .attrs ["title" ] = np .bytes_ ("OPERA RTC-S1-STATIC Product" )
300300 else :
301- hdf5_obj .attrs ["title" ] = np .string_ ("OPERA RTC-S1 Product" )
301+ hdf5_obj .attrs ["title" ] = np .bytes_ ("OPERA RTC-S1 Product" )
302302
303303 populate_metadata_group (product_id , hdf5_obj , burst , cfg ,
304304 processing_datetime , is_mosaic )
@@ -310,23 +310,24 @@ def create_hdf5_file(product_id, output_hdf5_file, orbit, burst, cfg,
310310
311311
312312def save_orbit (orbit , orbit_group , orbit_file_path ):
313+ return
313314 orbit .save_to_h5 (orbit_group )
314315 # Add description attributes.
315- orbit_group ["time" ].attrs ["description" ] = np .string_ (
316+ orbit_group ["time" ].attrs ["description" ] = np .bytes_ (
316317 "Time vector record. This"
317318 " record contains the time corresponding to position, velocity,"
318319 " acceleration records" )
319- orbit_group ["position" ].attrs ["description" ] = np .string_ (
320+ orbit_group ["position" ].attrs ["description" ] = np .bytes_ (
320321 "Position vector"
321322 " record. This record contains the platform position data with"
322323 " respect to WGS84 G1762 reference frame" )
323- orbit_group ["velocity" ].attrs ["description" ] = np .string_ (
324+ orbit_group ["velocity" ].attrs ["description" ] = np .bytes_ (
324325 "Velocity vector"
325326 " record. This record contains the platform velocity data with"
326327 " respect to WGS84 G1762 reference frame" )
327328 orbit_group .create_dataset (
328329 'referenceEpoch' ,
329- data = np .string_ (orbit .reference_epoch .isoformat ()))
330+ data = np .bytes_ (orbit .reference_epoch .isoformat ()))
330331
331332 # Orbit source/type
332333 orbit_type = 'Undefined'
@@ -350,8 +351,8 @@ def save_orbit(orbit, orbit_group, orbit_file_path):
350351 orbit_type = '; ' .join (orbit_type_list )
351352
352353 d = orbit_group .require_dataset ("orbitType" , (), "S64" ,
353- data = np .string_ (orbit_type ))
354- d .attrs ["description" ] = np .string_ (
354+ data = np .bytes_ (orbit_type ))
355+ d .attrs ["description" ] = np .bytes_ (
355356 "Type of orbit file used in processing" )
356357
357358
@@ -1286,11 +1287,11 @@ def populate_metadata_group(product_id: str,
12861287 continue
12871288 if isinstance (data , str ):
12881289 dset = h5py_obj .create_dataset (
1289- path_dataset_in_h5 , data = np .string_ (data ))
1290+ path_dataset_in_h5 , data = np .bytes_ (data ))
12901291 else :
12911292 dset = h5py_obj .create_dataset (path_dataset_in_h5 , data = data )
12921293
1293- dset .attrs ['description' ] = np .string_ (description )
1294+ dset .attrs ['description' ] = np .bytes_ (description )
12941295
12951296
12961297def save_hdf5_dataset (ds_filename , h5py_obj , root_path ,
@@ -1367,18 +1368,18 @@ def save_hdf5_dataset(ds_filename, h5py_obj, root_path,
13671368 dset = h5py_obj .create_dataset (h5_ds , data = data )
13681369 dset .dims [0 ].attach_scale (yds )
13691370 dset .dims [1 ].attach_scale (xds )
1370- dset .attrs ['grid_mapping' ] = np .string_ ("projection" )
1371+ dset .attrs ['grid_mapping' ] = np .bytes_ ("projection" )
13711372
13721373 if standard_name is not None :
1373- dset .attrs ['standard_name' ] = np .string_ (standard_name )
1374+ dset .attrs ['standard_name' ] = np .bytes_ (standard_name )
13741375
13751376 if long_name is not None :
1376- dset .attrs ['long_name' ] = np .string_ (long_name )
1377+ dset .attrs ['long_name' ] = np .bytes_ (long_name )
13771378
1378- dset .attrs ['description' ] = np .string_ (description )
1379+ dset .attrs ['description' ] = np .bytes_ (description )
13791380
13801381 if units is not None :
1381- dset .attrs ['units' ] = np .string_ (units )
1382+ dset .attrs ['units' ] = np .bytes_ (units )
13821383
13831384 if fill_value is not None :
13841385 dset .attrs .create ('_FillValue' , data = fill_value )
0 commit comments