Commit ef49e9b3 authored by Hannes Diedrich

Merge branch 'feature/improve_download_to_netcdf' of gitext.gfz-potsdam.de:gts2/gts2_client

parents ae087053 1ce160bf
@@ -9,6 +9,7 @@ Status
[Coverage report](http://gts2.gitext.gfz-potsdam.de/gts2_client/coverage/)
## Release notes
* **2018-02-20:** Improved output to netCDF file
* **2018-02-16:** Added installation information and installation script for miniconda and all needed packages
* **2018-01-18:** Added Dockerfile and script for building a compatible image and running a container for the client
* **2018-01-17:** Added option for mosaicking/merging TIFFs and RGBs on the client side
@@ -581,65 +581,96 @@ def json_to_netcdf(out_mode, api_result, outpath, out_prefix, geo_ll, geo_ur, st
"interest, time of interest and wanted band combination"
f.acknowledgement = api_result['Acknowledgement']
for key in api_result.keys():
if key == 'Results' or key == 'Metadata':
if key == 'ControlValues':
key_group = f.createGroup(key)
if key == 'Results':
for tile in api_result[key].keys():
tile_group = key_group.createGroup(tile)
for band in api_result[key][tile].keys():
band_group = tile_group.createGroup(band)
band_group.time = api_result[key][tile][band]['time']
# band_group.sensor = api_result[key][tile][band]['sensor'] <-- please include this
for data_info in api_result[key][tile][band].keys():
if data_info != 'time':
data_info_group = band_group.createGroup(data_info)
if data_info == 'data':
band_arr = np.asarray(api_result[key][tile][band][data_info][0])
data_info_group.createDimension('x', band_arr.shape[0])
data_info_group.createDimension('y', band_arr.shape[1])
data = data_info_group.createVariable('Data', 'i4', ('x', 'y'),
fill_value=-1)
data.units = "None"
if level == "L1C":
data.long_name = "Top of Atmosphere Reflectance"
data.standard_name = "toa_reflectance"
else:
data.long_name = "Surface Reflectance"
data.standard_name = "boa_reflectance"
data.valid_range = np.array((-2000.0, 16000.0))
data.actual_range = np.array((np.min(band_arr), np.max(band_arr)))
data[:, :] = band_arr
data_info_group.Band = band.split("_")[0].split("B")[-1]
data_info_group.Resolution = band.split("_")[-1]
data_info_group.Central_Wavelength = wl[str(band.split("_")[0])]
if data_info == 'mapinfo':
[setattr(data_info_group, attr,
str(api_result[key][tile][band][data_info][str(attr)])) for attr
in api_result[key][tile][band][data_info].keys()]
if key == 'Metadata':
for tile in api_result[key].keys():
tile_group = key_group.createGroup(tile)
[setattr(key_group, attr, api_result[key][attr]) for attr in api_result[key].keys()]
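# Request parameters are mirrored into a 'Request' group: dashes in attribute names
# become underscores, and 'bands' and 'resolution' are written via setncattr_string.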
if key == 'Request':
key_group = f.createGroup(key)
[setattr(key_group, attr.replace("-", "_"), str(api_result[key][attr]))
for attr in api_result[key].keys() if attr != 'bands' and attr != 'resolution']
key_group.setncattr_string('bands', api_result[key]['bands'])
key_group.setncattr_string('resolution', api_result[key]['resolution'])
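# Geographic information about the requested tiles is stored as attributes of a 'RequestGeoInfo/Tiles' subgroup.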
if key == 'RequestGeoInfo':
key_group = f.createGroup(key)
tile_group = key_group.createGroup('Tiles')
[setattr(tile_group, attr, api_result[key]['Tiles'][attr])
for attr in api_result[key]['Tiles'].keys()]
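# Image data: one group per tile and one subgroup per band, carrying 'sensor' and 'time'
# attributes plus a 3-D 'Data' variable covering all time steps.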
if key == 'Results':
key_group = f.createGroup(key)
for tile in api_result[key].keys():
tile_group = key_group.createGroup(tile)
for band in api_result[key][tile].keys():
band_group = tile_group.createGroup(band)
band_group.setncattr_string('sensor', api_result[key][tile][band]['sensor'])
band_group.setncattr_string('time', api_result[key][tile][band]['time'])
for data_info in api_result[key][tile][band].keys():
if data_info == 'mapinfo':
data_info_group = band_group.createGroup(data_info)
[setattr(data_info_group, attr, api_result[key][tile][band][data_info][attr])
for attr in api_result[key][tile][band][data_info].keys()]
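# The x, y and t dimensions are derived from the stacked band array, and reflectances are
# stored as scaled integers with a nominal valid range of -2000 to 16000.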
if data_info == 'data':
band_arr = np.asarray(api_result[key][tile][band][data_info])
band_group.createDimension('x', band_arr.shape[2])
band_group.createDimension('y', band_arr.shape[1])
band_group.createDimension('t', band_arr.shape[0])
data = band_group.createVariable('Data', 'i4', ('x', 'y', 't'), fill_value=255)
data.units = "None"
if level == "L1C":
data.long_name = "Top of Atmosphere Reflectance"
data.standard_name = "toa_reflectance"
else:
data.long_name = "Surface Reflectance"
data.standard_name = "boa_reflectance"
data.valid_range = np.array((-2000.0, 16000.0))
data.actual_range = np.array((np.min(band_arr), np.max(band_arr)))
data[:, :, :] = band_arr
band_group.Band = band.split("_")[0].split("B")[-1]
band_group.Resolution = band.split("_")[-1]
band_group.Central_Wavelength = wl[str(band.split("_")[0])]
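# Scene metadata: mask rasters, their legends, map info and cloud/data statistics are
# written per tile into nested groups.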
if key == 'Metadata':
key_group = f.createGroup(key)
for tile in api_result[key].keys():
tile_group = key_group.createGroup(tile)
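# A 'MSK_legend' entry is flattened into 'Class_<id>' attributes; all other tiles get one subgroup per mask.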
if tile == 'MSK_legend':
[setattr(tile_group, "Class_" + str(attr), api_result[key][tile][attr])
for attr in api_result[key][tile].keys()]
else:
for msk in api_result[key][tile].keys():
mask_group = tile_group.createGroup(msk)
if msk == 'MSK_20m':
[setattr(mask_group, attr,
str(api_result[key][tile][msk][str(attr)])) for attr in
api_result[key][tile][msk].keys() if attr != 'data']
data_group = mask_group.createGroup('data')
mask_arr = np.asarray(api_result[key][tile][msk]['data'][0])
data_group.createDimension('x', mask_arr.shape[0])
data_group.createDimension('y', mask_arr.shape[1])
data = data_group.createVariable('Data', 'i4', ('x', 'y'), fill_value=255)
data.units = "None"
data.long_name = "Mask classes"
data.standard_name = "classes"
data.valid_range = np.array((10.0, 60.0))
data.actual_range = np.array((np.min(mask_arr), np.max(mask_arr)))
data[:, :] = mask_arr
if msk == 'MSK_legend':
[setattr(mask_group, attr,
str(api_result[key][tile][msk][str(attr)])) for attr in
api_result[key][tile][msk].keys()]
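# For every mask, the acquisition time plus its cloud/data statistics, map info and raster are written below.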
mask_group.setncattr_string('time', api_result[key][tile][msk]['time'])
for data_info in api_result[key][tile][msk].keys():
if data_info == 'cloud_stats':
data_info_group = mask_group.createGroup(data_info)
data_info_group.setncattr_string('cloud_frac',
api_result[key][tile][msk][data_info]
['cloud_frac'])
if data_info == 'mapinfo':
data_info_group = mask_group.createGroup(data_info)
[setattr(data_info_group, attr, api_result[key][tile][msk][data_info][attr])
for attr in api_result[key][tile][msk][data_info].keys()]
if data_info == 'stats':
data_info_group = mask_group.createGroup(data_info)
data_info_group.setncattr_string('cloud_frac',
api_result[key][tile][msk][data_info]
['cloud_frac'])
data_info_group.setncattr_string('data_frac',
api_result[key][tile][msk][data_info]
['data_frac'])
data_info_group.setncattr_string('time',
api_result[key][tile][msk][data_info]['time'])
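# The mask raster itself is written like the band data: a 3-D 'Data' variable with x, y and t
# dimensions taken from the stacked array shape.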
if data_info == 'data':
mask_arr = np.asarray(api_result[key][tile][msk][data_info])
mask_group.createDimension('x', mask_arr.shape[2])
mask_group.createDimension('y', mask_arr.shape[1])
mask_group.createDimension('t', mask_arr.shape[0])
data = mask_group.createVariable('Data', 'i4', ('x', 'y', 't'), fill_value=255)
data.units = "None"
data.long_name = "Mask classes"
data.standard_name = "classes"
data.valid_range = np.array((10.0, 60.0))
data.actual_range = np.array((np.min(mask_arr), np.max(mask_arr)))
data[:, :, :] = mask_arr
f.close()
except:
raise Exception("Something went wrong while saving as netcdf. " + traceback.format_exc())