Commit 9142f468 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

First prototype of algorithms for geometrical and spectral resolution working...

First prototype of algorithms for geometrical and spectral resolution working in map-reduce context (L2A, L2B).
GEOP:
    - renamed GEOPROCESSING_BD.py to GEOPROCESSING.py
    - moved get_prjLonLat(), get_proj4info(), corner_coord_to_minmax() to GEOP
    - added docstrings to DN2Rad(), DN2TOARef(), TOARad2Kelvin_fastforward(), DN2DegreesCelsius_fastforward()
L1A_P:
    - L1A_object.fill_arr_from_disk(): further silencing of console outputs
L1B_P:
    - moved get_DESHIFTER_configs() and class DESHIFTER() to L2A_P
    - adjusted initial values for COREG attributes related to reference image (not None anymore in order to make L2A_P work if shift calculation failed)
    - increased database statement timeouts for queries within get_reference_image_params() to 25 seconds
    - L1B_object():
        - added attribute "deshift_results"
        - removed deprecated code
        - added join_deshift_results()
        - revised apply_deshift_results()
L2A_P:
    -  added get_DESHIFTER_configs() and class DESHIFTER() from L1B_P
    - fixed two bugs in DESHIFTER.correct_shifts() where DESHIFTER.band2process was not respected and whole image cube was read instead of only one band
    - added class L2A_object()
L2B_P:
    - added class L2B_object()
    - L2B_object():
        - added interpolate_cube_linear()
        - added spectral_homogenization()
META:
    - renamed METADATA_BD.py to METADATA.py
INP_R:
    - added quiet mode to read_ENVIfile()
OUT_W:
    - added enviHdr_keyOrder using list from reorder_envi_header()
    - fixed a bug in reorder_ENVI_header() that caused repetitions of header keys
    - adjusted print_dict within mask_to_ENVI_Classification() in order to also support L2A and L2B
HLP_F:
    - added parent objects for L2A and L2B in parentObjDict
    - added type hints to cut_GMS_obj_into_blocks() and merge_GMS_tiles_to_GMS_obj()
GITIGNORE:
    - updated .gitignore file
CFG:
    - added virtual_sensor_id, datasetid_spectral_ref, target_CWL, target_FWHM to usecase class by querying the database
PC:
    - added type hints to mapper functions
    - revised L2A_map_2()
    - added L2B_map_1()
    - revised/added L2A algorithm calls (only Flink mode is supported so far)
    - added L2B algorithm calls (only Flink mode is supported so far)
pgDB:
    - added Sentinel-2A virtual sensors to virtual_sensors table (different spatial resolutions)
    - added wavelengths positions and band widths to virtual_sensors table
parent 8abcf2ed
......@@ -2,19 +2,8 @@
.idea/
BAK/
OLD/
database/old/
database/cloud_classifier/
database/sampledata/
database/metadata/
database/processed_data/
testing/out/
algorithms/OLD/
*.pyc
gms_io/robert/
gms_io/robert UNIX-Format konvertiert/
gms_io/robert ALT/
gms_io/landsat_downloader/
algorithms/METADATA.py
......@@ -210,7 +210,7 @@ class GEOPROCESSING(object):
self.originY = self.geotransform[3]
self.pixelWidth = self.geotransform[1]
self.pixelHeight = self.geotransform[5]
self.rot1 = self.geotransform[2]
self.rot1 = self.geotransform[2] # FIXME check
self.rot2 = self.geotransform[4]
self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth),
self.originY + (self.rows * self.pixelHeight)] # [ulx, uly, lrx, lry]
......@@ -318,8 +318,8 @@ class GEOPROCESSING(object):
self.update_dataset_related_attributes()
def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None, cutNeg=True,
optshift=None, v=0):
def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None,
cutNeg=True, optshift=None, v=0):
"""----METHOD_3a----------------------------------------------------------
converts OLI DN data to TOA Reflectance. http://landsat.usgs.gov/Landsat8_Using_Product.php
......@@ -3143,6 +3143,38 @@ def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
gdal_ds_GT = None
return ext
def get_prjLonLat(fmt='wkt'):
    # type: (str) -> str
    """Return the standard geographic projection (EPSG 4326).

    :param fmt: <str> target format - 'WKT' or 'PROJ4' (matched case-insensitively)
    :return:    the EPSG 4326 projection in the requested format
    """
    # Case-insensitive substring matching, mirroring how callers pass e.g. 'WKT'/'wkt'.
    wants_wkt = bool(re.search('wkt', fmt, re.I))
    wants_proj4 = bool(re.search('Proj4', fmt, re.I))
    assert wants_wkt or wants_proj4, 'unsupported output format'
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    if wants_wkt:
        return srs.ExportToWkt()
    return srs.ExportToProj4()
def get_proj4info(ds=None, proj=None):
    # type: (gdal.Dataset, str) -> str
    """Return PROJ4-formatted projection info for a gdal.Dataset or a WKT projection string,
    e.g. '+proj=utm +zone=43 +datum=WGS84 +units=m +no_defs '

    :param ds:   <gdal.Dataset> the gdal dataset to get PROJ4 info for
    :param proj: <str> the projection (WKT) to get PROJ4 formatted info for
    """
    assert ds or proj, "Specify at least one of the arguments 'ds' or 'proj'"
    # Prefer the dataset's projection when a dataset is given; fall back to the WKT string.
    wkt = ds.GetProjection() if ds else proj
    srs = osr.SpatialReference()
    srs.ImportFromWkt(wkt)
    return srs.ExportToProj4()
def corner_coord_to_minmax(corner_coords):
    # type: (list) -> (float, float, float, float)
    """Convert a list of corner coordinates to a bounding box.

    Converts [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax).

    :param corner_coords: list of coordinate pairs like [[x1,y1],[x2,y2],[]...]
    :return:              (xmin, xmax, ymin, ymax) as floats
    """
    # NOTE: the previous implementation wrapped each coordinate in int(), which
    # silently dropped sub-pixel precision and truncated negative coordinates
    # toward zero (e.g. min of -3.5 became -3); float() honors the declared
    # float return contract of the type hint above.
    x_vals = [float(pair[0]) for pair in corner_coords]
    y_vals = [float(pair[1]) for pair in corner_coords]
    return min(x_vals), max(x_vals), min(y_vals), max(y_vals)
def get_footprint_polygon(CornerLonLat):
""" Converts a list of coordinates into a shapely polygon object.
:param CornerLonLat: a list of coordinate tuples like [[lon,lat], [lon. lat], ..]
......@@ -3169,6 +3201,21 @@ def get_overlap_polygon(poly1, poly2):
return {'overlap poly':None, 'overlap percentage':0, 'overlap area':0}
def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,int,int,int,bool) -> np.ndarray
"""Convert DN to Radiance [W * m-2 * sr-1 * micrometer-1]
!!! InputGains and Offsets should be in [W * m-2 * sr-1 * micrometer-1]
:param ndarray: <np.ndarray> array of DNs to be converted into radiance
:param offsets: [W * m-2 * sr-1 * micrometer-1]:
list that includes the offsets of the individual rasterbands [offset_band1, offset_band2,
... ,offset_bandn] or optional input as number if Dataset has only 1 Band
:param gains: [W * m-2 * sr-1 * micrometer-1]:
list that includes the gains of the individual rasterbands [gain_band1, gain_band2, ... ,
gain_bandn] or optional input as number if Dataset has only 1 Band
:param inFill: pixelvalues allocated to background/dummy/fill pixels
:param inZero: pixelvalues allocated to zero radiance
:param inSaturated: pixelvalues allocated to saturated pixels
:param cutNeg: cutNegvalues -> all negative values set to 0
"""
assert isinstance(offsets,list) and isinstance(gains,list), \
"Offset and Gain parameters have to be provided as two lists containing gains and offsets for \
each band in ascending order. Got offsets as type '%s' and gains as type '%s'." %(type(offsets),type(gains))
......@@ -3200,6 +3247,25 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None,
def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,list,float,float,int,int,int,bool) -> np.ndarray
"""Converts DN data to TOA Reflectance.
:param ndarray: <np.ndarray> array of DNs to be converted into TOA reflectance
:param offsets: list: of offsets of each rasterband [W * m-2 * sr-1 * micrometer-1]
[offset_band1, offset_band2, ... ,offset_bandn] or optional as number if Dataset has
only 1 Band
:param gains: list: of gains of each rasterband [W * m-2 * sr-1 * micrometer-1]
[gain_band1, gain_band2, ... ,gain_bandn] or optional as number if Dataset has
only 1 Band
:param irradiances: list: of irradiance of each band [W * m-2 * micrometer-1]
[irradiance_band1, irradiance_band2, ... ,irradiance_bandn]
:param zenith: number: sun zenith angle
:param earthSunDist: earth-sun- distance for a certain day
:param inFill: number: pixelvalues allocated to background/dummy/fill pixels
:param inZero: number: pixelvalues allocated to zero radiance
:param inSaturated: number: pixelvalues allocated to saturated pixles
:param cutNeg: bool: if true. all negative values turned to zero. default: True
:return: Int16 TOA_Reflectance in [0-10000]
"""
assert isinstance(offsets,list) and isinstance(gains,list) and isinstance(irradiances, list), \
"Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
"irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
......@@ -3231,7 +3297,16 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
# type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray
"""Convert top-of-atmosphere radiances of thermal bands to temperatures in Kelvin
by applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of TOA radiance values to be converted into Kelvin
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([K1,K2],['K1', 'K2']):
assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \
......@@ -3261,7 +3336,19 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.95,
inFill=None, inZero=None, inSaturated=None):
"""Convert thermal DNs to temperatures in degrees Celsius
by calculating TOARadiance and applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of DNs to be converted into Degrees Celsius
:param offsets:
:param gains:
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([offsets,gains,K1,K2],['Offset', 'Gain','K1','K2']):
assert isinstance(offsets,list) and isinstance(gains,list), \
......@@ -3721,17 +3808,4 @@ def get_subsetProps_from_subsetArg(shape_fullArr,subset):
return collections.OrderedDict(zip(
['rows','cols','bands','rowStart','rowEnd','colStart','colEnd','bandStart','bandEnd','bandsList'],
[ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ]))
[ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ]))
\ No newline at end of file
......@@ -24,7 +24,7 @@ from gms_io import Input_reader as INP_R
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
from misc import path_generator as PG
from algorithms.METADATA_BD import get_LayerBandsAssignment
from algorithms.METADATA import get_LayerBandsAssignment
########################### core functions ####################################
def get_entity_IDs_within_AOI(): # called in console mode
......
......@@ -36,8 +36,8 @@ import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
from algorithms import METADATA_BD as META
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import METADATA as META
from algorithms import GEOPROCESSING as GEOP
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import py_tools_ah
from gms_io import envifilehandling_BD as ef
......@@ -194,11 +194,11 @@ class L1A_object(object):
path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if job.exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger)
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
else: self.logger.info('Reading file: %s' %self.baseN)
self.log_for_fullArr_or_firstTile(self.logger,'Reading file %s as tiles...' %self.baseN \
if self.arr_pos else 'Reading file %s...' %self.baseN)
#self.masks is only needed by Output writer to masks combined -> generated there and on demand
else: # job.exec_mode=='Python'
self.arr = path_arr
......
This diff is collapsed.
......@@ -29,7 +29,7 @@ except ImportError:
import osr
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import GEOPROCESSING as GEOP
from gms_io import Input_reader as INP_R
from misc import path_generator as PG
from algorithms.L1B_P import L1B_object
......
This diff is collapsed.
......@@ -4,4 +4,39 @@ Created on Fri Jan 16 11:57:31 2015
@author: danschef
"""
import builtins
import numpy as np
from scipy.interpolate import interp1d
from misc import helper_functions as HLP_F
from algorithms.L2A_P import L2A_object
usecase = builtins.GMS_config.usecase
class L2B_object(L2A_object):
    """GMS Level-2B processing object: spectral homogenization of an L2A object
    onto the target wavelength positions configured in usecase.target_CWL."""

    def __init__(self, L2A_obj):
        # type: (L2A_object) -> None
        """Initialize from an existing L2A object by copying ALL of its attributes.

        :param L2A_obj: the L2A_object to promote to L2B; may be None (e.g. when
                        re-instantiating from serialized state), in which case no
                        attributes are copied and no logger is set up.
        """
        # Base class is initialized with None first; the copied attributes below
        # then overwrite whatever defaults that produced.
        super().__init__(None)
        # Shallow-copies every attribute of the given L2A object onto self.
        if L2A_obj: [setattr(self, key, value) for key,value in L2A_obj.__dict__.items()]
        self.proc_level = 'L2B'
        # Logger appends to the existing per-scene logfile (requires baseN /
        # path_logfile copied from L2A_obj above).
        self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) if L2A_obj else None

    def spectral_homogenization(self, subset=None, kind='linear'):
        # type: (object, str) -> None
        """Homogenize self.arr spectrally to the target wavelength positions.

        Updates self.arr in place and adjusts the corresponding metadata keys
        ('wavelength', 'bands', 'LayerBandsAssignment', 'band names').

        :param subset: currently unused  # NOTE(review): accepted but never read — confirm intent
        :param kind:   interpolation kind; only 'linear' is supported so far
        :raises NotImplementedError: if self.arr is not held in memory as a numpy array
                                     (e.g. exec_mode 'Python' where arr is a file path)
        """
        tgt_cwls = usecase.target_CWL # FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
        assert kind in ['linear',], "%s is not a supported kind of homogenization." %kind
        self.log_for_fullArr_or_firstTile(self.logger,'Performing spectral homogenization (%s) with target wavelength '
            'positions at %s nm.' %(kind,', '.join(np.array(tgt_cwls[:-1]).astype(str))+' and %s' %tgt_cwls[-1]))
        if isinstance(self.arr,np.ndarray):
            if kind=='linear': self.arr = self.interpolate_cube_linear(tgt_cwls)
            # Keep metadata in sync with the resampled cube.
            self.meta['wavelength'] = list(tgt_cwls)
            self.meta['bands'] = len(tgt_cwls)
            self.meta['LayerBandsAssignment'] = [] # TODO
            del self.meta['band names'] # TODO
        else:
            raise NotImplementedError('got %s' %type(self.arr))

    def interpolate_cube_linear(self, target_CWLs):
        # type: (list) -> np.ndarray
        """Linearly interpolate the image cube along the spectral axis.

        :param target_CWLs: <list> target central wavelength positions [nm]
        :return:            <np.ndarray> interpolated cube, cast to int16

        Assumes self.arr is a 3D cube with bands on axis 2 — TODO confirm against callers.
        """
        orig_CWLs, target_CWLs = np.array(self.meta['wavelength']), np.array(target_CWLs)
        # fill_value='extrapolate' allows target wavelengths outside the original
        # range (requires scipy >= 0.17). The inner np.array(orig_CWLs) re-wraps
        # an existing ndarray and is effectively a no-op copy.
        outarr = interp1d(np.array(orig_CWLs), self.arr, axis=2, kind='linear', fill_value='extrapolate')(target_CWLs)
        outarr = outarr.astype(np.int16)
        return outarr
\ No newline at end of file
......@@ -33,7 +33,7 @@ except ImportError: import osr
from gms_io import envifilehandling_BD as ef
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import GEOPROCESSING as GEOP
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
......
......@@ -43,13 +43,14 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
query_cfg = lambda conn_param, key: get_info_from_postgreSQLdb(conn_param,'config','value',{'key': "'%s'" % key})[0][0]
query_job = lambda conn_param, col: get_info_from_postgreSQLdb(conn_param,'scenes_jobs',col,{'id':GMS_process_ID})[0][0]
query_vir = lambda conn_param, col, VSID: get_info_from_postgreSQLdb(conn_param, 'virtual_sensors', col, {'id':VSID})[0][0]
absP, joinP = lambda x: os.path.abspath(x), lambda *x: os.path.join(*x)
class job:
ID = GMS_process_ID
call_type = GMS_call_type
exec_mode = 'Python'
exec_mode = 'Flink'
#exec_mode = 'Python'
assert exec_mode in ['Flink','Python']
start_time = str(datetime.datetime.now())
......@@ -109,8 +110,8 @@ class job:
exec__L1AP = [1, 1]
exec__L1BP = [1, 1]
exec__L1CP = [1, 1]
exec__L2AP = [0, 1]
exec__L2BP = [0, 1]
exec__L2AP = [1, 1]
exec__L2BP = [1, 1]
exec__L2CP = [0, 1]
exec__L2DP = [0, 1]
if exec_mode=='Python':
......@@ -145,6 +146,10 @@ class usecase:
conversion_type_optical = 'Ref' # 'Rad' / 'Ref'
conversion_type_thermal = 'Rad' # 'Rad' / 'Temp'
scale_factor_TOARef = 10000
virtual_sensor_id = 10 # Sentinel-2A 10m
datasetid_spectral_ref = 249 # Sentinel-2A
target_CWL = []
target_FWHM = []
elif GMS_call_type == 'webapp':
#skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal'))
skip_thermal = True
......@@ -153,14 +158,20 @@ class usecase:
conversion_type_optical = query_cfg(job.conn_db_meta, 'conversion_type_optical')
conversion_type_thermal = query_cfg(job.conn_db_meta, 'conversion_type_thermal')
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
virtual_sensor_id = query_job(job.conn_db_meta, 'virtualsensorid')
virtual_sensor_id = virtual_sensor_id if virtual_sensor_id!=-1 else 10 # Sentinel-2A 10m
datasetid_spectral_ref = query_vir(job.conn_db_meta, 'spectral_characteristics_datasetid', virtual_sensor_id)
target_CWL = query_vir(job.conn_db_meta, 'wavelengths_pos', virtual_sensor_id)
target_FWHM = query_vir(job.conn_db_meta, 'band_width', virtual_sensor_id)
#conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME
#conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME
scale_factor_TOARef = int(query_cfg(job.conn_db_meta, 'scale_factor_TOARef'))
align_coord_grids = 0 # FIXME: könnte später sinnlos werden, da gemeinsame Auswertung von Multisensordaten inkl.
align_coord_grids = 1 # FIXME: könnte später sinnlos werden, da gemeinsame Auswertung von Multisensordaten inkl.
# FIXME: Zeitreihen ein grid aligning voraussetzt
userInp_target_gsd = 30 # [meters], overridden if match_gsd==True
userInp_target_gsd = 20 # [meters], overridden if match_gsd==True
match_gsd = True
userdef_EPSG = None # FIXME nutzerdefinierte Einstellung: int(query(job.conn_db_meta,'userdef_EPSG'))
......
......@@ -23,16 +23,16 @@ import builtins
import warnings
import scipy.interpolate
import algorithms.METADATA_BD as META
import misc.database_tools as DB_T
import misc.path_generator as PG
from algorithms import METADATA as META
from misc import database_tools as DB_T
from misc import path_generator as PG
job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
########################### core functions ####################################
def read_ENVIfile(path,arr_shape,arr_pos,logger=None, return_meta=False):
def read_ENVIfile(path,arr_shape,arr_pos,logger=None, return_meta=False, q=0):
hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=return_meta)
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=return_meta,q=q)
def read_ENVIhdr_to_dict(hdr_path, logger=None):
# type: (str,logging.logger) -> dict
......@@ -107,17 +107,16 @@ def read_mask_subset(path_masks,bandname,logger,subset=None):
def GMSfile2dict(path_GMSfile):
""" Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.
:param path_GMSfile:
absolute path on disk
:return:
the corresponding Python dictionary
:param path_GMSfile: absolute path on disk
:return: the corresponding Python dictionary
"""
return json.load(open(path_GMSfile))
def unify_envi_header_keys(header_dict):
"""Ensures the compatibility of ENVI header keys written by Spectral-Python the code internal attribute names.
(ENVI header keys are always lowercase in contrast to the attribute names used in code).
:param header_dict: """
:param header_dict:
"""
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit',
'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
unified_header_dict = header_dict
......
......@@ -24,7 +24,6 @@ import datetime
import ogr
import osr
import builtins
import time
from itertools import chain
from misc import helper_functions as HLP_F
......@@ -33,12 +32,21 @@ from misc import path_generator as PG
from gms_io import Input_reader as INP_R
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
# Target order of keys when (re)writing ENVI header files. Passed to
# reorder_ENVI_header() as a workaround for spectral's envi.write_envi_header,
# which writes the metadata dictionary unordered.
enviHdr_keyOrder = \
    ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type', 'interleave',
     'sensor type', 'byte order', 'reflectance scale factor', 'map info', 'coordinate system string',
     'corner coordinates lonlat', 'image_type', 'Satellite', 'Sensor', 'Subsystem', 'EntityID', 'arr_pos', 'arr_shape',
     'Metafile', 'gResolution', 'AcqDate', 'AcqTime', 'wavelength', 'bandwidths', 'band names', 'LayerBandsAssignment',
     'data gain values', 'data offset values', 'reflectance gain values', 'reflectance offset values', 'ThermalConstK1',
     'ThermalConstK2', 'ProcLCode', 'PhysUnit', 'wavelength units', 'SunElevation', 'SunAzimuth', 'SolIrradiance',
     'EarthSunDist','ViewingAngle', 'IncidenceAngle', 'FieldOfView', 'scene length', 'overpass duraction sec',
     'Quality', 'Additional']
def HDR_writer(meta_dic,outpath_hdr,logger=None):
if logger is not None:
logger.info('Writing %s header ...' %os.path.basename(outpath_hdr))
envi.write_envi_header(outpath_hdr,meta_dic)
reorder_ENVI_header(outpath_hdr)
reorder_ENVI_header(outpath_hdr,enviHdr_keyOrder)
def ASCII_writer(In,path_out_baseN):
assert isinstance(In,dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' %type(In)
......@@ -104,28 +112,29 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea
data = tileList_or_Array if is_3D else tileList_or_Array[:,:,None]
memmap[rS:rE+1,cS:cE+1,:] = data
def reorder_ENVI_header(path_hdr):
def reorder_ENVI_header(path_hdr,tgt_keyOrder):
# type: (str,list)
"""Reorders the keys of an ENVI header file according to the implemented order. Keys given in the target order list
but missing the ENVI fileare skipped.
This function is a workaround for envi.write_envi_header of Spectral Python Library that always writes the given
metadata dictinary as an unordered dict.
:param path_hdr: <str> path of the target ENVI file
:param tgt_keyOrder: <list> list of target keys in the correct order
"""
with open(path_hdr, 'r') as inF:
items = inF.read().split('\n')
HLP_F.silentremove(path_hdr)
order = ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type',
'interleave', 'sensor type', 'byte order', 'reflectance scale factor', 'map info',
'coordinate system string', 'corner coordinates lonlat', 'image_type', 'Satellite', 'Sensor', 'Subsystem',
'EntityID', 'arr_pos', 'arr_shape', 'Metafile', 'gResolution', 'AcqDate', 'AcqTime', 'wavelength',
'bandwidths', 'band names', 'LayerBandsAssignment', 'data gain values', 'data offset values',
'reflectance gain values', 'reflectance offset values', 'ThermalConstK1', 'ThermalConstK2', 'ProcLCode',
'PhysUnit', 'wavelength units', 'SunElevation', 'SunAzimuth', 'SolIrradiance', 'EarthSunDist',
'ViewingAngle', 'IncidenceAngle', 'FieldOfView', 'scene length', 'overpass duraction sec', 'Quality',
'Additional']
with open(path_hdr, 'w') as outFile:
for paramName in order:
for paramName in tgt_keyOrder:
for item in items:
if item.startswith(paramName) or item.startswith(paramName.lower()):
outFile.write(item + '\n')
items.remove(item)
continue
# write remaining header items
[outFile.write(item + '\n') for item in items]
def mask_to_ENVI_Classification(InObj,maskname):
# type: (object,str) -> (np.ndarray, dict, list, list)
......@@ -209,8 +218,10 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False):
if float(spectral.__version__)>=0.16 else \
{'arr': 'int16', 'masks': 'int8', 'mask_clouds':'int8'} # bug workaround for spectral lib v.15 and earlier
print_dict = {'RSD_L1A': 'L1A satellite data', 'MAS_L1A': 'L1A masks', 'MAC_L1A': 'L1A cloud mask',
'RSD_L1B': 'L1B shifted data', 'MAS_L1B': 'L1B masks', 'MAC_L1B': 'L1B cloud mask',
'RSD_L1C': 'L1C atm. corrected reflectance data', 'MAS_L1C': 'L1C masks', 'MAC_L1C': 'L1C cloud mask'}
'RSD_L1B': 'L1B satellite data', 'MAS_L1B': 'L1B masks', 'MAC_L1B': 'L1B cloud mask',
'RSD_L1C': 'L1C atm. corrected reflectance data', 'MAS_L1C': 'L1C masks', 'MAC_L1C': 'L1C cloud mask',
'RSD_L2A': 'L2A geometrically homogenized data', 'MAS_L2A': 'L2A masks', 'MAC_L2A': 'L2A cloud mask',
'RSD_L2B': 'L2B spectrally homogenized data', 'MAS_L2B': 'L2B masks', 'MAC_L2B': 'L2B cloud mask'}
if not hasattr(InObj,'masks') or InObj.masks is None:
InObj.build_combined_masks_array() # creates InObj.masks and InObj.masks_meta
......@@ -236,7 +247,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False):
envi.save_image(outpath_hdr, arr2write, metadata=meta2write, dtype=out_dtype,
interleave=InObj.outInterleave, ext=InObj.outInterleave, force=True)
else: # 'block'
bands = arr2write.shape[2] if len(arr2write.shape) == 3 else 1
bands = arr2write.shape[2] if len(arr2write.shape) == 3 else 1
out_shape = tuple(InObj.shape_fullArr[:2]) + (bands,)
Tiles_Writer(arr2write, outpath_hdr, out_shape, out_dtype, InObj.outInterleave,
out_meta=meta2write, arr_pos=InObj.arr_pos, overwrite=False)
......@@ -275,7 +286,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False):
setattr(InObj,arrayname,outpath_arr) # refresh arr/masks/mask_clouds attributes
if os.path.exists(outpath_hdr): reorder_ENVI_header(outpath_hdr)
if os.path.exists(outpath_hdr): reorder_ENVI_header(outpath_hdr,enviHdr_keyOrder)
if arrayname=='arr':
InObj.path_Outfile_L1A = outpath_arr
elif arrayname=='masks':
......@@ -359,7 +370,6 @@ def write_shp(shapely_poly,path_out,prj=None):
geom = ogr.CreateGeometryFromWkb(shapely_poly.wkb)
feat.SetGeometry(geom)
layer.CreateFeature(feat)
feat = geom = None # destroy these
# Save and close everything
ds = layer = feat = geom = None
\ No newline at end of file
......@@ -35,6 +35,7 @@ import misc.database_tools as DB_T
from algorithms.L1A_P import L1A_object
from algorithms.L1B_P import L1B_object
from algorithms.L1C_P import L1C_object
from algorithms.L2A_P import L2A_object
config = builtins.GMS_config # read from builtins (set by process_controller)
......@@ -47,7 +48,7 @@ dtype_lib_IDL_Python = {0:np.bool_, 1:np.uint8, 2:np.int16, 3:np.int32, 4:np.flo
dtype_lib_GDAL_Python= {"uint8": 1, "int8": 1, "uint16": 2, "int16": 3, "uint32": 4, "int32": 5, "float32": 6,
"float64": 7, "complex64": 10, "complex128": 11}
parentObjDict = {'L1A': L1A_object, 'L1B': L1B_object, 'L1C': L1C_object,
'L2A': None, 'L2B': None, 'L2C': None, 'L2D': None} # FIXME parentobjects L2 processors
'L2A': L2A_object, 'L2B': None, 'L2C': None, 'L2D': None} # FIXME parentobjects L2 processors
initArgsDict = {'L1A': (None,), 'L1B': (None, None), 'L1C': (None,),
'L2A': (None,), 'L2B': (None,), 'L2C': (None,), 'L2D': (None,)} # FIXME initargs L2 processors
......@@ -270,6 +271,7 @@ def get_image_tileborders(target_tileShape, target_tileSize, path_GMS_file=None,
def cut_GMS_obj_into_blocks(tuple__In_obj__blocksize_RowsCols):
# type: (tuple) -> L1A_object
"""Cut a GMS object into tiles with respect to raster attributes as well as scene wide attributes.
:param tuple__In_obj__blocksize_RowsCols: a tuple with GMS_obj as first and [rows,cols] as second element"""
......@@ -293,6 +295,7 @@ def cut_GMS_obj_into_blocks(tuple__In_obj__blocksize_RowsCols):
def merge_GMS_tiles_to_GMS_obj(list_GMS_tiles):
# type: (list) -> L1A_object
"""Merge separate GMS objects belonging to the same scene-ID to ONE GMS object
:param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()"""
if 'IMapUnorderedIterator' in str(type(list_GMS_tiles)): list_GMS_tiles = list(list_GMS_tiles)
......
......@@ -13,16 +13,14 @@ import sys
import os
import multiprocessing
import datetime
import dill
import builtins
import inspect
import time
from itertools import chain
print('##################################################################################################')
called_from_iPyNb = 1 if 'ipykernel/__main__.py' in sys.argv[0] else 0
# check if process_controller is executed by debugger
isdebugging = 1 if True in [frame[1].endswith("pydevd.py") for frame in inspect.stack()] else False
#isdebugging = 1 if True in [frame[1].endswith("pydevd.py") for frame in inspect.stack()] else False
isdebugging = 1 # FIXME
builtins.GMS_call_type = 'console' if len(sys.argv) < 2 or called_from_iPyNb else 'webapp'
builtins.GMS_process_ID = datetime.datetime.now().strftime("%Y%m%d__%Hh%Mm%Ss") if len(sys.argv) < 2\
......@@ -56,8 +54,8 @@ import algorithms.L0B_P as L0B_P # Level 0B Processor
import algorithms.L1A_P as L1A_P # Level 1A Processor
import algorithms.L1B_P as L1B_P # Level 1B Processor
import algorithms.L1C_P as L1C_P # Level 1C Processor
#import algorithms.L2A_P as L2A_P # Level 2A Processor
#import algorithms.L2B_P as L2B_P # Level 2B Processor
import algorithms.L2A_P as L2A_P # Level 2A Processor
import algorithms.L2B_P as L2B_P # Level 2B Processor
#import algorithms.L2C_P as L2C_P # Level 2C Processor
#import algorithms.L2D_P as L2D_P # Level 2D Processor
......@@ -67,6 +65,7 @@ job.logger = HLP_F.setup_logger('log__%s' %job.ID, os.path.join(job.path_job_log
job.logger.info('Execution started.')
def L0B_L1A_map(data_list_item): #map (scene-wise parallelization)
# type: (list) -> L1A_P.L1A_object
L0B_obj = L0B_P.L0B_object(data_list_item)
L1A_obj = L1A_P.L1A_object(L0B_obj)
L1A_obj.calc_mask_nodata()
......@@ -89,6 +88,7 @@ def L0B_L1A_map(data_list_item): #map (scene-wise parallelization)
return L1A_obj
def L0B_L1A_map_1(data_list_item): #map (scene-wise parallelization)
# type: (list) -> L1A_P.L1A_object
L0B_obj = L0B_P.L0B_object(data_list_item)
L1A_obj = L1A_P.L1A_object(L0B_obj)
L1A_obj.calc_mask_nodata()
......@@ -99,6 +99,7 @@ def L0B_L1A_map_1(data_list_item): #map (scene-wise parallelization)
return tiles
def L1A_map_2(L1A_tile): # map (block-wise parallelization)
# type: (L1A_P.L1A_object) -> L1A_P.L1A_object
L1A_tile.calc_TOARadRefTemp()
L1A_tile.calc_cloud_mask()
if job.exec_mode=='Python':
......@@ -107,6 +108,7 @@ def L1A_map_2(L1A_tile): # map (block-wise parallelization)
def L1A_map_3(L1A_obj): #map (scene-wise parallelization)
# type: (L1A_P.L1A_object) -> L1A_P.L1A_object
L1A_obj.calc_corner_positions() # requires mask_1bit
L1A_obj.calc_center_AcqTime() # (if neccessary); requires corner positions
L1A_obj.calc_mean_VAA()
......@@ -121,6 +123,7 @@ def L1A_map_3(L1A_obj): #map (scene-wise parallelization)
return L1A_obj
def L1B_map_1(L1A_obj):
# type: (L1A_P.L1A_object) -> L1B_P.L1B_object