Commit 0f59e4d3 authored by Daniel Scheffler

Revised some docstrings.


Former-commit-id: f2eb8b47
parent 51bbab8b
Welcome to GeoMultiSens's documentation!
-======================================
+========================================
Contents:
...
@@ -1257,6 +1257,7 @@ class GEOPROCESSING(object):
        :param dtype: GDALDataType string: GDT_Unknown; GDT_Byte; GDT_UInt16; GDT_Int16; GDT_UInt32; GDT_Int32;
                      GDT_Float32; GDT_Float64; GDT_CInt16; GDT_CInt32; GDT_CFloat32; GDT_CFloat64; GDT_TypeCount
        """
+
        if outPath is None:
            outPath = self.workspace
@@ -1330,6 +1331,7 @@ class GEOPROCESSING(object):
        :param dtype: GDALDataType string: GDT_Unknown; GDT_Byte; GDT_UInt16; GDT_Int16; GDT_UInt32; GDT_Int32;
                      GDT_Float32; GDT_Float64; GDT_CInt16; GDT_CInt32; GDT_CFloat32; GDT_CFloat64; GDT_TypeCount
        """
+
        if outPath is None:
            outPath = self.workspace
@@ -1360,6 +1362,7 @@ class GEOPROCESSING(object):
        :param value:   value for the single band file. The new file has one band with the given value.
        :param outPath:
        """
+
        if outPath is None:
            outPath = self.workspace
@@ -2264,6 +2267,7 @@ def ndarray2gdal(ndarray, outPath=None, importFile=None, direction=1, GDAL_Type=
    :param v:
    :return:   GDAL data file
    """
+
    if v:
        print("\n--------GEOPROCESSING--------\n##Function##"
              "\n**ndarray2gdal**")
@@ -2336,6 +2340,7 @@ def GetMaxExt_nb(datalist, bin=1, v=0):
    :param v:
    :return:   [ulx,uly,lrx,lry]: maximum extent of all files, excluding background pixels, as a list
    """
+
    if v == 1:
        print("\n--------GEOPROCESSING--------\n##Function##"
              "\n**GetMaxExt_nb**")
@@ -2480,6 +2485,7 @@ def convertGdalNumpyDataType(dType):
    output:
        corresponding dataType
    """
+
    # dictionary to translate GDAL data types (strings) into the corresponding numpy data types
    dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
                "Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
@@ -2521,6 +2527,7 @@ def ApplyMask(dataFile, maskFile, maskValue, outPath=None, keepDataType=1, v=0):
    :param v:
    :return:   InputFile with the given maskValue at the masked pixel positions (ENVI .bsq or ndarray)
    """
+
    # define default outPath: filename + masked.bsq
    if outPath is None:
        pass
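
Note: the masking step itself boils down to a conditional assignment; a minimal sketch of the idea (array contents and maskValue are hypothetical; the function body is not shown in this hunk):

import numpy as np

data = np.ones((4, 4), dtype=np.float32)   # stand-in for the input raster
mask = np.zeros((4, 4), dtype=np.uint8)    # stand-in for the mask raster
mask[1:3, 1:3] = 1                         # pixels flagged by the mask file
data[mask != 0] = -9999                    # write maskValue at the masked pixel positions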
@@ -2806,6 +2813,7 @@ def adjust_acquisArrProv_to_shapeFullArr(arrProv, shapeFullArr, meshwidth=1, sub
def get_raster_size(minx, miny, maxx, maxy, cell_width, cell_height):
    """Determine the number of rows/columns given the bounds of the point data and the desired cell size."""
+
    cols = int((maxx - minx) / cell_width)
    rows = int((maxy - miny) / abs(cell_height))
    return cols, rows
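
Note: a quick sanity check of the arithmetic (coordinate values hypothetical): a 1000 m x 500 m extent at 10 m cells yields 100 x 50 pixels. GDAL geotransforms typically store a negative cell height for north-up rasters, hence the abs(cell_height):

cols, rows = get_raster_size(minx=500000, miny=4100000, maxx=501000, maxy=4100500,
                             cell_width=10, cell_height=-10)
assert (cols, rows) == (100, 50)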
@@ -2827,6 +2835,7 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None,
    :param inSaturated: pixel values allocated to saturated pixels
    :param cutNeg:      if True, all negative values are set to 0
    """
+
    assert isinstance(offsets, list) and isinstance(gains, list), \
        "Offset and Gain parameters have to be provided as two lists containing gains and offsets for \
        each band in ascending order. Got offsets as type '%s' and gains as type '%s'." % (type(offsets), type(gains))
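
Note: the conversion itself is the usual linear per-band scaling; a minimal sketch, assuming the standard radiance equation L = gain * DN + offset (the hunk only shows the argument checks):

import numpy as np

def dn2rad_band(dn, gain, offset, cutNeg=True):
    # linear DN-to-radiance scaling for a single band
    rad = gain * dn.astype(np.float32) + offset
    if cutNeg:
        rad[rad < 0] = 0  # 'cutNeg': set all negative radiances to zero
    return rad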
@@ -2877,6 +2886,7 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
    :param cutNeg: bool: if True, all negative values are turned to zero. default: True
    :return:       Int16 TOA_Reflectance in [0-10000]
    """
+
    assert isinstance(offsets, list) and isinstance(gains, list) and isinstance(irradiances, list), \
        "Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
        "irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
@@ -2918,6 +2928,7 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
    :param inZero:
    :param inSaturated:
    """
+
    bands = 1 if len(ndarray.shape) == 2 else ndarray.shape[2]
    for arg, argname in zip([K1, K2], ['K1', 'K2']):
        assert isinstance(arg[0], float) or isinstance(arg[0], int), "TOARad2Kelvin_fastforward: Expected float or " \
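
Note: for one thermal band this conversion is an inverse Planck relation; a sketch assuming the standard Landsat-style formula T = K2 / ln(emissivity * K1 / L + 1), which the hunk itself does not spell out:

import numpy as np

def toarad2kelvin_band(rad, k1, k2, emissivity=0.95):
    # invert the Planck relation: TOA radiance -> brightness temperature [K]
    return k2 / np.log(emissivity * k1 / rad + 1)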
@@ -2960,6 +2971,7 @@ def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.9
    :param inZero:
    :param inSaturated:
    """
+
    bands = 1 if len(ndarray.shape) == 2 else ndarray.shape[2]
    for arg, argname in zip([offsets, gains, K1, K2], ['Offset', 'Gain', 'K1', 'K2']):
        assert isinstance(offsets, list) and isinstance(gains, list), \
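
Note: judging by its name, this function chains the two conversions sketched above and shifts the result to degrees Celsius; a hedged one-liner reusing those sketches (the real body is not shown in the hunk):

def dn2celsius_band(dn, gain, offset, k1, k2, emissivity=0.95):
    # assumed chaining: DN -> radiance -> Kelvin -> degrees Celsius
    return toarad2kelvin_band(dn2rad_band(dn, gain, offset), k1, k2, emissivity) - 273.15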
@@ -2990,6 +3002,7 @@ def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.9
def is_granule(trueCornerPos):  # TODO
    """Idea: test whether this is a granule or the full scene by computing the
    angles of the edges with respect to north or east."""
    pass
@@ -3095,7 +3108,8 @@ def calc_VZA_array(shape_fullArr, arr_pos, fullSceneCornerPos, viewing_angle, FO
    :param meshwidth:   <int> defines the density of the mesh used for generating the output
                        (1: full resolution; 10: one point each 10 pixels)
    :param nodata_mask: <numpy array>, used for declaring nodata values in the output VZA array
-   :param outFill:     the value that is assigned to nodata area in the output VZA array"""
+   :param outFill:     the value that is assigned to nodata area in the output VZA array
+   """
    # FIXME in case of Sentinel-2 the viewing_angle corresponds to the center point of the image footprint (trueDataCornerPos)
    # FIXME => the algorithm must use the center viewing angle + orbit inclination and must calculate the FOV to be used
@@ -3176,6 +3190,7 @@ def calc_SZA_SAA(date, lon, lat):  # not used anymore since pyorbital is more pr
    :param lon:
    :param lat:
    """
+
    obsv = ephem.Observer()
    obsv.lon, obsv.lat = str(lon), str(lat)
    obsv.date = date
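
Note: presumably the function continues along these lines; a hedged sketch using the pyephem calls visible above (the remainder of the body is not shown in the hunk, and the coordinates are hypothetical):

import math
import ephem

obsv = ephem.Observer()
obsv.lon, obsv.lat = str(13.4), str(52.5)  # degrees, passed as strings as above
obsv.date = '2015/05/04 10:30:00'
sun = ephem.Sun(obsv)                      # sun position for this observer and time
SZA = 90.0 - math.degrees(sun.alt)         # solar zenith = 90 deg minus elevation
SAA = math.degrees(sun.az)                 # solar azimuth, clockwise from north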
@@ -3209,7 +3224,8 @@ def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, fullScene
                      - 'coarse': SZA/SAA is calculated for the image corners, then interpolated by solving
                        an equation system with 4 variables for each image corner:
                        SZA/SAA = a + b*col + c*row + d*col*row.
-   :param lonlat_arr: """
+   :param lonlat_arr:
+   """
    if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
        "'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
@@ -3285,6 +3301,7 @@ def calc_RAA_array(SAA_array, VAA_array, nodata_mask=None, outFill=None):
    :param outFill: the value to be used to fill areas outside the actual image bounds
    :return:
    """
+
    if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
        "'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
@@ -3297,6 +3314,7 @@ def calc_RAA_array(SAA_array, VAA_array, nodata_mask=None, outFill=None):
def get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr, arr_pos):
    """Returns array dims with respect to possible subsetting."""
+
    rows, cols, bands = shape_fullArr
    rows, cols = [arr_pos[0][1] - arr_pos[0][0] + 1, arr_pos[1][1] - arr_pos[1][0] + 1] if arr_pos else (rows, cols)
    rowStart, colStart = [arr_pos[0][0], arr_pos[1][0]] if arr_pos else [0, 0]
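
Note: a quick worked example of the index arithmetic above (positions are inclusive, hence the +1; values hypothetical):

shape_fullArr = (1000, 1000, 6)           # full scene: rows, cols, bands
arr_pos = ((10, 19), (0, 99))             # ((row_start, row_end), (col_start, col_end))
rows = arr_pos[0][1] - arr_pos[0][0] + 1  # 19 - 10 + 1 = 10 rows
cols = arr_pos[1][1] - arr_pos[1][0] + 1  # 99 -  0 + 1 = 100 columns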
@@ -3354,6 +3372,7 @@ def clip_array_using_mapBounds(array, bounds, im_prj, im_gt, fillVal=0):
    :param im_gt:
    :param fillVal:
    """
+
    print(bounds)
    # get the target bounds on the same grid as the input array
    tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax = snap_bounds_to_pixGrid(bounds, im_gt)
...
@@ -82,6 +82,7 @@ def read_ENVI_image_data_as_array(path, arr_shape, arr_pos, logger=None, return_
    :param return_meta: <bool> whether to return not only raster data but also meta data (optional, default=False)
    :param q:           <bool> quiet mode (suppresses all console or logging output) (optional, default=False)
    """
+
    hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path
    if not os.path.isfile(hdr_path):
        if not q:
@@ -133,6 +134,7 @@ def GMSfile2dict(path_GMSfile):
    :param path_GMSfile: absolute path on disk
    :return:             the corresponding Python dictionary
    """
+
    with open(path_GMSfile) as inF:
        GMSdict = json.load(inF)
    return GMSdict
@@ -144,6 +146,7 @@ def unify_envi_header_keys(header_dict):
    :param header_dict:
    """
+
    refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit',
               'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
    unified_header_dict = header_dict
@@ -162,6 +165,7 @@ def get_list_GMSfiles(dataset_list, target):
    :param target: target GMS processing level
    :return:       [/path/to/gms_file1.gms, /path/to/gms_file2.gms]
    """
+
    dataset_list = [dataset_list] if not isinstance(dataset_list, list) else dataset_list
    if CFG.job.call_type == 'webapp':
        get_gmsP = lambda ds, tgt: PG.path_generator(ds, proc_level=tgt).get_path_gmsfile()
@@ -215,6 +219,7 @@ class SRF(object):
        :param format_bandnames: whether to format default strings from LayerBandsAssignment as 'B01', 'B02' etc.
        :param v:                verbose mode
        """
+
        if wvl_unit not in ['micrometers', 'nanometers']:
            raise ValueError('Unknown wavelength unit %s.' % wvl_unit)
@@ -370,6 +375,7 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
    :param tgt_ygsd: output Y GSD
    :return:
    """
+
    # print(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd)
    # handle coordinate infos
...
@@ -7,7 +7,7 @@
# Universal writer for all kinds of BigData intermediate results.
#
# Written by Daniel Scheffler.
#
###############################################################################
# from __future__ import (division, print_function, absolute_import)
@@ -215,6 +215,7 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea
    :param arr_pos:   <tuple> ((row_start,row_end),(col_start,col_end))
    :param overwrite: <bool>
    """
+
    assert isinstance(tileList_or_Array, (list, np.ndarray))
    if isinstance(tileList_or_Array, np.ndarray):
        assert arr_pos and isinstance(arr_pos, (list, tuple))
@@ -286,7 +287,7 @@ def reorder_ENVI_header(path_hdr,tgt_keyOrder):
    # write remaining header items
    [outFile.write(item + '\n') for item in items]


def mask_to_ENVI_Classification(InObj, maskname):
    # type: (object,str) -> (np.ndarray, dict, list, list)
    cd = get_mask_classdefinition(maskname)
outdict[i+'_mean'] = np.mean(arr[arr!=-9999]) outdict[i+'_mean'] = np.mean(arr[arr!=-9999])
outdict[i+'_std'] = np.std (arr[arr!=-9999]) outdict[i+'_std'] = np.std (arr[arr!=-9999])
with open(os.path.join(L1A_object.path_procdata,L1A_object.baseN+'_stats__VZA_SZA_SAA_RAA.dill'),'wb') as outF: with open(os.path.join(L1A_object.path_procdata,L1A_object.baseN+'_stats__VZA_SZA_SAA_RAA.dill'),'wb') as outF:
# json.dump(outdict, outF,skipkeys=True,sort_keys=True,separators=(',', ': '),indent =4) # json.dump(outdict, outF,skipkeys=True,sort_keys=True,separators=(',', ': '),indent =4)
dill.dump(outdict,outF) dill.dump(outdict,outF)
with open(os.path.join(L1A_object.path_procdata,L1A_object.baseN+'_stats__VZA_SZA_SAA_RAA.txt'),'w') as outF: with open(os.path.join(L1A_object.path_procdata,L1A_object.baseN+'_stats__VZA_SZA_SAA_RAA.txt'),'w') as outF:
for k,v in outdict.items(): for k,v in outdict.items():
......
@@ -7,15 +7,22 @@ Created on Mon May 04 13:07:26 2015
 from __future__ import (division, print_function, absolute_import)
-import pickle,imp
+import pickle
+import os
+
+from geomultisens.io import Input_reader as INP_R
+from geomultisens.misc.logging import GMS_logger
+from geomultisens import __file__

-INP_R = imp.load_source('INP_R','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/io/Input_reader.py')
-HLP_F = imp.load_source('HLP_F','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/misc/helper_functions.py')
 out_dict = {}
-logger = HLP_F.setup_logger('log__SRF2PKL', '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/sandbox/out/',1,append=1)
+logger = GMS_logger('log__SRF_DB',
+                    path_logfile=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.log')),
+                    append=True)
 for sensorcode,out_sensorcode in zip(['AST_V1','AST_V2','AST_S','AST_T','TM5','TM7','LDCM','RE5','S1','S4','S5'],
-        ['ASTER_VNIR1','ASTER_VNIR2','ASTER_SWIR','ASTER_TIR','LANDSAT_TM5','LANDSAT_TM7','LANDSAT_LDCM','RapidEye_5','Spot_1','Spot_4','Spot_5']):
+        ['ASTER_VNIR1','ASTER_VNIR2','ASTER_SWIR','ASTER_TIR','LANDSAT_TM5','LANDSAT_TM7','LANDSAT_LDCM',
+         'RapidEye_5','Spot_1','Spot_4','Spot_5']):
     out_dict[out_sensorcode] = INP_R.SRF_reader(sensorcode,logger)
 outFilename = '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/sandbox/out/SRF_DB.pkl'
...
@@ -35,6 +35,7 @@ from .definition_dicts import proc_chain
def execute_pgSQL_query(cursor, query_command):
    """Executes a postgreSQL query and catches the full error message if there is one.
    """
+
    try:
        cursor.execute(query_command)
    except psycopg2.ProgrammingError as e:
@@ -47,6 +48,7 @@ def get_scene_and_dataset_infos_from_postgreSQLdb(sceneid):
    :param sceneid: <int> the GMS scene ID to get information for
    """
+
    query = lambda tablename, vals2return, cond_dict, records2fetch=0: \
        get_info_from_postgreSQLdb(CFG.job.conn_database, tablename, vals2return, cond_dict, records2fetch)
    resultset = query('scenes', ['datasetid', 'satelliteid', 'sensorid', 'subsystemid', 'acquisitiondate', 'entityid',
@@ -79,6 +81,7 @@ def get_info_from_SQLdb(path_db,tablename,vals2return,cond_dict,records2fetch=0)
    :param cond_dict:     <dict> a dictionary containing the query conditions in the form {'column_name':<value>}
    :param records2fetch: <int> number of records to be fetched (default=0: fetch unlimited records)
    """
+
    if not isinstance(vals2return, list): vals2return = [vals2return]
    assert isinstance(records2fetch, int), \
        "get_info_from_SQLdb: Expected an integer for the argument 'records2fetch'. Got %s" % type(records2fetch)
@@ -147,6 +150,7 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict=None,
    :param records2fetch: <int> number of records to be fetched (default=0: fetch unlimited records)
    :param timeout:       <int> allows to set a custom statement timeout (milliseconds)
    """
+
    if not isinstance(vals2return, list): vals2return = [vals2return]
    assert isinstance(records2fetch, int), \
        "get_info_from_postgreSQLdb: Expected an integer for the argument 'records2fetch'. Got %s" % type(records2fetch)
@@ -180,6 +184,7 @@ def update_records_in_postgreSQLdb(conn_params, tablename, vals2update_dict, con
                        HINT: <value> can also be a list or a tuple of elements to match
    :param timeout:     <int> allows to set a custom statement timeout (milliseconds)
    """
+
    cond_dict = cond_dict if cond_dict else {}
    conn_params = "%s options = '-c statement_timeout=%s'" % (conn_params, timeout)
    connection = psycopg2.connect(conn_params)
@@ -214,6 +219,7 @@ def append_item_to_arrayCol_in_postgreSQLdb(conn_params, tablename, vals2append_
                        HINT: <value> can also be a list or a tuple of elements to match
    :param timeout:     <int> allows to set a custom statement timeout (milliseconds)
    """
+
    assert len(vals2append_dict) == 1, 'Values can be appended to only one column at once.'
    if type(list(vals2append_dict.values())[0]) in [list, tuple]:
        raise NotImplementedError('Appending multiple values to one column at once is not yet supported.')
@@ -252,6 +258,7 @@ def remove_item_from_arrayCol_in_postgreSQLdb(conn_params, tablename, vals2remov
                        HINT: <value> can also be a list or a tuple of elements to match
    :param timeout:     <int> allows to set a custom statement timeout (milliseconds)
    """
+
    assert len(vals2remove_dict) == 1, 'Values can be removed from only one column at once.'
    if type(list(vals2remove_dict.values())[0]) in [list, tuple]:
        raise NotImplementedError('Removing multiple values from one column at once is not yet supported.')
@@ -327,6 +334,7 @@ def create_record_in_postgreSQLdb(conn_params, tablename, vals2write_dict, timeo
    :param vals2write_dict: <dict> a dictionary containing keys and values to be set in the form {'col_name':<value>}
    :param timeout:         <int> allows to set a custom statement timeout (milliseconds)
    """
+
    conn_params = "%s options = '-c statement_timeout=%s'" % (conn_params, timeout)
    connection = psycopg2.connect(conn_params)
    if connection is None:
@@ -497,6 +505,7 @@ def get_dict_satellite_name_id(conn_params):
    :param conn_params: <str> pgSQL database connection parameters
    """
+
    res = get_info_from_postgreSQLdb(conn_params, 'satellites', ['name', 'id'])
    assert len(res) > 0, 'Error getting satellite names from postgreSQL database.'
    arr = np.array(res)
@@ -507,6 +516,7 @@ def get_dict_sensor_name_id(conn_params):
    # type: (str) -> dict
    """Returns a dictionary with sensor names as keys and sensor IDs as values as read from pgSQL database.
-   :param conn_params: <str> pgSQL database connection parameters """
+   :param conn_params: <str> pgSQL database connection parameters
+   """
    res = get_info_from_postgreSQLdb(conn_params, 'sensors', ['name', 'id'])
    assert len(res) > 0, 'Error getting sensor names from postgreSQL database.'
    arr = np.array(res)
@@ -521,6 +531,7 @@ def get_entityIDs_from_filename(conn_DB,filename):
    :param conn_DB:  <str> pgSQL database connection parameters
    :param filename: <str> the filename to get the corresponding entity ID(s) for
    """
+
    if filename[:2] in ['LE', 'LC', 'LO'] and filename.endswith('.tar.gz'):  # Landsat
        entityIDs = [filename.split('.tar.gz')[0]]
    else:
@@ -538,6 +549,7 @@ def get_filename_by_entityID(conn_DB, entityid, satellite):
    :param entityid:  <str> entity ID
    :param satellite: <str> satellite name to which the entity ID belongs
    """
+
    if re.search('Landsat', satellite, re.I):
        filename = '%s.tar.gz' % entityid
    elif re.search('Sentinel-2', satellite, re.I):
@@ -560,6 +572,7 @@ def get_notDownloadedsceneIDs(conn_DB,entityIDs,satellite,sensor,src_folder):
    :param sensor:     <str> the name of the sensor to restrict the query on
    :param src_folder: <str> the source directory where archive files are saved
    """
+
    columns = ['id', 'entityid', 'satelliteid', 'sensorid', 'filename', 'proc_level']
    result = get_info_from_postgreSQLdb(conn_DB, 'scenes', columns, {'entityid': entityIDs})
    df = pd.DataFrame(result, columns=columns)
@@ -592,6 +605,7 @@ class GMS_JOB(object):
        """
        :param conn_db: <str> the database connection parameters as given by CFG.job.conn_params
        """
+
        self.conn = conn_db
        self.dataframe = GeoDataFrame()
        self.scene_counts = {}  # set by self.create()
@@ -651,6 +665,7 @@ class GMS_JOB(object):
    def db_entry(self):
        """Returns an OrderedDict containing keys and values of the database entry.
        """
+
        db_entry = collections.OrderedDict()