Commit f2eb8b47 authored by Daniel Scheffler

Revised some docstrings.

parent 92ab76dd
Pipeline #365 passed in 1 minute and 36 seconds
Welcome to GeoMultiSens's documentation!
-======================================
+========================================
Contents:
@@ -1257,6 +1257,7 @@ class GEOPROCESSING(object):
:param dtype: GDALDataType string: GDT_Unknown; GDT_Byte; GDT_UInt16; GDT_Int16; GDT_UInt32; GDT_Int32;
GDT_Float32; GDT_Float64; GDT_CInt16; GDT_CInt32; GDT_CFloat32; GDT_CFloat64; GDT_TypeCount
"""
if outPath is None:
outPath = self.workspace
@@ -1330,6 +1331,7 @@ class GEOPROCESSING(object):
:param dtype: GDALDataType string: GDT_Unknown; GDT_Byte; GDT_UInt16; GDT_Int16; GDT_UInt32; GDT_Int32;
GDT_Float32; GDT_Float64; GDT_CInt16; GDT_CInt32; GDT_CFloat32; GDT_CFloat64; GDT_TypeCount
"""
if outPath is None:
outPath = self.workspace
@@ -1360,6 +1362,7 @@ class GEOPROCESSING(object):
:param value: value for the single-band file; the new file has one band filled with the given value
:param outPath:
"""
if outPath is None:
outPath = self.workspace
@@ -2264,6 +2267,7 @@ def ndarray2gdal(ndarray, outPath=None, importFile=None, direction=1, GDAL_Type=
:param v:
:return: GDAL data File
"""
if v:
print("\n--------GEOPROCESSING--------\n##Function##"
"\n**ndarray2gdal**")
@@ -2336,6 +2340,7 @@ def GetMaxExt_nb(datalist, bin=1, v=0):
:param v:
:return: [ulx,uly,lrx,lry]: maximum extent of all files without background pixels, as a list
"""
if v == 1:
print("\n--------GEOPROCESSING--------\n##Function##"
"\n**GetMaxExt_nb**")
@@ -2480,6 +2485,7 @@ def convertGdalNumpyDataType(dType):
output:
corresponding dataType
"""
# dictionary to translate GDAL data types (strings) in corresponding numpy data types
dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
"Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
@@ -2521,6 +2527,7 @@ def ApplyMask(dataFile, maskFile, maskValue, outPath=None, keepDataType=1, v=0):
:param v:
:return: InputFile with the given maskValue at the masked pixel positions (ENVI .bsq or ndarray)
"""
# define default outpath filename+masked.bsq
if outPath is None:
pass
@@ -2806,6 +2813,7 @@ def adjust_acquisArrProv_to_shapeFullArr(arrProv, shapeFullArr, meshwidth=1, sub
def get_raster_size(minx, miny, maxx, maxy, cell_width, cell_height):
"""Determine the number of rows/columns given the bounds of the point data and the desired cell size"""
cols = int((maxx - minx) / cell_width)
rows = int((maxy - miny) / abs(cell_height))
return cols, rows
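For illustration, a quick numeric check of this arithmetic (values chosen arbitrarily); the abs() makes it robust to the negative cell height of north-up geotransforms:

cols, rows = get_raster_size(minx=0, miny=0, maxx=3000, maxy=1500,
                             cell_width=30, cell_height=-30)
assert (cols, rows) == (100, 50)  # 3000 m / 30 m = 100 cols; 1500 m / 30 m = 50 rows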
@@ -2827,6 +2835,7 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None,
:param inSaturated: pixelvalues allocated to saturated pixels
:param cutNeg: cut negative values -> all negative values are set to 0
"""
assert isinstance(offsets,list) and isinstance(gains,list), \
"Offset and Gain parameters have to be provided as two lists containing gains and offsets for \
each band in ascending order. Got offsets as type '%s' and gains as type '%s'." %(type(offsets),type(gains))
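The per-band conversion behind this function is the usual linear calibration; a minimal, self-contained sketch of that arithmetic (not the original function body, which is truncated in this hunk):

import numpy as np

def dn2rad_sketch(dn, offsets, gains):
    # at-sensor radiance per band: L = gain * DN + offset
    dn = np.atleast_3d(dn).astype(np.float32)
    gains = np.asarray(gains, dtype=np.float32).reshape(1, 1, -1)
    offsets = np.asarray(offsets, dtype=np.float32).reshape(1, 1, -1)
    return gains * dn + offsets

rad = dn2rad_sketch(np.full((2, 2, 3), 100), offsets=[0.1, 0.2, 0.3], gains=[0.05, 0.04, 0.03])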
@@ -2877,6 +2886,7 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
:param cutNeg: bool: if True, all negative values are set to zero (default: True)
:return: Int16 TOA_Reflectance in [0-10000]
"""
assert isinstance(offsets,list) and isinstance(gains,list) and isinstance(irradiances, list), \
"Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
"irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
@@ -2918,6 +2928,7 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([K1,K2],['K1', 'K2']):
assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \
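The hunk is truncated before the actual conversion; the underlying physics is the inverse Planck function. A sketch, with the caveat that how exactly emissivity enters is an assumption here (a common convention divides the radiance by it first):

import numpy as np

def toarad2kelvin_sketch(rad, K1, K2, emissivity=0.95):
    # T [Kelvin] = K2 / ln(K1 / (L / emissivity) + 1); emissivity handling is assumed
    rad = np.asarray(rad, dtype=np.float64)
    return K2 / np.log(K1 / (rad / emissivity) + 1)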
@@ -2960,6 +2971,7 @@ def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.9
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([offsets,gains,K1,K2],['Offset', 'Gain','K1','K2']):
assert isinstance(offsets,list) and isinstance(gains,list), \
@@ -2990,6 +3002,7 @@ def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.9
def is_granule(trueCornerPos): # TODO
"""Idee: testen, ob es sich um Granule handelt oder um die volle Szene -
dazu Winkel der Kanten zu Nord oder Ost berechnen"""
pass
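A hedged sketch of the idea described in the docstring (hypothetical helper, not part of the module): compute the azimuth of one scene edge from two true corner positions; the edges of a full scene run almost due north/east, granule edges generally do not:

import math

def edge_azimuth_deg(corner_a, corner_b):
    # azimuth of the edge corner_a -> corner_b, corners given as (row, col);
    # the row axis points down, hence -d_row for the northing component
    d_row = corner_b[0] - corner_a[0]
    d_col = corner_b[1] - corner_a[1]
    return math.degrees(math.atan2(d_col, -d_row)) % 360

# top edge of a full scene, (0, 0) -> (0, ncols - 1): ~90 deg (due east)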
@@ -3095,7 +3108,8 @@ def calc_VZA_array(shape_fullArr, arr_pos, fullSceneCornerPos, viewing_angle, FO
:param meshwidth: <int> defines the density of the mesh used for generating the output
(1: full resolution; 10: one point each 10 pixels)
:param nodata_mask: <numpy array>, used for declaring nodata values in the output VZA array
-:param outFill: the value that is assigned to nodata area in the output VZA array"""
+:param outFill: the value that is assigned to nodata area in the output VZA array
+"""
# FIXME in case of Sentinel-2 the viewing_angle corresponds to the center point of the image footprint (trueDataCornerPos)
# FIXME => the algorithm must use the center viewing angle + orbit inclination and must calculate the FOV to be used
@@ -3176,6 +3190,7 @@ def calc_SZA_SAA(date, lon, lat): # not used anymore since pyorbital is more pr
:param lon:
:param lat:
"""
obsv = ephem.Observer()
obsv.lon, obsv.lat = str(lon), str(lat)
obsv.date = date
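The hunk ends before the actual sun-position computation; the remainder presumably uses pyephem's documented Sun body, roughly like this (a sketch with hypothetical coordinates, not the original continuation):

import math
import ephem

obsv = ephem.Observer()
obsv.lon, obsv.lat = str(13.06), str(52.38)  # hypothetical scene center
obsv.date = '2015/5/4 10:30:00'

sun = ephem.Sun()
sun.compute(obsv)
SZA = 90. - math.degrees(sun.alt)  # solar zenith = 90 deg minus elevation
SAA = math.degrees(sun.az)         # solar azimuth, clockwise from north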
@@ -3209,7 +3224,8 @@ def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, fullScene
- 'coarse': SZA/SAA is calculated for image corners then interpolated by solving
an equation system with 4 variables for each image corner:
SZA/SAA = a + b*col + c*row + d*col*row.
-:param lonlat_arr: """
+:param lonlat_arr:
+"""
if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
"'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
@@ -3285,6 +3301,7 @@ def calc_RAA_array(SAA_array, VAA_array, nodata_mask=None, outFill=None):
:param outFill: the value to be used to fill areas outside the actual image bounds
:return:
"""
if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
"'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
@@ -3297,6 +3314,7 @@ def calc_RAA_array(SAA_array, VAA_array, nodata_mask=None, outFill=None):
def get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr,arr_pos):
"""Returns array dims with respect to possible subsetting."""
rows, cols, bands = shape_fullArr
rows, cols = [arr_pos[0][1] - arr_pos[0][0] + 1, arr_pos[1][1] - arr_pos[1][0] + 1] if arr_pos else (rows, cols)
rowStart, colStart = [arr_pos[0][0], arr_pos[1][0]] if arr_pos else [0, 0]
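A quick numeric check of the subsetting arithmetic shown above (inclusive row/column bounds, values chosen arbitrarily):

shape_fullArr = (1000, 2000, 6)
arr_pos = ((100, 199), (0, 49))  # ((row_start, row_end), (col_start, col_end))

rows, cols, bands = shape_fullArr
rows, cols = (arr_pos[0][1] - arr_pos[0][0] + 1, arr_pos[1][1] - arr_pos[1][0] + 1)
rowStart, colStart = arr_pos[0][0], arr_pos[1][0]
assert (rows, cols, rowStart, colStart) == (100, 50, 100, 0)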
@@ -3354,6 +3372,7 @@ def clip_array_using_mapBounds(array, bounds, im_prj, im_gt, fillVal=0):
:param im_gt:
:param fillVal:
"""
print(bounds)
# get target bounds on the same grid like the input array
tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax = snap_bounds_to_pixGrid(bounds,im_gt)
@@ -82,6 +82,7 @@ def read_ENVI_image_data_as_array(path, arr_shape, arr_pos, logger=None, return_
:param return_meta: <bool> whether to return not only raster data but also meta data (optional, default=False)
:param q: <bool> quiet mode (suppresses all console or logging output) (optional, default=False)
"""
hdr_path = os.path.splitext(path)[0]+'.hdr' if not os.path.splitext(path)[1]=='.hdr' else path
if not os.path.isfile(hdr_path):
if not q:
@@ -133,6 +134,7 @@ def GMSfile2dict(path_GMSfile):
:param path_GMSfile: absolute path on disk
:return: the corresponding Python dictionary
"""
with open(path_GMSfile) as inF:
GMSdict = json.load(inF)
return GMSdict
@@ -144,6 +146,7 @@ def unify_envi_header_keys(header_dict):
:param header_dict:
"""
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit',
'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
unified_header_dict = header_dict
@@ -162,6 +165,7 @@ def get_list_GMSfiles(dataset_list, target):
:param target: target GMS processing level
:return [/path/to/gms_file1.gms, /path/to/gms_file2.gms]
"""
dataset_list = [dataset_list] if not isinstance(dataset_list,list) else dataset_list
if CFG.job.call_type == 'webapp':
get_gmsP = lambda ds,tgt: PG.path_generator(ds,proc_level=tgt).get_path_gmsfile()
@@ -215,6 +219,7 @@ class SRF(object):
:param format_bandnames: whether to format default strings from LayerBandsAssignment as 'B01', 'B02', etc.
:param v: verbose mode
"""
if not wvl_unit in ['micrometers', 'nanometers']:
raise ValueError('Unknown wavelength unit %s.' %wvl_unit)
@@ -370,6 +375,7 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
:param tgt_ygsd: output Y GSD
:return:
"""
#print(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd)
# handle coordinate infos
@@ -215,6 +215,7 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea
:param arr_pos: <tuple> ((row_start,row_end),(col_start,col_end))
:param overwrite: <bool>
"""
assert isinstance(tileList_or_Array,(list,np.ndarray))
if isinstance(tileList_or_Array,np.ndarray):
assert arr_pos and isinstance(arr_pos,(list,tuple))
@@ -7,15 +7,22 @@ Created on Mon May 04 13:07:26 2015
 from __future__ import (division, print_function, absolute_import)
-import pickle,imp
+import pickle
+import os
+from geomultisens.io import Input_reader as INP_R
+from geomultisens.misc.logging import GMS_logger
+from geomultisens import __file__
-INP_R = imp.load_source('INP_R','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/io/Input_reader.py')
-HLP_F = imp.load_source('HLP_F','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/misc/helper_functions.py')
 out_dict = {}
-logger = HLP_F.setup_logger('log__SRF2PKL', '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/sandbox/out/',1,append=1)
+logger = GMS_logger('log__SRF_DB',
+                    path_logfile=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.log')),
+                    append=True)
 for sensorcode,out_sensorcode in zip(['AST_V1','AST_V2','AST_S','AST_T','TM5','TM7','LDCM','RE5','S1','S4','S5'],
-                                     ['ASTER_VNIR1','ASTER_VNIR2','ASTER_SWIR','ASTER_TIR','LANDSAT_TM5','LANDSAT_TM7','LANDSAT_LDCM','RapidEye_5','Spot_1','Spot_4','Spot_5']):
+                                     ['ASTER_VNIR1','ASTER_VNIR2','ASTER_SWIR','ASTER_TIR','LANDSAT_TM5','LANDSAT_TM7','LANDSAT_LDCM',
+                                      'RapidEye_5','Spot_1','Spot_4','Spot_5']):
     out_dict[out_sensorcode] = INP_R.SRF_reader(sensorcode,logger)
 outFilename = '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/sandbox/out/SRF_DB.pkl'
@@ -35,6 +35,7 @@ from .definition_dicts import proc_chain
def execute_pgSQL_query(cursor,query_command):
"""Executes a postgreSQL query catches the full error message if there is one.
"""
try:
cursor.execute(query_command)
except psycopg2.ProgrammingError as e:
@@ -47,6 +48,7 @@ def get_scene_and_dataset_infos_from_postgreSQLdb(sceneid):
:param sceneid: <int> the GMS scene ID to get information for
"""
query = lambda tablename,vals2return,cond_dict,records2fetch=0:\
get_info_from_postgreSQLdb(CFG.job.conn_database,tablename,vals2return,cond_dict,records2fetch)
resultset = query('scenes',['datasetid','satelliteid','sensorid','subsystemid', 'acquisitiondate', 'entityid',
@@ -79,6 +81,7 @@ def get_info_from_SQLdb(path_db,tablename,vals2return,cond_dict,records2fetch=0)
:param cond_dict: <dict> a dictionary containing the query conditions in the form {'column_name':<value>}
:param records2fetch: <int> number of records to be fetched (default=0: fetch unlimited records)
"""
if not isinstance(vals2return,list): vals2return = [vals2return]
assert isinstance(records2fetch,int), \
"get_info_from_SQLdb: Expected an integer for the argument 'records2return'. Got %s" %type(records2fetch)
@@ -147,6 +150,7 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict=None,
:param records2fetch: <int> number of records to be fetched (default=0: fetch unlimited records)
:param timeout: <int> allows to set a custom statement timeout (milliseconds)
"""
if not isinstance(vals2return,list): vals2return = [vals2return]
assert isinstance(records2fetch,int), \
"get_info_from_postgreSQLdb: Expected an integer for the argument 'records2return'. Got %s" %type(records2fetch)
@@ -180,6 +184,7 @@ def update_records_in_postgreSQLdb(conn_params, tablename, vals2update_dict, con
HINT: <value> can also be a list or a tuple of elements to match
:param timeout: <int> allows to set a custom statement timeout (milliseconds)
"""
cond_dict = cond_dict if cond_dict else {}
conn_params = "%s options = '-c statement_timeout=%s'" % (conn_params, timeout)
connection = psycopg2.connect(conn_params)
@@ -214,6 +219,7 @@ def append_item_to_arrayCol_in_postgreSQLdb(conn_params, tablename, vals2append_
HINT: <value> can also be a list or a tuple of elements to match
:param timeout: <int> allows to set a custom statement timeout (milliseconds)
"""
assert len(vals2append_dict)==1, 'Values can be appended to only one column at once.'
if type(list(vals2append_dict.values())[0]) in [list,tuple]:
raise NotImplementedError('Appending multiple values to one column at once is not yet supported.')
@@ -252,6 +258,7 @@ def remove_item_from_arrayCol_in_postgreSQLdb(conn_params, tablename, vals2remov
HINT: <value> can also be a list or a tuple of elements to match
:param timeout: <int> allows to set a custom statement timeout (milliseconds)
"""
assert len(vals2remove_dict)==1, 'Values can be removed from only one column at once.'
if type(list(vals2remove_dict.values())[0]) in [list,tuple]:
raise NotImplementedError('Removing multiple values from one column at once is not yet supported.')
@@ -327,6 +334,7 @@ def create_record_in_postgreSQLdb(conn_params, tablename, vals2write_dict, timeo
:param vals2write_dict: <dict> a dictionary containing keys and values to be set in the form {'col_name':<value>}
:param timeout: <int> allows to set a custom statement timeout (milliseconds)
"""
conn_params = "%s options = '-c statement_timeout=%s'" % (conn_params, timeout)
connection = psycopg2.connect(conn_params)
if connection is None:
@@ -497,6 +505,7 @@ def get_dict_satellite_name_id(conn_params):
:param conn_params: <str> pgSQL database connection parameters
"""
res = get_info_from_postgreSQLdb(conn_params,'satellites',['name','id'])
assert len(res)>0, 'Error getting satellite names from postgreSQL database.'
arr = np.array(res)
@@ -507,6 +516,7 @@ def get_dict_sensor_name_id(conn_params):
# type: (str) -> dict
"""Returns a dictionary with sensor names as keys and sensor IDs as values as read from pgSQL database.
:param conn_params: <str> pgSQL database connection parameters """
res = get_info_from_postgreSQLdb(conn_params,'sensors',['name','id'])
assert len(res)>0, 'Error getting sensor names from postgreSQL database.'
arr = np.array(res)
@@ -521,6 +531,7 @@ def get_entityIDs_from_filename(conn_DB,filename):
:param conn_DB: <str> pgSQL database connection parameters
:param filename: <str> the filename to get the corresponding entity ID(s) for
"""
if filename[:2] in ['LE','LC','LO'] and filename.endswith('.tar.gz'): # Landsat
entityIDs = [filename.split('.tar.gz')[0]]
else:
@@ -538,6 +549,7 @@ def get_filename_by_entityID(conn_DB, entityid, satellite):
:param entityid: <str> entity ID
:param satellite: <str> satellite name to which the entity ID is belonging
"""
if re.search('Landsat', satellite, re.I):
filename = '%s.tar.gz' % entityid
elif re.search('Sentinel-2', satellite, re.I):
@@ -560,6 +572,7 @@ def get_notDownloadedsceneIDs(conn_DB,entityIDs,satellite,sensor,src_folder):
:param sensor: <str> the name of the sensor to restrict the query on
:param src_folder: <str> the source directory where archive files are saved
"""
columns = ['id','entityid','satelliteid','sensorid','filename','proc_level']
result = get_info_from_postgreSQLdb(conn_DB,'scenes',columns,{'entityid':entityIDs})
df = pd.DataFrame(result,columns=columns)
@@ -592,6 +605,7 @@ class GMS_JOB(object):
"""
:param conn_db: <str> the database connection parameters as given by CFG.job.conn_params
"""
self.conn = conn_db
self.dataframe = GeoDataFrame()
self.scene_counts = {} # set by self.create()
@@ -651,6 +665,7 @@ class GMS_JOB(object):
def db_entry(self):
"""Returns an OrderedDict containing keys and values of the database entry.
"""
db_entry = collections.OrderedDict()
for i in self.jobs_table_columns:
db_entry[i] = getattr(self,i)
@@ -669,6 +684,7 @@ class GMS_JOB(object):
(default:249 - Sentinel-2A), 104=Landsat-8
:param comment: <str> a comment describing the job (e.g. 'Beta job')
"""
self._set_target_sensor_specs(virtual_sensor_id,datasetid_spatial_ref)
self.comment = comment
@@ -905,6 +921,7 @@ class GMS_JOB(object):
Create a GMS_JOB instance by querying the database for a specific job ID.
:param job_ID: <int> a valid id from the database table 'jobs'
"""
res = get_info_from_postgreSQLdb(self.conn,'jobs',self.jobs_table_columns,{'id':job_ID})
if not res:
raise ValueError("No job with ID %s found in 'jobs' table of the database." %job_ID)
@@ -936,6 +953,7 @@ class GMS_JOB(object):
def reset_job_progress(self):
"""Resets everthing in the database entry that has been written during the last run of the job..
"""
self.finishtime = None
self.failed_sceneids = []
self.progress = None
@@ -980,6 +998,7 @@ class GMS_JOB(object):
Add the job to the 'jobs' table of the database
:return: <int> the job ID of the newly created job
"""
if not self.dataframe.empty:
all_sat,all_sen = \
zip(*[i.split('__') for i in (np.unique(self.dataframe['satellite']+'__'+self.dataframe['sensor']))])
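The zip/split idiom above deduplicates satellite/sensor combinations; a tiny standalone illustration (hypothetical values):

import numpy as np
import pandas as pd

df = pd.DataFrame({'satellite': ['Landsat-8', 'Landsat-8', 'Sentinel-2A'],
                   'sensor':    ['OLI_TIRS',  'OLI_TIRS',  'MSI']})
# join the two columns, deduplicate the pairs, then split them apart again
all_sat, all_sen = zip(*[i.split('__') for i in np.unique(df['satellite'] + '__' + df['sensor'])])
# all_sat == ('Landsat-8', 'Sentinel-2A'); all_sen == ('OLI_TIRS', 'MSI')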
@@ -1025,6 +1044,7 @@ class GMS_JOB(object):
:param proc_level: <str> delete only results that have the given processing level
:param force:
"""
self.__delete_procdata(self.failed_sceneids,'failed', proc_level=proc_level, force=force)
@@ -1034,6 +1054,7 @@ class GMS_JOB(object):
:param proc_level: <str> delete only results that have the given processing level
:param force:
"""
self.__delete_procdata(self.sceneids,'processed', proc_level=proc_level, force=force)
@@ -1045,6 +1066,7 @@ class GMS_JOB(object):
:param proc_level: <str> delete only results that have the given processing level
:param force:
"""
if self.exists_in_db:
if list_sceneIDs:
delete = 'J'
@@ -1124,6 +1146,7 @@ def add_externally_downloaded_data_to_GMSDB(conn_DB, src_folder, filenames, sate
:param satellite: <str> the name of the satellite to which the filenames are belonging
:param sensor: <str> the name of the sensor to which the filenames are belonging
"""
# FIXME this method only works for Landsat archives or if filename is already set in database (not always the case for S2A)!
res = [get_entityIDs_from_filename(conn_DB, fName) for fName in filenames]
entityIDs = list(itertools.chain.from_iterable(res))
@@ -1180,6 +1203,7 @@ def pdDataFrame_to_sql_k(engine, frame, name, if_exists='fail', index=True,
The types should be a SQLAlchemy or GeoSQLAlchemy2 type,
:param kwargs: {'keys',...}
"""
pandas_sql = pd.io.sql.pandasSQL_builder(engine, schema=None, flavor=None)
if dtype is not None:
for col, my_type in dtype.items():
@@ -1207,6 +1231,7 @@ def import_shapefile_into_postgreSQL_database(path_shp, tablename, cols2import=N
:param index_label: <str> Column label for index column(s).
:param primarykey: <str> the name of the column to be set as primary key of the target table
"""
print('Reading shapefile %s...' %path_shp)
GDF = GeoDataFrame.from_file(path_shp)
GDF['geom'] = [*GDF['geometry'].map(str)]
@@ -1242,6 +1267,7 @@ def data_DB_updater(obj_dict):
:param obj_dict: <dict> a copy of the dictionary of the respective GMS object
"""
assert isinstance(obj_dict,dict), 'The input for data_DB_updater() has to be a dictionary.'
list2str = lambda list2convert: ''.join([str(val) for val in list2convert])
@@ -1456,6 +1482,7 @@ def archive_exists_on_fileserver(conn_DB, entityID):
:param conn_DB: <str> pgSQL database connection parameters
:param entityID: <str> entity ID to be checked
"""
records = get_info_from_postgreSQLdb(conn_DB, 'scenes', ['satelliteid', 'sensorid'], {'entityid': entityID})
records_filt = [rec for rec in records if rec[0] is not None and rec[1] is not None]
if len(records_filt) == 1:
@@ -123,6 +123,7 @@ class METADATA(object):
"""----METHOD_1------------------------------------------------------------
Set all attributes to their default values.
"""
self.EntityID = ""
self.Satellite = ""
self.Sensor = ""
@@ -195,6 +196,7 @@ class METADATA(object):
@property
def AcqDateTime(self):
"""Returns a datetime.datetime object containing date, time and timezone (UTC time)."""
return self._AcqDateTime
@AcqDateTime.setter
@@ -220,6 +222,7 @@ class METADATA(object):
"""----METHOD_2------------------------------------------------------------
# read metadata from SPOT DIMAP file
"""
#self.default_attr()
if os.path.isdir(self.FolderOrArchive):
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'*/scene01/metadata.dim'))
@@ -381,6 +384,7 @@ class METADATA(object):
read metadata from LANDSAT metafile <dataname>.MTL.txt (metadata file of the LPGS processing chain)
:param LayerBandsAssignment:
"""
# self.default_attr()
self.LayerBandsAssignment = LayerBandsAssignment
self.nBands = len(LayerBandsAssignment)
@@ -649,6 +653,7 @@ class METADATA(object):
"""----METHOD_4------------------------------------------------------------
read metadata from RapidEye metafile: <dataname>metadata.xml
"""
# self.default_attr()
if os.path.isdir(self.FolderOrArchive):
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'*/*_metadata.xml'))
@@ -808,6 +813,7 @@ class METADATA(object):
output:
:param subsystem:
"""
# self.default_attr()
dat_ = open(self.FolderOrArchive, "r").read() if sys.version_info[0]<3 else \
open(self.FolderOrArchive, "rb").read().decode('latin-1')
@@ -1007,6 +1013,7 @@ class METADATA(object):
"""----METHOD_6------------------------------------------------------------
read metadata from ALOS summary.txt
"""
# self.default_attr()
if os.path.isdir(self.FolderOrArchive):
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'*/*data*/summary.txt'))
@@ -1154,6 +1161,7 @@ class METADATA(object):
"""----METHOD_6------------------------------------------------------------
read metadata from the binary ALOS leader file; for the exact information content see: file:///misc/ro2/behling/Satelliten/ALOS/doc/ALOS Product Format description.pdf
"""
# self.default_attr()
if os.path.isdir(self.FolderOrArchive):
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'*/*data*/LED-*'))
@@ -1173,6 +1181,7 @@ class METADATA(object):
def Read_Sentinel2A_xmls(self):
"""Read metadata from Sentinel-2A generic xml and granule xml"""
# self.default_attr()
assert self.SceneID is not None and self.SceneID!=-9999, "Read_Sentinel2A_xmls(): Missing scene ID. "
@@ -1447,6 +1456,7 @@ class METADATA(object):
ENVI file headers in the same order.
"""
# FIXME orbit params are missing
# descr_dic = { ### FillZeroSaturated von HLP_F ausgeben lassen
# 'ALOS_Rad' :"(1) GEOCODED Level1B2 Product; '"+self.Dataname+"'\n (2) Int16 RadianceData in [W * m-2 * sr-1 * micrometer-1]*10; radiance scale factor: 10 (fillPixels: -99, zeroPixels:0, saturatedPixels: 32767 (Max of Int16))'",
@@ -1548,6 +1558,7 @@ class METADATA(object):
:param acqDate:
"""
if not os.path.exists(CFG.job.path_earthSunDist):
self.logger.warning("\n\t WARNING: Earth Sun Distance is assumed to be "
"1.0 because no database can be found at %s.""" % CFG.job.path_earthSunDist)
@@ -1573,6 +1584,7 @@ class METADATA(object):
:param fullSceneCornerLonLat:
:param logger:
"""
ul,lr = fullSceneCornerLonLat[0], fullSceneCornerLonLat[3]
center_coord = [np.mean([ul[0],lr[0]]),np.mean([ul[1],lr[1]])]
time0_ord = mdates.date2num(datetime.datetime.strptime('%s %s' %(self.AcqDate,'00:00:00'),'%Y-%m-%d %H:%M:%S'))
@@ -1603,6 +1615,7 @@ class METADATA(object):
:param shape_fullArr:
:param logger:
"""
if fullSceneCornerPos != list(([0, 0], [0, shape_fullArr[1] - 1],
[shape_fullArr[0] - 1, 0], [shape_fullArr[0] - 1, shape_fullArr[1] - 1])):
orbitAltitudeKm, orbitPeriodMin = self.orbitParams[0], self.orbitParams[2]
@@ -1706,6 +1719,7 @@ def get_LayerBandsAssignment(GMS_identifier, nBands=None, ignore_usecase=False,
:param return_fullLBA: in case there is a subsystem:
whether to return LayerBandsAssignment for all bands or for the current subsystem
"""
if GMS_identifier['image_type'] == 'RSD':
GMS_sensorcode = get_GMS_sensorcode(GMS_identifier)
assert GMS_sensorcode, 'Unable to get Layer Bands Assignment. No valid sensorcode provided (got >None<). '
@@ -1900,6 +1914,7 @@ def metaDict_to_metaODict(metaDict,logger=None):
:param metaDict: <dict> GMS metadata dictionary
:param logger: <logging.logger> if given, warnings will be logged. Otherwise they are raised.
"""
from ..io.Output_writer import enviHdr_keyOrder
expected_keys = [k for k in enviHdr_keyOrder if k in metaDict]
only_gmsFile_keys = ['ViewingAngle_arrProv','IncidenceAngle_arrProv','projection']
@@ -147,6 +147,7 @@ class Dataset(object):
@property # FIXME does not work yet
def log(self):
"""Returns a string of all logged messages until now."""
return self._log
@@ -352,6 +353,7 @@ class Dataset(object):
"""
Returns an SRTM DEM with the exact dimensions and pixel grid of self.arr, as an instance of GeoArray.
"""
if self._dem is None:
self.logger.info('Generating DEM...')
self._dem = INP_R.get_dem_by_extent(
@@ -384,6 +386,7 @@ class Dataset(object):
"""
Returns the path generator object for generating file paths belonging to the GMS object.
"""
if self._pathGen and self._pathGen.proc_level==self.proc_level:
return self._pathGen
else:
@@ -410,6 +413,7 @@ class Dataset(object):
:return:
"""
return self._ac_errors # FIXME should give a warning if None
@@ -479,6 +483,7 @@ class Dataset(object):
:param overwrite: <bool> whether to overwrite existing nodata mask that has already been calculated
:return:
"""
self.logger.info('Calculating nodata mask...')
if self._mask_nodata is None or overwrite:
@@ -630,6 +635,7 @@ class Dataset(object):
is not overwritten or written once more later, but only renamed.
:param compression: <bool> enable or disable compression
"""
pass
@@ -226,6 +226,7 @@ class GMS_object(Dataset):
@property
def georef(self):
"""Returns True if the current dataset can serve as spatial reference."""
return True if self.image_type == 'RSD' and re.search('OLI', self.sensor, re.I) else False
@@ -265,6 +266,7 @@ class GMS_object(Dataset):
NOTE: This does not automatically update mask_nodata and mask_clouds BUT if mask_nodata and mask_clouds are
None their getters will automatically synchronize!
"""
if geoArr_initArgs[0] is not None:
self._masks = GeoArray(*geoArr_initArgs)
self._masks.nodata = 0
@@ -285,6 +287,7 @@ class GMS_object(Dataset):
Returns the options dictionary needed as input for atmospheric correction. If an empty dictionary is returned,
atmospheric correction is not yet available for the current sensor and will later be skipped.
"""
if not self._ac_options:
path_ac_options = PG.get_path_ac_options(self.GMS_identifier)
@@ -598,6 +601,7 @@ class GMS_object(Dataset):
:param list_