Commit e17d387b authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

process controller: extensive revision in order to clean up old stuff and to...

process controller: extensive revision in order to clean up old stuff and to make it more understandable
- added process_controller_for_testing_OLD_20160405.py: a snapshot of the previous version
L1B_P.COREG: revised get_reference_image_params():
 -> the algorithm now respects satellite, overlap percentage, cloudcover, acquisition date with different priorities and prints a download command to output stream in order to start the download and processing of a not yet existing but needed reference scene
 L1B_P.COREG: improved get_opt_winpos_winsize():
 - the algorithm now recognizes if the chosen matching window is not completely within the overlap area of the input images and automatically reduces window size
DB_T: revised get_overlapping_scenes_from_postgreSQLdb()
HLP_F: improved reorder_CornerLonLat() and sceneID_to_trueDataCornerLonLat()
CFG: added java command queries and datasetid_spatial_ref
parent 47de0d15
/GeoMultiSens.py
.idea/
BAK/
OLD/
......
......@@ -3633,7 +3633,7 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
if not ndarray.flags['OWNDATA']:
temp = np.empty_like(ndarray)
temp[:] = ndarray
ndarray = temp # deep copy: converts view to its own array
ndarray = temp # deep copy: converts view to its own array in oder to avoid wrong output
with rasterio.drivers():
if out_gt is None:
......@@ -3996,6 +3996,7 @@ def mapinfo2geotransform(map_info):
return [float(map_info[3]),float(map_info[5]),0.,float(map_info[4]),0.,-float(map_info[6])]
def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
"""Return (UL, LL, LR, UR)"""
assert gdal_ds is not None or (gt is not None and cols is not None and rows is not None), \
"GEOP.get_corner_coordinates: Missing argument! Please provide either 'gdal_ds' or 'gt', 'cols' AND 'rows'."
gdal_ds_GT = gdal_ds.GetGeoTransform() if gdal_ds is not None else gt
......
This diff is collapsed.
......@@ -19,6 +19,7 @@ import os
import multiprocessing
import socket
import psycopg2
import collections
import osr
os.environ['DISPLAY'] = '127.0.0.0:0.0'
......@@ -38,7 +39,8 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
connection.close()
return records2return
query = lambda conn_param, key: get_info_from_postgreSQLdb(conn_param,'config','value',{'key':"'%s'" %key})[0][0]
query_cfg = lambda conn_param, key: get_info_from_postgreSQLdb(conn_param,'config','value',{'key': "'%s'" % key})[0][0]
query_job = lambda conn_param, col: get_info_from_postgreSQLdb(conn_param,'scenes_jobs',col,{'id':GMS_process_ID})[0][0]
absP, joinP = lambda x: os.path.abspath(x), lambda *x: os.path.join(*x)
......@@ -55,9 +57,8 @@ class job:
SZA_SAA_calculation_accurracy = 'coarse'
# SZA_SAA_calculation_accurracy = 'fine' # Warning! This option takes much more time!
export_VZA_SZA_SAA_RAA_stats = True
export_L1A_obj_dumps = False
export_L1C_obj_dumps = False
profiling = False
path_earthSunDist = absP('./database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv')
path_SRFs = absP('./database/srf/')
path_cloud_classif = absP('./database/cloud_classifier/')
......@@ -82,24 +83,26 @@ class job:
path_archive = joinP(path_fileserver, 'database/sampledata/')
elif GMS_call_type == 'webapp':
conn_database = "dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms' connect_timeout=3 options=" \
"'-c statement_timeout=1000'"
"'-c statement_timeout=10000'" # 10sec
conn_db_meta = conn_database
path_fileserver = query(conn_db_meta,'path_data_root')
path_procdata = joinP(path_fileserver, query(conn_db_meta,'foldername_procdata'))
path_archive = joinP(path_fileserver, query(conn_db_meta,'foldername_download'))
path_earthSunDist = absP(query(conn_db_meta,'path_earthSunDist'))
path_SRFs = absP(query(conn_db_meta,'path_SRFs'))
path_cloud_classif = absP(query(conn_db_meta,'path_cloud_classif'))
path_solar_irr = absP(query(conn_db_meta,'path_solar_irr'))
path_testing = absP(query(conn_db_meta,'path_testing'))
path_benchmarks = absP(query(conn_db_meta,'path_benchmarks'))
path_job_logs = absP(query(conn_db_meta,'path_job_logs'))
path_fileserver = query_cfg(conn_db_meta, 'path_data_root')
path_procdata = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata'))
path_archive = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_download'))
path_earthSunDist = absP(query_cfg(conn_db_meta, 'path_earthSunDist'))
path_SRFs = absP(query_cfg(conn_db_meta, 'path_SRFs'))
path_cloud_classif = absP(query_cfg(conn_db_meta, 'path_cloud_classif'))
path_solar_irr = absP(query_cfg(conn_db_meta, 'path_solar_irr'))
path_testing = absP(query_cfg(conn_db_meta, 'path_testing'))
path_benchmarks = absP(query_cfg(conn_db_meta, 'path_benchmarks'))
path_job_logs = absP(query_cfg(conn_db_meta, 'path_job_logs'))
java_commands = collections.OrderedDict()
java_commands["keyword"] = query_cfg(conn_db_meta, 'command_keyword')
java_commands["value_download"] = query_cfg(conn_db_meta, 'command_value_download')
# processor configuration: [run processor, write output]
exec__L0BP = [1]
exec__L1AP = [1, 1]
exec__L1BP = [1, 0]
exec__L1CP = [1, 1]
exec__L1CP = [0, 1]
exec__L1DP = [1, 1]
exec__L2AP = [0, 0]
exec__L2BP = [0, 0]
......@@ -131,11 +134,13 @@ class usecase:
conversion_type_optical = 'Ref' # 'Rad' / 'Ref'
conversion_type_thermal = 'Rad' # 'Rad' / 'Temp'
elif GMS_call_type == 'webapp':
skip_thermal = int(query(job.conn_db_meta,'skip_thermal'))
skip_pan = int(query(job.conn_db_meta,'skip_pan'))
sort_bands_by_cwl = int(query(job.conn_db_meta,'sort_bands_by_cwl'))
conversion_type_optical = query(job.conn_db_meta,'conversion_type_optical')
conversion_type_thermal = query(job.conn_db_meta,'conversion_type_thermal')
skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal'))
skip_pan = int(query_cfg(job.conn_db_meta, 'skip_pan'))
sort_bands_by_cwl = int(query_cfg(job.conn_db_meta, 'sort_bands_by_cwl'))
conversion_type_optical = query_cfg(job.conn_db_meta, 'conversion_type_optical')
conversion_type_thermal = query_cfg(job.conn_db_meta, 'conversion_type_thermal')
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
align_coord_grids = 0 # FIXME: könnte später sinnlos werden, da gemeinsame Auswertung von Multisensordaten inkl.
# FIXME: Zeitreihen ein grid aligning voraussetzt
userInp_target_gsd = 30 # [meters], overridden if match_gsd==True
......
......@@ -284,7 +284,7 @@ def write_global_benchmark_output(list__processing_time__all_runs, list__IO_time
def write_shp(shapely_poly,path_out,prj=None):
print('Writing %s ...' %path_out)
if os.path.exists(path_out): os.remove(path_out)
ds = (lambda drv: drv.CreateDataSource(path_out))(ogr.GetDriverByName("Esri Shapefile"))
ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
if prj is not None:
srs = osr.SpatialReference()
srs.ImportFromWkt(prj)
......
......@@ -69,18 +69,47 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
connection.close()
return records2return
def get_overlapping_scenes_from_postgreSQLdb(conn_params, scene_ID=None, trueDataCornerLonLat=None):
def get_overlapping_scenes_from_postgreSQLdb(conn_params, table='scenes_proc', scene_ID=None,
trueDataCornerLonLat=None, conditions=[], add_cmds=''):
connection = psycopg2.connect(conn_params)
if connection is None: return 'database connection fault'
cursor = connection.cursor()
if scene_ID is not None:
cursor.execute("SELECT scenes_proc.sceneid FROM scenes_proc,scenes WHERE ST_Overlaps(scenes.bounds::geometry, "\
"scenes_proc.bounds::geometry) and scenes.id = %s and scenes_proc.georef = True;" %scene_ID)
elif trueDataCornerLonLat is not None:
datasetids = [int(d.split('=')[1].strip()) for d in conditions if d.startswith('datasetid')]
datasetid = datasetids[0] if datasetids else 104 # Landsat-8
datasetid = 104 if datasetid == 249 else datasetid # FIXME: use Landsat-8 instead of Sentinel-2 as long as S2 L1A_P is not working
if table != 'scenes_proc': assert datasetid is not None, "filtdsId is needed if table is not 'scenes_proc'"
if scene_ID is None: assert trueDataCornerLonLat is not None, "Provide eihter scene_ID or trueDataCornerLonLat!"
if trueDataCornerLonLat is None: assert scene_ID is not None, "Provide eihter scene_ID or trueDataCornerLonLat!"
val2get = "scenes.id" if table=='scenes' else "%s.sceneid" %table
#refcond = ['scenes_proc.georef = True'] if not datasetids else ['scenes.datasetid = %s' %datasetid]
refcond = ['scenes.datasetid = %s' %datasetid]
if trueDataCornerLonLat is not None:
from misc.helper_functions import cornerLonLat_to_postgreSQL_poly
pGSQL_poly = cornerLonLat_to_postgreSQL_poly(trueDataCornerLonLat)
cursor.execute("SELECT scenes_proc.sceneid FROM scenes_proc WHERE ST_Overlaps('SRID=4326;%s'::geometry, "\
"scenes_proc.bounds::geometry) and scenes_proc.georef = True;" %pGSQL_poly)
src_geom = "'SRID=4326;%s'::geometry" %pGSQL_poly # source geometry is given
geocond = ["ST_Overlaps(%s, %s.bounds::geometry)" %(src_geom, table)]
else: # scene_ID is not None:
cursor = connection.cursor()
cursor.execute("SELECT ST_AsText(bounds) FROM scenes WHERE scenes.id = %s" %scene_ID)
res = cursor.fetchone()
if len(res):
src_geom = "'SRID=4326;%s'::geometry" %res
else:
print('The scene with the ID %s does not exist in the scenes table.')
return []
geocond = ["ST_Overlaps(%s, %s.bounds::geometry)" %(src_geom,table)]
join = "INNER JOIN scenes ON (%s.sceneid = scenes.id) " %table \
if table != 'scenes' and datasetids else ''
conditions = [c for c in conditions if not c.startswith('datasetid')]
where = "WHERE %s" %" AND ".join(geocond+refcond+conditions)
usedtbls = "scenes" if table == 'scenes' else "%s, scenes" %table if 'scenes.' in where and join=='' else table
query = "SELECT %s FROM %s %s%s %s" %(val2get, usedtbls, join, where, add_cmds)
#print(query)
cursor = connection.cursor()
cursor.execute(query)
records2return = cursor.fetchall()
cursor.close()
connection.close()
......
......@@ -19,6 +19,7 @@ import fnmatch
import psycopg2
import builtins
import shlex
import warnings
from shapely.geometry import Polygon
from multiprocessing import sharedctypes
from mpl_toolkits.mplot3d import Axes3D
......@@ -355,11 +356,16 @@ def postgreSQL_geometry_to_postgreSQL_poly(geom):
def reorder_CornerLonLat(CornerLonLat):
    """Reorders trueDataCorner and trueDataCornerLonLat from [UL,UR,LL,LR] to clockwise order: [UL,UR,LR,LL]"""
    if len(CornerLonLat) > 4:
        warnings.warn('Only 4 of the given %s corner coordinates were respected.' %len(CornerLonLat))
    # swap the last two corners (LL <-> LR) so the result runs clockwise: UL, UR, LR, LL
    return [CornerLonLat[idx] for idx in (0, 1, 3, 2)]
def sceneID_to_trueDataCornerLonLat(scene_ID):
    """Returns a list of corner coordinates ordered like (UL,UR,LL,LR) corresponding to the given scene_ID.

    The scene geometry is first queried from the 'scenes_proc' table; if the scene has no
    record there yet (empty result set -> IndexError on [0][0]), the lookup falls back to
    the 'scenes' table, which is keyed by 'id' instead of 'sceneid'.
    """
    # NOTE: the diff residue contained a leftover pre-change query before the try block,
    # which performed a redundant, unguarded DB lookup; only the guarded lookup is kept.
    try:
        pgSQL_geom = DB_T.get_info_from_postgreSQLdb(config.job.conn_database, 'scenes_proc', 'bounds',
                                                     {'sceneid': scene_ID})[0][0]
    except IndexError:
        # scene not yet present in 'scenes_proc' -> fall back to the raw 'scenes' table
        pgSQL_geom = DB_T.get_info_from_postgreSQLdb(config.job.conn_database, 'scenes', 'bounds',
                                                     {'id': scene_ID})[0][0]
    return postgreSQL_poly_to_cornerLonLat(postgreSQL_geometry_to_postgreSQL_poly(pgSQL_geom))
def scene_ID_to_shapelyPolygon(scene_ID):
......
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment