Commit e083eae3 authored by Daniel Scheffler

added a first version of a console argument parser

algorithms.L1A_P:
- converted pyhdf import to soft import

misc.database_tools:
- GMS_JOB: fixed from_sceneIDlist() to use the instance's database connection (self.conn) instead of the global config
- added from_entityIDlist(): dummy version

run_at_geoms.sh / run_at_geoms_deployed.sh:
- updated call

run_gms:
- added run_from_jobid()
- added run_from_sceneids()
- added run_from_entityids()
- added run_from_constraints()
- added argument parser

updated __version__
parent cd4c82d2
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20170410.01'
__version__ = '20170411.01'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
......@@ -12,7 +12,10 @@ import gdal
import gdalnumeric
import matplotlib.pyplot as plt
import numpy as np
from pyhdf import SD
try:
from pyhdf import SD
except ImportError:
SD = None
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
......@@ -419,7 +422,8 @@ class L1A_object(GMS_object):
GEOP.ndarray2gdal(data_arr,path_output,geotransform=ds.GetGeoTransform(),
projection=ds.GetProjection(),direction=3)
self.arr = path_output
else:
elif SD is not None:
self.logger.info('Missing HDF4 support within GDAL. Reading HDF file using alternative reader.')
hdfFile = SD.SD(path_archive,SD.SDC.READ)
i, list_matching_dsIdx = 0, []
......@@ -446,6 +450,10 @@ class L1A_object(GMS_object):
GEOP.ndarray2gdal(data_arr,path_output,direction=3)
self.arr = path_output
else:
self.logger.error('Missing HDF4 support. Reading HDF file failed.')
raise ImportError('No suitable library for reading HDF4 data available.')
ds=None
......
......@@ -719,6 +719,8 @@ class GMS_JOB(object):
def from_sceneIDlist(self, list_sceneIDs, virtual_sensor_id, datasetid_spatial_ref=249, comment=None):
# type: (list, int, int, str) -> object
"""
Create a GMS_JOB instance based on the given list of scene IDs.
:param list_sceneIDs: <list> of scene IDs, e.g. [26781907, 26781917, 26542650, 26542451, 26541679]
:param virtual_sensor_id: <int> a valid ID from the 'virtual_sensors' table of the PostgreSQL database
:param datasetid_spatial_ref: <int> a valid dataset ID of the dataset to be chosen as spatial reference
......@@ -733,7 +735,7 @@ class GMS_JOB(object):
list_sceneIDs = list(list_sceneIDs)
# query 'satellite', 'sensor', 'filename' from database and summarize in GeoDataFrame
with psycopg2.connect(CFG.job.conn_database) as conn:
with psycopg2.connect(self.conn) as conn:
with conn.cursor() as cursor:
execute_pgSQL_query(cursor,
"""SELECT scenes.id, satellites.name, sensors.name, scenes.filename FROM scenes
......@@ -763,6 +765,16 @@ class GMS_JOB(object):
return self
def from_entityIDlist(self):
"""
Create a GMS_JOB instance based on the given list of entity IDs.
"""
raise NotImplementedError # TODO
return self
def _get_validated_sceneInfoGDFs(self, GDF_SatSenFname):
# type: (GeoDataFrame) -> GeoDataFrame
"""
......@@ -845,7 +857,7 @@ class GMS_JOB(object):
self.timerange_end = self.dataframe.acquisitiondate.max().to_datetime()
def from_job_ID(self,job_ID):
def from_job_ID(self, job_ID):
# type: (int) -> object
"""
Create a GMS_JOB instance by querying the database for a specific job ID.
......
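Note on the new from_entityIDlist() dummy above: a full implementation could resolve the given entity IDs to scene IDs via the 'scenes' table and then delegate to the existing from_sceneIDlist(). A minimal sketch, assuming a 'scenes.entityid' column and a signature mirroring from_sceneIDlist() (both assumptions, not part of this commit):

```python
# Sketch only, not part of this commit. Assumes a 'scenes.entityid' column;
# the signature mirrors from_sceneIDlist() and is hypothetical.
def from_entityIDlist(self, list_entityIDs, virtual_sensor_id,
                      datasetid_spatial_ref=249, comment=None):
    # type: (list, int, int, str) -> object
    """Create a GMS_JOB instance based on the given list of entity IDs."""
    with psycopg2.connect(self.conn) as conn:
        with conn.cursor() as cursor:
            # psycopg2 adapts a Python tuple to an SQL value list for IN (...)
            cursor.execute("SELECT id FROM scenes WHERE entityid IN %s;",
                           (tuple(list_entityIDs),))
            list_sceneIDs = [rec[0] for rec in cursor.fetchall()]
    return self.from_sceneIDlist(list_sceneIDs, virtual_sensor_id,
                                 datasetid_spatial_ref=datasetid_spatial_ref,
                                 comment=comment)
```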
......@@ -25,5 +25,5 @@ export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/cBc # needed to f
# execute python script
ipython run_gms.py "$@"
ipython run_gms.py jobid "$@"
#python scenes_jobs_dbaccess.py "$@"
......@@ -25,5 +25,5 @@ export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/cBc # needed to f
# execute python script
ipython run_gms.py "$@"
ipython run_gms.py jobid "$@"
#python scenes_jobs_dbaccess.py "$@"
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
__author__='Daniel Scheffler'
import sys
import argparse
import warnings
try:
from GeoMultiSens import process_controller
except ImportError:
from GeoMultiSens_dev import process_controller
from GeoMultiSens import process_controller, __version__
from GeoMultiSens.misc.database_tools import GMS_JOB
except ImportError as e:
try:
from GeoMultiSens_dev import process_controller, __version__
from GeoMultiSens_dev.misc.database_tools import GMS_JOB
except:
raise e
if __name__=='__main__':
if len(sys.argv)<2:
# TODO: allow to run a job from list of scene IDs or from dictlist (use GMS_JOB)
# TODO: provide console interface
# a job ID has not been given
# ID = 26184107
# ID = 26185175 # 1x TM5
# ID = 26185176 # 1x Landsat
# ID = 26185177 # 1st Sentinel-2 test scene
# ID = 26185189 # directly adjacent granules of the 1st Sentinel-2 test scene
# ID = 26185237 # 4 x Landsat-8 -> job created via database tools
# ID = 26185239 # 50 x Landsat-8 -> job created via database tools - 1st L8 beta test job
# ID = 26185242 # 1 x Landsat-8 - bug: files_in_archive=None
# ID = 26185250 # beta job - 219 x L8, 172 x L7, 111 x S2, spatref S2
# ID = 26185251 # 1x L8, target sensor L8
# ID = 26185252 # 1x L8, target sensor L8, spat.ref L8
# ID = 26185253 # 25x L8, target sensor L8, spat.ref L8
# ID = 26185254 # 10x L8, target sensor L8, spat.ref L8
# ID = 26185255 # 1x L8 bug: 5 corners found -> cause: L1A write error in tiled Python mode with more than 1 scene per job
# ID = 26185256 # 1x L7 SLC off, target sensor L8, spat.ref L8
# ID = 26185257 # beta job - 219 x L8, 172 x L7, 111 x S2, spatref L8
# ID = 26185258 # beta job - 219 x L8, spatref L8
# ID = 26185259 # beta job - 172 x L7, spatref L8
# ID = 26185260 # beta job - 111 x S2, spatref L8
# ID = 26185268 # 25x L7 SLC off, target sensor L8, spat.ref L8
# ID = 26185269 # 1x L7 SLC off, Bug SpatialIndexMediator
# ID = 26185270 # 5x L7 SLC off, Bug SpatialIndexMediator
# ID = 26185275 # 1x L8, spat. Ref. L8 Bug L1B_mask not found
# ID = 26185264 # 1x L8, Bug L1B_masks not found
# ID = 26185265 # 1x L8, Bug L2B_masks not found
# ID = 26185268 # "2x L8, Bug L2B_masks not found, incl. 1x bad archive"
# ID = 26185269 # "10x L8, Bug L2B_masks not found"
# ID = 26185272 # "1x S2A Sips"
ID = 26185273 # "1x L7, target L8, spat.ref L8"
# ID = 26185275 # "1x L7, target L8, spat.ref L8 L1B Matching failed"
# ID = 26185276 # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
# ID = 26185279 # "GEOMS: 25x L7, target L8, spat.ref L8"
# ID = 26185280 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within mapinfo2geotransform"
# ID = 26185281 # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
# ID = 26185283 # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
# ID = 26185284 # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
# ID = 26185321 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
# ID = 26185322 # "GEOMS: 1x L7, target L8, spat.ref L8, Bug calc_shifted_cross_power_spectrum: NoneType object not iterable"
# ID = 26185277 # "GMS41: 10x L7, target L8, spat.ref L8, Permission errors during logging"
# ID = 26185278 # "beta job - 172 x L7, spatref L8"
# ID = 26185284 # "GMS41: "all beta-L8 with cloud cover <30% (74 scenes)"
# ID = 26185285 # "GMS41: "all beta-L7 with cloud cover <30% (49 scenes)"
# ID = 26185396 # "GEOMS: 1x S2A multi GSD testing"
# ID = 26185398 # "GEOMS: 1x S2A granule multi GSD testing"
else:
ID = int(sys.argv[1])
def run_from_jobid(args):
# set up process controller instance
PC = process_controller(ID, parallelization_level='scenes')
PC = process_controller(args.jobid, parallelization_level='scenes')
#PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
#PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
def run_from_sceneids(args):
# create and run a download job
warnings.warn('Currently the console argument parser expects the given scenes as already downloaded.') # TODO
# create a new processing job from scene IDs
db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3" # TODO
warnings.warn('Currently the console argument parser expects the database at localhost.') # TODO
virtual_sensor_id = 1 # TODO
warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.') # TODO
datasetid_spatial_ref = 249 # TODO
warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.') # TODO
dbJob = GMS_JOB(db_connection)
dbJob.from_sceneIDlist(list_sceneIDs=args.sceneids,
virtual_sensor_id=virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref,
comment='')
dbJob.create()
jobid = dbJob.id
# set up process controller instance
parallelization_level = 'scenes'
warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.") # TODO
PC = process_controller(jobid, parallelization_level=parallelization_level)
#PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
#PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
def run_from_entityids(args):
# create a new job from entity IDs
# TODO
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')  # FIXME 'args.ID' is not yet provided by the entityids parser
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
def run_from_constraints(args):
# create a new job from constraints
# TODO
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')  # FIXME 'args.ID' is not yet provided by the constraints parser
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
if __name__=='__main__':
# if len(sys.argv)<2:
# # a job ID has not been given
#
# # ID = 26184107
# # ID = 26185175 # 1x TM5
# # ID = 26185176 # 1x Landsat
# # ID = 26185177 # 1st Sentinel-2 test scene
# # ID = 26185189 # directly adjacent granules of the 1st Sentinel-2 test scene
# # ID = 26185237 # 4 x Landsat-8 -> job created via database tools
# # ID = 26185239 # 50 x Landsat-8 -> job created via database tools - 1st L8 beta test job
# # ID = 26185242 # 1 x Landsat-8 - bug: files_in_archive=None
# # ID = 26185250 # beta job - 219 x L8, 172 x L7, 111 x S2, spatref S2
# # ID = 26185251 # 1x L8, target sensor L8
# # ID = 26185252 # 1x L8, target sensor L8, spat.ref L8
# # ID = 26185253 # 25x L8, target sensor L8, spat.ref L8
# # ID = 26185254 # 10x L8, target sensor L8, spat.ref L8
# # ID = 26185255 # 1x L8 bug: 5 corners found -> cause: L1A write error in tiled Python mode with more than 1 scene per job
# # ID = 26185256 # 1x L7 SLC off, target sensor L8, spat.ref L8
# # ID = 26185257 # beta job - 219 x L8, 172 x L7, 111 x S2, spatref L8
# # ID = 26185258 # beta job - 219 x L8, spatref L8
# # ID = 26185259 # beta job - 172 x L7, spatref L8
# # ID = 26185260 # beta job - 111 x S2, spatref L8
# # ID = 26185268 # 25x L7 SLC off, target sensor L8, spat.ref L8
# # ID = 26185269 # 1x L7 SLC off, Bug SpatialIndexMediator
# # ID = 26185270 # 5x L7 SLC off, Bug SpatialIndexMediator
# # ID = 26185275 # 1x L8, spat. Ref. L8 Bug L1B_mask not found
# # ID = 26185264 # 1x L8, Bug L1B_masks not found
# # ID = 26185265 # 1x L8, Bug L2B_masks not found
# # ID = 26185268 # "2x L8, Bug L2B_masks not found, incl. 1x bad archive"
# # ID = 26185269 # "10x L8, Bug L2B_masks not found"
# # ID = 26185272 # "1x S2A Sips"
# ID = 26185273 # "1x L7, target L8, spat.ref L8"
# # ID = 26185275 # "1x L7, target L8, spat.ref L8 L1B Matching failed"
# # ID = 26185276 # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
# # ID = 26185279 # "GEOMS: 25x L7, target L8, spat.ref L8"
# # ID = 26185280 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within mapinfo2geotransform"
# # ID = 26185281 # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
# # ID = 26185283 # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
# # ID = 26185284 # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
# # ID = 26185321 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
# # ID = 26185322 # "GEOMS: 1x L7, target L8, spat.ref L8, Bug calc_shifted_cross_power_spectrum: NoneType object not iterable"
# # ID = 26185277 # "GMS41: 10x L7, target L8, spat.ref L8, Permission errors during logging"
# # ID = 26185278 # "beta job - 172 x L7, spatref L8"
# # ID = 26185284 # "GMS41: "all beta-L8 with cloud cover <30% (74 scenes)"
# # ID = 26185285 # "GMS41: "all beta-L7 with cloud cover <30% (49 scenes)"
# # ID = 26185396 # "GEOMS: 1x S2A multi GSD testing"
# # ID = 26185398 # "GEOMS: 1x S2A granule multi GSD testing"
#
# else:
# ID = int(sys.argv[1])
### CONFIGURE MAIN PARSER FOR GeoMultiSens
parser = argparse.ArgumentParser(
prog='run_gms.py',
description='='*70+'\n'+'GeoMultiSens console argument parser. ' \
'Python implementation by Daniel Scheffler (daniel.scheffler@gfz-potsdam.de)',
epilog="")
parser.add_argument('--version', action='version', version=__version__)
subparsers = parser.add_subparsers()
### CONFIGURE SUBPARSERS FOR GeoMultiSens
parse_jobid = subparsers.add_parser('jobid',
description= 'Run a GeoMultiSens job using an already existing job ID.',
help="use '>>> python /path/to/GeMultiSens/run_gms.py jobid -h' for documentation and usage hints")
parse_sceneids = subparsers.add_parser('sceneids',
description='Run a GeoMultiSens job for a given list of scene IDs.',
help="use '>>> python /path/to/GeMultiSens/run_gms.py sceneids -h' for documentation and usage hints")
parse_entityids = subparsers.add_parser('entityids',
description='Run a GeoMultiSens job for a given list of entity IDs.',
help="use '>>> python /path/to/GeMultiSens/run_gms.py entityids -h' for documentation and usage hints")
parse_constraints = subparsers.add_parser('constraints',
description='Run a GeoMultiSens job matching the given constraints.',
help="use '>>> python /path/to/GeMultiSens/run_gms.py constraints -h' for documentation and usage hints")
#parse_from_sceneids = subparsers.add_parser('from_sceneids',
# description='Run a GeoMultiSens job for a given list of scene IDs.',
# help="use '>>> python /path/to/GeMultiSens/run_gms.py from_sceneids -h' for documentation and usage hints")
## ADD ARGUMENTS
if len(sys.argv) > 1 and sys.argv[1] == 'jobid':
# add arguments to parse_jobid
jid_p = parse_jobid.add_argument
jid_p('jobid', type=int,
help='job ID of an already created GeoMultiSens job (must be present in the jobs table of the database)')
elif sys.argv[1]=='sceneids':
# add arguments to parse_sceneids
sid_p = parse_sceneids.add_argument
sid_p('sceneids', nargs='+', type=int,
help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")
elif sys.argv[1]=='entityids':
# add arguments to parse_entityids
eid_p = parse_entityids.add_argument
eid_p('entityids', nargs='+', type=str,
help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
# FIXME satellite and sensor are required
elif sys.argv[1]=='constraints':
# add arguments to parse_constraints
con_p = parse_constraints.add_argument
# TODO
#con_p('constraints', nargs='+', type=str, help="list of entity IDs corresponding to valid records within the "
# "'scenes' table of the database")
# add general arguments # TODO add these configs to each subparser
general_opts = {
'-db_host':dict(),
'-exec_mode':dict(nargs=3, type=bool, help="L1A Processor configuration",
metavar=('run_processor', 'write_output', 'delete_output'), default=[1, 1, 1]),
'-exec_L1AP':dict(),
'-exec_L1BP':dict(),
'-exec_L1CP':dict(),
'-exec_L2AP':dict(),
'-exec_L2BP':dict(),
'-exec_L2CP':dict(),
'-sub_multiProc':dict(),
'-exc_handler':dict(),
'-blocksize':dict(),
'-profiling':dict(),
'-bench_all':dict(),
'-bench_cloudMask':dict(),
}
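# NOTE (editor's sketch, not part of this commit): the TODO above could be
# addressed by registering each general option on every subparser:
#   for subp in (parse_jobid, parse_sceneids, parse_entityids, parse_constraints):
#       for arg_name, arg_kwargs in general_opts.items():
#           subp.add_argument(arg_name, **arg_kwargs)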
## LINK PARSERS TO RUN FUNCTIONS
parse_jobid.set_defaults(func=run_from_jobid)
parse_sceneids.set_defaults(func=run_from_sceneids)
parse_entityids.set_defaults(func=run_from_entityids)
parse_constraints.set_defaults(func=run_from_constraints)
# RUN!
parsed_args = parser.parse_args()
parsed_args.func(parsed_args)
print('\nready.')
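For reference, the new console interface can then be invoked as follows (the job and scene IDs are examples taken from the code above):

```bash
# run an already existing job by its database ID
python /path/to/GeoMultiSens/run_gms.py jobid 26185273

# create and run a job from a list of scene IDs
python /path/to/GeoMultiSens/run_gms.py sceneids 26781907 26781917 26542650

# print the installed version
python /path/to/GeoMultiSens/run_gms.py --version
```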