# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'

import argparse
import sys
import warnings

import matplotlib

matplotlib.use('Agg', warn=False)  # switch matplotlib backend to 'Agg' and disable the warning in case it's already 'Agg'

from geomultisens import process_controller, __version__
from geomultisens.misc.database_tools import GMS_JOB


def run_from_jobid(args):
    # TODO distinguish between ID of a master, processing or download job
    # TODO master: find corresponding sub-jobs and run them
    # TODO processing: check for not downloaded scenes and run processing after download
    # TODO download: run only the downloader
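
    # A possible dispatch sketch for the TODOs above (hypothetical: none of the helpers
    # named below exist in geomultisens; they only illustrate the intended control flow):
    # job_type = get_job_type(args.jobid)                # hypothetical helper
    # if job_type == 'master':
    #     for sub_jobid in get_sub_jobids(args.jobid):   # hypothetical helper
    #         run_from_jobid(argparse.Namespace(jobid=sub_jobid))
    # elif job_type == 'processing':
    #     download_missing_scenes(args.jobid)            # hypothetical helper
    # elif job_type == 'download':
    #     run_downloader(args.jobid)                     # hypothetical helper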

    # set up process controller instance
    PC = process_controller(args.jobid, parallelization_level='scenes')
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


def run_from_sceneids(args):
    # create and run a download job
    warnings.warn('Currently the console argument parser expects the given scenes to be already downloaded.')  # TODO

    # create a new processing job from scene IDs
    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
    virtual_sensor_id = 1  # TODO
    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
    datasetid_spatial_ref = 249  # TODO
    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO
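
    # These hard-coded values could later be taken from console arguments; a sketch, assuming a
    # '-db_host' option (foreseen in general_opts below, but not registered yet):
    # db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='%s' connect_timeout=3" % args.db_host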

    dbJob = GMS_JOB(db_connection)
    dbJob.from_sceneIDlist(list_sceneIDs=args.sceneids,
                           virtual_sensor_id=virtual_sensor_id,
                           datasetid_spatial_ref=datasetid_spatial_ref,
                           comment='')
    dbJob.create()
    jobid = dbJob.id

    # set up process controller instance
    parallelization_level = 'scenes'
    warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.")  # TODO

    PC = process_controller(jobid, parallelization_level=parallelization_level)
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


def run_from_entityids(args):
    # create a new processing job from entity IDs
    # TODO job creation from entity IDs is not implemented yet
    jobid = None  # FIXME placeholder; process_controller() requires the ID of an existing job
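
    # A possible implementation sketch (hypothetical: it assumes GMS_JOB offers a from_entityIDlist()
    # method analogous to from_sceneIDlist(), which is not verified here, and that db_connection,
    # virtual_sensor_id and datasetid_spatial_ref are set up as in run_from_sceneids() above):
    # dbJob = GMS_JOB(db_connection)
    # dbJob.from_entityIDlist(list_entityids=args.entityids,
    #                         virtual_sensor_id=virtual_sensor_id,
    #                         datasetid_spatial_ref=datasetid_spatial_ref,
    #                         comment='')
    # dbJob.create()
    # jobid = dbJob.id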

    # set up process controller instance
    PC = process_controller(jobid, parallelization_level='scenes')
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


def run_from_constraints(args):
    # create a new processing job from constraints
    # TODO job creation from constraints is not implemented yet
    jobid = None  # FIXME placeholder; process_controller() requires the ID of an existing job
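
    # A possible implementation sketch (hypothetical: it assumes the constraints are first resolved
    # to scene IDs, e.g. by a query against the 'scenes' table, and then reuses the
    # from_sceneIDlist() workflow from run_from_sceneids() above):
    # sceneids = query_sceneids_by_constraints(args)  # hypothetical helper, not part of geomultisens
    # dbJob = GMS_JOB(db_connection)
    # dbJob.from_sceneIDlist(list_sceneIDs=sceneids,
    #                        virtual_sensor_id=virtual_sensor_id,
    #                        datasetid_spatial_ref=datasetid_spatial_ref,
    #                        comment='')
    # dbJob.create()
    # jobid = dbJob.id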

    # set up process controller instance
    PC = process_controller(jobid, parallelization_level='scenes')
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


if __name__ == '__main__':
    # if len(sys.argv) < 2:
    #     # a job ID has not been given
    #
    #     # ID = 26184107
    #     # ID = 26185175   # 1x TM5
    #     # ID = 26185176   # 1x Landsat
    #     # ID = 26185177  # first Sentinel-2 test scene
    #     # ID = 26185189   # directly adjacent granules of the first Sentinel-2 test scene
    #     # ID = 26185237  # 4 x Landsat-8 -> job created via database tools
    #     # ID = 26185239  # 50 x Landsat-8 -> job created via database tools - first L8 beta test job
    #     # ID = 26185242  # 1 x Landsat-8 - bug files_in_archive=None
    #     # ID = 26185250  # beta job - 219 x L8, 172 x L7, 111 x S2, spatref S2
    #     # ID = 26185251  # 1x L8, target sensor L8
    #     # ID = 26185252  # 1x L8, target sensor L8, spat.ref L8
    #     # ID = 26185253  # 25x L8, target sensor L8, spat.ref L8
    #     # ID = 26185254  # 10x L8, target sensor L8, spat.ref L8
    #     # ID = 26185255  # 1x L8, bug '5 corners found' -> cause: L1A write error in tiled Python mode with more than 1 scene in the job
    #     # ID = 26185256  # 1x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185257  # beta job - 219 x L8, 172 x L7, 111 x S2, spatref L8
    #     # ID = 26185258  # beta job - 219 x L8, spatref L8
    #     # ID = 26185259  # beta job - 172 x L7, spatref L8
    #     # ID = 26185260  # beta job - 111 x S2, spatref L8
    #     # ID = 26185268  # 25x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185269  # 1x L7 SLC off, bug SpatialIndexMediator
    #     # ID = 26185270  # 5x L7 SLC off, bug SpatialIndexMediator
    #     # ID = 26185275  # 1x L8, spat. ref. L8, bug L1B_mask not found
    #     # ID = 26185264  # 1x L8, bug L1B_masks not found
    #     # ID = 26185265  # 1x L8, bug L2B_masks not found
    #     # ID = 26185268  # "2x L8, bug L2B_masks not found, incl. 1x bad archive"
    #     # ID = 26185269  # "10x L8, bug L2B_masks not found"
    #     # ID = 26185272  # "1x S2A Sips"
    #     ID = 26185273  # "1x L7, target L8, spat.ref L8"
    #     # ID = 26185275  # "1x L7, target L8, spat.ref L8, L1B matching failed"
    #     # ID = 26185276  # "1x L7, target L8, spat.ref L8, L1B matching window became too small"
    #     # ID = 26185279  # "GEOMS: 25x L7, target L8, spat.ref L8"
    #     # ID = 26185280  # "GEOMS: 1x L7, target L8, spat.ref L8, debugging 'NoneType object is not subscriptable' within mapinfo2geotransform"
    #     # ID = 26185281  # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185283  # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185284  # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185321  # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
    #     # ID = 26185322  # "GEOMS: 1x L7, target L8, spat.ref L8, bug calc_shifted_cross_power_spectrum: 'NoneType object not iterable'"
    #     # ID = 26185277  # "GMS41: 10x L7, target L8, spat.ref L8, permission errors during logging"
    #     # ID = 26185278  # "beta job - 172 x L7, spatref L8"
    #     # ID = 26185284  # "GMS41: all beta-L8 with cloud cover <30% (74 scenes)"
    #     # ID = 26185285  # "GMS41: all beta-L7 with cloud cover <30% (49 scenes)"
    #     # ID = 26185396  # "GEOMS: 1x S2A multi GSD testing"
    #     # ID = 26185398  # "GEOMS: 1x S2A granule multi GSD testing"
    #
    # else:
    #     ID = int(sys.argv[1])

    ### CONFIGURE MAIN PARSER FOR GeoMultiSens
    parser = argparse.ArgumentParser(
        prog='run_gms.py',
        description='=' * 70 + '\n' + 'GeoMultiSens console argument parser. '
                    'Python implementation by Daniel Scheffler (daniel.scheffler@gfz-potsdam.de)',
        epilog="The argument parser offers multiple sub-argument parsers (jobid, sceneids, ...) for starting GMS "
               "jobs. Use '>>> python /path/to/GeoMultiSens/run_gms.py <sub-parser> -h' for detailed documentation "
               "and usage hints.")

    parser.add_argument('--version', action='version', version=__version__)

    subparsers = parser.add_subparsers()


    ### CONFIGURE SUBPARSERS FOR GeoMultiSens
    parser_jobid = subparsers.add_parser('jobid',
        description='Run a GeoMultiSens job using an already existing job ID.',
        help="Run a GeoMultiSens job using an already existing job ID (sub-parser).")

    parser_sceneids = subparsers.add_parser('sceneids',
        description='Run a GeoMultiSens job for a given list of scene IDs.',
        help="Run a GeoMultiSens job for a given list of scene IDs (sub-parser).")

    parser_entityids = subparsers.add_parser('entityids',
        description='Run a GeoMultiSens job for a given list of entity IDs.',
        help="Run a GeoMultiSens job for a given list of entity IDs (sub-parser).")

    parser_constraints = subparsers.add_parser('constraints',
        description='Run a GeoMultiSens job matching the given constraints.',
        help="Run a GeoMultiSens job matching the given constraints (sub-parser).")

    # parse_from_sceneids = subparsers.add_parser('from_sceneids',
    #     description='Run a GeoMultiSens job for a given list of scene IDs.',
    #     help="use '>>> python /path/to/GeoMultiSens/run_gms.py from_sceneids -h' for documentation and usage hints")


    ## ADD ARGUMENTS TO THE SUBPARSERS
    # NOTE: the arguments are registered unconditionally and argparse dispatches to the chosen
    # sub-parser itself (inspecting sys.argv here instead would raise an IndexError whenever the
    # script is called without any argument)

    # add arguments to parser_jobid
    jid_p = parser_jobid.add_argument
    jid_p('jobid', type=int,
          help='job ID of an already created GeoMultiSens job (must be present in the jobs table of the database)')

    # add arguments to parser_sceneids
    sid_p = parser_sceneids.add_argument
    sid_p('sceneids', nargs='+', type=int,
          help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")

    # add arguments to parser_entityids
    eid_p = parser_entityids.add_argument
    eid_p('entityids', nargs='+', type=str,
          help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
    # FIXME satellite and sensor are required

    # add arguments to parser_constraints
    con_p = parser_constraints.add_argument
    # TODO
    # con_p('constraints', nargs='+', type=str,
    #       help="list of constraints defining which records of the 'scenes' table are to be processed")


    # general arguments  # TODO add these configs to each subparser
    general_opts = {
        '-db_host': dict(),
        # NOTE: argparse's type=bool does not parse '0'/'False' as False, and the metavar tuple
        # must match nargs; both need to be revised when this option is actually registered
        '-exec_mode': dict(nargs=3, type=bool, help="L1A Processor configuration",
                           metavar=tuple("[run processor, write output, delete output]".split(' ')),
                           default=[1, 1, 1]),
        '-exec_L1AP': dict(),
        '-exec_L1BP': dict(),
        '-exec_L1CP': dict(),
        '-exec_L2AP': dict(),
        '-exec_L2BP': dict(),
        '-exec_L2CP': dict(),
        '-sub_multiProc': dict(),
        '-exc_handler': dict(),
        '-blocksize': dict(),
        '-profiling': dict(),
        '-bench_all': dict(),
        '-bench_cloudMask': dict(),
    }
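
    # A possible way to register these shared options on every sub-parser once their semantics are
    # finalized (sketch only; the kwargs above are placeholders and would need completion first):
    # for sub_p in (parser_jobid, parser_sceneids, parser_entityids, parser_constraints):
    #     for opt_name, opt_kwargs in general_opts.items():
    #         sub_p.add_argument(opt_name, **opt_kwargs)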


    ## LINK PARSERS TO RUN FUNCTIONS
    parser_jobid.set_defaults(func=run_from_jobid)
    parser_sceneids.set_defaults(func=run_from_sceneids)
    parser_entityids.set_defaults(func=run_from_entityids)
    parser_constraints.set_defaults(func=run_from_constraints)


    # RUN!
    parsed_args = parser.parse_args()
    if hasattr(parsed_args, 'func'):
        parsed_args.func(parsed_args)
    else:
        parser.print_help()  # no sub-command given

    print('\nready.')