# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'

import argparse
import warnings

import matplotlib

# Switch the matplotlib backend to the non-interactive 'Agg' BEFORE anything
# imports pyplot.  NOTE: matplotlib.use(..., warn=False) was deprecated in
# matplotlib 3.0 and the 'warn' parameter was REMOVED in 3.1, where passing it
# raises a TypeError.  Suppress the "backend already chosen" warning explicitly
# instead, which works on both old and new matplotlib versions.
with warnings.catch_warnings():
    warnings.simplefilter('ignore')
    matplotlib.use('Agg')

from gms_preprocessing import process_controller, __version__  # noqa: E402
from gms_preprocessing.misc.database_tools import GMS_JOB  # noqa: E402
from gms_preprocessing.options.config import get_conn_database  # noqa: E402
from gms_preprocessing.options.config import path_options_default  # noqa: E402
from gms_preprocessing.options.config import get_options  # noqa: E402
from gms_preprocessing.options.config import get_config_kwargs_default  # noqa: E402

# validated default options and default config keyword arguments, used as
# fallbacks by the argument parser and for filtering CLI arguments
options_default = get_options(path_options_default, validation=True)  # type: dict
config_kwargs_default = get_config_kwargs_default()  # type: dict


def get_config_kwargs_from_cli_args(cli_args):
    """Filter the parsed CLI arguments down to valid process controller config keywords.

    :param cli_args:    parsed argparse namespace
    :return:            dict of keyword arguments accepted by process_controller
    """
    valid_keys = config_kwargs_default.keys()
    return {key: val for key, val in vars(cli_args).items() if key in valid_keys}
24
25


26
def run_from_jobid(args):
    """Run a GeoMultiSens preprocessing job for an already existing database job ID.

    :param args:    parsed argparse namespace of the 'jobid' sub-parser
    """
    # TODO distinguish between ID of a master, processing or download job
    # TODO master: find corresponding sub-jobs and run them
    # TODO processing: check for not downloaded scenes and run processing after download
    # TODO download: run only the downloader

    # instantiate the process controller for the given job ID
    controller = process_controller(args.jobid, **get_config_kwargs_from_cli_args(args))

    # execute all processors of the job
    controller.run_all_processors()

38
39
40

def run_from_sceneids(args):
    """Create a new GMS processing job from a list of scene IDs and run it.

    :param args:    parsed argparse namespace of the 'sceneids' sub-parser
    """
    # create and run a download job
    warnings.warn('Currently the console argument parser expects the given scenes as already downloaded.')  # TODO

    # build a new processing job from the given scene IDs
    job = GMS_JOB(get_conn_database(args.database_hostname))
    job.from_sceneIDlist(list_sceneIDs=args.sceneids,
                         virtual_sensor_id=args.virtual_sensor_id,
                         datasetid_spatial_ref=args.datasetid_spatial_ref,
                         comment=args.comment)

    _run_job(job, **get_config_kwargs_from_cli_args(args))
50
51
52


def run_from_entityids(args):
    """Create a new GMS processing job from a list of entity IDs and run it.

    :param args:    parsed argparse namespace of the 'entityids' sub-parser
    """
    job = GMS_JOB(get_conn_database(args.database_hostname))
    job.from_entityIDlist(list_entityids=args.entityids,
                          virtual_sensor_id=args.virtual_sensor_id,
                          datasetid_spatial_ref=args.datasetid_spatial_ref,
                          comment=args.comment)

    _run_job(job, **get_config_kwargs_from_cli_args(args))
64
65
66


def run_from_filenames(args):
    """Create a new GMS job from filenames of already downloaded archives and run it.

    :param args:    parsed argparse namespace of the 'filenames' sub-parser
    """
    job = GMS_JOB(get_conn_database(args.database_hostname))
    job.from_filenames(list_filenames=args.filenames,
                       virtual_sensor_id=args.virtual_sensor_id,
                       datasetid_spatial_ref=args.datasetid_spatial_ref,
                       comment=args.comment)

    _run_job(job, **get_config_kwargs_from_cli_args(args))
78
79
80
81
82


def run_from_constraints(args):
    """Create and run a new GMS job matching the given constraints.

    Not implemented yet.

    :param args:    parsed argparse namespace of the 'constraints' sub-parser
    """
    # TODO implement job creation from constraints
    raise NotImplementedError


86
def _run_job(dbJob, **config_kwargs):
    # type: (GMS_JOB, **Any) -> None
    """Create a database record for the given job and execute it.

    NOTE: the previous type comment '(GMS_JOB) -> None' omitted the
    **config_kwargs parameter and was therefore invalid for type checkers.

    :param dbJob:           GMS_JOB instance describing the job to be executed
    :param config_kwargs:   keyword arguments to be passed to process_controller
    :return:                None
    """
    # create a database record for the given job
    dbJob.create()

    # set up process controller instance
    PC = process_controller(dbJob.id, **config_kwargs)

    # run the job
    PC.run_all_processors()


103
104
def get_gms_argparser():
    """Return the argument parser for the run_gms.py program.

    The parser consists of a main parser (offering '--version') and one
    sub-parser per way of starting a GMS job (jobid, sceneids, entityids,
    filenames, constraints), all sharing a common set of general options.

    :return:    a fully configured argparse.ArgumentParser instance
    """

    ##################################################################
    # CONFIGURE MAIN PARSER FOR THE GEOMULTISENS PREPROCESSING CHAIN #
    ##################################################################

    parser = argparse.ArgumentParser(
        prog='run_gms.py',
        description='=' * 70 + '\n' + 'GeoMultiSens preprocessing console argument parser. '
                                      'Python implementation by Daniel Scheffler (daniel.scheffler@gfz-potsdam.de)',
        epilog="The argument parser offers multiple sub-argument parsers (jobid, sceneids, ...) for starting GMS jobs. "
               "use '>>> python /path/to/gms_preprocessing/run_gms.py <sub-parser> -h' for detailed documentation and "
               "usage hints.")

    parser.add_argument('--version', action='version', version=__version__)

    #################################################################
    # CONFIGURE SUBPARSERS FOR THE GEOMULTISENS PREPROCESSING CHAIN #
    #################################################################

    ##############################################
    # define parsers containing common arguments #
    ##############################################

    # parent parser holding the options shared by all sub-parsers below
    general_opts_parser = argparse.ArgumentParser(add_help=False)
    gop_p = general_opts_parser.add_argument

    gop_p('-jc', '--json_config', nargs='?', type=str,
          help='file path of a JSON file containing options. See here for an example:'
               'https://gitext.gfz-potsdam.de/geomultisens/gms_preprocessing/'
               'blob/master/gms_preprocessing/options/options_default.json')

    # '-exec_L1AP': dict(nargs=3, type=bool, help="L1A Processor configuration",
    #                   metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),

    gop_p('-DH', '--database_hostname', nargs='?', type=str,
          default=options_default["global_opts"]["db_host"],
          help='host name of the server that runs the postgreSQL database')

    gop_p('-vid', '--virtual_sensor_id', type=int,
          default=options_default["usecase"]["virtual_sensor_id"],
          help='ID of the target (virtual) sensor')

    gop_p('-dsid_spat', '--datasetid_spatial_ref', type=int,
          default=options_default["usecase"]["datasetid_spatial_ref"],
          help='dataset ID of the spatial reference')

    gop_p('-c', '--comment', nargs='?', type=str,
          default='',
          help='comment concerning the job')

    ##################
    # add subparsers #
    ##################

    subparsers = parser.add_subparsers()

    parser_jobid = subparsers.add_parser(
        'jobid', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job using an already existing job ID.',
        help="Run a GeoMultiSens preprocessing job using an already existing job ID (Sub-Parser).")

    parser_sceneids = subparsers.add_parser(
        'sceneids', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
        help="Run a GeoMultiSens preprocessing job for a given list of scene IDs (Sub-Parser).")

    parser_entityids = subparsers.add_parser(
        'entityids', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of entity IDs.',
        help="Run a GeoMultiSens preprocessing job for a given list of entity IDs (Sub-Parser).")

    parser_filenames = subparsers.add_parser(
        'filenames', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of filenames of '
                    'downloaded satellite image archives!',
        help="Run a GeoMultiSens preprocessing job for a given list of filenames of downloaded satellite "
             "image archives! (Sub-Parser).")

    parser_constraints = subparsers.add_parser(
        'constraints', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job matching the given constraints.',
        help="Run a GeoMultiSens preprocessing job matching the given constraints (Sub-Parser).")

    #################
    # ADD ARGUMENTS #
    #################

    ##########################
    # add indivial arguments #
    ##########################

    # add arguments to parser_jobid
    jid_p = parser_jobid.add_argument
    jid_p('jobid', type=int, help='job ID of an already created GeoMultiSens preprocessing job '
                                  '(must be present in the jobs table of the database)')

    # add arguments to parser_sceneids
    sid_p = parser_sceneids.add_argument
    sid_p('sceneids', nargs='+', type=int,
          help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")

    # add arguments to parser_entityids
    eid_p = parser_entityids.add_argument
    eid_p('entityids', nargs='+', type=str,
          help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
    # FIXME satellite and sensor are required

    # add arguments to parser_filenames
    # NOTE: this shorthand was previously named 'eid_p', silently shadowing the
    # entityids one above (a copy-paste leftover) - renamed for clarity
    fn_p = parser_filenames.add_argument
    fn_p('filenames', nargs='+', type=str,
         help="list of filenames of satellite image archives corresponding to valid records within the 'scenes' "
              "table of the database")

    # add arguments to parse_constraints
    con_p = parser_constraints.add_argument
    # TODO
    # con_p('constraints', nargs='+', type=str, help="list of entity IDs corresponding to valid records within the "
    #                                            "'scenes' table of the database")

    #################################
    # LINK PARSERS TO RUN FUNCTIONS #
    #################################

    parser_jobid.set_defaults(func=run_from_jobid)
    parser_sceneids.set_defaults(func=run_from_sceneids)
    parser_entityids.set_defaults(func=run_from_entityids)
    parser_filenames.set_defaults(func=run_from_filenames)
    parser_constraints.set_defaults(func=run_from_constraints)

    return parser


237
if __name__ == '__main__':
    # NOTE: a long list of commented-out historical test job IDs was removed
    # here; consult the git history if those example IDs are ever needed again.

    parser = get_gms_argparser()
    parsed_args = parser.parse_args()

    # In Python 3, argparse sub-commands are optional by default, so calling
    # the script without a sub-command would leave 'func' unset and crash with
    # an AttributeError - fail with a proper usage error instead.
    if not hasattr(parsed_args, 'func'):
        parser.error('A sub-command is required. Use -h for usage hints.')

    # RUN!
    parsed_args.func(parsed_args)

    print('\nready.')