# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'

import argparse
import warnings
import os

import matplotlib

matplotlib.use('Agg', warn=False)  # switch matplotlib backend to 'Agg' and disable the warning in case it is already 'Agg'

from gms_preprocessing import process_controller, __version__  # noqa: E402
from gms_preprocessing.misc.database_tools import GMS_JOB  # noqa: E402
from gms_preprocessing.options.config import get_conn_database  # noqa: E402
from gms_preprocessing.options.config import path_options_default  # noqa: E402
from gms_preprocessing.options.config import get_options  # noqa: E402
from gms_preprocessing.options.config import get_config_kwargs_default  # noqa: E402

options_default = get_options(path_options_default, validation=True)  # type: dict
config_kwargs_default = get_config_kwargs_default()  # type: dict


def get_config_kwargs_from_cli_args(cli_args):
    """Return those command line arguments that are also valid configuration keyword arguments."""
    return {k: v for k, v in cli_args.__dict__.items() if k in config_kwargs_default.keys()}
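
# Usage sketch for get_config_kwargs_from_cli_args (hypothetical Namespace values; which keys
# survive the filtering depends on the keys returned by get_config_kwargs_default()):
#   >>> ns = argparse.Namespace(comment='test', sceneids=[26186201])
#   >>> set(get_config_kwargs_from_cli_args(ns)).issubset(config_kwargs_default)
#   True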


def run_from_jobid(args):
    """Run a GeoMultiSens preprocessing job for an already existing job ID.

    :param args: parsed command line arguments (argparse.Namespace)
    """
    # TODO distinguish between ID of a master, processing or download job
    # TODO master: find corresponding sub-jobs and run them
    # TODO processing: check for not downloaded scenes and run processing after download
    # TODO download: run only the downloader

    # set up process controller instance
    PC = process_controller(args.jobid, **get_config_kwargs_from_cli_args(args))

    # run the job
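    # NOTE: the test mode below is triggered by an environment variable which the software tests
    # are expected to set beforehand, e.g. (sketch): os.environ['GMS_IS_TEST'] = 'True'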
    if 'GMS_IS_TEST' in os.environ and os.environ['GMS_IS_TEST'] == 'True':
        # in case of software test, it is enough to get an instance of process controller because all inputs are
        # validated within options.config.Job_Config (indirectly called by ProcessController.__init__() )
        pass
    else:
        PC.run_all_processors()


def run_from_sceneids(args):
    """Create a new GeoMultiSens preprocessing job from a list of scene IDs and run it.

    :param args: parsed command line arguments (argparse.Namespace)
    """
    # TODO create and run a download job
    warnings.warn('Currently the console argument parser expects the given scenes to be already downloaded.')

    # create a new processing job from scene IDs
    dbJob = GMS_JOB(get_conn_database(args.database_hostname))
    dbJob.from_sceneIDlist(list_sceneIDs=args.sceneids,
                           virtual_sensor_id=args.virtual_sensor_id,
                           datasetid_spatial_ref=args.datasetid_spatial_ref,
                           comment=args.comment)
    _run_job(dbJob, **get_config_kwargs_from_cli_args(args))


def run_from_entityids(args):
    """Create a new job from entity IDs.
60

61
62
63
    :param args:
    :return:
    """
    dbJob = GMS_JOB(get_conn_database(args.database_hostname))
    dbJob.from_entityIDlist(list_entityids=args.entityids,
                            virtual_sensor_id=args.virtual_sensor_id,
                            datasetid_spatial_ref=args.datasetid_spatial_ref,
                            comment=args.comment)
    _run_job(dbJob, **get_config_kwargs_from_cli_args(args))


def run_from_filenames(args):
    """Create a new GMS job from filenames of downloaded archives and run it!
74

75
76
77
    :param args:
    :return:
    """
    dbJob = GMS_JOB(get_conn_database(args.database_hostname))
    dbJob.from_filenames(list_filenames=args.filenames,
                         virtual_sensor_id=args.virtual_sensor_id,
                         datasetid_spatial_ref=args.datasetid_spatial_ref,
                         comment=args.comment)
    _run_job(dbJob, **get_config_kwargs_from_cli_args(args))


def run_from_constraints(args):
    # create a new job from constraints
    # TODO
    raise NotImplementedError


def _run_job(dbJob, **config_kwargs):
    # type: (GMS_JOB) -> None
    """Create a database record for the given job and run it.

    :param dbJob:          instance of GMS_JOB belonging to the job to be run
    :param config_kwargs:  keyword arguments to be passed to process_controller
    """
    # create a database record for the given job
    dbJob.create()

    # set up process controller instance
    PC = process_controller(dbJob.id, **config_kwargs)

    # run the job
    if 'GMS_IS_TEST' in os.environ and os.environ['GMS_IS_TEST'] == 'True':
        # in case of software test, it is enough to get an instance of process controller because all inputs are
        # validated within options.config.Job_Config (indirectly called by ProcessController.__init__() )
        pass
    else:
        PC.run_all_processors()


def get_gms_argparser():
    """Return argument parser for run_gms.py program."""

    ##################################################################
    # CONFIGURE MAIN PARSER FOR THE GEOMULTISENS PREPROCESSING CHAIN #
    ##################################################################

    parser = argparse.ArgumentParser(
        prog='run_gms.py',
        description='=' * 70 + '\n' + 'GeoMultiSens preprocessing console argument parser. '
                                      'Python implementation by Daniel Scheffler (daniel.scheffler@gfz-potsdam.de)',
        epilog="The argument parser offers multiple sub-argument parsers (jobid, sceneids, ...) for starting GMS "
               "jobs. Use '>>> python /path/to/gms_preprocessing/run_gms.py <sub-parser> -h' for detailed "
               "documentation and usage hints.")

    parser.add_argument('--version', action='version', version=__version__)
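
    # Example invocations (the job/scene IDs and the file name below are placeholders, not real
    # database records):
    #   python run_gms.py jobid 26186740
    #   python run_gms.py sceneids 26186201 26186213 -DH localhost -c 'my test job'
    #   python run_gms.py filenames LE71510322000093SGS00.tar.gz -DH localhost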

    #################################################################
    # CONFIGURE SUBPARSERS FOR THE GEOMULTISENS PREPROCESSING CHAIN #
    #################################################################

    ##############################################
    # define parsers containing common arguments #
    ##############################################

    general_opts_parser = argparse.ArgumentParser(add_help=False)
    gop_p = general_opts_parser.add_argument

    gop_p('-jc', '--json_config', nargs='?', type=str,
          help='file path of a JSON file containing options. See here for an example: '
               'https://gitext.gfz-potsdam.de/geomultisens/gms_preprocessing/'
               'blob/master/gms_preprocessing/options/options_default.json')

    # '-exec_L1AP': dict(nargs=3, type=bool, help="L1A Processor configuration",
    #                   metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),

    gop_p('-DH', '--database_hostname', nargs='?', type=str,
          default=options_default["global_opts"]["db_host"],
          help='host name of the server that runs the PostgreSQL database')

    gop_p('-vid', '--virtual_sensor_id', type=int,
          default=options_default["usecase"]["virtual_sensor_id"],
          help='ID of the target (virtual) sensor')

    gop_p('-dsid_spat', '--datasetid_spatial_ref', type=int,
          default=options_default["usecase"]["datasetid_spatial_ref"],
          help='dataset ID of the spatial reference')

    gop_p('-c', '--comment', nargs='?', type=str,
          default='',
          help='comment concerning the job')

    ##################
    # add subparsers #
    ##################

    subparsers = parser.add_subparsers()

    parser_jobid = subparsers.add_parser(
        'jobid', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job using an already existing job ID.',
        help="Run a GeoMultiSens preprocessing job using an already existing job ID (Sub-Parser).")

    parser_sceneids = subparsers.add_parser(
        'sceneids', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
        help="Run a GeoMultiSens preprocessing job for a given list of scene IDs (Sub-Parser).")

    parser_entityids = subparsers.add_parser(
        'entityids', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of entity IDs.',
        help="Run a GeoMultiSens preprocessing job for a given list of entity IDs (Sub-Parser).")

    parser_filenames = subparsers.add_parser(
        'filenames', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job for a given list of filenames of '
                    'downloaded satellite image archives!',
        help="Run a GeoMultiSens preprocessing job for a given list of filenames of downloaded satellite "
             "image archives! (Sub-Parser).")

    parser_constraints = subparsers.add_parser(
        'constraints', parents=[general_opts_parser],
        description='Run a GeoMultiSens preprocessing job matching the given constraints.',
        help="Run a GeoMultiSens preprocessing job matching the given constraints (Sub-Parser).")

    #################
    # ADD ARGUMENTS #
    #################

    ############################
    # add individual arguments #
    ############################

    # add arguments to parser_jobid
    jid_p = parser_jobid.add_argument
    jid_p('jobid', type=int, help='job ID of an already created GeoMultiSens preprocessing job '
                                  '(must be present in the jobs table of the database)')

    # add arguments to parser_sceneids
    sid_p = parser_sceneids.add_argument
    sid_p('sceneids', nargs='+', type=int,
          help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")

    # add arguments to parser_entityids
    eid_p = parser_entityids.add_argument
    eid_p('entityids', nargs='+', type=str,
          help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
    # FIXME satellite and sensor are required

    # add arguments to parser_filenames
    fn_p = parser_filenames.add_argument
    fn_p('filenames', nargs='+', type=str,
         help="list of filenames of satellite image archives corresponding to valid records within the 'scenes' "
              "table of the database")

    # add arguments to parser_constraints
    con_p = parser_constraints.add_argument
    # TODO
    # con_p('constraints', nargs='+', type=str, help="list of entity IDs corresponding to valid records within the "
    #                                            "'scenes' table of the database")

    #################################
    # LINK PARSERS TO RUN FUNCTIONS #
    #################################

    parser_jobid.set_defaults(func=run_from_jobid)
    parser_sceneids.set_defaults(func=run_from_sceneids)
    parser_entityids.set_defaults(func=run_from_entityids)
    parser_filenames.set_defaults(func=run_from_filenames)
    parser_constraints.set_defaults(func=run_from_constraints)
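    # NOTE: each sub-parser stores its run function as the 'func' default, so after parsing the
    # __main__ block only needs to call parsed_args.func(parsed_args), e.g. (placeholder job ID):
    #   parsed_args = get_gms_argparser().parse_args(['jobid', '12345'])
    #   parsed_args.func(parsed_args)  # -> run_from_jobid(parsed_args)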

    return parser


if __name__ == '__main__':
    # if len(sys.argv)<2:
    #     # a job ID has not been given
    #
    #     # ID = 26184107
    #     # ID = 26185175   # 1x TM5
    #     # ID = 26185176   # 1x Landsat
    #     # ID = 26185177  # 1st Sentinel-2 test scene
    #     # ID = 26185189   # directly adjacent granules of the 1st Sentinel-2 test scene
    #     # ID = 26185237  # 4 x Landsat-8 -> job created via database tools
    #     # ID = 26185239  # 50 x Landsat-8 -> job created via database tools - 1st L8 beta test job
    #     # ID = 26185242  # 1 x Landsat-8 - Bug files_in_archive=None
    #     # ID = 26185250  # Beta-Job - 219 x L8, 172 x L7, 111 x S2, spatref S2
    #     # ID = 26185251  # 1x L8, target sensor L8
    #     # ID = 26185252  # 1x L8, target sensor L8, spat.ref L8
    #     # ID = 26185253  # 25x L8, target sensor L8, spat.ref L8
    #     # ID = 26185254  # 10x L8, target sensor L8, spat.ref L8
    #     # reason = L1A write error in tiled Python mode with more than 1 scene in the job:
    #     # ID = 26185255  # 1x L8 Bug 5 corners found
    #     # ID = 26185256  # 1x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185257  # Beta-Job - 219 x L8, 172 x L7, 111 x S2, spatref L8
    #     # ID = 26185258  # Beta-Job - 219 x L8, spatref L8
    #     # ID = 26185259  # Beta-Job - 172 x L7, spatref L8
    #     # ID = 26185260  # Beta-Job - 111 x S2, spatref L8
    #     # ID = 26185268  # 25x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185269  # 1x L7 SLC off, Bug SpatialIndexMediator
    #     # ID = 26185270  # 5x L7 SLC off, Bug SpatialIndexMediator
    #     # ID = 26185275  # 1x L8, spat. Ref. L8 Bug L1B_mask not found
    #     # ID = 26185264  # 1x L8, Bug L1B_masks not found
    #     # ID = 26185265  # 1x L8, Bug L2B_masks not found
    #     # ID = 26185268  # "2x L8, Bug L2B_masks not found, incl. 1x bad archive"
    #     # ID = 26185269 # "10x L8, Bug L2B_masks not found"
    #     # ID = 26185272 # "1x S2A Sips"
    #     ID = 26185273  # "1x L7, target L8, spat.ref L8"
    #     # ID = 26185275 # "1x L7, target L8, spat.ref L8 L1B Matching failed"
    #     # ID = 26185276 # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
    #     # ID = 26185279 # "GEOMS: 25x L7, target L8, spat.ref L8"
    #     # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within
    #     # mapinfo2geotransform":
    #     # ID = 26185280
    #     # ID = 26185281 # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185283 # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185284 # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185321 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
    #     # "GEOMS: 1x L7, target L8, spat.ref L8, Bug calc_shifted_cross_power_spectrum: NoneType object not iterable":
    #     # ID = 26185322
    #     # ID = 26185277 # "GMS41: 10x L7, target L8, spat.ref L8, Permission errors during logging"
    #     # ID = 26185278 # "Beta-Job - 172 x L7, spatref L8"
    #     # ID = 26185284 # "GMS41: "all beta-L8 with cloud cover <30% (74 scenes)"
    #     # ID = 26185285 # "GMS41: "all beta-L7 with cloud cover <30% (49 scenes)"
    #     # ID = 26185396 # "GEOMS: 1x S2A multi GSD testing"
    #     # ID = 26185398  # "GEOMS: 1x S2A granule multi GSD testing"
    #
    # else:
    #     ID = int(sys.argv[1])

    # RUN!
    parser = get_gms_argparser()
    parsed_args = parser.parse_args()

    if not hasattr(parsed_args, 'func'):
        # no sub-command was given -> print the usage hints and exit
        parser.error('A sub-command is required (jobid, sceneids, entityids, filenames or constraints).')

    parsed_args.func(parsed_args)

    print('\nready.')