# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'

import argparse
import warnings

import matplotlib

matplotlib.use('Agg', warn=False)  # switch matplotlib backend to 'Agg' and disable warning in case it's already 'Agg'

from gms_preprocessing import process_controller, __version__  # noqa: E402
from gms_preprocessing.misc.database_tools import GMS_JOB  # noqa: E402
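
# Example invocations of the sub-commands defined in get_gms_argparser() below (illustrative sketch only;
# the angle-bracket placeholders have to be replaced by IDs/filenames that actually exist in the GMS
# database or on disk):
#   python run_gms.py jobid <job_id>
#   python run_gms.py sceneids <scene_id_1> <scene_id_2> ...
#   python run_gms.py entityids <entity_id_1> <entity_id_2> ...
#   python run_gms.py filenames <archive_filename_1> <archive_filename_2> ...
#   python run_gms.py <sub-parser> -h  # detailed help for a single sub-command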


def run_from_jobid(args):
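    """Run a GeoMultiSens preprocessing job using an already existing job ID.

    :param args:    argparse Namespace containing the given job ID (args.jobid)
    :return:
    """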
    # TODO distinguish between ID of a master, processing or download job
    # TODO master: find corresponding sub-jobs and run them
    # TODO processing: check for not downloaded scenes and run processing after download
    # TODO download: run only the downloader

    # set up process controller instance
    PC = process_controller(args.jobid, parallelization_level='scenes')
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


def run_from_sceneids(args):
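    """Create a new GMS job from a list of scene IDs and run it.

    :param args:    argparse Namespace containing the given scene IDs (args.sceneids)
    :return:
    """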
    # create and run a download job
    warnings.warn('Currently the console argument parser expects the given scenes as already downloaded.')  # TODO

    # create a new processing job from scene IDs
    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
    virtual_sensor_id = 1  # TODO
    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
    datasetid_spatial_ref = 249  # TODO
    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO

    dbJob = GMS_JOB(db_connection)
    dbJob.from_sceneIDlist(list_sceneIDs=args.sceneids,
                           virtual_sensor_id=virtual_sensor_id,
                           datasetid_spatial_ref=datasetid_spatial_ref,
                           comment='')
    _run_job(dbJob)


def run_from_entityids(args):
    """Create a new job from entity IDs.
52

53
54
55
    :param args:
    :return:
    """

    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
    virtual_sensor_id = 1  # TODO
    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
    datasetid_spatial_ref = 249  # TODO
    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO

    dbJob = GMS_JOB(db_connection)
    dbJob.from_entityIDlist(list_entityids=args.entityids,
                            virtual_sensor_id=virtual_sensor_id,
                            datasetid_spatial_ref=datasetid_spatial_ref,
                            comment='')
    _run_job(dbJob)


def run_from_filenames(args):
    """Create a new GMS job from filenames of downloaded archives and run it!
74

75
76
77
    :param args:
    :return:
    """

    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
    virtual_sensor_id = 1  # TODO
    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
    datasetid_spatial_ref = 249  # TODO
    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO

    dbJob = GMS_JOB(db_connection)
    dbJob.from_filenames(list_filenames=args.filenames,
                         virtual_sensor_id=virtual_sensor_id,
                         datasetid_spatial_ref=datasetid_spatial_ref,
                         comment='')
    _run_job(dbJob)


def run_from_constraints(args):
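    """Create a new GMS job matching the given constraints and run it (not implemented yet).

    :param args:    argparse Namespace containing the given constraints
    :return:
    """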
    # create a new job from constraints
    # TODO
    raise NotImplementedError


def _run_job(dbJob, parallelization_level='scenes'):
    # type: (GMS_JOB, str) -> None
    """Create the given GMS job in the database and run it.

    :param dbJob:                   instance of GMS_JOB to be created within the database and then processed
    :param parallelization_level:   parallelization level to be passed to the process controller
    :return:
    """

    dbJob.create()
    jobid = dbJob.id

    # set up process controller instance
    warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.")  # TODO

    PC = process_controller(jobid, parallelization_level=parallelization_level)
    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

    # run the job
    PC.run_all_processors()


def get_gms_argparser():
    """Return argument parser for run_gms.py program."""

    # CONFIGURE MAIN PARSER FOR THE GEOMULTISENS PREPROCESSING CHAIN
    parser = argparse.ArgumentParser(
        prog='run_gms.py',
        description='=' * 70 + '\n' + 'GeoMultiSens preprocessing console argument parser. '
                                      'Python implementation by Daniel Scheffler (daniel.scheffler@gfz-potsdam.de)',
        epilog="The argument parser offers multiple sub-argument parsers (jobid, sceneids, ...) for starting GMS jobs. "
               "Use '>>> python /path/to/gms_preprocessing/run_gms.py <sub-parser> -h' for detailed documentation and "
               "usage hints.")

    parser.add_argument('--version', action='version', version=__version__)

    subparsers = parser.add_subparsers()
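    # NOTE: under Python 3, sub-commands are optional by default, so calling run_gms.py without a sub-parser
    # leaves 'func' unset and parsed_args.func(parsed_args) in the __main__ block fails with an AttributeError.
    # A possible fix (sketch only, not applied here) would be:
    # subparsers = parser.add_subparsers(dest='subcommand')
    # subparsers.required = True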

    # CONFIGURE SUBPARSERS FOR THE GEOMULTISENS PREPROCESSING CHAIN
    parser_jobid = subparsers\
        .add_parser('jobid', description='Run a GeoMultiSens preprocessing job using an already existing job ID.',
                    help="Run a GeoMultiSens preprocessing job using an already existing job ID (Sub-Parser).")

    parser_sceneids = subparsers\
        .add_parser('sceneids', description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
                    help="Run a GeoMultiSens preprocessing job for a given list of scene IDs (Sub-Parser).")

    parser_entityids = subparsers\
        .add_parser('entityids', description='Run a GeoMultiSens preprocessing job for a given list of entity IDs.',
                    help="Run a GeoMultiSens preprocessing job for a given list of entity IDs (Sub-Parser).")

    parser_filenames = subparsers\
        .add_parser('filenames', description='Run a GeoMultiSens preprocessing job for a given list of filenames of '
                                             'downloaded satellite image archives!',
                    help="Run a GeoMultiSens preprocessing job for a given list of filenames of downloaded satellite "
                         "image archives! (Sub-Parser).")

    parser_constraints = subparsers\
        .add_parser('constraints', description='Run a GeoMultiSens preprocessing job matching the given constraints.',
                    help="Run a GeoMultiSens preprocessing job matching the given constraints (Sub-Parser).")

    # parse_from_sceneids = subparsers.add_parser('from_sceneids',
    #    description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
    #    help="use '>>> python /path/to/GeMultiSens/run_gms.py from_sceneids -h' for documentation and usage hints")

    # ADD ARGUMENTS
    # add arguments to parser_jobid
    jid_p = parser_jobid.add_argument
    jid_p('jobid', type=int, help='job ID of an already created GeoMultiSens preprocessing job (must be present in the '
                                  'jobs table of the database)')

    # add arguments to parser_sceneids
    sid_p = parser_sceneids.add_argument
    sid_p('sceneids', nargs='+', type=int,
          help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")

    # add arguments to parser_entityids
    eid_p = parser_entityids.add_argument
    eid_p('entityids', nargs='+', type=str,
          help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
    # FIXME satellite and sensor are required
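    # A possible way to address the FIXME above (sketch only; the option names are assumptions, not part of
    # the current implementation): satellite and sensor could be added as required arguments of the
    # 'entityids' sub-parser, e.g.:
    # eid_p('-satellite', required=True, type=str, help="name of the satellite the given entity IDs belong to")
    # eid_p('-sensor', required=True, type=str, help="name of the sensor the given entity IDs belong to")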

    # add arguments to parser_filenames
    fn_p = parser_filenames.add_argument
    fn_p('filenames', nargs='+', type=str,
         help="list of filenames of satellite image archives corresponding to valid records within the 'scenes' "
              "table of the database")

    # add arguments to parser_constraints
    con_p = parser_constraints.add_argument
    # TODO
    # con_p('constraints', nargs='+', type=str, help="list of entity IDs corresponding to valid records within the "
    #                                            "'scenes' table of the database")

    # add general arguments # TODO add these configs to each subparser
    general_opts = {
        '-db_host': dict(),
        '-exec_mode': dict(nargs=3, type=bool, help="L1A Processor configuration",
                           metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),
        '-exec_L1AP': dict(),
        '-exec_L1BP': dict(),
        '-exec_L1CP': dict(),
        '-exec_L2AP': dict(),
        '-exec_L2BP': dict(),
        '-exec_L2CP': dict(),
        '-sub_multiProc': dict(),
        '-exc_handler': dict(),
        '-blocksize': dict(),
        '-profiling': dict(),
        '-bench_all': dict(),
        '-bench_cloudMask': dict(),

    }
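
    # Sketch of how these general options could be attached to every sub-parser once they are fully specified
    # (currently they are placeholders and are not wired into any parser; note also that argparse's type=bool
    # does not convert strings such as '0' or 'False' to False, so '-exec_mode' would need a custom converter):
    # for subparser in (parser_jobid, parser_sceneids, parser_entityids, parser_filenames, parser_constraints):
    #     for opt_name, opt_kwargs in general_opts.items():
    #         subparser.add_argument(opt_name, **opt_kwargs)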

    # LINK PARSERS TO RUN FUNCTIONS
    parser_jobid.set_defaults(func=run_from_jobid)
    parser_sceneids.set_defaults(func=run_from_sceneids)
    parser_entityids.set_defaults(func=run_from_entityids)
    parser_filenames.set_defaults(func=run_from_filenames)
    parser_constraints.set_defaults(func=run_from_constraints)

    return parser


if __name__ == '__main__':
    # if len(sys.argv)<2:
    #     # a job ID has not been given
    #
    #     # ID = 26184107
    #     # ID = 26185175   # 1x TM5
    #     # ID = 26185176   # 1x Landsat
    #     # ID = 26185177  # 1st Sentinel-2 test scene
    #     # ID = 26185189   # directly adjacent granules of the 1st Sentinel-2 test scene
    #     # ID = 26185237  # 4 x Landsat-8 -> job created via database tools
    #     # ID = 26185239  # 50 x Landsat-8 -> job created via database tools - 1st L8 beta test job
    #     # ID = 26185242  # 1 x Landsat-8 - Bug files_in_archive=None
    #     # ID = 26185250  # Beta-Job - 219 x L8, 172 x L7, 111 x S2, spatref S2
    #     # ID = 26185251  # 1x L8, target sensor L8
    #     # ID = 26185252  # 1x L8, target sensor L8, spat.ref L8
    #     # ID = 26185253  # 25x L8, target sensor L8, spat.ref L8
    #     # ID = 26185254  # 10x L8, target sensor L8, spat.ref L8
    #     # reason = L1A write error in tiled Python mode when the job contains more than 1 scene:
    #     # ID = 26185255  # 1x L8 Bug 5 corners found
    #     # ID = 26185256  # 1x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185257  # Beta-Job - 219 x L8, 172 x L7, 111 x S2, spatref L8
    #     # ID = 26185258  # Beta-Job - 219 x L8, spatref L8
    #     # ID = 26185259  # Beta-Job - 172 x L7, spatref L8
    #     # ID = 26185260  # Beta-Job - 111 x S2, spatref L8
    #     # ID = 26185268  # 25x L7 SLC off, target sensor L8, spat.ref L8
    #     # ID = 26185269  # 1x L7 SLC off, Bug SpatialIndexMediator
    #     # ID = 26185270  # 5x L7 SLC off, Bug SpatialIndexMediator
    #     # ID = 26185275  # 1x L8, spat. Ref. L8 Bug L1B_mask not found
    #     # ID = 26185264  # 1x L8, Bug L1B_masks not found
    #     # ID = 26185265  # 1x L8, Bug L2B_masks not found
    #     # ID = 26185268  # "2x L8, Bug L2B_masks not found, incl. 1x bad archive"
    #     # ID = 26185269 # "10x L8, Bug L2B_masks not found"
    #     # ID = 26185272 # "1x S2A Sips"
    #     ID = 26185273  # "1x L7, target L8, spat.ref L8"
    #     # ID = 26185275 # "1x L7, target L8, spat.ref L8 L1B Matching failed"
    #     # ID = 26185276 # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
    #     # ID = 26185279 # "GEOMS: 25x L7, target L8, spat.ref L8"
    #     # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within
    #     # mapinfo2geotransform":
    #     # ID = 26185280
    #     # ID = 26185281 # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185283 # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185284 # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
    #     # ID = 26185321 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
    #     # "GEOMS: 1x L7, target L8, spat.ref L8, Bug calc_shifted_cross_power_spectrum: NoneType object not iterable":
    #     # ID = 26185322
    #     # ID = 26185277 # "GMS41: 10x L7, target L8, spat.ref L8, Permission errors during logging"
    #     # ID = 26185278 # "Beta-Job - 172 x L7, spatref L8"
    #     # ID = 26185284 # "GMS41: "all beta-L8 with cloud cover <30% (74 scenes)"
    #     # ID = 26185285 # "GMS41: "all beta-L7 with cloud cover <30% (49 scenes)"
    #     # ID = 26185396 # "GEOMS: 1x S2A multi GSD testing"
    #     # ID = 26185398  # "GEOMS: 1x S2A granule multi GSD testing"
    #
    # else:
    #     ID = int(sys.argv[1])

    # RUN!
    parsed_args = get_gms_argparser().parse_args()
    parsed_args.func(parsed_args)

    print('\nready.')