# -*- coding: utf-8 -*-

import collections
import copy
import datetime
import glob
import json
import os
import re
import shutil
import sys
import warnings
import logging
from itertools import chain

import numpy as np
import spectral
from spectral.io import envi
from numba import jit
from pandas import DataFrame, read_csv

try:
    from osgeo import gdalnumeric
except ImportError:
    import gdalnumeric

from geoarray import GeoArray
from py_tools_ds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.geo.projection import EPSG2WKT
from py_tools_ds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.geo.coord_trafo import pixelToLatLon, pixelToMapYX

from sicor.options import get_options as get_ac_options

from ..misc.logging import GMS_logger as DatasetLogger
from ..model.mgrs_tile import MGRS_tile
from ..model.metadata import METADATA, get_dict_LayerOptTherm, metaDict_to_metaODict
from ..model.dataset import Dataset
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..options.config import GMS_config as CFG
from ..algorithms import geoprocessing as GEOP
from ..io import input_reader as INP_R
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import definition_dicts as DEF_D

__author__ = 'Daniel Scheffler'


class GMS_object(Dataset):
    def __init__(self, pathImage=''):
        # get all attributes of base class "Dataset"
        super(GMS_object, self).__init__()

        # add private attributes
        self._dict_LayerOptTherm = None
        self._cloud_masking_algorithm = None
        self._meta_odict = None
        self._coreg_info = None

        self.job_ID = CFG.ID
        # FIXME not needed anymore?:
        # self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['datasetid'],
        #                                {'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
        self.scenes_proc_ID = None  # set by Output writer after creation/update of db record in table scenes_proc
        self.mgrs_tiles_proc_ID = None  # set by Output writer after creation/update of db rec in table mgrs_tiles_proc
        self.MGRS_info = None

        # set paths
        self.path_cloud_class_obj = ''

        # handle initialization arguments
        if pathImage:
            # run the setter for 'arr' of the base class 'Dataset', which creates an instance of GeoArray
            self.arr = pathImage

    def __getstate__(self):
        """Defines how the attributes of GMS object are pickled."""

        self.close_loggers()
        del self.pathGen  # path generator can only be used for the current processing level

        # delete arrays if their in-mem size is too big to be pickled
        # => (avoids MaybeEncodingError: Error sending result: '[<gms_preprocessing.algorithms.L2C_P.L2C_object
        #    object at 0x7fc44f6399e8>]'. Reason: 'error("'i' format requires -2147483648 <= number <= 2147483647",)')
        if self.proc_level == 'L2C' and CFG.inmem_serialization:
            # FIXME check by bandname
            if self.mask_nodata is not None and self.masks.bands > 1 and self.mask_clouds is not None:
                del self.masks

        return self.__dict__
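
    # Illustrative note (not part of the original code): __getstate__ runs whenever an
    # instance is pickled, e.g. when GMS objects are passed between multiprocessing
    # workers. A minimal sketch with a hypothetical 'obj':
    #     import pickle
    #     payload = pickle.dumps(obj)  # triggers obj.__getstate__() -> L2C masks may be dropped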

    def set_pathes(self):
        self.baseN = self.pathGen.get_baseN()
        self.path_procdata = self.pathGen.get_path_procdata()
        self.ExtractedFolder = self.pathGen.get_path_tempdir()
        self.path_logfile = self.pathGen.get_path_logfile()
        self.pathGen = PG.path_generator(self.__dict__)  # passes a logger in addition to previous attributes
        self.path_archive = self.pathGen.get_local_archive_path_baseN()

        if not CFG.inmem_serialization:
            self.path_InFilePreprocessor = os.path.join(self.ExtractedFolder, '%s%s_DN.bsq'
104
105
                                                        % (self.entity_ID,
                                                           ('_%s' % self.subsystem if self.subsystem else '')))
        else:  # keep data in memory
            self.path_InFilePreprocessor = None  # None: keeps all produced data in memory (numpy array attributes)

        self.path_MetaPreprocessor = self.path_archive

    def validate_pathes(self):
        if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
            self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data archive"
114
                             "has been found at %s." % (self.sensor, self.entity_ID, self.path_archive))
            self.logger.info('Trying to download the dataset...')
            self.path_archive_valid = self._data_downloader(self.sensor, self.entity_ID)
        else:
            self.path_archive_valid = True

        if not CFG.inmem_serialization and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
            os.makedirs(self.ExtractedFolder)

        assert os.path.exists(self.path_archive), 'Invalid path to RAW data. File %s does not exist at %s.' \
                                                  % (os.path.basename(self.path_archive),
                                                     os.path.dirname(self.path_archive))
        assert isinstance(self.path_archive, str), 'Invalid path to RAW data. Got %s instead of string or unicode.' \
                                                   % type(self.path_archive)
        if not CFG.inmem_serialization and self.ExtractedFolder:
            assert os.path.exists(self.ExtractedFolder), \
                'Invalid path for temporary files. Directory %s does not exist.' % self.ExtractedFolder

    @property
    def logger(self):
        if self._loggers_disabled:
            return None
        if self._logger and self._logger.handlers[:]:
            return self._logger
        else:
            self._logger = DatasetLogger('log__' + self.baseN, fmt_suffix=self.scene_ID, path_logfile=self.path_logfile,
                                         log_level=CFG.log_level, append=True)
            return self._logger

    @logger.setter
    def logger(self, logger):
        assert isinstance(logger, logging.Logger) or logger in ['not set', None], \
            "GMS_obj.logger cannot be set to %s." % logger

        # save prior logs
        # if logger is None and self._logger is not None:
        #    self.log += self.logger.captured_stream
        self._logger = logger

    @property
    def GMS_identifier(self):
        return collections.OrderedDict(zip(
            ['image_type', 'Satellite', 'Sensor', 'Subsystem', 'proc_level', 'dataset_ID', 'logger'],
            [self.image_type, self.satellite, self.sensor, self.subsystem, self.proc_level, self.dataset_ID,
             self.logger]))

    @property
    def MetaObj(self):
        if self._meta_odict:
            # if there is already a meta_odict -> create a new MetaObj from it (ensures synchronization!)
            self._MetaObj = METADATA(self.GMS_identifier).from_odict(self._meta_odict)
            del self.meta_odict
        elif not self._MetaObj:
            # if there is no meta_odict and no MetaObj -> create MetaObj by reading metadata from disk
            pass  # reading from disk should use L1A_P.L1A_object.import_metadata -> so just return None

        return self._MetaObj

    @MetaObj.setter
    def MetaObj(self, MetaObj):
        assert isinstance(MetaObj, METADATA), "'MetaObj' can only be set to an instance of METADATA class. " \
                                              "Got %s." % type(MetaObj)
        self._MetaObj = MetaObj

        # update meta_odict
        del self.meta_odict  # it is recreated if getter is used the next time

    @MetaObj.deleter
    def MetaObj(self):
        if hasattr(self, '_MetaObj') and self._MetaObj and hasattr(self._MetaObj, 'logger') and \
                self._MetaObj.logger not in [None, 'not set']:
            self._MetaObj.logger.close()
            self._MetaObj.logger = None

        self._MetaObj = None

    @property
    def meta_odict(self):
        if self._MetaObj:
            # if there is already a MetaObj -> create new meta_odict from it (ensures synchronization!)
            self._meta_odict = self._MetaObj.to_odict()
            del self.MetaObj
        elif not self._meta_odict:
            # if there is no MetaObj and no meta_odict -> use MetaObj getter to read metadata from disk
            pass  # reading from disk should use L1A_P.L1A_object.import_metadata -> so just return None
            self._meta_odict = None

        return self._meta_odict

    @meta_odict.setter
    def meta_odict(self, odict):
        assert isinstance(odict, (collections.OrderedDict, dict)), "'meta_odict' can only be set to an instance of " \
                                                                   "collections.OrderedDict. Got %s." % type(odict)
        self._meta_odict = odict

        # update MetaObj
        del self.MetaObj  # it is recreated if getter is used the next time

    @meta_odict.deleter
    def meta_odict(self):
        self._meta_odict = None
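
    # Synchronization sketch (illustration only): 'MetaObj' and 'meta_odict' act as
    # mutually exclusive caches; reading one property converts from and invalidates the other:
    #     odict = obj.meta_odict  # if a MetaObj exists: _meta_odict = _MetaObj.to_odict(); MetaObj is dropped
    #     meta = obj.MetaObj      # rebuilds METADATA from _meta_odict; meta_odict is dropped again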

    @property
    def dict_LayerOptTherm(self):
        if self._dict_LayerOptTherm:
            return self._dict_LayerOptTherm
        elif self.LayerBandsAssignment:
            self._dict_LayerOptTherm = get_dict_LayerOptTherm(self.identifier, self.LayerBandsAssignment)
            return self._dict_LayerOptTherm
        else:
            return None

    @property
    def georef(self):
        """Returns True if the current dataset can serve as spatial reference."""

        return self.image_type == 'RSD' and bool(re.search('OLI', self.sensor, re.I))

    @property
    def coreg_needed(self):
        if self._coreg_needed is None:
            self._coreg_needed = not (self.dataset_ID == CFG.datasetid_spatial_ref)
        return self._coreg_needed

    @coreg_needed.setter
    def coreg_needed(self, value):
        self._coreg_needed = value

    @property
    def coreg_info(self):
        if not self._coreg_info:
            self._coreg_info = {
                'corrected_shifts_px': {'x': 0, 'y': 0},
                'corrected_shifts_map': {'x': 0, 'y': 0},
                'original map info': self.meta_odict['map info'],
                'updated map info': None,
                'reference scene ID': None,
                'reference entity ID': None,
                'reference geotransform': None,
                # reference projection must be the own projection in order to avoid overwriting with a wrong EPSG
                'reference projection': self.meta_odict['coordinate system string'],
                'reference extent': {'rows': None, 'cols': None},
                'reference grid': [list(CFG.spatial_ref_gridx),
                                   list(CFG.spatial_ref_gridy)],
                'success': False
            }

        return self._coreg_info

    @coreg_info.setter
    def coreg_info(self, val):
        self._coreg_info = val
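
    # Usage sketch (hypothetical values): a co-registration step would fill this dict, e.g.:
    #     info = obj.coreg_info                                # starts with zero shifts and success=False
    #     info['corrected_shifts_px'] = {'x': 0.3, 'y': -0.7}  # hypothetical sub-pixel shifts
    #     info['success'] = True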

    @property
    def resamp_needed(self):
        if self._resamp_needed is None:
            gt = mapinfo2geotransform(self.meta_odict['map info'])
            self._resamp_needed = not is_coord_grid_equal(gt, CFG.spatial_ref_gridx,
                                                          CFG.spatial_ref_gridy)
        return self._resamp_needed

    @resamp_needed.setter
    def resamp_needed(self, value):
        self._resamp_needed = value

    @property
    def masks(self):
        # if self.mask_nodata is not None and self.mask_clouds is not None and \
        #         self._masks is not None and self._masks.bands == 1:
        #     self.build_combined_masks_array()
        return self._masks

    @masks.setter
    def masks(self, *geoArr_initArgs):
        """
        NOTE: This does not automatically update mask_nodata and mask_clouds BUT if mask_nodata and mask_clouds are
        None their getters will automatically synchronize!
        """
        if geoArr_initArgs[0] is not None:
            self._masks = GeoArray(*geoArr_initArgs)
            self._masks.nodata = 0
            self._masks.gt = self.arr.gt
            self._masks.prj = self.arr.prj
        else:
            del self.masks

    @masks.deleter
    def masks(self):
        self._masks = None
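
    # Note (illustration, hypothetical path): the setter keeps the geocoding of the combined
    # mask in sync with self.arr, so a plain file path is enough to (re)attach masks:
    #     obj.masks = '/path/to/masks.bsq'  # GeoArray inherits gt/prj from obj.arr; nodata is set to 0
    #     del obj.masks                     # resets the combined mask array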

    @property
    def cloud_masking_algorithm(self):
        if not self._cloud_masking_algorithm:
            self._cloud_masking_algorithm = CFG.cloud_masking_algorithm[self.satellite]
        return self._cloud_masking_algorithm

    @property
    def ac_options(self):
        """
        Returns the options dictionary needed as input for atmospheric correction. If an empty dictionary is returned,
        atmospheric correction is not yet available for the current sensor and will later be skipped.
        """
        if not self._ac_options:
            path_ac_options = CFG.path_custom_sicor_options or PG.get_path_ac_options(self.GMS_identifier)

            if path_ac_options and os.path.exists(path_ac_options):
                # don't validate because options contain paths that do not exist on another server:
                opt_dict = get_ac_options(path_ac_options, validation=False)

                # update some file paths depending on the current environment
                opt_dict['DEM']['fn'] = CFG.path_dem_proc_srtm_90m
                opt_dict['ECMWF']['path_db'] = CFG.path_ECMWF_db
                opt_dict['S2Image'][
                    'S2_MSI_granule_path'] = None  # only a placeholder -> will always be None for GMS usage
                opt_dict['output'] = []  # outputs are not needed for GMS -> leave the list empty
                opt_dict['report']['report_path'] = os.path.join(self.pathGen.get_path_procdata(), '[TYPE]')
                if 'uncertainties' in opt_dict:
                    opt_dict['uncertainties']['snr_model'] = PG.get_path_snr_model(self.GMS_identifier)

                # apply custom configuration
                opt_dict["logger"]['level'] = CFG.log_level
                opt_dict["ram"]['upper_limit'] = CFG.ac_max_ram_gb
                opt_dict["ram"]['unit'] = 'GB'
                opt_dict["AC"]['fill_nonclear_areas'] = CFG.ac_fillnonclear_areas
                opt_dict["AC"]['clear_area_labels'] = CFG.ac_clear_area_labels
                # opt_dict['AC']['n_cores'] = CFG.CPUs if CFG.allow_subMultiprocessing else 1

                self._ac_options = opt_dict
            else:
                self.logger.warning('There is no options file available for atmospheric correction. '
                                    'Atmospheric correction must be skipped.')

        return self._ac_options
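
    # Usage sketch (illustrative): the returned dictionary is handed over to sicor for
    # atmospheric correction; an empty result means AC has to be skipped:
    #     options = obj.ac_options
    #     if options:
    #         assert options['AC']['fill_nonclear_areas'] == CFG.ac_fillnonclear_areas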

    def get_copied_dict_and_props(self, remove_privates=False):
        # type: (bool) -> dict
        """Returns a copy of the current object dictionary including the current values of all object properties."""

        # loggers must be closed
        self.close_GMS_loggers()
        # this disables automatic recreation of loggers (otherwise loggers are created by using getattr()):
        self._loggers_disabled = True

        out_dict = self.__dict__.copy()

        # add properties
        property_names = [p for p in dir(self.__class__) if isinstance(getattr(self.__class__, p), property)]
        [out_dict.update({propK: copy.copy(getattr(self, propK))}) for propK in property_names]

        # remove private attributes
        if remove_privates:
            out_dict = {k: v for k, v in out_dict.items() if not k.startswith('_')}

        self._loggers_disabled = False  # enables automatic recreation of loggers

        return out_dict

    def attributes2dict(self, remove_privates=False):
        # type: (bool) -> dict
        """Returns a copy of the current object dictionary including the current values of selected object properties."""

        # loggers must be closed
        self.close_GMS_loggers()
        # this disables automatic recreation of loggers (otherwise loggers are created by using getattr()):
        self._loggers_disabled = True

        out_dict = self.__dict__.copy()

        # add some selected property values
        for i in ['GMS_identifier', 'LayerBandsAssignment', 'coreg_needed', 'coreg_info', 'resamp_needed',
                  'dict_LayerOptTherm', 'georef', 'meta_odict']:
            out_dict[i] = getattr(self, i)

        # remove private attributes
        if remove_privates:
            out_dict = {k: v for k, v in out_dict.items() if not k.startswith('_')}

        self._loggers_disabled = False  # enables automatic recreation of loggers
        return out_dict

    def _data_downloader(self, sensor, entity_ID):
        self.logger.info('Data downloader started.')
        success = False
        " > download source code for Landsat here < "
        if not success:
            self.logger.critical(
                "Download for %s dataset '%s' failed. No further processing possible." % (sensor, entity_ID))
            raise RuntimeError('Archive download failed.')
        return success

    def from_disk(self, tuple_GMS_subset):
        """Fills an already instanced GMS object with data from disk. Excludes array attributes in Python mode.

        :param tuple_GMS_subset:    <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
        """

        path_GMS_file = tuple_GMS_subset[0]
        GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)

        # copy all attributes from GMS file (private attributes are not touched since they are not in the GMS file)
        self.meta_odict = GMSfileDict['meta_odict']  # set that first in order to make some getters and setters work
        for key, value in GMSfileDict.items():
            if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
                continue  # properties that should better be created on the fly
            try:
                setattr(self, key, value)
            except Exception:
                raise AttributeError("Can't set attribute %s." % key)

        self.acq_datetime = datetime.datetime.strptime(self.acq_datetime, '%Y-%m-%d %H:%M:%S.%f%z')
        self.arr_shape, self.arr_pos = tuple_GMS_subset[1]

        self.arr = self.pathGen.get_path_imagedata()
        # self.mask_nodata and self.mask_clouds are auto-synchronized via self.masks (see their getters):
        self.masks = self.pathGen.get_path_maskdata()

        return copy.copy(self)
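
    # Usage sketch (hypothetical path): re-instantiate a previously written GMS object
    # from its *.gms file, here requesting the full data cube:
    #     obj = GMS_object().from_disk(('/path/to/scene.gms', ['cube', None]))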

    def from_sensor_subsystems(self, list_GMS_objs):
        # type: (list) -> GMS_object
        """Merge separate GMS objects belonging to the same scene-ID into ONE GMS object.

        :param list_GMS_objs:   <list> of GMS objects covering the same geographic area but representing different
                                sensor subsystems (e.g. 3 GMS_objects for Sentinel-2 10m/20m/60m bands)
        """

        # assertions
        assert len(list_GMS_objs) > 1, "'GMS_object.from_sensor_subsystems()' expects multiple input GMS objects. " \
                                       "Got %d." % len(list_GMS_objs)
        assert all([is_coord_grid_equal(list_GMS_objs[0].arr.gt, *obj.arr.xygrid_specs) for obj in list_GMS_objs[1:]]),\
            "The input GMS objects must have the same pixel grid. Received: %s" \
            % np.array([obj.arr.xygrid_specs for obj in list_GMS_objs])
        assert len(list(set([GMS_obj.proc_level for GMS_obj in list_GMS_objs]))) == 1, \
            "The input GMS objects for GMS_object.from_sensor_subsystems() must have the same processing level."
        subsystems = [GMS_obj.subsystem for GMS_obj in list_GMS_objs]
        assert len(subsystems) == len(list(set(subsystems))), \
            "The input 'list_GMS_objs' contains duplicates: %s" % subsystems

        # log
        list_GMS_objs[0].logger.info('Merging the subsystems %s to a single GMS object...'
                                     % ', '.join([GMS_obj.subsystem for GMS_obj in list_GMS_objs]))

        # find the common extent. NOTE: boundsMap is expected in the order [xmin,xmax,ymin,ymax]
        geoExtents = np.array([GMS_obj.arr.box.boundsMap for GMS_obj in list_GMS_objs])
        common_extent = (min(geoExtents[:, 0]), max(geoExtents[:, 1]), min(geoExtents[:, 2]), max(geoExtents[:, 3]))

        # MERGE METADATA
        # copy all attributes from the first input GMS file (private attributes are not touched)
        for key, value in list_GMS_objs[0].__dict__.copy().items():
            if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
                continue  # properties that should better be created on the fly
            try:
                setattr(self, key, value)
            except Exception:
                raise AttributeError("Can't set attribute %s." % key)

        # update LayerBandsAssignment and get full list of output bandnames
        from .metadata import get_LayerBandsAssignment
        # use identifier of first input GMS object for getting LBA (respects current proc_level):
        gms_idf = list_GMS_objs[0].GMS_identifier
        self.LayerBandsAssignment = get_LayerBandsAssignment(gms_idf, return_fullLBA=True)
        bandnames = ['B%s' % i if len(i) == 2 else 'B0%s' % i for i in self.LayerBandsAssignment]

        # update layer-dependent metadata with respect to remaining input GMS objects
        self.meta_odict.update({
            'band names': [('Band %s' % i) for i in self.LayerBandsAssignment],
            'LayerBandsAssignment': self.LayerBandsAssignment,
            'Subsystem': '',
            'PhysUnit': self.meta_odict['PhysUnit'],  # TODO can contain val_optical / val_thermal
        })
        self.subsystem = ''
        del self.pathGen  # must be refreshed because subsystem is now ''
        self.close_GMS_loggers()  # must also be refreshed because it depends on pathGen

        for attrN in ['SolIrradiance', 'CWL', 'FWHM', 'Offsets', 'OffsetsRef', 'Gains', 'GainsRef',
                      'ThermalConstK1', 'ThermalConstK2', 'ViewingAngle_arrProv', 'IncidenceAngle_arrProv']:

            # combine values from separate subsystems to a single value
            attrDic_fullLBA = {}
            for GMS_obj in list_GMS_objs:
                attr_val = getattr(GMS_obj.MetaObj, attrN)
                if isinstance(attr_val, list):
                    attrDic_fullLBA.update(dict(zip(GMS_obj.LayerBandsAssignment, attr_val)))
                elif isinstance(attr_val, (dict, collections.OrderedDict)):
                    attrDic_fullLBA.update(attr_val)
                else:
                    raise ValueError(attrN)

            # update the attribute in self.MetaObj
            if attrDic_fullLBA:
                val2set = [attrDic_fullLBA[bN] for bN in self.LayerBandsAssignment] \
                    if isinstance(getattr(list_GMS_objs[0].MetaObj, attrN), list) else attrDic_fullLBA
                setattr(self.MetaObj, attrN, val2set)

        # merge logfiles (read all logs into a DataFrame, sort by the first column and write to a new logfile)
        [GMS_obj.close_GMS_loggers() for GMS_obj in list_GMS_objs]  # close the loggers of the input objects
        paths_inLogs = [GMS_obj.pathGen.get_path_logfile() for GMS_obj in list_GMS_objs]
        allLogs_df = DataFrame()
        for log in paths_inLogs:
            df = read_csv(log, sep='\n', delimiter=':   ', header=None,
                          engine='python')  # engine suppresses a pandas warning
            allLogs_df = allLogs_df.append(
                df)  # FIXME this will log e.g. atm. corr 3 times for S2A -> use captured streams instead?

        allLogs_df = allLogs_df.sort_values(0)
        np.savetxt(self.pathGen.get_path_logfile(), np.array(allLogs_df), delimiter=':   ', fmt="%s")

        # MERGE ARRAY DATA
        # overwrite array data with merged arrays, clipped to common_extent and reordered according to FullLayerBandsAss
        for attrname in ['arr', 'ac_errors', 'dem', 'mask_nodata', 'mask_clouds', 'mask_clouds_confidence', 'masks']:

            # get current attribute of each subsystem without running property getters
            all_arrays = [getattr(GMS_obj, '_%s' % attrname) for GMS_obj in list_GMS_objs]

            # get the same geographical extent for each input GMS object
            if len(set(tuple(ext) for ext in geoExtents.tolist())) > 1:
                # in case of different extents
                geoArrs_same_extent = []

                for geoArr in all_arrays:

                    if geoArr is not None:
                        # FIXME mask_clouds_confidence is no GeoArray until here
                        # FIXME -> has no nodata value -> calculation throws a warning
                        geoArr_same_extent = \
                            GeoArray(*geoArr.get_mapPos(
                                mapBounds=np.array(common_extent)[[0, 2, 1, 3]],  # pass (xmin, ymin, xmax, ymax)
                                mapBounds_prj=geoArr.prj),
                                     bandnames=list(geoArr.bandnames.keys()),
                                     nodata=geoArr.nodata)
                        geoArrs_same_extent.append(geoArr_same_extent)
                    else:
                        # e.g. in case of a cloud mask that is only appended to the GMS object with the same
                        # spatial resolution
                        geoArrs_same_extent.append(None)

            else:
                # skip get_mapPos() if all input GMS objects have the same extent
                geoArrs_same_extent = all_arrays

            # validate output GeoArrays
            if len([gA for gA in geoArrs_same_extent if gA is not None]) > 1:
                equal_bounds = all([geoArrs_same_extent[0].box.boundsMap == gA.box.boundsMap
                                    for gA in geoArrs_same_extent[1:]])
                equal_epsg = all([geoArrs_same_extent[0].epsg == gA.epsg for gA in geoArrs_same_extent[1:]])
                equal_xydims = all([geoArrs_same_extent[0].shape[:2] == gA.shape[:2] for gA in geoArrs_same_extent[1:]])
                if not all([equal_bounds, equal_epsg, equal_xydims]):
                    raise RuntimeError('Something went wrong during getting the same geographical extent for all the '
                                       'input GMS objects. The extents, projections or pixel dimensions of the '
                                       'calculated input GMS objects are not equal.')

            # set output arrays
            if attrname in ['arr', 'ac_errors'] and list(set(geoArrs_same_extent)) != [None]:
                # the bands of these arrays have to be reordered according to FullLayerBandsAssignment

                # check that each desired band name for the current attribute is provided by one of the input
                # GMS objects
                available_bandNs = list(chain.from_iterable([list(gA.bandnames) for gA in geoArrs_same_extent]))
                for bN in bandnames:
                    if bN not in available_bandNs:
                        raise ValueError("The given input GMS objects (subsystems) do not provide a bandname '%s' for "
                                         "the attribute '%s'. Available band names amongst all input GMS objects are: "
                                         "%s" % (bN, attrname, str(available_bandNs)))

                # merge arrays
                def get_band(bandN): return [gA[bandN] for gA in geoArrs_same_extent if gA and bandN in gA.bandnames][0]
                full_geoArr = GeoArray(np.dstack([get_band(bandN) for bandN in bandnames]),
                                       geoArrs_same_extent[0].gt, geoArrs_same_extent[0].prj,
                                       bandnames=bandnames,
                                       nodata=geoArrs_same_extent[0].nodata)
                setattr(self, attrname, full_geoArr)

            else:
                # masks, dem, mask_nodata, mask_clouds, mask_clouds_confidence
                if attrname == 'dem':
                    # use the DEM of the first input object
                    # (if the grid is the same, the DEMs should be the same anyway)
                    self.dem = geoArrs_same_extent[0]
                elif attrname == 'mask_nodata':
                    # must not be merged -> self.arr is already merged, so just recalculate it (np.all)
                    self.mask_nodata = self.calc_mask_nodata(overwrite=True)
                elif attrname == 'mask_clouds':
                    # possibly only present in ONE subsystem (set by atm. Corr.)
                    mask_clouds = [msk for msk in geoArrs_same_extent if msk is not None]
                    if len(mask_clouds) > 1:
                        raise ValueError('Expected mask clouds in only one subsystem. Got %s.' % len(mask_clouds))
                    self.mask_clouds = mask_clouds[0] if mask_clouds else None
                elif attrname == 'mask_clouds_confidence':
                    # possibly only present in ONE subsystem (set by atm. Corr.)
                    mask_clouds_conf = [msk for msk in geoArrs_same_extent if msk is not None]
                    if len(mask_clouds_conf) > 1:
                        raise ValueError(
                            'Expected mask_clouds_conf in only one subsystem. Got %s.' % len(mask_clouds_conf))
                    self.mask_clouds_confidence = mask_clouds_conf[0] if mask_clouds_conf else None
                elif attrname == 'masks':
                    # self.mask_nodata and self.mask_clouds will already be set here -> so just recreate it from there
                    self.masks = None

        # recreate self.masks
        self.build_combined_masks_array()

        # update array-dependent metadata
        self.meta_odict.update({
            'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
            'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })

        # set shape of full array
        self.shape_fullArr = self.arr.shape

        return copy.copy(self)
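
    # Usage sketch (illustrative): merge e.g. the three Sentinel-2 subsystem objects
    # (10m/20m/60m bands) of one scene into a single object:
    #     merged = GMS_object().from_sensor_subsystems([obj_10m, obj_20m, obj_60m])  # hypothetical inputs
    #     merged.subsystem  # -> '' (the subsystem distinction no longer applies)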

    def from_tiles(self, list_GMS_tiles):
        # type: (list) -> GMS_object
        """Merge separate GMS objects with different spatial coverage but belonging to the same scene-ID into ONE GMS object.

        :param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()
        """

        if 'IMapUnorderedIterator' in str(type(list_GMS_tiles)):
            list_GMS_tiles = list(list_GMS_tiles)

        # copy all attributes except array attributes
        tile1 = list_GMS_tiles[0]
        [setattr(self, i, getattr(tile1, i)) for i in tile1.__dict__
         if not callable(getattr(tile1, i)) and not isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]

        # MERGE ARRAY-ATTRIBUTES
        list_arraynames = [i for i in tile1.__dict__ if not callable(getattr(tile1, i)) and
                           isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]
        list_arraynames = ['_arr'] + [i for i in list_arraynames if
                                      i != '_arr']  # list must start with _arr, otherwise setters will not work

        for arrname in list_arraynames:
            samplearray = getattr(tile1, arrname)
            assert isinstance(samplearray, (np.ndarray, GeoArray)), \
                'Received a %s object for attribute %s. Expected a numpy array or an instance of GeoArray.' \
                % (type(samplearray), arrname)
            is_3d = samplearray.ndim == 3
            bands = (samplearray.shape[2],) if is_3d else ()  # dynamic -> works for arr, cld_arr, ...
            target_shape = tuple(self.shape_fullArr[:2]) + bands
            target_dtype = samplearray.dtype
            merged_array = self._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)

            setattr(self, arrname if not arrname.startswith('_') else arrname[1:],
                    merged_array)  # use setters if possible
            # NOTE: this asserts that each attribute starting with '_' also has a property with a setter!

        # UPDATE ARRAY-DEPENDENT ATTRIBUTES
        self.arr_shape = 'cube'
        self.arr_pos = None

        # update MetaObj attributes
        self.meta_odict.update({
            'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
            'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })

        # calculate data_corners_imXY (mask_nodata is always an array here because get_mapPos always returns an array)
        corners_imYX = calc_FullDataset_corner_positions(
            self.mask_nodata, assert_four_corners=False, algorithm='shapely')
        self.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX]  # [UL, UR, LL, LR]

        # calculate trueDataCornerLonLat
        data_corners_LatLon = pixelToLatLon(self.trueDataCornerPos, geotransform=self.arr.gt, projection=self.arr.prj)
        self.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]

        # calculate trueDataCornerUTM
        data_corners_utmYX = pixelToMapYX(self.trueDataCornerPos, geotransform=self.arr.gt,
                                          projection=self.arr.prj)  # FIXME asserts gt in UTM coordinates
        self.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]

        return copy.copy(self)

    @staticmethod
    @jit
    def _numba_array_merger(list_GMS_tiles, arrname2merge, target_shape, target_dtype):
        # type: (list, str, tuple, np.dtype) -> np.ndarray
        """
        private function, e.g. called by merge_GMS_tiles_to_GMS_obj() in order to speed up array merging

        :param list_GMS_tiles:
        :param arrname2merge:
        :param target_shape:
        :param target_dtype:
        :return:
        """
        out_arr = np.empty(target_shape, dtype=target_dtype)
        for idx, tile in enumerate(list_GMS_tiles):
            rowStart, rowEnd = tile.arr_pos[0]
            colStart, colEnd = tile.arr_pos[1]
            out_arr[rowStart:rowEnd + 1, colStart:colEnd + 1] = getattr(tile, arrname2merge)
        return out_arr
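
    # Illustration: each tile stores its target position as arr_pos = ((rowStart, rowEnd),
    # (colStart, colEnd)) with inclusive bounds, which is why the merger writes
    # out_arr[rowStart:rowEnd + 1, colStart:colEnd + 1]. A hypothetical first tile of a
    # 512-pixel blocking would carry arr_pos == ((0, 511), (0, 511)).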

    def log_for_fullArr_or_firstTile(self, log_msg, subset=None):
        """Send a message to the logger only if full array or the first tile is currently processed.
        This function can be called when processing any tile but log message will only be sent from first tile.

        :param log_msg:  the log message to be logged
        :param subset:   subset argument as sent to e.g. DN2TOARadRefTemp that indicates which tile is to be processed.
                         Not needed if self.arr_pos is not None.
        """

        if subset is None and \
            (self.arr_shape == 'cube' or self.arr_pos is None or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]) or\
                subset == ['cube', None] or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
                hasattr(self, 'logAtThisTile') and getattr(self, 'logAtThisTile'):  # cube or 1st tile
            self.logger.info(log_msg)
        else:
            pass
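
    # Usage sketch (hypothetical subset): when tiles are processed in parallel, only the
    # tile starting at image position (0, 0) emits the message:
    #     obj.log_for_fullArr_or_firstTile('Converting DNs...', subset=['block', ((0, 511), (0, 511))])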

    def apply_nodata_mask_to_ObjAttr(self, attrname, out_nodata_val=None):
        # type: (str, int) -> None
        """Applies self.mask_nodata to the specified array attribute by setting all values where mask_nodata is 0 to the
        given nodata value.

        :param attrname:         The attribute to apply the nodata mask to. Must be an array attribute or
                                 a string path to a previously saved ENVI-file.
        :param out_nodata_val:   set the values of the given attribute to this value.
        """

        assert hasattr(self, attrname)

        if getattr(self, attrname) is not None:

            if isinstance(getattr(self, attrname), str):
                update_spec_vals = attrname == 'arr'
                self.apply_nodata_mask_to_saved_ENVIfile(getattr(self, attrname), out_nodata_val, update_spec_vals)
            else:
                assert isinstance(getattr(self, attrname), (np.ndarray, GeoArray)), \
                    'L1A_obj.%s must be a numpy array or an instance of GeoArray. Got type %s.' \
                    % (attrname, type(getattr(self, attrname)))
                assert hasattr(self, 'mask_nodata') and self.mask_nodata is not None

                self.log_for_fullArr_or_firstTile('Applying nodata mask to L1A_object.%s...' % attrname)

                nodata_val = out_nodata_val if out_nodata_val is not None else \
                    DEF_D.get_outFillZeroSaturated(getattr(self, attrname).dtype)[0]
                getattr(self, attrname)[self.mask_nodata.astype(np.int8) == 0] = nodata_val

                if attrname == 'arr':
                    self.MetaObj.spec_vals['fill'] = nodata_val

    def build_combined_masks_array(self):
        # type: () -> dict
        """Generates self.masks attribute (unsigned integer 8bit) from by concatenating all masks included in GMS obj.
        The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""

        arrays2combine = [aN for aN in ['mask_nodata', 'mask_clouds']
                          if hasattr(self, aN) and isinstance(getattr(self, aN), (GeoArray, np.ndarray))]
        if arrays2combine:
            self.log_for_fullArr_or_firstTile('Combining masks...')

            def get_data(arrName): return getattr(self, arrName).astype(np.uint8)[:, :, None]

            for aN in arrays2combine:
                if False in np.equal(getattr(self, aN), getattr(self, aN).astype(np.uint8)):
                    warnings.warn('Some pixel values of attribute %s changed during data type '
                                  'conversion within build_combined_masks_array().' % aN)

            # set self.masks
            self.masks = get_data(arrays2combine[0]) if len(arrays2combine) == 1 else \
                np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
            self.masks.bandnames = arrays2combine  # set band names of GeoArray (allows later indexing by band name)

            # set self.masks_meta
            nodataVal = DEF_D.get_outFillZeroSaturated(self.masks.dtype)[0]
            self.masks_meta = {'map info': self.MetaObj.map_info, 'coordinate system string': self.MetaObj.projection,
                               'bands': len(arrays2combine), 'band names': arrays2combine,
                               'data ignore value': nodataVal}

            return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
789
                    'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks}  # usually not needed
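
    # Illustration: afterwards self.masks carries the band names set above (e.g.
    # ['mask_nodata', 'mask_clouds']), so single masks can be re-extracted by band name:
    #     clouds = obj.masks['mask_clouds']  # GeoArray band-name indexing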

    def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None, update_spec_vals=False):
        # type: (str, int, bool) -> None
        """Applies self.mask_nodata to a saved ENVI file with the same X/Y dimensions as self.mask_nodata by setting
        all values where mask_nodata is 0 to the given nodata value.

        :param path_saved_ENVIhdr:  <str> The path of the ENVI file to apply the nodata mask to.
        :param custom_nodata_val:   <int> set the values of the given attribute to this value.
        :param update_spec_vals:    <bool> whether to update self.MetaObj.spec_vals['fill']
        """

        self.log_for_fullArr_or_firstTile('Applying nodata mask to saved ENVI file...')
        assert os.path.isfile(path_saved_ENVIhdr)
        assert hasattr(self, 'mask_nodata') and self.mask_nodata is not None
        if not path_saved_ENVIhdr.endswith('.hdr') and os.path.isfile(os.path.splitext(path_saved_ENVIhdr)[0] + '.hdr'):
            path_saved_ENVIhdr = os.path.splitext(path_saved_ENVIhdr)[0] + '.hdr'
        if custom_nodata_val is None:
            dtype_IDL = int(INP_R.read_ENVIhdr_to_dict(path_saved_ENVIhdr)['data type'])
            nodata_val = DEF_D.get_outFillZeroSaturated(DEF_D.dtype_lib_IDL_Python[dtype_IDL])[0]
        else:
            nodata_val = custom_nodata_val
        FileObj = spectral.open_image(path_saved_ENVIhdr)
        File_memmap = FileObj.open_memmap(writable=True)
        File_memmap[self.mask_nodata == 0] = nodata_val
        if update_spec_vals:
            self.MetaObj.spec_vals['fill'] = nodata_val

    def combine_tiles_to_ObjAttr(self, tiles, target_attr):
        # type: (list, str) -> None
        """Combines tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp(), to a single attribute.
        If CFG.inmem_serialization is False, the produced attribute is additionally written to disk.

        :param tiles:           <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
                                'col_start' and 'col_end'
        :param target_attr:     <str> the name of the attribute to be produced
        """

        warnings.warn("'combine_tiles_to_ObjAttr' is deprecated.", DeprecationWarning)
        assert tiles[0] and isinstance(tiles, list) and isinstance(tiles[0], dict), \
            "The 'tiles' argument has to be list of dictionaries with the keys 'desc', 'data', 'row_start'," \
            "'row_end', 'col_start' and 'col_end'."
        self.logger.info("Building L1A attribute '%s' by combining given tiles..." % target_attr)
        tiles = [tiles] if not isinstance(tiles, list) else tiles
        sampleTile = dict(tiles[0])
        target_shape = self.shape_fullArr if len(sampleTile['data'].shape) == 3 else self.shape_fullArr[:2]
        setattr(self, target_attr, np.empty(target_shape, dtype=sampleTile['data'].dtype))
        for tile in tiles:  # type: dict
            rS, rE, cS, cE = tile['row_start'], tile['row_end'], tile['col_start'], tile['col_end']
            if len(target_shape) == 3:
                getattr(self, target_attr)[rS:rE + 1, cS:cE + 1, :] = tile['data']
            else:
                getattr(self, target_attr)[rS:rE + 1, cS:cE + 1] = tile['data']
        if target_attr == 'arr':
            self.arr_desc = sampleTile['desc']
            self.arr_shape = 'cube' if len(self.arr.shape) == 3 else 'band' if len(self.arr.shape) == 2 else 'unknown'

            if not CFG.inmem_serialization:
                path_radref_file = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
                # path_radref_file = os.path.abspath('./testing/out/%s_TOA_Ref' % self.baseN)
                while not os.path.isdir(os.path.dirname(path_radref_file)):
                    try:
                        os.makedirs(os.path.dirname(path_radref_file))
                    except OSError as e:  # maybe not necessary anymore in Python 3
                        if not e.errno == 17:
                            raise
                GEOP.ndarray2gdal(self.arr, path_radref_file, importFile=self.MetaObj.Dataname, direction=3)
                self.MetaObj.Dataname = path_radref_file

    def write_tiles_to_ENVIfile(self, tiles, overwrite=True):
        # type: (list, bool) -> None
        """Writes tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp() to a single output ENVI file.

        :param tiles:           <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
                                'col_start' and 'col_end'
        :param overwrite:       whether to overwrite files that have been produced earlier
        """

        self.logger.info("Writing tiles '%s' temporarily to disk..." % tiles[0]['desc'])
        outpath = os.path.join(self.ExtractedFolder, '%s__%s.%s' % (self.baseN, tiles[0]['desc'], self.outInterleave))
        if CFG.target_radunit_optical in tiles[0]['desc'] or \
           CFG.target_radunit_thermal in tiles[0]['desc']:
            self.meta_odict = self.MetaObj.to_odict()  # important in order to keep geotransform/projection
            self.arr_desc = tiles[0]['desc']
            self.arr = outpath
            # self.arr = os.path.abspath('./testing/out/%s_TOA_Ref.bsq' % self.baseN)
            self.MetaObj.Dataname = self.arr
            self.arr_shape = \
                'cube' if len(tiles[0]['data'].shape) == 3 else 'band' if len(
                    tiles[0]['data'].shape) == 2 else 'unknown'
        elif tiles[0]['desc'] == 'masks':
            self.masks = outpath
        elif tiles[0]['desc'] == 'lonlat_arr':
            # outpath = os.path.join(os.path.abspath('./testing/out/'),'%s__%s.%s'
            #     %(self.baseN, tiles[0]['desc'], self.outInterleave))
            self.lonlat_arr = outpath  # FIXME
        outpath = os.path.splitext(outpath)[0] + '.hdr' if not outpath.endswith('.hdr') else outpath
        out_shape = self.shape_fullArr[:2] + ([tiles[0]['data'].shape[2]] if len(tiles[0]['data'].shape) == 3 else [1])
        OUT_W.Tiles_Writer(tiles, outpath, out_shape, tiles[0]['data'].dtype, self.outInterleave, self.meta_odict,
                           overwrite=overwrite)

    def to_MGRS_tiles(self, pixbuffer=10, v=False):
        # type: (int, bool) -> GMS_object
        """Returns a generator object where items represent the MGRS tiles for the GMS object.

        :param pixbuffer:   <int> a buffer in pixel values used to generate an overlap between the returned MGRS tiles
        :param v:           <bool> verbose mode
        :return:            <generator> of GMS_object instances representing the MGRS tiles
        """

        assert self.arr_shape == 'cube', "Only 'cube' objects can be cut into MGRS tiles. Got %s." % self.arr_shape
        self.logger.info('Cutting scene %s (entity ID %s) into MGRS tiles...' % (self.scene_ID, self.entity_ID))

        # get GeoDataFrame containing all overlapping MGRS tiles
        # (MGRS geometries completely within nodata area are excluded)
        GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(CFG.conn_database,
                                                         tgt_corners_lonlat=self.trueDataCornerLonLat)

        # calculate image coordinate bounds of the full GMS object for each MGRS tile within the GeoDataFrame
        gt = mapinfo2geotransform(self.meta_odict['map info'])

        def get_arrBounds(MGRStileObj):
            return list(np.array(MGRStileObj.poly_utm.buffer(pixbuffer * gt[1]).bounds)[[0, 2, 1, 3]])

        GDF_MGRS_tiles['MGRStileObj'] = list(GDF_MGRS_tiles['granuleid'].map(lambda mgrsTileID: MGRS_tile(mgrsTileID)))
        GDF_MGRS_tiles['map_bounds_MGRS'] = list(GDF_MGRS_tiles['MGRStileObj'].map(get_arrBounds))  # xmin,xmax,ymin,ymax
        # find first tile to log and assign 'logAtThisTile' later
        dictIDxminymin = {(b[0] + b[2]): ID for ID, b in
                          zip(GDF_MGRS_tiles['granuleid'], GDF_MGRS_tiles['map_bounds_MGRS'])}
        firstTile_ID = dictIDxminymin[min(dictIDxminymin.keys())]

        # ensure self.masks exists (does not exist in case of inmem_serialization mode
        # because in that case self.fill_from_disk() is skipped)
        if not hasattr(self, 'masks') or self.masks is None:
            self.build_combined_masks_array()  # creates self.masks and self.masks_meta

        # read whole dataset into RAM in order to speed up subsetting
        self.arr.to_mem()
        self.masks.to_mem()  # to_mem ensures that the whole dataset is present in memory

        # produce data for each MGRS tile in loop
        for GDF_idx, GDF_row in GDF_MGRS_tiles.iterrows():
            tileObj = self.get_subset_obj(mapBounds=GDF_row.map_bounds_MGRS,
                                          mapBounds_prj=EPSG2WKT(GDF_row['MGRStileObj'].EPSG),
                                          out_prj=EPSG2WKT(GDF_row['MGRStileObj'].EPSG),
                                          logmsg='Producing MGRS tile %s from scene %s (entity ID %s).'
                                                 % (GDF_row.granuleid, self.scene_ID, self.entity_ID),
                                          v=v)
            MGRS_tileID = GDF_row['granuleid']

            # set MGRS info
            tileObj.arr_shape = 'MGRS_tile'
            tileObj.MGRS_info = {'tile_ID': MGRS_tileID, 'grid1mil': MGRS_tileID[:3], 'grid100k': MGRS_tileID[3:]}

            # set logAtThisTile
            tileObj.logAtThisTile = MGRS_tileID == firstTile_ID

            # close logger of tileObj and of self in order to avoid logging permission errors
            tileObj.close_GMS_loggers()
            self.close_GMS_loggers()

            yield tileObj

        # set array attributes back to file path if they had been a filePath before
        if self.arr.filePath:
            self.arr.to_disk()
        if self.masks.filePath:
            self.masks.to_disk()
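
    # Usage sketch (illustrative): consume the generator and handle one MGRS tile at a time:
    #     for tile in obj.to_MGRS_tiles(pixbuffer=10, v=False):
    #         tile.arr_shape               # -> 'MGRS_tile'
    #         tile.MGRS_info['tile_ID']    # granule ID of the current tile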