# -*- coding: utf-8 -*-

import collections
import copy
import datetime
import glob
import json
import os
import re
import shutil
import sys
import warnings
import logging
from itertools import chain

import numpy as np
import spectral
from spectral.io import envi
from numba import jit
from pandas import DataFrame, read_csv

try:
    from osgeo import gdalnumeric
except ImportError:
    import gdalnumeric

from geoarray import GeoArray
from py_tools_ds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.geo.projection import EPSG2WKT
from py_tools_ds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
from sicor.options import get_options as get_ac_options

from ..misc.logging import GMS_logger as DatasetLogger
from ..model.mgrs_tile import MGRS_tile
from ..model.metadata import METADATA, get_dict_LayerOptTherm, metaDict_to_metaODict
from ..model.dataset import Dataset
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..options.config import GMS_config as CFG
from ..algorithms import geoprocessing as GEOP
from ..io import input_reader as INP_R
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import definition_dicts as DEF_D

__author__ = 'Daniel Scheffler'


class GMS_object(Dataset):
    def __init__(self, pathImage=''):
        # get all attributes of base class "Dataset"
        super(GMS_object, self).__init__()

        # add private attributes
        self._dict_LayerOptTherm = None
        self._cloud_masking_algorithm = None
        self._meta_odict = None
        self._coreg_info = None

        self.job_ID = CFG.ID
        # FIXME not needed anymore?:
        # self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['datasetid'],
        #                                {'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
        self.scenes_proc_ID = None  # set by Output writer after creation/update of db record in table scenes_proc
        self.mgrs_tiles_proc_ID = None  # set by Output writer after creation/update of db rec in table mgrs_tiles_proc
        self.MGRS_info = None

        # set paths
        self.path_cloud_class_obj = ''

        # handle initialization arguments
        if pathImage:
            # run the setter for 'arr' of the base class 'Dataset' which creates an instance of GeoArray
            self.arr = pathImage

    def __getstate__(self):
        """Defines how the attributes of GMS object are pickled."""

        self.close_loggers()
        del self.pathGen  # path generator can only be used for the current processing level

        # delete arrays if their in-mem size is too big to be pickled
        # => (avoids MaybeEncodingError: Error sending result: '[<gms_preprocessing.algorithms.L2C_P.L2C_object
        #    object at 0x7fc44f6399e8>]'. Reason: 'error("'i' format requires -2147483648 <= number <= 2147483647",)')
        if self.proc_level == 'L2C' and CFG.exec_mode == 'Flink':
            # FIXME check by bandname
            if self.mask_nodata is not None and self.masks.bands > 1 and self.mask_clouds is not None:
                del self.masks

        return self.__dict__

    def set_pathes(self):
        self.baseN = self.pathGen.get_baseN()
        self.path_procdata = self.pathGen.get_path_procdata()
        self.ExtractedFolder = self.pathGen.get_path_tempdir()
        self.path_logfile = self.pathGen.get_path_logfile()
        self.pathGen = PG.path_generator(self.__dict__)  # passes a logger in addition to previous attributes
        self.path_archive = self.pathGen.get_local_archive_path_baseN()

        if CFG.exec_mode == 'Python':
            self.path_InFilePreprocessor = os.path.join(self.ExtractedFolder, '%s%s_DN.bsq'
                                                        % (self.entity_ID,
                                                           ('_%s' % self.subsystem if self.subsystem else '')))
        else:  # Flink
            self.path_InFilePreprocessor = None  # None: keeps all produced data in memory (numpy array attributes)

        self.path_MetaPreprocessor = self.path_archive

    def validate_pathes(self):
        if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
            self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data "
                             "archive has been found at %s." % (self.sensor, self.entity_ID, self.path_archive))
            self.logger.info('Trying to download the dataset...')
            self.path_archive_valid = self._data_downloader(self.sensor, self.entity_ID)
        else:
            self.path_archive_valid = True

        if CFG.exec_mode == 'Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
            os.makedirs(self.ExtractedFolder)

        assert os.path.exists(self.path_archive), 'Invalid path to RAW data. File %s does not exist at %s.' \
                                                  % (os.path.basename(self.path_archive),
                                                     os.path.dirname(self.path_archive))
        assert isinstance(self.path_archive, str), 'Invalid path to RAW data. Got %s instead of string or unicode.' \
                                                   % type(self.path_archive)
        if CFG.exec_mode == 'Python' and self.ExtractedFolder:
            assert os.path.exists(self.ExtractedFolder), \
                'Invalid path for temporary files. Directory %s does not exist.' % self.ExtractedFolder

    @property
    def logger(self):
        if self._loggers_disabled:
            return None
        if self._logger and self._logger.handlers[:]:
            return self._logger
        else:
            self._logger = DatasetLogger('log__' + self.baseN, fmt_suffix=self.scene_ID, path_logfile=self.path_logfile,
                                         log_level=CFG.log_level, append=True)
            return self._logger

    @logger.setter
    def logger(self, logger):
        assert isinstance(logger, logging.Logger) or logger in ['not set', None], \
            "GMS_obj.logger cannot be set to %s." % logger

        # save prior logs
        # if logger is None and self._logger is not None:
        #    self.log += self.logger.captured_stream
        self._logger = logger

    @property
    def GMS_identifier(self):
        return collections.OrderedDict(zip(
            ['image_type', 'Satellite', 'Sensor', 'Subsystem', 'proc_level', 'dataset_ID', 'logger'],
            [self.image_type, self.satellite, self.sensor, self.subsystem, self.proc_level, self.dataset_ID,
             self.logger]))

    @property
    def MetaObj(self):
        if self._meta_odict:
            # if there is already a meta_odict -> create a new MetaObj from it (ensures synchronization!)
            self._MetaObj = METADATA(self.GMS_identifier).from_odict(self._meta_odict)
            del self.meta_odict
        elif not self._MetaObj:
            # if there is no meta_odict and no MetaObj -> create MetaObj by reading metadata from disk
            pass  # reading from disk should use L1A_P.L1A_object.import_metadata -> so just return None

        return self._MetaObj

    @MetaObj.setter
    def MetaObj(self, MetaObj):
        assert isinstance(MetaObj, METADATA), "'MetaObj' can only be set to an instance of METADATA class. " \
                                              "Got %s." % type(MetaObj)
        self._MetaObj = MetaObj

        # update meta_odict
        del self.meta_odict  # it is recreated if getter is used the next time

    @MetaObj.deleter
    def MetaObj(self):
        self._MetaObj = None

    @property
    def meta_odict(self):
        if self._MetaObj:
            # if there is already a MetaObj -> create new meta_odict from it (ensures synchronization!)
            self._meta_odict = self._MetaObj.to_odict()
            del self.MetaObj
        elif not self._meta_odict:
            # if there is no MetaObj and no meta_odict -> use MetaObj getter to read metadata from disk
            pass  # reading from disk should use L1A_P.L1A_object.import_metadata -> so just return None
            self._meta_odict = None

        return self._meta_odict

    @meta_odict.setter
    def meta_odict(self, odict):
        assert isinstance(odict, (collections.OrderedDict, dict)), "'meta_odict' can only be set to an instance of " \
                                                                   "collections.OrderedDict. Got %s." % type(odict)
        self._meta_odict = odict

        # update MetaObj
        del self.MetaObj  # it is recreated if getter is used the next time

    @meta_odict.deleter
    def meta_odict(self):
        self._meta_odict = None

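    # NOTE (editorial sketch, not part of the original code): the MetaObj/meta_odict
    # property pair above implements a two-way synchronization. Setting one
    # representation invalidates the cached other one, which is then lazily rebuilt:
    #
    #   obj.meta_odict = {'map info': [...]}  # setter deletes the cached METADATA instance
    #   meta = obj.MetaObj                    # recreated via METADATA(...).from_odict(...)
    #   obj.MetaObj = meta                    # setter deletes the cached odict again
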
    @property
    def dict_LayerOptTherm(self):
        if self._dict_LayerOptTherm:
            return self._dict_LayerOptTherm
        elif self.LayerBandsAssignment:
            self._dict_LayerOptTherm = get_dict_LayerOptTherm(self.identifier, self.LayerBandsAssignment)
            return self._dict_LayerOptTherm
        else:
            return None

    @property
    def georef(self):
        """Returns True if the current dataset can serve as spatial reference."""
        return True if self.image_type == 'RSD' and re.search('OLI', self.sensor, re.I) else False

    @property
    def coreg_needed(self):
        if self._coreg_needed is None:
            self._coreg_needed = not (self.dataset_ID == CFG.datasetid_spatial_ref)
        return self._coreg_needed

    @coreg_needed.setter
    def coreg_needed(self, value):
        self._coreg_needed = value

    @property
    def coreg_info(self):
        if not self._coreg_info:
            self._coreg_info = {
                'corrected_shifts_px': {'x': 0, 'y': 0},
                'corrected_shifts_map': {'x': 0, 'y': 0},
                'original map info': self.meta_odict['map info'],
                'updated map info': None,
                'reference scene ID': None,
                'reference entity ID': None,
                'reference geotransform': None,
                # reference projection must be the own projection in order to avoid overwriting with a wrong EPSG
                'reference projection': self.meta_odict['coordinate system string'],
                'reference extent': {'rows': None, 'cols': None},
                'reference grid': [list(CFG.spatial_ref_gridx),
                                   list(CFG.spatial_ref_gridy)],
                'success': False
            }

        return self._coreg_info

    @coreg_info.setter
    def coreg_info(self, val):
        self._coreg_info = val

    @property
    def resamp_needed(self):
        if self._resamp_needed is None:
            gt = mapinfo2geotransform(self.meta_odict['map info'])
            self._resamp_needed = not is_coord_grid_equal(gt, CFG.spatial_ref_gridx,
                                                          CFG.spatial_ref_gridy)
        return self._resamp_needed

    @resamp_needed.setter
    def resamp_needed(self, value):
        self._resamp_needed = value

    @property
    def masks(self):
        # if self.mask_nodata is not None and self.mask_clouds is not None and \
        #     self._masks is not None and self._masks.bands==1:

        #     self.build_combined_masks_array()

        return self._masks

    @masks.setter
    def masks(self, *geoArr_initArgs):
        """
        NOTE: This does not automatically update mask_nodata and mask_clouds BUT if mask_nodata and mask_clouds are
        None their getters will automatically synchronize!
        """

        if geoArr_initArgs[0] is not None:
            self._masks = GeoArray(*geoArr_initArgs)
            self._masks.nodata = 0
            self._masks.gt = self.arr.gt
            self._masks.prj = self.arr.prj
        else:
            del self.masks

    @masks.deleter
    def masks(self):
        self._masks = None

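    # NOTE (editorial sketch, not part of the original code): assigning to 'masks' does
    # not push values into mask_nodata/mask_clouds; as stated in the setter docstring,
    # their getters synchronize lazily while those attributes are still None, e.g.:
    #
    #   obj.masks = '/path/to/masks.bsq'  # becomes a GeoArray; nodata=0, gt/prj from obj.arr
    #   nodata = obj.mask_nodata          # getter may extract its band from obj.masks
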
    @property
    def cloud_masking_algorithm(self):
        if not self._cloud_masking_algorithm:
            self._cloud_masking_algorithm = CFG.cloud_masking_algorithm[self.satellite]
        return self._cloud_masking_algorithm

    @property
    def ac_options(self):
        """
        Returns the options dictionary needed as input for atmospheric correction. If an empty dictionary is returned,
        atmospheric correction is not yet available for the current sensor and will later be skipped.
        """

        if not self._ac_options:
            path_ac_options = PG.get_path_ac_options(self.GMS_identifier)

            if path_ac_options and os.path.exists(path_ac_options):
                # don't validate because options contain paths that do not exist on another server:
                opt_dict = get_ac_options(path_ac_options, validation=False)

                # update some file paths depending on the current environment
                opt_dict['DEM']['fn'] = CFG.path_dem_proc_srtm_90m
                opt_dict['ECMWF']['path_db'] = CFG.path_ECMWF_db
                opt_dict['S2Image'][
                    'S2_MSI_granule_path'] = None  # only a placeholder -> will always be None for GMS usage
                opt_dict['output'] = []  # outputs are not needed for GMS -> leave empty
                opt_dict['report']['report_path'] = os.path.join(self.pathGen.get_path_procdata(), '[TYPE]')
                if 'uncertainties' in opt_dict:
                    opt_dict['uncertainties']['snr_model'] = PG.get_path_snr_model(self.GMS_identifier)

                # opt_dict['AC']['n_cores'] = CFG.CPUs if CFG.allow_subMultiprocessing else 1

                self._ac_options = opt_dict
            else:
                self.logger.warning('There is no options file available for atmospheric correction. '
                                    'Atmospheric correction must be skipped.')

        return self._ac_options

    def get_copied_dict_and_props(self, remove_privates=False):
        # type: (bool) -> dict
        """Returns a copy of the current object dictionary including the current values of all object properties."""

        # loggers must be closed
        self.close_GMS_loggers()
        # this disables automatic recreation of loggers (otherwise loggers are created by using getattr()):
        self._loggers_disabled = True

        out_dict = self.__dict__.copy()

        # add properties
        property_names = [p for p in dir(self.__class__) if isinstance(getattr(self.__class__, p), property)]
        for propK in property_names:
            out_dict[propK] = copy.copy(getattr(self, propK))

        # remove private attributes
        if remove_privates:
            out_dict = {k: v for k, v in out_dict.items() if not k.startswith('_')}

        self._loggers_disabled = False  # enables automatic recreation of loggers

        return out_dict

    def attributes2dict(self, remove_privates=False):
        # type: (bool) -> dict
        """Returns a copy of the current object dictionary including the current values of selected object properties."""

        # loggers must be closed
        self.close_GMS_loggers()
        # this disables automatic recreation of loggers (otherwise loggers are created by using getattr()):
        self._loggers_disabled = True

        out_dict = self.__dict__.copy()

        # add some selected property values
        for i in ['GMS_identifier', 'LayerBandsAssignment', 'coreg_needed', 'coreg_info', 'resamp_needed',
                  'dict_LayerOptTherm', 'georef', 'meta_odict']:
            out_dict[i] = getattr(self, i)

        # remove private attributes
        if remove_privates:
            out_dict = {k: v for k, v in out_dict.items() if not k.startswith('_')}

        self._loggers_disabled = False  # enables automatic recreation of loggers

        return out_dict

    def _data_downloader(self, sensor, entity_ID):
        self.logger.info('Data downloader started.')
        success = False
        " > download source code for Landsat here < "
        if not success:
            self.logger.critical(
                "Download for %s dataset '%s' failed. No further processing possible." % (sensor, entity_ID))
            raise RuntimeError('Archive download failed.')
        return success

    def from_disk(self, tuple_GMS_subset):
        """Fills an already instantiated GMS object with data from disk. Excludes array attributes in Python mode.

        :param tuple_GMS_subset:    <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
        """

        path_GMS_file = tuple_GMS_subset[0]
        GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)

        # copy all attributes from GMS file (private attributes are not touched since they are not included in GMS file)
        self.meta_odict = GMSfileDict['meta_odict']  # set that first in order to make some getters and setters work
        for key, value in GMSfileDict.items():
            if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
                continue  # properties that should better be created on the fly
            try:
                setattr(self, key, value)
            except Exception:
                raise AttributeError("Can't set attribute %s." % key)

        self.acq_datetime = datetime.datetime.strptime(self.acq_datetime, '%Y-%m-%d %H:%M:%S.%f%z')
        self.arr_shape, self.arr_pos = tuple_GMS_subset[1]

        self.arr = self.pathGen.get_path_imagedata()
        # self.mask_nodata and self.mask_clouds are auto-synchronized via self.masks (see their getters):
        self.masks = self.pathGen.get_path_maskdata()
        return copy.copy(self)

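    # Editorial usage sketch (not part of the original code), assuming a GMS file
    # written earlier by to_GMS_file():
    #
    #   obj = GMS_object().from_disk(('/path/gms_file.gms', ['cube', None]))
    #   # ['cube', None] requests the full array; a tile subset passes its
    #   # arr_shape/arr_pos pair instead (the exact tile-shape keyword is an assumption)
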
    def from_sensor_subsystems(self, list_GMS_objs):
        # type: (list) -> GMS_object
        """Merge separate GMS objects belonging to the same scene-ID into ONE GMS object.

        :param list_GMS_objs:   <list> of GMS objects covering the same geographic area but representing different
                                sensor subsystems (e.g. 3 GMS_objects for Sentinel-2 10m/20m/60m bands)
        """

        # assertions
        assert len(list_GMS_objs) > 1, "'GMS_object.from_sensor_subsystems()' expects multiple input GMS objects. " \
                                       "Got %d." % len(list_GMS_objs)
        assert all([is_coord_grid_equal(list_GMS_objs[0].arr.gt, *obj.arr.xygrid_specs) for obj in list_GMS_objs[1:]]),\
            "The input GMS objects must have the same pixel grid. Received: %s" \
            % np.array([obj.arr.xygrid_specs for obj in list_GMS_objs])
        assert len(list(set([GMS_obj.proc_level for GMS_obj in list_GMS_objs]))) == 1, \
            "The input GMS objects for GMS_object.from_sensor_subsystems() must have the same processing level."
        subsystems = [GMS_obj.subsystem for GMS_obj in list_GMS_objs]
        assert len(subsystems) == len(list(set(subsystems))), \
            "The input 'list_GMS_objs' contains duplicates: %s" % subsystems

        # log
        list_GMS_objs[0].logger.info('Merging the subsystems %s to a single GMS object...'
                                     % ', '.join([GMS_obj.subsystem for GMS_obj in list_GMS_objs]))

        # find the common extent. NOTE: boundsMap is expected in the order [xmin,xmax,ymin,ymax]
        geoExtents = np.array([GMS_obj.arr.box.boundsMap for GMS_obj in list_GMS_objs])
        common_extent = (min(geoExtents[:, 0]), max(geoExtents[:, 1]), min(geoExtents[:, 2]), max(geoExtents[:, 3]))

        # MERGE METADATA
        # copy all attributes from the first input GMS file (private attributes are not touched)
        for key, value in list_GMS_objs[0].__dict__.copy().items():
            if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
                continue  # properties that should better be created on the fly
            try:
                setattr(self, key, value)
            except Exception:
                raise AttributeError("Can't set attribute %s." % key)

        # update LayerBandsAssignment and get full list of output bandnames
        from .metadata import get_LayerBandsAssignment
        # use identifier of first input GMS object for getting LBA (respects current proc_level):
        gms_idf = list_GMS_objs[0].GMS_identifier
        self.LayerBandsAssignment = get_LayerBandsAssignment(gms_idf, return_fullLBA=True)
        bandnames = ['B%s' % i if len(i) == 2 else 'B0%s' % i for i in self.LayerBandsAssignment]

        # update layer-dependent metadata with respect to remaining input GMS objects
        self.meta_odict.update({
            'band names': [('Band %s' % i) for i in self.LayerBandsAssignment],
            'LayerBandsAssignment': self.LayerBandsAssignment,
            'Subsystem': '',
            'PhysUnit': self.meta_odict['PhysUnit'],  # TODO can contain val_optical / val_thermal
        })
        self.subsystem = ''
        del self.pathGen  # must be refreshed because subsystem is now ''
        self.close_GMS_loggers()  # must also be refreshed because it depends on pathGen

        for attrN in ['SolIrradiance', 'CWL', 'FWHM', 'Offsets', 'OffsetsRef', 'Gains', 'GainsRef',
                      'ThermalConstK1', 'ThermalConstK2', 'ViewingAngle_arrProv', 'IncidenceAngle_arrProv']:

            # combine values from separate subsystems to a single value
            attrDic_fullLBA = {}
            for GMS_obj in list_GMS_objs:
                attr_val = getattr(GMS_obj.MetaObj, attrN)
                if isinstance(attr_val, list):
                    attrDic_fullLBA.update(dict(zip(GMS_obj.LayerBandsAssignment, attr_val)))
                elif isinstance(attr_val, (dict, collections.OrderedDict)):
                    attrDic_fullLBA.update(attr_val)
                else:
                    raise ValueError(attrN)

            # update the attribute in self.MetaObj
            if attrDic_fullLBA:
                val2set = [attrDic_fullLBA[bN] for bN in self.LayerBandsAssignment] \
                    if isinstance(getattr(list_GMS_objs[0].MetaObj, attrN), list) else attrDic_fullLBA
                setattr(self.MetaObj, attrN, val2set)

        # merge logfiles (read all logs into a DataFrame, sort it by the first column and write to a new logfile)
        for GMS_obj in list_GMS_objs:
            GMS_obj.close_GMS_loggers()  # close the loggers of the input objects
        paths_inLogs = [GMS_obj.pathGen.get_path_logfile() for GMS_obj in list_GMS_objs]
        allLogs_df = DataFrame()
        for log in paths_inLogs:
            df = read_csv(log, sep='\n', delimiter=':   ', header=None,
                          engine='python')  # engine suppresses a pandas warning
            allLogs_df = allLogs_df.append(
                df)  # FIXME this will log e.g. atm. corr 3 times for S2A -> use captured streams instead?

        allLogs_df = allLogs_df.sort_values(0)
        np.savetxt(self.pathGen.get_path_logfile(), np.array(allLogs_df), delimiter=':   ', fmt="%s")

        # MERGE ARRAY DATA
        # overwrite array data with merged arrays, clipped to common_extent and reordered by FullLayerBandsAssignment
        for attrname in ['arr', 'ac_errors', 'dem', 'mask_nodata', 'mask_clouds', 'mask_clouds_confidence', 'masks']:

            # get current attribute of each subsystem without running property getters
            all_arrays = [getattr(GMS_obj, '_%s' % attrname) for GMS_obj in list_GMS_objs]

            # get the same geographical extent for each input GMS object
            if len(set(tuple(ext) for ext in geoExtents.tolist())) > 1:
                # in case of different extents
                geoArrs_same_extent = []

                for geoArr in all_arrays:

                    if geoArr is not None:
                        # FIXME mask_clouds_confidence is until here no GeoArray
                        # FIXME -> has no nodata value -> calculation throws a warning
                        geoArr_same_extent = \
                            GeoArray(*geoArr.get_mapPos(
                                mapBounds=np.array(common_extent)[[0, 2, 1, 3]],  # pass (xmin, ymin, xmax, ymax)
                                mapBounds_prj=geoArr.prj),
                                     bandnames=list(geoArr.bandnames.keys()),
                                     nodata=geoArr.nodata)
                        geoArrs_same_extent.append(geoArr_same_extent)
                    else:
                        # e.g. in case of a cloud mask that is only appended to the GMS object with the same
                        # spatial resolution
                        geoArrs_same_extent.append(None)

            else:
                # skip get_mapPos() if all input GMS objects have the same extent
                geoArrs_same_extent = all_arrays

            # validate output GeoArrays
            if len([gA for gA in geoArrs_same_extent if gA is not None]) > 1:
                equal_bounds = all([geoArrs_same_extent[0].box.boundsMap == gA.box.boundsMap
                                    for gA in geoArrs_same_extent[1:]])
                equal_epsg = all([geoArrs_same_extent[0].epsg == gA.epsg for gA in geoArrs_same_extent[1:]])
                equal_xydims = all([geoArrs_same_extent[0].shape[:2] == gA.shape[:2] for gA in geoArrs_same_extent[1:]])
                if not all([equal_bounds, equal_epsg, equal_xydims]):
                    raise RuntimeError('Something went wrong during getting the same geographical extent for all the '
                                       'input GMS objects. The extents, projections or pixel dimensions of the '
                                       'calculated input GMS objects are not equal.')

            # set output arrays
            if attrname in ['arr', 'ac_errors'] and list(set(geoArrs_same_extent)) != [None]:
                # the bands of these arrays have to be reordered according to FullLayerBandsAssignment

                # check that each desired band name for the current attribute is provided by one of the input
                # GMS objects
                available_bandNs = list(chain.from_iterable([list(gA.bandnames) for gA in geoArrs_same_extent]))
                for bN in bandnames:
                    if bN not in available_bandNs:
                        raise ValueError("The given input GMS objects (subsystems) do not provide a bandname '%s' for "
                                         "the attribute '%s'. Available band names amongst all input GMS objects are: "
                                         "%s" % (bN, attrname, str(available_bandNs)))

                # merge arrays
                def get_band(bandN): return [gA[bandN] for gA in geoArrs_same_extent if gA and bandN in gA.bandnames][0]
                full_geoArr = GeoArray(np.dstack([get_band(bandN) for bandN in bandnames]),
                                       geoArrs_same_extent[0].gt, geoArrs_same_extent[0].prj,
                                       bandnames=bandnames,
                                       nodata=geoArrs_same_extent[0].nodata)
                setattr(self, attrname, full_geoArr)

            else:
                # masks, dem, mask_nodata, mask_clouds, mask_clouds_confidence
                if attrname == 'dem':
                    # use the DEM of the first input object
                    # (if the grid is the same, the DEMs should be the same anyway)
                    self.dem = geoArrs_same_extent[0]
                elif attrname == 'mask_nodata':
                    # must not be merged -> self.arr is already merged, so just recalculate it (np.all)
                    self.mask_nodata = self.calc_mask_nodata(overwrite=True)
                elif attrname == 'mask_clouds':
                    # possibly only present in ONE subsystem (set by atm. Corr.)
                    mask_clouds = [msk for msk in geoArrs_same_extent if msk is not None]
                    if len(mask_clouds) > 1:
                        raise ValueError('Expected mask clouds in only one subsystem. Got %s.' % len(mask_clouds))
                    self.mask_clouds = mask_clouds[0] if mask_clouds else None
                elif attrname == 'mask_clouds_confidence':
                    # possibly only present in ONE subsystem (set by atm. Corr.)
                    mask_clouds_conf = [msk for msk in geoArrs_same_extent if msk is not None]
                    if len(mask_clouds_conf) > 1:
                        raise ValueError(
                            'Expected mask_clouds_conf in only one subsystem. Got %s.' % len(mask_clouds_conf))
                    self.mask_clouds_confidence = mask_clouds_conf[0] if mask_clouds_conf else None
                elif attrname == 'masks':
                    # self.mask_nodata and self.mask_clouds will already be set here -> so just recreate it from there
                    self.masks = None

        # recreate self.masks
        self.build_combined_masks_array()

        # update array-dependent metadata
        self.meta_odict.update({
            'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
            'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })

        # set shape of full array
        self.shape_fullArr = self.arr.shape

        return copy.copy(self)

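    # Editorial usage sketch (not part of the original code): merging the three
    # Sentinel-2 subsystem objects (10m/20m/60m bands) into a single GMS object:
    #
    #   merged = GMS_object().from_sensor_subsystems([obj_10m, obj_20m, obj_60m])
    #   assert merged.subsystem == ''  # the subsystem identifier is cleared after merging
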
    def from_tiles(self, list_GMS_tiles):
        # type: (list) -> GMS_object
        """Merge separate GMS objects with different spatial coverage but belonging to the same scene-ID to ONE GMS object.

        :param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()
        """
        if 'IMapUnorderedIterator' in str(type(list_GMS_tiles)):
            list_GMS_tiles = list(list_GMS_tiles)

        # copy all attributes except of array attributes
        tile1 = list_GMS_tiles[0]
        for i in tile1.__dict__:
            if not callable(getattr(tile1, i)) and not isinstance(getattr(tile1, i), (np.ndarray, GeoArray)):
                setattr(self, i, getattr(tile1, i))

        # MERGE ARRAY-ATTRIBUTES
        list_arraynames = [i for i in tile1.__dict__ if not callable(getattr(tile1, i)) and
                           isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]
        # the list must start with '_arr', otherwise the setters will not work
        list_arraynames = ['_arr'] + [i for i in list_arraynames if i != '_arr']

        for arrname in list_arraynames:
            samplearray = getattr(tile1, arrname)
            assert isinstance(samplearray, (np.ndarray, GeoArray)), \
                'Received a %s object for attribute %s. Expected a numpy array or an instance of GeoArray.' \
                % (type(samplearray), arrname)
            is_3d = samplearray.ndim == 3
            bands = (samplearray.shape[2],) if is_3d else ()  # dynamic -> works for arr, cld_arr,...
            target_shape = tuple(self.shape_fullArr[:2]) + bands
            target_dtype = samplearray.dtype
            merged_array = self._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)

            # use setters if possible
            setattr(self, arrname if not arrname.startswith('_') else arrname[1:], merged_array)
            # NOTE: this asserts that each attribute starting with '_' has also a property with a setter!

        # UPDATE ARRAY-DEPENDENT ATTRIBUTES
        self.arr_shape = 'cube'
        self.arr_pos = None

        # update MetaObj attributes
        self.meta_odict.update({
            'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
            'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })

        # calculate data_corners_imXY (mask_nodata is always an array here because get_mapPos always returns an array)
        corners_imYX = calc_FullDataset_corner_positions(
            self.mask_nodata, assert_four_corners=False, algorithm='shapely')
        self.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX]  # [UL, UR, LL, LR]

        # calculate trueDataCornerLonLat
        data_corners_LatLon = pixelToLatLon(self.trueDataCornerPos, geotransform=self.arr.gt, projection=self.arr.prj)
        self.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]

        # calculate trueDataCornerUTM
        data_corners_utmYX = pixelToMapYX(self.trueDataCornerPos, geotransform=self.arr.gt,
                                          projection=self.arr.prj)  # FIXME asserts gt in UTM coordinates
        self.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]

        return copy.copy(self)

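    # Editorial usage sketch (not part of the original code), assuming 'tiles' holds GMS
    # objects created by cut_GMS_obj_into_blocks():
    #
    #   full_obj = GMS_object().from_tiles(tiles)
    #   assert full_obj.arr_shape == 'cube' and full_obj.arr_pos is None
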
    @staticmethod
    @jit
    def _numba_array_merger(list_GMS_tiles, arrname2merge, target_shape, target_dtype):
        # type: (list, str, tuple, np.dtype) -> np.ndarray
        """
        Private function, e.g. called by merge_GMS_tiles_to_GMS_obj(), in order to speed up array merging.

        :param list_GMS_tiles:  list of GMS object tiles to be merged
        :param arrname2merge:   name of the array attribute to be merged
        :param target_shape:    shape of the merged output array
        :param target_dtype:    data type of the merged output array
        :return:                merged full-size array
        """

        out_arr = np.empty(target_shape, dtype=target_dtype)
        for idx, tile in enumerate(list_GMS_tiles):
            rowStart, rowEnd = tile.arr_pos[0]
            colStart, colEnd = tile.arr_pos[1]
            out_arr[rowStart:rowEnd + 1, colStart:colEnd + 1] = getattr(tile, arrname2merge)
        return out_arr

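    # Editorial sketch (not part of the original code) of the arr_pos semantics used by
    # _numba_array_merger() above: each tile stores inclusive row/column bounds within
    # the full array, hence the rowEnd + 1 / colEnd + 1 slicing:
    #
    #   tile.arr_pos == ((0, 99), (0, 49))  # rows 0..99, columns 0..49
    #   out_arr[0:100, 0:50] = tile_data    # hypothetical tile covering that window
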
    def log_for_fullArr_or_firstTile(self, log_msg, subset=None):
        """Send a message to the logger only if the full array or the first tile is currently being processed.
        This function can be called when processing any tile, but the log message will only be sent for the first tile.

        :param log_msg:  the log message to be logged
        :param subset:   subset argument as sent to e.g. DN2TOARadRefTemp that indicates which tile is to be processed.
                         Not needed if self.arr_pos is not None.
        """
        if subset is None and \
            (self.arr_shape == 'cube' or self.arr_pos is None or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]) or\
                subset == ['cube', None] or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
                hasattr(self, 'logAtThisTile') and getattr(self, 'logAtThisTile'):  # cube or 1st tile
            self.logger.info(log_msg)
        else:
            pass

    def apply_nodata_mask_to_ObjAttr(self, attrname, out_nodata_val=None):
        # type: (str,int) -> None
        """Applies self.mask_nodata to the specified array attribute by setting all values where mask_nodata is 0 to the
        given nodata value.

        :param attrname:         The attribute to apply the nodata mask to. Must be an array attribute or
                                 a string path to a previously saved ENVI-file.
        :param out_nodata_val:   set the values of the given attribute to this value.
        """

        assert hasattr(self, attrname)

        if getattr(self, attrname) is not None:

            if isinstance(getattr(self, attrname), str):
                update_spec_vals = (attrname == 'arr')
                self.apply_nodata_mask_to_saved_ENVIfile(getattr(self, attrname), out_nodata_val, update_spec_vals)
            else:
                assert isinstance(getattr(self, attrname), (np.ndarray, GeoArray)), \
                    'L1A_obj.%s must be a numpy array or an instance of GeoArray. Got type %s.' \
                    % (attrname, type(getattr(self, attrname)))
                assert hasattr(self, 'mask_nodata') and self.mask_nodata is not None

                self.log_for_fullArr_or_firstTile('Applying nodata mask to L1A_object.%s...' % attrname)

                nodata_val = out_nodata_val if out_nodata_val else \
                    DEF_D.get_outFillZeroSaturated(getattr(self, attrname).dtype)[0]
                getattr(self, attrname)[self.mask_nodata.astype(np.int8) == 0] = nodata_val

                if attrname == 'arr':
                    self.MetaObj.spec_vals['fill'] = nodata_val

    def build_combined_masks_array(self):
        # type: () -> dict
        """Generates the self.masks attribute (unsigned 8-bit integer) by concatenating all masks included in the GMS
        object. The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""

        arrays2combine = [aN for aN in ['mask_nodata', 'mask_clouds']
                          if hasattr(self, aN) and isinstance(getattr(self, aN), (GeoArray, np.ndarray))]
        if arrays2combine:
            self.log_for_fullArr_or_firstTile('Combining masks...')

            def get_data(arrName): return getattr(self, arrName).astype(np.uint8)[:, :, None]

            for aN in arrays2combine:
                if False in np.equal(getattr(self, aN), getattr(self, aN).astype(np.uint8)):
                    warnings.warn('Some pixel values of attribute %s changed during data type '
                                  'conversion within build_combined_masks_array().' % aN)

            # set self.masks
            self.masks = get_data(arrays2combine[0]) if len(arrays2combine) == 1 else \
                np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
            self.masks.bandnames = arrays2combine  # set band names of GeoArray (allows later indexing by band name)

            # set self.masks_meta
            nodataVal = DEF_D.get_outFillZeroSaturated(self.masks.dtype)[0]
            self.masks_meta = {'map info': self.MetaObj.map_info, 'coordinate system string': self.MetaObj.projection,
                               'bands': len(arrays2combine), 'band names': arrays2combine,
                               'data ignore value': nodataVal}

            return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
                    'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks}  # usually not needed

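    # Editorial usage sketch (not part of the original code): because
    # build_combined_masks_array() assigns band names to the GeoArray, single masks can
    # afterwards be accessed by name:
    #
    #   obj.build_combined_masks_array()
    #   nodata_mask = obj.masks['mask_nodata']  # GeoArray indexing by band name
    #   clouds_mask = obj.masks['mask_clouds']  # only present if mask_clouds existed
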
    def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None, update_spec_vals=False):
        # type: (str,int,bool) -> None
        """Applies self.mask_nodata to a saved ENVI file with the same X/Y dimensions as self.mask_nodata by setting
        all values where mask_nodata is 0 to the given nodata value.

        :param path_saved_ENVIhdr:  <str> The path of the ENVI file to apply the nodata mask to.
        :param custom_nodata_val:   <int> set the values of the given attribute to this value.
        :param update_spec_vals:    <bool> whether to update self.MetaObj.spec_vals['fill']
        """

        self.log_for_fullArr_or_firstTile('Applying nodata mask to saved ENVI file...')
        assert os.path.isfile(path_saved_ENVIhdr)
        assert hasattr(self, 'mask_nodata') and self.mask_nodata is not None
        if not path_saved_ENVIhdr.endswith('.hdr') and os.path.isfile(os.path.splitext(path_saved_ENVIhdr)[0] + '.hdr'):
            path_saved_ENVIhdr = os.path.splitext(path_saved_ENVIhdr)[0] + '.hdr'
        if custom_nodata_val is None:
            dtype_IDL = int(INP_R.read_ENVIhdr_to_dict(path_saved_ENVIhdr)['data type'])
            nodata_val = DEF_D.get_outFillZeroSaturated(DEF_D.dtype_lib_IDL_Python[dtype_IDL])[0]
        else:
            nodata_val = custom_nodata_val
        FileObj = spectral.open_image(path_saved_ENVIhdr)
        File_memmap = FileObj.open_memmap(writable=True)
        File_memmap[self.mask_nodata == 0] = nodata_val
        if update_spec_vals:
            self.MetaObj.spec_vals['fill'] = nodata_val

    def combine_tiles_to_ObjAttr(self, tiles, target_attr):
        # type: (list,str) -> None
        """Combines tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp(), to a single attribute.
        If CFG.exec_mode == 'Python', the produced attribute is additionally written to disk.

        :param tiles:           <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
                                'col_start' and 'col_end'
        :param target_attr:     <str> the name of the attribute to be produced
        """

        warnings.warn("'combine_tiles_to_ObjAttr' is deprecated.", DeprecationWarning)
        assert tiles[0] and isinstance(tiles, list) and isinstance(tiles[0], dict), \
            "The 'tiles' argument has to be list of dictionaries with the keys 'desc', 'data', 'row_start'," \
            "'row_end', 'col_start' and 'col_end'."
        self.logger.info("Building L1A attribute '%s' by combining given tiles..." % target_attr)
        tiles = [tiles] if not isinstance(tiles, list) else tiles
        sampleTile = dict(tiles[0])
        target_shape = self.shape_fullArr if len(sampleTile['data'].shape) == 3 else self.shape_fullArr[:2]
        setattr(self, target_attr, np.empty(target_shape, dtype=sampleTile['data'].dtype))
        for tile in tiles:  # type: dict
            rS, rE, cS, cE = tile['row_start'], tile['row_end'], tile['col_start'], tile['col_end']
            if len(target_shape) == 3:
                getattr(self, target_attr)[rS:rE + 1, cS:cE + 1, :] = tile['data']
            else:
                getattr(self, target_attr)[rS:rE + 1, cS:cE + 1] = tile['data']
        if target_attr == 'arr':
            self.arr_desc = sampleTile['desc']
            self.arr_shape = 'cube' if len(self.arr.shape) == 3 else 'band' if len(self.arr.shape) == 2 else 'unknown'

            if CFG.exec_mode == 'Python':  # and not 'Flink'
                path_radref_file = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
                # path_radref_file = os.path.abspath('./testing/out/%s_TOA_Ref' % self.baseN)
                while not os.path.isdir(os.path.dirname(path_radref_file)):
                    try:
                        os.makedirs(os.path.dirname(path_radref_file))
                    except OSError as e:  # maybe not necessary anymore in Python 3
                        if not e.errno == 17:
                            raise
                GEOP.ndarray2gdal(self.arr, path_radref_file, importFile=self.MetaObj.Dataname, direction=3)
                self.MetaObj.Dataname = path_radref_file

    def write_tiles_to_ENVIfile(self, tiles, overwrite=True):
        # type: (list,bool) -> None
        """Writes tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp() to a single output ENVI file.

        :param tiles:           <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
                                'col_start' and 'col_end'
        :param overwrite:       whether to overwrite files that have been produced earlier
        """

        self.logger.info("Writing tiles '%s' temporarily to disk..." % tiles[0]['desc'])
        outpath = os.path.join(self.ExtractedFolder, '%s__%s.%s' % (self.baseN, tiles[0]['desc'], self.outInterleave))
        if CFG.conversion_type_optical in tiles[0]['desc'] or \
           CFG.conversion_type_thermal in tiles[0]['desc']:
            self.meta_odict = self.MetaObj.to_odict()  # important in order to keep geotransform/projection
            self.arr_desc = tiles[0]['desc']
            self.arr = outpath
            # self.arr = os.path.abspath('./testing/out/%s_TOA_Ref.bsq' % self.baseN)
            self.MetaObj.Dataname = self.arr
            self.arr_shape = \
                'cube' if len(tiles[0]['data'].shape) == 3 else \
                'band' if len(tiles[0]['data'].shape) == 2 else 'unknown'
        elif tiles[0]['desc'] == 'masks':
            self.masks = outpath
        elif tiles[0]['desc'] == 'lonlat_arr':
            # outpath = os.path.join(os.path.abspath('./testing/out/'),'%s__%s.%s'
            #     %(self.baseN, tiles[0]['desc'], self.outInterleave))
            self.lonlat_arr = outpath  # FIXME
        outpath = os.path.splitext(outpath)[0] + '.hdr' if not outpath.endswith('.hdr') else outpath
        out_shape = self.shape_fullArr[:2] + ([tiles[0]['data'].shape[2]] if len(tiles[0]['data'].shape) == 3 else [1])
        OUT_W.Tiles_Writer(tiles, outpath, out_shape, tiles[0]['data'].dtype, self.outInterleave, self.meta_odict,
                           overwrite=overwrite)

    def to_MGRS_tiles(self, pixbuffer=10, v=False):
        # type: (int, bool) -> Generator
        """Returns a generator object where items represent the MGRS tiles for the GMS object.

        :param pixbuffer:   <int> a buffer in pixel values used to generate an overlap between the returned MGRS tiles
        :param v:           <bool> verbose mode
        :return:            <generator> of GMS_object instances (one per overlapping MGRS tile)
        """

        assert self.arr_shape == 'cube', "Only 'cube' objects can be cut into MGRS tiles. Got %s." % self.arr_shape
        self.logger.info('Cutting scene %s (entity ID %s) into MGRS tiles...' % (self.scene_ID, self.entity_ID))

        # get GeoDataFrame containing all overlapping MGRS tiles
        # (MGRS geometries completely within nodata area are excluded)
        GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(CFG.conn_database,
                                                         tgt_corners_lonlat=self.trueDataCornerLonLat)

        # calculate image coordinate bounds of the full GMS object for each MGRS tile within the GeoDataFrame
        gt = mapinfo2geotransform(self.meta_odict['map info'])

        def get_arrBounds(MGRStileObj):
            return list(np.array(MGRStileObj.poly_utm.buffer(pixbuffer * gt[1]).bounds)[[0, 2, 1, 3]])

        GDF_MGRS_tiles['MGRStileObj'] = list(GDF_MGRS_tiles['granuleid'].map(lambda mgrsTileID: MGRS_tile(mgrsTileID)))
        # map_bounds_MGRS is in the order [xmin, xmax, ymin, ymax]
        GDF_MGRS_tiles['map_bounds_MGRS'] = list(GDF_MGRS_tiles['MGRStileObj'].map(get_arrBounds))

        # find first tile to log and assign 'logAtThisTile' later
        dictIDxminymin = {(b[0] + b[2]): ID for ID, b in
                          zip(GDF_MGRS_tiles['granuleid'], GDF_MGRS_tiles['map_bounds_MGRS'])}
        firstTile_ID = dictIDxminymin[min(dictIDxminymin.keys())]

        # ensure self.masks exists (does not exist in Flink mode because in that case self.fill_from_disk() is skipped)
        if not hasattr(self, 'masks') or self.masks is None:
            self.build_combined_masks_array()  # creates self.masks and self.masks_meta

        # read the whole dataset into RAM in order to speed up subsetting
        self.arr.to_mem()
        self.masks.to_mem()  # to_mem ensures that the whole dataset is present in memory

        # produce data for each MGRS tile in loop
        for GDF_idx, GDF_row in GDF_MGRS_tiles.iterrows():
            tileObj = self.get_subset_obj(mapBounds=GDF_row.map_bounds_MGRS,
                                          mapBounds_prj=EPSG2WKT(GDF_row['MGRStileObj'].EPSG),
                                          out_prj=EPSG2WKT(GDF_row['MGRStileObj'].EPSG),
                                          logmsg='Producing MGRS tile %s from scene %s (entity ID %s).'
                                                 % (GDF_row.granuleid, self.scene_ID, self.entity_ID),
                                          v=v)
            MGRS_tileID = GDF_row['granuleid']

            # set MGRS info
            tileObj.arr_shape = 'MGRS_tile'
            tileObj.MGRS_info = {'tile_ID': MGRS_tileID, 'grid1mil': MGRS_tileID[:3], 'grid100k': MGRS_tileID[3:]}

            # set logAtThisTile
            tileObj.logAtThisTile = MGRS_tileID == firstTile_ID

            # close logger of tileObj and of self in order to avoid logging permission errors
            tileObj.close_GMS_loggers()
            self.close_GMS_loggers()

            yield tileObj

        # set array attributes back to file path if they had been a filePath before
        if self.arr.filePath:
            self.arr.to_disk()
        if self.masks.filePath:
            self.masks.to_disk()

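    # Editorial usage sketch (not part of the original code): to_MGRS_tiles() is a
    # generator, so the MGRS tiles are produced lazily while iterating:
    #
    #   for tile in gms_obj.to_MGRS_tiles(pixbuffer=10):
    #       process(tile)  # 'process' is a hypothetical consumer; each tile is a GMS_object
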
    def to_GMS_file(self, path_gms_file=None):
        self.close_GMS_loggers()

        # make sure meta_odict is present
        self.meta_odict = self.meta_odict

        dict2write = self.attributes2dict(remove_privates=True)
        dict2write['arr_shape'], dict2write['arr_pos'] = ['cube', None]
        path_gms_file = path_gms_file if path_gms_file else self.pathGen.get_path_gmsfile()