Commit 26034202 authored by Daniel Scheffler
Browse files

Converted all regular expression strings to raw strings. Revised code style in metadata.py.


Signed-off-by: Daniel Scheffler <danschef@gfz-potsdam.de>
parent f210e3f3
Pipeline #8003 passed with stage
in 17 minutes and 48 seconds
...@@ -565,6 +565,8 @@ Fixes and improvements: ...@@ -565,6 +565,8 @@ Fixes and improvements:
* Added .coveragerc * Added .coveragerc
* Modified coverage section in Makefile * Modified coverage section in Makefile
* Removed pyhdf from automatically installed setup requirements * Removed pyhdf from automatically installed setup requirements
* Converted all regular expression strings to raw strings.
* Revised code style in metadata.py.
0.4.0 (2017-06-26) 0.4.0 (2017-06-26)
......
...@@ -75,13 +75,13 @@ class L1A_object(GMS_object): ...@@ -75,13 +75,13 @@ class L1A_object(GMS_object):
self.proc_status = proc_status or 'initialized' # if proc_status = 'running' is given by L1A_map self.proc_status = proc_status or 'initialized' # if proc_status = 'running' is given by L1A_map
def import_rasterdata(self): def import_rasterdata(self):
if re.search("ALOS", self.satellite, re.I): if re.search(r"ALOS", self.satellite, re.I):
'''First 22 lines are nodata: = maybe due to an issue of the GDAL CEOS driver. '''First 22 lines are nodata: = maybe due to an issue of the GDAL CEOS driver.
But: UL of metadata refers to [row =0, col=21]! So the imported GeoTransform is correct when But: UL of metadata refers to [row =0, col=21]! So the imported GeoTransform is correct when
the first 21 columns are deleted.''' the first 21 columns are deleted.'''
self.archive_to_rasObj(self.path_archive, self.path_InFilePreprocessor, self.archive_to_rasObj(self.path_archive, self.path_InFilePreprocessor,
subset=['block', [[None, None], [21, None]]]) subset=['block', [[None, None], [21, None]]])
elif re.search("Terra", self.satellite, re.I): elif re.search(r"Terra", self.satellite, re.I):
self.ASTER_HDF_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor) self.ASTER_HDF_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)
else: else:
self.archive_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor) self.archive_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)
...@@ -113,23 +113,23 @@ class L1A_object(GMS_object): ...@@ -113,23 +113,23 @@ class L1A_object(GMS_object):
image_files = [] image_files = []
is_ALOS_Landsat_S2 = \ is_ALOS_Landsat_S2 = \
re.search('ALOS', self.satellite) or re.search('Landsat', self.satellite) or \ re.search(r'ALOS', self.satellite) or re.search(r'Landsat', self.satellite) or \
re.search('Sentinel-2', self.satellite) re.search(r'Sentinel-2', self.satellite)
n_files2search = len(full_LayerBandsAssignment) if is_ALOS_Landsat_S2 else 1 n_files2search = len(full_LayerBandsAssignment) if is_ALOS_Landsat_S2 else 1
for File in HLP_F.sorted_nicely(files_in_archive): for File in HLP_F.sorted_nicely(files_in_archive):
search_res = \ search_res = \
re.search("IMG-0[0-9]-[\s\S]*", File) if re.search('ALOS', self.satellite) else \ re.search(r"IMG-0[0-9]-[\s\S]*", File) if re.search(r'ALOS', self.satellite) else \
re.search("[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search('Landsat', self.satellite) else \ re.search(r"[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search(r'Landsat', self.satellite) else \
re.search("[0-9]*.tif", File) if re.search('RapidEye', self.satellite) else \ re.search(r"[0-9]*.tif", File) if re.search(r'RapidEye', self.satellite) else \
re.search("imagery.tif", File) if re.search('SPOT', self.satellite) else \ re.search(r"imagery.tif", File) if re.search(r'SPOT', self.satellite) else \
re.search("[\S]*.SAFE/GRANULE/%s/IMG_DATA/[\S]*_B[0-9][\S]*.jp2" re.search(r"[\S]*.SAFE/GRANULE/%s/IMG_DATA/[\S]*_B[0-9][\S]*.jp2"
% self.entity_ID, File) if re.search('Sentinel-2', self.satellite) else None % self.entity_ID, File) if re.search(r'Sentinel-2', self.satellite) else None
if search_res: if search_res:
if re.search('Sentinel-2', self.satellite): if re.search(r'Sentinel-2', self.satellite):
# add only those files that are corresponding to subsystem (e.g. S2A10: fullLBA = ['2','3','4','8']) # add only those files that are corresponding to subsystem (e.g. S2A10: fullLBA = ['2','3','4','8'])
if 1 in [1 if re.search("[\S]*_B[0]?%s.jp2" % LBAn, os.path.basename(File)) else 0 if 1 in [1 if re.search(r"[\S]*_B[0]?%s.jp2" % LBAn, os.path.basename(File)) else 0
for LBAn in full_LayerBandsAssignment]: for LBAn in full_LayerBandsAssignment]:
image_files.append(File) image_files.append(File)
else: else:
...@@ -141,7 +141,7 @@ class L1A_object(GMS_object): ...@@ -141,7 +141,7 @@ class L1A_object(GMS_object):
# validate number of expected files # # validate number of expected files #
##################################### #####################################
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31): if re.search(r'ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
expected_files_count = 2 * len(full_LayerBandsAssignment) expected_files_count = 2 * len(full_LayerBandsAssignment)
else: else:
expected_files_count = len(full_LayerBandsAssignment) expected_files_count = len(full_LayerBandsAssignment)
...@@ -299,9 +299,9 @@ class L1A_object(GMS_object): ...@@ -299,9 +299,9 @@ class L1A_object(GMS_object):
self.arr_desc = \ self.arr_desc = \
'DN' if self.MetaObj.PhysUnit == 'DN' else \ 'DN' if self.MetaObj.PhysUnit == 'DN' else \
'Rad' if self.MetaObj.PhysUnit == "W * m-2 * sr-1 * micrometer-1" else \ 'Rad' if self.MetaObj.PhysUnit == "W * m-2 * sr-1 * micrometer-1" else \
'TOA_Ref' if re.search('TOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \ 'TOA_Ref' if re.search(r'TOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'BOA_Ref' if re.search('BOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \ 'BOA_Ref' if re.search(r'BOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'Temp' if re.search('Degrees Celsius', self.MetaObj.PhysUnit, re.I) else None 'Temp' if re.search(r'Degrees Celsius', self.MetaObj.PhysUnit, re.I) else None
assert self.arr_desc, 'GMS_obj contains an unexpected physical unit: %s' % self.MetaObj.PhysUnit assert self.arr_desc, 'GMS_obj contains an unexpected physical unit: %s' % self.MetaObj.PhysUnit
...@@ -391,7 +391,7 @@ class L1A_object(GMS_object): ...@@ -391,7 +391,7 @@ class L1A_object(GMS_object):
(PI * U__earth_sun_distance_correction_factor); (PI * U__earth_sun_distance_correction_factor);
L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos( L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos(
Z__Sun_Angles_Grid_Zenith_Values)) / PI;""" Z__Sun_Angles_Grid_Zenith_Values)) / PI;"""
if re.search('Sentinel-2', self.satellite, re.I): if re.search(r'Sentinel-2', self.satellite, re.I):
warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when ' warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when '
'calculating radiance from TOA Reflectance. ESA provides only 12 gain values for ' 'calculating radiance from TOA Reflectance. ESA provides only 12 gain values for '
'13 bands and it not clear for which bands the gains are provided.') '13 bands and it not clear for which bands the gains are provided.')
...@@ -456,7 +456,7 @@ class L1A_object(GMS_object): ...@@ -456,7 +456,7 @@ class L1A_object(GMS_object):
os.chdir(os.path.dirname(self.path_archive)) os.chdir(os.path.dirname(self.path_archive))
rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger) rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
if rasObj.geotransform == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) and rasObj.projection == '': if rasObj.geotransform == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) and rasObj.projection == '':
if re.search('ALOS', self.satellite) and self.MetaObj.ProcLCode == '1B2': if re.search(r'ALOS', self.satellite) and self.MetaObj.ProcLCode == '1B2':
self.GeoTransProj_ok, self.GeoAlign_ok = False, True self.GeoTransProj_ok, self.GeoAlign_ok = False, True
else: else:
self.GeoTransProj_ok, self.GeoAlign_ok = False, False self.GeoTransProj_ok, self.GeoAlign_ok = False, False
...@@ -547,7 +547,7 @@ class L1A_object(GMS_object): ...@@ -547,7 +547,7 @@ class L1A_object(GMS_object):
'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask." 'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...') self.logger.info('Calculating true data corner positions (image and world coordinates)...')
# if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31, # if re.search(r'ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
# tzinfo=datetime.timezone.utc): # tzinfo=datetime.timezone.utc):
if is_dataset_provided_as_fullScene(self.GMS_identifier): if is_dataset_provided_as_fullScene(self.GMS_identifier):
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy', self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
...@@ -577,7 +577,7 @@ class L1A_object(GMS_object): ...@@ -577,7 +577,7 @@ class L1A_object(GMS_object):
else: else:
if re.search('AVNIR', self.sensor): if re.search(r'AVNIR', self.sensor):
self.fullSceneCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy', self.fullSceneCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
assert_four_corners=False) assert_four_corners=False)
# set true data corner positions (lon/lat coordinates) # set true data corner positions (lon/lat coordinates)
...@@ -588,7 +588,7 @@ class L1A_object(GMS_object): ...@@ -588,7 +588,7 @@ class L1A_object(GMS_object):
else: else:
# RapidEye or Sentinel-2 data # RapidEye or Sentinel-2 data
if re.search('Sentinel-2', self.satellite): if re.search(r'Sentinel-2', self.satellite):
# get fullScene corner coordinates by database query # get fullScene corner coordinates by database query
# -> calculate footprints for all granules of the same S2 datatake # -> calculate footprints for all granules of the same S2 datatake
# -> merge them and calculate overall corner positions # -> merge them and calculate overall corner positions
......
...@@ -269,7 +269,7 @@ class AtmCorr(object): ...@@ -269,7 +269,7 @@ class AtmCorr(object):
# append AtmCorr object to input L1C objects # append AtmCorr object to input L1C objects
# [setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization # [setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization
if not re.search('Sentinel-2', self.inObjs[0].satellite, re.I): if not re.search(r'Sentinel-2', self.inObjs[0].satellite, re.I):
self.logger.debug('Calculation of acquisition geometry arrays is currently only validated for Sentinel-2!') self.logger.debug('Calculation of acquisition geometry arrays is currently only validated for Sentinel-2!')
# validation possible by comparing S2 angles provided by ESA with own angles # TODO # validation possible by comparing S2 angles provided by ESA with own angles # TODO
...@@ -649,7 +649,7 @@ class AtmCorr(object): ...@@ -649,7 +649,7 @@ class AtmCorr(object):
:return: <np.ndarray> 2D array (with 20m resolution in case of Sentinel-2) :return: <np.ndarray> 2D array (with 20m resolution in case of Sentinel-2)
""" """
# determine which input GMS object is used to generate DEM # determine which input GMS object is used to generate DEM
if re.search('Sentinel-2', self.inObjs[0].satellite): if re.search(r'Sentinel-2', self.inObjs[0].satellite):
# in case of Sentinel-2 the 20m DEM must be passed # in case of Sentinel-2 the 20m DEM must be passed
inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd == 20] inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd == 20]
if not inObj4dem: if not inObj4dem:
......
...@@ -481,7 +481,7 @@ class Cloud_Mask_Creator(object): ...@@ -481,7 +481,7 @@ class Cloud_Mask_Creator(object):
self.GMS_obj.logger.info("Calculating cloud mask based on '%s' algorithm..." % self.algorithm) self.GMS_obj.logger.info("Calculating cloud mask based on '%s' algorithm..." % self.algorithm)
if self.algorithm == 'FMASK': if self.algorithm == 'FMASK':
if re.search('Landsat', self.GMS_obj.satellite, re.I): if re.search(r'Landsat', self.GMS_obj.satellite, re.I):
FMR = FMASK_Runner_Landsat(self.GMS_obj.path_archive, self.GMS_obj.satellite) FMR = FMASK_Runner_Landsat(self.GMS_obj.path_archive, self.GMS_obj.satellite)
else: else:
......
...@@ -90,7 +90,7 @@ class GEOPROCESSING(object): ...@@ -90,7 +90,7 @@ class GEOPROCESSING(object):
# '/vsitar' '.gz': '/vsigzip'} # '/vsitar' '.gz': '/vsigzip'}
p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz'] p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz']
if len(geodata.split(i)) > 1 and geodata.split(i)[1].startswith('/')][0] if len(geodata.split(i)) > 1 and geodata.split(i)[1].startswith('/')][0]
path2check = os.path.abspath('.' + re.search('/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1)) path2check = os.path.abspath('.' + re.search(r'/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1))
assert os.path.exists(path2check), "ERROR: data %s does not exist!" % path2check assert os.path.exists(path2check), "ERROR: data %s does not exist!" % path2check
assert self.inDs is not None, "ERROR: Could not open %s!" % self.filename assert self.inDs is not None, "ERROR: Could not open %s!" % self.filename
elif isinstance(geodata, gdal.Dataset): elif isinstance(geodata, gdal.Dataset):
...@@ -682,7 +682,7 @@ class GEOPROCESSING(object): ...@@ -682,7 +682,7 @@ class GEOPROCESSING(object):
with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF: with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
lines = inF.readlines() lines = inF.readlines()
outContent = ''.join([i for i in lines if not re.search('map info', i, re.I)]) outContent = ''.join([i for i in lines if not re.search(r'map info', i, re.I)])
with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF: with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
outF.write(outContent) outF.write(outContent)
...@@ -696,7 +696,7 @@ class GEOPROCESSING(object): ...@@ -696,7 +696,7 @@ class GEOPROCESSING(object):
# #
# with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF: # with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
# lines = inF.readlines() # lines = inF.readlines()
# outContent = ''.join([line if not re.search('coordinate system string', line, re.I) else # outContent = ''.join([line if not re.search(r'coordinate system string', line, re.I) else
# 'coordinate system string = %s' % self.projection for line in lines]) # 'coordinate system string = %s' % self.projection for line in lines])
# #
# with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF: # with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
......
...@@ -581,9 +581,9 @@ def get_filename_by_entityID(conn_DB, entityid, satellite): ...@@ -581,9 +581,9 @@ def get_filename_by_entityID(conn_DB, entityid, satellite):
:param satellite: <str> satellite name to which the entity ID is belonging :param satellite: <str> satellite name to which the entity ID is belonging
""" """
if re.search('Landsat', satellite, re.I): if re.search(r'Landsat', satellite, re.I):
filename = '%s.tar.gz' % entityid filename = '%s.tar.gz' % entityid
elif re.search('Sentinel-2', satellite, re.I): elif re.search(r'Sentinel-2', satellite, re.I):
filename = get_info_from_postgreSQLdb(conn_DB, 'scenes', ['filename'], filename = get_info_from_postgreSQLdb(conn_DB, 'scenes', ['filename'],
{'entityid': entityid}, records2fetch=1)[0][0] {'entityid': entityid}, records2fetch=1)[0][0]
else: else:
...@@ -771,12 +771,18 @@ class GMS_JOB(object): ...@@ -771,12 +771,18 @@ class GMS_JOB(object):
if isinstance(datadict['filenames'], str) and datadict['filenames'].endswith('.csv'): if isinstance(datadict['filenames'], str) and datadict['filenames'].endswith('.csv'):
datadict['filenames'] = None # TODO implement csv reader here datadict['filenames'] = None # TODO implement csv reader here
raise NotImplementedError raise NotImplementedError
else: else:
temp_gdf = GeoDataFrame(datadict, columns=['satellite', 'sensor', 'filenames']) temp_gdf = GeoDataFrame(datadict, columns=['satellite', 'sensor', 'filenames'])
if re.search('Landsat-7', datadict['satellite'], re.I) and re.search('ETM+', datadict['sensor'], re.I):
if re.search(r'Landsat-7', datadict['satellite'], re.I) and \
re.search(r'ETM+', datadict['sensor'], re.I):
from .helper_functions import Landsat_entityID_decrypter as LED from .helper_functions import Landsat_entityID_decrypter as LED
def get_L7_sensor(fN): return LED(fN.split('.tar.gz')[0]).sensorIncSLC def get_L7_sensor(fN):
return LED(fN.split('.tar.gz')[0]).sensorIncSLC
temp_gdf['sensor'] = list(temp_gdf['filenames'].map(get_L7_sensor)) temp_gdf['sensor'] = list(temp_gdf['filenames'].map(get_L7_sensor))
all_gdfs.append(temp_gdf) all_gdfs.append(temp_gdf)
...@@ -1405,7 +1411,7 @@ def archive_exists_on_fileserver(conn_DB, entityID): ...@@ -1405,7 +1411,7 @@ def archive_exists_on_fileserver(conn_DB, entityID):
archive_fold = os.path.join(CFG.path_archive, satellite, sensor) archive_fold = os.path.join(CFG.path_archive, satellite, sensor)
assert os.path.exists(archive_fold), 'Archive folder not found: %s.' % archive_fold assert os.path.exists(archive_fold), 'Archive folder not found: %s.' % archive_fold
if re.search('Landsat', satellite, re.I): if re.search(r'Landsat', satellite, re.I):
exists = os.path.exists(os.path.join(archive_fold, entityID + '.tar.gz')) exists = os.path.exists(os.path.join(archive_fold, entityID + '.tar.gz'))
else: else:
raise NotImplementedError raise NotImplementedError
......
...@@ -30,7 +30,7 @@ def get_GMS_sensorcode(GMS_id): ...@@ -30,7 +30,7 @@ def get_GMS_sensorcode(GMS_id):
# type: (GMS_identifier) -> str # type: (GMS_identifier) -> str
Satellite, Sensor, Subsystem = (GMS_id.satellite, GMS_id.sensor, GMS_id.subsystem) Satellite, Sensor, Subsystem = (GMS_id.satellite, GMS_id.sensor, GMS_id.subsystem)
Sensor = Sensor[:-1] if re.match('SPOT', Satellite, re.I) and Sensor[-1] not in ['1', '2'] else Sensor Sensor = Sensor[:-1] if re.match(r'SPOT', Satellite, re.I) and Sensor[-1] not in ['1', '2'] else Sensor
meta_sensorcode = Satellite + '_' + Sensor + ('_' + Subsystem if Subsystem not in ["", None] else "") meta_sensorcode = Satellite + '_' + Sensor + ('_' + Subsystem if Subsystem not in ["", None] else "")
sensorcode_dic = { sensorcode_dic = {
'ALOS_AVNIR-2': 'AVNIR-2', 'ALOS_AVNIR-2': 'AVNIR-2',
......
...@@ -288,8 +288,8 @@ def get_path_srf_file(GMS_id, bandname=''): ...@@ -288,8 +288,8 @@ def get_path_srf_file(GMS_id, bandname=''):
""" """
satellite, sensor = GMS_id.satellite, GMS_id.sensor satellite, sensor = GMS_id.satellite, GMS_id.sensor
satellite = 'RapidEye' if re.match('RapidEye', satellite, re.I) else satellite satellite = 'RapidEye' if re.match(r'RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match('SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor sensor = sensor[:-1] if re.match(r'SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
filename = 'band_%s' % bandname if bandname else '' filename = 'band_%s' % bandname if bandname else ''
return os.path.join(CFG.path_SRFs, satellite, sensor, filename) return os.path.join(CFG.path_SRFs, satellite, sensor, filename)
...@@ -302,8 +302,8 @@ def get_path_snr_model(GMS_id): ...@@ -302,8 +302,8 @@ def get_path_snr_model(GMS_id):
""" """
satellite, sensor = (GMS_id.satellite, GMS_id.sensor) satellite, sensor = (GMS_id.satellite, GMS_id.sensor)
satellite = 'RapidEye' if re.match('RapidEye', satellite, re.I) else satellite satellite = 'RapidEye' if re.match(r'RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match('SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor sensor = sensor[:-1] if re.match(r'SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
return os.path.join(CFG.path_SNR_models, satellite, sensor, 'SNR_model.csv') return os.path.join(CFG.path_SNR_models, satellite, sensor, 'SNR_model.csv')
......
...@@ -62,7 +62,7 @@ class SpatialIndexMediatorServer: ...@@ -62,7 +62,7 @@ class SpatialIndexMediatorServer:
running = 'is running' in outputStr running = 'is running' in outputStr
# get PID # get PID
_process_id = re.search('with pid ([\d]*)', outputStr) _process_id = re.search(r'with pid ([\d]*)', outputStr)
if _process_id and _process_id.group(1): if _process_id and _process_id.group(1):
process_id = int(_process_id.group(1)) process_id = int(_process_id.group(1))
else: else:
...@@ -87,7 +87,7 @@ class SpatialIndexMediatorServer: ...@@ -87,7 +87,7 @@ class SpatialIndexMediatorServer:
def stop(self): def stop(self):
outputStr = self._communicate('stop') outputStr = self._communicate('stop')
if outputStr == 'success' or re.search('index-mediator-server stopped', outputStr, re.I): if outputStr == 'success' or re.search(r'index-mediator-server stopped', outputStr, re.I):
return 'stopped' return 'stopped'
else: else:
warnings.warn("\nStopping Spatial Index Mediator Server failed with message '%s'!" warnings.warn("\nStopping Spatial Index Mediator Server failed with message '%s'!"
......
...@@ -354,7 +354,7 @@ class GMS_object(object): ...@@ -354,7 +354,7 @@ class GMS_object(object):
def georef(self): def georef(self):
"""Returns True if the current dataset can serve as spatial reference.""" """Returns True if the current dataset can serve as spatial reference."""
return True if self.image_type == 'RSD' and re.search('OLI', self.sensor, re.I) else False return True if self.image_type == 'RSD' and re.search(r'OLI', self.sensor, re.I) else False
@property @property
def coreg_needed(self): def coreg_needed(self):
...@@ -1138,7 +1138,7 @@ class GMS_object(object): ...@@ -1138,7 +1138,7 @@ class GMS_object(object):
# apply cloud mask to image data and all products derived from image data # apply cloud mask to image data and all products derived from image data
# (only if image data represents BOA-Ref and cloud areas are not to be filled with TOA-Ref) # (only if image data represents BOA-Ref and cloud areas are not to be filled with TOA-Ref)
if re.search('BOA_Reflectance', GMS_obj_merged.MetaObj.PhysUnit, re.I) and not CFG.ac_fillnonclear_areas: if re.search(r'BOA_Reflectance', GMS_obj_merged.MetaObj.PhysUnit, re.I) and not CFG.ac_fillnonclear_areas:
# fill non-clear areas with no data values (for all bands) # fill non-clear areas with no data values (for all bands)
for pixVal in nonclear_pixVals: for pixVal in nonclear_pixVals:
mask_nonclear = GMS_obj_merged.mask_clouds[:] == pixVal mask_nonclear = GMS_obj_merged.mask_clouds[:] == pixVal
...@@ -1189,7 +1189,7 @@ class GMS_object(object): ...@@ -1189,7 +1189,7 @@ class GMS_object(object):
@classmethod @classmethod
def from_tiles(cls, list_GMS_tiles): def from_tiles(cls, list_GMS_tiles):
# type: (list) -> GMS_object # type: (list) -> GMS_object
"""Merge separate GMS objects with different spatial coverage but belonging to the same scene-ID to ONE GMS object. """Merge separate GMS objects with different spatial coverage but belonging to one scene-ID to ONE GMS object.
:param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks() :param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()
""" """
......
This diff is collapsed.
...@@ -596,7 +596,7 @@ class JobConfig(object): ...@@ -596,7 +596,7 @@ class JobConfig(object):
ds["entity_ID"] = row["entityid"] ds["entity_ID"] = row["entityid"]
ds["filename"] = row["filename"] ds["filename"] = row["filename"]
ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor'] ds['sensor'] = 'ETM+' if re.search(r'ETM+', ds['sensor']) else ds['sensor']
if self.skip_thermal and ds['subsystem'] == 'TIR': if self.skip_thermal and ds['subsystem'] == 'TIR':
continue # removes ASTER TIR in case of skip_thermal continue # removes ASTER TIR in case of skip_thermal
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem'] ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
...@@ -604,17 +604,17 @@ class JobConfig(object): ...@@ -604,17 +604,17 @@ class JobConfig(object):
if self.skip_pan and ds['sensormode'] == 'P': if self.skip_pan and ds['sensormode'] == 'P':
continue # removes e.g. SPOT PAN in case of skip_pan continue # removes e.g. SPOT PAN in case of skip_pan
if re.search("Sentinel-2A", ds['satellite'], re.I): if re.search(r"Sentinel-2A", ds['satellite'], re.I):
for subsystem in ['S2A10', 'S2A20', 'S2A60']: for subsystem in ['S2A10', 'S2A20', 'S2A60']:
sub_ds = ds.copy() sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem sub_ds['subsystem'] = subsystem
data_list.append(sub_ds) data_list.append(sub_ds)
elif re.search("Sentinel-2B", ds['satellite'], re.I): elif re.search(r"Sentinel-2B", ds['satellite'], re.I):
for subsystem in ['S2B10', 'S2B20', 'S2B60']: for subsystem in ['S2B10', 'S2B20', 'S2B60']:
sub_ds = ds.copy() sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem sub_ds['subsystem'] = subsystem
data_list.append(sub_ds) data_list.append(sub_ds)
elif re.search("Terra", ds['satellite'], re.I): elif re.search(r"Terra", ds['satellite'], re.I):
for subsystem in ['VNIR1', 'VNIR2', 'SWIR', 'TIR']: for subsystem in ['VNIR1', 'VNIR2', 'SWIR', 'TIR']:
sub_ds = ds.copy() sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem sub_ds['subsystem'] = subsystem
......
...@@ -159,7 +159,7 @@ class ProcessController(object): ...@@ -159,7 +159,7 @@ class ProcessController(object):
AllWrittenProcL_dueLog = [] AllWrittenProcL_dueLog = []
else: else:
logfile = open(path_log, 'r').read() logfile = open(path_log, 'r').read()
AllWrittenProcL_dueLog = re.findall(":*(\S*\s*) data successfully saved.", logfile, re.I) AllWrittenProcL_dueLog = re.findall(r":*(\S*\s*) data successfully saved.", logfile, re.I)
if not AllWrittenProcL_dueLog and path_logfile == path_logfile_merged_ss: # AllWrittenProcL_dueLog = [] if not AllWrittenProcL_dueLog and path_logfile == path_logfile_merged_ss: # AllWrittenProcL_dueLog = []
self.logger.info('%s: According to logfile no completely processed data exist at any ' self.logger.info('%s: According to logfile no completely processed data exist at any '
'processing level. Dataset has to be reprocessed.' % dataset['entity_ID']) 'processing level. Dataset has to be reprocessed.' % dataset['entity_ID'])
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment