Commit d982da7b authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Merge branch 'enhancement/separate_spectral_homogenization'

parents 00d961d7 6543234b
Pipeline #8016 passed with stage
in 17 minutes and 51 seconds
......@@ -34,6 +34,8 @@ test_gms_preprocessing:
# - cd ../../
# make tests
- pip install git+https://gitext.gfz-potsdam.de/geomultisens/spechomo.git # TODO remove as soon as this is included in the CI container
# run tests
- make nosetests
- make docs
......
This diff is collapsed.
......@@ -55,7 +55,7 @@ Level-2 processing:
^^^^^^^^^^^^^^^^^^^
* spatial homogenization
* spectral homogenization
* spectral homogenization (using `SpecHomo <https://gitext.gfz-potsdam.de/geomultisens/spechomo>`_)
* estimation of accuracy layers
=> application oriented analysis dataset
......
......@@ -122,7 +122,7 @@ def run_from_constraints(args):
def _run_job(dbJob, **config_kwargs):
# type: (GMS_JOB) -> None
# type: (GMS_JOB, dict) -> None
"""
:param dbJob:
......
......@@ -75,13 +75,13 @@ class L1A_object(GMS_object):
self.proc_status = proc_status or 'initialized' # if proc_status = 'running' is given by L1A_map
def import_rasterdata(self):
if re.search("ALOS", self.satellite, re.I):
if re.search(r"ALOS", self.satellite, re.I):
'''First 22 lines are nodata: = maybe due to an issue of the GDAL CEOS driver.
But: UL of metadata refers to [row =0, col=21]! So the imported GeoTransform is correct when
the first 21 columns are deleted.'''
self.archive_to_rasObj(self.path_archive, self.path_InFilePreprocessor,
subset=['block', [[None, None], [21, None]]])
elif re.search("Terra", self.satellite, re.I):
elif re.search(r"Terra", self.satellite, re.I):
self.ASTER_HDF_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)
else:
self.archive_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)
......@@ -113,23 +113,23 @@ class L1A_object(GMS_object):
image_files = []
is_ALOS_Landsat_S2 = \
re.search('ALOS', self.satellite) or re.search('Landsat', self.satellite) or \
re.search('Sentinel-2', self.satellite)
re.search(r'ALOS', self.satellite) or re.search(r'Landsat', self.satellite) or \
re.search(r'Sentinel-2', self.satellite)
n_files2search = len(full_LayerBandsAssignment) if is_ALOS_Landsat_S2 else 1
for File in HLP_F.sorted_nicely(files_in_archive):
search_res = \
re.search("IMG-0[0-9]-[\s\S]*", File) if re.search('ALOS', self.satellite) else \
re.search("[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search('Landsat', self.satellite) else \
re.search("[0-9]*.tif", File) if re.search('RapidEye', self.satellite) else \
re.search("imagery.tif", File) if re.search('SPOT', self.satellite) else \
re.search("[\S]*.SAFE/GRANULE/%s/IMG_DATA/[\S]*_B[0-9][\S]*.jp2"
% self.entity_ID, File) if re.search('Sentinel-2', self.satellite) else None
re.search(r"IMG-0[0-9]-[\s\S]*", File) if re.search(r'ALOS', self.satellite) else \
re.search(r"[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search(r'Landsat', self.satellite) else \
re.search(r"[0-9]*.tif", File) if re.search(r'RapidEye', self.satellite) else \
re.search(r"imagery.tif", File) if re.search(r'SPOT', self.satellite) else \
re.search(r"[\S]*.SAFE/GRANULE/%s/IMG_DATA/[\S]*_B[0-9][\S]*.jp2"
% self.entity_ID, File) if re.search(r'Sentinel-2', self.satellite) else None
if search_res:
if re.search('Sentinel-2', self.satellite):
if re.search(r'Sentinel-2', self.satellite):
# add only those files that are corresponding to subsystem (e.g. S2A10: fullLBA = ['2','3','4','8'])
if 1 in [1 if re.search("[\S]*_B[0]?%s.jp2" % LBAn, os.path.basename(File)) else 0
if 1 in [1 if re.search(r"[\S]*_B[0]?%s.jp2" % LBAn, os.path.basename(File)) else 0
for LBAn in full_LayerBandsAssignment]:
image_files.append(File)
else:
......@@ -141,7 +141,7 @@ class L1A_object(GMS_object):
# validate number of expected files #
#####################################
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
if re.search(r'ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
expected_files_count = 2 * len(full_LayerBandsAssignment)
else:
expected_files_count = len(full_LayerBandsAssignment)
......@@ -299,9 +299,9 @@ class L1A_object(GMS_object):
self.arr_desc = \
'DN' if self.MetaObj.PhysUnit == 'DN' else \
'Rad' if self.MetaObj.PhysUnit == "W * m-2 * sr-1 * micrometer-1" else \
'TOA_Ref' if re.search('TOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'BOA_Ref' if re.search('BOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'Temp' if re.search('Degrees Celsius', self.MetaObj.PhysUnit, re.I) else None
'TOA_Ref' if re.search(r'TOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'BOA_Ref' if re.search(r'BOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
'Temp' if re.search(r'Degrees Celsius', self.MetaObj.PhysUnit, re.I) else None
assert self.arr_desc, 'GMS_obj contains an unexpected physical unit: %s' % self.MetaObj.PhysUnit
......@@ -391,7 +391,7 @@ class L1A_object(GMS_object):
(PI * U__earth_sun_distance_correction_factor);
L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos(
Z__Sun_Angles_Grid_Zenith_Values)) / PI;"""
if re.search('Sentinel-2', self.satellite, re.I):
if re.search(r'Sentinel-2', self.satellite, re.I):
warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when '
'calculating radiance from TOA Reflectance. ESA provides only 12 gain values for '
'13 bands and it not clear for which bands the gains are provided.')
......@@ -456,7 +456,7 @@ class L1A_object(GMS_object):
os.chdir(os.path.dirname(self.path_archive))
rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
if rasObj.geotransform == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) and rasObj.projection == '':
if re.search('ALOS', self.satellite) and self.MetaObj.ProcLCode == '1B2':
if re.search(r'ALOS', self.satellite) and self.MetaObj.ProcLCode == '1B2':
self.GeoTransProj_ok, self.GeoAlign_ok = False, True
else:
self.GeoTransProj_ok, self.GeoAlign_ok = False, False
......@@ -547,7 +547,7 @@ class L1A_object(GMS_object):
'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...')
# if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
# if re.search(r'ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
# tzinfo=datetime.timezone.utc):
if is_dataset_provided_as_fullScene(self.GMS_identifier):
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
......@@ -577,7 +577,7 @@ class L1A_object(GMS_object):
else:
if re.search('AVNIR', self.sensor):
if re.search(r'AVNIR', self.sensor):
self.fullSceneCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
assert_four_corners=False)
# set true data corner positions (lon/lat coordinates)
......@@ -588,7 +588,7 @@ class L1A_object(GMS_object):
else:
# RapidEye or Sentinel-2 data
if re.search('Sentinel-2', self.satellite):
if re.search(r'Sentinel-2', self.satellite):
# get fullScene corner coordinates by database query
# -> calculate footprints for all granules of the same S2 datatake
# -> merge them and calculate overall corner positions
......
......@@ -269,7 +269,7 @@ class AtmCorr(object):
# append AtmCorr object to input L1C objects
# [setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization
if not re.search('Sentinel-2', self.inObjs[0].satellite, re.I):
if not re.search(r'Sentinel-2', self.inObjs[0].satellite, re.I):
self.logger.debug('Calculation of acquisition geometry arrays is currently only validated for Sentinel-2!')
# validation possible by comparing S2 angles provided by ESA with own angles # TODO
......@@ -649,7 +649,7 @@ class AtmCorr(object):
:return: <np.ndarray> 2D array (with 20m resolution in case of Sentinel-2)
"""
# determine which input GMS object is used to generate DEM
if re.search('Sentinel-2', self.inObjs[0].satellite):
if re.search(r'Sentinel-2', self.inObjs[0].satellite):
# in case of Sentinel-2 the 20m DEM must be passed
inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd == 20]
if not inObj4dem:
......
This diff is collapsed.
# -*- coding: utf-8 -*-
"""
Algorithms for multispectral image classification.
"""
import numpy as np
from typing import Union, List # noqa F401 # flake8 issue
from multiprocessing import Pool
from tqdm import tqdm
from sklearn.neighbors import KNeighborsClassifier, NearestCentroid
from pysptools.classification import SAM
from geoarray import GeoArray
class _ImageClassifier(object):
    """Base class for GMS image classifiers.

    Stores the training data and provides tile-wise (optionally multiprocessed)
    classification of a full image cube via :meth:`classify`.  Subclasses must
    set ``self.clf`` and implement :meth:`_predict`.
    """

    def __init__(self, train_spectra, train_labels, CPUs=1):
        # type: (np.ndarray, Union[np.ndarray, List[int]], int) -> None
        """
        :param train_spectra:   2D array of training spectra (n_samples x n_features)
        :param train_labels:    class label for each training sample
        :param CPUs:            number of CPUs to use (None: presumably all available — confirm with Pool semantics)
        """
        self.CPUs = CPUs
        self.train_spectra = train_spectra
        self.train_labels = train_labels
        self.n_samples = train_spectra.shape[0]
        self.n_features = train_spectra.shape[1]
        self.clf = None  # to be implemented by the subclass
        self.cmap = None  # classification map (GeoArray); set by classify()

    def _predict(self, tilepos, tileimdata):
        """Classify a single image tile.

        Implementations must return a tuple ``(tilepos, tile_classification_map)``
        because :meth:`classify` unpacks the tile position from the result.
        """
        raise NotImplementedError('This method has to be implemented by the subclass.')

    def classify(self, image_cube, nodataVal=None, tiledims=(1000, 1000)):
        """Classify the given image cube tile by tile.

        :param image_cube:  image array (or GeoArray) to be classified
        :param nodataVal:   nodata value of the input image; corresponding pixels
                            are masked out in the output classification map
        :param tiledims:    dimensions of the processing tiles
                            (assumed to be (rows, cols) — TODO confirm against GeoArray.tiles)
        :return:            classification map, cast to the dtype of the input image
        """
        image_cube_gA = GeoArray(image_cube, nodata=nodataVal)

        # output map gets the dtype of the training labels
        self.cmap = GeoArray(np.empty((image_cube_gA.rows, image_cube_gA.cols),
                                      dtype=np.array(self.train_labels).dtype), nodata=nodataVal)

        if self.CPUs is None or self.CPUs > 1:
            # parallel path: predict all tiles in worker processes, then mosaic the results
            with Pool(self.CPUs) as pool:
                tiles_cm = pool.starmap(self._predict, image_cube_gA.tiles(tiledims))

            for ((rS, rE), (cS, cE)), tile_cm in tiles_cm:
                # rE/cE are inclusive end positions, hence the +1 in the slices
                self.cmap[rS: rE + 1, cS: cE + 1] = tile_cm
        else:
            # serial path with progress bar; _predict returns (tilepos, tile_cm), hence [1]
            for ((rS, rE), (cS, cE)), tile in tqdm(image_cube_gA.tiles(tiledims)):
                print('Performing classification for tile ((%s, %s), (%s, %s))...' % (rS, rE, cS, cE))
                self.cmap[rS: rE + 1, cS: cE + 1] = self._predict(((rS, rE), (cS, cE)), tile)[1]

        if nodataVal is not None:
            # mask out pixels that are nodata in the input image
            self.cmap[image_cube_gA.mask_nodata.astype(np.int8) == 0] = nodataVal

        return self.cmap.astype(image_cube.dtype)

    def show_cmap(self):
        """Display the classification map if one has been computed."""
        if self.cmap:
            self.cmap.show()
class MinimumDistance_Classifier(_ImageClassifier):
    """Minimum distance (nearest centroid) classifier.

    Assigns each pixel vector to the class whose mean vector has the smallest
    n-dimensional Euclidean distance: D = sqrt(sum((Xvi - Xvj)^2)).
    """

    def __init__(self, train_spectra, train_labels, CPUs=1):
        # type: (np.ndarray, Union[np.ndarray, List[int]], int) -> None
        super(MinimumDistance_Classifier, self).__init__(train_spectra, train_labels, CPUs=CPUs)

        # fit a nearest-centroid model on the given training data
        self.clf = NearestCentroid()
        self.clf.fit(train_spectra, train_labels)

    def _predict(self, tilepos, tileimdata):
        """Classify one image tile and return (tilepos, tile_classification_map)."""
        n_rows, n_cols, n_bands = tileimdata.shape
        flat_spectra = tileimdata.reshape((n_rows * n_cols, n_bands))
        labels = self.clf.predict(flat_spectra)
        return tilepos, labels.reshape(n_rows, n_cols)
class kNN_Classifier(_ImageClassifier):
    """k-nearest-neighbour classifier based on sklearn's KNeighborsClassifier."""

    def __init__(self, train_spectra, train_labels, CPUs=1, n_neighbors=10):
        # type: (np.ndarray, Union[np.ndarray, List[int]], int, int) -> None
        super(kNN_Classifier, self).__init__(train_spectra, train_labels, CPUs=CPUs)

        # fit a kNN model on the given training data (parallelized over CPUs)
        self.clf = KNeighborsClassifier(n_neighbors=n_neighbors, n_jobs=CPUs)
        self.clf.fit(train_spectra, train_labels)

    def _predict(self, tilepos, tileimdata):
        """Classify one image tile and return (tilepos, tile_classification_map)."""
        n_rows, n_cols, n_bands = tileimdata.shape
        flat_spectra = tileimdata.reshape((n_rows * n_cols, n_bands))
        labels = self.clf.predict(flat_spectra)
        return tilepos, labels.reshape(n_rows, n_cols)
class SAM_Classifier(_ImageClassifier):
    """Spectral angle mapping classifier based on pysptools' SAM implementation."""

    def __init__(self, train_spectra, threshold=0.1, CPUs=1):
        # type: (np.ndarray, float, int) -> None
        """
        :param train_spectra:   2D array of endmember spectra (n_endmembers x n_features)
        :param threshold:       spectral angle threshold passed to SAM.classify()
        :param CPUs:            number of CPUs to use (None: all available)
        """
        # NOTE: fixed the type comment — 'threshold' is a float, not a label array
        # class labels are simply the indices of the given endmember spectra
        super(SAM_Classifier, self).__init__(train_spectra, np.array(range(train_spectra.shape[0])), CPUs=CPUs)

        self.clf = SAM()
        self.threshold = threshold

    def _predict(self, tilepos, tileimdata):
        """Classify one image tile and return (tilepos, tile_classification_map).

        NOTE: the base class unpacks (tilepos, tile_cm) from this return value, so the
              tile position must be returned along with the classification result
              (previously only the raw classification array was returned, which broke
              the multiprocessing path and silently returned a wrong row in the
              serial path via _predict(...)[1]).
        """
        return tilepos, self.clf.classify(tileimdata, self.train_spectra, self.threshold)

    def classify(self, image_cube, nodataVal=None, tiledims=(1000, 1000), mask=None):
        """Classify the given image cube tile by tile via spectral angle mapping.

        :param image_cube:  image array (or GeoArray) to be classified
        :param nodataVal:   nodata value of the input image
        :param tiledims:    dimensions of the processing tiles
        :param mask:        optional boolean mask of pixels to exclude; masked pixels
                            are set to -9999 in the output classification map
        """
        image_cube_gA = GeoArray(image_cube, nodata=nodataVal)

        # avoid "RuntimeWarning: invalid value encountered in less" during SAM.classify()
        # NOTE: 'mask is not None' is required here — 'if mask:' raises a ValueError
        #       for any multi-element boolean array because its truth value is ambiguous
        if mask is not None:
            image_cube_gA[mask] = np.max(image_cube_gA)
        elif nodataVal is not None:
            image_cube_gA[image_cube_gA[:] == nodataVal] = np.max(image_cube_gA)
        else:
            image_cube_gA[image_cube_gA.mask_nodata.astype(np.int8) == 0] = np.max(image_cube_gA)

        cmap = super(SAM_Classifier, self).classify(image_cube_gA, nodataVal=nodataVal, tiledims=tiledims)

        if mask is not None:
            cmap[mask] = -9999

        return cmap
def classify_image(image, train_spectra, train_labels, classif_alg,
                   kNN_n_neighbors=10, nodataVal=None, tiledims=(1000, 1000), CPUs=None):
    # type: (Union[np.ndarray, GeoArray], np.ndarray, Union[np.ndarray, List[int]], str, int, ...) -> GeoArray
    """Classify an image, i.e., find the cluster each pixel spectrum belongs to.

    :param image:               image to be classified
    :param train_spectra:       training spectra (one spectrum per cluster/class)
    :param train_labels:        class label for each training spectrum
                                (ignored in case 'classif_alg' is set to 'SAM')
    :param classif_alg:         algorithm to be used for image classification
                                (to define which cluster each pixel belongs to)
                                'MinDist':  Minimum Distance (Nearest Centroid)
                                'kNN':      k-nearest-neighbour
                                'SAM':      spectral angle mapping
    :param kNN_n_neighbors:     The number of neighbors to be considered in case 'classif_alg' is set to
                                'kNN'. Otherwise, this parameter is ignored.
    :param nodataVal:           nodata value of the input image
    :param tiledims:            dimensions of the tiles processed at once
    :param CPUs:                number of CPUs to be used for classification
    """
    # instantiate the requested classifier
    if classif_alg == 'MinDist':
        clf = MinimumDistance_Classifier(train_spectra, train_labels, CPUs=CPUs)

    elif classif_alg == 'kNN':
        clf = kNN_Classifier(train_spectra, train_labels, CPUs=CPUs, n_neighbors=kNN_n_neighbors)

    elif classif_alg == 'SAM':
        # SAM derives its labels from the endmember indices, so train_labels is not passed
        clf = SAM_Classifier(train_spectra, CPUs=CPUs)

    else:
        raise NotImplementedError("Currently only the methods 'kNN', 'MinDist' and 'SAM' are implemented.")

    return clf.classify(image, nodataVal=nodataVal, tiledims=tiledims)
......@@ -481,7 +481,7 @@ class Cloud_Mask_Creator(object):
self.GMS_obj.logger.info("Calculating cloud mask based on '%s' algorithm..." % self.algorithm)
if self.algorithm == 'FMASK':
if re.search('Landsat', self.GMS_obj.satellite, re.I):
if re.search(r'Landsat', self.GMS_obj.satellite, re.I):
FMR = FMASK_Runner_Landsat(self.GMS_obj.path_archive, self.GMS_obj.satellite)
else:
......
......@@ -90,7 +90,7 @@ class GEOPROCESSING(object):
# '/vsitar' '.gz': '/vsigzip'}
p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz']
if len(geodata.split(i)) > 1 and geodata.split(i)[1].startswith('/')][0]
path2check = os.path.abspath('.' + re.search('/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1))
path2check = os.path.abspath('.' + re.search(r'/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1))
assert os.path.exists(path2check), "ERROR: data %s does not exist!" % path2check
assert self.inDs is not None, "ERROR: Could not open %s!" % self.filename
elif isinstance(geodata, gdal.Dataset):
......@@ -682,7 +682,7 @@ class GEOPROCESSING(object):
with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
lines = inF.readlines()
outContent = ''.join([i for i in lines if not re.search('map info', i, re.I)])
outContent = ''.join([i for i in lines if not re.search(r'map info', i, re.I)])
with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
outF.write(outContent)
......@@ -696,7 +696,7 @@ class GEOPROCESSING(object):
#
# with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
# lines = inF.readlines()
# outContent = ''.join([line if not re.search('coordinate system string', line, re.I) else
# outContent = ''.join([line if not re.search(r'coordinate system string', line, re.I) else
# 'coordinate system string = %s' % self.projection for line in lines])
#
# with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
......
......@@ -26,7 +26,9 @@ from scipy.interpolate import interp1d
from geoarray import GeoArray
from py_tools_ds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.geo.vector.geometry import boxObj
from py_tools_ds.geo.coord_trafo import transform_any_prj
from py_tools_ds.geo.projection import isProjectedOrGeographic
from py_tools_ds.numeric.vector import find_nearest
from ..options.config import GMS_config as CFG
......@@ -465,7 +467,8 @@ class DEM_Creator(object):
:param db_conn: database connection string
"""
if dem_sensor not in ['SRTM', 'ASTER']:
raise ValueError('%s is not a supported DEM sensor. Choose between SRTM and ASTER (both 30m native GSD).')
raise ValueError('%s is not a supported DEM sensor. Choose between SRTM and ASTER (both 30m native GSD).'
% dem_sensor)
self.dem_sensor = dem_sensor
self.db_conn = db_conn if db_conn else CFG.conn_database
......@@ -584,13 +587,18 @@ class DEM_Creator(object):
def from_extent(self, cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
"""Returns a GeoArray of a DEM according to the given target coordinates
:param cornerCoords_tgt: list of target coordinates [[X,Y], [X,Y], ...]]
:param cornerCoords_tgt: list of target coordinates [[X,Y], [X,Y], ...]] (at least 2 coordinates)
:param prj: WKT string of the projection belonging cornerCoords_tgt
:param tgt_xgsd: output X GSD
:param tgt_ygsd: output Y GSD
:return: DEM GeoArray
"""
# generate at least 4 coordinates in case less coords have been given in order to avoid nodata triangles in DEM
if len(cornerCoords_tgt) < 4 and isProjectedOrGeographic(prj) == 'projected':
co_yx = [(y, x) for x, y in cornerCoords_tgt]
cornerCoords_tgt = boxObj(boxMapYX=co_yx).boxMapXY
# handle coordinate infos
tgt_corner_coord_lonlat = self._get_corner_coords_lonlat(cornerCoords_tgt, prj)
......
......@@ -581,9 +581,9 @@ def get_filename_by_entityID(conn_DB, entityid, satellite):
:param satellite: <str> satellite name to which the entity ID is belonging
"""
if re.search('Landsat', satellite, re.I):
if re.search(r'Landsat', satellite, re.I):
filename = '%s.tar.gz' % entityid
elif re.search('Sentinel-2', satellite, re.I):
elif re.search(r'Sentinel-2', satellite, re.I):
filename = get_info_from_postgreSQLdb(conn_DB, 'scenes', ['filename'],
{'entityid': entityid}, records2fetch=1)[0][0]
else:
......@@ -771,12 +771,18 @@ class GMS_JOB(object):
if isinstance(datadict['filenames'], str) and datadict['filenames'].endswith('.csv'):
datadict['filenames'] = None # TODO implement csv reader here
raise NotImplementedError
else:
temp_gdf = GeoDataFrame(datadict, columns=['satellite', 'sensor', 'filenames'])
if re.search('Landsat-7', datadict['satellite'], re.I) and re.search('ETM+', datadict['sensor'], re.I):
if re.search(r'Landsat-7', datadict['satellite'], re.I) and \
re.search(r'ETM+', datadict['sensor'], re.I):
from .helper_functions import Landsat_entityID_decrypter as LED
def get_L7_sensor(fN): return LED(fN.split('.tar.gz')[0]).sensorIncSLC
def get_L7_sensor(fN):
return LED(fN.split('.tar.gz')[0]).sensorIncSLC
temp_gdf['sensor'] = list(temp_gdf['filenames'].map(get_L7_sensor))
all_gdfs.append(temp_gdf)
......@@ -1405,7 +1411,7 @@ def archive_exists_on_fileserver(conn_DB, entityID):
archive_fold = os.path.join(CFG.path_archive, satellite, sensor)
assert os.path.exists(archive_fold), 'Archive folder not found: %s.' % archive_fold
if re.search('Landsat', satellite, re.I):
if re.search(r'Landsat', satellite, re.I):
exists = os.path.exists(os.path.join(archive_fold, entityID + '.tar.gz'))
else:
raise NotImplementedError
......
......@@ -30,7 +30,7 @@ def get_GMS_sensorcode(GMS_id):
# type: (GMS_identifier) -> str
Satellite, Sensor, Subsystem = (GMS_id.satellite, GMS_id.sensor, GMS_id.subsystem)
Sensor = Sensor[:-1] if re.match('SPOT', Satellite, re.I) and Sensor[-1] not in ['1', '2'] else Sensor
Sensor = Sensor[:-1] if re.match(r'SPOT', Satellite, re.I) and Sensor[-1] not in ['1', '2'] else Sensor
meta_sensorcode = Satellite + '_' + Sensor + ('_' + Subsystem if Subsystem not in ["", None] else "")
sensorcode_dic = {
'ALOS_AVNIR-2': 'AVNIR-2',
......
......@@ -37,26 +37,3 @@ class FmaskError(RuntimeError):
class ACNotSupportedError(RuntimeError):
"""An error raised if there is currently no AC supported for the current sensor."""
####################################
# SPECTRAL HOMOGENIZATION EXCEPTIONS
####################################
class ClassifierNotAvailableError(RuntimeError):
def __init__(self, spechomo_method, src_sat, src_sen, src_LBA, tgt_sat, tgt_sen, tgt_LBA, n_clusters):
self.spechomo_method = spechomo_method
self.src_sat = src_sat
self.src_sen = src_sen
self.src_LBA = src_LBA
self.tgt_sat = tgt_sat
self.tgt_sen = tgt_sen
self.tgt_LBA = tgt_LBA
self.n_clusters = n_clusters
RuntimeError.__init__(self)
def __str__(self):
return 'No %s classifier available for predicting %s %s %s from %s %s %s (%d clusters).'\
% (self.spechomo_method, self.tgt_sat, self.tgt_sen, self.tgt_LBA,
self.src_sat, self.src_sen, self.src_LBA, self.n_clusters)
......@@ -9,6 +9,7 @@ import os
from ..options.config import GMS_config as CFG
# try:
# # noinspection PyCompatibility
# from StringIO import StringIO # Python 2
# except ImportError:
# from io import StringIO # Python 3
......@@ -32,6 +33,7 @@ class GMS_logger(logging.Logger):
# private attributes
self._captured_stream = ''
# attributes that need to be present in order to unpickle the logger via __setstate_
self.name_logfile = name_logfile
self.fmt_suffix = fmt_suffix
self.path_logfile = path_logfile
......
......@@ -288,8 +288,8 @@ def get_path_srf_file(GMS_id, bandname=''):
"""
satellite, sensor = GMS_id.satellite, GMS_id.sensor
satellite = 'RapidEye' if re.match('RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match('SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
satellite = 'RapidEye' if re.match(r'RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match(r'SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
filename = 'band_%s' % bandname if bandname else ''
return os.path.join(CFG.path_SRFs, satellite, sensor, filename)
......@@ -302,8 +302,8 @@ def get_path_snr_model(GMS_id):
"""
satellite, sensor = (GMS_id.satellite, GMS_id.sensor)
satellite = 'RapidEye' if re.match('RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match('SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
satellite = 'RapidEye' if re.match(r'RapidEye', satellite, re.I) else satellite
sensor = sensor[:-1] if re.match(r'SPOT', satellite, re.I) and sensor[-1] not in ['1', '2'] else sensor
return os.path.join(CFG.path_SNR_models, satellite, sensor, 'SNR_model.csv')
......
......@@ -62,7 +62,7 @@ class SpatialIndexMediatorServer:
running = 'is running' in outputStr
# get PID
_process_id = re.search('with pid ([\d]*)', outputStr)
_process_id = re.search(r'with pid ([\d]*)', outputStr)
if _process_id and _process_id.group(1):
process_id = int(_process_id.group(1))
else:
......@@ -87,7 +87,7 @@ class SpatialIndexMediatorServer:
def stop(self):
outputStr = self._communicate('stop')
if outputStr == 'success' or re.search('index-mediator-server stopped', outputStr, re.I):
if outputStr == 'success' or re.search(r'index-mediator-server stopped', outputStr, re.I):
return 'stopped'
else:
warnings.warn("\nStopping Spatial Index Mediator Server failed with message '%s'!"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment