Commit 26eca763 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Merge branch 'feature/add_accuracy_layers'

parents 21789fb4 a28caef0
......@@ -23,7 +23,7 @@ test_gms_preprocessing:
# - python setup.py install
# - cd ../../
# make tests
- pip install nested_dict # FIXME: remove as soon as runner is rebuilt
- pip install nested_dict openpyxl # FIXME: remove as soon as runner is rebuilt
- make nosetests
- make docs
artifacts:
......
......@@ -148,7 +148,7 @@ def get_gms_argparser():
# metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),
gop_p('-DH', '--db_host', nargs='?', type=str,
default=options_default["global_opts"]["db_host"],
default='localhost', # hardcoded here because default json is read from database and host must be available
help='host name of the server that runs the postgreSQL database')
gop_p('-DOO', '--delete_old_output', nargs='?', type=bool,
......
......@@ -137,6 +137,7 @@ class L1A_object(GMS_object):
#####################################
# validate number of expected files #
#####################################
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
expected_files_count = 2 * len(full_LayerBandsAssignment)
else:
......@@ -149,6 +150,7 @@ class L1A_object(GMS_object):
###############################
# get paths of files to stack #
###############################
# NOTE: image_files is a SORTED list of image filenames; self.LayerBandsAssignment may be sorted by CWL
filtered_files = []
for bN in self.LayerBandsAssignment: # unsorted, e.g., ['1', '2', '3', '4', '5', '9', '6', '7']
......@@ -161,6 +163,7 @@ class L1A_object(GMS_object):
#########################
# read the raster data #
#########################
rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger)
# in case a subset is to be read: prepare rasObj instance to read a subset
......
......@@ -87,7 +87,9 @@ class Scene_finder(object):
"""
for i in range(10):
try:
SpIM = SpatialIndexMediator(timeout=timeout)
SpIM = SpatialIndexMediator(host=CFG.spatial_index_server_host,
port=CFG.spatial_index_server_port,
timeout=timeout)
self.possib_ref_scenes = \
SpIM.getFullSceneDataForDataset(self.boundsLonLat, self.timeStart, self.timeEnd, self.min_cloudcov,
self.max_cloudcov, CFG.datasetid_spatial_ref,
......
......@@ -487,9 +487,9 @@ class AtmCorr(object):
@property
def options(self):
# type: () -> dict
"""Returns a dictionary containing AC options.
"""
# type: -> dict
if self._options:
return self._options
else:
......@@ -827,7 +827,8 @@ class AtmCorr(object):
self._check_or_download_ECMWF_data()
# validate SNR
self._validate_snr_source()
if CFG.ac_estimate_accuracy:
self._validate_snr_source()
# create an instance of RSImage
rs_image = RSImage(**rs_data)
......@@ -850,6 +851,12 @@ class AtmCorr(object):
self.results = ac_gms(rs_image, self.options, logger=self.logger, script=script)
except Exception as e:
self.logger.error('\nAn error occurred during atmospheric correction. BE AWARE THAT THE SCENE %s '
'(ENTITY ID %s) HAS NOT BEEN ATMOSPHERICALLY CORRECTED! Error message was: \n%s\n'
% (self.inObjs[0].scene_ID, self.inObjs[0].entity_ID, repr(e)))
self.logger.error(traceback.format_exc())
# TODO include that in the job summary
# serialialize AC input
if dump_ac_input:
path_dump = self.inObjs[0].pathGen.get_path_ac_input_dump()
......@@ -863,16 +870,10 @@ class AtmCorr(object):
for inObj in self.inObjs: # type: L1C_object
inObj.delete_ac_input_arrays()
self.logger.error('\nAn error occurred during atmospheric correction. BE AWARE THAT THE SCENE %s '
'(ENTITY ID %s) HAS NOT BEEN ATMOSPHERICALLY CORRECTED! Error message was: \n%s\n'
% (self.inObjs[0].scene_ID, self.inObjs[0].entity_ID, repr(e)))
self.logger.error(traceback.format_exc())
# TODO include that in the job summary
return list(self.inObjs)
# get processing infos
self.proc_info = self.ac_input['options']['processing'] # FIXME this is not appended to GMS objects
self.proc_info = self.ac_input['options']['processing']
# join results
self._join_results_to_inObjs() # sets self.outObjs
......@@ -900,6 +901,9 @@ class AtmCorr(object):
# update masks (always do that because masks can also only contain one layer)
[inObj.build_combined_masks_array() for inObj in self.inObjs]
# update AC processing info
[inObj.ac_options['processing'].update(self.proc_info) for inObj in self.inObjs]
self.outObjs = self.inObjs
def _join_data_ac(self):
......@@ -964,6 +968,9 @@ class AtmCorr(object):
ac_errors = ac_errors.astype(out_dtype)
inObj.ac_errors = ac_errors # setter generates a GeoArray with the same bandnames like inObj.arr
# TODO how to handle nans?
elif not CFG.ac_estimate_accuracy:
self.logger.info("Atmospheric correction did not provide a 'data_errors' array because "
"'ac_estimate_accuracy' was set to False in the job configuration.")
else:
self.logger.warning("Atmospheric correction did not provide a 'data_errors' array. Maybe due to "
"missing SNR model? GMS_object.ac_errors kept None.")
......@@ -1016,8 +1023,7 @@ class AtmCorr(object):
"""
Join confidence array for mask_clouds.
"""
if self.results.mask_clouds.mask_confidence_array is not None:
if self.results.mask_clouds.mask_confidence_array is not None and CFG.ac_estimate_accuracy:
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255]
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr * CFG.scale_factor_BOARef).astype(np.int16)
......@@ -1036,7 +1042,9 @@ class AtmCorr(object):
if not joined:
self.logger.warning('Cloud mask confidence array has not been appended to one of the AC inputs because '
'there was no input GMS object with the same dimensions.')
elif not CFG.ac_estimate_accuracy:
self.logger.info("Cloud mask confidence array is not appended to AC outputs because "
"'ac_estimate_accuracy' was set to False in the job configuration.")
else:
self.logger.warning("Atmospheric correction did not provide a 'mask_confidence_array' array for "
"attribute 'mask_clouds. GMS_object.mask_clouds_confidence kept None.")
......@@ -103,6 +103,7 @@ class L2B_object(L2A_object):
tgt_sensor=tgt_sen,
tgt_LBA=tgt_LBA,
nodataVal=self.arr.nodata,
compute_errors=CFG.spechomo_estimate_accuracy,
fallback_argskwargs=dict(
args=dict(source_CWLs=src_cwls, target_CWLs=tgt_cwls,),
kwargs=dict(kind='linear')
......@@ -118,7 +119,7 @@ class L2B_object(L2A_object):
del self.meta_odict['band names'] # TODO
self.arr = im # type: GeoArray
self.spec_homo_errors = errs # type: np.ndarray # int16
self.spec_homo_errors = errs # type: Union[np.ndarray, None] # int16, None if ~CFG.spechomo_estimate_accuracy
class SpectralHomogenizer(object):
......@@ -160,8 +161,9 @@ class SpectralHomogenizer(object):
return outarr
def predict_by_machine_learner(self, arrcube, method, src_satellite, src_sensor, src_LBA,
tgt_satellite, tgt_sensor, tgt_LBA, nodataVal=None, **fallback_argskwargs):
# type: (Union[np.ndarray, GeoArray], str, str, str, list, str, str, list, int, dict) -> tuple
tgt_satellite, tgt_sensor, tgt_LBA, nodataVal=None, compute_errors=False,
**fallback_argskwargs):
# type: (Union[np.ndarray, GeoArray], str, str, str, list, str, str, list, int, bool, dict) -> tuple
"""Predict spectral bands of target sensor by applying a machine learning approach.
:param arrcube: input image array for target sensor spectral band prediction (rows x cols x bands)
......@@ -175,9 +177,11 @@ class SpectralHomogenizer(object):
:param tgt_sensor: target sensor, e.g., 'OLI_TIRS'
:param tgt_LBA: target LayerBandsAssignment
:param nodataVal: no data value
:param compute_errors: whether to compute pixel- / bandwise model errors for estimated pixel values
(default: false)
:param fallback_argskwargs: arguments and keyword arguments for fallback algorithm ({'args':{}, 'kwargs': {}}
:return: predicted array (rows x columns x bands)
:rtype: Tuple[np.ndarray, np.ndarray]
:rtype: Tuple[np.ndarray, Union[np.ndarray, None]]
"""
# TODO: add LBA validation to .predict()
PR = RSImage_Predictor(method=method, classifier_rootDir=self.classifier_rootDir)
......@@ -207,16 +211,19 @@ class SpectralHomogenizer(object):
# run prediction #
##################
errors = None
if cls:
self.logger.info('Performing spectral homogenization using %s. Target is %s %s %s.'
% (method, tgt_satellite, tgt_sensor, tgt_LBA))
im_homo = PR.predict(arrcube, classifier=cls, nodataVal=nodataVal)
errors = PR.compute_prediction_errors(im_homo, cls, nodataVal=nodataVal)
if compute_errors:
errors = PR.compute_prediction_errors(im_homo, cls, nodataVal=nodataVal)
elif fallback_argskwargs:
# fallback: use linear interpolation and set errors to an array of zeros
im_homo = self.interpolate_cube(arrcube, *fallback_argskwargs['args'], **fallback_argskwargs['kwargs'])
errors = np.zeros_like(im_homo, dtype=np.int16)
if compute_errors:
errors = np.zeros_like(im_homo, dtype=np.int16)
else:
raise exc
......
......@@ -856,7 +856,7 @@ class GMS_JOB(object):
count_no_match = len(list_entityids) - len(list_sceneIDs)
if count_no_match:
warnings.warn('%s datasets could not be found the database. They cannot be processed.')
warnings.warn('%s datasets could not be found the database. They cannot be processed.' % count_no_match)
return self.from_sceneIDlist(list_sceneIDs, virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref, comment=comment)
......
......@@ -12,7 +12,7 @@ except ImportError:
from logging import getLogger
from ..options.config import GMS_config as CFG
from .spatial_index_mediator import SpatialIndexMediatorServer
from .spatial_index_mediator import SpatialIndexMediatorServer, Connection
from .exceptions import GMSEnvironmentError, MissingNonPipLibraryWarning
__author__ = 'Daniel Scheffler'
......@@ -35,9 +35,13 @@ class GMSEnvironment(object):
if not SpatIdxSrv.is_running:
SpatIdxSrv.start()
# test connection
conn = Connection(host=CFG.spatial_index_server_host, port=CFG.spatial_index_server_port, timeout=5.0)
conn.disconnect()
os.environ['GMS_SPAT_IDX_SRV_STATUS'] = 'available'
except GMSEnvironmentError as e:
except Exception as e:
self.logger.error(e, exc_info=False)
self.logger.warning('Coregistration will be skipped!')
os.environ['GMS_SPAT_IDX_SRV_STATUS'] = 'unavailable'
......
......@@ -44,10 +44,15 @@ class SpatialIndexMediatorServer:
"""
outputStr = self._communicate('status')
# decrypt
# decrypt status
running = 'is running' in outputStr
# get PID
_process_id = re.search('with pid ([\d]*)', outputStr)
process_id = int(_process_id.group(1)) if _process_id else None
if _process_id and _process_id.group(1):
process_id = int(_process_id.group(1))
else:
process_id = None
return {'running': running, 'process_id': process_id}
......@@ -108,7 +113,7 @@ class SpatialIndexMediator:
""" message value for a full scene query message """
# def __init__(self, host="geoms.gfz-potsdam.de", port=8654):
def __init__(self, host="localhost", port=8654, timeout=5.0): # FIXME could be a problem on other nodes
def __init__(self, host="localhost", port=8654, timeout=5.0):
"""
Establishes a connection to the spatial index mediator server.
......@@ -278,7 +283,10 @@ class Connection:
def __init__(self, host, port, timeout):
# connect to index mediator server
self.socket = socket.create_connection((host, port), timeout)
try:
self.socket = socket.create_connection((host, port), timeout)
except ConnectionRefusedError:
raise ConnectionRefusedError('The spatial index mediator server refused the connection!')
# send hello and confirm response
if not self.__greet():
......
......@@ -339,6 +339,7 @@ class GMS_object(Dataset):
@property
def ac_options(self):
# type: () -> dict
"""
Returns the options dictionary needed as input for atmospheric correction. If an empty dictionary is returned,
atmospheric correction is not yet available for the current sensor and will later be skipped.
......@@ -359,7 +360,10 @@ class GMS_object(Dataset):
opt_dict['output'] = [] # outputs are not needed for GMS -> so
opt_dict['report']['report_path'] = os.path.join(self.pathGen.get_path_procdata(), '[TYPE]')
if 'uncertainties' in opt_dict:
opt_dict['uncertainties']['snr_model'] = PG.get_path_snr_model(self.GMS_identifier)
if CFG.ac_estimate_accuracy:
opt_dict['uncertainties']['snr_model'] = PG.get_path_snr_model(self.GMS_identifier)
else:
del opt_dict['uncertainties'] # SICOR will not compute uncertainties if that key is missing
# apply custom configuration
opt_dict["logger"]['level'] = CFG.log_level
......
......@@ -97,6 +97,7 @@ def set_config(job_ID, json_config='', inmem_serialization=False, parallelizatio
:param path_archive: input path where downloaded data are stored
:param virtual_sensor_id: 1: Landsat-8, 10: Sentinel-2A 10m
:param datasetid_spatial_ref: 249 Sentinel-2A
:rtype: JobConfig
"""
#################################
# set GMS_JobConfig in builtins #
......@@ -196,6 +197,10 @@ class JobConfig(object):
gp('inmem_serialization', json_globts['inmem_serialization'])
self.parallelization_level = \
gp('parallelization_level', json_globts['parallelization_level'])
self.spatial_index_server_host = \
gp('spatial_index_server_host', json_globts['spatial_index_server_host'])
self.spatial_index_server_port = \
gp('spatial_index_server_port', json_globts['spatial_index_server_port'])
self.CPUs = \
gp('CPUs', json_globts['CPUs'], fallback=multiprocessing.cpu_count())
self.delete_old_output = \
......@@ -351,6 +356,8 @@ class JobConfig(object):
gp('ac_scale_factor_errors', json_processors['L1C']['ac_scale_factor_errors'])
self.ac_max_ram_gb = \
gp('ac_max_ram_gb', json_processors['L1C']['ac_max_ram_gb'])
self.ac_estimate_accuracy = \
gp('ac_estimate_accuracy', json_processors['L1C']['ac_estimate_accuracy'])
# L2A
self.exec_L2AP = gp('exec_L2AP', [
......@@ -368,6 +375,8 @@ class JobConfig(object):
json_processors['L2B']['write_output'],
json_processors['L2B']['delete_output']])
self.spechomo_method = gp('spechomo_method', json_processors['L2B']['spechomo_method'])
self.spechomo_estimate_accuracy = \
gp('spechomo_estimate_accuracy', json_processors['L2B']['spechomo_estimate_accuracy'])
# L2C
self.exec_L2CP = gp('exec_L2CP', [
......@@ -797,11 +806,10 @@ class GMSValidator(Validator):
def get_options(target, validation=True):
"""
return dictionary will all options
:param validation: True / False, whether to validate options read from files ot not
:param target: if path to file, then json is used to load, otherwise the default template
is used
"""Return dictionary with all options.
:param validation: True / False, whether to validate options read from files or not
:param target: if path to file, then json is used to load, otherwise the default template is used
:return: dictionary with options
"""
......@@ -814,4 +822,4 @@ def get_options(target, validation=True):
return options
else:
raise FileNotFoundError("target: %s is not a valid file path" % target)
raise FileNotFoundError("Options file not found at file path %s." % target)
......@@ -3,7 +3,10 @@
"inmem_serialization": false, /*If "true", all intermediate processing results are kept in memory. This avoids
disk I/O but requires a lot of RAM. Implemented for execution via Flink.*/
"parallelization_level": "scenes", /*"scenes" or "tiles"*/
"db_host": "localhost",
"spatial_index_server_host": "localhost", /*name of the host that runs the spatial index mediator server
NOTE: The host that runs the GeoMultiSens database has to be
CLI frontend or to the set_config function directly!*/
"spatial_index_server_port": 8654, /*"port used for connecting to the spatial index mediator server"*/
"CPUs": "None", /*number of CPU cores to be used for processing (default: "None" -> use all available)*/
"delete_old_output": false, /*whether to delete previously created output of the given job ID*/
"allow_subMultiprocessing": true, /*allow multiprocessing within multiprocessing workers*/
......@@ -94,7 +97,8 @@
                                /*Defines to which area labels SICOR is applied. Full list:
                                ["Clear", "Snow", "Water", "Shadow", "Cirrus", "Cloud"] => whole image is atmospherically corrected*/
"ac_scale_factor_errors": 255,
"ac_max_ram_gb": 20 /*maximum amount of RAM to be allocated for atmospheric correction [gigabytes]*/
"ac_max_ram_gb": 20, /*maximum amount of RAM to be allocated for atmospheric correction [gigabytes]*/
"ac_estimate_accuracy": false /*whether to produce an 'AC errors' and a 'mask confidence' array*/
},
"L2A": { /*Level 2A processing: geometric homogenization*/
......@@ -113,8 +117,10 @@
"run_processor": true,
"write_output": true,
"delete_output": false,
"spechomo_method": "LR" /*Method used for spectral homogenization.
/*LI: Linear interpolation; LR: Linear regression; RR: Ridge regression*/
"spechomo_method": "LR", /*Method used for spectral homogenization.
/*LI: Linear interpolation; LR: Linear regression; RR: Ridge regression*/
"spechomo_estimate_accuracy": false /*whether to produce pixel- and bandwise information about estimation
acurracy of spectral homogenization*/
},
"L2C": {
......
......@@ -5,7 +5,8 @@ gms_schema_input = dict(
schema=dict(
inmem_serialization=dict(type='boolean', required=False),
parallelization_level=dict(type='string', required=False, allowed=['scenes', 'tiles']),
db_host=dict(type='string', required=False),
spatial_index_server_host=dict(type='string', required=False),
spatial_index_server_port=dict(type='integer', required=False),
CPUs=dict(type='integer', required=False, nullable=True),
delete_old_output=dict(type='boolean', required=False),
allow_subMultiprocessing=dict(type='boolean', required=False),
......@@ -95,6 +96,7 @@ gms_schema_input = dict(
"Clear", "Snow", "Water", "Shadow", "Cirrus", "Cloud"])),
ac_scale_factor_errors=dict(type='integer', required=False),
ac_max_ram_gb=dict(type='integer', required=False),
ac_estimate_accuracy=dict(type='boolean', required=False),
)),
L2A=dict(type='dict', required=False, schema=dict(
run_processor=dict(type='boolean', required=False),
......@@ -111,7 +113,8 @@ gms_schema_input = dict(
run_processor=dict(type='boolean', required=False),
write_output=dict(type='boolean', required=False),
delete_output=dict(type='boolean', required=False),
spechomo_method=dict(type='string', required=False, allowed=['LI', 'LR', 'RR'])
spechomo_method=dict(type='string', required=False, allowed=['LI', 'LR', 'RR']),
spechomo_estimate_accuracy=dict(type='boolean', required=False),
)),
L2C=dict(type='dict', required=False, schema=dict(
run_processor=dict(type='boolean', required=False),
......
......@@ -58,7 +58,7 @@ def imap_unordered(func, args, CPUs=None, flatten_output=False):
CPUs = CPUs if CPUs <= CFG.CPUs else CFG.CPUs # treat CFG.CPUs as maximum number of CPUs
if CPUs and CPUs > 1 and len(args) > 1:
with Pool(CPUs) as pool:
with Pool(CPUs, maxtasksperchild=1) as pool:
results = list(pool.imap_unordered(func, args)) # returns an iterator
else:
results = [func(argset) for argset in args] # generator does not always work properly here
......
......@@ -36,11 +36,11 @@ def L1A_map(dataset_dict): # map (scene-wise parallelization)
L1A_obj.calc_mean_VAA()
L1A_obj.calc_orbit_overpassParams() # requires corner positions
L1A_obj.apply_nodata_mask_to_ObjAttr('mask_clouds', 0)
if CFG.exec_L1AP[1]:
L1A_obj.to_ENVI()
L1A_obj.delete_tempFiles()
else:
L1A_obj.delete_tempFiles()
L1A_obj.delete_tempFiles()
return L1A_obj
......
......@@ -11,6 +11,7 @@ from itertools import chain
import signal
import re
from typing import TYPE_CHECKING
import shutil
from ..io import output_writer as OUT_W
from ..io import input_reader as INP_R
......@@ -395,7 +396,7 @@ class process_controller(object):
self.profiler.stop()
print(self.profiler.output_text(unicode=True, color=True))
shutdown_loggers()
self.shutdown()
except Exception: # noqa E722 # bare except
if self.config.profiling:
......@@ -407,10 +408,10 @@ class process_controller(object):
if not self.config.disable_exception_handler:
self.logger.error('Execution failed with an error:', exc_info=True)
shutdown_loggers()
self.shutdown()
else:
self.logger.error('Execution failed with an error:')
shutdown_loggers()
self.shutdown()
raise
def run_all_processors(self, custom_data_list=None):
......@@ -476,7 +477,7 @@ class process_controller(object):
self.profiler.stop()
print(self.profiler.output_text(unicode=True, color=True))
shutdown_loggers()
self.shutdown()
except Exception: # noqa E722 # bare except
if self.config.profiling:
......@@ -488,10 +489,10 @@ class process_controller(object):
if not self.config.disable_exception_handler:
self.logger.error('Execution failed with an error:', exc_info=True)
shutdown_loggers()
self.shutdown()
else:
self.logger.error('Execution failed with an error:')
shutdown_loggers()
self.shutdown()
raise
def stop(self, signum, frame):
......@@ -500,11 +501,21 @@ class process_controller(object):
self.config.status = 'canceled'
self.update_DB_job_record()
self.shutdown()
self.logger.warning('Process controller stopped by user.')
raise KeyboardInterrupt # terminate execution and show traceback
def shutdown(self):
"""Shutdown the process controller instance (loggers, remove temporary directories, ...)."""
del self.logger
shutdown_loggers()
raise KeyboardInterrupt # terminate execution and show traceback
# clear any temporary files
tempdir = os.path.join(self.config.path_tempdir + 'GeoMultiSens_*')
self.logger.warning('Deleting temporary directory %s.' % tempdir)
shutil.rmtree(tempdir)
def benchmark(self):
"""
......@@ -719,7 +730,7 @@ class process_controller(object):
{'failed_sceneids': sceneids_failed, # update 'failed_sceneids' column
'finishtime': self.config.end_time, # add job finish timestamp
'status': self.config.status}, # update 'job_status' column
{'id': self.config.ID})
{'id': self.config.ID}, timeout=30000)
def update_DB_job_statistics(self, usecase_datalist):
"""
......
......@@ -110,8 +110,8 @@ class BaseTestCases:
dss = self.PC.add_local_availability([GMS_object_2_dataset_dict(obj) for obj in GMS_objs])
for ds in dss:
self.assertEqual(ds['proc_level'], tgt_procL,
msg='Written %s dataset %s %s %s is not found by PC.add_local_availability.'
% (ds['proc_level'], ds['satellite'], ds['sensor'], ds['subsystem']))
msg='Written %s dataset cannot be found by PC.add_local_availability().'
% (' '.join([ds['satellite'], ds['sensor'], ds['subsystem'], tgt_procL])))
def test_L1A_processing(self):
self.L1A_newObjects = self.PC.L1A_processing()
......@@ -387,7 +387,7 @@ class Test_ProcessContinuing_CompletePipeline(unittest.TestCase):
[cls.validate_db_entry(ds['filename']) for ds in cls.PC.config.data_list]
def setUp(self):
self.cfg_kw = job_config_kwargs
self.cfg_kw = job_config_kwargs.copy() # copy, because job_config_kwargs is modified otherwise
self.cfg_kw.update(dict(
reset_status=True,
exec_L1BP=[False, False, False],
......
......@@ -201,7 +201,9 @@ class Test_SpectralHomogenizer(unittest.TestCase):
src_satellite='Landsat-8', src_sensor='OLI_TIRS',
src_LBA=['1', '2', '3', '4', '5', '6', '7'],
tgt_satellite='Sentinel-2A', tgt_sensor='MSI',
tgt_LBA=['1', '2', '3', '4', '5', '6', '7', '8', '8A', '9', '10', '11', '12'])
tgt_LBA=['1', '2', '3', '4', '5', '6', '7', '8', '8A', '9', '10', '11', '12'],
compute_errors=True
)
self.assertIsInstance(predarr, GeoArray)
self.assertEqual(predarr.shape, (50, 50, 13))
......@@ -209,7 +211,7 @@ class Test_SpectralHomogenizer(unittest.TestCase):
self.assertIsInstance(errors, np.ndarray)
self.assertEqual(errors.shape, (50, 50, 13))
# TODO add dtype assertion
self.assertEqual(errors.dtype, np.int16)
@unittest.SkipTest # Ridge regression classifiers have not yet been created
def test_predict_by_machine_learner__RR_L8_S2(self):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment