Commit be56836c authored by Daniel Scheffler
Browse files

Added IO locks for array reader and writer.

parent 00c74adf
......@@ -24,6 +24,7 @@ from . import geoprocessing as GEOP
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene
from ..misc.locks import Lock
from ..model.gms_object import GMS_object
from ..model import metadata as META
......@@ -175,11 +176,12 @@ class L1A_object(GMS_object):
rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger, subset=subset)
# perform layer stack
with Lock('IO', allowed_threads=1, logger=self.logger): # FIXME hardcoded
if CFG.inmem_serialization and path_output is None: # numpy array output
self.arr = rasObj.Layerstacking(paths_files2stack)
self.path_InFilePreprocessor = paths_files2stack[0]
else: # 'MEMORY' or physical output
rasObj.Layerstacking(paths_files2stack, path_output=path_output) # this writes an output (gdal_merge)
rasObj.Layerstacking(paths_files2stack, path_output=path_output) # writes an output (gdal_merge)
self.arr = path_output
else:
......@@ -194,6 +196,8 @@ class L1A_object(GMS_object):
subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)
# read a single file
with Lock('IO', allowed_threads=1, logger=self.logger): # FIXME hardcoded
if CFG.inmem_serialization and path_output is None: # numpy array output
self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
gdalnumeric.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols, rasObj.rows)
......
......@@ -15,6 +15,7 @@ from ecmwfapi.api import APIKeyFetchError, get_apikey_values
from ..options.config import GMS_config as CFG
from .spatial_index_mediator import SpatialIndexMediatorServer, Connection
from .exceptions import GMSEnvironmentError, MissingNonPipLibraryWarning
from ..misc.locks import redis_conn
__author__ = 'Daniel Scheffler'
......@@ -47,6 +48,11 @@ class GMSEnvironment(object):
self.logger.warning('Coregistration will be skipped!')
os.environ['GMS_SPAT_IDX_SRV_STATUS'] = 'unavailable'
def _check_redis_server(self):
    """Log a warning if no connection to the local redis server could be established.

    The module-level ``redis_conn`` is ``None`` when the connection probe at import
    time failed; in that case IO locking is unavailable and the user is told how to
    install the server.
    """
    if redis_conn:
        return  # server reachable -> nothing to report

    self.logger.warning("Unable to connect to redis server. Is the server installed and running? For "
                        "installation on Ubuntu, use 'sudo apt install redis-server'.")
def _check_nonpip_packages(self):
"""Check for not pip-installable packages."""
......@@ -84,6 +90,7 @@ class GMSEnvironment(object):
# javaLibs = []
self._check_spatial_index_mediator_server()
self._check_redis_server()
self._check_nonpip_packages()
def check_ports(self):
......
# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'
import time
import redis_lock
import logging
# Probe for a running local redis server at import time; ``redis_conn`` stays ``None``
# when the server is unreachable so that Lock degrades to a no-op (see Lock.__enter__).
try:
    redis_conn = redis_lock.StrictRedis(host='localhost')
    redis_conn.keys()  # raises if the server is not reachable
except Exception:
    # NOTE: deliberately broad — redis-py's ConnectionError does not subclass the
    # builtin ConnectionError (it derives from RedisError), so catching the builtin
    # name here would let the probe failure propagate and break the whole import.
    redis_conn = None
class Lock(redis_lock.Lock):
    """Inter-process lock based on redis_lock that optionally allows multiple concurrent owners.

    With ``allowed_threads > 1`` the lock is split into that many named 'slots'; the
    constructor blocks until one slot is free and then acquires it. If no redis server
    is available (module-level ``redis_conn`` is ``None``), the lock degrades to a no-op
    so callers keep working without inter-process synchronization.
    """

    def __init__(self, name, allowed_threads=1, logger=None, **kwargs):
        """Get an instance of Lock.

        :param name:             base name of the lock
        :param allowed_threads:  number of concurrent owners allowed (number of slots)
        :param logger:           optional logger instance (defaults to a lock-specific logger)
        :param kwargs:           further keyword arguments passed to redis_lock.Lock
        """
        self.conn = redis_conn
        self.allowed_threads = allowed_threads
        self.allowed_slot_names = ['%s, slot #%s' % (name, i) for i in range(1, allowed_threads + 1)]

        if redis_conn:
            if allowed_threads > 1:
                # Poll until one of the allowed slots is free, then acquire it.
                # NOTE(review): check-then-acquire is not atomic — two processes may pick
                # the same 'free' slot; redis_lock then serializes them on that slot name.
                while True:
                    name_free_slot = self.get_free_slot_name()
                    if not name_free_slot:
                        time.sleep(0.2)
                    else:
                        break
                name = name_free_slot

            super().__init__(self.conn, name, **kwargs)

        self.name = name
        self.logger = logger or logging.getLogger("RedisLock: '%s'" % name)

    def get_existing_locks(self):
        """Return the names of all locks currently registered by redis_lock."""
        # Only consider keys created by redis_lock (they carry a 'lock:' prefix);
        # unrelated keys in the same redis database would otherwise raise an
        # IndexError on the split below.
        return [k.decode('utf8').split('lock:', 1)[1]
                for k in self.conn.keys()
                if b'lock:' in k]

    def get_free_slot_name(self):
        """Return the first slot name that is currently not locked, or None if all are taken."""
        free_slots = [sn for sn in self.allowed_slot_names if sn not in self.get_existing_locks()]
        if free_slots:
            return free_slots[0]

    def __enter__(self):
        # no-op if there is no redis connection
        if self.conn:
            super().__enter__()
            self.logger.info("Acquired lock '%s'." % self.name)

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        # no-op if there is no redis connection
        if self.conn:
            super().__exit__(exc_type=exc_type, exc_value=exc_value, traceback=traceback)
            self.logger.info("Released lock '%s'." % self.name)
......@@ -48,6 +48,7 @@ from ..io import input_reader as INP_R
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import definition_dicts as DEF_D
from ..misc.locks import Lock
if TYPE_CHECKING:
from ..algorithms.L1C_P import L1C_object # noqa F401 # flake8 issue
......@@ -1307,6 +1308,7 @@ class GMS_object(Dataset):
# loop through all attributes to write and execute writer #
###########################################################
with Lock('IO', allowed_threads=1, logger=self.logger):
for arrayname in attributes2write:
descriptor = '%s_%s' % (image_type_dict[arrayname], self.proc_level)
......@@ -1315,9 +1317,9 @@ class GMS_object(Dataset):
# initial assertions
assert arrayname in metaAttr_dict, "GMS_object.to_ENVI cannot yet write %s attribute." % arrayname
assert isinstance(arrayval, (GeoArray, np.ndarray)), "Expected a GeoArray instance or a numpy array " \
"for object attribute %s. Got %s." % (
arrayname, type(arrayval))
assert isinstance(arrayval, (GeoArray, np.ndarray)), "Expected a GeoArray instance or a numpy " \
"array for object attribute %s. Got %s." \
% (arrayname, type(arrayval))
outpath_hdr = self.pathGen.get_outPath_hdr(arrayname)
outpath_hdr = os.path.splitext(outpath_hdr)[0] + '__TEMPFILE.hdr' if is_tempfile else outpath_hdr
......@@ -1372,7 +1374,7 @@ class GMS_object(Dataset):
try:
shutil.copy(path_to_array, outpath_arr) # copies file + permissions
except PermissionError:
# prevents permission error if outputfile already exists and is owned by another user
# prevents permission error if outfile already exists and is owned by another user
HLP_F.silentremove(outpath_arr)
shutil.copy(path_to_array, outpath_arr)
......@@ -1465,9 +1467,10 @@ class GMS_object(Dataset):
if not compression or not success:
class_names = meta2write['class names']
class_colors = meta2write['class lookup']
envi.save_classification(outpath_hdr, arr2write, metadata=meta2write, dtype=out_dtype,
interleave=self.outInterleave, ext=self.outInterleave,
force=True, class_names=class_names, class_colors=class_colors)
envi.save_classification(outpath_hdr, arr2write, metadata=meta2write,
dtype=out_dtype, interleave=self.outInterleave,
ext=self.outInterleave, force=True,
class_names=class_names, class_colors=class_colors)
if os.path.exists(outpath_hdr):
OUT_W.reorder_ENVI_header(outpath_hdr, OUT_W.enviHdr_keyOrder)
......
......@@ -17,3 +17,4 @@ cerberus
nested_dict
openpyxl
timeout_decorator
redis_lock
......@@ -15,7 +15,7 @@ requirements = [
'matplotlib', 'numpy', 'scikit-learn', 'scipy', 'gdal', 'pyproj', 'shapely', 'ephem', 'pyorbital', 'dill', 'pytz',
'pandas', 'numba', 'spectral>=0.16', 'geopandas', 'iso8601', 'pyinstrument', 'geoalchemy2', 'sqlalchemy',
'psycopg2', 'py_tools_ds>=0.12.4', 'geoarray>=0.7.1', 'arosics>=0.6.6', 'six', 'tqdm', 'jsmin', 'cerberus',
'nested_dict', 'openpyxl', 'timeout_decorator'
'nested_dict', 'openpyxl', 'timeout_decorator', 'redis_lock'
# spectral<0.16 has some problems with writing signed integer 8bit data
# fmask # conda install -c conda-forge python-fmask
# 'pyhdf', # conda install --yes -c conda-forge pyhdf
......
......@@ -74,6 +74,7 @@ dependencies:
- nested_dict
- openpyxl
- timeout_decorator
- redis_lock
- py_tools_ds>=0.12.4
- geoarray>=0.7.0
- arosics>=0.6.6
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment