Commit 5abead96 authored by Daniel Scheffler

L1B_P.COREG now supports direct warping from arrays instead of reading inputs from disk

-> almost no physical tempfiles are needed anymore
- revised code structure
- the GDAL variant of get_image_windows_to_match() is now deprecated and has been replaced by the rasterio variant
- COREG: added non-working code of an automatic getter for matching spectral bands
- added a function to clean up tempfiles
- GEOP.warp_ndarray: fixed wrong output when a numpy view is passed as warping input instead of an independent array
- L1A_P: minor code improvements
- INP_R: fixed a bug that caused wrong output of read_ENVIfile()
- HLP_F: added a function for subprocess calls including proper returning of error messages
- PG: fixed a bug in PG.__init__()
- config: added database connection and statement timeouts
- PC: added code for reading L1A cubes as input for L1B_P
parent 0037b20a
......@@ -28,6 +28,7 @@ import subprocess
import builtins
import time
import rasterio
import warnings
from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING
......@@ -3126,7 +3127,7 @@ def ndarray2gdal(ndarray, outPath=None, importFile=None, direction=1, GDAL_Type=
elif geotransform is not None and projection is not None:
outDs.SetGeoTransform(geotransform)
outDs.SetProjection(projection)
if GDAL_Type != 'MEM':
if GDAL_Type == 'MEM': # FIXME changed from GDAL_Type != 'MEM' (made no sense!) -> check whether L1A_P still runs
outDs = None
else:
return outDs
......@@ -3629,6 +3630,10 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
:return out_gt: warped gdal GeoTransform
:return out_prj: warped projection as WKT string
"""
if not ndarray.flags['OWNDATA']:
temp = np.empty_like(ndarray)
temp[:] = ndarray
ndarray = temp # deep copy: converts view to its own array
with rasterio.drivers():
if out_gt is None:
......@@ -3652,7 +3657,7 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
else:
rows,cols = ndarray.shape if outRowsCols is None else outRowsCols
out_dtype = ndarray.dtype if out_dtype is None else out_dtype
out_dtype = ndarray.dtype if out_dtype is None else out_dtype
gt2bounds = lambda gt,r,c: [gt[0], gt[3] + r*gt[5], gt[0] + c*gt[1], gt[3]] # left, bottom, right, top
#get dst_transform
......@@ -3702,11 +3707,12 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
if len(ndarray.shape)==3 else np.zeros((out_rows,out_cols), out_dtype)
# FIXME direct passing of src_transform and dst_transform results in a wrong output image. Maybe a rasterio-bug?
# FIXME indirect passing causes Future warning
#rio_reproject(ndarray, out_arr, src_transform=src_transform, src_crs=src_crs, dst_transform=dst_transform,
# dst_crs=dst_crs, resampling=dict_rspInt_rspAlg[rsp_alg])
rio_reproject(ndarray, out_arr,
# FIXME indirect passing causes Future warning
with warnings.catch_warnings():
warnings.simplefilter('ignore') # FIXME suppresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0.
rio_reproject(ndarray, out_arr,
src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)
......
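The copy guard added to warp_ndarray above exists because a sliced numpy view does not own its data and, per the commit message, produced wrong warp output. A minimal numpy-only sketch of the distinction (illustrative only, not part of the diff):

```python
# Illustrative only: why warp_ndarray now deep-copies views before warping.
import numpy as np

full = np.arange(20, dtype=np.float32).reshape(4, 5)
view = full[1:3, 1:4]               # slicing yields a view that does not own its memory
print(view.flags['OWNDATA'])        # False -> strides refer into the parent buffer

copy = np.empty_like(view)          # same guard as in warp_ndarray:
copy[:] = view                      # allocate and deep-copy into an independent array
print(copy.flags['OWNDATA'])        # True -> safe to hand to the reprojection
```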
......@@ -100,7 +100,6 @@ def get_data_list_of_current_jobID(): # called in webapp mode
'''OrderedDict([('datasetid', 104), ('image_type', 'RSD'), ('satellite', 'Landsat-8'), ('sensor', 'OLI_TIRS'),
('subsystem', ''), ('acquisition_date', datetime.datetime(2013, 7, 3, 5, 48, 32)),
('entityid', 'LC81510322013184LGN00'), ('filename', 'LC81510322013184LGN00.tar.gz'), ('sensormode', 'M')])'''
return data_list
def LandsatID2dataset(ID_list):
......
......@@ -200,11 +200,13 @@ class L1A_object(object):
self.arr_shape = tuple_GMS_subset[1][0]
self.arr_pos = tuple_GMS_subset[1][1]
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, self.job_CPUs,append=1)
#PG_obj = PG.path_generator(self.__dict__)
#get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0]+'.hdr'
#self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()), self.arr_shape, self.arr_pos,self.logger)
#if self.arr_pos is None: self.logger.info('Reading file: %s' % (self.baseN))
#else: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
include_arrays = 0
if include_arrays:
PG_obj = PG.path_generator(self.__dict__) # FIXME
get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0]+'.hdr' # FIXME
self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()), self.arr_shape, self.arr_pos,self.logger) # FIXME
if self.arr_pos is None: self.logger.info('Reading file: %s' %self.baseN) # FIXME
else: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos)) # FIXME
self.GMS_identifier = collections.OrderedDict({'image_type':self.image_type, 'Satellite':self.satellite,
'Sensor':self.sensor,'Subsystem': self.subsystem,
'logger':self.logger})
......@@ -1012,7 +1014,7 @@ class L1A_object(object):
for i in range(2): # delete empty folders: subsystem > sensor > Rootname
deldir = self.ExtractedFolder if i == 0 else pardir
pardir = os.path.abspath(os.path.join(deldir,os.path.pardir))
if glob.glob('%s/*' %pardir) == []:
if not glob.glob('%s/*' % pardir):
os.rmdir(pardir)
else:
break
......@@ -1044,32 +1046,21 @@ def merge_L1A_tiles_to_L1A_obj(list_L1A_tiles):
if not callable(getattr(list_L1A_tiles[0],i)) and not isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
L1A_obj.arr_shape = 'cube'
L1A_obj.arr_pos = None
list_ndarrays = [i for i in list_L1A_tiles[0].__dict__ if not callable(getattr(list_L1A_tiles[0],i)) and \
isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
if list_ndarrays!=[]:
L1A_obj = numba_array_merger(L1A_obj,list_ndarrays, list_L1A_tiles)
# path_arr = PG.path_generator()
#assert list_ndarrays!=[], \
# 'Cannot merge L1A tiles because the given L1A objects do not have any numpy array attributes."'
# for ndarray in list_ndarrays:
# target_shape = L1A_obj.shape_fullArr[:2]+[getattr(list_L1A_tiles[0],ndarray).shape[2]] \
# if len(getattr(list_L1A_tiles[0],ndarray).shape) == 3 else L1A_obj.shape_fullArr[:2]
# setattr(L1A_obj, ndarray, np.empty(target_shape, dtype=getattr(list_L1A_tiles[0],ndarray).dtype))
# for idx,tile in enumerate(list_L1A_tiles):
# rowStart,rowEnd = tile.arr_pos[0]
# colStart,colEnd = tile.arr_pos[1]
# getattr(L1A_obj, ndarray)[rowStart:rowEnd+1, colStart:colEnd+1]=getattr(tile,ndarray)
list_arraynames = [i for i in list_L1A_tiles[0].__dict__ if not callable(getattr(list_L1A_tiles[0],i)) and \
isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
if list_arraynames:
L1A_obj = numba_array_merger(L1A_obj,list_arraynames, list_L1A_tiles)
return L1A_obj
@autojit
def numba_array_merger(L1A_obj, list_arraynames, list_L1A_tiles):
for arrname in list_arraynames:
is_3d = len(getattr(list_L1A_tiles[0],arrname).shape) == 3
bands = [getattr(list_L1A_tiles[0],arrname).shape[2]] if is_3d else [] # dynamic -> works for arr, cld_arr,...
array = getattr(list_L1A_tiles[0],arrname)
is_3d = len(array.shape) == 3
bands = [array.shape[2]] if is_3d else [] # dynamic -> works for arr, cld_arr,...
target_shape = L1A_obj.shape_fullArr[:2]+bands
target_dtype = getattr(list_L1A_tiles[0],arrname).dtype
target_dtype = array.dtype
setattr(L1A_obj, arrname, np.empty(target_shape, dtype=target_dtype))
for idx,tile in enumerate(list_L1A_tiles):
rowStart,rowEnd = tile.arr_pos[0]
......
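numba_array_merger above preallocates a full-scene array per attribute and writes every tile into it via its arr_pos row/column bounds. A simplified numpy sketch of that merge pattern (the 'data' and 'arr_pos' attributes are placeholders, not the GMS object layout):

```python
# Simplified tile-merge sketch; Tile is a stand-in for the L1A tile objects above.
import numpy as np
from collections import namedtuple

def merge_tiles(tiles, full_rows, full_cols):
    bands = tiles[0].data.shape[2:]                      # () for 2D masks, (n_bands,) for 3D cubes
    out = np.empty((full_rows, full_cols) + bands, dtype=tiles[0].data.dtype)
    for tile in tiles:
        (r0, r1), (c0, c1) = tile.arr_pos                # inclusive row/col bounds, as used above
        out[r0:r1 + 1, c0:c1 + 1] = tile.data
    return out

Tile = namedtuple('Tile', ['data', 'arr_pos'])
tiles = [Tile(np.ones((2, 5)), ((0, 1), (0, 4))),
         Tile(np.zeros((2, 5)), ((2, 3), (0, 4)))]
merged = merge_tiles(tiles, full_rows=4, full_cols=5)    # 4x5 array: top half ones, bottom half zeros
```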
......@@ -81,7 +81,8 @@ class job:
# path_archive = '/srv/gms2/scheffler/GeoMultiSens/database/sampledata/'
path_archive = joinP(path_fileserver, 'database/sampledata/')
elif GMS_call_type == 'webapp':
conn_database = "dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms'"
conn_database = "dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms' connect_timeout=3 options=" \
"'-c statement_timeout=1000'"
conn_db_meta = conn_database
path_fileserver = query(conn_db_meta,'path_data_root')
path_procdata = joinP(path_fileserver, query(conn_db_meta,'foldername_procdata'))
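The webapp connection string above now carries a 3-second connect timeout and a 1000 ms server-side statement timeout. A hedged psycopg2 sketch of what those two settings do (the query is a placeholder):

```python
# Sketch of the new timeout behaviour; the DSN mirrors the one added above.
import psycopg2

dsn = ("dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms' "
       "connect_timeout=3 options='-c statement_timeout=1000'")

conn = psycopg2.connect(dsn)              # gives up after ~3 s if the host does not answer
with conn.cursor() as cur:
    cur.execute("SELECT pg_sleep(2)")     # server cancels the statement after 1000 ms -> psycopg2 raises an error
conn.close()
```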
......@@ -135,7 +136,7 @@ class usecase:
sort_bands_by_cwl = int(query(job.conn_db_meta,'sort_bands_by_cwl'))
conversion_type_optical = query(job.conn_db_meta,'conversion_type_optical')
conversion_type_thermal = query(job.conn_db_meta,'conversion_type_thermal')
align_coord_grids = 1 # FIXME: may become pointless later, since joint evaluation of multi-sensor data incl.
align_coord_grids = 0 # FIXME: may become pointless later, since joint evaluation of multi-sensor data incl.
# FIXME: time series requires grid alignment
userInp_target_gsd = 30 # [meters], overridden if match_gsd==True
match_gsd = True
......
......@@ -67,8 +67,8 @@ class out_object(object):
def ENVI2object(tuple_GMS_arrshp_arrpos):
return out_object(tuple_GMS_arrshp_arrpos)
def read_ENVIfile(hdr_path,arr_shape,arr_pos,logger=None):
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=True)
def read_ENVIfile(hdr_path,arr_shape,arr_pos,logger=None, return_meta=False):
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=return_meta)
def read_ENVIhdr_to_dict(hdr_path, logger=None):
if not os.path.isfile(hdr_path):
......
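read_ENVIfile() above gains a return_meta keyword, so callers that only need the image data no longer receive metadata by accident. A hypothetical call pattern (the path is a placeholder, and the tuple return for return_meta=True is an assumption based on the wrapped function):

```python
# Hypothetical usage; '/data/scene.hdr' is a placeholder path.
arr = read_ENVIfile('/data/scene.hdr', arr_shape='cube', arr_pos=None)         # image data only (new default)
arr, md = read_ENVIfile('/data/scene.hdr', arr_shape='cube', arr_pos=None,
                        return_meta=True)                                       # data plus ENVI metadata (assumed tuple)
```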
......@@ -151,7 +151,7 @@ def mask_to_ENVI_Classification(InObj,maskname):
pixelVals_in_mask = list(np.unique(mask_array[InObj.mask_1bit == 1]))
pixelVals_expected = sorted(list(cd.values()))
pixelVals_unexpected = [i for i in pixelVals_in_mask if i not in pixelVals_expected]
if pixelVals_unexpected != []:
if pixelVals_unexpected:
InObj.logger.warning('The cloud mask contains unexpected pixel values: %s ' \
% ', '.join(str(i) for i in pixelVals_unexpected))
mask_md['classes'] = len(pixelVals_in_mask)+1 # 1 class for no data pixels
......
......@@ -100,7 +100,7 @@ def data_DB_updater(obj_dict):
cursor.execute('''CREATE TABLE IF NOT EXISTS processed_data (%s)''' %', '.join(fullColumnList))
currentColumnList = [i[1] for i in cursor.execute("PRAGMA table_info('processed_data')").fetchall()]
missingColumns = [col for col in fullColumnList if col not in currentColumnList]
if missingColumns != []: # automatic adding of missing columns
if missingColumns: # automatic adding of missing columns
cursor.execute('''CREATE TABLE IF NOT EXISTS processed_data_temp (%s)''' %', '.join(fullColumnList))
cursor.execute("SELECT "+','.join(currentColumnList)+" FROM processed_data")
[cursor.execute("INSERT INTO processed_data_temp (%(cols)s) VALUES (%(vals)s)" %{'cols':','.join(
......
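The data_DB_updater hunk above detects columns missing from the SQLite table via PRAGMA table_info and then rebuilds the table through a temporary copy. A simplified sketch of the same goal that uses ALTER TABLE ... ADD COLUMN instead of the temp-table rebuild (table and column names are illustrative):

```python
# Simplified alternative to the temp-table rebuild above; names are illustrative.
import sqlite3

def ensure_columns(db_path, table, full_columns):
    conn = sqlite3.connect(db_path)
    cur = conn.cursor()
    cur.execute("CREATE TABLE IF NOT EXISTS %s (%s)" % (table, ', '.join(full_columns)))
    current = [row[1] for row in cur.execute("PRAGMA table_info('%s')" % table).fetchall()]
    for col in [c for c in full_columns if c not in current]:   # same "missing columns" test as above
        cur.execute("ALTER TABLE %s ADD COLUMN %s" % (table, col))
    conn.commit()
    conn.close()
```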
......@@ -18,10 +18,12 @@ import zipfile
import fnmatch
import psycopg2
import builtins
import shlex
from shapely.geometry import Polygon
from multiprocessing import sharedctypes
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
from subprocess import Popen, PIPE
import algorithms.gms_cloud_classifier as CLD_P # Cloud Processor
import misc.database_tools as DB_T
......@@ -74,6 +76,15 @@ def setup_logger(name_logfile, path_logfile,CPUs,append=1):
# logger.addHandler(consoleHandler)
return logger
def subcall_with_output(cmd):
"""Execute external command and get its stdout, exitcode and stderr.
:param cmd: a normal shell command including parameters
"""
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
exitcode = proc.returncode
return out, exitcode, err
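A quick usage sketch for the new subcall_with_output() helper; the command is a placeholder, and the streams come back as bytes because Popen is used without text mode:

```python
# Hypothetical call; any external command works here.
out, exitcode, err = subcall_with_output('gdalinfo --version')
if exitcode != 0:
    raise RuntimeError(err.decode())      # stderr is bytes -> decode before raising/logging
print(out.decode().strip())
```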
def sorted_nicely(iterable):
""" Sort the given iterable in the way that humans expect.
:param iterable:
......
......@@ -19,8 +19,10 @@ class path_generator(object):
if 'scene_ID' in kwargs:
from misc.database_tools import get_scene_and_dataset_infos_from_postgreSQLdb
args = [get_scene_and_dataset_infos_from_postgreSQLdb(kwargs['scene_ID'])] # return [dict]
isdict = 1 if len(args) == 1 and type(args[0] in [dict,collections.OrderedDict]) else 0
isdict = len(args) == 1 and type(args[0] in [dict,collections.OrderedDict])
args = args[0] if isdict else args
isdict = type(args in [dict,collections.OrderedDict])
self.proc_level = args['proc_level'] if isdict else args[0]
self.image_type = args['image_type'] if isdict else args[1]
self.satellite = args['satellite'] if isdict else args[2]
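Note that `type(args[0] in [dict,collections.OrderedDict])` above evaluates the type of a boolean and is therefore always truthy; the test presumably intends an isinstance check. A sketch of that presumed intent (an assumption, not the committed code):

```python
# Presumed intent of the isdict check; this is an assumption, not what the commit contains.
import collections

def unwrap_args(args):
    isdict = len(args) == 1 and isinstance(args[0], (dict, collections.OrderedDict))
    return (args[0] if isdict else args), isdict
```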
......@@ -29,7 +31,7 @@ class path_generator(object):
self.AcqDate = args['acquisition_date'] if isdict else args[5]
self.entity_ID = args['entity_ID'] if isdict else args[6]
self.logger = args['logger'] if isdict and 'logger' in args \
else args[7] if len(args)==8 else None
else args[7] if not isdict and len(args)==8 else None
def get_path_procdata(self):
return os.path.join(job.path_procdata, self.satellite, self.sensor, self.AcqDate.strftime('%Y-%m-%d'),
......@@ -76,6 +78,7 @@ def get_tempfile(ext=None,prefix=None,tgt_dir=None):
:param prefix: optional file prefix
:param tgt_dir: target directory (automatically set if None)
"""
prefix = 'GeoMultiSens__' if prefix is None else prefix
fd, path = tempfile.mkstemp(prefix=prefix,suffix=ext,dir=tgt_dir)
os.close(fd)
return path
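get_tempfile() above now falls back to a 'GeoMultiSens__' prefix when none is given; a hypothetical usage (extension and prefix are arbitrary examples):

```python
tmp_default = get_tempfile(ext='.tif')                 # e.g. /tmp/GeoMultiSens__XXXXXXXX.tif
tmp_custom  = get_tempfile(ext='.hdr', prefix='L1B_')  # an explicit prefix overrides the default
```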
......
......@@ -24,7 +24,7 @@ print('#########################################################################
called_from_iPyNb = 1 if 'ipykernel/__main__.py' in sys.argv[0] else 0
# check if process_controller is executed by debugger
isdebugging = 1 if True in [frame[1].endswith("pydevd.py") for frame in inspect.stack()] else False
isdebugging = 1 # FIXME
builtins.GMS_call_type = 'console' if len(sys.argv) < 2 or called_from_iPyNb else 'webapp'
builtins.GMS_process_ID = datetime.datetime.now().strftime("%Y%m%d__%Hh%Mm%Ss") if len(sys.argv) < 2\
or called_from_iPyNb else int(sys.argv[1])
......@@ -282,46 +282,58 @@ def run_processController_in_multiprocessing(usecase_data_list):
if job.exec__L1BP[0]:
solution = 1
if solution == 1:
if L1A_newObjects:
"""if newly processed L1A objects are present: cut them into tiles"""
tuple__newL1Aobjects__blocksize = [[obj, [1000,1000]] for obj in L1A_newObjects]
L1A_newObjects = pool.map(L1A_P.cut_L1A_obj_into_blocks, tuple__newL1Aobjects__blocksize) # returns [[obj,obj,ob]]
L1A_newObjects = L1A_newObjects[0] if isinstance(L1A_newObjects[0],list) else L1A_newObjects
"""prepare earlier processed L1A data for further processing"""
GMSfile_list_L1A_inDB = INP_R.get_list_GMSfiles([datalist_inDB, 'L1A'])
get_tilepos_list = lambda GMSfile: HLP_F.get_image_tileborders('block', [500,500],
shape_fullArr=INP_R.GMSfile2dict(GMSfile)['shape_fullArr']) # """defines tile positions and size"""
# GMSfile_tilepos_list_L1A_inDB = [ [GMSfile,['block',tp]] for GMSfile in GMSfile_list_L1A_inDB \
# for i,tp in enumerate(get_tilepos_list(GMSfile)) if i<100]
GMSfile_tilepos_list_L1A_inDB = [ [GMSfile,['block',tp]] for GMSfile in GMSfile_list_L1A_inDB \
for i,tp in enumerate(get_tilepos_list(GMSfile))]
if GMSfile_tilepos_list_L1A_inDB:
""" create L1A objects from processed and saved L1A data """
# Instancing a L1A object with argument L0B_object set to 'None' creates an empty L1A object that is
# filled with the data from disk.
L1A_DBObjects = pool.imap(L1A_P.L1A_object(None).fill_from_disk, GMSfile_tilepos_list_L1A_inDB)
run_tilewise = 0
if run_tilewise:
if L1A_newObjects:
"""if newly processed L1A objects are present: cut them into tiles"""
tuple__newL1Aobjects__blocksize = [[obj, [1000,1000]] for obj in L1A_newObjects]
L1A_newObjects = pool.map(L1A_P.cut_L1A_obj_into_blocks, tuple__newL1Aobjects__blocksize) # returns [[obj,obj,ob]]
L1A_newObjects = L1A_newObjects[0] if isinstance(L1A_newObjects[0],list) else L1A_newObjects
"""prepare earlier processed L1A data for further processing"""
GMSfile_list_L1A_inDB = INP_R.get_list_GMSfiles([datalist_inDB, 'L1A'])
get_tilepos_list = lambda GMSfile: HLP_F.get_image_tileborders('block', [500,500],
shape_fullArr=INP_R.GMSfile2dict(GMSfile)['shape_fullArr']) # """defines tile positions and size"""
# GMSfile_tilepos_list_L1A_inDB = [ [GMSfile,['block',tp]] for GMSfile in GMSfile_list_L1A_inDB \
# for i,tp in enumerate(get_tilepos_list(GMSfile)) if i<100]
GMSfile_tilepos_list_L1A_inDB = [ [GMSfile,['block',tp]] for GMSfile in GMSfile_list_L1A_inDB \
for i,tp in enumerate(get_tilepos_list(GMSfile))]
if GMSfile_tilepos_list_L1A_inDB:
""" create L1A objects from processed and saved L1A data """
# Instancing a L1A object with argument L0B_object set to 'None' creates an empty L1A object that is
# filled with the data from disk.
L1A_DBObjects = pool.imap(L1A_P.L1A_object(None).fill_from_disk, GMSfile_tilepos_list_L1A_inDB)
L1A_DBObjects = list(L1A_DBObjects)
# L1A_DBObjects = L1A_DBObjects[0] if isinstance(L1A_DBObjects[0],list) else L1A_DBObjects
else:
L1A_DBObjects = []
"""combine newly and earlier processed L1A data"""
L1A_Instances = L1A_newObjects + L1A_DBObjects
#L1A_Instances = L1A_newObjects
#L1A_Instances = L1A_DBObjects
grouped_L1A_Tiles = HLP_F.group_objects_by_attribute(L1A_Instances,'baseN')
else: # run on full cubes
GMSfile_list_L1A_inDB = INP_R.get_list_GMSfiles([datalist_inDB, 'L1A'])
work = [[GMS,['cube',None]] for GMS in GMSfile_list_L1A_inDB]
L1A_DBObjects = pool.imap(L1A_P.L1A_object(None).fill_from_disk, work)
L1A_DBObjects = list(L1A_DBObjects)
# L1A_DBObjects = L1A_DBObjects[0] if isinstance(L1A_DBObjects[0],list) else L1A_DBObjects
else:
L1A_DBObjects = []
"""combine newly and earlier processed L1A data"""
L1A_Instances = L1A_newObjects + L1A_DBObjects
#L1A_Instances = L1A_newObjects
#L1A_Instances = L1A_DBObjects
grouped_L1A_Tiles = HLP_F.group_objects_by_attribute(L1A_Instances,'baseN')
L1A_Instances = L1A_newObjects + L1A_DBObjects
"""process all L1A data to fullfill the requirements of L1B processing"""
# processed_grouped_L1A_Tiles = []
for scene_tilelist in grouped_L1A_Tiles: # scene-wise processing (grouped by baseN)
L1A_obj = L1A_P.merge_L1A_tiles_to_L1A_obj(scene_tilelist)
#for scene_tilelist in grouped_L1A_Tiles: # scene-wise processing (grouped by baseN)
#L1A_obj = L1A_P.merge_L1A_tiles_to_L1A_obj(scene_tilelist)
for L1A_obj in L1A_Instances: # scene-wise processing (grouped by baseN)
"""L1A_obj enthält KEINE ARRAY-DATEN!, nur die für die ganze Szene gültigen Metadaten"""
if not HLP_F.proc_level_already_present(L1A_obj.proc_level,'L1B') and not L1A_obj.georef:
L1A_obj = scene_tilelist[0]
#L1A_obj = scene_tilelist[0]
#[print(i) for i in scene_tilelist[0].meta]
"""1. calculate shifts"""
......@@ -335,23 +347,28 @@ def run_processController_in_multiprocessing(usecase_data_list):
del L1A_obj
"""3. perform deshifting"""
#GDAL variante
deshift_configs = L1B_P.get_DESHIFTER_configs(L1B_obj.__dict__.copy(),
['arr','masks'])
DESHIFT_instances = [L1B_P.DESHIFTER(obj,attr,**kwargs) for obj,attr,kwargs in deshift_configs]
deshift_results = []
for inst in DESHIFT_instances:
deshift_results.append(inst.correct_shifts())
#deshift_configs = L1B_P.get_DESHIFTER_configs(L1B_obj.__dict__.copy(),
# ['arr','masks'],proc_bandwise=True)
#DESHIFT_instances = [L1B_P.DESHIFTER(obj,attr,**kwargs) for obj,attr,kwargs in deshift_configs]
#deshift_results = pool.map(L1B_P.DESHIFTER.correct_shifts,DESHIFT_instances)
var='RASTERIO'
#var='GDAL' # FIXME
if var=='GDAL': # fast in singleprocessing
deshift_configs = L1B_P.get_DESHIFTER_configs(L1B_obj.__dict__.copy(),
['arr','masks'])
DESHIFT_instances = [L1B_P.DESHIFTER(obj,attr,**kwargs) for obj,attr,kwargs in deshift_configs]
deshift_results = []
for inst in DESHIFT_instances:
deshift_results.append(inst.correct_shifts())
elif var=='RASTERIO': # fast in multiprocessing
deshift_configs = L1B_P.get_DESHIFTER_configs(L1B_obj.__dict__.copy(),
['arr','masks'],proc_bandwise=True)
DESHIFT_instances = [L1B_P.DESHIFTER(obj,attr,**kwargs) for obj,attr,kwargs in deshift_configs]
deshift_results = pool.map(L1B_P.DESHIFTER.correct_shifts,DESHIFT_instances)
else:
raise Exception
grouped_deshift_results = HLP_F.group_dicts_by_key(deshift_results,'attrname')
[L1B_obj.apply_deshift_results(deshift_results) for deshift_results in grouped_deshift_results]
"""write L1B"""
OUT_W.Obj2ENVI(L1B_obj,1)
L1B_P.delete_tempFiles()
else:
print('%s skipped in L1B_P' %L1A_obj.baseN)
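The var switch above runs the GDAL deshifting variant sequentially (fast in single processing) and maps the rasterio variant across the worker pool (fast in multiprocessing). A minimal, self-contained sketch of that dispatch pattern; Worker and do_work are placeholders, not the GMS DESHIFTER API:

```python
# Dispatch sketch only; Worker/do_work stand in for DESHIFTER/correct_shifts.
from multiprocessing import Pool

class Worker(object):
    def __init__(self, payload):
        self.payload = payload

    def do_work(self):
        return self.payload * 2

if __name__ == '__main__':
    instances = [Worker(i) for i in range(4)]
    run_parallel = True
    if run_parallel:                               # rasterio-style branch: picklable instances -> pool.map
        with Pool(2) as pool:
            results = pool.map(Worker.do_work, instances)
    else:                                          # GDAL-style branch: sequential loop in the parent process
        results = [inst.do_work() for inst in instances]
    print(results)                                 # [0, 2, 4, 6]
```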
......@@ -553,7 +570,7 @@ def apply_L1A_funcs_to_tiles(L1A_obj):
L1A_obj.RAA_arr = L1A_obj.calc_RAA_array(subset)['data']
return L1A_obj
try:
try:
# L0A-P
# parse cli arguments
if GMS_call_type == 'console' :
......@@ -562,7 +579,6 @@ try:
else: # webapp
usecase.data_list = L0A_P.get_data_list_of_current_jobID()
# [print(i) for i in usecase.data_list]
if not job.benchmark_global:
if job.CPUs == 1:
run_processController_in_singleprocessing(usecase.data_list)
......