Commit dc5605f3 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Updated all package imports and __init__.py for proper importing

HLP_F:
- added MAP() dummy function

PC:
- added job 26185275
parent 0a4f1083
......@@ -7,9 +7,13 @@
#
###############################################################################
from . import algorithms
from . import gms_io
from . import misc
__version__ = '0.1.0'
__all__=['algorithms',
'gms_io',
'misc']
from .algorithms import *
from .gms_io import *
from .testing import *
\ No newline at end of file
__version__ = '20160905.01'
__author__='Daniel Scheffler'
\ No newline at end of file
......@@ -57,8 +57,8 @@ from shapely.geometry import shape
from shapely.geometry import MultiPoint
from numba import jit, autojit
from gms_io import envifilehandling_BD as ef
from misc import helper_functions as HLP_F
from ..gms_io import envifilehandling_BD as ef
from ..misc import helper_functions as HLP_F
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
class GEOPROCESSING(object):
......
......@@ -20,11 +20,11 @@ import glob
import builtins
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from gms_io import Input_reader as INP_R
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
from misc import path_generator as PG
from algorithms.METADATA import get_LayerBandsAssignment
from ..gms_io import Input_reader as INP_R
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from .METADATA import get_LayerBandsAssignment
########################### core functions ####################################
def get_entity_IDs_within_AOI(): # called in console mode
......
......@@ -23,8 +23,8 @@ import re
import builtins
import collections
import misc.helper_functions as HLP_F
import misc.path_generator as PG
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
job = builtins.GMS_config.job # read from builtins (set by process_controller)
########################### core functions ####################################
......
......@@ -38,16 +38,15 @@ import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
from algorithms import METADATA as META
from algorithms import GEOPROCESSING as GEOP
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import py_tools_ah
from gms_io import envifilehandling_BD as ef
from misc import helper_functions as HLP_F
from gms_io import Input_reader as INP_R
from gms_io import Output_writer as OUT_W
from misc import path_generator as PG
from misc import database_tools as DB_T
from . import METADATA as META
from . import GEOPROCESSING as GEOP
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from . import py_tools_ah
from ..misc import helper_functions as HLP_F
from ..gms_io import Input_reader as INP_R
from ..gms_io import Output_writer as OUT_W
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
sys.path.append('./algorithms')
......
......@@ -52,14 +52,14 @@ from geopandas import GeoDataFrame
#except ImportError: print('cv2-lib missing..')
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING as GEOP
from misc import path_generator as PG
from misc import database_tools as DB_T
from algorithms.L1A_P import L1A_object
from misc.SpatialIndexMediator import SpatialIndexMediator
#from algorithms.L2A_P import get_DESHIFTER_configs, DESHIFTER
from ..gms_io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from . import GEOPROCESSING as GEOP
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from .L1A_P import L1A_object
from ..misc.SpatialIndexMediator import SpatialIndexMediator
#from .L2A_P import get_DESHIFTER_configs, DESHIFTER
#sys.path.append('/home/gfz-fe/scheffler/python')
#from CoReg_Sat import COREG
......
......@@ -28,11 +28,11 @@ try:
except ImportError:
import osr
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING as GEOP
from gms_io import Input_reader as INP_R
from misc import path_generator as PG
from algorithms.L1B_P import L1B_object
from ..misc import helper_functions as HLP_F
from . import GEOPROCESSING as GEOP
from ..gms_io import Input_reader as INP_R
from ..misc import path_generator as PG
from .L1B_P import L1B_object
job = builtins.GMS_config.job
......
......@@ -17,10 +17,10 @@ import gdal
import numpy as np
import rasterio
from algorithms import GEOPROCESSING as GEOP
from misc import path_generator as PG
from misc import helper_functions as HLP_F
from algorithms.L1C_P import L1C_object
from . import GEOPROCESSING as GEOP
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from .L1C_P import L1C_object
usecase = builtins.GMS_config.usecase
......
......@@ -11,8 +11,8 @@ import builtins
import numpy as np
from scipy.interpolate import interp1d
from gms_io import Input_reader as INP_R
from algorithms.L2A_P import L2A_object
from ..gms_io import Input_reader as INP_R
from .L2A_P import L2A_object
usecase = builtins.GMS_config.usecase
......
......@@ -10,7 +10,7 @@ __author__='Daniel Scheffler'
shared = {}
res = {}
from algorithms.L2B_P import L2B_object
from .L2B_P import L2B_object
class L2C_object(L2B_object):
def __init__(self, L2B_obj):
......
......@@ -32,10 +32,10 @@ try: from osgeo import osr
except ImportError: import osr
from gms_io import envifilehandling_BD as ef
from algorithms import GEOPROCESSING as GEOP
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
from ..gms_io import envifilehandling_BD as ef
from . import GEOPROCESSING as GEOP
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
......@@ -527,7 +527,7 @@ class METADATA(object):
# Fill missing values
if self.EntityID=='':
self.logger.info('Scene-ID could not be extracted and has to be retrieved from %s metadata database...' %self.Satellite)
import gms_io.Input_reader as INP_R
from ..gms_io import Input_reader as INP_R
if job.call_type == 'console':
DB_T.update_metaDB_if_needed(self.Satellite,self.Sensor,self.Subsystem,self.AcqDate)
tablename = '%s_%s_%s' %(self.Satellite.replace('-',''),self.Sensor.replace('+',''),self.Subsystem) \
......@@ -1408,7 +1408,7 @@ class METADATA(object):
sensorcode = HLP_F.get_GMS_sensorcode(self.get_GMS_identifier())
# ms_pan = ('multi' if self.nBands > 1 else 'pan')
if sensorcode:
from gms_io import Input_reader as INP_R
from ..gms_io import Input_reader as INP_R
sol_irr = INP_R.Solar_Irradiance_reader(wvl_min_nm = 350, wvl_max_nm = 2500)
srf_dict = INP_R.SRF_reader(self.get_GMS_identifier()) # = OrderedDict
irr_bands = []
......@@ -1662,7 +1662,7 @@ def metaDict_to_metaODict(metaDict,logger=None):
:param metaDict: <dict> GMS metadata dictionary
:param logger: <logging.logger> if given, warnings will be logged. Otherwise they are raised.
"""
from gms_io.Output_writer import enviHdr_keyOrder
from ..gms_io.Output_writer import enviHdr_keyOrder
expected_keys = [k for k in enviHdr_keyOrder if k in metaDict]
only_gmsFile_keys = ['ViewingAngle_arrProv','IncidenceAngle_arrProv','projection']
unexpected_keys = [k for k in metaDict.keys() if k not in expected_keys and k not in only_gmsFile_keys]
......
......@@ -7,14 +7,4 @@
#
###############################################################################
#from . import L0A_P
#from . import L0B_P
#from . import L1A_P
#from . import L1B_P
#from . import L1C_P
#from . import L2A_P
#from . import L2B_P
#from . import L2C_P
#from . import L2D_P
#from . import GEOPROCESSING_BD
#from . import METADATA_BD
\ No newline at end of file
__author__='Daniel Scheffler'
\ No newline at end of file
......@@ -23,9 +23,9 @@ import builtins
import warnings
import scipy.interpolate
from algorithms import METADATA as META
from misc import database_tools as DB_T
from misc import path_generator as PG
from ..algorithms import METADATA as META
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
......@@ -189,7 +189,7 @@ def SRF_reader(GMS_identifier):
def pickle_SRF_DB(L1A_Instances):
from misc.helper_functions import setup_logger
from ..misc.helper_functions import setup_logger
list_GMS_identifiers = [i.GMS_identifier for i in L1A_Instances]
out_dict = collections.OrderedDict()
logger = setup_logger('log__SRF2PKL', os.path.join(job.path_testing,'out/log__SRF2PKL.log'),append=0)
......
......@@ -30,10 +30,10 @@ import warnings
import logging
from itertools import chain
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
from misc import path_generator as PG
from gms_io import Input_reader as INP_R
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from . import Input_reader as INP_R
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
......@@ -201,7 +201,7 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea
'interleave':out_interleave,'data type':HLP_F.dtype_lib_Python_IDL[out_dtype]}
out_meta = out_meta if out_meta else {}
out_meta.update(std_meta)
from algorithms.METADATA import metaDict_to_metaODict
from ..algorithms.METADATA import metaDict_to_metaODict
out_meta = metaDict_to_metaODict(out_meta)
if not os.path.exists(oP_hdr) or overwrite:
......@@ -322,7 +322,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
is not overwritten or written once more later, but only renamed."""
envi._write_image = silent_envi_write_image # monkey patch writer function in order to silence output stream
from algorithms.METADATA import metaDict_to_metaODict
from ..algorithms.METADATA import metaDict_to_metaODict
assert str(type(InObj))[1:6] == 'class',\
'Input for Output writer is expected to be class type. Got %s.' %(type(InObj))
......
......@@ -7,5 +7,4 @@
#
###############################################################################
#from . import Input_reader
#from . import Output_writer
\ No newline at end of file
__author__='Daniel Scheffler'
\ No newline at end of file
......@@ -7,4 +7,4 @@
#
###############################################################################
#from . import helper_functions
\ No newline at end of file
__author__='Daniel Scheffler'
\ No newline at end of file
......@@ -24,7 +24,7 @@ from sqlalchemy import create_engine
from sqlalchemy.types import to_instance,TypeEngine
from geoalchemy2.types import Geometry as GEOMETRY
from misc import path_generator as PG
from . import path_generator as PG
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
# + misc.helper_functions.cornerLonLat_to_postgreSQL_poly: (left out here in order to avoid circular dependencies)
......@@ -308,7 +308,7 @@ def get_pgSQL_geospatial_query_cond(conn_params, table2query, geomCol2use='bound
assert trueDataCornerLonLat if scene_ID is None else scene_ID, "Provide eihter scene_ID or trueDataCornerLonLat!"
if trueDataCornerLonLat:
from misc.helper_functions import cornerLonLat_to_postgreSQL_poly
from .helper_functions import cornerLonLat_to_postgreSQL_poly
pGSQL_poly = cornerLonLat_to_postgreSQL_poly(trueDataCornerLonLat)
src_geom = "'SRID=4326;%s'::geometry" %pGSQL_poly # source geometry is given
tgt_geom = "%s.%s::geometry" %(table2query, geomCol2use) # FIXME scenes tabelle hat "geography" geoinfos -> eigener Index wird bei "geometry" nicht genutzt
......@@ -633,7 +633,7 @@ class GMS_JOB(object):
else:
temp_gdf = GeoDataFrame(datadict, columns=['satellite', 'sensor', 'filenames'])
if re.search('Landsat-7', datadict['satellite'], re.I) and re.search('ETM+', datadict['sensor'], re.I):
from misc.helper_functions import Landsat_entityID_decrypter as LED
from .helper_functions import Landsat_entityID_decrypter as LED
get_L7_sensor = lambda fN: LED(fN.split('.tar.gz')[0]).sensorIncSLC
temp_gdf['sensor'] = [*temp_gdf['filenames'].map(get_L7_sensor)]
......@@ -1097,7 +1097,7 @@ def update_metaDB_if_needed(satellite,sensor,subsystem,dates2check):
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
included_tables = [i[0] for i in cursor.fetchall()]
if tablename not in included_tables or not os.path.isfile(job.path_db_meta):
from misc.path_generator import get_path_metaCSV
from .path_generator import get_path_metaCSV
path_csv = get_path_metaCSV(satellite,sensor)
if path_csv == 'metaCSV not found':
paths_CSVs = download_current_ProviderMetaDB(satellite, sensor, subsystem, date2check, rebuild_sensorsDB=True)
......
......@@ -45,17 +45,17 @@ from matplotlib import pyplot as plt
from subprocess import Popen, PIPE
from xml.etree.ElementTree import QName
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import GEOPROCESSING as GEOP # Cloud Processor
from misc import database_tools as DB_T
from misc import path_generator as PG
from algorithms.L1A_P import L1A_object
from algorithms.L1B_P import L1B_object
from algorithms.L1C_P import L1C_object
from algorithms.L2A_P import L2A_object
from algorithms.L2B_P import L2B_object
from algorithms.L2C_P import L2C_object
from ..algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from ..algorithms import GEOPROCESSING as GEOP # Cloud Processor
from . import database_tools as DB_T
from . import path_generator as PG
from ..algorithms.L1A_P import L1A_object
from ..algorithms.L1B_P import L1B_object
from ..algorithms.L1C_P import L1C_object
from ..algorithms.L2A_P import L2A_object
from ..algorithms.L2B_P import L2B_object
from ..algorithms.L2C_P import L2C_object
config = builtins.GMS_config # read from builtins (set by process_controller)
......@@ -119,6 +119,13 @@ def setup_logger(name_logfile, path_logfile,append=1):
return logger
def MAP(func, *args, CPUs=None):
    """Parallel-map helper: apply ``func`` to argument tuples via a process pool.

    :param func:  picklable callable to be executed in worker processes
    :param args:  expects exactly one positional argument — an iterable of
                  argument tuples, forwarded to ``multiprocessing.Pool.starmap``
                  (passing more than one iterable raises TypeError)
    :param CPUs:  number of worker processes to spawn; None uses all available
                  CPU cores (``os.cpu_count()``)
    :return:      list of results, in the order of the input iterable
    """
    import multiprocessing
    # bugfix: 'CPUs' was previously ignored — Pool() always spawned
    # os.cpu_count() workers regardless of the caller's request
    with multiprocessing.Pool(processes=CPUs) as pool:
        results = pool.starmap(func, *args)
    return results
def trace_unhandled_exceptions(func):
@functools.wraps(func)
def wrapped_func(*args, **kwargs):
......@@ -502,7 +509,7 @@ def get_subset_GMS_obj(GMS_obj,bounds):
corners_imYX = GEOP.calc_FullDataset_corner_positions(
sub_GMS_obj.mask_1bit, assert_four_corners=False, algorithm='shapely')
else: # str
from gms_io.Input_reader import read_mask_subset
from ..gms_io.Input_reader import read_mask_subset
subset = ('block', ((rS, rE), (cS, cE)))
mask_1bit = read_mask_subset(sub_GMS_obj.mask_1bit, 'mask_1bit', sub_GMS_obj.logger, subset)
corners_imYX = GEOP.calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False,algorithm='shapely')
......
......@@ -20,7 +20,7 @@ class path_generator(object):
Instead they are retrieved from postgreSQLdb."""
def __init__(self, *args, **kwargs):
if 'scene_ID' in kwargs:
from misc.database_tools import get_scene_and_dataset_infos_from_postgreSQLdb
from .database_tools import get_scene_and_dataset_infos_from_postgreSQLdb
args = [get_scene_and_dataset_infos_from_postgreSQLdb(kwargs['scene_ID'])] # return [dict]
assert len(args) in [1,8,9], "Received invalid length of 'args' argument."
......@@ -160,7 +160,7 @@ def get_path_cloud_class_obj(GMS_identifier, get_all=False):
:param GMS_identifier:
:param get_all:
"""
from misc.helper_functions import get_GMS_sensorcode
from .helper_functions import get_GMS_sensorcode
GMS_sensorcode = get_GMS_sensorcode(GMS_identifier)
satellite,sensor,logger = (GMS_identifier['Satellite'],GMS_identifier['Sensor'],GMS_identifier['logger'])
path_cloud_classifier_objects = os.path.join(job.path_cloud_classif,satellite,sensor)
......
......@@ -46,17 +46,18 @@ if isdebugging: #override the existing settings in order to get write access eve
#builtins.GMS_process_ID = 26185255 # 1x L8 Bug 5 corners found -> Grund=Schreibfehler L1A im tiled Python-mode bei mehr als 1 Szene im Job
#builtins.GMS_process_ID = 26185256 # 1x L7 SLC off, Zielsensor L8, spat.ref L8
#builtins.GMS_process_ID = 26185257 # Beta-Job - 219 x L8, 172 x L7, 111 x S2, spatref L8
builtins.GMS_process_ID = 26185258 # Beta-Job - 219 x L8, spatref L8
#builtins.GMS_process_ID = 26185258 # Beta-Job - 219 x L8, spatref L8
#builtins.GMS_process_ID = 26185259 # Beta-Job - 172 x L7, spatref L8
#builtins.GMS_process_ID = 26185260 # Beta-Job - 111 x S2, spatref L8
#builtins.GMS_process_ID = 26185268 # 25x L7 SLC off, Zielsensor L8, spat.ref L8
#builtins.GMS_process_ID = 26185269 # 1x L7 SLC off, Bug SpatialIndexMediator
#builtins.GMS_process_ID = 26185270 # 5x L7 SLC off, Bug SpatialIndexMediator
builtins.GMS_process_ID = 26185275 # 1x L8, spat. Ref. L8 Bug L1B_mask not found
import config
from . import config
builtins.GMS_config = config
for i in [attr for attr in dir(config) if attr.startswith('exec__')]:
globals()[i] = getattr(config,i)
......@@ -70,18 +71,31 @@ if job.profiling:
from pyinstrument import Profiler
profiler = Profiler() # or Profiler(use_signal=False), see below
profiler.start()
import gms_io.Output_writer as OUT_W # Output_writer
import gms_io.Input_reader as INP_R # Input_reader
import misc.helper_functions as HLP_F # Helper functions
import misc.database_tools as DB_T # database tools
import algorithms.L0A_P as L0A_P # Level 0A Processor
import algorithms.L0B_P as L0B_P # Level 0B Processor
import algorithms.L1A_P as L1A_P # Level 1A Processor
import algorithms.L1B_P as L1B_P # Level 1B Processor
import algorithms.L1C_P as L1C_P # Level 1C Processor
import algorithms.L2A_P as L2A_P # Level 2A Processor
import algorithms.L2B_P as L2B_P # Level 2B Processor
import algorithms.L2C_P as L2C_P # Level 2C Processor
# import gms_io.Output_writer as OUT_W # Output_writer
# import gms_io.Input_reader as INP_R # Input_reader
# import misc.helper_functions as HLP_F # Helper functions
# import misc.database_tools as DB_T # database tools
# import algorithms.L0A_P as L0A_P # Level 0A Processor
# import algorithms.L0B_P as L0B_P # Level 0B Processor
# import algorithms.L1A_P as L1A_P # Level 1A Processor
# import algorithms.L1B_P as L1B_P # Level 1B Processor
# import algorithms.L1C_P as L1C_P # Level 1C Processor
# import algorithms.L2A_P as L2A_P # Level 2A Processor
# import algorithms.L2B_P as L2B_P # Level 2B Processor
# import algorithms.L2C_P as L2C_P # Level 2C Processor
from .gms_io import Output_writer as OUT_W # Output_writer
from .gms_io import Input_reader as INP_R # Input_reader
from .misc import database_tools as DB_T # database tools
from .misc import helper_functions as HLP_F # Helper functions
from .algorithms import L0A_P # Level 0A Processor
from .algorithms import L0B_P # Level 0B Processor
from .algorithms import L1A_P # Level 1A Processor
from .algorithms import L1B_P # Level 1B Processor
from .algorithms import L1C_P # Level 1C Processor
from .algorithms import L2A_P # Level 2A Processor
from .algorithms import L2B_P # Level 2B Processor
from .algorithms import L2C_P # Level 2C Processor
########################### core functions ####################################
......@@ -497,8 +511,8 @@ def run_processController_in_multiprocessing(usecase_data_list):
"""Create job success summary"""
detailed_JS, quick_JS = HLP_F.get_job_summary(L2C_newObjects+failed_objects)
#detailed_JS.to_excel(os.path.join(job.path_job_logs,'%s_summary.xlsx' % job.ID))
detailed_JS.to_csv (os.path.join(job.path_job_logs,'%s_summary.csv' % job.ID))
detailed_JS.to_excel(os.path.join(job.path_job_logs,'%s_summary.xlsx' % job.ID))
detailed_JS.to_csv (os.path.join(job.path_job_logs,'%s_summary.csv' % job.ID),sep='\t')
job.logger.info('\nQUICK JOB SUMMARY (ID %s):\n'%job.ID+quick_JS.to_string())
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment