Commit 7c2c3a7b authored by Daniel Scheffler's avatar Daniel Scheffler Committed by Mathias Peters
Browse files

merged the rest of old process_controller_for_testing and removed deprecated modules

processing.process_controller:
- moved the rest of process_controller_for_testing here
- cleaned up deprecated code snippets
run_gms:
- added possibility to pass job ID via console call
- removed deprecated modules 'config_old', 'process_controller_for_testing' and 'unified_config'
- updated __version__
parent c24961ca
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20161201.01'
__version__ = '20161201.02'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
###############################################################################
#
# GeoMultiSens configuration file.
#
# Written by Daniel Scheffler
# GFZ Potsdam, Section 1.4
#
###############################################################################
# vars from builtins:
# GMS_call_type <- process_controller
# GMS_process_ID <- process_controller
import builtins
import datetime
import os
import multiprocessing
import socket
import psycopg2
import collections
import warnings
import numpy as np
# Point X11 clients at a dummy display address on headless processing nodes.
# NOTE(review): hard-coded loopback display -- confirm this is intended on all hosts.
os.environ['DISPLAY'] = '127.0.0.0:0.0'
def get_info_from_postgreSQLdb(conn_params, tablename, vals2return, cond_dict, records2fetch=0):
    """Query a PostgreSQL table and return the requested column values.

    :param conn_params:   psycopg2 connection string
    :param tablename:     name of the table to query
    :param vals2return:   column name or list of column names to return
    :param cond_dict:     {column: value} pairs ANDed into the WHERE clause
    :param records2fetch: 0 -> fetch all records, 1 -> fetch one record,
                          N > 1 -> fetch at most N records
    :return: list of result tuples, e.g. [('LE71950282003121EDC00',), ...],
             or the string 'database connection fault' on a failed connection
    """
    if not isinstance(vals2return, list):
        vals2return = [vals2return]
    assert isinstance(records2fetch, int), \
        "get_info_from_postgreSQLdb: Expected an integer for the argument 'records2fetch'. Got %s" % type(records2fetch)
    connection = psycopg2.connect(conn_params)
    if connection is None:  # kept for backward compatibility (psycopg2 normally raises instead)
        return 'database connection fault'
    try:
        cursor = connection.cursor()
        try:
            # SECURITY NOTE: table/column/condition values are interpolated into the SQL
            # string; callers must only pass trusted identifiers/values here. Consider
            # switching to parameterized queries (cursor.execute(sql, params)).
            condition = "WHERE " + " AND ".join(["%s=%s" % (k, v) for k, v in cond_dict.items()])
            cursor.execute("SELECT " + ','.join(vals2return) + " FROM " + tablename + " " + condition)
            if records2fetch == 0:
                records2return = cursor.fetchall()
            elif records2fetch == 1:
                records2return = [cursor.fetchone()]
            else:
                records2return = cursor.fetchmany(size=records2fetch)
        finally:
            # ensure cursor/connection are released even if execute/fetch raises
            cursor.close()
    finally:
        connection.close()
    return records2return
# PEP 8 (E731): named lambdas replaced by equivalent def statements; same names,
# same signatures, same behavior.
def query_cfg(conn_param, key):
    """Return the 'value' column of the 'config' table row matching *key*."""
    return get_info_from_postgreSQLdb(conn_param, 'config', 'value', {'key': "'%s'" % key})[0][0]


def query_vir(conn_param, col, VSID):
    """Return column *col* of the 'virtual_sensors' table row with id *VSID*."""
    return get_info_from_postgreSQLdb(conn_param, 'virtual_sensors', col, {'id': VSID})[0][0]


def absP(r):
    """Resolve the relative path *r* against this module's directory."""
    return os.path.join(os.path.dirname(__file__), os.path.relpath(r))


def joinP(*x):
    """Shorthand for os.path.join."""
    return os.path.join(*x)
class Job:
    """Container for all job-level settings of a GeoMultiSens processing job.

    Class attributes hold console-mode defaults (evaluated once at import time);
    __init__ overrides/extends them depending on *call_type*.
    """

    # snapshot of the job environment, taken at class-definition (import) time
    start_time = str(datetime.datetime.now())
    hostname = socket.gethostname()
    CPUs = multiprocessing.cpu_count()
    benchmark_global = False
    # bench_CLD_class = True
    bench_CLD_class = False
    SZA_SAA_calculation_accurracy = 'coarse'
    export_VZA_SZA_SAA_RAA_stats = True
    export_L1C_obj_dumps = False
    profiling = False
    # static database/resource paths, resolved relative to this module via absP()
    path_earthSunDist = absP('./database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv')
    path_SRFs = absP('./database/srf/')
    path_cloud_classif = absP('./database/cloud_classifier/')
    path_solar_irr = absP('./database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt')
    path_testing = absP('./testing/')
    path_benchmarks = absP('./benchmarks/')
    path_job_logs = absP('./testing/job_logs/')
    # processor configuration: [run processor, write output, delete output if not needed anymore]
    exec__L1AP = [1, 1, 1]
    exec__L1BP = [1, 1, 1]
    exec__L1CP = [1, 1, 1]
    exec__L2AP = [1, 1, 1]
    exec__L2BP = [1, 1, 0]
    exec__L2CP = [1, 1, 0]

    def __init__(self, call_type, ID, exec_mode='Python'):
        # type: (str, int, str) -> None
        """Set up all job paths and settings.

        :param call_type: 'console' (local defaults) or 'webapp' (settings read
                          from the PostgreSQL metadata database)
        :param ID:        job ID
        :param exec_mode: 'Python' or 'Flink'
        """
        self.ID = ID
        self.call_type = call_type
        self.exec_mode = exec_mode
        assert exec_mode in ['Flink','Python']
        if call_type == 'console':
            """path_fileserver is to be replaced by Fileserver URL"""
            self.path_fileserver = '/misc/gms2/scheffler/GeoMultiSens/' if self.hostname != 'geoms' else absP('./')
            self.path_tempdir = '/dev/shm/GeoMultiSens/'
            # path_procdata = absP('../database/processed_data/')
            # path_procdata = '/srv/gms2/scheffler/GeoMultiSens/database/processed_data/'
            self.path_procdata = joinP(self.path_fileserver, 'database/processed_scenes%s/' %('_bench' if self.benchmark_global else ''))
            self.path_procdata_MGRS = joinP(self.path_fileserver, 'database/processed_mgrs_tiles%s/' %('_bench' if self.benchmark_global else ''))
            self.path_database = joinP(self.path_fileserver, 'database/processed_scenes%s/data_DB.db' %('_bench' if self.benchmark_global else ''))
            # path_database = absP('./database/processed_data/data_DB.db')
            # path_db_meta = absP('./database/metadata/')
            self.path_db_meta = absP('./database/metadata/metadata_DB.db') # ('geoms.gfz-potsdam.de:5432')
            # path_archive = absP('./database/sampledata/')
            # path_archive = '/srv/gms2/scheffler/GeoMultiSens/database/sampledata/'
            self.path_archive = joinP(self.path_fileserver, 'database/sampledata/')
        elif call_type == 'webapp':
            self.conn_database = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3" # FIXME: localhost could be a problem on other nodes
            self.conn_db_meta = self.conn_database
            # all remaining paths/settings are looked up in the metadata DB 'config' table
            self.path_fileserver = query_cfg(self.conn_db_meta, 'path_data_root')
            self.path_tempdir = query_cfg(self.conn_db_meta, 'path_tempdir')
            self.path_procdata_scenes = joinP(self.path_fileserver, query_cfg(self.conn_db_meta, 'foldername_procdata_scenes'))
            self.path_procdata_MGRS = joinP(self.path_fileserver, query_cfg(self.conn_db_meta, 'foldername_procdata_MGRS'))
            self.path_archive = joinP(self.path_fileserver, query_cfg(self.conn_db_meta, 'foldername_download'))
            self.path_spatIdxSrv = query_cfg(self.conn_db_meta, 'path_spatial_index_mediator_server')
            self.path_earthSunDist = absP(query_cfg(self.conn_db_meta, 'path_earthSunDist'))
            self.path_SRFs = absP(query_cfg(self.conn_db_meta, 'path_SRFs'))
            self.path_cloud_classif = absP(query_cfg(self.conn_db_meta, 'path_cloud_classif'))
            self.path_solar_irr = absP(query_cfg(self.conn_db_meta, 'path_solar_irr'))
            self.path_testing = absP(query_cfg(self.conn_db_meta, 'path_testing'))
            self.path_benchmarks = absP(query_cfg(self.conn_db_meta, 'path_benchmarks'))
            self.path_job_logs = absP(query_cfg(self.conn_db_meta, 'path_job_logs'))
            # external Java download commands, also read from the metadata DB
            self.java_commands = collections.OrderedDict()
            self.java_commands["keyword"] = query_cfg(self.conn_db_meta, 'command_keyword')
            self.java_commands["value_download"] = query_cfg(self.conn_db_meta, 'command_value_download')
        assert os.path.isdir(self.path_archive), "Given archive folder '%s' does not exist. Execution stopped" % self.path_archive
        if not os.path.isdir(self.path_job_logs): os.makedirs(self.path_job_logs)
        if exec_mode == 'Python':
            # In Python execution mode intermediate results are read back from disk,
            # so the output writer flag (index 1) must be on for every level.
            # NOTE(review): exec_lvl is the class-level list, so this mutation is
            # shared across all Job instances.
            for i in ['L1AP','L1BP','L1CP','L2AP','L2BP','L2CP']:
                exec_lvl = getattr(self, 'exec__%s' % i)
                if exec_lvl[1] == 0:
                    warnings.warn("If job.exec_mode is set to 'Python' the output writer for %s has to be enabled because "
                                  "any operations on GMS_obj.arr read the intermediate results from disk. Turning it on.." %i)
                    exec_lvl[1] = 1
# Instantiate the module-level Job singleton. The process controller primes
# builtins.GMS_call_type / builtins.GMS_process_ID before importing this module;
# when either is absent (module imported standalone), no job is created.
try:
    GMS_call_type, GMS_process_ID = builtins.GMS_call_type, builtins.GMS_process_ID
    job = Job(GMS_call_type, GMS_process_ID)
except AttributeError:
    job = None
class Usecase:
    """Container for all usecase-level settings (dataset filters, target sensor,
    conversion types, reference grid) of a GeoMultiSens job."""

    def __init__(self, _job=job):
        # type: (Job) -> None
        """Derive usecase settings from the given Job.

        NOTE(review): the default for _job is bound to the module-level 'job'
        at class-creation time; if job is None, calling Usecase() without an
        argument fails on _job.call_type.
        """
        if _job.call_type == 'console':
            self.filt_coord = [None, None, None, None]
            # alternative dataset filters kept for quick switching during testing:
            # filt_datasets = ['ALOS', 'Terra', 'Landsat', 'SPOT', 'RapidEye', 'SRTM', 'ATM']
            # filt_datasets = ['ALOS', 'Terra', 'SPOT', 'RapidEye', 'SRTM', 'ATM']
            self.filt_datasets = ['ALOS', 'Terra', 'Landsat', 'SPOT', 'RapidEye']
            # filt_datasets = ['Terra']
            # filt_datasets = ['Landsat']
            # filt_datasets = ['ALOS']
            # filt_datasets = ['SPOT']
            # filt_datasets = ['RapidEye','ALOS']
            # filt_datasets = ['RapidEye']
            # filt_datasets = ['Landsat','SPOT','RapidEye']
            # filt_datasets = ['Landsat','SPOT']
            self.filt_date = [2000, 2015]
            # filt_date = [2012,2015]
            self.skip_thermal = True
            self.skip_pan = True
            self.sort_bands_by_cwl = True
            self.conversion_type_optical = 'Ref' # 'Rad' / 'Ref'
            self.conversion_type_thermal = 'Rad' # 'Rad' / 'Temp'
            self.scale_factor_TOARef = 10000
            self.virtual_sensor_id = 10 # Sentinel-2A 10m
            self.datasetid_spectral_ref = 249 # Sentinel-2A
            self.target_CWL = []
            self.target_FWHM = []
        elif _job.call_type == 'webapp':
            # helper: read one column of this job's row in the 'jobs' table
            query_job = lambda conn_param, col: get_info_from_postgreSQLdb(conn_param,'jobs',col,{'id':_job.ID})[0][0]
            #skip_thermal = int(query_cfg(_job.conn_db_meta, 'skip_thermal'))
            self.skip_thermal = True
            self.skip_pan = int(query_cfg(_job.conn_db_meta, 'skip_pan'))
            self.sort_bands_by_cwl = int(query_cfg(_job.conn_db_meta, 'sort_bands_by_cwl'))
            self.conversion_type_optical = query_cfg(_job.conn_db_meta, 'conversion_type_optical')
            self.conversion_type_thermal = query_cfg(_job.conn_db_meta, 'conversion_type_thermal')
            self.datasetid_spatial_ref = query_job(_job.conn_db_meta, 'datasetid_spatial_ref')
            self.virtual_sensor_id = query_job(_job.conn_db_meta, 'virtualsensorid')
            # -1 in the DB means 'not set' -> fall back to Sentinel-2A 10m
            self.virtual_sensor_id = self.virtual_sensor_id if self.virtual_sensor_id != -1 else 10 # Sentinel-2A 10m
            # NOTE(review): duplicate of the datasetid_spatial_ref query above
            self.datasetid_spatial_ref = query_job(_job.conn_db_meta, 'datasetid_spatial_ref')
            self.virtual_sensor_name = query_vir(_job.conn_db_meta, 'name',self.virtual_sensor_id)
            self.datasetid_spectral_ref = query_vir(_job.conn_db_meta, 'spectral_characteristics_datasetid', self.virtual_sensor_id)
            self.target_CWL = query_vir(_job.conn_db_meta, 'wavelengths_pos', self.virtual_sensor_id) # FIXME column is empty if a known datasetid is chosen as spectral characteristics of the virtual sensor
            self.target_FWHM = query_vir(_job.conn_db_meta, 'band_width', self.virtual_sensor_id) # FIXME column is empty if a known datasetid is chosen as spectral characteristics of the virtual sensor
            self.target_gsd = query_vir(_job.conn_db_meta, 'spatial_resolution', self.virtual_sensor_id) # table features only 1 value for X/Y-dims
            self.target_gsd = [self.target_gsd,self.target_gsd] if type(self.target_gsd) in [int,float] else self.target_gsd
            self.EPSG = query_vir(_job.conn_db_meta, 'projection_epsg', self.virtual_sensor_id)
            # reference grid: two pixel-center coordinates per axis, e.g. [15, 45] for 30m GSD
            self.spatial_ref_gridx = np.array(np.arange(self.target_gsd[0]/2., self.target_gsd[0]/2.+2*self.target_gsd[0], self.target_gsd[0])) # e.g. [15, 45]
            self.spatial_ref_gridy = np.array(np.arange(self.target_gsd[1]/2., self.target_gsd[1]/2.+2*self.target_gsd[1], self.target_gsd[1]))
            #conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME
            #conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME
            self.scale_factor_TOARef = int(query_cfg(_job.conn_db_meta, 'scale_factor_TOARef'))
            self.align_coord_grids = 1 # ONLY TO FORCE DEACTIVATION OF IMAGE RESAMPLING
            self.match_gsd = True
            assert isinstance(self.target_gsd,list) and len(self.target_gsd)==2
# Instantiate the module-level Usecase singleton only when a Job could be created
# (i.e. builtins were primed by the process controller).
if job is not None:
    usecase = Usecase(job)
def get_usecase_coord_grid():
    """consider projections of images with status georef = master"""
    # placeholder values until real master-georef handling exists (see FIXMEs)
    geotransform = (0, 1, 0, 0, 0, -1)  # FIXME
    epsg_code = 'EPSG:4326'  # FIXME
    gsd_meters = 30  # default
    return geotransform, epsg_code, gsd_meters
# def init_gms_globals():
# global CLD_obj
# CLD_obj = 'not_set'
#
# # CLD_obj = None
# # print (CLD_obj)
# def set_CLD_obj_classifier(path_cloud_class_obj):
# # global CLD_obj
# CLD_obj = CLD_P.GmsCloudClassifier(classifier=path_cloud_class_obj)
# # print('CLD_obj' in globals())
# # def mk_global(var_names):
# # for var_name in var_names:
# # exec("builtins.%s = %s" % tuple(2*[var_name]))
This diff is collapsed.
......@@ -13,7 +13,7 @@ from ..io import Input_reader as INP_R
from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import environment as ENV
from ..misc.logging import GMS_logger
from ..misc.logging import GMS_logger, shutdown_loggers
from ..algorithms import L0A_P, L1A_P, L1B_P, L1C_P, L2A_P, L2B_P, L2C_P
from .pipeline import (L0B_L1A_map, L0B_L1A_map_1, L1A_map_2, L1A_map_3, L1B_map_1, L1C_map_1,
L2A_map, L2A_map_1, L2A_map_2, L2B_map_1, L2C_map_1)
......@@ -45,6 +45,7 @@ class process_controller(object):
self.call_type = call_type
self.parallLev = parallelization_level
self._logger = None
self.profiler = None
self.failed_objects = []
self.L1A_newObjects = []
......@@ -66,6 +67,13 @@ class process_controller(object):
self.logger.info('Checking system environment...')
ENV.check_dependencies(self.logger)
# check if process_controller is executed by debugger
# isdebugging = 1 if True in [frame[1].endswith("pydevd.py") for frame in inspect.stack()] else False
#if isdebugging: # override the existing settings in order to get write access everywhere
# pass
#called_from_iPyNb = 1 if 'ipykernel/__main__.py' in sys.argv[0] else 0
@property
def logger(self):
......@@ -76,20 +84,24 @@ class process_controller(object):
os.path.join(self.job.path_job_logs,'%s.log' % self.job.ID), 0)
return self._logger
@logger.setter
def logger(self, logger):
self._logger = logger
@logger.deleter
def logger(self):
if self._logger not in [None, 'not set']:
self.logger.close()
self.logger = None
@property
def sceneids_failed(self):
return [obj.scene_ID for obj in self.failed_objects]
def get_data_list(self):
"""
Get a list of datasets to be processed from database and return it together with some metadata.
......@@ -140,6 +152,10 @@ class process_controller(object):
Run all processors at once.
"""
# TODO handle errors
if self.job.profiling:
from pyinstrument import Profiler
self.profiler = Profiler() # or Profiler(use_signal=False), see below
self.profiler.start()
self.logger.info('Starting job with ID %s (comment: %s)...'
% (self.job.ID, self.DB_job_record.comment))
......@@ -171,6 +187,12 @@ class process_controller(object):
self.job.computation_time = self.job.end_time-self.job.start_time
self.logger.info('Time for execution: %s' % self.job.computation_time)
if self.job.profiling:
self.profiler.stop()
print(self.profiler.output_text(unicode=True, color=True))
shutdown_loggers()
def benchmark(self):
"""
......@@ -401,11 +423,11 @@ class process_controller(object):
GMSfile_list_L2B_inDB = INP_R.get_list_GMSfiles(datalist_L2C_P, 'L2B')
# FIXME only parallelization_level == 'scenes' implemented
work = [[GMS, ['cube', None]] for GMS in GMSfile_list_L2B_inDB]
L2B_DBObjects = MAP(L2B_P.L2B_object(None).fill_from_disk, work)
L2B_DBObjects = list(L2B_DBObjects)
work = [[GMS, ['cube', None]] for GMS in GMSfile_list_L2B_inDB]
L2B_DBObjects = MAP(L2B_P.L2B_object(None).fill_from_disk, work)
L2B_DBObjects = list(L2B_DBObjects)
L2B_Instances = self.L2B_newObjects + L2B_DBObjects # combine newly and earlier processed L2A data
L2B_Instances = self.L2B_newObjects + L2B_DBObjects # combine newly and earlier processed L2A data
# print('L2B_Instances', L2B_Instances)
L2C_resObjects = MAP(L2C_map_1, L2B_Instances, CPUs=8) # FIXME 8 workers due to heavy IO
......@@ -457,13 +479,3 @@ class process_controller(object):
self.L2C_newObjects = []
#if job.profiling: # TODO
# profiler.stop()
# print(profiler.output_text(unicode=True, color=True))
# -*- coding: utf-8 -*-
__author__='Daniel Scheffler'
import sys
from GeoMultiSens_dev import process_controller
if __name__ == '__main__':
    # Merge-artifact fix: the old unconditional ID selection that preceded this
    # argv check duplicated the list below and its assignment was dead code
    # (always overwritten by the if/else); only the argv-aware version is kept.
    if len(sys.argv) < 2:
        # No job ID given via console call -> use a default test job.
        # Historical test-job IDs kept for reference:
        # ID = 26184107
        # ID = 26185175  # 1x TM5
        # ID = 26185176  # 1x Landsat
        # ID = 26185177  # 1st Sentinel-2 test scene
        # ID = 26185189  # directly adjacent granules of the 1st Sentinel-2 test scene
        # ID = 26185237  # 4 x Landsat-8 -> job created via database tools
        # ID = 26185239  # 50 x Landsat-8 -> job created via database tools - 1st L8 beta test job
        # ID = 26185242  # 1 x Landsat-8 - bug files_in_archive=None
        # ID = 26185250  # beta job - 219 x L8, 172 x L7, 111 x S2, spatref S2
        # ID = 26185251  # 1x L8, target sensor L8
        # ID = 26185252  # 1x L8, target sensor L8, spat. ref. L8
        # ID = 26185253  # 25x L8, target sensor L8, spat. ref. L8
        # ID = 26185254  # 10x L8, target sensor L8, spat. ref. L8
        # ID = 26185255  # 1x L8, bug '5 corners found' -> cause: write error of L1A in tiled Python mode with more than 1 scene in the job
        # ID = 26185256  # 1x L7 SLC off, target sensor L8, spat. ref. L8
        # ID = 26185257  # beta job - 219 x L8, 172 x L7, 111 x S2, spatref L8
        # ID = 26185258  # beta job - 219 x L8, spatref L8
        # ID = 26185259  # beta job - 172 x L7, spatref L8
        # ID = 26185260  # beta job - 111 x S2, spatref L8
        # ID = 26185268  # 25x L7 SLC off, target sensor L8, spat. ref. L8
        # ID = 26185269  # 1x L7 SLC off, bug SpatialIndexMediator
        # ID = 26185270  # 5x L7 SLC off, bug SpatialIndexMediator
        # ID = 26185275  # 1x L8, spat. ref. L8, bug L1B_mask not found
        # ID = 26185264  # 1x L8, bug L1B_masks not found
        # ID = 26185265  # 1x L8, bug L2B_masks not found
        # ID = 26185268  # "2x L8, bug L2B_masks not found, incl. 1x bad archive"
        # ID = 26185269  # "10x L8, bug L2B_masks not found"
        # ID = 26185272  # "1x S2A Sips"
        ID = 26185273  # "1x L7, target L8, spat.ref L8"
        # ID = 26185275  # "1x L7, target L8, spat.ref L8 L1B Matching failed"
        # ID = 26185276  # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
        # ID = 26185279  # "GEOMS: 25x L7, target L8, spat.ref L8"
        # ID = 26185280  # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within mapinfo2geotransform"
        # ID = 26185281  # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
        # ID = 26185283  # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
        # ID = 26185284  # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
        # ID = 26185321  # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
        # ID = 26185322  # "GEOMS: 1x L7, target L8, spat.ref L8, bug calc_shifted_cross_power_spectrum: NoneType object not iterable"
        # ID = 26185277  # "GMS41: 10x L7, target L8, spat.ref L8, permission errors during logging"
        # ID = 26185278  # "beta job - 172 x L7, spatref L8"
        # ID = 26185284  # "GMS41: all beta-L8 with cloud cover <30% (74 scenes)"
        # ID = 26185285  # "GMS41: all beta-L7 with cloud cover <30% (49 scenes)"
    else:
        # job ID passed via console call
        ID = int(sys.argv[1])

    # set up process controller instance for the chosen job and parallelization level
    PC = process_controller(ID, parallelization_level='scenes')
......
import builtins

# Module-level singleton holding the active GMS configuration (set once).
_config = None


def set_config(config):
    """Store *config* as the module-level configuration.

    Only the first call has an effect; subsequent calls are silently ignored
    (set-once semantics).
    """
    global _config
    if _config is None:
        _config = config


def get_config():
    """Return the stored configuration.

    Falls back to ``builtins.GMS_config`` (primed externally by the process
    controller) when :func:`set_config` has not been called yet.
    """
    # read-only access -> no 'global' statement needed here
    if _config is not None:
        return _config
    return builtins.GMS_config


def get_job():
    """Return the job part of the configuration.

    Supports both the tuple form ``(job, usecase)`` and an object exposing a
    ``job`` attribute.
    """
    cfg = get_config()  # single lookup instead of querying twice
    try:
        return cfg[0]
    except TypeError:
        return cfg.job


def get_usecase():
    """Return the usecase part of the configuration (tuple or object form)."""
    cfg = get_config()
    try:
        return cfg[1]
    except TypeError:
        return cfg.usecase
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment