Commit a101ae14 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

- some minor additions to last commit

- added first dummy processors for L1B and L1C
parent d5efcd2b
......@@ -478,10 +478,11 @@ class L1A_object(object):
<sensorname:[SPOT,RapidEye,ASTER,LANDSAT,ALOS]>;<path of dataname>;<path of corresponding ASCII metafile>
works for: RapidEye (metadata.xml),SPOT(metadata.dim),LANDSAT(mtl.txt),ASTER(downloaded coremetadata),ALOS(summary.txt & Leader file) -> need both files separated by a & sign
'''
self.logger.info('Reading %s %s %s metadata...' %(self.satellite,self.sensor,self.subsystem))
self.MetaObj = META.METADATA(self.satellite, self.subsystem, self.path_InFilePreprocessor, \
self.path_MetaPreprocessor, self.logger, self.LayerBandsAssignment)
self.logger.debug("The following metadata have been read:")
[self.logger.debug("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.get_meta_overview().items()]
self.logger.info("The following metadata have been read:")
[self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.get_meta_overview().items()]
# def get_rasObj(self):
# self.rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
......
......@@ -32,8 +32,11 @@ try:
import cv2
except: print('cv2-lib missing..')
########################### core functions ####################################
def dummy_calculate_spatial_shifts(L1A_obj,L1A_obj_Ref):
    """Dummy co-registration placeholder: attach zero-valued shift arrays to *L1A_obj*.

    :param L1A_obj:     object whose ``arr`` attribute (a numpy array) defines the
                        shape of the shift arrays; gets ``x_shift`` and ``y_shift``
                        attributes assigned in place
    :param L1A_obj_Ref: reference object; currently unused — placeholder for a real
                        shift calculation
    """
    # NOTE: the original used np.float, a deprecated alias of the builtin float
    # that was removed in NumPy 1.24 — use float directly for compatibility.
    L1A_obj.x_shift = np.zeros_like(L1A_obj.arr, dtype=float)
    L1A_obj.y_shift = np.zeros_like(L1A_obj.arr, dtype=float)
def calculate_TiePoints(im_ref,im_rpc,distance = 200):
detector = cv2.FeatureDetector_create ("SIFT")
descriptor = cv2.DescriptorExtractor_create("SIFT")
......
......@@ -21,21 +21,17 @@
###############################################################################
########################### Library import ####################################
from __future__ import (division, print_function, unicode_literals,absolute_import)
import numpy as np,os,glob,logging
import glob
import logging
import numpy as np
import os
########################### core functions ####################################
def atm_corr(RSD_L1B, RSD_md_L1B, masks_L1B, masks_md_L1B, DGM_L1B, DGM_md_L1B, ATM_L1B, ATM_md_L1B, SRF_fold):
''' Performs an atmospheric correction and returns atmospherically corrected reflectance data.'''
print('\n##################### Level 1C Processing #####################')
logging.info('Level 1C Processing started.')
def dummy_atm_corr(L1B_obj):
''' Performs an atmospheric correction and returns atmospherically corrected reflectance data.'''
temp_logger = HLP_F.setup_logger('log__' + L1B_obj.baseN, L1B_obj.path_logfile, L1B_obj.job_CPUs, append=1)
temp_logger.info('Dummy Level 1C Processing started.')
SRF_dict = _SRF_reader(SRF_fold,RSD_md_L1B)
## CODE for atmospheric correction
RSD_L1C = RSD_L1B
RSD_md_L1C = RSD_md_L1B
return RSD_L1C, RSD_md_L1C
\ No newline at end of file
L1B_obj.arr = L1B_obj.arr/2
......@@ -477,6 +477,7 @@ class METADATA(object):
# fill missing values
if self.SceneID=='':
self.logger.info('Scene-ID could not be extracted and has to be retrieved from %s metadata database...' %self.Satellite)
HLP_F.update_metaDB_if_needed(self.Satellite,self.Sensor,self.Subsystem,self.AcqDate)
import gms_io.Input_reader as INP_R
tablename = '%s_%s_%s' %(self.Satellite.replace('-',''),self.Sensor.replace('+',''),self.Subsystem) \
......@@ -1194,7 +1195,6 @@ class METADATA(object):
logger.warning('Overpass duration and scene length cannot be calculated because the given data represents' \
'a subset of the original scene.')
def _FilterLayerdependentMetadata(self):
FULL_LayerBandsAssignment = get_LayerBandsAssignment(self.get_GMS_identifier()) if self.Satellite!='Terra' else ['1','2','3N','3B','4','5','6','7','8','9','10','11','12','13','14']
for attrname in ['SolIrradiance','CWL','FWHM','Offsets','OffsetsRef','Gains','GainsRef']:
......@@ -1229,7 +1229,6 @@ def get_LayerBandsAssignment(GMS_identifier,nBands=None): # nBands should be spe
logger.critical('Unable to get Layer Bands Assignment. Provided number of bands doesn´t match known layer band assignments.'); sys.exit()
else:
return ['1']
def isPAN(GMS_identifier,LayerNr):
GMS_sensorcode = HLP_F.get_GMS_sensorcode(GMS_identifier)
......
......@@ -138,10 +138,19 @@ def get_info_from_SQLdb(path_db,tablename,vals2return,cond_dict):
if not isinstance(vals2return,list): vals2return = [vals2return]
if not os.path.isfile(path_db):
return 'DB_file_not_found'
# import time
# t1 = time.time()
connection = sqlite3.connect(path_db)
# print(time.time() -t1)
# t2 = time.time()
cursor = connection.cursor()
# print(time.time() -t2)
# t3 = time.time()
cursor.execute("SELECT " +','.join(vals2return)+ " FROM " +tablename+ " WHERE " + " AND ".join(["%s=?" %(list(cond_dict.keys())[i]) for i in range(len(cond_dict))]), list(cond_dict.values()))
records2return = cursor.fetchall()
# print(time.time() -t3)
# t4 = time.time()
records2return = cursor.fetchall() if path_db is not job.path_db_meta else cursor.fetchone()
# print(time.time() -t4)
cursor.close()
connection.close()
return records2return
......@@ -172,14 +181,14 @@ def SRF_reader(GMS_identifier):
SRF_dict[key] = np.loadtxt(os.path.join(SRF_path,key),skiprows=1)
logger.info('Reading SRF for %s %s, %s...' %(satellite,sensor,key))
except:
logger.warning('No spectral response function found for %s %s %s at %s! >None< is returned.\n' %(satellite,sensor,key,os.path.join(SRF_path,key)))
logger.warning('No spectral response function found for %s %s %s at %s! >None< is returned.' %(satellite,sensor,key,os.path.join(SRF_path,key)))
# SRF_dict = dict((os.path.basename(key), np.loadtxt(SRF_filelist[count],skiprows=1)) for (count, key) in enumerate(SRF_filelist))
# print(SRF_dict[os.path.basename(SRF_filelist[0])][:,0])
# logger.info('SRFs for the following %s bands read:' %[i for i in SRF_dict.keys()])
else:
SRF_dict = {}
logger.warning("No spectral response functions available for '%s %s'. Preconfigured values are used for solar irradiance and central wavelength instead.\n" %(satellite,sensor))
logger.warning("No spectral response functions available for '%s %s'. Preconfigured values are used for solar irradiance and central wavelength instead." %(satellite,sensor))
return SRF_dict
def pickle_SRF_DB(L1A_Instances):
......
......@@ -90,7 +90,7 @@ def run_processController_in_singleprocessing(usecase_data_list):
for ind, i in enumerate(L1A_Instances):
if i.arr_shape == 'cube':
OUT_W.export_VZA_SZA_SAA_RAA_stats(i)
# print(i.satellite, i.sensor, i.subsystem if i.subsystem is not None else '')
# print(i.satellite, i.sensor, i.subsystem if i.subsystem is not None else '')
# for arr in [i.VZA_arr,i.SZA_arr,i.SAA_arr,i.RAA_arr]:
# i.VZA_arr[i.VZA_arr == -9999] = 0
# i.SZA_arr[i.SZA_arr == -9999] = 90-float(i.meta['SunElevation'])
......@@ -175,8 +175,6 @@ def apply_L1A_funcs_to_tiles(L1A_obj):
try:
# L0A-P
usecase.data_list = L0A_P.get_data_IDs_within_AOI()
# if metaDB_is_as_current_as_needed([i['satellite'] for i in ])
# [print(i) for i in usecase.data_list]
# sys.exit()
if job.CPUs == 1:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment