Commit 6c280204 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

L1B_P: second running version:

- much more stability and robustness
- many new features that have been developed in the context of the standalone version
L1A_P:
- Bugfix: fixed a bug that prevented further processing when array attributes contain only a path
OUT_W:
- added a function to write shapefiles from shapely polygons
parent bef92b9d
......@@ -3705,7 +3705,7 @@ def get_footprint_polygon(CornerLonLat):
return outpoly
def get_overlap_polygon(poly1, poly2):
""" Returns the overlap of two shapely.Polygon() objects.
""" Returns a dict with the overlap of two shapely.Polygon() objects, the overlap percentage and the overlap area.
:param poly1: first shapely.Polygon() object
:param poly2: second shapely.Polygon() object
:return: overlap polygon as shapely.Polygon() object
......
......@@ -174,22 +174,23 @@ class L1A_object(object):
del self.logger, self.GMS_identifier['logger'], self.MetaObj.logger
def fill_from_disk(self,tuple_GMS_subset):
"""This method does NOT read any array data from disk"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
for key,value in zip(GMSfileDict.keys(), GMSfileDict.values()):
setattr(self, key, value)
[setattr(self, key, value) for key, value in GMSfileDict.items()] # copy all attributes from GMS file
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape = tuple_GMS_subset[1][0]
self.arr_pos = tuple_GMS_subset[1][1]
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, self.job_CPUs,append=1)
# if self.arr_pos is None:
# self.logger.info('Reading file: %s' % (self.baseN))
# else:
# self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
#PG_obj = PG.path_generator(self.__dict__)
#get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0]+'.hdr'
#self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()), self.arr_shape, self.arr_pos,self.logger)
#if self.arr_pos is None: self.logger.info('Reading file: %s' % (self.baseN))
#else: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
self.GMS_identifier = collections.OrderedDict({'image_type':self.image_type, 'Satellite':self.satellite, \
'Sensor':self.sensor,'Subsystem': self.subsystem, \
'logger':self.logger})
path_img_hdr = os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr'
path_img_hdr = os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr'
# path_cld_hdr = os.path.splitext(self.path_Outfile_L1A_masks)[0]+'.hdr'
# self.arr, self.meta = INP_R.read_ENVIfile(os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr', \
# self.arr_shape, self.arr_pos,self.logger)
......@@ -1039,7 +1040,13 @@ def merge_L1A_tiles_to_L1A_obj(list_L1A_tiles):
L1A_obj.arr_pos = None
list_ndarrays = [i for i in list_L1A_tiles[0].__dict__ if not callable(getattr(list_L1A_tiles[0],i)) and \
isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
L1A_obj = numba_array_merger(L1A_obj,list_ndarrays, list_L1A_tiles)
if list_ndarrays!=[]:
L1A_obj = numba_array_merger(L1A_obj,list_ndarrays, list_L1A_tiles)
# path_arr = PG.path_generator()
#assert list_ndarrays!=[], \
# 'Cannot merge L1A tiles because the given L1A objects do not have any numpy array attributes."'
# for ndarray in list_ndarrays:
# target_shape = L1A_obj.shape_fullArr[:2]+[getattr(list_L1A_tiles[0],ndarray).shape[2]] \
# if len(getattr(list_L1A_tiles[0],ndarray).shape) == 3 else L1A_obj.shape_fullArr[:2]
......@@ -1052,12 +1059,14 @@ def merge_L1A_tiles_to_L1A_obj(list_L1A_tiles):
@autojit
def numba_array_merger(L1A_obj, list_arraynames, list_L1A_tiles):
    """Merge per-tile numpy arrays into full-scene arrays attached to L1A_obj.

    For each attribute name in list_arraynames, a full-size empty array is
    allocated on L1A_obj (shape taken from L1A_obj.shape_fullArr, band count
    from the first tile) and every tile is copied into its position given by
    tile.arr_pos.

    :param L1A_obj:         target L1A object carrying shape_fullArr
    :param list_arraynames: names of the ndarray attributes to merge
    :param list_L1A_tiles:  L1A tile objects providing the arrays and arr_pos
    :return:                L1A_obj with the merged full-scene arrays set
    """
    for arrname in list_arraynames:
        first_tile_arr = getattr(list_L1A_tiles[0], arrname)
        # Keep the band dimension only for 3D arrays -> dynamic, works for
        # arr, cld_arr, ...
        is_3d = len(first_tile_arr.shape) == 3
        bands = [first_tile_arr.shape[2]] if is_3d else []
        target_shape = L1A_obj.shape_fullArr[:2] + bands
        setattr(L1A_obj, arrname, np.empty(target_shape, dtype=first_tile_arr.dtype))
        for tile in list_L1A_tiles:
            rowStart, rowEnd = tile.arr_pos[0]
            colStart, colEnd = tile.arr_pos[1]
            # arr_pos bounds are inclusive -> +1 on the slice end
            getattr(L1A_obj, arrname)[rowStart:rowEnd + 1, colStart:colEnd + 1] = getattr(tile, arrname)
    return L1A_obj
\ No newline at end of file
This diff is collapsed.
......@@ -27,6 +27,8 @@ import inspect
import errno
import dill
import datetime
import ogr
import osr
from misc.helper_functions import get_mask_classdefinition, get_mask_colormap, setup_logger
import misc.helper_functions as HLP_F
......@@ -204,7 +206,6 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification = True):
reorder_ENVI_header(outpath_hdr)
elif isinstance(getattr(InObj,arrayname),str) and os.path.isfile(getattr(InObj,arrayname)):
print('OUT_W207')
'''object attribute contains PATH to array'''
path_to_array = getattr(InObj,arrayname)
outpath_arr = '%s%s' %(os.path.splitext(outpath_hdr)[0],os.path.splitext(path_to_array)[1])
......@@ -278,4 +279,30 @@ def write_global_benchmark_output(list__processing_time__all_runs, list__IO_time
str2write_IOtimes, str2write_times_per_ds,'\n'])
if not os.path.isdir(job.path_benchmarks): os.makedirs(job.path_benchmarks)
with open(os.path.join(job.path_benchmarks,job.ID),'a') as outF:
outF.write(str2write)
\ No newline at end of file
outF.write(str2write)
def write_shp(shapely_poly, path_out, prj=None):
    """Write a shapely polygon to disk as an Esri Shapefile.

    :param shapely_poly: shapely.Polygon() object to be written
    :param path_out:     output path of the shapefile
    :param prj:          projection as a WKT string (optional; if None the
                         layer is created without a spatial reference)
    """
    print('Writing %s ...' % path_out)
    # The Shapefile driver refuses to overwrite an existing dataset.
    if os.path.exists(path_out):
        os.remove(path_out)
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
    srs = None
    if prj is not None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
    layer = ds.CreateLayer('', srs, ogr.wkbPolygon)
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))  # Add one attribute
    # Create a new feature (attribute and geometry)
    feat = ogr.Feature(layer.GetLayerDefn())
    feat.SetField('id', 123)  # NOTE(review): feature id is hard-coded
    # Make a geometry from the Shapely object via its WKB representation
    feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_poly.wkb))
    layer.CreateFeature(feat)
    # Dereference everything so GDAL flushes and closes the dataset
    ds = layer = feat = None
\ No newline at end of file
......@@ -315,7 +315,7 @@ def sceneID_to_trueDataCornerLonLat(scene_ID):
return postgreSQL_poly_to_cornerLonLat(postgreSQL_geometry_to_postgreSQL_poly(pgSQL_geom))
def scene_ID_to_shapelyPolygon(scene_ID):
    """Returns a LonLat shapely.Polygon() object corresponding to the given scene_ID."""
    # Corner coordinates come from the database, reordered into polygon order.
    return Polygon(reorder_CornerLonLat(sceneID_to_trueDataCornerLonLat(scene_ID)))
def CornerLonLat_to_shapelyPoly(CornerLonLat):
......
......@@ -23,6 +23,7 @@ import glob
import dill
import builtins
import inspect
import time
print('##################################################################################################')
called_from_iPyNb = 1 if 'ipykernel/__main__.py' in sys.argv[0] else 0
......@@ -311,33 +312,38 @@ def run_processController_in_multiprocessing(usecase_data_list):
L1A_DBObjects = []
"""combine newly and earlier processed L1A data"""
#print(L1A_newObjects[0].arr_shape)
#print(L1A_newObjects[0].arr_pos)
#print(L1A_newObjects[0].arr)
#print(L1A_newObjects[0].arr.shape)
L1A_Instances = L1A_newObjects + L1A_DBObjects
#L1A_Instances = L1A_newObjects
#L1A_Instances = L1A_DBObjects
grouped_L1A_Tiles = HLP_F.group_objects_by_attribute(L1A_Instances,'baseN')
"""process all L1A data to fullfill the requirements of L1B processing"""
import time
# processed_grouped_L1A_Tiles = []
for scene_tilelist in grouped_L1A_Tiles: # scene-wise processing (grouped by baseN)
L1A_obj = L1A_P.merge_L1A_tiles_to_L1A_obj(scene_tilelist)
print('duuuurch'); sys.exit()
"""L1A_obj enthält KEINE ARRAYDATEN!, nur die für die ganze Szene gültigen Metadaten"""
if scene_tilelist[0].scene_ID == 14536400:
L1A_obj = scene_tilelist[0]
#[print(i) for i in scene_tilelist[0].meta]
COREG_obj = L1B_P.COREG(L1A_obj.__dict__.copy())
COREG_obj = L1B_P.COREG(L1A_obj.__dict__.copy(),v=1)
"""1. get reference image that has overlap to current im or tile"""
COREG_obj.get_reference_image_params()
#"""1. get reference image that has overlap to current im or tile"""
#COREG_obj.get_reference_image_params()
"""2. get optimal window position and size to be used for shift calculation (within overlap)"""
COREG_obj.get_opt_winpos_winsize()
#"""2. get optimal window position and size to be used for shift calculation (within overlap)"""
#COREG_obj.get_opt_winpos_winsize()
#[print(k,v) for k,v in L1B_obj.__dict__.items()]
"""3. calculate shifts"""
COREG_obj.calculate_spatial_shifts(v=0)
COREG_obj.calculate_spatial_shifts()
print('shifts:', COREG_obj.x_shift_px, COREG_obj.y_shift_px)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment