Commit f1f3607b authored by Daniel Scheffler's avatar Daniel Scheffler

Landsat-7 processing now available in exec_mode='Python' and parallelization_level='scenes'; bugfixes in MGRS tile creation; database query for spatial reference not fully operable yet
GEOP:
- calc_FullDataset_corner_positions(): bugfix for returning wrong data corner positions for MGRS tiles
- transform_utm_to_wgs84(): bugfix for returning "wrong UTM zone" error in case of negative zone number (south)
- geotransform2mapinfo(): added NotImplementedError in case of rotated datasets
- mapinfo2geotransform(): bugfix for improper handling of UL coordinate if UL coordinate is not 1/1
- moved find_line_intersection_point() from calc_FullDataset_corner_positions() to top level
L1A_P:
- log_for_fullArr_or_firstTile(): added check of MGRS_tile_obj.logAtThisTile in case of MGRS tiles -> needed for proper logging of MGRS tiles if no tile with xmin=0/ymin=0 exists
- delete_tempFiles() bugfix for running delete_previous_proc_level_results() for the wrong processing level
- added method __getstate__(): for defining custom pickling settings for logger
- added method __setstate__(): for defining custom unpickling settings for logger
- replaced all temp_logger-calls by self.logger calls
- log_for_fullArr_or_firstTile(self, log_msg, subset=None, logger=None): deleted deprecated logger argument
L1B_P:
- calculate_spatial_shifts(): updated the warning that is issued if calculated shifts exceed the given threshold
- calculate_spatial_shifts(): bugfix for returning an updated map info although shift calculation did not pass validity test
- L1B_obj: replaced all temp_logger-calls by self.logger calls
- added a new version of get_reference_image_params() -> not fully operable yet
- renamed old version of get_reference_image_params() to get_reference_image_params_pgSQL()
- added editor fold to class GMS_COREG and commented out imports
- commented out deprecated imports and extensions of sys.path
L1C_P:
- L1C_obj: replaced all temp_logger-calls by self.logger calls
L2A_P:
- correct_shifts(): bugfix for not processing scenes if no reference scene was available
- correct_shifts(): out_gsd is now properly set
- L2A_obj: replaced all temp_logger-calls by self.logger calls
META:
- added docstring to metaDict_to_metaODict()
OUT_W:
- Obj2ENVI():
     - better documentation
     - replaced all temp_logger-calls by InObj.logger calls
- ASCII_writer(): proper exclusion of loggers from JSON serialization
DB_T:
- updated GMS_JOB.jobs_table_columns due to two deleted columns in 'jobs' table
- updated data_DB_updater() due to changes in attribute naming of GMS object
- added class SpatialIndexMediator(): for fast geospatial queries targeting 'scenes' table of postgreSQL database
HLP_F:
- get_subset_GMS_obj(): major revision
    - renamed some object attributes -> new attributes: data_corners_LonLat, data_corners_utm, fullScene_corner_lonlat, bounds_LonLat, bounds_utm, corner_utm, corner_lonlat
    - bugfix for wrong map info of the returned subsetted GMS objects
    - much better documentation
- cut_GMS_obj_into_MGRS_tiles():
    - better documentation
    - moved calculation of tile attributes to get_subset_GMS_obj()
    - added MGRS_tile_obj.logAtThisTile attribute for proper logging for MGRS tiles
- corner_coord_to_minmax():
    - added documentation
    - updated first timestamp of Landsat-7 SLC-off data
PG:
- added job 26185268  # 25x L7 SLC off
parent 4d148f9f
......@@ -663,7 +663,6 @@
{
"ename": "AssertionError",
"evalue": "Mixed data types in postgreSQL matching expressions are not supported. Got [].",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mAssertionError\u001b[0m Traceback (most recent call last)",
......@@ -674,7 +673,8 @@
"\u001b[1;32m/home/gfz-fe/GeoMultiSens/misc/database_tools.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[1;34m'database connection fault'\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 111\u001b[0m \u001b[0mcursor\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mconnection\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcursor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 112\u001b[1;33m \u001b[0mcondition\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"WHERE \"\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m\" AND \"\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mget_postgreSQL_matchingExp\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mv\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mv\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mcond_dict\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m\\\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 113\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mcond_dict\u001b[0m \u001b[1;32melse\u001b[0m \u001b[1;34m\"\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 114\u001b[0m \u001b[0mcmd\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"SELECT \"\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m','\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mvals2return\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m\" FROM \"\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mtablename\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m\" \"\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mcondition\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m/home/gfz-fe/GeoMultiSens/misc/database_tools.py\u001b[0m in \u001b[0;36mget_postgreSQL_matchingExp\u001b[1;34m(key, value)\u001b[0m\n\u001b[0;32m 83\u001b[0m \u001b[0mdTypes_in_value\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mset\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mtype\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mvalue\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 84\u001b[0m \u001b[1;32massert\u001b[0m \u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdTypes_in_value\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m==\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0;31m\\\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 85\u001b[1;33m \u001b[1;34m'Mixed data types in postgreSQL matching expressions are not supported. Got %s.'\u001b[0m \u001b[1;33m%\u001b[0m\u001b[0mdTypes_in_value\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 86\u001b[0m \u001b[1;32massert\u001b[0m \u001b[0mdTypes_in_value\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;32min\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mint\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mstr\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 87\u001b[0m \u001b[0mpgList\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\",\"\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m\"'%s'\"\u001b[0m \u001b[1;33m%\u001b[0m\u001b[0mi\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mstr\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32melse\u001b[0m \u001b[1;34m\"%s\"\u001b[0m \u001b[1;33m%\u001b[0m\u001b[0mi\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mvalue\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mAssertionError\u001b[0m: Mixed data types in postgreSQL matching expressions are not supported. Got []."
]
],
"output_type": "error"
}
],
"source": [
......@@ -1130,8 +1130,8 @@
"start_time": "2016-08-24T22:40:17.916639"
},
"code_folding": [
0,
23
0.0,
23.0
],
"collapsed": false,
"hidden": true
......@@ -1307,8 +1307,8 @@
"start_time": "2016-08-24T22:41:43.675055"
},
"code_folding": [
10,
37
10.0,
37.0
],
"collapsed": false,
"hidden": true,
......@@ -2089,7 +2089,7 @@
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
"version": 3.0
},
"file_extension": ".py",
"mimetype": "text/x-python",
......@@ -2103,7 +2103,7 @@
"navigate_menu": true,
"number_sections": true,
"sideBar": true,
"threshold": 6,
"threshold": 6.0,
"toc_cell": false,
"toc_section_display": "block",
"toc_window_display": false
......@@ -2111,4 +2111,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}
\ No newline at end of file
......@@ -2703,7 +2703,7 @@ def Intersect_ext_hdr(hdr_l, ref_ext, v=0):
return hdr_inters
def transform_utm_to_wgs84(easting, northing, zone):
def transform_utm_to_wgs84(easting, northing, zone, south=False):
# ''' returns lon, lat, altitude '''
# utm_coordinate_system = osr.SpatialReference()
# utm_coordinate_system.SetWellKnownGeogCS("WGS84") # Set geographic coordinate system to handle lat/lon
......@@ -2717,9 +2717,10 @@ def transform_utm_to_wgs84(easting, northing, zone):
# print('hier2')
# print(utm_to_wgs84_geo_transform.TransformPoint(easting, northing, 0))
# return utm_to_wgs84_geo_transform.TransformPoint(easting, northing, 0) #[Lon,Lat,meters above sea level]
UTM = pyproj.Proj(proj='utm', zone=zone, ellps='WGS84')
UTM = pyproj.Proj(proj='utm', zone=abs(zone), ellps='WGS84',south=(zone<0 or south))
return UTM(easting, northing, inverse=True)
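A quick usage sketch of the fixed behaviour (hypothetical coordinates; assumes pyproj is importable): a negative zone number now selects the southern hemisphere instead of producing a "wrong UTM zone" error.
import pyproj
zone = -33  # zone 33 south, passed as a negative number
UTM = pyproj.Proj(proj='utm', zone=abs(zone), ellps='WGS84', south=(zone < 0))
lon, lat = UTM(500000.0, 7000000.0, inverse=True)  # -> lon = 15.0 (central meridian), lat ≈ -27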
def transform_wgs84_to_utm(lon, lat, zone=None):
""" returns easting, northing, altitude. If zone is not set it is derived automatically.
:param lon:
......@@ -3149,6 +3150,9 @@ def geotransform2mapinfo(gt,prj):
:returns: ENVI map info, e.g. [ UTM , 1 , 1 , 256785.0 , 4572015.0 , 30.0 , 30.0 , 43 , North , WGS-84 ]
:rtype: list
"""
if gt[2]!=0 or gt[4]!=0: # TODO
raise NotImplementedError('Currently rotated datasets are not supported.')
srs = osr.SpatialReference()
srs.ImportFromWkt(prj)
Proj4 = [i[1:] for i in srs.ExportToProj4().split()]
......@@ -3163,6 +3167,7 @@ def geotransform2mapinfo(gt,prj):
['UTM',1,1,UL_X,UL_Y,GSD_X,abs(GSD_Y),srs.GetUTMZone(),is_UTM_North_South(utm2wgs84(UL_X,UL_Y)),ellps]
return map_info
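A minimal illustration of the new guard (hypothetical geotransforms; the rotation terms live in gt[2] and gt[4]):
def assert_not_rotated(gt):
    if gt[2] != 0 or gt[4] != 0:
        raise NotImplementedError('Currently rotated datasets are not supported.')
assert_not_rotated((300000.0, 30.0, 0.0, 4600000.0, 0.0, -30.0))    # axis-aligned -> passes
# assert_not_rotated((300000.0, 28.0, 9.0, 4600000.0, 9.0, -28.0))  # rotated -> raises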
def mapinfo2geotransform(map_info):
# type: (list) -> list
"""Builds GDAL GeoTransform tuple from an ENVI map info.
......@@ -3170,9 +3175,16 @@ def mapinfo2geotransform(map_info):
:returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0]
:rtype: tuple
"""
# FIXME rotated datasets are currently not supported -> function must return rotation at gt[2] and gt[4]
if map_info[1]==1 and map_info[2]==1:
ULmapX, ULmapY = float(map_info[3]),float(map_info[4])
else:
ULmapX = float(map_info[3])-(float(map_info[1])*float(map_info[5])-float(map_info[5]))
ULmapY = float(map_info[4])+(float(map_info[2])*float(map_info[6])-float(map_info[6]))
assert isinstance(map_info,list) and len(map_info)==10,\
"The map info argument has to be a list of length 10. Got %s." %map_info
return [float(map_info[3]),float(map_info[5]),0.,float(map_info[4]),0.,-float(map_info[6])]
return [ULmapX,float(map_info[5]),0.,ULmapY,0.,-float(map_info[6])]
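A worked example of the new reference-pixel handling (values are hypothetical):
map_info = ['UTM', 2, 2, 300000.0, 4600000.0, 30.0, 30.0, 33, 'North', 'WGS-84']
# reference pixel is (2/2), so UL is shifted (refX-1) pixels left and (refY-1) pixels up:
# ULmapX = 300000.0 - (2*30.0 - 30.0) = 299970.0
# ULmapY = 4600000.0 + (2*30.0 - 30.0) = 4600030.0
mapinfo2geotransform(map_info)  # -> [299970.0, 30.0, 0.0, 4600030.0, 0.0, -30.0]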
def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
"""Returns (UL, LL, LR, UR) in the same coordinate units like the given geotransform."""
......@@ -3243,6 +3255,7 @@ def get_footprint_polygon(CornerLonLat):
def get_overlap_polygon(poly1, poly2):
""" Returns a dict with the overlap of two shapely.Polygon() objects, the overlap percentage and the overlap area.
:param poly1: first shapely.Polygon() object
:param poly2: second shapely.Polygon() object
:return: overlap polygon as shapely.Polygon() object
......@@ -3432,6 +3445,22 @@ def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.9
return degCel
def find_line_intersection_point(line1, line2):
xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
def det(a, b):
return a[0] * b[1] - a[1] * b[0]
div = det(xdiff, ydiff)
if div == 0:
raise Exception('lines do not intersect')
d = (det(*line1), det(*line2))
x = det(d, xdiff) / div
y = det(d, ydiff) / div
return x, y
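A quick sanity check of the now top-level helper (made-up input lines, each given as a pair of (x, y) points):
line1 = ((0.0, 0.0), (4.0, 4.0))  # one diagonal
line2 = ((0.0, 4.0), (4.0, 0.0))  # the opposite diagonal
assert find_line_intersection_point(line1, line2) == (2.0, 2.0)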
def calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=True, algorithm='shapely'):
"""
Calculates the image coordinates of the true data corners for rotated datasets.
......@@ -3447,7 +3476,8 @@ def calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=True, algor
rows, cols = mask_1bit.shape[:2]
if sum([mask_1bit[0,0], mask_1bit[0,cols-1], mask_1bit[rows-1,0], mask_1bit[rows-1,cols-1]]) == 1:
if sum([mask_1bit[0,0], mask_1bit[0,cols-1], mask_1bit[rows-1,0], mask_1bit[rows-1,cols-1]]) == 4:
# if mask contains pixel value 1 in all of the four corners: return outer corner coords
out = [(0,0),(0,cols-1),(rows-1,0),(rows-1,cols-1)] # UL, UR, LL, LR
else:
assert algorithm in ['shapely','numpy'], "Algorithm '%s' does not exist. Choose between 'shapely' and 'numpy'." %algorithm
......@@ -3467,7 +3497,8 @@ def calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=True, algor
poly = Polygon(list(set(poly.exterior.coords))).convex_hull # eliminate duplicates except one (because poly is closed)
unique_corn_YX = sorted(set(poly.exterior.coords), key=lambda x: poly.exterior.coords[:].index(x)) # [(rows,cols),rows,cols]
if assert_four_corners:
if assert_four_corners or len(unique_corn_YX)==4:
# sort calculated corners like this: [UL, UR, LL, LR]
assert len(unique_corn_YX)==4, 'Found %s corner coordinates instead of 4. Exiting.' %len(unique_corn_YX)
get_dist = lambda P1yx ,P2yx : np.sqrt((P1yx[0]-P2yx[0])**2 + (P1yx[1]-P2yx[1])**2)
......@@ -3499,23 +3530,6 @@ def calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=True, algor
[StartStopRows_in_first_dataCol, StartStopRows_in_last_dataCol, StartStopCols_in_first_dataRow,
StartStopCols_in_last_dataRow]]:
''' In case of cut image corners (e.g. ALOS AVNIR-2 Level-1B2): Calculation of trueDataCornerPos outside of the image.'''
# print ('Image corners detected to be cut.')
def find_line_intersection_point(line1, line2):
xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
def det(a, b):
return a[0] * b[1] - a[1] * b[0]
div = det(xdiff, ydiff)
if div == 0:
raise Exception('lines do not intersect')
d = (det(*line1), det(*line2))
x = det(d, xdiff) / div
y = det(d, ydiff) / div
return x, y
line_N = ((StartStopCols_in_first_dataRow[1], first_dataRow),
(last_dataCol, StartStopRows_in_last_dataCol[0]))
line_S = ((first_dataCol, StartStopRows_in_first_dataCol[1]),
......@@ -3530,9 +3544,9 @@ def calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=True, algor
list(reversed(find_line_intersection_point(line_S, line_O)))]
else:
dataRow_in_first_dataCol = np.mean(StartStopRows_in_first_dataCol)
dataRow_in_last_dataCol = np.mean(StartStopRows_in_last_dataCol)
dataCol_in_first_dataRow = np.mean(StartStopCols_in_first_dataRow)
dataCol_in_last_dataRow = np.mean(StartStopCols_in_last_dataRow)
corners = [(first_dataRow, dataCol_in_first_dataRow), (dataRow_in_last_dataCol, last_dataCol),
(last_dataRow, dataCol_in_last_dataRow), (dataRow_in_first_dataCol, first_dataCol)]
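For context, a minimal sketch of the corrected shortcut (synthetic mask): only a mask that is valid in all four image corners may now skip the full corner detection.
import numpy as np
mask_1bit = np.ones((10, 12), dtype=np.uint8)  # fully valid scene
rows, cols = mask_1bit.shape
if sum([mask_1bit[0,0], mask_1bit[0,cols-1], mask_1bit[rows-1,0], mask_1bit[rows-1,cols-1]]) == 4:
    out = [(0,0), (0,cols-1), (rows-1,0), (rows-1,cols-1)]  # UL, UR, LL, LR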
......
......@@ -42,33 +42,30 @@ class L1C_object(L1B_object):
super().__init__(None,None)
if L1B_obj: [setattr(self, key, value) for key,value in L1B_obj.__dict__.items()]
self.proc_level = 'L1C'
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) if L1B_obj else None
self.lonlat_arr = None # set by self.get_lonlat_coord_array()
self.VZA_arr = None # set by self.calc_VZA_array()
self.SZA_arr = None # set by self.calc_SZA_SAA_array()
self.SAA_arr = None # set by self.calc_SZA_SAA_array()
self.RAA_arr = None # set by self.calc_RAA_array()
def get_lonlat_coord_array(self, subset=None):
"""Calculates pixelwise 2D-array with longitude and latitude coordinates. Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile(temp_logger,'Calculating lonlat array.',subset)
self.log_for_fullArr_or_firstTile('Calculating lonlat array.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
lonlat_arr = GEOP.get_lonlat_coord_array(self.shape_fullArr, subset[1],
GEOP.mapinfo2geotransform(self.meta['map info']),
self.meta['coordinate system string'], mask_1bit_temp, fillVal)[0]
......@@ -87,17 +84,16 @@ class L1C_object(L1B_object):
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile(temp_logger,'Calculating acquisition and illumination geometry arrays.',subset)
self.log_for_fullArr_or_firstTile('Calculating acquisition and illumination geometry arrays.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
......@@ -108,11 +104,11 @@ class L1C_object(L1B_object):
else:
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
temp_logger, mask_1bit_temp, fillVal)
self.logger, mask_1bit_temp, fillVal)
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta['AcqDate'],
self.meta['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta['overpass duraction sec'],
temp_logger, mask_1bit_temp, fillVal,
self.logger, mask_1bit_temp, fillVal,
accurracy=job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if job.SZA_SAA_calculation_accurracy == 'fine' else None)
......@@ -130,20 +126,17 @@ class L1C_object(L1B_object):
def atm_corr(self, subset=None):
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
self.log_for_fullArr_or_firstTile(temp_logger,'Dummy atmospheric correction started.',subset) # FIXME
self.log_for_fullArr_or_firstTile('Dummy atmospheric correction started.',subset) # FIXME
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
self.arr = self.arr # FIXME implement atmospheric correction
def delete_tempFiles(self):
self.delete_acquisition_illumination_geometry()
def delete_acquisition_illumination_geometry(self):
self.VZA_arr = None # not needed anymore
self.SZA_arr = None # not needed anymore
self.SAA_arr = None # not needed anymore
self.RAA_arr = None # not needed anymore
# delete previous proc_level results
if getattr(job,'exec__%sP'%self.proc_level)[2]:
self.delete_previous_proc_level_results()
......@@ -50,6 +50,8 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
- cliptoextent (bool): True: clip the input image to its actual bounds while deleting possible no data
areas outside of the actual bounds, default = True
"""
# FIXME this method must define the target grid, even if no reference is available!
illegal_kw = [i for i in kwargs if i not in ['align_grids','out_gsd','match_gsd','no_resamp','cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" %illegal_kw[0]
......@@ -68,6 +70,15 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
config_dicts = []
for obj in dicts_GMS_obj:
# FIXME workaround for a missing reference geotransform -> actually the target grid should be calculated here, not the gt
assert isinstance(obj,dict)
if not obj['coreg_info']['reference geotransform']:
obj['coreg_info']['reference geotransform'] = GEOP.mapinfo2geotransform(
obj['coreg_info']['original map info'])
obj['coreg_info']['reference geotransform'][1] = usecase.target_gsd[0]
obj['coreg_info']['reference geotransform'][5] = -abs(usecase.target_gsd[1])
item2add = [obj]
for attrname in attrnames2deshift:
attritem2add = item2add+[attrname]
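A sketch of the fallback above with made-up values: if no reference geotransform is available, the original map info is converted and the pixel size is forced to the usecase target GSD.
coreg_info = {'reference geotransform': None,
              'original map info': ['UTM', 1, 1, 300000.0, 4600000.0, 30.0, 30.0, 33, 'North', 'WGS-84']}
target_gsd = [10, 10]  # hypothetical usecase.target_gsd
if not coreg_info['reference geotransform']:
    gt = GEOP.mapinfo2geotransform(coreg_info['original map info'])  # [300000.0, 30.0, 0.0, 4600000.0, 0.0, -30.0]
    gt[1] = target_gsd[0]        # X pixel size
    gt[5] = -abs(target_gsd[1])  # Y pixel size (negative for north-up)
    coreg_info['reference geotransform'] = gt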
......@@ -117,7 +128,7 @@ class DESHIFTER(object):
:Keyword Arguments:
- band2process (int): The index of the band to be processed within the given array (starts with 1),
default = None (all bands are processed)
- out_gsd (float): output pixel size in units of the reference coordinate system (default = pixel size
- out_gsd (list): output X/Y pixel size in units of the reference coordinate system (default = pixel size
of the input array), given values are overridden by match_gsd=True
- align_grids (bool): True: align the input coordinate grid to the reference (does not affect the
output pixel size as long as input and output pixel sizes are compatible
......@@ -131,7 +142,7 @@ class DESHIFTER(object):
"""
# unpack kwargs
self.band2process = kwargs.get('band2process',1) # starts with 1 # FIXME why?
out_gsd = kwargs.get('out_gsd' ,None)
out_gsd = kwargs.get('out_gsd' ,[None,None])
self.align_grids = kwargs.get('align_grids' ,False)
self.match_gsd = kwargs.get('match_gsd' ,False)
tempAsENVI = kwargs.get('tempAsENVI' ,False)
......@@ -169,25 +180,23 @@ class DESHIFTER(object):
self.x_shift_px = dict_GMS_obj['coreg_info']['corrected_shifts_px']['x']
self.y_shift_px = dict_GMS_obj['coreg_info']['corrected_shifts_px']['y']
self.updated_projection = self.ref_prj
self.deshift_needed = self.x_shift_px or self.y_shift_px
# FIXME do NOT define ref_gt, cols, rows, xgsd, ygsd here, but a reference grid!
self.deshift_needed = (self.x_shift_px or self.y_shift_px) and dict_GMS_obj['coreg_info']['success']
# set the rest
self.shift_xgsd = abs(self.shift_gt[1])
self.shift_ygsd = abs(self.shift_gt[5])
self.ref_xgsd = abs(self.ref_gt[1]) # if ref_gt is not available after coregistration: get_deshift_configs overwrites the empty ref gt with the original shift gt + the GSD of the usecase target grid
self.ref_ygsd = abs(self.ref_gt[5])
if self.deshift_needed:
self.ref_xgsd = abs(self.ref_gt[1]) # FIXME the reference grid must already be defined by the config, not by the reference scene
self.ref_ygsd = abs(self.ref_gt[5])
if out_gsd and not isinstance(out_gsd,int) or not (isinstance(out_gsd,list) and len(out_gsd)==2):
raise ValueError
#self.out_gsd = [self.shift_xgsd, self.shift_ygsd] if out_gsd is None else \
# [self.ref_xgsd, self.ref_ygsd] if self.match_gsd else \
# [out_gsd, out_gsd] if isinstance(out_gsd,int) else out_gsd
else:
if not self.deshift_needed:
self.align_grids = False # ensures proper processing in self.correct_shifts()
self.out_gsd = usecase.target_gsd
if out_gsd and type(out_gsd) not in [list,tuple] or len(out_gsd) != 2:
raise ValueError("'out_gsd' must be a list with two elements. Got %s." %out_gsd)
self.out_gsd = [self.shift_xgsd, self.shift_ygsd] if out_gsd is None else \
[self.ref_xgsd, self.ref_ygsd] if self.match_gsd else \
[out_gsd, out_gsd] if isinstance(out_gsd,int) else out_gsd
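Restated as a hypothetical standalone helper, the precedence implemented above is:
def resolve_out_gsd(out_gsd, shift_gsd, ref_gsd, match_gsd):
    # input pixel size if nothing is given, reference pixel size if match_gsd, else the explicit value
    return (list(shift_gsd) if out_gsd is None else
            list(ref_gsd) if match_gsd else
            [out_gsd, out_gsd] if isinstance(out_gsd, int) else list(out_gsd))
assert resolve_out_gsd(None, (30.0, 30.0), (10.0, 10.0), False) == [30.0, 30.0]
assert resolve_out_gsd(15,   (30.0, 30.0), (10.0, 10.0), True)  == [10.0, 10.0]  # match_gsd overrides
assert resolve_out_gsd(15,   (30.0, 30.0), (10.0, 10.0), False) == [15, 15]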
tmpBaseN = '%s__shifted_to__%s' %(dict_GMS_obj['entity_ID'],self.ref_entity_ID)
tempdir = '/dev/shm/GeoMultiSens/' if os.path.exists('/dev/shm/GeoMultiSens/') else\
......@@ -252,8 +261,8 @@ class DESHIFTER(object):
if self.cliptoextent:
if self.shift_gt==self.updated_gt:
temp_logger.info("Attribute '%s', band %s has only been clipped to it's extent because no shifts "
"have been detected." %(self.attrname2deshift,self.band2process))
temp_logger.info("Attribute '%s', band %s has only been clipped to it's extent because no valid "
"shifts have been detected." %(self.attrname2deshift,self.band2process))
else:
temp_logger.info("Only the map info of %s, attribute '%s', band %s has been updated because "
"'align_grids' is turned off, the pixel sizes keep the same and source and target "
......@@ -388,6 +397,7 @@ class DESHIFTER(object):
# resample cloud mask with nearest neighbour alg. in order to prevent changes in pixel values that represent different object classes
rsp_alg = 2 if self.attrname2deshift not in ['masks','mask_clouds'] else 0
if self.cliptoextent:
out_arr, out_gt, out_prj = GEOP.warp_ndarray(in_arr,self.shift_gt,self.shift_prj,self.ref_prj,
rsp_alg=rsp_alg, in_nodata=in_nodata, out_nodata=in_nodata, out_res=(xgsd,xgsd),
......
......@@ -22,13 +22,12 @@ class L2B_object(L2A_object):
super().__init__(None)
if L2A_obj: [setattr(self, key, value) for key,value in L2A_obj.__dict__.items()]
self.proc_level = 'L2B'
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) if L2A_obj else None
def spectral_homogenization(self, subset=None, kind='linear'):
src_cwls = self.meta['wavelength']
tgt_cwls = usecase.target_CWL # FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
assert kind in ['linear',], "%s is not a supported kind of homogenization." %kind
self.log_for_fullArr_or_firstTile(self.logger,'Performing spectral homogenization (%s) with target wavelength '
self.log_for_fullArr_or_firstTile('Performing spectral homogenization (%s) with target wavelength '
'positions at %s nm.' %(kind,', '.join(np.array(tgt_cwls[:-1]).astype(str))+' and %s' %tgt_cwls[-1]))
inArray = self.arr if isinstance(self.arr,np.ndarray) else \
......
......@@ -1656,15 +1656,22 @@ def get_special_values(GMS_identifier):
def metaDict_to_metaODict(metaDict,logger=None):
"""Converts a GMS metadata dictionary to an ordered dictionary according to the sorting given in
Output_writer.enviHdr_keyOrder.
:param metaDict: <dict> GMS metadata dictionary
:param logger: <logging.logger> if given, warnings will be logged. Otherwise they are raised.
"""
from gms_io.Output_writer import enviHdr_keyOrder
expected_keys = [k for k in enviHdr_keyOrder if k in metaDict]
only_gmsFile_keys = ['ViewingAngle_arrProv','IncidenceAngle_arrProv','projection']
unexpected_keys = [k for k in metaDict.keys() if k not in expected_keys and k not in only_gmsFile_keys]
if unexpected_keys:
msg = 'Got unexpected keys in metadata dictionary: %s. Adding them at the end of output header files.'\
%', '.join(unexpected_keys)
if logger: logger.warning(msg)
else: warnings.warn(msg)
meta_vals = [metaDict[k] for k in expected_keys] + [metaDict[k] for k in unexpected_keys]
return collections.OrderedDict(zip(expected_keys+unexpected_keys, meta_vals))
\ No newline at end of file
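Illustration with made-up keys (assuming enviHdr_keyOrder begins with 'description', 'samples', 'lines'):
meta = {'lines': 1000, 'my_custom_key': 1, 'description': 'test', 'samples': 2000}
odict = metaDict_to_metaODict(meta)  # warns about 'my_custom_key'
list(odict.keys())  # -> ['description', 'samples', 'lines', 'my_custom_key']  (unexpected keys go last)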
......@@ -114,7 +114,7 @@ class job:
exec__L1BP = [1, 1, 1]
exec__L1CP = [1, 1, 1]
exec__L2AP = [1, 1, 1]
exec__L2BP = [1, 1, 1]
exec__L2BP = [1, 1, 0]
exec__L2CP = [1, 1, 0]
if exec_mode=='Python':
for i in ['L1AP','L1BP','L1CP','L2AP','L2BP','L2CP']:
......@@ -165,9 +165,9 @@ class usecase:
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
virtual_sensor_name = query_vir(job.conn_db_meta, 'name',virtual_sensor_id)
datasetid_spectral_ref = query_vir(job.conn_db_meta, 'spectral_characteristics_datasetid', virtual_sensor_id)
target_CWL = query_vir(job.conn_db_meta, 'wavelengths_pos', virtual_sensor_id)
target_FWHM = query_vir(job.conn_db_meta, 'band_width', virtual_sensor_id)
target_gsd = query_vir(job.conn_db_meta, 'spatial_resolution', virtual_sensor_id)
target_CWL = query_vir(job.conn_db_meta, 'wavelengths_pos', virtual_sensor_id) # FIXME column is empty unless a known datasetid is chosen as spectral characteristics virtual sensor
target_FWHM = query_vir(job.conn_db_meta, 'band_width', virtual_sensor_id) # FIXME column is empty unless a known datasetid is chosen as spectral characteristics virtual sensor
target_gsd = query_vir(job.conn_db_meta, 'spatial_resolution', virtual_sensor_id) # table features only 1 value for X/Y-dims
target_gsd = [target_gsd,target_gsd] if type(target_gsd) in [int,float] else target_gsd
EPSG = query_vir(job.conn_db_meta, 'projection_epsg', virtual_sensor_id)
......
......@@ -40,6 +40,7 @@ def read_ENVIhdr_to_dict(hdr_path, logger=None):
if not os.path.isfile(hdr_path):
if logger: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
else: print ('read_ENVIfile: Input data not found at %s.'%hdr_path)
raise FileNotFoundError(hdr_path)
else:
SpyFileheader = envi.open(hdr_path)
return SpyFileheader.metadata
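With the added raise, a missing header now fails fast instead of silently returning None:
try:
    meta = read_ENVIhdr_to_dict('/path/does/not/exist.hdr')  # hypothetical path
except FileNotFoundError as e:
    print('Header not found: %s' % e)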
......
......@@ -27,6 +27,7 @@ import ogr
import osr
import builtins
import warnings
import logging
from itertools import chain
from misc import helper_functions as HLP_F
......@@ -109,20 +110,30 @@ def HDR_writer(meta_dic,outpath_hdr,logger=None):
def ASCII_writer(In,path_out_baseN):
assert isinstance(In,dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' %type(In)
for k,v in zip(list(In.keys()),list(In.values())):
for k,v in list(In.items()):
# if isinstance(v,np.ndarray) or isinstance(v,dict) or hasattr(v,'__dict__'):
## like this, if meta dicts should not go into the GMS file; but that is maybe not bad -> allows reading the metadata directly from the GMS file
if isinstance(v,datetime.datetime):
In[k] = v.strftime('%Y-%m-%d')
elif isinstance(v,logging.Logger):
In[k] = 'not set'
elif isinstance(v,collections.OrderedDict) or isinstance(v,dict):
if 'logger' in v:
In[k]['logger'] = 'not set'
elif isinstance(v, np.ndarray):
# delete every 3D Array and every larger than 100x100
# delete every 3D Array larger than 100x100
if len(v.shape)==2 and sum(v.shape) <= 200:
In[k] = v.tolist() # numpy arrays are not jsonable
else:
del In[k]
elif hasattr(v,'__dict__') and not isinstance(v,collections.OrderedDict):
elif hasattr(v,'__dict__'):
# deletes instances of objects and arrays, but no OrderedDicts
if hasattr(v,'logger'):
In[k].logger = 'not set'
del In[k]
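The effect in a minimal sketch (hypothetical record; standard library only): loggers are not JSON-serializable, so they are replaced by a placeholder before dumping.
import json, logging
record = {'entity_ID': 'LE71930242012278ASN00', 'logger': logging.getLogger('log__test')}
# json.dumps(record) would raise TypeError ('Object of type Logger is not JSON serializable'),
# hence the exclusion above:
record['logger'] = 'not set'
json.dumps(record)  # now succeeds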
# class customJSONEncoder(json.JSONEncoder):
# def default(self, obj):
# if isinstance(obj, np.ndarray):
......@@ -317,8 +328,8 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
'Input for Output writer is expected to be class type. Got %s.' %(type(InObj))
assert InObj.arr_shape in ['cube','MGRS_tile','block'],\
"Output_writer.Obj2ENVI supports only array shapes 'cube', 'MGRS_tile' and 'block'. Got %s." %InObj.arr_shape
InObj.logger = HLP_F.setup_logger('log__'+InObj.baseN, InObj.path_logfile,append=1)
# set MGRS info in case of MGRS tile
MGRS_info = None
if InObj.arr_shape=='MGRS_tile':
assert hasattr(InObj,'MGRS_info'), \
......@@ -331,24 +342,28 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
InObj.acquisition_date,InObj.entity_ID,InObj.logger,
MGRS_info=MGRS_info).get_path_imagedata() # FIXME this could lead to checking the wrong folder in Python exec_mode
# set dicts
image_type_dict = {'arr': InObj.image_type, 'masks': 'MAS', 'mask_clouds':'MAC'}
metaAttr_dict = {'arr': 'meta', 'masks': 'masks_meta', 'mask_clouds':'masks_meta'}
out_dtype_dict = {'arr': 'int16', 'masks': 'uint8' , 'mask_clouds':'uint8'}
print_dict = {'RSD_L1A': 'L1A satellite data', 'MAS_L1A': 'L1A masks', 'MAC_L1A': 'L1A cloud mask',
'RSD_L1B': 'L1B satellite data', 'MAS_L1B': 'L1B masks', 'MAC_L1B': 'L1B cloud mask',
'RSD_L1C': 'L1C atm. corrected reflectance data', 'MAS_L1C': 'L1C masks', 'MAC_L1C': 'L1C cloud mask',
'RSD_L2A': 'L2A geometrically homogenized data', 'MAS_L2A': 'L2A masks', 'MAC_L2A': 'L2A cloud mask',
'RSD_L2B': 'L2B spectrally homogenized data', 'MAS_L2B': 'L2B masks', 'MAC_L2B': 'L2B cloud mask',
'RSD_L2C': 'L2C MGRS tiled data', 'MAS_L2C': 'L2C masks', 'MAC_L2C': 'L2C cloud mask'}
# ensure InObj.masks exists
if not hasattr(InObj,'masks') or InObj.masks is None:
InObj.build_combined_masks_array() # creates InObj.masks and InObj.masks_meta
# loop through all attributes to write and execute writer
attributes2write = ['arr', 'masks'] + (['mask_clouds'] if write_masks_as_ENVI_classification else [])
for arrayname in attributes2write:
descriptor = '%s_%s' %(image_type_dict[arrayname],InObj.proc_level)
if hasattr(InObj,arrayname) and getattr(InObj,arrayname) is not None:
# initial assertions
assert arrayname in metaAttr_dict, "Obj2ENVI cannot yet write %s attribute." % arrayname
assert type(getattr(InObj,arrayname)) in [np.ndarray,str], \
"Expected a string or a numpy array for object attribute %s. Got %s." \
......@@ -360,7 +375,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
out_dtype = out_dtype_dict[arrayname]
meta_attr = metaAttr_dict[arrayname]
if not is_tempfile: InObj.log_for_fullArr_or_firstTile(InObj.logger,'Writing %s.' %print_dict[descriptor])
if not is_tempfile: InObj.log_for_fullArr_or_firstTile('Writing %s.' %print_dict[descriptor])
if isinstance(getattr(InObj,arrayname),str):
'''object attribute contains PATH to array. This is usually the case if the attribute has been read in
......@@ -380,7 +395,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
meta2write.update({'interleave': InObj.outInterleave, 'byte order': 0, 'header offset': 0,
'data type': HLP_F.dtype_lib_Python_IDL[out_dtype],
'lines': InObj.shape_fullArr[0], 'samples': InObj.shape_fullArr[1]})
meta2write = metaDict_to_metaODict(meta2write,InObj.logger)
meta2write = metaDict_to_metaODict(meta2write, InObj.logger)
if '__TEMPFILE' in path_to_array:
os.rename(path_to_array, outpath_arr)
......@@ -407,9 +422,11 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
raise NotImplementedError
if arrayname not in ['mask_clouds','mask_1bit']:
# read image data in subset
tempArr = gdalnumeric.LoadFile(path_to_array, cS, rS, cols, rows) # bands, rows, columns OR rows, columns
arr2write = tempArr if len(tempArr.shape)==2 else np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1) # rows, columns, (bands)
else:
# read mask data in subset
previous_procL = HLP_F.proc_chain[HLP_F.proc_chain.index(InObj.proc_level)-1]
PG_obj = PG.path_generator(InObj.__dict__,proc_level=previous_procL)
path_masks = PG_obj.get_path_maskdata()
......@@ -420,16 +437,20 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
if isinstance(getattr(InObj,arrayname),np.ndarray): # must be an if-condition because first if can change attribute type from string to np.ndarray
'''object attribute contains array'''