Commit a7920668 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

GEOP:

- warp_ndarray(): fixed a bug in the validation of the output dimensions calculated by rasterio that always expected a quadratic input array for performing a correction of out_cols and out_rows
- added reproject_shapelyPoly()
- added mapXY2imXY()
- added imXY2mapXY()
- added proj4_to_dict()
L1A_P:
- fixed a bug in calc_cloud_mask()
- fixed a bug in calc_corner_positions()
L1B_P:
- fixed a bug in get_reference_image_params()
- fixed a bug in apply_deshift_results() when applying results to a 2D array
L2A_P:
- changed titlebar
L2B_P:
- changed titlebar
L2C_P:
- changed titlebar
- added a dummy version of L2C_object
L2D_P:  Level 2D processor has been deleted due to changes in the L2 processing architecture
META:
- added META.CS_EPSG attribute
OUT_W:
- Obj2ENVI(): added functionality to write MGRS tiles as compressed ENVI files (compression mode not yet working)
- dirty hack in mask_to_ENVI_Classification() -> still not working
- added get_dtypeStr()
- added new version of write_shp()
DB_T:
- moved some code from get_scene_and_dataset_infos_from_postgreSQLdb() to new function execute_pgSQL_query()
- added get_pgSQL_geospatial_query_cond()
- added get_overlapping_MGRS_tiles()
- added get_overlapping_MGRS_tiles2()
- added pdDataFrame_to_sql_k()
- added import_shapefile_into_postgreSQL_database()
- added postgreSQL_table_to_csv() (not yet working)
HLP_F:
- added docstring to setup_logger()
- added gzipfile()
- added ENVIfile_to_ENVIcompressed()
- added get_subset_GMS_obj()
- added cut_GMS_obj_into_MGRS_tiles()
- added an extra assertion to postgreSQL_poly_to_cornerLonLat()
- added some type hints
- added postgreSQL_geometry_to_shapelyPolygon()
- added shapelyPolygon_to_postgreSQL_geometry()
- added get_imageCoords_from_shapelyPoly()
- added get_valid_arrSubsetBounds()
- added get_arrSubsetBounds_from_shapelyPolyLonLat() -> needed to get image coordinates of MGRS tiles
- added get_UL_LR_from_shapefile_features()
PG:
- added functionality to get paths for MGRS-tiled products
CFG:
- added additional output folder for MGRS-tiled products
PC:
- removed L2D_P import
- added cloud mask and corner coordinates calculation to L2A_map_2
- added L2C_map_1()
- adjusted L2B calls
- added first version of L2C calls
parent 9142f468
......@@ -2755,8 +2755,9 @@ def get_point_bounds(points):
(min_lon, min_lat, max_lon, max_lat) = mp_points.bounds
return min_lon, min_lat, max_lon, max_lat
def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None,
out_res=None,out_extent=None,out_dtype=None,rsp_alg=0,in_nodata=None,out_nodata=None):
def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None,
out_res=None, out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None):
"""Reproject / warp a numpy array with given geo information to target coordinate system.
:param ndarray: numpy.ndarray [rows,cols,bands]
:param in_gt: input gdal GeoTransform
......@@ -2781,99 +2782,132 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
if not ndarray.flags['OWNDATA']:
temp = np.empty_like(ndarray)
temp[:] = ndarray
ndarray = temp # deep copy: converts view to its own array in oder to avoid wrong output
ndarray = temp # deep copy: converts view to its own array in oder to avoid wrong output
with rasterio.drivers():
if outUL is not None:
assert outRowsCols is not None, 'outRowsCols must be given if outUL is given.'
outUL = [in_gt[0],in_gt[3]] if outUL is None else outUL
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
inEPSG, outEPSG = [WKT2EPSG(prj) for prj in [in_prj, out_prj]]
assert inEPSG , 'Could not derive input EPSG code.'
assert inEPSG, 'Could not derive input EPSG code.'
assert outEPSG, 'Could not derive output EPSG code.'
assert in_nodata is None or type(in_nodata) in [int,float]
assert out_nodata is None or type(out_nodata) in [int,float]
assert in_nodata is None or type(in_nodata) in [int, float]
assert out_nodata is None or type(out_nodata) in [int, float]
src_crs = {'init': 'EPSG:%s' %inEPSG}
dst_crs = {'init': 'EPSG:%s' %outEPSG}
src_crs = {'init': 'EPSG:%s' % inEPSG}
dst_crs = {'init': 'EPSG:%s' % outEPSG}
if len(ndarray.shape)==3:
if len(ndarray.shape) == 3:
# convert input array axis order to rasterio axis order
ndarray = np.swapaxes(np.swapaxes(ndarray,0,2),1,2)
bands,rows,cols = ndarray.shape
rows,cols = outRowsCols if outRowsCols else rows,cols
ndarray = np.swapaxes(np.swapaxes(ndarray, 0, 2), 1, 2)
bands, rows, cols = ndarray.shape
rows, cols = outRowsCols if outRowsCols else rows, cols
else:
rows,cols = ndarray.shape if outRowsCols is None else outRowsCols
rows, cols = ndarray.shape if outRowsCols is None else outRowsCols
# set dtypes ensuring at least int16 (int8 is not supported by rasterio)
in_dtype = ndarray.dtype
out_dtype = ndarray.dtype if out_dtype is None else out_dtype
gt2bounds = lambda gt,r,c: [gt[0], gt[3] + r*gt[5], gt[0] + c*gt[1], gt[3]] # left, bottom, right, top
out_dtype = np.int16 if str(out_dtype) == 'int8' else out_dtype
ndarray = ndarray.astype(np.int16) if str(in_dtype) == 'int8' else ndarray
gt2bounds = lambda gt, r, c: [gt[0], gt[3] + r * gt[5], gt[0] + c * gt[1], gt[3]] # left, bottom, right, top
#get dst_transform
# get dst_transform
if out_gt is None and out_extent is None:
if outRowsCols:
outUL = [in_gt[0],in_gt[3]] if outUL is None else outUL
ulRC2bounds = lambda ul,r,c: [ul[0], ul[1] + r*in_gt[5], ul[0] + c*in_gt[1], ul[1]] # left, bottom, right, top
left, bottom, right, top = ulRC2bounds(outUL,rows,cols)
else: # outRowsCols is None and outUL is None: use in_gt
left, bottom, right, top = gt2bounds(in_gt,rows,cols)
#,im_xmax,im_ymin,im_ymax = corner_coord_to_minmax(get_corner_coordinates(self.ds_im2shift))
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1],
ul[1]] # left, bottom, right, top
left, bottom, right, top = ulRC2bounds(outUL, rows, cols)
else: # outRowsCols is None and outUL is None: use in_gt
left, bottom, right, top = gt2bounds(in_gt, rows, cols)
# ,im_xmax,im_ymin,im_ymax = corner_coord_to_minmax(get_corner_coordinates(self.ds_im2shift))
elif out_extent:
left, bottom, right, top = out_extent
#input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0]+(cols+1)*in_gt[1]) and\
bottom >= in_gt[3]+(rows+1)*in_gt[5] and top <= in_gt[3],\
"out_extent has to be completely within the input image bounds."
else: # out_gt is given
left, bottom, right, top = gt2bounds(in_gt,rows,cols)
# input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0] + (cols + 1) * in_gt[1]) and \
bottom >= in_gt[3] + (rows + 1) * in_gt[5] and top <= in_gt[3], \
"out_extent has to be completely within the input image bounds."
else: # out_gt is given
left, bottom, right, top = gt2bounds(in_gt, rows, cols)
if out_res is None:
# get pixel resolution in target coord system
prj_in_out = (isProjectedOrGeographic(in_prj),isProjectedOrGeographic(out_prj))
prj_in_out = (isProjectedOrGeographic(in_prj), isProjectedOrGeographic(out_prj))
assert None not in prj_in_out, 'prj_in_out contains None.'
if prj_in_out[0] == prj_in_out[1]:
out_res = (in_gt[1],abs(in_gt[5]))
elif prj_in_out == ('geographic','projected'):
out_res = (in_gt[1], abs(in_gt[5]))
elif prj_in_out == ('geographic', 'projected'):
raise NotImplementedError('Different projections are currently not supported.')
else: #('projected','geographic')
px_size_LatLon = np.array(pixelToLatLon([1,1],geotransform=in_gt,projection=in_prj)) - \
np.array(pixelToLatLon([0,0],geotransform=in_gt,projection=in_prj))
else: # ('projected','geographic')
px_size_LatLon = np.array(pixelToLatLon([1, 1], geotransform=in_gt, projection=in_prj)) - \
np.array(pixelToLatLon([0, 0], geotransform=in_gt, projection=in_prj))
out_res = tuple(reversed(abs(px_size_LatLon)))
print('OUT_RES NOCHMAL CHECKEN: ', out_res)
dst_transform,out_cols,out_rows = rio_calc_transform(
dst_transform, out_cols, out_rows = rio_calc_transform(
src_crs, dst_crs, cols, rows, left, bottom, right, top, resolution=out_res) # TODO keyword densify_pts=21 does not exist anymore (moved to transform_bounds()) -> could that be a problem? check warp outputs
rows_expected, cols_expected = abs(top-bottom) / out_res[1], abs(right-left) / out_res[0]
if rows==cols and rows_expected==cols_expected and inEPSG==outEPSG and out_rows!=out_cols:
out_rows,out_cols = rows_expected,cols_expected
rows_expected, cols_expected = abs(top - bottom) / out_res[1], abs(right - left) / out_res[0]
if rows == cols and rows_expected == cols_expected and inEPSG == outEPSG and out_rows != out_cols:
out_rows, out_cols = rows_expected, cols_expected
# fixes an issue where rio_calc_transform() does not return quadratic output image although input parameters
# correspond to a quadratic image and inEPSG equals outEPSG
aff = list(dst_transform)
out_gt = out_gt if out_gt else (aff[2],aff[0],aff[1],aff[5],aff[3],aff[4])
aff = list(dst_transform)
out_gt = out_gt if out_gt else (aff[2], aff[0], aff[1], aff[5], aff[3], aff[4])
src_transform = rasterio.transform.from_origin(in_gt[0], in_gt[3], in_gt[1], in_gt[5])
dict_rspInt_rspAlg = {0:RESAMPLING.nearest, 1:RESAMPLING.bilinear, 2:RESAMPLING.cubic,
3:RESAMPLING.cubic_spline, 4:RESAMPLING.lanczos, 5:RESAMPLING.average, 6:RESAMPLING.mode}
dict_rspInt_rspAlg = {0: RESAMPLING.nearest, 1: RESAMPLING.bilinear, 2: RESAMPLING.cubic,
3: RESAMPLING.cubic_spline, 4: RESAMPLING.lanczos, 5: RESAMPLING.average, 6: RESAMPLING.mode}
out_arr = np.zeros((bands,out_rows,out_cols), out_dtype)\
if len(ndarray.shape)==3 else np.zeros((out_rows,out_cols), out_dtype)
out_arr = np.zeros((bands, out_rows, out_cols), out_dtype) \
if len(ndarray.shape) == 3 else np.zeros((out_rows, out_cols), out_dtype)
# FIXME direct passing of src_transform and dst_transform results in a wrong output image. Maybe a rasterio-bug?
#rio_reproject(ndarray, out_arr, src_transform=src_transform, src_crs=src_crs, dst_transform=dst_transform,
# rio_reproject(ndarray, out_arr, src_transform=src_transform, src_crs=src_crs, dst_transform=dst_transform,
# dst_crs=dst_crs, resampling=dict_rspInt_rspAlg[rsp_alg])
# FIXME indirect passing causes Future warning
with warnings.catch_warnings():
warnings.simplefilter('ignore') # FIXME supresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0.
rio_reproject(ndarray, out_arr,
src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)
warnings.simplefilter('ignore') # FIXME supresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0.
try:
rio_reproject(ndarray, out_arr,
src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)
except KeyError:
print(in_dtype, str(in_dtype))
print(ndarray.dtype)
# convert output array axis order to GMS axis order [rows,cols,bands]
out_arr = out_arr if len(ndarray.shape)==2 else np.swapaxes(np.swapaxes(out_arr,0,1),1,2)
out_arr = out_arr if len(ndarray.shape) == 2 else np.swapaxes(np.swapaxes(out_arr, 0, 1), 1, 2)
return out_arr, out_gt, out_prj
def warp_mp(args):
    """Multiprocessing wrapper around warp_ndarray().

    :param args: tuple of (in_arr, gt, prj, out_prj, out_extent, rsp_alg, in_nodata)
    :return:     the warped numpy array (first element of the warp_ndarray() result)
    """
    arr, geotransform, src_prj, tgt_prj, extent, resampling, nodata = args
    return warp_ndarray(arr, geotransform, src_prj, tgt_prj,
                        out_extent=extent, rsp_alg=resampling, in_nodata=nodata)[0]
def reproject_shapelyPoly(shapelyPoly, tgt_prj):
    # type: (shapely.Polygon, str) -> shapely.Polygon
    """Reprojects a shapely polygon that has LonLat coordinates into the given target projection.

    :param shapelyPoly: <shapely.Polygon> the shapely polygon to be reprojected (must have lonlat coordinates)
    :param tgt_prj:     <str> WKT projection string (like GDAL projection)
    """
    # TODO not sure whether this really works only for lonlat input — confirm
    xy_pairs = np.swapaxes(np.array(shapelyPoly.exterior.coords.xy), 0, 1)
    proj4dict = proj4_to_dict(get_proj4info(proj=tgt_prj))
    # NOTE(review): the PROJ4 'datum' value is passed as 'ellps' here — verify this is intended
    transformer = pyproj.Proj(proj=proj4dict['proj'], zone=proj4dict['zone'], ellps=proj4dict['datum'])
    tgt_x, tgt_y = transformer(xy_pairs[:, 0], xy_pairs[:, 1], inverse=False)
    out_coords = np.zeros_like(xy_pairs)
    out_coords[:, 0] = tgt_x
    out_coords[:, 1] = tgt_y
    return Polygon(out_coords)
def zoom_2Darray_to_shapeFullArr(arr2D,shapeFullArr,subset=None,method='linear'):
assert method in ['linear','nearest']
......@@ -2900,11 +2934,6 @@ def adjust_acquisArrProv_to_shapeFullArr(arrProv,shapeFullArr,subset=None,bandwi
interpolated = zoom_2Darray_to_shapeFullArr(arr2interp,shapeFullArr,subset).astype(np.float32)
return interpolated
def warp_mp(args):
in_arr,gt,prj,out_prj,out_ext,rsp,in_nd = args
warped = warp_ndarray(in_arr,gt,prj,out_prj,out_extent=out_ext,rsp_alg=rsp,in_nodata=in_nd)[0]
return warped
def get_raster_size(minx, miny, maxx, maxy, cell_width, cell_height):
"""Determine the number of rows/columns given the bounds of the point data and the desired cell size"""
cols = int((maxx - minx) / cell_width)
......@@ -3030,6 +3059,22 @@ def pixelToMapYX(pixelCoords, path_im=None, geotransform=None, projection=None):
return mapYmapXPairs
def mapXY2imXY(mapXY, gt):
    # type: (tuple,list) -> tuple
    """Translates given map coordinates into pixel locations according to the given image geotransform.

    :param mapXY:   <tuple> the map coordinates to be translated in the form (x,y)
    :param gt:      <list> GDAL geotransform
    :returns:       <tuple> the corresponding image coordinates in the form (x,y)
    """
    map_x, map_y = mapXY
    im_x = (map_x - gt[0]) / gt[1]
    im_y = (map_y - gt[3]) / gt[5]
    return im_x, im_y
def imXY2mapXY(imXY, gt):
    # type: (tuple,list) -> tuple
    """Translates given pixel locations into map coordinates according to the given image geotransform.

    :param imXY:    <tuple> the image coordinates to be translated in the form (x,y)
    :param gt:      <list> GDAL geotransform
    :returns:       <tuple> the corresponding map coordinates in the form (x,y)
    """
    px_x, px_y = imXY
    # NOTE(review): abs() assumes a north-up geotransform; the y direction is handled
    # by the explicit minus sign — confirm for rotated/south-up geotransforms
    map_x = gt[0] + px_x * abs(gt[1])
    map_y = gt[3] - px_y * abs(gt[5])
    return map_x, map_y
def isProjectedOrGeographic(prj):
""":param prj: accepts EPSG, Proj4 and WKT projections"""
srs = osr.SpatialReference()
......@@ -3055,7 +3100,7 @@ def EPSG2WKT(EPSG_code):
def WKT2EPSG(wkt, epsg=os.environ['GDAL_DATA'].replace('/gdal','/proj/epsg')):
""" Transform a WKT string to an EPSG code
:param wkt: WKT definition
:param wkt: WKT definition, e.g. the result of gdal_ds.GetProjection()
:param epsg: the proj.4 epsg file (defaults to '/usr/local/share/proj/epsg')
:returns: EPSG code
http://gis.stackexchange.com/questions/20298/is-it-possible-to-get-the-epsg-value-from-an-osr-spatialreference-class-using-th
......@@ -3084,7 +3129,7 @@ def WKT2EPSG(wkt, epsg=os.environ['GDAL_DATA'].replace('/gdal','/proj/epsg')):
code = code if code else None # match or no match
else:
code = ac
return code
return int(code)
def get_UTMzone(ds=None,prj=None):
assert ds or prj, "Specify at least one of the arguments 'ds' or 'prj'"
......@@ -3117,9 +3162,10 @@ def geotransform2mapinfo(gt,prj):
return map_info
def mapinfo2geotransform(map_info):
# type: (list) -> list
"""Builds GDAL GeoTransform tuple from an ENVI map info.
:param map_info: ENVI map info (list), e.g. ['UTM', 1, 1, 192585.0, 5379315.0, 30.0, 30.0, 41, 'North', 'WGS-84']
:returns: GDAL GeoTransform, e.g. (249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0)
:returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0]
:rtype: tuple
"""
assert isinstance(map_info,list) and len(map_info)==10,\
......@@ -3165,13 +3211,21 @@ def get_proj4info(ds=None,proj=None):
srs.ImportFromWkt(ds.GetProjection() if ds else proj)
return srs.ExportToProj4()
def proj4_to_dict(proj4):
    # type: (str) -> dict
    """Converts a PROJ4-like string into a dictionary.

    :param proj4: <str> the PROJ4-like string, e.g. '+proj=utm +zone=33 +datum=WGS84'
    :returns:     <dict> mapping of PROJ4 keys to their string values
                  (flag-only tokens without '=' such as '+no_defs' are skipped)
    """
    result = {}
    for token in proj4.replace('+', '').split(' '):
        if '=' in token:
            key, value = token.split('=')
            result[key] = value
    return result
def corner_coord_to_minmax(corner_coords):
# type: (list) -> (float,float,float,float)
"""Converts [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax)
:param corner_coords: list of coordinates like [[x1,y1],[x2,y2],[]...]
"""
x_vals = [int(i[0]) for i in corner_coords]
y_vals = [int(i[1]) for i in corner_coords]
x_vals = [i[0] for i in corner_coords]
y_vals = [i[1] for i in corner_coords]
xmin,xmax,ymin,ymax = min(x_vals),max(x_vals),min(y_vals),max(y_vals)
return xmin,xmax,ymin,ymax
......
......@@ -15,6 +15,7 @@
# Section 1.4 Remote Sensing, GFZ Potsdam
#
###############################################################################
__author__='Daniel Scheffler'
########################### Library import ####################################
import numpy as np
......@@ -564,7 +565,7 @@ class L1A_object(object):
def set_arr_desc_from_MetaObj(self):
"""Sets self.arr_desc according to MetaObj.PhysUnit. To be called directly after getting MetaObj."""
self.arr_desc = 'DN' if self.MetaObj.PhysUnit=='DN' else \
'Rad' if self.MetaObj.PhysUnit=="W * m-2 * sr-1 * micrometer-1" else \
'Rad' if self.MetaObj.PhysUnit=="W * m-2 * sr-1 * micrometer-1" else \
'Ref' if re.search('TOA_Reflectance',self.MetaObj.PhysUnit,re.I) else \
'Temp' if re.search('Degrees Celsius',self.MetaObj.PhysUnit,re.I) else None # FIXME TOA_Ref
assert self.arr_desc, 'L1A_obj.MetaObj contains an unexpected physical unit: %s' %self.MetaObj.PhysUnit
......@@ -804,8 +805,9 @@ class L1A_object(object):
else:
subset = subset if subset else ['block', self.arr_pos] if self.arr_pos else ['cube', None]
bands, rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
arr_isPath = isinstance(self.arr, str) and os.path.isfile(self.arr)
inPath = self.arr if arr_isPath else self.MetaObj.Dataname
arr_isPath = isinstance(self.arr, str) and os.path.isfile(self.arr) # FIXME
inPath = self.arr if arr_isPath else self.MetaObj.Dataname if \
(hasattr(self,'MetaObj') and self.MetaObj) else self.meta['Dataname'] # FIXME ersetzen durch path generator?
if not self.path_cloud_class_obj:
self.log_for_fullArr_or_firstTile(
......@@ -829,9 +831,10 @@ class L1A_object(object):
else:
sensorcode = ('%s %s %s' %(self.satellite,self.sensor, self.subsystem)) \
if self.subsystem else '%s %s' %(self.satellite,self.sensor)
nbands = self.shape_fullArr[2] if len(self.shape_fullArr)==3 else 1
self.log_for_fullArr_or_firstTile(temp_logger,"'The chosen cloud classifier for %s is not suitable "
" for the given %s bands. %s bands expected. Cloud masking failed."
%(sensorcode,self.MetaObj.bands,CLD_obj.classifier.n_channels), subset)
%(sensorcode,nbands,CLD_obj.classifier.n_channels), subset)
mask_clouds = None
else:
pathlist_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier, get_all=True)
......@@ -898,7 +901,9 @@ class L1A_object(object):
temp_logger.info('Calculating true data corner positions (image and world coordinates)...')
self.trueDataCornerPos = GEOP.calc_FullDataset_corner_positions(self.mask_1bit) #[(ULrow,ULcol),(URrow,URcol),...]
trueCorPosXY = [tuple(reversed(i)) for i in self.trueDataCornerPos]
gt, prj = GEOP.mapinfo2geotransform(self.MetaObj.map_info), self.MetaObj.projection
gt = GEOP.mapinfo2geotransform(self.MetaObj.map_info
if hasattr(self,'MetaObj') and self.MetaObj else self.meta['map info'])
prj = self.MetaObj.projection if hasattr(self,'MetaObj') and self.MetaObj else self.meta['projection']
trueCorLatLon = GEOP.pixelToLatLon(trueCorPosXY,geotransform=gt,projection=prj)
self.trueDataCornerLonLat = [tuple(reversed(i)) for i in trueCorLatLon]
rows,cols = self.shape_fullArr[:2]
......
......@@ -282,9 +282,9 @@ class COREG(object):
self.verbose_out = os.path.join(PG.path_generator(dict_L1A_Instance).get_path_procdata(),
'CoReg_verboseOut__%s__shifted_to__%s' %(get_baseN(self.path_im2shift), get_baseN(self.path_imref)))
if not os.path.isdir(self.verbose_out): os.makedirs(self.verbose_out)
OUT_W.write_shp(self.imref_footprint_poly, os.path.join(self.verbose_out,'poly_imref.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp(self.footprint_poly, os.path.join(self.verbose_out,'poly_im2shift.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp(self.overlap_poly, os.path.join(self.verbose_out,'overlap_poly.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp_OLD(self.imref_footprint_poly, os.path.join(self.verbose_out, 'poly_imref.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp_OLD(self.footprint_poly, os.path.join(self.verbose_out, 'poly_im2shift.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp_OLD(self.overlap_poly, os.path.join(self.verbose_out, 'overlap_poly.shp'), GEOP.get_prjLonLat())
"""get bands to use for matching"""
self.get_opt_bands4matching(target_cwlPos_nm=550)
......@@ -377,6 +377,7 @@ class COREG(object):
["value_download"].strip(), scene_ID))
path = PG.path_generator(scene_ID = sc['scene_ID']).get_path_imagedata()
if not os.path.exists(path):
# add scene 2 download to scenes_jobs.missing_scenes
......@@ -386,6 +387,7 @@ class COREG(object):
# check if scene is downloading
download_start_timeout = 5 # seconds
processing_timeout = 5 # seconds # FIXME increase timeout if processing is really started
proc_level = None
while True:
proc_level_chk = DB_T.get_info_from_postgreSQLdb(
......@@ -400,6 +402,13 @@ class COREG(object):
break
if proc_level_chk == 'L1A':
ref_available=True; break
elif proc_level_chk == 'DOWNLOADED' and time.time()-t_dl_start >= processing_timeout: # proc_level='DOWNLOADED' but scene has not been processed
warnings.warn('L1A processing of reference scene %s (entity ID %s) timed out. '
'Coregistration of this scene failed.' % (self.baseN, self.scene_ID))
break
#DB_T.set_info_in_postgreSQLdb(job.conn_database,'scenes',
# {'proc_level':'METADATA'},{'id':sc['scene_ID']})
time.sleep(5)
else:
ref_available = True
......@@ -496,7 +505,7 @@ class COREG(object):
ds = gdal.Open(self.imref)
assert ds, 'The image %s can not be read by GDAL.' %self.imref_entity_ID
elif isinstance(self.imref,np.ndarray):
"""L1A_obj refernce in GDAL-Array konvertieren"""
"""L1A_obj reference in GDAL-Array konvertieren"""
path_src = PG.get_tempfile(ext='.bsq') # FIXME schreibt .bsq in tempfile
arr_ds = GEOP.ndarray2gdal(self.path_imref, path_src, geotransform=self.ref_gt,
projection=self.ref_prj, direction=3)
......@@ -505,7 +514,7 @@ class COREG(object):
arr_ds = None
[gdal.Unlink(p) for p in [path_src, os.path.splitext(path_src)[0]+'.hdr']]
else:
raise TypeError('COREG.imref must contain a path, a numpy array or None. Got %s' &type(self.imref))
raise TypeError('COREG.imref must contain a path, a numpy array or None. Got %s' %type(self.imref))
return ds
def get_ds_im2shift(self):
......@@ -522,7 +531,7 @@ class COREG(object):
arr_ds = None
gdal.Unlink(path_src)
else:
raise TypeError('COREG.im2shift must contain a path or a numpy array. Got %s' &type(self.im2shift))
raise TypeError('COREG.im2shift must contain a path or a numpy array. Got %s' %type(self.im2shift))
return ds
def get_cloudmask(self):
......@@ -585,7 +594,7 @@ class COREG(object):
%(self.imref_band4match, ref_cwl[self.imref_band4match-1]))
def get_image_windows_to_match(self, win_pos, win_sz, v=0):
# TODO scheinbar ist die neuere Version in CoReg-Sat implementiert -> genauso wie get_clip_window_properties()
# auflösungsfaktor
imref_gt = self.ref_gt
imref_proj = self.ref_prj
......@@ -641,7 +650,7 @@ class COREG(object):
map_yvals = [i[0] for i in imref_box_map] # UTM-HW
# gdalwarp für im2shift mit pixel aggregate
rsp_algor = 5 # 'average'
rsp_algor = 5 # 'average' # FIXME set to cubic
if gdal.VersionInfo().startswith('1'):
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "
......@@ -682,7 +691,7 @@ class COREG(object):
map_yvals = [i[0] for i in imref_box_map] # UTM-HW
# imref mit imref_box ausclippen und per pixel aggregate auf auflösung von im2shift downsamplen
rsp_algor = 5 #'average'
rsp_algor = 5 #'average' # FIXME set to cubic
if gdal.VersionInfo().startswith('1'):
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "
......@@ -917,6 +926,7 @@ class COREG(object):
self.updated_map_info[4] = str(float(self.map_info_to_update[4]) + self.y_shift_map)
print('Updated map info:',self.updated_map_info)
class L1B_object(L1A_object):
def __init__(self, L1A_obj, COREG_obj):
super().__init__(None)
......@@ -1006,7 +1016,7 @@ class L1B_object(L1A_object):
# overwrite array, only if it contains new data (if it has been clipped or resampled by DESHIFTER)
print("Applying deshift result to attribute '%s' of entity ID %s..." %(attrname,self.scene_ID))
compl_arr = np.dstack(tuple([OrdDicsAllBands[bandNr]['arr_shifted'] for bandNr in bandsNrs])) \
if bandsNrs[0] else OrdDic0['arr_shifted']
if len(bandsNrs)>1 else OrdDic0['arr_shifted']
setattr(self,attrname,compl_arr)
if attrname=='arr':
......
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 16 11:57:31 2015
###############################################################################
#
# Level 2A Processor:
# Spectral homogenization
#
###############################################################################
__author__='Daniel Scheffler'
@author: danschef
"""
import collections
import os
import time
......
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 16 11:57:31 2015
###############################################################################
#
# Level 2B Processor:
# Spectral homogenization
#
###############################################################################
__author__='Daniel Scheffler'
@author: danschef
"""
import builtins
import numpy as np
from scipy.interpolate import interp1d
......
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 16 11:57:31 2015
###############################################################################
#
# Level 2C Processor:
# Accuracy layers
#
###############################################################################
__author__='Daniel Scheffler'
@author: danschef
"""
shared = {}
res = {}
from algorithms.L2B_P import L2B_object
class L2C_object(L2B_object):
    """Dummy version of the Level 2C object (accuracy layers) — not yet implemented."""

    def __init__(self, L2B_obj):
        super().__init__(None)
        # take over all attributes of the given L2B object, if one was provided
        if L2B_obj:
            for key, value in L2B_obj.__dict__.items():
                setattr(self, key, value)
        self.proc_level = 'L2C'

    def calc_geometric_accurracy(self):
        # placeholder — geometric accuracy calculation not yet implemented
        pass

    def calc_spectral_accurracy(self):
        # placeholder — spectral accuracy calculation not yet implemented
        pass
\ No newline at end of file
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 16 11:57:31 2015
@author: danschef
"""
......@@ -1311,6 +1311,8 @@ class METADATA(object):
self.map_info = GEOP.geotransform2mapinfo(rasObj.geotransform,rasObj.projection)
self.projection = rasObj.projection
self.gResolution = abs(rasObj.geotransform[1])
self.CS_EPSG = GEOP.WKT2EPSG(rasObj.projection)
dict_conv_physUnit = {'Rad':"W * m-2 * sr-1 * micrometer-1",
'Ref':'TOA_Reflectance in [0-10000]',
'Temp':'Degrees Celsius with scale factor = 100'}
......@@ -1343,12 +1345,13 @@ class METADATA(object):
# 'SPOT_Ref' :"(1) Ortho Level3A01 Product; '"+self.Dataname+"'\n (2) Int16 TOA_Reflectance in [0-10000] (fillPixels: -99, zeroPixels:0, saturatedPixels: 32767 (Max of Int16))'"}
# Meta['description'] = descr_dic[self.Satellite'_'+GEOP_o.conversion_type] ##
Meta['samples'] = self.cols
Meta['lines'] = self.rows
Meta['bands'] = self.bands
Meta['lines'] = self.rows
Meta['bands'] = self.bands
if self.map_info !=[] and self.projection != "":
Meta['map info'] = self.map_info
Meta['coordinate system string'] = self.projection
Meta['corner coordinates lonlat']= str(self.CornerTieP_LonLat).replace('(','[').replace(')',']')
Meta['CS_EPSG'] = self.CS_EPSG
Meta['scene length'] = self.scene_length
Meta['wavelength units'] = "Nanometers"
Meta['wavelength'] = self.CWL
......
......@@ -33,7 +33,7 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
connection = psycopg2.connect(conn_params)
if connection is None: return 'database connection fault'
cursor = connection.cursor()
condition = "WHERE " + " AND ".join(["%s=%s" %(k,v) for k,v in cond_dict.items()])
condition = "WHERE " + " AND ".join(["%s=%s" %(k,v) for k,v in cond_dict.items()])
cursor.execute("SELECT " +','.join(vals2return)+ " FROM " +tablename+ " " + condition)
records2return = cursor.fetchall() if records2fetch == 0 else [cursor.fetchone()] if records2fetch == 1 else \
cursor.fetchmany(size = records2fetch) # e.g. [('LE71950282003121EDC00',), ('LE71950282003105ASN00',)]
......@@ -79,8 +79,9 @@ class job:
path_fileserver = '/misc/gms2/scheffler/GeoMultiSens/' if hostname != 'geoms' else absP('./')
# path_procdata = absP('../database/processed_data/')
# path_procdata = '/srv/gms2/scheffler/GeoMultiSens/database/processed_data/'
path_procdata = joinP(path_fileserver, 'database/processed_data%s/' %('_bench' if benchmark_global else ''))
path_database = joinP(path_fileserver, 'database/processed_data%s/data_DB.db' %('_bench' if benchmark_global else ''))
path_procdata = joinP(path_fileserver, 'database/processed_scenes%s/' %('_bench' if benchmark_global else ''))
path_procdata_MGRS = joinP(path_fileserver, 'database/processed_mgrs_tiles%s/' %('_bench' if benchmark_global else ''))
path_database = joinP(path_fileserver, 'database/processed_scenes%s/data_DB.db' %('_bench' if benchmark_global else ''))
# path_database = absP('./database/processed_data/data_DB.db')
# path_db_meta = absP('./database/metadata/')
path_db_meta = absP('./database/metadata/metadata_DB.db') # ('geoms.gfz-potsdam.de:5432')
......@@ -91,7 +92,8 @@ class job:
conn_database = "dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms' connect_timeout=3"
conn_db_meta = conn_database
path_fileserver = query_cfg(conn_db_meta, 'path_data_root')
path_procdata = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata'))
path_procdata_scenes = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata_scenes'))
path_procdata_MGRS = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata_MGRS'))
path_archive = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_download'))
path_earthSunDist = absP(query_cfg(conn_db_meta, 'path_earthSunDist'))
#path_earthSunDist = '/home/gfz-fe/GeoMultiSens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv' # FIXME!!
......@@ -112,14 +114,14 @@ class job:
exec__L1CP = [1, 1]
exec__L2AP = [1, 1]
exec__L2BP = [1, 1]
exec__L2CP = [0, 1]
exec__L2DP = [0, 1]
exec__L2CP = [1, 1]
exec__L2DP = [1, 1]
if exec_mode=='Python':
for i in ['L1AP','L1BP','L1CP','L2AP','L2BP','L2CP','L2DP']: