Commit 81b8ecd8 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

L1B_P: third running version:

- full multiprocessing compatibility
- added a DESHIFTER class, that can be called from each processing level >= L1B
- added a function to generate configurations for DESHIFTER in order to call DESHIFTER in multiprocessing from process controller
- much faster IO
- added a method to L1B object to perform deshifting without calling DESHIFTER from process controller
- added a method to L1B object to apply deshifting results to object attributes
- some bugfixes
- config module: added values for setting global configurations for DESHIFTER (align_coord_grids, match_gsd, target_gsd) -> allow global spatial homogenization
GEOP:
- added warp_ndarray(): a method for warping numpy arrays that are associated with geo information (coordinate transformation) WITHOUT writing temporary files to disk
- added isProjectedOrGeographic()
- added wkt2epsg() for conversion of WKT projections to EPSG
- added geotransform2mapinfo() and mapinfo2geotransform()
- moved get_corner_coordinates() from L1B_P to GEOP
- some bugfixes
L0P: fixed a bug that caused a complete reprocessing when only the L1B output was deleted
METADATA: fixed a bug when retrieving radiance and reflectance gains from Landsat MTL files
HLP_F: added find_nearest(): a function to get the nearest value of a 1D-array in relation to a specific value
PG: added get_tempfile(): a function to generate tempfiles that can be passed to functions that require a physical file on the disk
process controller: updated L1B_P calls
parent b2d71396
......@@ -27,6 +27,11 @@ import math
import subprocess
import builtins
import time
import rasterio
from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING
try:
from osgeo import ogr
......@@ -673,7 +678,7 @@ class GEOPROCESSING(object):
# Create new gdalfile: Create(<filename>, <xsize>, <ysize>, [<bands>], [<GDALDataType>]) bands is optional and defaults to 1 GDALDataType is optional and defaults to GDT_Byte
outDs = driver.Create(outPath_h, self.cols, self.rows, self.bands, GDT_Int16)
if outDs is None:
self.logger.critical('Could not create OutputData.');
self.logger.critical('Could not create OutputData.')
sys.exit(1)
# georeference the image and set the projection
......@@ -2402,7 +2407,7 @@ class GEOPROCESSING(object):
elif direction == 2: # rows, bands, cols
im = np.swapaxes(im,1,2)
elif direction == 3: # rows, cols, bands; so wie in PIL bzw. IDL benötigt
pass
pass # bands, rows,cols
return im
def layerstacking_Robert(self, layerlist, outPath, optshift=None):
......@@ -2774,7 +2779,7 @@ def zonalStatistic(inImage, typ, zoneFile, outStats=[1, 1, 1, 1, 1, 1], zonalID=
if len(outStats) != 6 or min(outStats) < 0 or max(outStats) > 1:
print(
" >>> ERROR: list for outStasts is wrong: list of 6 integers required 0 (no calculation) 1 (calculation) ['Mean', 'Std', 'Max', 'Min', 'Hist', 'Count']->GEOPROCESSING zonalStatistic")
os.exit(1)
sys.exit()
if typ == 'vector':
###########-Vector OGR-#######################
# Get Ids and corresponding Names of the features for zonal statistics
......@@ -2790,7 +2795,7 @@ def zonalStatistic(inImage, typ, zoneFile, outStats=[1, 1, 1, 1, 1, 1], zonalID=
# check if attribute zonalID is available in the shapefile
if zonalID not in field_names:
print(" >>> ERROR: Need a <zonalID> attribute whiche defines the feature zones")
os.exit(1)
sys.exit()
nF = 1
# Get feature Names of Ids from the attribute "Name"
......@@ -2975,7 +2980,7 @@ def zonalStatistic(inImage, typ, zoneFile, outStats=[1, 1, 1, 1, 1, 1], zonalID=
except ValueError:
print(" >>> ERROR: list or number for NoDataValues required; -> given type:" + type(
NoDataValues).__name__)
os.exit(1)
sys.exit()
feature_labels_masked = np.ma.masked_where(feature_labels == 0, feature_labels)
else:
feature_labels_masked = np.ma.masked_where(feature_labels == 0, feature_labels)
......@@ -3282,6 +3287,8 @@ def convertGdalNumpyDataType(dType):
# dictionary to translate GDAL data types (strings) in corresponding numpy data types
dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
"Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
outdType = None
if dType in dTypeDic:
outdType = dTypeDic[dType]
elif dType in dTypeDic.values():
......@@ -3300,7 +3307,8 @@ def convertGdalNumpyDataType(dType):
elif dType in [np.float16]:
outdType = "Float32"
print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
else:
raise Exception('GEOP.convertGdalNumpyDataType: Unexpected input data type %s.' %dType)
return outdType
def ApplyMask(dataFile, maskFile, maskValue, outPath=None, keepDataType=1, v=0):
......@@ -3598,6 +3606,116 @@ def get_point_bounds(points):
(min_lon, min_lat, max_lon, max_lat) = mp_points.bounds
return min_lon, min_lat, max_lon, max_lat
def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None,
                 out_res=None, out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None):
    """Reproject / warp a numpy array with given geo information to a target coordinate system
    without writing temporary files to disk.

    :param ndarray:     numpy.ndarray [rows,cols,bands]
    :param in_gt:       input GDAL GeoTransform
    :param in_prj:      input projection as WKT string
    :param out_prj:     output projection as WKT string
    :param out_gt:      output GDAL GeoTransform as float tuple in the source coordinate system (optional)
    :param outUL:       [X,Y] output upper left coordinates as floats in the source coordinate system
                        (requires outRowsCols)
    :param outRowsCols: [rows, cols] (optional)
    :param out_res:     output resolution as float in the TARGET coordinate system
    :param out_extent:  [left, bottom, right, top] as floats in the source coordinate system
    :param out_dtype:   output data type as numpy data type
    :param rsp_alg:     resampling method to use. One of the following (int, default is 0):
                        0 = nearest neighbour, 1 = bilinear, 2 = cubic, 3 = cubic spline,
                        4 = lanczos, 5 = average, 6 = mode
    :param in_nodata:   no data value of the input image
    :param out_nodata:  no data value of the output image
    :return out_arr:    warped numpy array
    :return out_gt:     warped GDAL GeoTransform
    :return out_prj:    warped projection as WKT string
    """
    with rasterio.drivers():
        if out_gt is None:
            assert out_extent is not None, 'Provide either out_gt or out_extent!'
        if outUL is not None:
            assert outRowsCols is not None, 'outRowsCols must be given if outUL is given.'
        outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL

        inEPSG, outEPSG = [wkt2epsg(prj) for prj in [in_prj, out_prj]]
        assert inEPSG is not None, 'Could not derive input EPSG code.'
        assert outEPSG is not None, 'Could not derive output EPSG code.'
        src_crs = {'init': 'EPSG:%s' % inEPSG}
        dst_crs = {'init': 'EPSG:%s' % outEPSG}

        if len(ndarray.shape) == 3:
            # convert input array axis order to rasterio axis order [bands, rows, cols]
            ndarray = np.swapaxes(np.swapaxes(ndarray, 0, 2), 1, 2)
            bands, rows, cols = ndarray.shape
            # BUGFIX: 'rows,cols = outRowsCols if outRowsCols else rows,cols' parsed as
            # 'rows,cols = (outRowsCols if outRowsCols else rows), cols' due to operator
            # precedence and bound the whole outRowsCols list to 'rows'; the parentheses
            # restore the intended unpacking.
            rows, cols = outRowsCols if outRowsCols else (rows, cols)
        else:
            rows, cols = ndarray.shape if outRowsCols is None else outRowsCols

        out_dtype = ndarray.dtype if out_dtype is None else out_dtype
        gt2bounds = lambda gt, r, c: [gt[0], gt[3] + r * gt[5], gt[0] + c * gt[1], gt[3]]  # left, bottom, right, top

        # derive the warp target bounds from whichever of out_gt / out_extent / outRowsCols is given
        if out_gt is None and out_extent is None:
            if outRowsCols:
                outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
                ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1], ul[1]]  # left, bottom, right, top
                left, bottom, right, top = ulRC2bounds(outUL, rows, cols)
            else:  # outRowsCols is None and outUL is None: use in_gt
                left, bottom, right, top = gt2bounds(in_gt, rows, cols)
        elif out_extent:
            left, bottom, right, top = out_extent
            # the input array is only a window of the actual input array
            assert left > in_gt[0] and right < (in_gt[0] + cols * in_gt[1]) and \
                   bottom > in_gt[3] + rows * in_gt[5] and top < in_gt[3], \
                "out_extent has to be completely within the input image bounds."
        else:  # out_gt is given
            left, bottom, right, top = gt2bounds(in_gt, rows, cols)

        if out_res is None:
            # derive the pixel resolution in the target coordinate system
            prj_in_out = (isProjectedOrGeographic(in_prj), isProjectedOrGeographic(out_prj))
            assert None not in prj_in_out, 'prj_in_out contains None.'
            if prj_in_out[0] == prj_in_out[1]:
                out_res = (in_gt[1], abs(in_gt[5]))
            elif prj_in_out == ('geographic', 'projected'):
                raise NotImplementedError
            else:  # ('projected','geographic')
                px_size_LatLon = np.array(pixelToLatLon([1, 1], geotransform=in_gt, projection=in_prj)) - \
                                 np.array(pixelToLatLon([0, 0], geotransform=in_gt, projection=in_prj))
                out_res = tuple(reversed(abs(px_size_LatLon)))
                print('OUT_RES NOCHMAL CHECKEN: ', out_res)  # FIXME leftover debug output - verify out_res

        dst_transform, out_cols, out_rows = rio_calc_transform(
            src_crs, dst_crs, cols, rows, left, bottom, right, top, resolution=out_res, densify_pts=21)

        # rasterio Affine order is (a, b, c, d, e, f) -> GDAL GT order is (c, a, b, f, d, e)
        aff = list(dst_transform)
        if out_gt is None:
            out_gt = (aff[2], aff[0], aff[1], aff[5], aff[3], aff[4])

        src_transform = rasterio.transform.from_origin(in_gt[0], in_gt[3], in_gt[1], in_gt[5])
        dict_rspInt_rspAlg = {0: RESAMPLING.nearest, 1: RESAMPLING.bilinear, 2: RESAMPLING.cubic,
                              3: RESAMPLING.cubic_spline, 4: RESAMPLING.lanczos, 5: RESAMPLING.average,
                              6: RESAMPLING.mode}

        out_arr = np.zeros((bands, out_rows, out_cols), out_dtype) \
            if len(ndarray.shape) == 3 else np.zeros((out_rows, out_cols), out_dtype)

        # FIXME direct passing of src_transform and dst_transform results in a wrong output image.
        # FIXME Maybe a rasterio bug? Indirect passing causes a FutureWarning.
        # rio_reproject(ndarray, out_arr, src_transform=src_transform, src_crs=src_crs,
        #               dst_transform=dst_transform, dst_crs=dst_crs, resampling=dict_rspInt_rspAlg[rsp_alg])
        rio_reproject(ndarray, out_arr,
                      src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
                      resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)

        if len(ndarray.shape) == 3:
            # convert output array axis order back to GMS axis order [rows, cols, bands]
            out_arr = np.swapaxes(np.swapaxes(out_arr, 0, 1), 1, 2)

    return out_arr, out_gt, out_prj
def get_raster_size(minx, miny, maxx, maxy, cell_width, cell_height):
"""
Determine the number of rows/columns given the bounds of the point data and the desired cell size
......@@ -3683,21 +3801,27 @@ def lonlat_to_pixel(lon, lat, inverse_geo_transform):
# # set dataset to None to "close" file
# dataset = None
def latLonToPixel(geotifAddr, latLonPairs):
def latLonToPixel(latLonPairs, path_im=None, geotransform=None, projection=None):
"""The following method translates given latitude/longitude pairs into pixel locations on a given GEOTIF
This method does not take into account pixel size and assumes a high enough
image resolution for pixel size to be insignificant
:param geotifAddr: The file location of the GEOTIF
:param latLonPairs: The decimal lat/lon pairings to be translated in the form [[lat1,lon1],[lat2,lon2]]
:return: The pixel translation of the lat/lon pairings in the form [[x1,y1],[x2,y2]]
:param latLonPairs: The decimal lat/lon pairings to be translated in the form [[lat1,lon1],[lat2,lon2]]
:param path_im: The file location of the input image
:param geotransform: GDAL GeoTransform
:param projection: GDAL Projection
:return: The pixel translation of the lat/lon pairings in the form [[x1,y1],[x2,y2]]
"""
assert path_im is not None or geotransform is not None and projection is not None, \
"GEOP.latLonToPixel: Missing argument! Please provide either 'path_im' or 'geotransform' AND 'projection'."
if geotransform is not None and projection is not None:
gt,proj = geotransform, projection
else:
ds = gdal.Open(path_im)
gt, proj = ds.GetGeoTransform(), ds.GetProjection()
# Load the image dataset
ds = gdal.Open(geotifAddr)
# Get a geo-transform of the dataset
gt = ds.GetGeoTransform()
# Create a spatial reference object for the dataset
srs = osr.SpatialReference()
srs.ImportFromWkt(ds.GetProjection())
srs.ImportFromWkt(proj)
# Set up the coordinate transformation object
srsLatLong = srs.CloneGeogCS()
ct = osr.CoordinateTransformation(srsLatLong, srs)
......@@ -3713,24 +3837,27 @@ def latLonToPixel(geotifAddr, latLonPairs):
pixelPairs.append([int(x), int(y)])
return pixelPairs
def pixelToLatLon(geotifAddr, pixelPairs):
def pixelToLatLon(pixelPairs, path_im=None, geotransform=None, projection=None):
"""The following method translates given pixel locations into latitude/longitude locations on a given GEOTIF
INPUTS:
geotifAddr - The file location of the GEOTIF
pixelPairs - The pixel pairings to be translated in the form [[x1,y1],[x2,y2]]
OUTPUT:
The lat/lon translation of the pixel pairings in the form [[lat1,lon1],[lat2,lon2]]
NOTE:
This method does not take into account pixel size and assumes a high enough
image resolution for pixel size to be insignificant
This method does not take into account pixel size and assumes a high enough
image resolution for pixel size to be insignificant
:param pixelPairs: The pixel pairings to be translated in the form [[x1,y1],[x2,y2]]
:param path_im: The file location of the input image
:param geotransform: GDAL GeoTransform
:param projection: GDAL Projection
:returns: The lat/lon translation of the pixel pairings in the form [[lat1,lon1],[lat2,lon2]]
"""
# Load the image dataset
ds = gdal.Open(geotifAddr)
# Get a geo-transform of the dataset
gt = ds.GetGeoTransform()
assert path_im is not None or geotransform is not None and projection is not None, \
"GEOP.pixelToLatLon: Missing argument! Please provide either 'path_im' or 'geotransform' AND 'projection'."
if geotransform is not None and projection is not None:
gt,proj = geotransform, projection
else:
ds = gdal.Open(path_im)
gt, proj = ds.GetGeoTransform(), ds.GetProjection()
# Create a spatial reference object for the dataset
srs = osr.SpatialReference()
srs.ImportFromWkt(ds.GetProjection())
srs.ImportFromWkt(proj)
# Set up the coordinate transformation object
srsLatLong = srs.CloneGeogCS()
ct = osr.CoordinateTransformation(srs, srsLatLong)
......@@ -3783,11 +3910,100 @@ def pixelToMapYX(pixelCoords, path_im=None, geotransform=None, projection=None):
return mapYmapXPairs
def isProjectedOrGeographic(prj):
    """Return 'projected' or 'geographic' for the given projection, or None if neither applies.

    :param prj: projection as EPSG string ('EPSG:...'), Proj4 string ('+proj=...') or
                WKT string (starting with 'GEOGCS' or 'PROJCS')
    :raises Exception: if the projection format cannot be recognized
    """
    spatial_ref = osr.SpatialReference()
    if prj.startswith('EPSG:'):
        spatial_ref.ImportFromEPSG(int(prj.split(':')[1]))
    elif prj.startswith('+proj='):
        spatial_ref.ImportFromProj4(prj)
    elif prj.startswith('GEOGCS') or prj.startswith('PROJCS'):
        spatial_ref.ImportFromWkt(prj)
    else:
        raise Exception('Unknown input projection.')
    if spatial_ref.IsProjected():
        return 'projected'
    if spatial_ref.IsGeographic():
        return 'geographic'
    return None
def EPSG2Proj4(EPSG_code):
    """Convert an EPSG code (int) into the corresponding Proj4 string."""
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(EPSG_code)
    return spatial_ref.ExportToProj4()
def wkt2epsg(wkt, epsg=None):
    """Transform a WKT string to an EPSG code.

    :param wkt:  WKT definition
    :param epsg: path of the proj.4 epsg definition file; defaults to
                 $GDAL_DATA/../proj/epsg, resolved lazily (only if the brute
                 force lookup is actually needed)
    :returns:    EPSG code as string, or None if no code could be derived
    :raises Exception: if the WKT is invalid or describes a local coordinate system
    http://gis.stackexchange.com/questions/20298/is-it-possible-to-get-the-epsg-value-from-an-osr-spatialreference-class-using-th
    """
    code = None  # default
    p_in = osr.SpatialReference()
    s = p_in.ImportFromWkt(wkt)
    if s == 5:  # invalid WKT
        raise Exception('Invalid WKT.')
    if p_in.IsLocal():
        raise Exception('The given WKT is a local coordinate system.')
    cstype = 'GEOGCS' if p_in.IsGeographic() else 'PROJCS'
    an = p_in.GetAuthorityName(cstype)
    assert an in [None, 'EPSG'], "No EPSG code found. Found %s instead." % an
    ac = p_in.GetAuthorityCode(cstype)
    if ac is None:  # try brute force approach by grokking the proj epsg definition file
        # BUGFIX: the epsg file path was previously computed in the default argument
        # via os.environ['GDAL_DATA'], which raised KeyError at import time whenever
        # GDAL_DATA was unset - even when the default was never needed; resolve lazily.
        if epsg is None:
            epsg = os.environ['GDAL_DATA'].replace('/gdal', '/proj/epsg')
        p_out = p_in.ExportToProj4()
        if p_out:
            with open(epsg) as f:
                for line in f:
                    if line.find(p_out) != -1:
                        m = re.search(r'<(\d+)>', line)
                        if m:
                            code = m.group(1)
                            break
    else:
        code = ac
    return code
def geotransform2mapinfo(gt, prj):
    """Build an ENVI map info from a given GDAL GeoTransform and Projection
    (compatible with UTM and LonLat projections).

    :param gt:  GDAL GeoTransform
    :param prj: GDAL Projection
    :returns:   ENVI map info, e.g. [ UTM , 1 , 1 , 256785.0 , 4572015.0 , 30.0 , 30.0 , 43 , North , WGS-84 ]
    :rtype:     list
    """
    srs = osr.SpatialReference()
    srs.ImportFromWkt(prj)
    # tokenize the Proj4 representation ('+key=value' -> 'key=value')
    proj4_tokens = [token[1:] for token in srs.ExportToProj4().split()]
    proj4_proj = [t.split('=')[1] for t in proj4_tokens if '=' in t and t.split('=')[0] == 'proj'][0]
    proj4_ellps = [t.split('=')[1] for t in proj4_tokens if '=' in t and t.split('=')[0] in ['ellps', 'datum']][0]

    if proj4_proj == 'longlat':
        proj = 'Geographic Lat/Lon'
    elif proj4_proj == 'utm':
        proj = 'UTM'
    else:
        proj = proj4_proj
    ellps = 'WGS-84' if proj4_ellps == 'WGS84' else proj4_ellps

    UL_X, UL_Y, GSD_X, GSD_Y = gt[0], gt[3], gt[1], gt[5]
    # NOTE(review): the hemisphere is derived unconditionally - i.e. also for non-UTM
    # projections - exactly as in the previous implementation.
    lonlat = transform_utm_to_wgs84(UL_X, UL_Y, srs.GetUTMZone())
    is_UTM_North_South = 'North' if lonlat[1] >= 0. else 'South'

    if proj == 'UTM':
        return ['UTM', 1, 1, UL_X, UL_Y, GSD_X, abs(GSD_Y), srs.GetUTMZone(), is_UTM_North_South, ellps]
    return [proj, 1, 1, UL_X, UL_Y, GSD_X, abs(GSD_Y), ellps]
def mapinfo2geotransform(map_info):
    """Build a GDAL GeoTransform from an ENVI map info.

    :param map_info: ENVI map info (list), e.g.
                     ['UTM', 1, 1, 249885.0, 4578615.0, 30.0, 30.0, 33, 'North', 'WGS-84']
    :returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0]
    :rtype:   list

    NOTE: the previous docstring advertised a tuple although a list has always been
    returned; the documentation now matches the actual return type.
    """
    # GeoTransform layout: (UL_X, GSD_X, rotation, UL_Y, rotation, -GSD_Y);
    # rotation terms are always 0 since ENVI map info carries no rotation.
    return [float(map_info[3]), float(map_info[5]), 0., float(map_info[4]), 0., -float(map_info[6])]
def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
    """Return the map coordinates of the four image corners in the order UL, LL, LR, UR.

    :param gdal_ds: an open GDAL dataset (provides GeoTransform and raster size)
    :param gt:      GDAL GeoTransform (used together with cols/rows if gdal_ds is not given)
    :param cols:    number of columns
    :param rows:    number of rows
    :returns:       list of [x, y] corner coordinate pairs
    """
    assert gdal_ds is not None or (gt is not None and cols is not None and rows is not None), \
        "GEOP.get_corner_coordinates: Missing argument! Please provide either 'gdal_ds' or 'gt', 'cols' AND 'rows'."
    geotransform = gdal_ds.GetGeoTransform() if gdal_ds is not None else gt
    n_cols = gdal_ds.RasterXSize if gdal_ds is not None else cols
    n_rows = gdal_ds.RasterYSize if gdal_ds is not None else rows
    # visit the pixel corners in the same order the previous implementation produced: UL, LL, LR, UR
    corner_pixels = [(0, 0), (0, n_rows), (n_cols, n_rows), (n_cols, 0)]
    ext = []
    for px, py in corner_pixels:
        x = geotransform[0] + px * geotransform[1] + py * geotransform[2]
        y = geotransform[3] + px * geotransform[4] + py * geotransform[5]
        ext.append([x, y])
    return ext
def get_footprint_polygon(CornerLonLat):
""" Converts a list of coordinates into a shapely polygon object.
:param CornerLonLat: a list of coordinate tuples like [[lon,lat], [lon. lat], ..]
......
......@@ -86,11 +86,13 @@ def get_data_list_of_current_jobID(): # called in webapp mode
data_list =[]
for sceneid in sceneids:
# add from postgreSQL-DB: proc_level, scene_ID, datasetid, image_type, satellite, sensor, subsystem,
# acquisition_date, entity_ID, filename
ds = DB_T.get_scene_and_dataset_infos_from_postgreSQLdb(sceneid)
ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor']
ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor']
if usecase.skip_thermal and ds['subsystem']=='TIR': continue # removes ASTER TIR in case of skip_thermal
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_pan and ds['sensormode']=='P': continue # removes e.g. SPOT PAN in case of skip_pan
......@@ -104,11 +106,15 @@ def get_data_list_of_current_jobID(): # called in webapp mode
def LandsatID2dataset(ID_list):
    """Convert a list of Landsat entity IDs (e.g. 'LC81930242015070LGN00') into
    dataset dictionaries as expected by the process controller.

    :param ID_list: list of Landsat entity ID strings; the sensor prefix is read from
                    ID[:3], the acquisition year from ID[9:13] and the day of year
                    from ID[13:16]
    :returns:       list of dataset dictionaries with keys image_type, satellite,
                    sensor, subsystem, acquisition_date, entity_ID
    """
    dataset_list = []
    for ID in ID_list:
        dataset = {'image_type': 'RSD', 'satellite': None, 'sensor': None, 'subsystem': None,
                   'acquisition_date': None, 'entity_ID': ID}
        dataset['satellite'] = 'Landsat-5' if ID[:3] == 'LT5' else 'Landsat-7' if ID[:3] == 'LE7' else \
            'Landsat-8' if ID[:3] == 'LC8' else dataset['satellite']
        # BUGFIX: the fallback previously read dataset['satellite'] (copy-paste error);
        # an unrecognized ID now explicitly leaves the sensor untouched (None)
        dataset['sensor'] = 'TM' if ID[:3] == 'LT5' else 'ETM+' if ID[:3] == 'LE7' else \
            'OLI_TIRS' if ID[:3] == 'LC8' else dataset['sensor']
        dataset['subsystem'] = None
        # year + (day of year - 1) -> ISO date string
        dataset['acquisition_date'] = (datetime.datetime(int(ID[9:13]), 1, 1) +
                                       datetime.timedelta(int(ID[13:16]) - 1)).strftime('%Y-%m-%d')
        dataset_list.append(dataset)
    return dataset_list
......@@ -132,26 +138,27 @@ def add_local_availability(dataset):
'subsystem':dataset['subsystem'], 'sensormode':dataset['sensormode'], 'entity_ID':dataset['entity_ID']})
path_logfile = PG.path_generator(dataset).get_path_logfile()
def get_HighestProcL_dueLog(path_log):
if os.path.exists(path_log):
def get_AllWrittenProcL_dueLog(path_log):
if not os.path.exists(path_log):
print ("No logfile named '%s' found for %s at %s. Dataset has to be reprocessed." \
% (os.path.basename(path_log), dataset['entity_ID'], os.path.dirname(path_log)))
AllWrittenProcL_dueLog = []
else:
logfile = open(path_log, 'r').read()
AllWrittenProcL_dueLog = re.findall(":*(\S*\s*) data successfully saved.",logfile,re.I)
if AllWrittenProcL_dueLog != []:
ProcL = HLP_F.sorted_nicely(AllWrittenProcL_dueLog)[-1]
else:
if not AllWrittenProcL_dueLog: # AllWrittenProcL_dueLog = []
print ('%s: According to logfile no completely processed data exist at any processing level. ' \
'Dataset has to be reprocessed.' %dataset['entity_ID'])
ProcL = None
else:
print ("No logfile named '%s' found for %s at %s. Dataset has to be reprocessed." \
% (os.path.basename(path_log), dataset['entity_ID'], os.path.dirname(path_log)))
ProcL = None
return ProcL
else:
AllWrittenProcL_dueLog = HLP_F.sorted_nicely(AllWrittenProcL_dueLog)
return AllWrittenProcL_dueLog
if len(DB_match) == 1 or DB_match == [] or DB_match == 'database connection fault':
HighestProcL_dueLog = get_HighestProcL_dueLog(path_logfile)
if HighestProcL_dueLog is not None:
assumed_path_GMS_file = '%s_%s.gms' %(os.path.splitext(path_logfile)[0],HighestProcL_dueLog)
AllWrittenProcL = get_AllWrittenProcL_dueLog(path_logfile)
dataset['proc_level'] = None # default (dataset has to be reprocessed)
for ProcL in reversed(AllWrittenProcL):
if dataset['proc_level'] is not None: break # proc_level found; no further searching for lower proc_levels
assumed_path_GMS_file = '%s_%s.gms' %(os.path.splitext(path_logfile)[0],ProcL)
if os.path.isfile(assumed_path_GMS_file):
GMS_file_dict = INP_R.GMSfile2dict(assumed_path_GMS_file)
target_LayerBandsAssignment = get_LayerBandsAssignment({'image_type': dataset['image_type'],
......@@ -161,26 +168,25 @@ def add_local_availability(dataset):
if DB_match == [] or DB_match == 'database connection fault':
print ('The dataset %s is not included in the database of processed data but according to ' \
'logfile %s has been written successfully. Recreating missing database entry.' \
%(dataset['entity_ID'],HighestProcL_dueLog))
%(dataset['entity_ID'],ProcL))
DB_T.data_DB_updater(GMS_file_dict)
if job.call_type == 'console':
DB_T.data_DB_to_csv()
dataset['proc_level'] = HighestProcL_dueLog
dataset['proc_level'] = ProcL
elif len(DB_match) == 1:
print('Found a matching %s dataset for %s. Processing skipped until %s.' \
%(HighestProcL_dueLog,dataset['entity_ID'],HighestProcL_dueLog))
if DB_match[0][0] == HighestProcL_dueLog:
%(ProcL,dataset['entity_ID'],ProcL))
if DB_match[0][0] == ProcL:
dataset['proc_level'] = DB_match[0][0]
else:
dataset['proc_level'] = HighestProcL_dueLog
dataset['proc_level'] = ProcL
else:
print('Found a matching dataset for %s but with a different LayerBandsAssignment. ' \
'Dataset has to be reprocessed.' %dataset['entity_ID'])
dataset['proc_level'] = None
else:
dataset['proc_level'] = None
else:
dataset['proc_level'] = None
print('%s for dataset %s has been written due to logfile but no corresponding dataset has been found.' \
%(ProcL,dataset['entity_ID'])+' Searching for lower processing level...'
if AllWrittenProcL.index(ProcL)!=0 else '')
elif len(DB_match) > 1:
print ('According to database there are multiple matches for the dataset %s. Dataset has to be reprocessed.' \
%dataset['entity_ID'])
......
......@@ -732,10 +732,10 @@ class L1A_object(object):
space = 0.3
n = len(classifiers)
width = (1 - space) / (len(classifiers))
for i,classif in enumerate(classifiers):
vals = dpoints[dpoints[:,0] == cond][:,2].astype(np.float)
pos = [j - (1 - space) / 2. + i * width for j in range(1,len(categories)+1)]
ax.bar(pos, vals, width=width)
#for i,classif in enumerate(classifiers): # FIXME
# vals = dpoints[dpoints[:,0] == cond][:,2].astype(np.float)
# pos = [j - (1 - space) / 2. + i * width for j in range(1,len(categories)+1)]
# ax.bar(pos, vals, width=width)
print(os.path.join(job.path_testing,'out/%s_benchmark_cloud_classifiers.png' %self.baseN))
fig.savefig(os.path.abspath('./testing/out/%s_benchmark_cloud_classifiers.png' %self.baseN),
format='png',dpi=300,bbox_inches='tight')
......
This diff is collapsed.
......@@ -23,10 +23,11 @@ import datetime
import math
import pyproj
import glob
import builtins
from matplotlib import dates as mdates
from pyorbital import astronomy
try: from osgeo import osr
except: import osr
except ImportError: import osr
from gms_io import envifilehandling_BD as ef
......@@ -34,7 +35,7 @@ from algorithms import GEOPROCESSING_BD as GEOP
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
job, usecase = GMS_config.job, GMS_config.usecase # read from builtins (set by process_controller)
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
# + Input Reader (has to be left out here in order to avoid circular dependencies)
class METADATA(object):
......@@ -359,13 +360,13 @@ class METADATA(object):
# Gains and Offsets
h4 = re.search("GROUP = MIN_MAX_RADIANCE[\s\S]*END_GROUP = MIN_MAX_PIXEL_VALUE",mtl_, re.I)
h4_ = h4.group(0)
h4max_rad = re.findall(" LMAX_BAND[0-9]+ = ([\S]*)", h4_, re.I)
h4min_rad = re.findall(" LMIN_BAND[0-9]+ = ([\S]*)", h4_, re.I)
h4max_rad = re.findall(" LMAX_BAND[0-9]+ = ([\S]*)", h4_, re.I) # space in front IS needed
h4min_rad = re.findall(" LMIN_BAND[0-9]+ = ([\S]*)", h4_, re.I) # space in front IS needed
h4max_pix = re.findall("QCALMAX_BAND[0-9]+ = ([\S]*)", h4_, re.I)
h4min_pix = re.findall("QCALMIN_BAND[0-9]+ = ([\S]*)", h4_, re.I)
if h4max_rad is []:
h4max_rad = re.findall(" RADIANCE_MAXIMUM_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I)
h4min_rad = re.findall(" RADIANCE_MINIMUM_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I)
if not h4max_rad:
h4max_rad = re.findall(" RADIANCE_MAXIMUM_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I) # space in front IS needed
h4min_rad = re.findall(" RADIANCE_MINIMUM_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I) # space in front IS needed
h4max_pix = re.findall("QUANTIZE_CAL_MAX_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I)
h4min_pix = re.findall("QUANTIZE_CAL_MIN_BAND_[0-9_VCID]+ = ([\S]*)", h4_, re.I)
# additional: LMAX, LMIN, QCALMAX, QCALMIN
......@@ -464,14 +465,15 @@ class METADATA(object):
h9 = re.search("GROUP = RADIOMETRIC_RESCALING[\s\S]*END_GROUP = RADIOMETRIC_RESCALING",mtl_, re.I)
if h9 is not None:
h9_ = h9.group(0)
h9gain_ref = re.findall(" REFLECTANCE_MULT_BAND_[0-9]+ = ([\S]*)", h9_, re.I)
h9gain_ref_bandNr = re.findall(" REFLECTANCE_MULT_BAND_([0-9]+) = [\S]*", h9_, re.I)
h9offs_ref = re.findall(" REFLECTANCE_ADD_BAND_[0-9]+ = ([\S]*)", h9_, re.I)
h9offs_ref_bandNr = re.findall(" REFLECTANCE_ADD_BAND_([0-9]+) = [\S]*", h9_, re.I)
h9gain_ref = re.findall(" REFLECTANCE_MULT_BAND_[0-9]+ = ([\S]*)", h9_, re.I)
h9gain_ref_bandNr = re.findall(" REFLECTANCE_MULT_BAND_([0-9]+) = [\S]*", h9_, re.I)
h9offs_ref = re.findall(" REFLECTANCE_ADD_BAND_[0-9]+ = ([\S]*)" , h9_, re.I)
h9offs_ref_bandNr = re.findall(" REFLECTANCE_ADD_BAND_([0-9]+) = [\S]*" , h9_, re.I)
if h9gain_ref:
self.GainsRef = [None]*len(self.Gains)
self.OffsetsRef = [None]*len(self.Offsets)
FullLBA = get_LayerBandsAssignment(self.get_GMS_identifier(),ignore_usecase=True)
for ii,ind in zip(h9gain_ref_bandNr, h9gain_ref):
self.GainsRef[FullLBA.index(ii)] = ind
for ii,ind in zip(h9offs_ref_bandNr, h9offs_ref):
......@@ -810,7 +812,7 @@ class METADATA(object):
re_res_GSD = re.findall('OBJECT[\s]*=[\s]*SPATIALRESOLUTION[\s\S]*VALUE[\s]*=[\s]*[\S]{1}([1-9][0-9]), ([1-9][0-9]), ([1-9][0-9])[\s\S]*END_OBJECT[\s]*=[\s]*SPATIALRESOLUTION',h_,re.I)
idx_subsystem = 0 if subsystem[:4]=='VNIR' else 1 if subsystem[:4]=='SWIR' else 2 if subsystem[:4]=='TIR' else None
self.gResolution = float(re_res_GSD[0][idx_subsystem]) \
if re_res_GSD!=[] and idx_subsystem is not None else self.gResolution
if re_res_GSD and idx_subsystem is not None else self.gResolution
# Solar irradiance, central wavelengths , full width half maximum per band
self.wvlUnit = 'Nanometers'
......@@ -1118,24 +1120,11 @@ class METADATA(object):
ef.WriteEnviHeader(hdr_o, hdrfile)
def add_rasObj_dims_projection_physUnit(self,rasObj,dict_LayerOptTherm,temp_logger=None):
self.rows = rasObj.rows
self.cols = rasObj.cols
self.rows = rasObj.rows
self.cols = rasObj.cols
self.bands = rasObj.bands
if rasObj.projection!='':
srs = osr.SpatialReference()
srs.ImportFromWkt(rasObj.projection)