Commit b6a2f1b6 authored by Daniel Scheffler

PEP-8 editing. Added style checkers.

parent a5f41a39
Pipeline #1207 passed in 8 minutes and 1 second
before_script:
- git lfs pull
stages:
- test
- deploy
test_gms_preprocessing:
stage: test
script:
- source /root/anaconda3/bin/activate
- export GDAL_DATA=/root/anaconda3/share/gdal
......@@ -17,7 +24,24 @@ test_gms_preprocessing:
- nosetests.xml
when: always
pages:
test_styles:
stage: test
script:
- source /root/anaconda3/bin/activate
- export GDAL_DATA=/root/anaconda3/share/gdal
- export PYTHONPATH=$PYTHONPATH:/root # /root <- directory needed later
- pip install flake8 pycodestyle pylint pydocstyle # TODO remove as soon as docker container is rebuilt
- make lint
artifacts:
paths:
- tests/linting/flake8.log
- tests/linting/pycodestyle.log
- tests/linting/pydocstyle.log
when: always
deploy_pages:
stage: deploy
dependencies:
- test_gms_preprocessing
......@@ -28,7 +52,6 @@ pages:
- cp nosetests.* public/nosetests_reports/
- mkdir -p public/doc
- cp -r docs/_build/html/* public/doc/
artifacts:
paths:
- public
......
......@@ -50,7 +50,9 @@ clean-test: ## remove test and coverage artifacts
rm -fr nosetests.xml
lint: ## check code style with flake8, pycodestyle and pydocstyle
flake8 gms_preprocessing tests
flake8 --max-line-length=120 gms_preprocessing tests > ./tests/linting/flake8.log
pycodestyle gms_preprocessing --exclude="*.ipynb,*.ipynb*,envifilehandling.py" --max-line-length=120 > ./tests/linting/pycodestyle.log
-pydocstyle gms_preprocessing > ./tests/linting/pydocstyle.log
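A note on the lint target above: the leading "-" on the pydocstyle line makes make ignore that command's exit status, so docstring findings are logged without failing the job. A minimal Python sketch of the same behaviour (run_linter is a hypothetical helper; commands and log paths as in the Makefile):

import subprocess

def run_linter(cmd, logfile, ignore_failure=False):
    # write the checker's report to its log file, as the Makefile recipe does
    with open(logfile, 'w') as log:
        ret = subprocess.call(cmd, stdout=log)
    if ret != 0 and not ignore_failure:
        raise SystemExit(ret)

run_linter(['flake8', '--max-line-length=120', 'gms_preprocessing', 'tests'],
           'tests/linting/flake8.log')
run_linter(['pycodestyle', '--max-line-length=120', 'gms_preprocessing'],
           'tests/linting/pycodestyle.log')
run_linter(['pydocstyle', 'gms_preprocessing'],
           'tests/linting/pydocstyle.log', ignore_failure=True)  # tolerated, like "-pydocstyle"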
test: ## run tests quickly with the default Python
python setup.py test
......
......@@ -4,12 +4,12 @@ import os
if 'MPLBACKEND' not in os.environ:
os.environ['MPLBACKEND'] = 'Agg'
from . import algorithms
from . import io
from . import misc
from . import processing
from . import config
from .processing.process_controller import process_controller
from . import algorithms # noqa: E402
from . import io # noqa: E402
from . import misc # noqa: E402
from . import processing # noqa: E402
from . import config # noqa: E402
from .processing.process_controller import process_controller # noqa: E402
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
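Background on the "# noqa: E402" markers added above: E402 flags module-level imports that are not at the top of the file, but here MPLBACKEND must be set before matplotlib is imported (directly or transitively) for the backend choice to take effect. A minimal sketch of the pattern, assuming a headless environment:

import os

# must run before anything imports matplotlib, otherwise it is ignored
if 'MPLBACKEND' not in os.environ:
    os.environ['MPLBACKEND'] = 'Agg'

import matplotlib  # noqa: E402  (intentionally imported after the env setup)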
......
......@@ -37,13 +37,12 @@ try:
from osgeo import osr
from osgeo import gdal
from osgeo import gdalnumeric
from osgeo.gdalconst import *
except ImportError:
import ogr
import osr
import gdal
import gdalnumeric
from gdalconst import *
from gdalconst import GA_ReadOnly, GDT_Byte, GDT_Int16, GDT_Float32
import pyproj
from pyorbital import astronomy
......@@ -54,11 +53,8 @@ from shapely.ops import cascaded_union
from geoarray import GeoArray
from py_tools_ds.geo.coord_grid import snap_bounds_to_pixGrid
from py_tools_ds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_to_utm, mapXY2imXY, imXY2mapXY
from py_tools_ds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
from py_tools_ds.geo.raster.reproject import warp_ndarray
from ..config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
from ..io import envifilehandling as ef
from ..misc.definition_dicts import get_outFillZeroSaturated
......@@ -104,16 +100,16 @@ class GEOPROCESSING(object):
if not geodata.startswith('/vsi'):
path2check = geodata
else:
gdal_prefix_dict = {'/vsizip': '.zip', '/vsitar': '.tar', '/vsitar': '.tar.gz',
'/vsitar' '.gz': '/vsigzip'}
p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz'] \
# gdal_prefix_dict = {'/vsizip': '.zip', '/vsitar': '.tar', '/vsitar': '.tar.gz',
# '/vsitar' '.gz': '/vsigzip'}
p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz']
if len(geodata.split(i)) > 1 and geodata.split(i)[1].startswith('/')][0]
path2check = os.path.abspath('.' + re.search(r'/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1))  # raw pattern avoids W605 invalid escapes
assert os.path.exists(path2check), "ERROR: data %s does not exist!" % path2check
assert self.inDs is not None, "ERROR: Could not open %s!" % self.filename
elif isinstance(geodata, gdal.Dataset):
self.inDs = geodata
geodata = None
del geodata
self.filepath, self.filename = None, self.inDs.GetDescription()
self.shortname, self.extension = os.path.splitext(self.filename)
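The /vsi branch above strips GDAL's virtual-filesystem prefix ('/vsizip', '/vsitar', ...) to check that the underlying archive exists on disk. A hedged stand-alone sketch of that extraction (underlying_archive is a hypothetical helper, not part of the module):

def underlying_archive(vsi_path):
    # '/vsizip/data/scene.zip/image.tif' -> 'data/scene.zip'
    # note: '.tar.gz' must be tried before '.tar' and '.gz'
    for ext in ('.tar.gz', '.tgz', '.zip', '.tar', '.gz'):
        head, sep, tail = vsi_path.partition(ext)
        if sep and tail.startswith('/'):
            return (head + ext).split('/', 2)[-1]  # drop the '/vsi...' prefix
    return None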
......@@ -183,7 +179,7 @@ class GEOPROCESSING(object):
def subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart(self):
shape_fullArr = [self.inDs.RasterYSize, self.inDs.RasterXSize, self.inDs.RasterCount]
self.rows, self.cols, self.bands, self.rowStart, self.rowEnd, self.colStart, self.colEnd, self.bandStart, \
self.bandEnd, self.bandsList = get_subsetProps_from_subsetArg(shape_fullArr, self.subset).values()
self.bandEnd, self.bandsList = get_subsetProps_from_subsetArg(shape_fullArr, self.subset).values()
self.subset = self.subset if self.subset[0] != 'cube' else None
def update_dataset_related_attributes(self):
......@@ -277,13 +273,12 @@ class GEOPROCESSING(object):
offsets, gains = [list(np.array(par)[self.bandsList]) for par in [offsets, gains]]
inArray = np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1)
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[
0] == 'custom' else inArray  # FIXME inefficient, since unneeded bands are read in
# FIXME inefficient, since unneeded bands are read in
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[0] == 'custom' else inArray
rad = DN2Rad(inArray, offsets, gains, inFill=inFill, inZero=inZero, inSaturated=inSaturated, cutNeg=cutNeg)
rad = np.swapaxes(np.swapaxes(rad, 0, 2), 1, 2) # [rows,cols,bands] => [bands,rows,cols]
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + (str(self.subset[1]) if self.subset
and self.subset[
0] != 'cube' else '') + 'rad.mem'
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + \
(str(self.subset[1]) if self.subset and self.subset[0] != 'cube' else '') + 'rad.mem'
self.inDs = gdalnumeric.SaveArray(rad, filename_in_MEM, format='MEM', prototype=self.desc)
self.update_dataset_related_attributes()
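DN2Rad above applies a per-band linear calibration; judging from the reflectance code further down, its core is radiance = offset + gain * DN. A hedged sketch (dn_to_radiance and its signature are assumptions, not the actual function):

import numpy as np

def dn_to_radiance(dn, offsets, gains):
    # dn: [rows, cols, bands]; offsets/gains: one value per band
    rad = np.empty(dn.shape, dtype=np.float32)
    for b in range(dn.shape[2]):
        rad[:, :, b] = offsets[b] + gains[b] * dn[:, :, b]
    return rad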
......@@ -317,14 +312,13 @@ class GEOPROCESSING(object):
offsets, gains, irradiance = [list(np.array(par)[self.bandsList]) for par in [offsets, gains, irradiance]]
inArray = np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1)
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[
0] == 'custom' else inArray  # FIXME inefficient, since unneeded bands are read in
# FIXME inefficient, since unneeded bands are read in
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[0] == 'custom' else inArray
TOA_ref = DN2TOARef(inArray, offsets, gains, irradiance, zenith, earthSunDist,
inFill=inFill, inZero=inZero, inSaturated=inSaturated, cutNeg=cutNeg)
TOA_ref = np.swapaxes(np.swapaxes(TOA_ref, 0, 2), 1, 2) # [rows,cols,bands] => [bands,rows,cols]
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + (str(self.subset[1]) if self.subset
and self.subset[
0] != 'cube' else '') + '_TOA_ref.mem'
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + \
(str(self.subset[1]) if self.subset and self.subset[0] != 'cube' else '') + '_TOA_ref.mem'
self.inDs = gdalnumeric.SaveArray(TOA_ref, filename_in_MEM, format='MEM', prototype=self.desc)
self.update_dataset_related_attributes()
......@@ -409,11 +403,13 @@ class GEOPROCESSING(object):
outPath_h = os.path.join(outPath, filename_new)
else:
outPath_h = outPath
if not os.path.exists(outPath_h): os.makedirs(outPath_h)
if not os.path.exists(outPath_h):
os.makedirs(outPath_h)
# register outputdriver
driver = gdal.GetDriverByName('ENVI')
driver.Register()
# Create new gdalfile: Create(<filename>, <xsize>, <ysize>, [<bands>], [<GDALDataType>]) bands is optional and defaults to 1 GDALDataType is optional and defaults to GDT_Byte
# Create new gdalfile: Create(<filename>, <xsize>, <ysize>, [<bands>], [<GDALDataType>]) bands is optional
# and defaults to 1 GDALDataType is optional and defaults to GDT_Byte
outDs = driver.Create(outPath_h, self.cols, self.rows, self.bands, GDT_Int16)
assert outDs is not None, 'Could not create OutputData'
......@@ -437,8 +433,9 @@ class GEOPROCESSING(object):
data = band.ReadAsArray(0, 0, self.cols, self.rows).astype(np.float32)
else:
data = band.ReadAsArray(self.colStart, self.rowStart, self.cols, self.rows).astype(np.float32)
data_TOA_ref_ = 10000 * ((float(offsetsRef[x]) + data * float(gainsRef[x])) / math.cos(math.radians(
zenith)))  # rounding happens automatically when writing to the GDAL band (like normal rounding, not like int())
# Rounding happens automatically when writing to the GDAL band (like normal rounding, not like int())
data_TOA_ref_ = \
10000 * ((float(offsetsRef[x]) + data * float(gainsRef[x])) / math.cos(math.radians(zenith)))
if cutNeg:
data_TOA_ref_ = np.where(data_TOA_ref_ < 0, 0, data_TOA_ref_)
# set fill, zero and saturated Pixels to default
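The reflectance computation above, condensed into a stand-alone sketch: the calibrated signal is divided by the cosine of the solar zenith angle and scaled by 10000 so it fits the Int16 output bands (dn_to_scaled_toa_ref is a hypothetical name):

import math
import numpy as np

def dn_to_scaled_toa_ref(data, offset, gain, zenith_deg, cut_neg=True):
    ref = 10000 * ((offset + data * gain) / math.cos(math.radians(zenith_deg)))
    if cut_neg:
        ref = np.where(ref < 0, 0, ref)  # clip negative reflectances, as above
    return ref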
......@@ -452,7 +449,9 @@ class GEOPROCESSING(object):
# flush data to disk, set the NoData value and calculate stats
outBand.FlushCache()
# outBand.SetUnitType("W * m-2 * sr-1 * micrometer-1")  # check whether these functions exist somewhere in gdal.py; ask Sylvia where the Python packages are stored and whether I can access them
# outBand.SetUnitType("W * m-2 * sr-1 * micrometer-1")
# Check whether these functions exist somewhere in gdal.py. Ask Sylvia where the Python
# packages are stored and whether I can access them.
# outBand.SetScale(100)
# outBand.SetOffset(0)
outBand.SetNoDataValue(outFill)
......@@ -467,7 +466,7 @@ class GEOPROCESSING(object):
self.logger.info("properties of radianceBand%d" % (x + 1))
GetBandProps(outBand)
GetHistnStdv(outBand)
except:
except Exception:
self.logger.error("an unexpected ERROR in GetBandProps()")
# Set temporary NoData to saturated Pixel to get max of real Data
......@@ -495,11 +494,11 @@ class GEOPROCESSING(object):
self.logger.info("\t##\n\tband" + str(x + 1) + " of " + str(self.bands) + " completed")
# --Close Gdalobjects python: GdalObject = None -> behaves like GDALClose() in C
band = None
outBand = None
del band
del outBand
# self.inDs = None
outDs = None
# del self.inDs
del outDs
def TOARad2Kelvin_fastforward(self, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
"""
......@@ -521,14 +520,13 @@ class GEOPROCESSING(object):
K1, K2 = [list(np.array(par)[self.bandsList]) for par in [K1, K2]]
inArray = np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1)
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[
0] == 'custom' else inArray  # FIXME inefficient, since unneeded bands are read in
# FIXME inefficient, since unneeded bands are read in
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[0] == 'custom' else inArray
Kelvin = TOARad2Kelvin_fastforward(inArray, K1, K2, emissivity=emissivity,
inFill=inFill, inZero=inZero, inSaturated=inSaturated)
Kelvin = np.swapaxes(np.swapaxes(Kelvin, 0, 2), 1, 2) # [rows,cols,bands] => [bands,rows,cols]
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + (str(self.subset[1]) if self.subset
and self.subset[
0] != 'cube' else '') + '_Kelvin.mem'
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + \
(str(self.subset[1]) if self.subset and self.subset[0] != 'cube' else '') + '_Kelvin.mem'
self.inDs = gdalnumeric.SaveArray(Kelvin, filename_in_MEM, format='MEM', prototype=self.desc)
self.update_dataset_related_attributes()
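TOARad2Kelvin_fastforward presumably applies the standard inverse-Planck conversion for thermal bands, T = K2 / ln(K1 / L + 1). A hedged sketch (the emissivity handling in particular is an assumption):

import numpy as np

def toa_radiance_to_kelvin(rad, K1, K2, emissivity=0.95):
    # K1 [W m-2 sr-1 um-1] and K2 [K] are sensor-specific thermal constants
    return K2 / np.log(K1 / (emissivity * np.asarray(rad, dtype=np.float64)) + 1.0)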
......@@ -556,14 +554,13 @@ class GEOPROCESSING(object):
offsets, gains, K1, K2 = [list(np.array(par)[self.bandsList]) for par in [offsets, gains, K1, K2]]
inArray = np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1)
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[
0] == 'custom' else inArray  # FIXME inefficient, since unneeded bands are read in
# FIXME inefficient, since unneeded bands are read in
inArray = np.take(inArray, self.bandsList, axis=2) if self.subset and self.subset[0] == 'custom' else inArray
degreesCelsius = DN2DegreesCelsius_fastforward(inArray, offsets, gains, K1, K2, emissivity=emissivity,
inFill=inFill, inZero=inZero, inSaturated=inSaturated)
degreesCelsius = np.swapaxes(np.swapaxes(degreesCelsius, 0, 2), 1, 2) # [rows,cols,bands] => [bands,rows,cols]
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + (str(self.subset[1]) if self.subset
and self.subset[
0] != 'cube' else '') + '_degreesCelsius.mem'
filename_in_MEM = os.path.splitext(self.filename)[0] + '__' + \
(str(self.subset[1]) if self.subset and self.subset[0] != 'cube' else '') + '_degreesCelsius.mem'
self.inDs = gdalnumeric.SaveArray(degreesCelsius, filename_in_MEM, format='MEM', prototype=self.desc)
self.update_dataset_related_attributes()
......@@ -583,7 +580,7 @@ class GEOPROCESSING(object):
if v:
print("\n--------GEOPROCESSING--------")
print("Calculate normalized Bandindex (NDVIlike) (band2-band1)/(band2+band1)")
print("**NDVI arg:**" \
print("**NDVI arg:**"
"\n\t*data:\n\t %s" % (os.path.join(self.filepath, self.filename)))
print("\t*band1 (Nir): %s" % band1)
print("\t*band2 (Red): %s" % band2)
......@@ -602,17 +599,18 @@ class GEOPROCESSING(object):
outPath = self.workspace
else:
print(outPath)
if not os.path.exists(outPath): os.makedirs(outPath)
if not os.path.exists(outPath):
os.makedirs(outPath)
if exclValues is None:
pass
else:
if type(exclValues) is list:
exclValuesL = exclValues
exclValuesL = exclValues  # noqa: F841 (intentionally unused)
else:
try:
float(exclValues) # checks if arg is number
exclValuesL = [exclValues]
exclValuesL = [exclValues]  # noqa: F841 (intentionally unused)
except ValueError:
print("XXXX\nValueERROR: exclValue = nan\nXXXX")
sys.exit(1)
......@@ -630,7 +628,8 @@ class GEOPROCESSING(object):
outPath_h = os.path.join(outPath, filename_new)
else:
outPath_h = outPath
if not os.path.exists(outPath_h): os.makedirs(outPath_h)
if not os.path.exists(outPath_h):
os.makedirs(outPath_h)
# register outputdriver
driver = gdal.GetDriverByName('ENVI')
driver.Register()
......@@ -693,16 +692,15 @@ class GEOPROCESSING(object):
print("properties of NDVI_band")
GetBandProps(outBand)
GetHistnStdv(outBand)
except:
except Exception:
print("an unexpected ERROR in GetBandProps()")
print("\t##\n\tNDVI calculation completed")
# --Close Gdalobjects python: GdalObject = None -> behaves like GDALClose() in C
band = None
outBand = None
# self.inDs = None
outDs = None
del outBand
# del self.inDs
del outDs
def ClipMaxExt_nb(self, outPath=None, bin=1, v=0):
"""----METHOD_5----------------------------------------------------------
......@@ -720,7 +718,7 @@ class GEOPROCESSING(object):
if v == 1:
print("\n--------GEOPROCESSING--------")
print("Clip file to maximum extent without background pixel (background = pixel<=0)")
print("**NDVI arg:**" \
print("**NDVI arg:**"
"\n\t*data:\n\t %s" % (os.path.join(self.filepath, self.filename)))
if outPath is None or outPath == "":
......@@ -728,7 +726,8 @@ class GEOPROCESSING(object):
if v == 1:
print("\t*outPath:")
print("\t %s" % outPath)
if not os.path.exists(outPath): os.makedirs(outPath)
if not os.path.exists(outPath):
os.makedirs(outPath)
# main
Ds_nd = self.tondarray()
bandSum = Ds_nd[0]
......@@ -819,7 +818,7 @@ class GEOPROCESSING(object):
rows = int(ext_max[4] - ext_max[2])
ulx = self.originX + ext_max[1] * self.pixelWidth
uly = self.originY + ext_max[2] * self.pixelHeight
ul_pix = [int(ext_max[1]), int(ext_max[2])]
# ul_pix = [int(ext_max[1]), int(ext_max[2])]
lrx = self.originX + ext_max[3] * self.pixelWidth
lry = self.originY + ext_max[4] * self.pixelHeight
if v == 1:
......@@ -833,7 +832,8 @@ class GEOPROCESSING(object):
outPath_h = os.path.join(outPath, filename_new)
else:
outPath_h = outPath
if not os.path.exists(outPath_h): os.makedirs(outPath_h)
if not os.path.exists(outPath_h):
os.makedirs(outPath_h)
# register outputdriver
driver = gdal.GetDriverByName('ENVI')
driver.Register()
......@@ -861,10 +861,10 @@ class GEOPROCESSING(object):
outBand.FlushCache()
# --Close Gdalobjects python: GdalObject = None -> behaves like GDALClose() in C
band = None
outBand = None
# self.inDs = None
outDs = None
del band
del outBand
# del self.inDs
del outDs
# update header if input == ENVI
if self.drname_s == "ENVI":
......@@ -951,7 +951,9 @@ class GEOPROCESSING(object):
:param dst_EPSG_code: EPSG-Code defines LonLat or UTM coordinates.
:param dst_CS:
:param dst_CS_datum:
:param mode: backend used for warping ('rasterio' or the GDAL command line tools)
:param use_workspace:
:param inFill: nodata value of the input dataset (defaults to the dtype-specific fill value)
"""
if use_inherent_GCPs and TieP is None:
self.logger.info('Georeferencing dataset using inherent GCP list...')
......@@ -987,23 +989,25 @@ class GEOPROCESSING(object):
if mode == 'rasterio':
"""needs no temporary files but does not support multiprocessing"""
out_prj = EPSG2WKT(dst_EPSG_code)
proj_geo = isProjectedOrGeographic(self.projection)
assert proj_geo in ['projected', 'geographic']
TieP = TieP if proj_geo == 'geographic' else \
[transform_wgs84_to_utm(LonLat[0], LonLat[1], get_UTMzone(prj=self.projection)) for LonLat in TieP]
xmin, xmax, ymin, ymax = HLP_F.corner_coord_to_minmax(TieP)
t0 = time.time()
in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1)
print('reading time', time.time() - t0)
if inFill is None: inFill = get_outFillZeroSaturated(in_arr.dtype)[0]
out_nodataVal = get_outFillZeroSaturated(in_arr.dtype)[0]
t0 = time.time()
warped, gt, prj = warp_ndarray(in_arr, self.geotransform, self.projection, out_prj,
out_bounds=([xmin, ymin, xmax, ymax]), rspAlg='cubic',
in_nodata=inFill, out_nodata=out_nodataVal)[0]
raise NotImplementedError()
# out_prj = EPSG2WKT(dst_EPSG_code)
# proj_geo = isProjectedOrGeographic(self.projection)
#
# assert proj_geo in ['projected', 'geographic']
# TieP = TieP if proj_geo == 'geographic' else \
# [transform_wgs84_to_utm(LonLat[0], LonLat[1], get_UTMzone(prj=self.projection)) for LonLat in TieP]
# xmin, xmax, ymin, ymax = HLP_F.corner_coord_to_minmax(TieP)
# t0 = time.time()
# in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1)
# print('reading time', time.time() - t0)
# if inFill is None:
# inFill = get_outFillZeroSaturated(in_arr.dtype)[0]
# out_nodataVal = get_outFillZeroSaturated(in_arr.dtype)[0]
# t0 = time.time()
# warped, gt, prj = warp_ndarray(in_arr, self.geotransform, self.projection, out_prj,
# out_bounds=([xmin, ymin, xmax, ymax]), rspAlg='cubic',
# in_nodata=inFill, out_nodata=out_nodataVal)[0]
# multiprocessing: not implemented further because multiprocessing within Python mappers is not possible
# args = [(in_arr[:,:,i],self.geotransform,self.projection,out_prj,([xmin, ymin, xmax, ymax]),
......@@ -1012,7 +1016,7 @@ class GEOPROCESSING(object):
# with multiprocessing.Pool() as pool:
# results = pool.map(warp_mp,args)
print('warping time', time.time() - t0)
# print('warping time', time.time() - t0)
# from spectral.io import envi
# envi.save_image('/dev/shm/test_warped.hdr',warped,dtype=str(np.dtype(warped.dtype)),
# force=True,ext='.bsq',interleave='bsq')
......@@ -1021,7 +1025,8 @@ class GEOPROCESSING(object):
"""needs temporary files but does support multiprocessing and configuring cache size"""
t0 = time.time()
in_dtype = gdalnumeric.LoadFile(self.desc, 0, 0, 1, 1).dtype
if inFill is None: inFill = get_outFillZeroSaturated(in_dtype)[0]
if inFill is None:
inFill = get_outFillZeroSaturated(in_dtype)[0]
out_nodataVal = get_outFillZeroSaturated(in_dtype)[0]
gcps = ' '.join(['-gcp %s %s %s %s' % tuple(gcp) for gcp in [gcp_ul, gcp_ur, gcp_ll, gcp_lr]])
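For illustration, the GCP string assembled above, with made-up corner coordinates; each -gcp entry maps pixel/line to georeferenced X/Y for gdal_translate:

gcp_ul = (0, 0, 399960.0, 5900040.0)       # pixel, line, map X, map Y (made up)
gcp_ur = (7600, 0, 628560.0, 5900040.0)
gcp_ll = (0, 7000, 399960.0, 5690040.0)
gcp_lr = (7600, 7000, 628560.0, 5690040.0)

gcps = ' '.join('-gcp %s %s %s %s' % tuple(gcp)
                for gcp in (gcp_ul, gcp_ur, gcp_ll, gcp_lr))
# -> '-gcp 0 0 399960.0 5900040.0 -gcp 7600 0 628560.0 5900040.0 ...'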
......@@ -1033,21 +1038,26 @@ class GEOPROCESSING(object):
os.system('gdal_translate -of ENVI %s \
-a_srs EPSG:%s -q %s %s' % (gcps, dst_EPSG_code, inFile, translatedFile))
# os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps -r \
# cubic -srcnodata -%s -dstnodata %s -overwrite -q %s %s' \
# os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps
# -r cubic -srcnodata -%s -dstnodata %s -overwrite -q %s %s' \
# %(dst_EPSG_code, in_nodataVal,out_nodataVal, translatedFile, warpedFile))
os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -t_srs EPSG:%s -tps -r \
cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=%s -q %s %s' \
cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=%s -q %s %s'
% (dst_EPSG_code, inFill, out_nodataVal, CFG.job.CPUs, translatedFile, warpedFile))
# import shutil
# shutil.copy(translatedFile, '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/') ## only for bugfixing
# shutil.copy(translatedFile+'.hdr','//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/') ## only for bugfixing
# shutil.copy(warpedFile, '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/') ## only for bugfixing
# shutil.copy(warpedFile+'.hdr', '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/') ## only for bugfixing
# only for bugfixing:
# shutil.copy(translatedFile, \
# '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
# shutil.copy(translatedFile+'.hdr',\
# '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
# shutil.copy(warpedFile, \
# '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
# shutil.copy(warpedFile+'.hdr', \
# '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
else:
VRT_name = os.path.splitext(os.path.basename(self.desc))[0] + '.vrt'
dst_ds = gdal.GetDriverByName('VRT').CreateCopy(VRT_name, self.inDs, 0)
dst_ds = None
# dst_ds = gdal.GetDriverByName('VRT').CreateCopy(VRT_name, self.inDs, 0)
# del dst_ds
inFile = VRT_name
translatedFile = os.path.splitext(inFile)[0] + '_translate.vrt' if not use_inherent_GCPs else self.desc
warpedFile = os.path.splitext(inFile)[0] + '_warp.vrt'
......@@ -1058,7 +1068,7 @@ class GEOPROCESSING(object):
# cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=10 -q %s %s' \
# %(dst_EPSG_code,in_nodataVal,out_nodataVal,translatedFile,warpedFile))
os.system('gdalwarp -of VRT --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps -r \
cubic -srcnodata %s -dstnodata %s -overwrite -multi -wo NUM_THREADS=%s -q %s %s' \
cubic -srcnodata %s -dstnodata %s -overwrite -multi -wo NUM_THREADS=%s -q %s %s'
% (dst_EPSG_code, inFill, out_nodataVal, CFG.job.CPUs, translatedFile, warpedFile))
# print('warped')
print('GDAL warping time', time.time() - t0)
......@@ -1078,13 +1088,17 @@ class GEOPROCESSING(object):
# row_start,row_end,col_start,col_end, rows,cols = 0,self.rows,arr_pos,arr_pos+1, self.rows,1
# elif arr_shape == 'band' or arr_shape == 'cube': # arr_pos = None
# row_start,row_end,col_start,col_end, rows,cols = 0,self.rows-1,0,self.cols-1, self.rows,self.cols
# elif arr_shape == 'block' or arr_shape == 'custom': # arr_pos = [ (row_start,row_end),(col_start,col_end),(band_start,band_end) ]
# row_start,row_end,col_start,col_end, rows,cols = arr_pos[0][0],arr_pos[0][1],arr_pos[1][0],arr_pos[1][1], arr_pos[0][1]+1-arr_pos[0][0],arr_pos[1][1]+1-arr_pos[1][0]
# elif arr_shape == 'block' or arr_shape == 'custom':
# # arr_pos = [ (row_start,row_end),(col_start,col_end),(band_start,band_end) ]
# row_start,row_end,col_start,col_end, rows,cols = \
# arr_pos[0][0],arr_pos[0][1],arr_pos[1][0],arr_pos[1][1], \
# arr_pos[0][1]+1-arr_pos[0][0],arr_pos[1][1]+1-arr_pos[1][0]
# elif arr_shape == 'pixel': # arr_pos = (x,y)
# row_start,row_end,col_start,col_end, rows,cols = arr_pos[0],arr_pos[0]+1,arr_pos[1],arr_pos[1]+1, 1,1
# return row_start,row_end,col_start,col_end,rows,cols
# except:
# self.logger.error('Error while setting row and column bounds. Got arr_shape = %s and arr_pos = %s' %(arr_shape,arr_pos))
# self.logger.error(
# 'Error while setting row and column bounds. Got arr_shape = %s and arr_pos = %s' %(arr_shape,arr_pos))
def get_projection_type(self):
return 'LonLat' if osr.SpatialReference(self.projection).IsGeographic() else 'UTM' if osr.SpatialReference(
......@@ -1098,8 +1112,8 @@ class GEOPROCESSING(object):
=> lonlat_arr always contains UL-coordinate of each pixel
:param targetProj: 'LonLat' or 'UTM' """
UL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((0, 0), targetProj)
UR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((0, self.cols),
targetProj)  # self.cols instead of self.cols-1, to get the coordinate at the right pixel edge
# self.cols instead of self.cols-1, to get the coordinate at the right pixel edge:
UR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((0, self.cols), targetProj)
LL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((self.rows, 0), targetProj)
LR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((self.rows, self.cols), targetProj)
UL_LonLat = tuple([round(i, 10) for i in UL_LonLat])
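pixelToWorldCoord_using_geotransform_and_projection presumably evaluates the affine geotransform; consistent with the originX/pixelWidth arithmetic used elsewhere in this class, a sketch without the rotation terms:

def pixel_to_map(row, col, origin_x, origin_y, pixel_width, pixel_height):
    # pixel_height is negative for north-up images, so Y decreases with row
    return origin_x + col * pixel_width, origin_y + row * pixel_height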
......@@ -1137,7 +1151,7 @@ class GEOPROCESSING(object):
# print(data[40:60,730:760])
# print(mask_1bit[40:60,730:760,x])
band.FlushCache()
band = None
del band
mask_1bit = np.all(mask_1bit, axis=2)
# if self.bands > 1:
# mask_1bit[np.std(img,axis=2)==0] = 0 # sets those pixels to zero which have the same value in all bands
......@@ -1151,7 +1165,8 @@ class GEOPROCESSING(object):
:return: [UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]
"""
assert self.subset is None, 'Unable to calculate fullSceneCornerPos for a subset of an image. ' \
'calc_FullDataset_corner_positions can only be called if GEOP object represents a full image.'
'calc_FullDataset_corner_positions can only be called if GEOP object represents ' \
'a full image.'
mask_1bit = mask_data_nodata if mask_data_nodata is not None else self.calc_mask_data_nodata()
cols_containing_data = np.any(mask_1bit, axis=0)
......@@ -1162,9 +1177,10 @@ class GEOPROCESSING(object):
first_dataRow = list(rows_containing_data).index(True)
last_dataRow = self.rows - (list(reversed(rows_containing_data)).index(True) + 1)
if first_dataCol == 0 and first_dataRow == 0 and last_dataCol == self.cols - 1 and last_dataRow == self.rows - 1 \
and mask_1bit[0, 0] == True and mask_1bit[0, self.cols - 1] == True \
and mask_1bit[self.rows - 1, 0] == True and mask_1bit[self.rows - 1, self.cols - 1] == True:
# plain truthiness instead of "is True": numpy bools are not the built-in True singleton
if first_dataCol == 0 and first_dataRow == 0 and last_dataCol == self.cols - 1 \
and last_dataRow == self.rows - 1 \
and mask_1bit[0, 0] and mask_1bit[0, self.cols - 1] \
and mask_1bit[self.rows - 1, 0] and mask_1bit[self.rows - 1, self.cols - 1]:
UL, UR, LL, LR = (0, 0), (0, self.cols - 1), (self.rows - 1, 0), (self.rows - 1, self.cols - 1)
else:
StartStopRows_in_first_dataCol = [list(mask_1bit[:, first_dataCol]).index(True),
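The first/last data row and column search above, condensed into a hedged stand-alone sketch (data_bounds is a hypothetical helper):

import numpy as np

def data_bounds(mask):
    # mask: 2D boolean array, True where valid data exists
    rows = np.any(mask, axis=1)
    cols = np.any(mask, axis=0)
    first_row = int(np.argmax(rows))
    last_row = len(rows) - 1 - int(np.argmax(rows[::-1]))
    first_col = int(np.argmax(cols))
    last_col = len(cols) - 1 - int(np.argmax(cols[::-1]))
    return first_row, last_row, first_col, last_col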
......@@ -1179,7 +1195,8 @@ class GEOPROCESSING(object):
if True in [abs(np.diff(i)[0]) > 10 for i in
[StartStopRows_in_first_dataCol, StartStopRows_in_last_dataCol, StartStopCols_in_first_dataRow,
StartStopCols_in_last_dataRow]]:
''' In case of cut image corners (e.g. ALOS AVNIR-2 Level-1B2): Calculation of fullSceneCornerPos outside of the image.'''
# In case of cut image corners (e.g. ALOS AVNIR-2 Level-1B2): Calculation of fullSceneCornerPos
# outside of the image.
# print ('Image corners detected to be cut.')
def find_line_intersection_point(line1, line2):
......@@ -1234,7 +1251,8 @@ class GEOPROCESSING(object):
LR = corners[distLR.index(min(distLR))]
# print([UL, UR, LL, LR])
# if UL[0] == 0 and UR[0] == 0:
# envi.save_image(os.path.abspath('./testing/out/__faulty_mask_1bit.hdr'), self.mask_nodata, dtype= 'uint8', interleave='bsq', ext='.bsq',force=True)