Commit d03c8511 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Moved several functions to py_tools_ds. Removed deprecated functions. Removed...

Moved several functions to py_tools_ds. Removed deprecated functions. Removed io and utilities modules.
parent 8c942e05
Pipeline #1531 failed with stages
in 54 seconds
......@@ -25,7 +25,6 @@ from skimage.exposure import rescale_intensity
# internal modules
from .DeShifter import DESHIFTER, _dict_rspAlg_rsp_Int
from . import geometry as GEO
from . import io as IO
from . import plotting as PLT
from geoarray import GeoArray
......@@ -40,6 +39,8 @@ from py_tools_ds.geo.raster.reproject import warp_ndarray
from py_tools_ds.geo.map_info import geotransform2mapinfo
from py_tools_ds.numeric.vector import find_nearest
from py_tools_ds.similarity.raster import calc_ssim
from py_tools_ds.io.raster.writer import convert_gdal_to_bsq__mp
from py_tools_ds.io.vector.writer import write_shp
__author__ = 'Daniel Scheffler'
......@@ -293,9 +294,9 @@ class COREG(object):
self._get_overlap_properties()
if self.v and self.path_verbose_out:
IO.write_shp(os.path.join(self.path_verbose_out, 'poly_imref.shp'), self.ref.poly, self.ref.prj)
IO.write_shp(os.path.join(self.path_verbose_out, 'poly_im2shift.shp'), self.shift.poly, self.shift.prj)
IO.write_shp(os.path.join(self.path_verbose_out, 'overlap_poly.shp'), self.overlap_poly, self.ref.prj)
write_shp(os.path.join(self.path_verbose_out, 'poly_imref.shp'), self.ref.poly, self.ref.prj)
write_shp(os.path.join(self.path_verbose_out, 'poly_im2shift.shp'), self.shift.poly, self.shift.prj)
write_shp(os.path.join(self.path_verbose_out, 'overlap_poly.shp'), self.overlap_poly, self.ref.prj)
# FIXME: transform_mapPt1_to_mapPt2(im2shift_center_map, ds_imref.GetProjection(), ds_im2shift.GetProjection())
# FIXME später basteln für den fall, dass projektionen nicht gleich sind
......@@ -307,8 +308,8 @@ class COREG(object):
self._get_clip_window_properties() # sets self.matchBox, self.otherBox and much more
if self.v and self.path_verbose_out and self.matchBox.mapPoly and self.success is not False:
IO.write_shp(os.path.join(self.path_verbose_out, 'poly_matchWin.shp'),
self.matchBox.mapPoly, self.matchBox.prj)
write_shp(os.path.join(self.path_verbose_out, 'poly_matchWin.shp'),
self.matchBox.mapPoly, self.matchBox.prj)
self.success = False if self.success is False or not self.matchBox.boxMapYX else None
self._coreg_info = None # private attribute to be filled by self.coreg_info property
......@@ -741,8 +742,8 @@ class COREG(object):
print('Target window size %s not possible due to too small overlap area or window position too close '
'to an image edge. New matching window size: %s.' % (self.win_size_XY, match_win_size_XY))
# IO.write_shp('/misc/hy5/scheffler/Temp/matchMapPoly.shp', matchBox.mapPoly,matchBox.prj)
# IO.write_shp('/misc/hy5/scheffler/Temp/otherMapPoly.shp', otherBox.mapPoly,otherBox.prj)
# write_shp('/misc/hy5/scheffler/Temp/matchMapPoly.shp', matchBox.mapPoly,matchBox.prj)
# write_shp('/misc/hy5/scheffler/Temp/otherMapPoly.shp', otherBox.mapPoly,otherBox.prj)
def _get_image_windows_to_match(self):
"""Reads the matching window and the other window using subset read, and resamples the other window to the
......@@ -1433,7 +1434,7 @@ class COREG(object):
ds_im2shift = gdal.Open(self.shift.path)
if not ds_im2shift.GetDriver().ShortName == 'ENVI': # FIXME laaangsam
if self.CPUs is not None and self.CPUs > 1:
IO.convert_gdal_to_bsq__mp(self.shift.path, self.path_out)
convert_gdal_to_bsq__mp(self.shift.path, self.path_out)
else:
os.system('gdal_translate -of ENVI %s %s' % (self.shift.path, self.path_out))
file2getHdr = self.path_out
......
......@@ -20,11 +20,11 @@ from skimage.transform import AffineTransform, PolynomialTransform
# internal modules
from .CoReg import COREG
from . import io as IO
from py_tools_ds.geo.projection import isProjectedOrGeographic, isLocal, get_UTMzone, dict_to_proj4, proj4_to_WKT
from py_tools_ds.io.pathgen import get_generic_outpath
from py_tools_ds.processing.progress_mon import ProgressBar
from py_tools_ds.geo.vector.conversion import points_to_raster
from py_tools_ds.io.vector.writer import write_shp
from geoarray import GeoArray
from .CoReg import GeoArray_CoReg # noqa F401 # flake8 issue
......@@ -637,7 +637,7 @@ class Tie_Point_Grid(object):
fName_out = "CoRegPoints_grid%s_ws%s.shp" % (self.grid_res, self.COREG_obj.win_size_XY)
path_out = os.path.join(self.dir_out, fName_out)
IO.write_shp(path_out, shapely_points, prj=self.COREG_obj.shift.prj, attrDict=attr_dicts)
write_shp(path_out, shapely_points, prj=self.COREG_obj.shift.prj, attrDict=attr_dicts)
def to_vectorfield(self, path_out=None, fmt=None, mode='md'):
# type: (str) -> GeoArray
......@@ -680,45 +680,6 @@ class Tie_Point_Grid(object):
return out_GA
def _to_Raster_using_KrigingOLD(self, attrName, skip_nodata=1, skip_nodata_col='ABS_SHIFT', outGridRes=None,
path_out=None, tilepos=None): # pragma: no cover
warnings.warn(DeprecationWarning("'to_Raster_using_KrigingOLD' is deprecated. Use to_Raster_using_Kriging "
"instead.")) # TODO delete
GDF = self.CoRegPoints_table
GDF2pass = GDF if not skip_nodata else GDF[GDF[skip_nodata_col] != self.outFillVal]
# subset if tilepos is given
rows, cols = tilepos if tilepos else self.shift.shape
GDF2pass = GDF2pass.loc[(GDF2pass['X_IM'] >= cols[0]) & (GDF2pass['X_IM'] <= cols[1]) &
(GDF2pass['Y_IM'] >= rows[0]) & (GDF2pass['Y_IM'] <= rows[1])]
X_coords, Y_coords, ABS_SHIFT = GDF2pass['X_UTM'], GDF2pass['Y_UTM'], GDF2pass[attrName]
xmin, ymin, xmax, ymax = GDF2pass.total_bounds
grid_res = outGridRes if outGridRes else int(min(xmax - xmin, ymax - ymin) / 250)
grid_x, grid_y = np.arange(xmin, xmax + grid_res, grid_res), np.arange(ymax, ymin - grid_res, -grid_res)
# Reference: P.K. Kitanidis, Introduction to Geostatistcs: Applications in Hydrogeology,
# (Cambridge University Press, 1997) 272 p.
from pykrige.ok import OrdinaryKriging
OK = OrdinaryKriging(X_coords, Y_coords, ABS_SHIFT, variogram_model='spherical', verbose=False)
zvalues, sigmasq = OK.execute('grid', grid_x, grid_y) # ,backend='C',)
path_out = path_out if path_out else \
get_generic_outpath(dir_out=os.path.join(self.dir_out, 'CoRegPoints'),
fName_out="Kriging__%s__grid%s_ws(%s_%s).tif"
% (attrName, self.grid_res, self.COREG_obj.win_size_XY[0],
self.COREG_obj.win_size_XY[1]))
print('Writing %s ...' % path_out)
# add a half pixel grid points are centered on the output pixels
xmin, ymin, xmax, ymax = xmin - grid_res / 2, ymin - grid_res / 2, xmax + grid_res / 2, ymax + grid_res / 2
IO.write_numpy_to_image(zvalues, path_out, gt=(xmin, grid_res, 0, ymax, 0, -grid_res),
prj=self.COREG_obj.shift.prj)
return zvalues
def to_Raster_using_Kriging(self, attrName, skip_nodata=1, skip_nodata_col='ABS_SHIFT', outGridRes=None,
fName_out=None, tilepos=None, tilesize=500, mp=None):
......@@ -752,17 +713,6 @@ class Tie_Point_Grid(object):
GDF = self.CoRegPoints_table
GDF2pass = GDF if not skip_nodata else GDF[GDF[skip_nodata_col] != self.outFillVal]
# # subset if tilepos is given
# # overlap_factor =
# rows,cols = tilepos if tilepos else self.tgt_shape
# xvals, yvals = np.sort(GDF2pass['X_IM'].values.flat),np.sort(GDF2pass['Y_IM'].values.flat)
# cS,cE = UTL.find_nearest(xvals,cols[0],'off',1), UTL.find_nearest(xvals,cols[1],'on',1)
# rS,rE = UTL.find_nearest(yvals,rows[0],'off',1), UTL.find_nearest(yvals,rows[1],'on',1)
# # GDF2pass = GDF2pass.loc[(GDF2pass['X_IM']>=cols[0])&(GDF2pass['X_IM']<=cols[1])&
# # (GDF2pass['Y_IM']>=rows[0])&(GDF2pass['Y_IM']<=rows[1])]
# GDF2pass = GDF2pass.loc[(GDF2pass['X_IM']>=cS)&(GDF2pass['X_IM']<=cE)&
# (GDF2pass['Y_IM']>=rS)&(GDF2pass['Y_IM']<=rE)]
X_coords, Y_coords, ABS_SHIFT = GDF2pass['X_UTM'], GDF2pass['Y_UTM'], GDF2pass[attrName]
xmin, ymin, xmax, ymax = GDF2pass.total_bounds
......@@ -783,11 +733,12 @@ class Tie_Point_Grid(object):
fName_out = fName_out if fName_out else \
"Kriging__%s__grid%s_ws%s.tif" % (attrName, self.grid_res, self.COREG_obj.win_size_XY)
path_out = get_generic_outpath(dir_out=self.dir_out, fName_out=fName_out)
print('Writing %s ...' % path_out)
# add a half pixel grid points are centered on the output pixels
xmin, ymin, xmax, ymax = xmin - grid_res / 2, ymin - grid_res / 2, xmax + grid_res / 2, ymax + grid_res / 2
IO.write_numpy_to_image(zvalues, path_out, gt=(xmin, grid_res, 0, ymax, 0, -grid_res),
prj=self.COREG_obj.shift.prj)
GeoArray(zvalues,
geotransform=(xmin, grid_res, 0, ymax, 0, -grid_res),
projection=self.COREG_obj.shift.prj).save(path_out)
return zvalues
......
# -*- coding: utf-8 -*-
import ctypes
import multiprocessing
import os
import time
import numpy as np
import ogr
import osr
try:
import gdal
except ImportError:
from osgeo import gdal
from spectral.io import envi
# internal modules
from .utilities import get_image_tileborders, convertGdalNumpyDataType
from py_tools_ds.geo.map_info import geotransform2mapinfo
from py_tools_ds.geo.projection import EPSG2WKT
from py_tools_ds.dtypes.conversion import get_dtypeStr
def wait_if_used(path_file, lockfile, timeout=100, try_kill=0):
    """Block until no module-global gdal.Dataset references *path_file* and *lockfile* is gone.

    :param path_file: file whose open gdal dataset references are waited on
    :param lockfile:  path of a lock file; as long as it exists, waiting continues
    :param timeout:   maximum number of seconds to wait
    :param try_kill:  if truthy, release the blocking gdal datasets on timeout
                      instead of raising
    :raises TimeoutError: if the file is still in use after *timeout* seconds
                          and *try_kill* is falsy
    """
    globs = globals()
    same_gdalRefs = [k for k, v in globs.items() if
                     isinstance(globs[k], gdal.Dataset) and globs[k].GetDescription() == path_file]
    t0 = time.time()

    def update_same_gdalRefs(sRs): return [sR for sR in sRs if sR in globals() and globals()[sR] is not None]

    while same_gdalRefs != [] or os.path.exists(lockfile):
        # BUGFIX: check the timeout FIRST. The previous version executed
        # 'continue' whenever the lockfile existed, so the timeout test below
        # was unreachable in that case and the loop busy-waited forever.
        if time.time() - t0 > timeout:
            if try_kill:
                for sR in same_gdalRefs:
                    globals()[sR] = None
                    print('had to kill %s' % sR)
            else:
                if os.path.exists(lockfile):
                    os.remove(lockfile)
                raise TimeoutError('The file %s is permanently used by another variable.' % path_file)

        if os.path.exists(lockfile):
            time.sleep(0.1)  # avoid a 100% CPU busy-wait while the lock is held
            continue

        same_gdalRefs = update_same_gdalRefs(same_gdalRefs)
def write_envi(arr, outpath, gt=None, prj=None):
    """Write a numpy array to an ENVI BSQ image via spectral.io.envi.

    :param arr:     numpy array to write (only a single band is actually written below)
    :param outpath: output path of the ENVI header file
    :param gt:      GDAL geotransform tuple; must be given together with prj
    :param prj:     projection as WKT string; must be given together with gt
    """
    if gt or prj:
        assert gt and prj, 'gt and prj must be provided together or left out.'
    meta = {'map info': geotransform2mapinfo(gt, prj), 'coordinate system string': prj} if gt else None
    # NOTE(review): this condition looks inverted — a 3D input is clipped to one band
    # while a 2D input keeps its 2D shape, yet the write below assumes a 3rd axis
    # (out_mm[:, :, 0]). Confirm the intended input shapes before changing.
    shape = (arr.shape[0], arr.shape[1], 1) if len(arr.shape) == 3 else arr.shape
    out = envi.create_image(outpath, metadata=meta, shape=shape, dtype=arr.dtype, interleave='bsq', ext='.bsq',
                            force=True)  # 8bit for multiple masks in one file
    out_mm = out.open_memmap(writable=True)
    out_mm[:, :, 0] = arr
def wfa(p, c):  # pragma: no cover
    """Best-effort append of string *c* to the file at path *p*.

    Any failure (missing directory, permission error, ...) is deliberately
    swallowed — this exists only for throwaway debug logging.
    """
    try:
        handle = open(p, 'a')
    except Exception:
        return
    try:
        handle.write(c)
    except Exception:
        pass
    finally:
        handle.close()
shared_array = None
def init_SharedArray_in_globals(dims):
    """Allocate a process-shared double array of shape *dims* and publish it as the
    module-level ``shared_array`` so forked worker processes inherit it.

    :param dims: (rows, cols) of the array to allocate
    """
    global shared_array
    n_rows, n_cols = dims
    base = multiprocessing.Array(ctypes.c_double, n_rows * n_cols)
    shared_array = np.ctypeslib.as_array(base.get_obj()).reshape(n_rows, n_cols)
def gdal_read_subset(fPath, pos, bandNr):
    """Read a rectangular subset of one band of a GDAL-readable raster.

    :param fPath:  path of the raster file
    :param pos:    ((rowStart, rowEnd), (colStart, colEnd)), end indices inclusive
    :param bandNr: 1-based number of the band to read
    :return: the subset as a numpy array
    """
    (row_start, row_end), (col_start, col_end) = pos
    dataset = gdal.Open(fPath)
    subset = dataset.GetRasterBand(bandNr).ReadAsArray(
        col_start, row_start, col_end - col_start + 1, row_end - row_start + 1)
    del dataset
    return subset
def fill_arr(argDict, def_param=shared_array):
    """Worker function: compute one tile and write it into the global shared array.

    :param argDict:   dict with keys 'pos' (((rS, rE), (cS, cE)), inclusive indices),
                      'func2call', and optional 'func_args' / 'func_kwargs'
    :param def_param: default-argument trick that keeps a reference to the
                      module-level shared array alive in forked worker
                      processes — do not pass explicitly
    """
    pos = argDict.get('pos')
    func = argDict.get('func2call')
    args = argDict.get('func_args', [])
    kwargs = argDict.get('func_kwargs', {})

    (rS, rE), (cS, cE) = pos
    # end indices are inclusive, hence the +1
    shared_array[rS:rE + 1, cS:cE + 1] = func(*args, **kwargs)
def gdal_ReadAsArray_mp(fPath, bandNr, tilesize=1500):
    """Read one band of a raster file into a shared-memory array using multiprocessing.

    :param fPath:    path of a GDAL-readable raster
    :param bandNr:   1-based number of the band to read
    :param tilesize: edge length (pixels) of the tiles read by worker processes
    :return: the filled module-level shared numpy array (rows x cols)
    """
    ds = gdal.Open(fPath)
    rows, cols = ds.RasterYSize, ds.RasterXSize
    del ds  # release the dataset before forking workers

    init_SharedArray_in_globals((rows, cols))

    tilepos = get_image_tileborders([tilesize, tilesize], (rows, cols))
    fill_arr_argDicts = [{'pos': pos, 'func2call': gdal_read_subset, 'func_args': (fPath, pos, bandNr)} for pos in
                         tilepos]

    with multiprocessing.Pool() as pool:
        pool.map(fill_arr, fill_arr_argDicts)

    return shared_array
def write_shp(path_out, shapely_geom, prj=None, attrDict=None):
    """Write shapely geometries (with optional attributes) to an Esri Shapefile.

    :param path_out:     output path of the shapefile; an existing file is overwritten
    :param shapely_geom: a shapely geometry or a list of them (all of the same type:
                         Point, LineString or Polygon)
    :param prj:          projection as WKT string or EPSG code (int)
    :param attrDict:     attribute dict or list of dicts (one per geometry);
                         field names may have at most 10 characters
    """
    shapely_geom = [shapely_geom] if not isinstance(shapely_geom, list) else shapely_geom
    attrDict = [attrDict] if not isinstance(attrDict, list) else attrDict
    # print(len(shapely_geom))
    # print(len(attrDict))
    # NOTE(review): with the default attrDict=None and more than one geometry this
    # assertion fails ([None] has length 1) — confirm whether multi-geometry calls
    # are required to always pass a matching attribute list.
    assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
    assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

    print('Writing %s ...' % path_out)
    if os.path.exists(path_out):
        os.remove(path_out)  # the Esri Shapefile driver does not overwrite
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)

    if prj is not None:
        prj = prj if not isinstance(prj, int) else EPSG2WKT(prj)
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
    else:
        srs = None

    geom_type = list(set([gm.type for gm in shapely_geom]))
    assert len(geom_type) == 1, 'All shapely geometries must belong to the same type. Got %s.' % geom_type

    layer = \
        ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else\
        ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
        ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else None  # FIXME: any other geometry type silently leaves layer as None

    if isinstance(attrDict[0], dict):
        for attr in attrDict[0].keys():
            assert len(attr) <= 10, "ogr does not support fieldnames longer than 10 digits. '%s' is too long" % attr
            DTypeStr = get_dtypeStr(attrDict[0][attr])
            # map the python/numpy value type of the first record to an OGR field type
            FieldType = \
                ogr.OFTInteger if DTypeStr.startswith('int') else \
                ogr.OFTReal if DTypeStr.startswith('float') else \
                ogr.OFTString if DTypeStr.startswith('str') else \
                ogr.OFTDateTime if DTypeStr.startswith('date') else None
            FieldDefn = ogr.FieldDefn(attr, FieldType)
            if DTypeStr.startswith('float'):
                FieldDefn.SetPrecision(6)
            layer.CreateField(FieldDefn)  # Add one attribute

    for i in range(len(shapely_geom)):
        # Create a new feature (attribute and geometry)
        feat = ogr.Feature(layer.GetLayerDefn())
        feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_geom[i].wkb))  # Make a geometry, from Shapely object

        # the field set is taken from the FIRST record; all records must share it
        list_attr2set = attrDict[0].keys() if isinstance(attrDict[0], dict) else []

        for attr in list_attr2set:
            val = attrDict[i][attr]
            DTypeStr = get_dtypeStr(val)
            # coerce numpy scalars to plain python types accepted by SetField
            val = int(val) if DTypeStr.startswith('int') else float(val) if DTypeStr.startswith('float') else \
                str(val) if DTypeStr.startswith('str') else val
            feat.SetField(attr, val)

        layer.CreateFeature(feat)
        feat.Destroy()

    # Save and close everything
    del ds, layer
def write_numpy_to_image(array, path_out, outFmt='GTIFF', gt=None, prj=None):
    """Write a 2D or 3D numpy array to a raster file via GDAL.

    :param array:    2D (rows x cols) or 3D (rows x cols x bands) numpy array
    :param path_out: output file path
    :param outFmt:   GDAL driver short name (default 'GTIFF')
    :param gt:       optional GDAL geotransform tuple
    :param prj:      optional projection as WKT string
    """
    rows, cols, bands = list(array.shape) + [1] if len(array.shape) == 2 else array.shape
    gdal_dtype = gdal.GetDataTypeByName(convertGdalNumpyDataType(array.dtype))
    outDs = gdal.GetDriverByName(outFmt).Create(path_out, cols, rows, bands, gdal_dtype)
    for b in range(bands):
        band = outDs.GetRasterBand(b + 1)
        arr2write = array if len(array.shape) == 2 else array[:, :, b]
        band.WriteArray(arr2write)
        del band
    if gt:
        outDs.SetGeoTransform(gt)
    if prj:
        outDs.SetProjection(prj)
    del outDs  # closes the dataset and flushes it to disk
# def get_tempfile(ext=None,prefix=None,tgt_dir=None):
# """Returns the path to a tempfile.mkstemp() file that can be passed to any function that expects a physical path.
# The tempfile has to be deleted manually.
# :param ext: file extension (None if None)
# :param prefix: optional file prefix
# :param tgt_dir: target directory (automatically set if None)
# """
# prefix = 'danschef__CoReg__' if prefix is None else prefix
# fd, path = tempfile.mkstemp(prefix=prefix,suffix=ext,dir=tgt_dir)
# os.close(fd)
# return path
shared_array_on_disk__memmap = None
def init_SharedArray_on_disk(out_path, dims, gt=None, prj=None):
    """Create an ENVI BSQ image on disk and open it as a global writable memmap.

    :param out_path: output path; a '.bsq' extension is swapped for '.hdr'
    :param dims:     shape of the image to create
    :param gt:       optional GDAL geotransform tuple (used together with prj)
    :param prj:      optional projection as WKT string (used together with gt)
    """
    global shared_array_on_disk__memmap
    global shared_array_on_disk__path  # NOTE(review): declared global but never assigned in this function
    path = out_path if not os.path.splitext(out_path)[1] == '.bsq' else \
        os.path.splitext(out_path)[0] + '.hdr'
    Meta = {}
    if gt and prj:
        Meta['map info'] = geotransform2mapinfo(gt, prj)
        Meta['coordinate system string'] = prj
    # dtype is hard-coded to uint16 — presumably sufficient for the rasters
    # converted here; TODO confirm against callers
    shared_array_on_disk__obj = envi.create_image(path, metadata=Meta, shape=dims, dtype='uint16',
                                                  interleave='bsq', ext='.bsq', force=True)
    shared_array_on_disk__memmap = shared_array_on_disk__obj.open_memmap(writable=True)
def fill_arr_on_disk(argDict):
    """Worker function: read one tile from a raster and write it into the global on-disk memmap.

    :param argDict: dict with keys 'pos' (((rS, rE), (cS, cE)), inclusive indices),
                    'in_path' (source raster path) and 'band' (1-based band number)
    """
    pos = argDict.get('pos')
    in_path = argDict.get('in_path')
    band = argDict.get('band')

    (rS, rE), (cS, cE) = pos
    ds = gdal.Open(in_path)
    band = ds.GetRasterBand(band)  # NOTE: rebinds 'band' from band number to band object
    data = band.ReadAsArray(cS, rS, cE - cS + 1, rE - rS + 1)  # end indices inclusive
    shared_array_on_disk__memmap[rS:rE + 1, cS:cE + 1, 0] = data
    del ds, band
def convert_gdal_to_bsq__mp(in_path, out_path, band=1):
    """Convert one band of any GDAL-readable raster to an ENVI BSQ file using multiprocessing.

    Usage:
        ref_ds,tgt_ds = gdal.Open(self.path_imref),gdal.Open(self.path_im2shift)
        ref_pathTmp, tgt_pathTmp = None,None
        if ref_ds.GetDriver().ShortName!='ENVI':
            ref_pathTmp = IO.get_tempfile(ext='.bsq')
            IO.convert_gdal_to_bsq__mp(self.path_imref,ref_pathTmp)
            self.path_imref = ref_pathTmp
        if tgt_ds.GetDriver().ShortName!='ENVI':
            tgt_pathTmp = IO.get_tempfile(ext='.bsq')
            IO.convert_gdal_to_bsq__mp(self.path_im2shift,tgt_pathTmp)
            self.path_im2shift = tgt_pathTmp
        ref_ds=tgt_ds=None

    :param in_path:  path of the input raster (any GDAL-readable format)
    :param out_path: output path of the ENVI BSQ file
    :param band:     1-based number of the band to convert (default: 1)
    :return: None (the result is written to disk via the global on-disk memmap)
    """
    ds = gdal.Open(in_path)
    dims = (ds.RasterYSize, ds.RasterXSize)
    gt, prj = ds.GetGeoTransform(), ds.GetProjection()
    del ds  # release the dataset before forking workers

    init_SharedArray_on_disk(out_path, dims, gt, prj)
    # fixed 512x512 tiling; each worker reads one tile and writes it to the memmap
    positions = get_image_tileborders([512, 512], dims)
    argDicts = [{'pos': pos, 'in_path': in_path, 'band': band} for pos in positions]

    with multiprocessing.Pool() as pool:
        pool.map(fill_arr_on_disk, argDicts)
# -*- coding: utf-8 -*-
import numpy as np
def get_image_tileborders(target_tileSize, shape_fullArr):
    """Compute the row/column bounds of tiles covering a 2D array.

    :param target_tileSize: [tile_rows, tile_cols]
    :param shape_fullArr:   shape of the full array (only the first two entries used)
    :return: list of [(rowStart, rowEnd), (colStart, colEnd)] per tile,
             end indices inclusive; the last tile per axis is clipped to the array edge
    """
    n_rows, n_cols = shape_fullArr[:2]

    def _edges(total, step):
        # alternating start/end indices [s0, e0, s1, e1, ...]; final end = total - 1
        bounds = [0]
        while bounds[-1] + step < total:
            bounds.append(bounds[-1] + step - 1)  # inclusive end of the current tile
            bounds.append(bounds[-2] + step)      # start of the next tile
        bounds.append(total - 1)
        return bounds

    rb = _edges(n_rows, target_tileSize[0])
    cb = _edges(n_cols, target_tileSize[1])
    return [[(rb[r], rb[r + 1]), (cb[c], cb[c + 1])]
            for r in range(0, len(rb), 2) for c in range(0, len(cb), 2)]
def cornerPoints_to_listOfXYPairs(corYX, out_shape, out_resXY, shrinkSize=None):
    """Create a flat list of (X, Y) coordinate pairs on a regular grid spanned by corner points.

    :param corYX:      corner coordinates as (Y, X) pairs in the order UL, UR, LR, LL
    :param out_shape:  (rows, cols) of the output grid
    :param out_resXY:  (xres, yres) grid resolution
    :param shrinkSize: optional number of border rows/cols stripped from each side
    :return: numpy array of shape (n_points, 2) with columns X, Y
    """
    rows, cols = out_shape
    ul_y, ul_x = corYX[0]
    ur_x = corYX[1][1]
    lr_y = corYX[2][0]

    # X increases along the columns; broadcast one arange into every row
    x_grid = np.zeros(out_shape, np.float64)
    x_grid[None, :] = np.arange(ul_x, ur_x, out_resXY[0])

    # Y runs along the rows; flip the step sign when the lower corner has a smaller Y
    y_step = out_resXY[1] if ul_y < lr_y else -out_resXY[1]
    y_grid = np.zeros((cols, rows), np.float64)
    y_grid[None, :] = np.arange(ul_y, lr_y, y_step)
    y_grid = y_grid.T

    if shrinkSize:
        x_grid = x_grid[shrinkSize:-shrinkSize, shrinkSize:-shrinkSize]
        y_grid = y_grid[shrinkSize:-shrinkSize, shrinkSize:-shrinkSize]

    pairs = np.empty((x_grid.size, 2), np.float64)
    pairs[:, 0] = x_grid.flat
    pairs[:, 1] = y_grid.flat
    return pairs
def get_coord_grid(ULxy, LRxy, out_resXY):
    """Return X/Y meshgrid arrays spanning from the UL to the LR coordinate.

    :param ULxy:      (x, y) of the upper-left corner
    :param LRxy:      (x, y) of the lower-right corner (exclusive, like np.arange)
    :param out_resXY: (xres, yres) grid spacing
    :return: [X, Y] 2D arrays as produced by np.meshgrid
    """
    x_axis = np.arange(ULxy[0], LRxy[0], out_resXY[0])
    y_axis = np.arange(ULxy[1], LRxy[1], out_resXY[1])
    return np.meshgrid(x_axis, y_axis)
def convertGdalNumpyDataType(dType):
    """Translate between GDAL data type names and numpy data types (both directions).

    :param dType: GDAL data type name (string) or a numpy data type
    :return:      the corresponding numpy dtype (for a GDAL string input) or the
                  GDAL type name string (for a numpy dtype input)
    :raises Exception: if the input type is not recognized
    """
    # GDAL type name -> numpy dtype; iteration order matters for the reverse lookup:
    # np.uint32 deliberately resolves to the LAST matching key, 'GDT_UInt32'
    dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
                "Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
    outdType = None
    if dType in dTypeDic:
        outdType = dTypeDic[dType]
    elif dType in dTypeDic.values():
        for gdal_name, np_type in dTypeDic.items():
            if dType == np_type:
                outdType = gdal_name
    # BUGFIX: np.int / np.bool / np.float were deprecated aliases of the builtins
    # and were removed in NumPy 1.24 — referencing them raised AttributeError as
    # soon as these branches were evaluated. Use the builtins they pointed to.
    elif dType in [np.int8, np.int64, int]:
        outdType = "Int32"
        print(">>> Warning: %s is converted to GDAL_Type 'Int_32'\n" % dType)
    elif dType in [bool, np.bool_]:
        outdType = "Byte"
        print(">>> Warning: %s is converted to GDAL_Type 'Byte'\n" % dType)
    elif dType in [float]:
        outdType = "Float32"
        print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
    elif dType in [np.float16]:
        outdType = "Float32"
        print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
    else:
        raise Exception('GEOP.convertGdalNumpyDataType: Unexpected input data type %s.' % dType)
    return outdType
......@@ -360,7 +360,13 @@ if __name__ == '__main__':
from socket import gethostname
from datetime import datetime as dt
from getpass import getuser
from arosics.io import wfa
def wfa(p, c):
try:
with open(p, 'a') as of:
of.write(c)
except Exception:
pass
wfa('/misc/hy5/scheffler/tmp/crlf', '%s\t%s\t%s\t%s\n' % (dt.now(), getuser(), gethostname(), ' '.join(sys.argv)))
......
......@@ -13,7 +13,7 @@ with open('README.rst') as readme_file:
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = ['numpy', 'gdal', 'shapely', 'scikit-image', 'matplotlib', 'geopandas', 'spectral', 'geoarray>=0.6.16',
requirements = ['numpy', 'gdal', 'shapely', 'scikit-image', 'matplotlib', 'geopandas', 'geoarray>=0.6.16',
'py_tools_ds>=0.9.3', 'plotly', 'cmocean', 'six',
# 'pykrige' # conda install --yes -c conda-forge pykrige
# 'pyfftw', # conda install --yes -c conda-forge pyfftw=0.10.4 ; \
......
......@@ -18,7 +18,7 @@ RUN /bin/bash -i -c "source /root/anaconda3/bin/activate ; \
conda install --yes -c ioam holoviews bokeh ; \
conda install --yes -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio shapely basemap pykrige geopandas; \
conda install --yes -c conda-forge 'icu=58.*' lxml ; \
pip install dicttoxml jsmin cerberus pyprind pint iso8601 tqdm mpld3 sphinx-argparse spectral \
pip install dicttoxml jsmin cerberus pyprind pint iso8601 tqdm mpld3 sphinx-argparse \
geoarray>=0.6.12 py_tools_ds>=0.9.1 plotly flake8 pycodestyle pylint pydocstyle coverage nose nose2 \
nose-htmloutput rednose" # must include all the requirements needed to build the docs!
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment