Commit ca0d8e78 authored by Daniel Scheffler

Merge branch 'dev' into include_sicor

Former-commit-id: ce5bc509
Former-commit-id: 6721ba70
parents a925ad93 d62647e3
......@@ -67,15 +67,17 @@ target/
BAK/
OLD/
/geomultisens/database/cloud_classifier/
/geomultisens/database/sampledata/
/tests/data/sampledata/
/geomultisens/database/metadata/
/geomultisens/database/tiling_system/
/geomultisens/database/processed_data/
/geomultisens/database/earth_sun_distance/
/geomultisens/database/solar_irradiance/
!/geomultisens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv
!/geomultisens/database/solar_irradiance/SUNp1fontenla__350-2500nm_converted.txt
!/geomultisens/database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt
/geomultisens/database/earth_sun_distance/horizon_web_interface.cgi_files/
/geomultisens/database/earth_sun_distance/horizon_web_interface.cgi.html
/geomultisens/database/earth_sun_distance/README
/geomultisens/database/solar_irradiance/Solar_irradiance_Thuillier_2002.xls
/geomultisens/database/solar_irradiance/Thuillier_2003_solar_irradiance_400_2400.pdf
/geomultisens/database/solar_irradiance/SUNp1fontenla.asc
/geomultisens/database/solar_irradiance/SOLAR.txt
/geomultisens/logs/
.ipynb_checkpoints/
......@@ -94,3 +96,4 @@ sandbox/Landsat-8__OLI_TIRS__LC81940242015091LGN00__32UNC_masks_L2C__compressed.
sandbox/job_logs/
sandbox/meta_validation/
before_script:
- git lfs pull
test_geomultisens:
script:
- source /root/anaconda3/bin/activate
- export GDAL_DATA=/root/anaconda3/share/gdal
- export PYTHONPATH=$PYTHONPATH:/root # /root <- directory needed later
- make coverage
- make docs
artifacts:
paths:
- htmlcov/
- docs/_build/html/
pages:
stage: deploy
dependencies:
- test_geomultisens
script:
- mkdir -p public/coverage
- cp -r htmlcov/* public/coverage/
- mkdir -p public/doc
- cp -r docs/_build/html/* public/doc/
artifacts:
paths:
- public
expire_in: 30 days
only:
- master
......@@ -51,16 +51,16 @@ lint: ## check style with flake8
flake8 geomultisens tests
test: ## run tests quickly with the default Python
python setup.py test
test-all: ## run tests on every Python version with tox
tox
coverage: ## check code coverage quickly with the default Python
coverage run --source geomultisens setup.py test
coverage report -m
coverage html
$(BROWSER) htmlcov/index.html
......@@ -86,4 +86,8 @@ dist: clean ## builds source and wheel package
ls -l dist
install: clean ## install the package to the active Python's site-packages
pip install -r requirements.txt
python setup.py install
gitlab_CI_docker: ## Build a docker image for CI use within gitlab
cd ./tests/CI_docker/; bash ./build_testsuite_image.sh
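The new target is invoked from the repository root like any other Make target; it simply wraps the build script shown above:

    make gitlab_CI_docker   # builds the docker image used by the GitLab CI testsuite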
......@@ -3,27 +3,31 @@ GeoMultiSens
============
.. image:: https://img.shields.io/pypi/v/geomultisens.svg
GeoMultiSens - Scalable Multi-Sensor Analysis of Remote Sensing Data
* Free software: GNU General Public License v3
* Documentation: https://geomultisens.readthedocs.io.
Status
------
.. .. image:: https://img.shields.io/pypi/v/geomultisens.svg
:target: https://pypi.python.org/pypi/geomultisens
.. image:: https://img.shields.io/travis/danschef/geomultisens.svg
.. .. image:: https://img.shields.io/travis/danschef/geomultisens.svg
:target: https://travis-ci.org/danschef/geomultisens
.. image:: https://readthedocs.org/projects/geomultisens/badge/?version=latest
.. .. image:: https://readthedocs.org/projects/geomultisens/badge/?version=latest
:target: https://geomultisens.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://pyup.io/repos/github/danschef/geomultisens/shield.svg
.. .. image:: https://pyup.io/repos/github/danschef/geomultisens/shield.svg
:target: https://pyup.io/repos/github/danschef/geomultisens/
:alt: Updates
GeoMultiSens - Scalable Multi-Sensor Analysis of Remote Sensing Data
* Free software: GNU General Public License v3
* Documentation: https://geomultisens.readthedocs.io.
Features
--------
......
......@@ -17,12 +17,14 @@ export GDAL_DATA=${PFX}/opt/share/gdal
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python/GeoMultiSens # needed to find geomultisens
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python/geoarray # needed to find geoarray
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python # needed to find e.g. CoReg_Sat, py_tools_ds
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python/py_tools_ds # needed to find py_tools_ds
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python # needed to find e.g. CoReg_Sat
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/AT4P # needed to find AT4P
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/ECMWF # needed to find ECMWF
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/S2MSI # needed to find S2MSI
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/S2SCAPEM # needed to find S2SCAPEM
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/cBc # needed to find cBc
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/sicor # needed to find sicor
# execute python script
......
......@@ -15,14 +15,16 @@ export LD_LIBRARY_PATH=${LD_PATH_PYTHON_GFZ}:${LD_LIBRARY_PATH}
export PYTHONPATH=${PFX}/opt/lib/python3.6/site-packages:${PFX}/opt/lib/python2.7/site-packages # Python version must be updated here!
export GDAL_DATA=${PFX}/opt/share/gdal
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/GeoMultiSens_deployed/GeoMultiSens # needed to find geomultisens
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/GeoMultiSens_deployed/geoarray # needed to find geoarray
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/GeoMultiSens_deployed # needed to find e.g. CoReg_Sat, py_tools_ds
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/GeoMultiSens # needed to find geomultisens
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/geoarray # needed to find geoarray
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/py_tools_ds # needed to find py_tools_ds
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed # needed to find e.g. CoReg_Sat
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/AT4P # needed to find AT4P
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/ECMWF # needed to find ECMWF
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/S2MSI # needed to find S2MSI
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/S2SCAPEM # needed to find S2SCAPEM
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/cBc # needed to find cBc
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/sicor # needed to find sicor
# execute python script
......
......@@ -45,62 +45,75 @@ def run_from_sceneids(args):
virtual_sensor_id=virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref,
comment='')
dbJob.create()
jobid = dbJob.id
# set up process controller instance
parallelization_level = 'scenes'
warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.") # TODO
PC = process_controller(jobid, parallelization_level=parallelization_level)
#PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
#PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
_run_job(dbJob)
def run_from_entityids(args):
# create a new job from entity IDs
# TODO
"""Create a new job from entity IDs.
:param args:
:return:
"""
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3" # TODO
warnings.warn('Currently the console argument parser expects the database at localhost.') # TODO
virtual_sensor_id = 1 # TODO
warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.') # TODO
datasetid_spatial_ref = 249 # TODO
warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.') # TODO
# run the job
PC.run_all_processors()
dbJob = GMS_JOB(db_connection)
dbJob.from_entityIDlist(list_entityids=args.entityids,
virtual_sensor_id=virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref,
comment='')
_run_job(dbJob)
def run_from_filenames(args):
# find out sceneIDs belonging to filenames
# create a new job from entity IDs
# TODO
"""Create a new GMS job from filenames of downloaded archives and run it!
:param args:
:return:
"""
db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3" # TODO
warnings.warn('Currently the console argument parser expects the database at localhost.') # TODO
virtual_sensor_id = 1 # TODO
warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.') # TODO
datasetid_spatial_ref = 249 # TODO
warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.') # TODO
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
dbJob = GMS_JOB(db_connection)
dbJob.from_filenames(list_filenames=args.filenames,
virtual_sensor_id=virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref,
comment='')
_run_job(dbJob)
# run the job
PC.run_all_processors()
def run_from_constraints(args):
# create a new job from constraints
# TODO
raise NotImplementedError
def _run_job(dbJob, parallelization_level='scenes'):
# type: (GMS_JOB, str) -> None
"""
:param dbJob:
:return:
"""
dbJob.create()
jobid = dbJob.id
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')
warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.") # TODO
PC = process_controller(jobid, parallelization_level=parallelization_level)
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
......@@ -189,6 +202,10 @@ if __name__=='__main__':
description='Run a GeoMultiSens job for a given list of entity IDs.',
help="Run a GeoMultiSens job for a given list of entity IDs (Sub-Parser).")
parser_filenames = subparsers.add_parser('filenames',
description='Run a GeoMultiSens job for a given list of filenames of downloaded satellite image archives.',
help="Run a GeoMultiSens job for a given list of filenames of downloaded satellite image archives (Sub-Parser).")
parser_constraints = subparsers.add_parser('constraints',
description='Run a GeoMultiSens job matching the given constraints.',
help="Run a GeoMultiSens job matching the given constraints (Sub-Parser).")
......@@ -200,24 +217,30 @@ if __name__=='__main__':
## ADD ARGUMENTS
if sys.argv[1]=='jobid':
# add arguments to parse_jobid
# add arguments to parser_jobid
jid_p = parser_jobid.add_argument
jid_p('jobid', type=int,
help='job ID of an already created GeoMultiSens job (must be present in the jobs table of the database)')
elif sys.argv[1]=='sceneids':
# add arguments to parse_sceneids
# add arguments to parser_sceneids
sid_p = parser_sceneids.add_argument
sid_p('sceneids', nargs='+', type=int,
help="list of scene IDs corresponding to valid records within the 'scenes' table of the database")
elif sys.argv[1]=='entityids':
# add arguments to parse_entityids
# add arguments to parser_entityids
eid_p = parser_entityids.add_argument
eid_p('entityids', nargs='+', type=str,
help="list of entity IDs corresponding to valid records within the 'scenes' table of the database")
# FIXME satellite and sensor are required
elif sys.argv[1]=='filenames':
# add arguments to parser_filenames
eid_p = parser_filenames.add_argument
eid_p('filenames', nargs='+', type=str,
help="list of filenames of satellite image archives corresponding to valid records within the 'scenes' "
"table of the database")
elif sys.argv[1]=='constraints':
# add arguments to parse_constraints
......@@ -252,6 +275,7 @@ if __name__=='__main__':
parser_jobid.set_defaults(func=run_from_jobid)
parser_sceneids.set_defaults(func=run_from_sceneids)
parser_entityids.set_defaults(func=run_from_entityids)
parser_filenames.set_defaults(func=run_from_filenames)
parser_constraints.set_defaults(func=run_from_constraints)
......
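Taken together, the subcommand wiring above permits calls like these (a hypothetical session; the script name, job ID and scene/entity identifiers are purely illustrative):

    python run_gms.py jobid 123456
    python run_gms.py sceneids 26186196 26186197
    python run_gms.py entityids LC81940242015091LGN00
    python run_gms.py filenames LC81940242015091LGN00.tar.gz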
......@@ -2,8 +2,8 @@
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.2.0'
__versionalias__ = '20170530.01'
__version__ = '0.3.4'
__versionalias__ = '20170612.01'
from . import algorithms
......
......@@ -52,10 +52,10 @@ from shapely.geometry import MultiPoint, Polygon
from shapely.ops import cascaded_union
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_grid import snap_bounds_to_pixGrid
from py_tools_ds.ptds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_to_utm, mapXY2imXY, imXY2mapXY
from py_tools_ds.ptds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
from py_tools_ds.ptds.geo.raster.reproject import warp_ndarray
from py_tools_ds.geo.coord_grid import snap_bounds_to_pixGrid
from py_tools_ds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_to_utm, mapXY2imXY, imXY2mapXY
from py_tools_ds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
from py_tools_ds.geo.raster.reproject import warp_ndarray
from ..config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
......
......@@ -18,10 +18,10 @@ except ImportError:
SD = None
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.ptds.geo.projection import EPSG2WKT
from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.geo.coord_trafo import pixelToLatLon
from py_tools_ds.geo.map_info import mapinfo2geotransform
from py_tools_ds.geo.projection import EPSG2WKT
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
......
......@@ -2,7 +2,7 @@
###############################################################################
#
# Level 1B Processor:
#
#
# Performed operations:
# Generation of RPCs for later Orthorectification:
# - for satellite data
......@@ -27,13 +27,13 @@ import numpy as np
from geopandas import GeoDataFrame
from shapely.geometry import box
from CoReg_Sat import COREG, DESHIFTER
from CoReg_Sat import COREG, DESHIFTER # FIXME replace by arosics
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG
from py_tools_ds.ptds.geo.vector.topology import get_overlap_polygon
from py_tools_ds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
from py_tools_ds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG
from py_tools_ds.geo.vector.topology import get_overlap_polygon
from ..config import GMS_config as CFG
from .L1A_P import L1A_object
......
......@@ -24,7 +24,7 @@ except ImportError:
import osr
from geoarray import GeoArray
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
......
......@@ -14,7 +14,7 @@ import warnings
import numpy as np
from geoarray import GeoArray
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from .L1C_P import L1C_object
......
# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'
class FMask_Runner(object):
def __init__(self):
# TODO provide options of fmask_usgsLandsatStacked argparser
pass
def create_thermal_stack(self):
pass
def create_toaRef_stack(self):
pass
def get_path_metaFile(self):
pass
def create_anglesFile(self):
pass
def create_saturationmask(self):
pass
def calc_cloudMask(self):
# console call like
# fmask_usgsLandsatStacked.py -t thermal.img -a toa.img -m *_MTL.txt -z angles.img -s saturationmask.img -o cloud.img
# TODO use VRT for toa and thermal stacks?
pass
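A minimal sketch of what calc_cloudMask could do once implemented, assuming the fmask_usgsLandsatStacked.py CLI from python-fmask is on PATH and accepts exactly the flags from the console call above; all file paths and the function name are illustrative:

    import subprocess

    def calc_cloudMask_sketch(thermal='thermal.img', toa='toa.img', mtl='LC8_MTL.txt',
                              angles='angles.img', saturation='saturationmask.img',
                              out='cloud.img'):
        # shell out to python-fmask with the pre-built stacks (cf. comment above)
        subprocess.check_call(['fmask_usgsLandsatStacked.py',
                               '-t', thermal, '-a', toa, '-m', mtl,
                               '-z', angles, '-s', saturation, '-o', out])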
......@@ -2,13 +2,13 @@ from copy import copy
from random import sample
from operator import itemgetter
import random
from inspect import getargspec
from inspect import getargspec # FIXME
import numpy as np
from scipy.ndimage.filters import gaussian_filter
from sklearn.ensemble import AdaBoostClassifier
from sklearn.feature_selection import chi2
from sklearn.cross_validation import train_test_split
from sklearn.cross_validation import train_test_split # FIXME
__author__ = "Andre Hollstein"
......
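The two FIXMEs above flag deprecated imports; a sketch of the drop-in replacements, assuming Python >= 3.3 and scikit-learn >= 0.18 are available:

    from inspect import getfullargspec                    # replaces the deprecated getargspec
    from sklearn.model_selection import train_test_split  # sklearn.cross_validation is deprecated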
......@@ -39,8 +39,8 @@ from ..misc.database_tools import get_overlapping_scenes_from_postgreSQLdb
from ..misc.path_generator import path_generator, get_tempfile
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import transform_any_prj
from py_tools_ds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.geo.coord_trafo import transform_any_prj
......@@ -375,7 +375,7 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
# handle coordinate infos
tgt_corner_coord_lonlat = [transform_any_prj(prj, 4326, X,Y) for X,Y in cornerCoords_tgt]
#tgt_corner_coord_lonlat = [(-114, 52), (-117, 52), (-117, 50), (-114, 50)] # this is a test
#from py_tools_ds.ptds.geo.projection import EPSG2WKT
#from py_tools_ds.geo.projection import EPSG2WKT
#prj = EPSG2WKT(32612)
# handle coordinates crossing the 180 degrees meridian
......
......@@ -7,10 +7,10 @@ import re
import warnings
from datetime import datetime, timedelta
from shapely.geometry import Polygon
class SpatialIndexMediatorServer:
controller = 'index-mediator-server'
controller = 'index-mediator-server.sh'
def __init__(self, rootDir):
self.rootDir = rootDir
......@@ -79,7 +79,7 @@ class SpatialIndexMediatorServer:
os.chdir(curdir)
if exitcode:
raise err
raise Exception(err)
else:
if output:
return output.decode('UTF-8')
......@@ -89,7 +89,7 @@ class SpatialIndexMediatorServer:
class SpatialIndexMediator:
FULL_SCENE_QUERY_MSG = 3
""" message value for a full scene query message """
......@@ -105,29 +105,29 @@ class SpatialIndexMediator:
self.host = host
self.port = port
self.timeout = timeout
@staticmethod
def __deriveSeasonCode(refDate, maxDaysDelta):
if refDate is None or maxDaysDelta is None:
return 0
delta = timedelta(days=maxDaysDelta)
startMonth = (refDate - delta).month - 1
endMonth = (refDate + delta).month - 1
seasonCode = 0
for x in range(12):
month = (startMonth + x) % 12
seasonCode |= 1 << month
if month == endMonth: break
return seasonCode
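A worked example of the season bitmask (one bit per zero-based month), following the datetime arithmetic above:

    # refDate = 2017-03-15, maxDaysDelta = 30:
    #   startMonth = (2017-02-13).month - 1 = 1   (February)
    #   endMonth   = (2017-04-14).month - 1 = 3   (April)
    #   the loop sets bits 1, 2 and 3  ->  seasonCode = 0b1110 = 14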
def getFullSceneDataForDataset(self, envelope, timeStart, timeEnd, minCloudCover, maxCloudCover, datasetid, refDate=None, maxDaysDelta=None):
"""
Query the spatial index with the given parameters in order to get a list of matching scenes intersecting the
......@@ -145,65 +145,65 @@ class SpatialIndexMediator:
:param maxDaysDelta: maximum allowed number of days the target scenes might be apart from the given refDate
[optional]
"""
filterTimerange = not (refDate is None or maxDaysDelta is None)
# prepare buffer
# numbytes = 1 + 4*8 + 8 + 8 + 4 + 1 + 1 + 2 + 2
b = bytearray(59)
# pack msg header and envelope
offset = 0
struct.pack_into('> b 4d', b, offset, self.FULL_SCENE_QUERY_MSG, *envelope)
offset += 33
# pack the dates
struct.pack_into('> h 6b', b, offset, timeStart.year, timeStart.month, timeStart.day, timeStart.hour, timeStart.minute, timeStart.second, 0)
offset += 8
struct.pack_into('> h 6b', b, offset, timeEnd.year, timeEnd.month, timeEnd.day, timeEnd.hour, timeEnd.minute, timeEnd.second, 0)
offset += 8
# derive season code
seasonCode = self.__deriveSeasonCode(refDate, maxDaysDelta)
# pack the rest
# TODO: send unconstrained min/max proclevel values
struct.pack_into('> i 2b h 2b', b, offset, seasonCode, minCloudCover, maxCloudCover, datasetid, 0, 127)
# get connection and lock the channel
con = Connection(self.host, self.port, self.timeout)
# send the buffer
con.socket.sendall(b)
# receive the response
# read first byte, indicating the response type, must match full scene query msg
if con.recvByte() != self.FULL_SCENE_QUERY_MSG:
raise EnvironmentError('Bad Protocol')
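For reference, the 59-byte request assembled above decodes as follows (offsets derived from the pack formats; big-endian throughout):

    # offset  size  content                                      format
    #   0      1    FULL_SCENE_QUERY_MSG (= 3)                   'b'
    #   1     32    envelope (4 doubles)                         '4d'
    #  33      8    timeStart: year, month, day, h, min, s, pad  'h 6b'
    #  41      8    timeEnd: same layout                         'h 6b'
    #  49      4    seasonCode                                   'i'
    #  53      2    minCloudCover, maxCloudCover                 '2b'
    #  55      2    datasetid                                    'h'
    #  57      2    min/max processing level (0, 127)            '2b'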