#!/usr/bin/env python
# -*- coding: utf-8 -*-

import requests
import warnings
import traceback
import argparse
import json
import time
import logging
import tempfile
import shutil
import os
import collections
import re

import gdal
import numpy as np
import netCDF4 as nc4
from glob import glob
from os.path import isfile
from os.path import join
from os.path import expanduser
from scipy.ndimage.interpolation import zoom
from skimage.exposure import rescale_intensity, adjust_gamma
from imageio import imwrite
from subprocess import Popen, PIPE

band_settings = {"realistic": ("B04", "B03", "B02"),
                 "nice_looking": ("B11", "B08", "B03"),
                 "vegetation": ("B8A", "B04", "B03"),
                 "healthy_vegetation_urban": ("B8A", "B11", "B02"),
                 "snow": ("B08", "B11", "B04"),
                 "agriculture": ("B11", "B08", "B02")}

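# The presets above can be selected via the rgb_bands_selection option (-q on the
# command line); each tuple is the (R, G, B) band assignment handed to mk_rgb().
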
warnings.filterwarnings("ignore")

def get_defaults():
    """
    Default values for the command line options / client() keyword arguments.
    :return: dict
    """
    return {
        "out_dir": "",
        "out_prefix": "",
        "out_mode": "json",
        "lat_ll": 0.0,
        "lon_ll": 0.0,
        "lat_ur": 0.0,
        "lon_ur": 0.0,
        "sensor": "S2A",
        "level": "L2A",
        "version": "0.13",
        "bands": "",
        "start_date": "",
        "end_date": "",
        "coreg": False,
        "max_cloudy": "0.5",
        "minimum_fill": "0.8",
        "utm_zone": "",
        "stack_resolution": "10",
        "rgb_extension": "jpg",
        "rgb_bands_selection": "realistic",
        "merge_tifs": False,
        "merge_tile": None,
        "onlytime": False,
        "timeout": None
    }

class Gts2Request(dict):

    def check_geo(self, ll=(), ur=()):
        """
        Check that the requested area is not too large.
        :param ll: (lon, lat) tuple of the lower left corner
        :param ur: (lon, lat) tuple of the upper right corner
        :return:
        """
        thres = 0.4
        latrange = ur[1]-ll[1]
        lonrange = ur[0]-ll[0]
        if (lonrange >= thres) | (latrange >= thres):
            raise ValueError("Your requested area is too large: ({lon:7.5f}°x{lat:7.5f}° exceeds {thres}°x{thres}°)"
                             "".format(lat=latrange, lon=lonrange, thres=thres))

    def __init__(self, opts, logger=None):

        self.check_geo(ll=opts["ll"], ur=opts["ur"])

        address = "https://rz-vm175.gfz-potsdam.de:{port}/AC".format(port=opts["auth"]["port"])

        # test parameters such that api string formatting won't fail
        assert len(opts["ur"]) == 2
        assert len(opts["ll"]) == 2
        assert opts["sensor"] in ["S2A", "S2B", "S2all"]
        assert opts["level"] in ["L1C", "L2A"]

        # onlytime=False seems to act as onlytime=True, therefore the parameter has to be omitted completely if False
        onlytime = "&onlytime=True" if opts["onlytime"] else ""

        self.api_fmt = "{address}/{bands}/{date_from}_{date_to}/{ll_lon:.5f}_{ur_lon:.5f}_{ll_lat:.5f}_{ur_lat:.5f}" \
                       "?minimum_fill={minimum_fill}&sensor={sensor}&level={level}&version={version}&suffix={suffix}" \
                       "&max_cloudy={max_cloudy}&utm_zone={utm_zone}" + onlytime

        # construct api call
        self.api_call = self.api_fmt.format(address=address, bands=opts["bands"], date_from=opts["date_from"],
                                            date_to=opts["date_to"], suffix=opts["suffix"], level=opts["level"],
                                            minimum_fill=opts["minimum_fill"], sensor=opts["sensor"],
                                            version=opts["version"], max_cloudy=opts["max_cloudy"],
                                            ll_lon=opts["ll"][0], ll_lat=opts["ll"][1],
                                            ur_lon=opts["ur"][0], ur_lat=opts["ur"][1], utm_zone=opts["utm_zone"])

        logger.info(">>>>>> API-call: %s " % self.api_call)
        # get data, update dict from json

        result = requests.get(self.api_call, verify=False, auth=opts["auth"]["auth"], timeout=opts["timeout"])

        status_code = result.status_code
        status = {"http_code": status_code}

        # if api responds with http error it can't create json
        if status_code < 400:
            self.update(result.json())
        self.update(status)
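

# For illustration, a fully formatted API call produced by api_fmt above looks like
# the following (placeholder coordinates, dates and bands; wrapped here for readability):
#   https://rz-vm175.gfz-potsdam.de:443/AC/B04_B08/20170601_20170630/13.00000_13.20000_52.30000_52.50000
#       ?minimum_fill=0.8&sensor=S2A&level=L2A&version=0.15&suffix=&max_cloudy=0.5&utm_zone=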


def __spawn(cmd, cwd):
    """
    Runs shell command (cmd) in the working directory (cwd).
    :param cmd: string
    :param cwd: string
    :return:
    """
    pp = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, cwd=cwd)
    stdout, stderr = pp.communicate()
    if pp.returncode != 0:
        raise ValueError(
            "cmd:%s,rc:%s,stderr:%s,stdout:%s" % (cmd, str(pp.returncode), str(stderr), str(stdout)))


def __merge_tifs_same_timestamp(tifs, outpath, target_tile=""):
    """
    Transforms all tifs of one timestep to the same UTM zone (taken from the target tile) and merges them together.
    :param outpath: string
    :param tifs: List of strings or None
    :param target_tile: 5-element string
    :return:
    """
    basepath = os.path.dirname(tifs[0])
    utm = target_tile[0:2]
    base_split = os.path.basename(tifs[0]).split("_")
    merge_file = "{path}/{pref}_{tile}_{suff}".format(path=outpath, pref="_".join(base_split[0:4]), tile=target_tile,
                                                      suff="_".join(base_split[5::]))

    with tempfile.TemporaryDirectory(dir=basepath) as tmp_dir:
        for fi in tifs:
            if target_tile not in fi:
                outfile = "{tmp_dir}/{orig_fn}_{tile}.tif".format(orig_fn=os.path.basename(fi.split(".")[0]),
                                                                  tile=target_tile, tmp_dir=tmp_dir)
                cmd = "gdalwarp -t_srs '+proj=utm +zone={utm} +datum=WGS84' -overwrite {infile} {outfile} ".format(
                    infile=fi, outfile=outfile, utm=utm)
                __spawn(cmd, tmp_dir)

            else:
                shutil.copy(fi, tmp_dir)

        merge_list = " ".join(glob(os.path.join(tmp_dir, "*.tif")))
        cmd = "gdal_merge.py -o {merge_file} {merge_list}".format(merge_file=merge_file, merge_list=merge_list)
        __spawn(cmd, tmp_dir)

    return merge_file


def __find_double_timestamp(files):
    """
    Finds filenames with duplicate time stamps (but different MGRS tiles) in a list of files.
    :param files: list of strings
    :return: dict
    """

    basepath = os.path.dirname(files[0])
    prefix = "_".join(os.path.basename(files[0]).split("_")[0:3])

    bands = list(set(["_".join(os.path.basename(fa).split("_")[5::]).split(".")[0] for fa in files]))

    double_dict = {}
    for bi in bands:
        band_files = [s for s in files if "_{band}.".format(band=bi) in s]
        dates = [os.path.basename(bf).split("_")[3] for bf in band_files]
        double_dates = ([item for item, count in collections.Counter(dates).items() if count > 1])

        double_files = {}
        for di in double_dates:
            double_files[di] = glob(os.path.join(basepath, "{pref}*_{date}*{band}.*".format(
                band=bi, date=di, pref=prefix)))

        double_dict[bi] = double_files

    return double_dict


def merge_tiles(tif_list, out_mode="single", target_tile=None):
    """
    Merges tifs of the same timestep that are divided into two different MGRS tiles.
    :param tif_list: list of strings
    :param out_mode: string
    :param target_tile: 5-element string or None
    :return:
    """

    stack = True if out_mode == "stack" else False

    double_dict = __find_double_timestamp(tif_list)
    for bi in double_dict.keys():
        for dates, tifs_one_date in double_dict[bi].items():
            basepath = os.path.dirname(tifs_one_date[0])
            if len(tifs_one_date) > 1:

                if target_tile is None:
                    tiles = [re.search('[0-9]{2,2}[A-Z]{3,3}', ti).group(0).replace("_", "") for ti in tifs_one_date]
                    target_tile = list(set(tiles))[0]

                if stack is False:
                    with tempfile.TemporaryDirectory(dir=basepath) as parent_dir:
                        _ = [shutil.move(fm, parent_dir) for fm in tifs_one_date]

                        tifs_all = glob(os.path.join(parent_dir, "*"))
                        if len(tifs_all) == 0:
                            raise FileNotFoundError("No files found in {dir}".format(dir=parent_dir))

                        bands = list(set(["_".join(os.path.basename(fa).split("_")[6::]).strip(".tif") for fa in tifs_all]))
                        for bi in bands:
                            tifs = glob(os.path.join(parent_dir, "S2*{band}*.tif".format(band=bi)))
                            if len(tifs) != 0:
                                __merge_tifs_same_timestamp(tifs, basepath, target_tile=target_tile)

                else:
                    with tempfile.TemporaryDirectory(dir=basepath) as parent_dir:
                        _ = [shutil.move(fm, parent_dir) for fm in tifs_one_date]
                        tifs_all = glob(os.path.join(parent_dir, "*"))

                        if len(tifs_all) == 0:
                            raise FileNotFoundError("No files found in {dir}".format(dir=parent_dir))

                        __merge_tifs_same_timestamp(tifs_all, basepath, target_tile=target_tile)

            else:
                raise ValueError("List of tifs to merge is too short")


def mean_rgb_comb(infiles, rgb_comb=("B04", "B03", "B02"), bad_data_value=np.NaN):
    """
    Determines mean pixel value for the specified band combination for each input image.

    :param infiles: list of input file paths
    :param rgb_comb: band combination(s) to use, use band_settings
    :param bad_data_value: bad data value in input images, default: NaN
    :return: list containing mean pixel value for the specified band combination for each input image
    """
    dicts = []
    mean_RGB = {}

    for fn in infiles:
        mean_val = []
        ds = gdal.Open(fn, gdal.GA_ReadOnly)
        for ii, band in enumerate(rgb_comb):
            data = ds.GetRasterBand(ii+1).ReadAsArray() / 10000.0
            if bad_data_value is np.NAN:
                bf = data[:, :][np.isfinite(data[:, :])]
            else:
                bf = data[:, :][data[:, :] == bad_data_value]

            # Only consider images with less than 20% pixels with value 0
            zero_per = (np.count_nonzero(bf == 0) / (data.shape[0] * data.shape[1])) * 100
            if zero_per < 20:
                band_mean = np.mean(bf)
                mean_val.append(band_mean)
            else:
                band_mean = 1
                mean_val.append(band_mean)

        # Append pathname and mean pixel value to dictionary
        mean_RGB[fn] = np.mean(mean_val)
        dicts.append(mean_RGB)
    return dicts


def get_lim(limfn, rgb_comb=("B04", "B03", "B02"), bad_data_value=np.NaN):
    """
    Determines standard stretching limits based on an optimized linear histogram stretching applied to
    the most cloud-free image of an image sequence.

    :param limfn: path to the most cloud-free image of a sequence
    :param rgb_comb: band combination(s) to determine stretching limits for, use band_settings
    :param bad_data_value: bad data value in input images, default: NaN
    :return: list containing stretching limits for rgb_comb
    """
    bins = np.linspace(0.0, 1.0, 255)
    band_lim = []

    ds = gdal.Open(limfn, gdal.GA_ReadOnly)
    for ii, band in enumerate(rgb_comb):

        data = ds.GetRasterBand(ii + 1).ReadAsArray() / 10000.0

        if bad_data_value is np.NAN:
            bf = data[:, :][np.isfinite(data[:, :])]
        else:
            bf = data[:, :][data[:, :] == bad_data_value]

        # Calculate stretching limits (optimized linear stretch)
        hh, xx = np.histogram(bf, bins=bins, normed=False)
        rel_cum_hist = np.cumsum(hh) / np.max(np.cumsum(hh))
        bp = (np.where(rel_cum_hist > 0.025)[0][0]) / 255
        wp = (np.where(rel_cum_hist > 0.99)[0][0]) / 255
        b = bp - 0.1 * (wp - bp)
        if wp > 0.65:
            w = wp
        elif wp < 0.15:
            w = wp + 2 * (wp - bp)
        else:
            w = wp + 0.5 * (wp - bp)
        lim = (b, w)
        band_lim.append(lim)
    return band_lim


def mk_rgb(basedir, outdir, rgb_comb=("B04", "B03", "B02"), rgb_gamma=(1.0, 1.0, 1.0), extension="jpg",
           resample_order=1, rgb_type=np.uint8, bad_data_value=np.NaN, logger=None):
    """
    Generation of RGB images from Sentinel-2 data, either for a single image or for a sequence of images of the
    same tile but different dates.

    :param extension: file extension of the output RGB images (e.g. 'jpg')
    :param basedir: directory of a single image or a series of images
    :param outdir: output directory
    :param rgb_comb: band combination(s) for rgb image generation, use band_settings
    :param rgb_gamma: gamma values for each band, default: (1.0, 1.0, 1.0)
    :param resample_order: order of the spline interpolation, has to be in the range 0-5, default: 1
    :param rgb_type: data type of RGB image(s), default: uint8
    :param bad_data_value: bad data value in input images, default: NaN
    :return: list of paths of the written RGB images
    """

    fnout_list = []
    infiles = glob(os.path.join(basedir, "*.tif"))
    if len(infiles) == 0:
        raise FileNotFoundError("Could not find any stacked tif files in {dir}".format(dir=basedir))

    # Get stretching limits
    dicts = mean_rgb_comb(infiles, rgb_comb, bad_data_value)
    super_dict = {}
    for dd in dicts:
        for k, v in dd.items():
            super_dict.setdefault(k, []).append(v)
    limdir = min(super_dict, key=super_dict.get)
    limits = get_lim(limdir, rgb_comb, bad_data_value)

    # Processing images
    for fin in infiles:

        # Output directory and filename
        split = tuple(os.path.basename(fin).split("_")[0:5])
        fn_out = join(outdir, "{pref}_RGB_{bands}.{ext}".format(pref="_".join(split), bands="_".join(rgb_comb),
                                                                ext=extension))
        if isfile(fn_out) is False:
            # Create output array
            ds = gdal.Open(fin, gdal.GA_ReadOnly)
            if ds is None:
                raise ValueError("Can not load {fn}".format(fn=fin))

            sdata = ds.GetRasterBand(1)
            output_shape = [sdata.YSize, sdata.XSize]

            S2_rgb = np.zeros(output_shape + [len(rgb_comb), ], dtype=rgb_type)

            for i_rgb, (band, gamma) in enumerate(zip(rgb_comb, rgb_gamma)):

                data = ds.GetRasterBand(i_rgb + 1).ReadAsArray() / 10000.0

                # Determine stretching limits
                lim = limits[rgb_comb.index(band)]

                # Calculate and apply zoom factor to input image
                zoom_factor = np.array(output_shape) / np.array(data[:, :].shape)
                zm = np.nan_to_num(np.array(data[:, :], dtype=np.float32))
                if (zoom_factor != [1.0, 1.0]).all():
                    zm = zoom(input=zm, zoom=zoom_factor, order=resample_order)

                # Rescale intensity to range (0.0, 255.0)
                bf = rescale_intensity(image=zm, in_range=lim, out_range=(0.0, 255.0))

                if gamma != 0.0:
                    S2_rgb[:, :, i_rgb] = np.array(adjust_gamma(bf, gamma), dtype=rgb_type)
                else:
                    S2_rgb[:, :, i_rgb] = np.array(bf, dtype=rgb_type)

                del data
            del ds

            imwrite(fn_out, S2_rgb)
            fnout_list.append(fn_out)

    return fnout_list


def json_to_tiff(out_mode, api_result, only_tile, outpath, out_prefix, wl, level, stack_resolution, bands, logger=None):
    """
    Get data from the json dict and save it as single tif files (one per band and date), OR
    save all requested bands plus the cloud mask as one tiff file per date and tile.
    :type out_mode: string
    :type api_result: dict (parsed json response)
    :type only_tile: string
    :type outpath: string
    :type out_prefix: string
    :type wl: dict
    :type level: string
    :type stack_resolution: string
    :type bands: string
    :type logger: logger
    :return: List of written tifs (list of strings)
    """

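    # The tifs written below are named {sensor}_{level}_{out_prefix}_{date}_{tile}_{band}_{res}m.tif
    # (e.g., with a hypothetical prefix: S2A_L2A_myprefix_<date>_33UUV_B04_10m.tif); the merge helpers
    # above rely on this underscore-separated naming scheme.
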
    tif_list = []

    if out_mode == "single":
        for tile_key in api_result['Results'].keys():
            if not ((only_tile != "") & (tile_key != only_tile)):
                for bi in range(len(api_result['Request']['bands'])):
                    band = api_result['Request']['bands'][bi]
                    res = api_result['Request']['resolution'][bi]
                    band_key = "%s_%sm" % (band, res)
                    if band_key not in api_result['Results'][tile_key].keys():
                        raise Exception("ERROR: No data was returned from API.")
                    count_dates = len(api_result['Results'][tile_key][band_key]['data'])
                    for ti in range(count_dates):
                        data = api_result['Results'][tile_key][band_key]['data'][ti]
                        ac_date = api_result['Results'][tile_key][band_key]['time'][ti][:10]  # only date without time
                        sensor = api_result['Results'][tile_key][band_key]['sensor'][ti]
                        x_min = api_result['Results'][tile_key][band_key]['mapinfo']['utm_coord_ul_x']
                        y_max = api_result['Results'][tile_key][band_key]['mapinfo']['utm_coord_ul_y']
                        geo_proj = api_result['Results'][tile_key][band_key]['mapinfo']['geo_projection']
                        arr = np.asarray(data)

                        # save each requested band plus cloudmask as single tiff file per date and tile
                        if out_mode == "single":
                            rows, cols = arr.shape
                            geotrans = (x_min, res, 0, y_max, 0, -res)
                            driver = gdal.GetDriverByName("GTiff")

                            # create tif file for band
                            outfile = "{path}/{sensor}_{level}_{pref}_{date}_{tile}_{band}_{res}m.tif".format(
                                path=outpath, pref=out_prefix, date=ac_date, tile=tile_key, band=band, level=level,
                                res=res, sensor=sensor)
                            img = driver.Create(outfile, cols, rows, 1, gdal.GDT_Int32)
                            img.SetGeoTransform(geotrans)
                            img.SetProjection(geo_proj)
                            img.GetRasterBand(1).WriteArray(arr)
                            img.GetRasterBand(1).SetNoDataValue(api_result['Results'][tile_key][band_key]["fill_value"])
                            img.GetRasterBand(1).SetMetadataItem("Name", "Band" + str(band)[1:3])
                            img.GetRasterBand(1).SetMetadataItem("Central Wavelength", wl[str(band)])
                            img.GetRasterBand(1).SetMetadataItem("Acknowledgement",
                                                                 "The 'GFZ Time Series System for Sentinel-2' (GTS2) "
                                                                 "was financed by AgriCircle/ADAMA between 2016/06 and "
                                                                 "2017/12.")

                            img.FlushCache()
                            del img
                            tif_list.append(outfile)

                            # create tif file for clm
                            if level != "L1C":
                                clm = api_result['Metadata'][tile_key]['MSK_' + str(res) + 'm']['data'][ti]
                                clm_arr = np.asarray(clm)
                                clm_outfile = "{path}/{sensor}_{level}_{pref}_{date}_{tile}_MSK_{res}m.tif".format(
                                    path=outpath, pref=out_prefix, date=ac_date, tile=tile_key, band=band, level=level,
                                    res=res, sensor=sensor)
                                img = driver.Create(clm_outfile, cols, rows, 1, gdal.GDT_Int32)
                                img.SetGeoTransform(geotrans)
                                img.SetProjection(geo_proj)
                                img.GetRasterBand(1).WriteArray(clm_arr)
                                img.GetRasterBand(1).SetNoDataValue(api_result['Results'][tile_key][band_key]
                                                                    ["fill_value"])
                                img.GetRasterBand(1).SetMetadataItem("Name", "MSK")
                                img.GetRasterBand(1).SetMetadataItem("MSK_legend", str(api_result['Metadata']
                                                                                       ['MSK_legend']))
                                img.GetRasterBand(1).SetMetadataItem("Acknowledgement",
                                                                     "The 'GFZ Time Series System for Sentinel-2' "
                                                                     "(GTS2) was financed by AgriCircle/ADAMA between "
                                                                     "2016/06 and 2017/12.")
                                img.FlushCache()
                                del img
                                tif_list.append(clm_outfile)

    if out_mode == "stack":
        for tile_key in api_result['Results'].keys():
            # get the number of timesteps:
            band = api_result['Request']['bands'][0]
            res = api_result['Request']['resolution'][0]
            band_key = "%s_%sm" % (band, res)
            if band_key not in api_result['Results'][tile_key].keys():
                raise Exception("ERROR: No data was returned from API.")
            count_dates = len(api_result['Results'][tile_key][band_key]['data'])
            for ti in range(0, count_dates):
                zm_dict = {}

                # number of requested bands per date and tile
                count_bands = len(api_result['Request']['bands'])

                # define reference spatial resolution for bandstack
                find_ref_band_key = [key for key in api_result['Results'][tile_key].keys() if
                                     stack_resolution in key]

                if len(find_ref_band_key) == 0:
                    raise ValueError(
                        "You have chosen {res}m for stack resolution, but did not request at least one "
                        "band with spatial sampling of {res}m".format(res=stack_resolution))

                ref_band_key = find_ref_band_key[0]
                ref_data = api_result['Results'][tile_key][ref_band_key]['data'][ti]
                ref_data = np.asarray(ref_data)

                # get data from json dict, zoom and put in dict
                for bi in range(0, count_bands):
                    band = api_result['Request']['bands'][bi]
                    res = api_result['Request']['resolution'][bi]
                    band_key = "%s_%sm" % (band, res)
                    if band_key in api_result['Results'][tile_key].keys():
                        data = api_result['Results'][tile_key][band_key]['data'][ti]
                        data = np.asarray(data)

                        # resample array to reference spatial resolution
                        zoom_factor = np.array(ref_data.shape) / np.array(data.shape)
                        zm = np.nan_to_num(np.array(data, dtype=np.float32))
                        if (zoom_factor != [1.0, 1.0]).all():
                            zm = zoom(input=zm, zoom=zoom_factor, order=0)
                        zm_dict[band] = zm

                # preparation of output file:
                rows, cols = ref_data.shape
                ac_date = api_result['Results'][tile_key][ref_band_key]['time'][ti][:10]
                sensor = api_result['Results'][tile_key][band_key]['sensor'][ti]
                x_min = api_result['Results'][tile_key][ref_band_key]['mapinfo']['utm_coord_ul_x']
                y_max = api_result['Results'][tile_key][ref_band_key]['mapinfo']['utm_coord_ul_y']
                geo_proj = api_result['Results'][tile_key][band_key]['mapinfo']['geo_projection']
                geotrans = (x_min, int(stack_resolution), 0, y_max, 0, -int(stack_resolution))
                driver = gdal.GetDriverByName("GTiff")
                if level == "L1C":
                    outfile = "{path}/{sensor}_{level}_{pref}_{date}_{tile}_{band}_{res}m.tif".format(
                        path=outpath, pref=out_prefix, date=ac_date, tile=tile_key, band=bands, level=level,
                        res=stack_resolution, sensor=sensor)
                else:
                    outfile = "{path}/{sensor}_{level}_{pref}_{date}_{tile}_{band}_MSK_{res}m.tif".format(
                        path=outpath, pref=out_prefix, date=ac_date, tile=tile_key, band=bands, level=level,
                        res=stack_resolution, sensor=sensor)
                img = driver.Create(outfile, cols, rows, count_bands + 1, gdal.GDT_Int32)
                img.SetGeoTransform(geotrans)
                img.SetProjection(geo_proj)

                slice = 1
                for zi in bands.split("_"):
                    img.GetRasterBand(slice).WriteArray(zm_dict[zi])
                    img.GetRasterBand(slice).SetMetadataItem("Name", "Band" + zi[1:3])
                    img.GetRasterBand(slice).SetMetadataItem("Central Wavelength", wl[str(zi)])
                    img.GetRasterBand(slice).SetMetadataItem("Acknowledgement",
                                                             "The 'GFZ Time Series System for Sentinel-2' (GTS2) "
                                                             "was financed by AgriCircle/ADAMA between 2016/06 and "
                                                             "2017/12.")
                    slice += 1

                # This is done outside of the loop because a nodata value cannot be set separately for each band
                img.GetRasterBand(slice).SetNoDataValue(api_result['Results'][tile_key][band_key]["fill_value"])
                img.SetMetadata({'TIFFTAG_YRESOLUTION': '%s' % stack_resolution,
                                 'TIFFTAG_XRESOLUTION': '%s' % stack_resolution})

                if level != "L1C":
                    clm = api_result['Metadata'][tile_key]['MSK_%sm' % stack_resolution]['data'][ti]
                    clm_arr = np.asarray(clm)
                    img.GetRasterBand(slice).WriteArray(clm_arr)
                    img.GetRasterBand(slice).SetMetadataItem("Name", "MSK")
                    img.GetRasterBand(slice).SetMetadataItem("MSK_legend", str(api_result['Metadata']['MSK_legend']))

                img.FlushCache()
                del img

                tif_list.append(outfile)

    return tif_list


def json_to_netcdf(out_mode, api_result, outpath, out_prefix, geo_ll, geo_ur, start_date, end_date, bands, level, wl):
    """
    Get data from the json dict and save it as a netcdf file.
    :param out_mode: output mode; only "nc" triggers writing
    :param api_result: dict parsed from the API json response
    :param outpath: output directory
    :param out_prefix: prefix used in the output filename
    :param geo_ll: (lon, lat) tuple of the lower left corner
    :param geo_ur: (lon, lat) tuple of the upper right corner
    :param start_date: start date string (e.g. 20160701)
    :param end_date: end date string (e.g. 20160701)
    :param bands: underscore-separated list of bands (e.g. B02_B03)
    :param level: processing level ("L1C" or "L2A")
    :param wl: dict mapping band names to central wavelengths
    :return:
    """

    if out_mode == "nc":
        try:
            outfile = "{path}/S2_{level}_{pref}_{lla}_{llo}_{ura}_{uro}_{sdate}_{edate}_{band}.nc".format(
                path=outpath, pref=out_prefix, sdate=start_date, edate=end_date, band=bands, level=level,
                lla=geo_ll[0], llo=geo_ll[1], ura=geo_ur[0], uro=geo_ur[1])

            f = nc4.Dataset(outfile, 'w', format='NETCDF4')

            f.Conventions = "CF-1.7"
            f.title = "Sentinel-2 data from the GTS2 cloud"
            f.institution = "Helmholtz Centre Potsdam, GFZ German Research Centre For Geosciences"
            f.source = "https://gitext.gfz-potsdam.de/gts2/gts2_client/blob/master/gts2_client/gts2_client.py"
            f.references = "GTS2 (GFZ Time Series System for Sentinel-2)"
            f.history = "[]"
            f.comment = "Sentinel-2 data from the GTS2 (GFZ Time Series System for Sentinel-2) cloud for an area of " \
                        "interest, time of interest and wanted band combination"
            f.acknowledgement = api_result['Acknowledgement']
            for key in api_result.keys():
                if key == 'ControlValues':
                    key_group = f.createGroup(key)
                    [setattr(key_group, attr, api_result[key][attr]) for attr in api_result[key].keys()]
                if key == 'Request':
                    key_group = f.createGroup(key)
                    [setattr(key_group, attr.replace("-", "_"), str(api_result[key][attr]))
                     for attr in api_result[key].keys() if attr != 'bands' and attr != 'resolution']
                    key_group.setncattr_string('bands', api_result[key]['bands'])
                    key_group.setncattr_string('resolution', api_result[key]['resolution'])
                if key == 'RequestGeoInfo':
                    key_group = f.createGroup(key)
                    tile_group = key_group.createGroup('Tiles')
                    [setattr(tile_group, attr, api_result[key]['Tiles'][attr])
                     for attr in api_result[key]['Tiles'].keys()]
                if key == 'Results':
                    key_group = f.createGroup(key)
                    for tile in api_result[key].keys():
                        tile_group = key_group.createGroup(tile)
                        for band in api_result[key][tile].keys():
                            band_group = tile_group.createGroup(band)
                            band_group.setncattr_string('sensor', api_result[key][tile][band]['sensor'])
                            band_group.setncattr_string('time', api_result[key][tile][band]['time'])
                            for data_info in api_result[key][tile][band].keys():
                                if data_info == 'mapinfo':
                                    data_info_group = band_group.createGroup(data_info)
                                    [setattr(data_info_group, attr, api_result[key][tile][band][data_info][attr])
                                     for attr in api_result[key][tile][band][data_info].keys()]
                                if data_info == 'data':
                                    band_arr = np.asarray(api_result[key][tile][band][data_info])
                                    band_group.createDimension('x', band_arr.shape[0])
                                    band_group.createDimension('y', band_arr.shape[1])
                                    band_group.createDimension('t', band_arr.shape[2])
                                    fill = api_result[key][tile][band]['fill_value']
                                    data = band_group.createVariable('Data', 'i4', ('x', 'y', 't'), fill_value=fill)
                                    data.units = "None"
                                    if level == "L1C":
                                        data.long_name = "Top of Atmosphere Reflectance"
                                        data.standard_name = "toa_reflectance"
                                    else:
                                        data.long_name = "Surface Reflectance"
                                        data.standard_name = "boa_reflectance"
                                    masked_fill = np.ma.masked_equal(band_arr, fill)
                                    data.data_range = np.array((np.min(masked_fill), np.max(masked_fill)))
                                    data[:, :, :] = band_arr
                                    band_group.Band = band.split("_")[0].split("B")[-1]
                                    band_group.Resolution = band.split("_")[-1]
                                    band_group.Central_Wavelength = wl[str(band.split("_")[0])]
                if key == 'Metadata':
                    key_group = f.createGroup(key)
                    for tile in api_result[key].keys():
                        tile_group = key_group.createGroup(tile)
                        if tile == 'MSK_legend':
                            [setattr(tile_group, "Class_" + str(attr), api_result[key][tile][attr])
                             for attr in api_result[key][tile].keys()]
                        else:
                            for msk in api_result[key][tile].keys():
                                mask_group = tile_group.createGroup(msk)
                                mask_group.setncattr_string('time', api_result[key][tile][msk]['time'])
                                for data_info in api_result[key][tile][msk].keys():
                                    if data_info == 'cloud_stats':
                                        data_info_group = mask_group.createGroup(data_info)
                                        data_info_group.setncattr_string('cloud_frac',
                                                                         api_result[key][tile][msk][data_info]
                                                                         ['cloud_frac'])
                                    if data_info == 'mapinfo':
                                        data_info_group = mask_group.createGroup(data_info)
                                        [setattr(data_info_group, attr, api_result[key][tile][msk][data_info][attr])
                                         for attr in api_result[key][tile][msk][data_info].keys()]
                                    if data_info == 'stats':
                                        data_info_group = mask_group.createGroup(data_info)
                                        data_info_group.setncattr_string('cloud_frac',
                                                                         api_result[key][tile][msk][data_info]
                                                                         ['cloud_frac'])
                                        data_info_group.setncattr_string('data_frac',
                                                                         api_result[key][tile][msk][data_info]
                                                                         ['data_frac'])
                                        data_info_group.setncattr_string('time',
                                                                         api_result[key][tile][msk][data_info]['time'])
                                    if data_info == 'data':
                                        mask_arr = np.asarray(api_result[key][tile][msk][data_info])
                                        mask_group.createDimension('x', mask_arr.shape[0])
                                        mask_group.createDimension('y', mask_arr.shape[1])
                                        mask_group.createDimension('t', mask_arr.shape[2])
                                        fill = api_result[key][tile][msk]['fill_value']
                                        data = mask_group.createVariable('Data', 'i4', ('x', 'y', 't'), fill_value=fill)
                                        data.units = "None"
                                        data.long_name = "Mask classes"
                                        data.standard_name = "classes"
                                        masked_fill = np.ma.masked_equal(mask_arr, fill)
                                        data.data_range = np.array((np.min(masked_fill), np.max(masked_fill)))
                                        data[:, :, :] = mask_arr
            f.close()
        except Exception:
            raise Exception("Something went wrong while saving as netcdf. " + traceback.format_exc())


def __get_auth(logger=None):
    """
    Reads user, password and port from ~/credentials_gts2_client, or asks for them interactively.
    :param logger: logger
    :return: dict with "auth" ((user, password) tuple) and "port"
    """

    home_dir = expanduser("~")
    cred_file = "%s/credentials_gts2_client" % home_dir
    if len(glob(cred_file)) == 1:
        with open(cred_file, "r") as cf:
            cred_dict = json.load(cf)
        if "port" in cred_dict.keys():
            port = cred_dict["port"]
        else:
            port = 443
        auth = (cred_dict["user"], cred_dict["password"])
    else:
        logger.error("You did not save your credentials in %s." % cred_file)
        user = input("gts2_client - Please insert username: ")
        password = input("gts2_client - Please insert password: ")
        port = input("gts2_client - Please insert port for API (for default, type 'yes'): ")
        if port == "yes":
            port = "443"
        auth = (user, password)

    return {"auth": auth, "port": port}


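# __get_auth() above reads the credentials from ~/credentials_gts2_client. A minimal
# example of that file (placeholder values; "port" is optional and defaults to 443):
#
#   {
#       "user": "my-username",
#       "password": "my-password",
#       "port": 443
#   }

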
def client(outpath="", out_prefix="", out_mode="json", geo_ll=(), geo_ur=(), sensor="S2A", bands="", max_cloudy="0.5",
           level="L2A", start_date="", end_date="", version="0.15", suffix="", minimum_fill="0.8",
           only_tile="", stack_resolution="10", quiet=False, rgb_extension="jpg", rgb_bands_selection="realistic",
           merge_tifs=False, merge_tile=None, onlytime=False, timeout=None):
    """
    Downloads data via the GTS2 API and saves it in the desired file format (.json, .tif or .nc) or alternatively
    returns a python dictionary.
    :type merge_tile: string
    :type merge_tifs: boolean
    :type rgb_bands_selection: string
    :type rgb_extension: string
    :type quiet: boolean
    :type only_tile: string
    :type minimum_fill: string
    :type outpath: string
    :type out_prefix: string
    :type out_mode: string
    :type geo_ll: tuple of floats
    :type geo_ur: tuple of floats
    :type sensor: string
    :type bands: string
    :type max_cloudy: string
    :type level: string
    :type start_date: string
    :type end_date: string
    :type version: string
    :type suffix: string
    :type stack_resolution: string
    :type onlytime: boolean
    :type timeout: int or None
    :return:
    """

    stime = time.time()

    wl = {'B01': '443nm', 'B02': '490nm', 'B03': '560nm', 'B04': '665nm', 'B05': '705nm', 'B06': '740nm',
          'B07': '783nm', 'B08': '842nm', 'B8A': '865nm', 'B09': '940nm', 'B10': '1375nm', 'B11': '1610nm',
          'B12': '2190nm'}

    if quiet is True:
        loggerlevel = logging.ERROR
    else:
        loggerlevel = logging.INFO
    logging.basicConfig(level=loggerlevel, format='# %(name)-15s %(message)s')
    logger = logging.getLogger(name="gts2_client")

    if "_" in out_prefix:
        raise ValueError("out_prefix contains '_'. Please use a different character, e.g. '-'.")

    valid_stack_resolution = ["10", "20", ""]
    if stack_resolution not in valid_stack_resolution:
        raise AssertionError("Stack resolution is not in {}".format(valid_stack_resolution))

    if out_mode == "stack" and "B01" in bands:
        raise AssertionError("Band 1 (B01) can not be stacked. Please request separately.")

    if out_mode != "rgb" and bands == "":
        raise AssertionError("Please provide at least one band.")

    if onlytime and out_mode not in ["json", "python"]:
        raise AssertionError("Invalid output mode. If onlytime==True, choose either 'python' or 'json'")

    valid_out_modes = ["json", "nc", "single", "stack", "python", "rgb"]
    if out_mode not in valid_out_modes:
        raise AssertionError("Invalid output mode. Choose from {list}".format(list=", ".join(valid_out_modes)))

    auth = __get_auth(logger=logger)

    if out_mode == "rgb":
        band_setting = band_settings[rgb_bands_selection]
        bands = "_".join(band_setting)
    else:
        band_setting = []

    # options
    opts = {"ll": geo_ll, "ur": geo_ur, "sensor": sensor, "bands": bands,
            "max_cloudy": max_cloudy, "auth": auth, "level": level, "date_from": start_date, "date_to": end_date,
            "version": version, "suffix": suffix, "minimum_fill": minimum_fill, "utm_zone": only_tile, "logger": logger,
            "onlytime": onlytime, "timeout": timeout}

    # actual API request
    logger.info("Requesting data from the GTS2 server ...")
    a_stime = time.time()
    api_result = Gts2Request(opts, logger=logger)
    a_runtime = time.time() - a_stime
    logger.info(">>>>>> Runtime of API: %7.2f minutes" % (a_runtime / 60.))

    if "ControlValues" in api_result and api_result["ControlValues"]["API_status"] == 1:
        logger.error("##################")
        logger.error(str(api_result["ControlValues"]["API_message"]))
        raise ChildProcessError("Something went wrong on GTS2 Server (http_code={code}).".format(
            code=api_result["http_code"]))
    if api_result["http_code"] >= 400:
        logger.error("##################")
        raise ChildProcessError("Malformed API call or server problem (http_code={code}).".format(
            code=api_result["http_code"]))

    if only_tile != "":
        requ_tiles = api_result['Results'].keys()
        if only_tile not in requ_tiles:
            raise ValueError("Tile %s not available in results." % only_tile)

    if out_mode == "json":
        logger.info("Saving data to json file ...", )
        json_outfile = "{path}/S2_{level}_{pref}_{lla}_{llo}_{ura}_{uro}_{sdate}_{edate}_{band}{onlytime}.json".format(
            path=outpath, pref=out_prefix, sdate=start_date, edate=end_date, band=bands, level=level,
            lla=geo_ll[0], llo=geo_ll[1], ura=geo_ur[0], uro=geo_ur[1], onlytime="_onlytime" if onlytime else "")

        with open(json_outfile, 'w') as f:
            json.dump(api_result, f, indent=4)

    elif out_mode == "nc":
        logger.info("Converting data to netcdf-file ...")
        json_to_netcdf(out_mode, api_result, outpath, out_prefix, geo_ll, geo_ur, start_date, end_date, bands, level,
                       wl)

    elif out_mode == "single" or out_mode == "stack":
        logger.info("Converting data to %s tif-files ..." % out_mode, )
        tif_list = json_to_tiff(out_mode, api_result, only_tile, outpath, out_prefix, wl, level, stack_resolution,
                                bands, logger=logger)
        if merge_tifs is True:
            merge_tiles(tif_list, out_mode=out_mode, target_tile=merge_tile)

    elif out_mode == "rgb":
        logger.info("Converting data to %s files ..." % out_mode, )
        with tempfile.TemporaryDirectory(dir=outpath) as tmp_dir:

            tif_list = json_to_tiff("stack", api_result, only_tile, tmp_dir, out_prefix, wl, level, stack_resolution,
                                    bands, logger=logger)

            if merge_tifs is True:
                merge_tiles(tif_list, out_mode="stack", target_tile=merge_tile)

            mk_rgb(tmp_dir, tmp_dir, rgb_comb=band_setting, extension=rgb_extension, logger=logger)
            rgb_files = glob(os.path.join(tmp_dir, "*RGB*.{ext}".format(ext=rgb_extension)))
            _ = [shutil.move(fi, os.path.join(outpath, os.path.basename(fi))) for fi in rgb_files]

    elif out_mode == "python":
        logger.info("Returning python dictionary ...")
        return api_result

    runtime = time.time() - stime
    logger.info("Total runtime: %7.2f minutes" % (runtime / 60.))
    logger.info("....DONE")
    return

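# Minimal usage sketch for calling client() directly from Python (placeholder
# coordinates, dates and bands); with out_mode="python" the parsed API response
# is returned as a dictionary:
#
#   result = client(outpath=".", out_mode="python", geo_ll=(13.0, 52.3), geo_ur=(13.2, 52.5),
#                   bands="B04_B08", start_date="20170601", end_date="20170630")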

def str2bool(v):
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')


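# Example command line (placeholder output directory, coordinates and dates); the
# flags map to the arguments defined below:
#
#   python gts2_client.py -o /tmp/gts2 -l 52.3 -k 13.0 -i 52.5 -j 13.2 \
#       -s 20170601 -e 20170630 -b B04_B08 -m single
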
if __name__ == "__main__":

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-o", "--out_dir", action="store", required=True, type=str,
                        help="output_directory")
    parser.add_argument("-r", "--out_prefix", action="store", required=False, type=str,
                        help="output_prefix for naming the output files, it should not contain '_'.")
    parser.add_argument("-m", "--out_mode", action="store", required=False, type=str, default=get_defaults()["out_mode"],
                        help="output_mode, use 'json' for a json file, 'single' for single tiffs, "
                             "'stack' for a band stack, 'nc' for a netcdf file or 'rgb' for RGB images")
    parser.add_argument("-l", "--lat_ll", action="store", required=True, type=float,
                        help="latitude lower left corner")
    parser.add_argument("-k", "--lon_ll", action="store", required=True, type=float,
                        help="longitude lower left corner")
    parser.add_argument("-i", "--lat_ur", action="store", required=True, type=float,
                        help="latitude upper right corner")
    parser.add_argument("-j", "--lon_ur", action="store", required=True, type=float,
                        help="longitude upper right corner")
    parser.add_argument("-a", "--sensor", action="store", required=False, type=str, default=get_defaults()["sensor"],
                        help="sensor name (e.g. S2A), for all sensors: S2all")
    parser.add_argument("-t", "--level", action="store", required=False, type=str, default=get_defaults()["level"],
                        help="processing level (e.g. L2A)")
    parser.add_argument("-v", "--version", action="store", required=False, type=str, default=get_defaults()["version"],
                        help="version of atmospheric correction (e.g. 0.10)")
    parser.add_argument("-b", "--bands", action="store", required=False, type=str, default=get_defaults()["bands"],
                        help="list of bands (e.g. -b B02_B03)")
    parser.add_argument("-s", "--start_date", action="store", required=True, type=str,
                        help="start date, e.g. 20160701")
    parser.add_argument("-e", "--end_date", action="store", required=True, type=str,
                        help="end date, e.g. 20160701")
    parser.add_argument("-c", "--coreg", action="store", required=False, type=str2bool, default=get_defaults()["coreg"],
                        help="get data with corrected pixel shifts (True or False)")
    parser.add_argument("-z", "--max_cloudy", action="store", required=False, type=str, default=get_defaults()["max_cloudy"],
                        help="maximal cloudiness of the requested scene (e.g. 0.5)")
    parser.add_argument("-f", "--minimum_fill", action="store", required=False, type=str, default=get_defaults()["minimum_fill"],
                        help="minimal fraction of data in the scene (e.g. 1.0)")
    parser.add_argument("-d", "--utm_zone", action="store", required=False, type=str, default=get_defaults()["utm_zone"],
                        help="only return data for a specific utm-zone (MGRS-tile, e.g. 33UUV)")
    parser.add_argument("-g", "--stack_resolution", action="store", required=False, type=str, default=get_defaults()["stack_resolution"],
                        help="spatial sampling [in meters] of the output stack file, choose from [10, 20, 60]")
    parser.add_argument("-n", "--rgb_extension", action="store", required=False, type=str, default=get_defaults()["rgb_extension"],
                        help="file extension of rgb files, e.g. [jpg, png], default: jpg")
    parser.add_argument("-q", "--rgb_bands_selection", action="store", required=False, type=str, default=get_defaults()["rgb_bands_selection"],
                        help="band selection for rgb production, choose from: [realistic, nice_looking, vegetation, "
                             "healthy_vegetation_urban, snow, agriculture]")
    parser.add_argument("-w", "--merge_tifs", action="store", required=False, type=str2bool, default=get_defaults()["merge_tifs"],
                        help="Merge tifs and RGBs if the area covers two or more MGRS tiles per time step (True or False).")
    parser.add_argument("-x", "--merge_tile", action="store", required=False, type=str, default=get_defaults()["merge_tile"],
                        help="Choose the MGRS tile into which the merge of files is performed (e.g. 33UUV).")
    parser.add_argument("-p", "--onlytime", action="store", required=False, type=str2bool, default=get_defaults()["onlytime"],
                        help="get the available timestamps for a specific request without downloading any raster data")
    parser.add_argument("-u", "--timeout", action="store", required=False, type=int, default=get_defaults()["timeout"],
                        help="time to wait for a response from gts2 before raising a Timeout exception")

    args = parser.parse_args()

    suffix = "coreg" if (args.coreg is True) else ""

    client(outpath=args.out_dir,
           out_prefix=args.out_prefix,
           out_mode=args.out_mode,
           geo_ll=(args.lon_ll, args.lat_ll),
           geo_ur=(args.lon_ur, args.lat_ur),
           sensor=args.sensor,
           level=args.level,
           version=args.version,
           bands=args.bands,
           max_cloudy=args.max_cloudy,
           suffix=suffix,
           start_date=args.start_date,
           end_date=args.end_date,
           minimum_fill=args.minimum_fill,
           only_tile=args.utm_zone,
           stack_resolution=args.stack_resolution,
           rgb_extension=args.rgb_extension,
           rgb_bands_selection=args.rgb_bands_selection,
           merge_tifs=args.merge_tifs,
           merge_tile=args.merge_tile,
           onlytime=args.onlytime,
           timeout=args.timeout)