Commit c8b5a5dd authored by Karsten Prehn's avatar Karsten Prehn
Browse files

1st

parent d630fdc1
*.csv
*.log
*.sql
TAGS
*.out
README.txt
*.cpg
*.dbf
*.prj
*.shp
*.shx
.directory
[DEFAULT]
# True if using coastline buffer to calculate the coastline, False if using the recursive method
calc_coastline = False
# number of threads to spawn
num_threads = 1
# the stripe num to calculate
stripe_num = 0
# whether to skip DB import altogether
dryrun = False
# whether to skip tile import but update remaining water tiles outside the buffer
tileimport = True
# whether to calculate intermediate coastal water tiles again
recalc_intermediate = False
# whether to calculate final coastal water tiles again
recalc_final = False
# whether to double check with db state on calculating quad grid structure
checkwith_db = True
# whether to calc the same dataset over and over again
test_perf = False
# how many iterations on the same dataset
num_iter = 10
psql_addr = localhost
db_name = obm_cstl
table_name = cmpl_grid
psql_user = prehn
psql_pw = !rs!
file_land_polys = /home/prehn/opt/coastline/data/land-polygons-complete-4326/land_polygons.shp
file_water_polys = /home/prehn/opt/coastline/data/water-polygons-split-4326/water_polygons.shp
file_coastlines = /home/prehn/opt/coastline/data/coastlines-split-4326/lines.shp
# where the db situation per L9 tile is stored
dbextract_path = /home/prehn/opt/coastline/data/import/csv/dbextract
# where to store L9 ids per stripe
ids_path = /home/prehn/opt/coastline/data/import/ids
# where to dump intermediate L9 tiles (incl. coastline data)
intermediate_path = /home/prehn/opt/coastline/data/import/csv/intermediate
# where to dump the final coastline grid for this L9 tile
final_path = /home/prehn/opt/coastline/data/import/csv/final
# where to dump/restore calculation stripes
stripes_path = /home/prehn/opt/coastline/data/grid/L{}
# where to store log files
log_path = /home/prehn/opt/coastline/log
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (setq python-shell-interpreter "ipython" python-shell-interpreter-args "-i --simple-prompt")
# (setq elpy-project-root nil)
"""
TODOs
mkdir /home/prehn/opt/coastline/data/grid/L9
mkdir /home/prehn/opt/coastline/data/img
mkdir /home/prehn/opt/coastline/data/land-polygons-complete-4326
mkdir /home/prehn/opt/coastline/data/water-polygons-split-4326
mkdir /home/prehn/opt/coastline/data/coastlines-split-4326
mkdir /home/prehn/opt/coastline/data/import/csv/intermediate
mkdir /home/prehn/opt/coastline/data/import/csv/final
mkdir /home/prehn/opt/coastline/log
"""
""" Imports
"""
# pygeotile 1.0.6
from pygeotile.tile import Tile
from pygeotile.point import Point
# psycopg2 2.8.5, re 2.2.1, logging 0.5.1.2
import psycopg2, math, re, logging # HINT install libpq-dev python-dev (apt)
import psycopg2.extras as extras
from datetime import datetime
# fiona 1.8.13.post1
import fiona
# shapely 1.7.0
from shapely import wkb, wkt
from shapely.geometry import box, shape, mapping, Polygon, MultiPolygon, LineString
# cartopy 0.18.0
import cartopy.crs as ccrs # HINT req proj (apt), geos
# pyproj 2.2.2
# print pyproj.__version__
# import cartopy.mpl as cmpl
import cartopy.feature as cftr
from cartopy import geodesic
"""
# matplotlib 2.2.4
#import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from mpl_toolkits.axes_grid1.inset_locator import InsetPosition
"""
import time
import timeit
# pandas 0.24.2
import pandas as pd
# numpy 1.16.4
import numpy as np
# geopandas 0.5.1
import geopandas as gpd
# json 2.0.9
import json
import os, sys
# multiprocessing 0.70a1
from multiprocessing import Process
# import imp
# imp.reload(gpd)
from operator import itemgetter
# configparser 3.8.1
import configparser, getopt
import glob
# quadkeys of four L9 tiles used as a fixed test set during development
_ids_test = ['122220223', '122220230', '122220231', '122220232']
# styles
# matplotlib style kwargs for the (optional / commented-out) debug plots
world_coastline_bbox_props = {'facecolor':'none', 'edgecolor':'#0CA4FF'}
coastline_bounds_properties = {'facecolor':'none', 'edgecolor':'#0CA4FF'}
figtitle_properties = {'fontsize':22, 'y':.69}
world_water_bbox_props = {'facecolor':'#0077BE', 'edgecolor':'#0CA4FF'}
intersected_world_bbox_props = {'facecolor':'#FFF9D9', 'edgecolor':'black'}
coastline_props = {'facecolor':'none', 'edgecolor':'k', 'alpha':0.5}
""" Globals """
zoom = 17  # quad tree zoom level (NOTE(review): parts of the code hard-code 18 — confirm which applies)
srid = 4326  # WGS84 spatial reference id used for all geometries
# completeness values that may be overwritten without losing information
# (per _update_database: unknown, water or empty completeness)
lose_no_information_cmpl = [0, 4, 5, 6]
# for each quadrant digit, the three sibling quadrant digits
other_quadrants = {'0':['1','2','3'], '1':['0','2','3'], '2':['0','1','3'], '3':['0','1','2']}
""" Functions
"""
""" @PARAM name __name__ """
def _init_logging(path, name):
log = logging.getLogger(name)
logging.basicConfig(
level=logging.DEBUG,
format=
'%(asctime)s: %(levelname)-8s <[%(filename)s]> ...: %(message)s',
datefmt='%Y.%m.%d %H:%M:%S')
if not log.handlers:
if not os.path.exists(path):
os.makedirs(path)
today = datetime.now().strftime('%Y-%m-%d')
fileHandler = logging.FileHandler("{0}/{1}_{2}.log".format(
"%s" % (path), "db_import", today),
mode='a')
# handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s: %(levelname)-8s <[%(filename)s]> ...: %(message)s',
datefmt='%Y.%m.%d %H:%M:%S')
fileHandler.setFormatter(formatter)
log.setLevel(logging.DEBUG)
log.addHandler(fileHandler)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(formatter)
# logger.addHandler(consoleHandler)
log.debug('logging handler created')
return log
""" Map function """
def polys_gtr(polys):
    """Generator: strip each feature down to its geometry plus empty properties."""
    for feature in polys:
        yield {'geometry': feature['geometry'], 'properties': {}}
""" DB fns """
def connect_obm_cmpl(sql = "SELECT version();", data = None, **kwargs):
    """Open a connection to the completeness DB, run *sql* and commit.

    @param sql  SQL statement; defaults to a version probe. May contain a
                '%s' slot that is filled with *template* for single inserts.
    @param data None for a plain fetch; a one-element list for a single
                parameterised execute; a longer list for a batch write via
                psycopg2.extras.execute_values (when *template* is given)
                or executemany.
    @kwarg template value-row template for execute_values / single inserts
    @kwarg thrd_num thread / process identifier (logging only)
    @return fetched rows, [1] for write statements, or None on error.
    """
    template = kwargs.get('template', None)
    thrd_num = kwargs.get('thrd_num', None)
    # bind up-front so the finally block cannot raise NameError when
    # psycopg2.connect itself fails (the original crashed there)
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(user = PSQL_USER,
                                      password = PSQL_PW,
                                      host = PSQL_ADDR,
                                      port = "5432",
                                      database = DB_NAME)
        cursor = connection.cursor()
        if data is None:
            # plain fetch
            cursor.execute(sql)
            record = cursor.fetchall()
        else:
            if len(data) == 1:
                if template:
                    sql = sql % template
                cursor.execute(sql, data[0])
            else:
                if template is not None:
                    extras.execute_values(cursor, sql, data, page_size=len(data), template=template)
                else:
                    # NOTE: the original had a dead `if template:` branch here;
                    # template is always None on this path
                    cursor.executemany(sql, data)
            cur_msg = cursor.statusmessage
            cur_cnt = cursor.rowcount
            log.debug('<[{}]> [CONN] statusmessage {}, rowcount {}'.format(thrd_num, cur_msg, cur_cnt))
            # write statements have nothing to fetch; signal success with [1]
            if cur_cnt == -1 or re.search(r"^\s*?(?:UPDATE|INSERT|DELETE).+$", sql, re.DOTALL) is not None or len(data)>1:
                record = [1]
            else:
                record = cursor.fetchall()
        connection.commit()
        return record
    except (Exception, psycopg2.Error) as error :
        # guard len(data): data may be None (the original raised here too)
        log.debug("<[{}]> Error while connecting to PostgreSQL {}. Sql {}, Template {}. Data entries {}.".format(
            thrd_num, error, sql, template, len(data) if data is not None else 0))
    finally:
        #closing database connection.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def _load_intermediate_data_csv_GDF(curr_id, csvdir_intermediate, csvlist_intermediate, **kwargs):
    """ import L9 tile """
    # Reload one intermediate per-L9-tile calculation that was dumped to CSV.
    # Geometry columns were serialised as WKT strings; they are parsed back
    # into shapely objects and the result is wrapped in a GeoDataFrame.
    # @param curr_id               key into csvlist_intermediate (tile id)
    # @param csvdir_intermediate   directory holding the intermediate CSVs
    # @param csvlist_intermediate  mapping curr_id -> CSV file name
    # @return geopandas.GeoDataFrame with parsed geometry columns
    thrd_num = kwargs.get('thrd_num', '-1') # thread / process identifier
    log.debug('<[{}]> [CALC][{}] Importing calculation {} from {}.'.format(thrd_num,curr_id, csvlist_intermediate[curr_id], csvdir_intermediate))
    _from_csv_df = pd.read_csv(
        '{}/{}'.format(csvdir_intermediate, csvlist_intermediate[curr_id]),
        delimiter=",")
    """ handle str dumped polygon objects (load from str) """
    # start from the quadkey column only; geometry columns are added below
    curr_section_coastline_bbox_gdf = _from_csv_df[
        ['quadkey']]
    curr_section_coastline_bbox_gdf['water'] = _from_csv_df.apply(
        lambda x: wkt.loads(x['water']), axis=1)
    #curr_section_coastline_bbox_gdf['land'] = _from_csv_df.apply(
    # lambda x: wkt.loads(x['land']), axis=1)
    #_landpoly = gpd.GeoDataFrame({'geometry':[_from_csv_df.iloc[0]['land']]})
    # the tile-wide land/water polygons are stored once, in the first row
    _landpoly = wkt.loads(_from_csv_df.iloc[0]['land'])
    _waterpoly = wkt.loads(_from_csv_df.iloc[0]['waterpoly'])
    """ do the same procedure as in @fn L18_coastline_water_ID_in_LX_GDF (LAND) """
    if type(_landpoly) == MultiPolygon:
        log.debug('<[{}]> [CALC][{}] Found {} land poly geometries of type {}.'.format(thrd_num, curr_id, len(_landpoly), type(_landpoly)))
        """ put the land poly in the first slot. beware: other row's 'land' slot will be NaN """
        curr_section_coastline_bbox_gdf.loc[
            [curr_section_coastline_bbox_gdf.index[0]],
            'land'] = gpd.GeoDataFrame({'geometry':[_landpoly]}).geometry.values
    else:
        # single polygon: assigned as a scalar, so pandas fills every row
        curr_section_coastline_bbox_gdf['land'] = _landpoly
    """ do the same procedure as in @fn L18_coastline_water_ID_in_LX_GDF (WATER) """
    if type(_waterpoly) == MultiPolygon:
        log.debug('<[{}]> [CALC][{}] Found {} water poly geometries of type {}.'.format(thrd_num, curr_id, len(_waterpoly), type(_waterpoly)))
        """ put the water poly in the first slot. beware: other row's 'water' slot will be NaN """
        curr_section_coastline_bbox_gdf.loc[
            [curr_section_coastline_bbox_gdf.index[0]],
            'waterpoly'] = gpd.GeoDataFrame({'geometry':[_waterpoly]}).geometry.values
    else:
        curr_section_coastline_bbox_gdf['waterpoly'] = _waterpoly
    #print type(wkt.loads(_from_csv_df.iloc[0]['land']))
    #curr_section_coastline_bbox_gdf.loc[
    #[curr_section_coastline_bbox_gdf.index[0]],
    #'land'] = _landpoly #wkt.loads(_from_csv_df.iloc[0]['land'])
    # remaining geometry columns: parse WKT, keep None where the CSV had NaN
    curr_section_coastline_bbox_gdf['quadgrid'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['quadgrid']) else wkt.loads(x['quadgrid']), axis=1)
    ###
    curr_section_coastline_bbox_gdf['bbox'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['bbox']) else wkt.loads(x['bbox']), axis=1)
    curr_section_coastline_bbox_gdf['quadgeom'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['quadgeom']) else wkt.loads(x['quadgeom']), axis=1)
    curr_section_coastline_bbox_gdf['buff'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['buff']) else wkt.loads(x['buff']), axis=1)
    curr_section_coastline_bbox_gdf['bbox_land'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['bbox_land']) else wkt.loads(x['bbox_land']), axis=1)
    curr_section_coastline_bbox_gdf['geometry'] = _from_csv_df.apply(
        lambda x: None if pd.isna(x['geometry']) else wkt.loads(x['geometry']), axis=1)
    return gpd.GeoDataFrame(curr_section_coastline_bbox_gdf)
def _load_final_data_csv_GDF(curr_id, csvdir_final, csvlist_final, **kwargs):
    """ import L9 tile """
    # Reload a final per-L9-tile result CSV: quadkey is normalised to str
    # and the WKT geometry column is parsed back (NaN -> None).
    thrd_num = kwargs.get('thrd_num', '-1') # thread / process identifier
    log.debug('<[{}]> [CALC][{}] Importing calculation {} from {}.'.format(thrd_num,curr_id, csvlist_final[curr_id], csvdir_final))
    csv_path = '{}/{}'.format(csvdir_final, csvlist_final[curr_id])
    raw_df = pd.read_csv(csv_path, delimiter=",")
    """ handle str dumped polygon objects (load from str) """
    result_df = raw_df[['completeness']]
    result_df['quadkey'] = raw_df.apply(
        lambda row: '{}'.format(row['quadkey']), axis=1)
    result_df['geometry'] = raw_df.apply(
        lambda row: None if pd.isna(row['geometry']) else wkt.loads(row['geometry']),
        axis=1)
    return result_df
def _remove_overlap_larger_tiles(upsert, complement):
""" remove overlapping larger tiles in the complement tile data set
sort complement tile quadkeys by length descending
to rule out overlapping tiles we sort descending from smallest tile/longest qk to largest/shortest
we then travers the list backwards and remove any smaller quadkey that is inherited by a longer one
"""
_nonoverlap_complement = sorted(complement, key=len, reverse=True)
_len_cks = len(_nonoverlap_complement)
# _nonoverlap_complement[0]
regex_str_cmplmnt = '^{cell_id}[0-3]+$'
regex_str_ups = '^{cell_id}[0-3]*$'
_ck_idx = _len_cks
_upsert_qk = upsert #[u[0] for u in upsert]
for qk in _nonoverlap_complement[::-1]:
_ck_idx -= 1
# _pattern = re.compile(regex_str_complement.format(cell_id=qk))
if any(re.compile(regex_str_cmplmnt.format(cell_id=qk)).search(_qk) for _qk in _nonoverlap_complement) or \
any(re.compile(regex_str_ups.format(cell_id=qk)).search(_qk) for _qk in _upsert_qk):
_nonoverlap_complement.pop(_ck_idx)
return _nonoverlap_complement
def _upsert_remove_data_to_GDF(upsert_and_remove_data, **kwargs):
    """Flatten per-quadkey upsert/remove entries into one GeoDataFrame.

    Rows with completeness -1 mark tiles to remove (geometry None); other
    rows are upserts (coastal tiles plus zero-completeness complements with
    redundant ancestor tiles filtered out).
    @param upsert_and_remove_data dict quadkey -> {'upsert': [...],
           'remove': [...], 'completeness': ...}
    @return geopandas.GeoDataFrame with quadkey/geometry/completeness columns
    """
    thrd_num = kwargs.get('thrd_num', '-1') # thread / process identifier
    curr_section_coastline_bbox_gdf = kwargs.get('curr_section_dbextract_gdf', gpd.GeoDataFrame({}))
    remove = []
    upsert = []
    complement = {}
    # BUGFIX: dict.iteritems() is Python-2-only; .items() is the Python 3 form
    for tile_qk, entry in upsert_and_remove_data.items():
        remove += [r[0] for r in entry['remove']] if len(entry['remove']) > 0 else []
        for entry_qk in entry['upsert']:
            if str(entry_qk) == str(tile_qk):
                # the tile itself keeps its calculated completeness
                upsert += [fiddle_db_entry_tuplet(entry_qk, srid, entry['completeness'])]
            else:
                # surrounding tiles become zero-completeness complements
                complement[entry_qk] = fiddle_db_entry_tuplet(entry_qk, srid, 0)
    remove = list(set(remove))
    _nonoverlap_complement = _remove_overlap_larger_tiles([u[0] for u in upsert], complement.keys())
    _len_upsert = len(upsert)
    _len_complement = len(complement)
    log.debug('<[{}]> [UPSERT][TOGDF] Removing redundant complement tiles yielded {} (of {} total ones).'.format(thrd_num, len(_nonoverlap_complement), _len_complement))
    upsert += [complement[c] for c in complement if c in _nonoverlap_complement]
    log.debug('<[{}]> [UPSERT][TOGDF] Preparing Insert of {} tiles ({} coastal in-buffer & {} complements).'.format(thrd_num, len(upsert), _len_upsert, len(_nonoverlap_complement)))
    # tuplet layout per usage below: g[0] quadkey, g[1..4] bbox corners,
    # g[6] completeness (g[5] presumably the srid — confirm in fiddle_db_entry_tuplet)
    upsert_df = pd.DataFrame({
        'quadkey':[g[0] for g in upsert]+[r for r in remove],
        'geometry':[shape(box(g[1],g[2],g[3],g[4])) for g in upsert]+[None]*len(remove),
        'completeness':[g[6] for g in upsert]+[-1]*len(remove),
    })
    upsert_gdf = gpd.GeoDataFrame(upsert_df)
    upsert_gdf.crs = {'init':'epsg:4326'}
    return upsert_gdf
def _upsert_remove_from_GDF(upsert_gdf):
    """Split a combined upsert/remove GeoDataFrame back into two lists.

    Rows with completeness > -1 become DB entry tuplets to upsert; rows
    flagged with completeness == -1 yield the quadkeys to remove.
    """
    keep_mask = upsert_gdf['completeness'] > -1
    drop_mask = upsert_gdf['completeness'] == -1
    upsert_rows = upsert_gdf[keep_mask].apply(
        lambda row: fiddle_db_entry_tuplet(row['quadkey'], srid, row['completeness']),
        axis=1)
    upsert = upsert_rows.values.tolist()
    """ retrieve remove data """
    remove = upsert_gdf.loc[drop_mask, 'quadkey'].values.tolist()
    return upsert, remove
def _curr_sect_cstl_to_upsert_remove_data(curr_section_coastline_bbox_gdf, curr_section_dbextract_gdf, **kwargs):
    """ takes L9 coastline data and calculates L18 (&less) coastline quad tiles """
    # For every water cell in the section, derive its L18 quadkey and build
    # the per-quadkey upsert/remove data via upsert_quad_leafs.
    # @kwarg dryrun   skip all calculation and return an empty dict
    # @kwarg curr_id  id of the L9 tile being processed (logging only)
    # @kwarg thrd_num thread / process identifier (logging only)
    # @return dict quadkey -> upsert/remove entry (empty on dryrun)
    dryrun = kwargs.get('dryrun', False) # whether we want do any DB operation at all
    curr_id = kwargs.get('curr_id', '-1')
    thrd_num = kwargs.get('thrd_num', '-1') # thread / process identifier
    # BUGFIX: bind before the dryrun guard — the original raised NameError
    # at the return statement whenever dryrun was True
    upsert_and_remove_data = {}
    if not dryrun:
        quad_cmpl = 5
        qks = []
        for idx, row in curr_section_coastline_bbox_gdf.iterrows():
            row_qks = [latlon2quadkey(cell.centroid.y, cell.centroid.x, 18) for cell in row['water']]
            qks += row_qks
        qks = list(set(qks))
        log.debug('<[{}]> [CALC][{}] Working with {} L18 water quadkeys in QK area {}.'.format(thrd_num, curr_id, len(qks), curr_id))
        if len(qks) == 0:
            log.debug('<[{}]> [CALC][{}] List is empty. No water cells to import. Skipping {}.'.format(thrd_num, curr_id, curr_id))
        else:
            log.debug('<[{}]> [CALC][DB][{}] Starting upsert data calculation loop.'.format(thrd_num, curr_id))
            """ TODO use this gdf instead of database calls """
            # curr_section_dbextract_gdf
            for qk in qks:
                quad_start_leafs = same_quadrant_leafs(qk)
                upsert_and_remove_data[qk] = upsert_quad_leafs(
                    quad_start_leafs, quad_start=qk, completeness=quad_cmpl,
                    curr_section_dbextract_gdf=curr_section_dbextract_gdf, **kwargs)
    return upsert_and_remove_data
def _update_database(upsert_and_remove_data, quad_start=None, _new_cmpl=0, **kwargs):
    """Delete removed tiles and upsert new/updated tiles in the grid table.

    Accepts a single-entry dict (containing 'upsert'/'remove' keys), a dict
    of such entries keyed by quadkey (batch), or a (Geo)DataFrame already in
    upsert/remove form.
    @param quad_start quadkey whose entry keeps the new completeness
    @param _new_cmpl  fallback completeness if the entry carries none
    @return result of the DB call, or -1 when there was nothing to upsert
    """
    thrd_num = kwargs.get('thrd_num', '-1') # thread / process identifier
    # BUGFIX: bind both lists up-front — the original raised NameError after
    # the wrong-type warning branch below
    upsert = []
    remove = []
    if type(upsert_and_remove_data) == dict:
        if 'upsert' in upsert_and_remove_data:
            log.debug('<[{}]> [UPSERT][DB] Single entry DB update.'.format(thrd_num))
            new_cmpl = upsert_and_remove_data['completeness'] if 'completeness' in upsert_and_remove_data else _new_cmpl
            # (the original computed `remove` twice; once is enough)
            if len(upsert_and_remove_data['remove']) > 0:
                remove = [r[0] for r in upsert_and_remove_data['remove']]
                complement_cells_cmpl = upsert_and_remove_data['remove'][0][1] if len(remove) == 1 else 0
                # only auto fill complement cells if they are of unknown, water or empty completeness
                complement_cells_cmpl = complement_cells_cmpl if complement_cells_cmpl in lose_no_information_cmpl else 0
            else:
                remove = []
                complement_cells_cmpl = 0
            upsert = [fiddle_db_entry_tuplet(u, srid, new_cmpl) if str(u) == str(quad_start) else
                      fiddle_db_entry_tuplet(u, srid, complement_cells_cmpl) for u in upsert_and_remove_data['upsert']]
        else:
            log.debug('<[{}]> [UPSERT][DB] Batch DB update.'.format(thrd_num))
            upsert_gdf = _upsert_remove_data_to_GDF(upsert_and_remove_data, **kwargs)
            """ retrieve upsert data """
            upsert, remove = _upsert_remove_from_GDF(upsert_gdf)
    elif type(upsert_and_remove_data) in (pd.DataFrame, gpd.GeoDataFrame):
        """ retrieve upsert data """
        upsert, remove = _upsert_remove_from_GDF(upsert_and_remove_data)
    else:
        log.warning('<[{}]> [UPSERT][DB] @param upsert_and_remove_data of wrong type {}.'.format(thrd_num, type(upsert_and_remove_data)))
    # NOTE(review): quadkeys are generated internally (digits only), but a
    # parameterised DELETE would still be safer than string interpolation
    rem_sql = ','.join('\'{}\''.format(d) for d in remove)
    upsert_template = '(%s, ST_MakeEnvelope(%s,%s,%s,%s, %s), %s)'
    sql_del = """
    DELETE FROM {} WHERE cell_id IN ({});
    """.format(TABLE_NAME, rem_sql)
    if len(remove) == 0:
        sql_del = """"""
    sql_ups = """
    INSERT INTO {} (cell_id, geom, completeness)
    VALUES {}
    ON CONFLICT (cell_id) DO UPDATE
    SET cell_id = excluded.cell_id,
        geom = excluded.geom,
        completeness = excluded.completeness;
    """.format(TABLE_NAME, '%s')
    sql = sql_del + sql_ups
    log.debug('<[{}]> [UPSERT][DB] Removing {} and upserting {} rows.'.format(thrd_num, len(remove), len(upsert)))
    db_result = -1
    if upsert:
        # BUGFIX: the original logged len(upsert[0]) unconditionally and
        # crashed (IndexError) whenever the upsert list was empty
        log.debug('<[{}]> [UPSERT][DB][SQL] {} expressions. Template {}'.format(thrd_num, len(upsert[0]), upsert_template))
        db_result = connect_obm_cmpl(sql, upsert, template=upsert_template, thrd_num=thrd_num)
    return db_result
""" quadkey fns """
def latlon2quadkey(lat, lon, zoom):
    """Convert a WGS84 lat/lon to a quadkey string at the given zoom level.

    Longitude is clamped to [-180, 180] (with a warning); latitude is passed
    through unchanged.
    """
    # (removed a dead `if False:` scaffold that referenced undefined names)
    if lon < -180.0:
        log.warning('[LATLON2QK] Setting longitude of {} to -180.0.'.format(lon))
        lon = -180.0
    if lon > 180.0:
        log.warning('[LATLON2QK] Setting longitude of {} to 180.0.'.format(lon))
        lon = 180.0
    point = Point.from_latitude_longitude(latitude=lat, longitude=lon) # point from lat lon in WGS84
    tile = Tile.for_latitude_longitude(point.latitude_longitude[0], point.latitude_longitude[1], zoom)
    return tile.quad_tree
def quadkey2bbox(qk):
    """Return the WGS84 bounding box of quadkey *qk* as a shapely polygon."""
    sw, ne = tile_bounds(qk)
    return shape(box(sw.longitude, sw.latitude, ne.longitude, ne.latitude))
def quadkey2bbox_geom(qk):
    """Return the quadkey's bounding box as a GeoJSON-like mapping dict."""
    return mapping(quadkey2bbox(qk))
def tile_bounds(quad_cell_id):
    """Return the (SW, NE) point bounds of a quad tree cell id."""
    quad_tree_key = '{}'.format(quad_cell_id)
    return Tile.from_quad_tree(quad_tree_key).bounds
def postgres_envelope_expr(bounds, srid):
    """Build a PostGIS ST_MakeEnvelope(...) expression for (SW, NE) bounds."""
    sw, ne = bounds[0], bounds[1]
    return "ST_MakeEnvelope({},{},{},{}, {})".format(
        sw.longitude, sw.latitude, ne.longitude, ne.latitude, srid)
def relevantpolys4quadkey(the_polygons, quadkey):
    """Filter *the_polygons* down to those intersecting the quadkey's bbox."""
    cell_bb = quadkey2bbox(quadkey)
    return [poly for poly in the_polygons if cell_bb.intersects(poly)]
def common_parent_quadkey(q1, q2):
    """Return the deepest common ancestor of two integer quadkeys.

    Trailing quadrant digits are stripped (integer division by 10) until
    both keys coincide; keys of different depth degrade to 0.
    BUGFIX: the original used ``/=`` which is float division in Python 3,
    so the loop only terminated once both values underflowed to 0.0 and
    the function returned a float instead of the common prefix.
    """
    while (q1 != q2):
        q1 //= 10
        q2 //= 10
    return q1
def quadkey2GoogleTMS(quadkey):
    """Convert a quadkey to a 'zoom/x/y' Google tile path string."""
    key_str = '{}'.format(quadkey)
    google_x, google_y = Tile.from_quad_tree(key_str).google
    return '{}/{}/{}'.format(len(key_str), google_x, google_y)
def quadkey_tilelength(quadkey):
    """Perimeter (shapely .length, degrees) of the tile at lat/lon (0, 0).

    NOTE(review): despite its name, the parameter is used as a zoom level,
    not a quadkey string — confirm at call sites.
    """
    origin_qk = latlon2quadkey(0.0, 0.0, quadkey)
    return quadkey2bbox(origin_qk).length
""" quadgrid fns (qk+db) """
""" go down the quad tree by number of levels """
def quad_level_down( qk, levels, grid = None, intersect_geoms = None):
if grid == None:
grid = []
if levels == 0: # return statement
bb = quadkey2bbox(qk)
grid += [(qk, bb)]
else:
for q in range(0,4,1):
next_qk = '{}{}'.format(qk,q)
valid_path = False
if intersect_geoms is not None:
_next_qk_shp = quadkey2bbox(next_qk)
if type(intersect_geoms) == gpd.GeoSeries:
log.warning('[WARNING] type: {}, data: {}'.format(type(intersect_geoms), intersect_geoms))
valid_path = _next_qk_shp.intersects(intersect_geoms.iloc[0])
else:
valid_path = _next_qk_shp.intersects(intersect_geoms)
#_next_qk_gdf = gpd.GeoDataFrame({'geometry':[_next_qk_shp]})
#_intersects = gpd.sjoin(intersect_geoms, _next_qk_gdf,
# op='intersects')
#_intersects = _next_qk_gdf['geometry'].apply(lambda g: _next_qk_shp.intersects(g))