Commit da8b2c49 authored by roelo008's avatar roelo008
Browse files

advanced draagkracht model

parent 1701f53d
......@@ -11,32 +11,33 @@ nbt_src: path to neergeschaalde beheertypenkaart (bt) (nominal raster)
1. total pixels per bt per area
2. reduce NDFF observations to 1 observation per nbt pixel using forward affine transform
3. couple each NDFF observation to 1 bt
4. couple each NDFF observation to 1 area
3. couple each NDFF observation to 1 area
4. couple each NDFF observation to 1 bt
5. rework tables to output
By: Hans Roelofsen, WEnR, 29 June 2020
import argparse
import os
import pandas as pd
import geopandas as gp
import rasterio as rio
import rasterstats as rast
import numpy as np
from mnp import helpers as hlp
# from sample.mnp import helpers as hlp
# CLI setup: species name and beheertype code.
# NOTE(review): this span is commit-diff residue — both the pre-commit lines
# (parsing args) and the post-commit lines (hard-coded 'Tapuit') are present.
parser = argparse.ArgumentParser()
parser.add_argument('species', type=str, help='species name')
parser.add_argument('bt', type=str, help='beheertype code Nxx.yy')
# Parse arguments
args = parser.parse_args()
sp = hlp.Species(args.species)
# Post-commit version: argument parsing disabled, species fixed for development.
#args = parser.parse_args()
#sp = hlp.Species(args.species)
sp = hlp.Species('Tapuit')
#bt =
# Path to beheertypenkaart
bt_tiff = r'c:\apps\temp_geodata\nbt\nbt_20190730_111603.tif'
......@@ -47,23 +48,46 @@ nbts = hlp.nbts()
# Lookup tables from the nbt table: raster pixel value -> beheertype code (B),
# and beheertype code A -> beheertype code B.
pxl_2_btB = dict(zip(nbts.pxl, nbts.bt_B))
btA_2_btB = dict(zip(nbts.bt_A, nbts.bt_B))
# Read NDFF shapefile
ndff = hlp.read_ndff_shp(sp.ndff_src)
'''1. total BT pixels per area'''
# 10 km squares covering the Netherlands (RD New extent: 28 cols x 33 rows).
areas = hlp.gen_squares(origin_x=0, origin_y=625000, ncol=28, nrow=33, size=10000)
# Count beheertype pixels per square. NOTE(review): this line is truncated in
# the diff scrape (double comma, missing raster/affine arguments) — restore
# from the repository before running.
bt_stats_prelim = rast.zonal_stats(vectors=areas,, categorical=True, category_map=pxl_2_btB,
# Pad each area's category counts with zeros for every beheertype, so every
# row of the resulting DataFrame carries the full set of beheertype columns.
# Fix: the zero-filled template dict was rebuilt on every loop iteration,
# although it is loop-invariant — build it once with dict.fromkeys.
empty_stats = dict.fromkeys(set(nbts.bt_B), 0)
# Observed counts override the zero defaults when the dicts are merged.
bt_stats = [{**empty_stats, **area_stats} for area_stats in bt_stats_prelim]
areas = areas.join(pd.DataFrame(data=bt_stats))
# NOTE(review): pre-commit residue — these lines were replaced in this commit
# by the gen_squares grid above and the ndff_all queries below.
# Read areas shapefile
areas = gp.read_file()
# define queries that isolate NDFF data of interest
# q1: observation year known and >= 2000; q2: name whitelist (empty here);
# q3: observation area smaller than 1 ha... presumably m2 — TODO confirm units.
queries = {'q1': ndff.loc[(ndff.year >= 2000) & (ndff.year.notna())].index,
'q2': ndff.loc[ndff.nl_name.isin([])].index,
'q3': ndff.loc[ndff.area < 10000].index}
'''2. Construct GDF with NDFF waarnemingen reduced to 1 waarneming per BTKaart pixel. Use forward Affine to tie NDFF
waarneming centroids to a NDFF pixel'''
ndff_all = hlp.read_ndff_shp(sp.ndff_src)
# Three filters on the raw NDFF observations; only rows passing all three survive.
queries = {'q1': ndff_all.loc[(ndff_all.year >= 2000) & (ndff_all.year.notna())].index,
'q2': ndff_all.loc[ndff_all.nl_name.isin([])].index,
'q3': ndff_all.loc[ndff_all.area < 10000].index}
# Intersection of the index sets of all queries = selected observations.
sel = set.intersection(*[set(queries[x]) for x in queries.keys()]) # for query in selected_queries
assert len(sel) > 0, 'no observations remaining'
# NOTE(review): both print statements are truncated in the diff scrape; the
# first assert/print pair is the pre-commit version, the second the post-commit one.
print('Found {0} NDFF observations for {1}'.format(len(sel),
assert len(sel) > 0, ' no observations remaining'
print(' found {0} NDFF observations for {1}'.format(len(sel),
# Reduce to centroids, map each centroid to its beheertypenkaart pixel via the
# forward affine transform, then keep one observation per pixel.
# NOTE(review): old (ndff) and new (ndff_gdf) variants of the same three steps
# are both present here — diff residue, keep only one in the real source.
ndff = gp.GeoDataFrame({'ndff_id': [1] * len(sel), 'soort': [] * len(sel), 'geometry': ndff_all.loc[sel].centroid})
ndff.loc[:, 'nbt_row_col'] = ndff.geometry.apply(hlp.xy_2_colrow, rio_object=bt_kaart)
ndff.drop_duplicates(subset='nbt_row_col', inplace=True)
ndff_gdf = gp.GeoDataFrame({'ID': [1] * len(sel), 'soort': [] * len(sel), 'geometry': ndff.loc[sel].centroid})
ndff_gdf.loc[:, 'nbt_row_col'] = ndff_gdf.geometry.apply(hlp.xy_2_colrow, rio_object=bt_kaart)
ndff_gdf.drop_duplicates(subset='nbt_row_col', inplace=True)
# NOTE(review): pre-commit zonal_stats call, truncated in the diff scrape.
sp_stats = rast.zonal_stats(vectors=ndff_gdf,, categorical=True, all_touched=False,
'''3. Join each NDFF waarneming to an area'''
# Spatial join: each observation centroid falls within exactly one 10 km square.
ndff = gp.sjoin(left_df=ndff, right_df=areas.loc[:, ['ID', 'geometry']], how='left', op='within')
'''4. Join each NDFF waarneming to a BT'''
# Categorical zonal stats of the beheertypenkaart under each (point) observation.
# NOTE(review): truncated line (double comma) — restore from the repository.
sp_stats = rast.zonal_stats(vectors=ndff,, categorical=True, all_touched=False,
category_map=pxl_2_btB, affine=bt_kaart.affine)
# not more than 1 BT per NDFF observation and not more than 1 pixel per NDFF observation
assert all([len(x) == 1 for x in sp_stats])
assert all([item for sublist in [[v == 1 for v in x.values()] for x in sp_stats] for item in sublist]) # sorry..
# Flatten the single key of each per-observation stats dict into a BT column.
ndff.loc[:, 'BT'] = [item for sublist in [[k for k in x.keys()] for x in sp_stats] for item in sublist]
'''5. Pivot NDFF waarnemingen to Area ids and sum aantal waarnemingen. Then merge with areas df.'''
# Every BT observed in the NDFF data must already be a column of the areas frame.
assert set(ndff.BT).issubset(areas.columns)
# Count observations (ndff_id is constant 1, so sum == count) per area per BT.
ndff_per_area = pd.pivot_table(data=ndff, index='ID', columns='BT', values='ndff_id', aggfunc='sum')
ndff_per_area.columns = ['{0}_ndffpxls'.format(x) for x in ndff_per_area.columns.tolist()]
# Left merge keeps all areas; areas without observations get zero counts.
out = pd.merge(left=areas, right=ndff_per_area.fillna(0), left_on='ID', right_index=True, how='left')
# Sort columns alphabetically for a stable output layout.
out = out.reindex(sorted(out.columns), axis=1)
\ No newline at end of file
......@@ -6,6 +6,8 @@ import os
import numpy as np
import geopandas as gp
import pandas as pd
import shapely
import affine
from datetime import datetime as dt
from matplotlib import pyplot as plt
......@@ -98,22 +100,24 @@ class Species:
def colrow_2_xy(colrow, rio_object=None, affine_in=None):
    """
    Convert a (column, row) raster index to (x, y) map coordinates.

    Reconstructed from the commit diff: the scrape dropped the unchanged
    ``try:``/``else:`` scaffolding around the old and new fallback branches.

    :param colrow: (column-index, row-index) tuple
    :param rio_object: rasterio dataset; its ``.affine`` transform is used when given
    :param affine_in: affine.Affine transform, fallback when rio_object is not valid
    :return: (easting, northing) tuple in the raster's CRS
    """
    try:
        a = rio_object.affine
    except AttributeError:
        # rio_object is None or not a rasterio object: fall back to an
        # explicitly supplied Affine transformation, if any.
        if isinstance(affine_in, affine.Affine):
            a = affine_in
        else:
            raise Exception("{0} is not a valid rasterio object and no Affine alternative supplied".format(rio_object))
    col, row = colrow
    x, y = a * (col, row)
    return x, y
def xy_2_colrow(point, rio_object):
......@@ -142,3 +146,41 @@ def get_mnp_dk(species):
except AttributeError:
print('Make sure to provide a Species object, w@nker')
def gen_squares(x_ul, y_ul, nrow, ncol, size):
    """
    Generate a GeoDataFrame with square polygons tiling a regular grid.

    :param x_ul: easting of the grid's top-left corner
    :param y_ul: northing of the grid's top-left corner
    :param nrow: number of rows
    :param ncol: number of cols
    :param size: square size in m
    :return: gdf with ncol*nrow items

    Definition affine.Affine(a, b, c, d, e, f)
    a: width of a pixel
    b: row rotation (typically zero)
    c: x-coordinate of the upper-left of the upper-left pixel
    d: column rotation (typically zero)
    e: height of a pixel (negative)
    f: y-coordinate of the upper-left of the upper-left pixel
    """
    # NOTE(review): the caller in the analysis script uses keywords
    # origin_x=/origin_y= — confirm the parameter names match in the repository.
    aff = affine.Affine(size, 0, x_ul, 0, -size, y_ul)
    # Fix: np.ndindex iterates the FIRST axis slowest and yields (i, j) tuples;
    # colrow_2_xy expects (col, row), so the shape must be (ncol, nrow) — with
    # the original (nrow, ncol) the grid was transposed whenever nrow != ncol.
    ul_corners_indx = [indx for indx in np.ndindex(ncol, nrow)]
    ul_corners_coords = [colrow_2_xy(colrow=corner, affine_in=aff) for corner in ul_corners_indx]
    # [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)] == [ll, lr, ur, ul]
    all_corner_coords = [[(xmin, ymax-size), (xmin+size, ymax-size), (xmin+size, ymax), (xmin, ymax)] for (xmin, ymax)
                         in ul_corners_coords]
    shapes = [shapely.geometry.Polygon(corner) for corner in all_corner_coords]
    # Square ID: "<easting>_<northing>" of the upper-left corner.
    ids = ['{0}_{1}'.format(int(x), int(y)) for x, y in ul_corners_coords]
    return gp.GeoDataFrame(crs={"init": 'epsg:28992'}, geometry=shapes, data={'ID': ids, 'size': str(size)})
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment