VHR Factory

This commit is contained in:
Florent Guiotte 2018-07-03 18:54:12 +02:00
parent 16ca49cdb5
commit 6b713f6323
5 changed files with 587 additions and 28 deletions

View File

@ -233,6 +233,15 @@
"## Interpolate NoData"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"griddata"
]
},
{
"cell_type": "code",
"execution_count": null,
@ -378,31 +387,10 @@
"metadata": {},
"outputs": [],
"source": [
"def rasterize(spatial, values, resolution=1., method='nearest'):\n",
" '''\n",
" Rasterize spatialised data.\n",
"\n",
" :param spatial: Coordinates of the points (x, y, z)\n",
" :type spatial: ndarray\n",
" :param values: Values of the points\n",
" :type values: ndarray\n",
" :param resolution: Resolution of the raster in meters\n",
" :type resolution: float\n",
" :param method: Method of interpolation for NoData values {'linear', 'nearest', 'cubic'}\n",
" :type method: string\n",
" :return: 2D raster of the values along z axis\n",
" :rtype: ndarray\n",
"\n",
" .. warning:: WIP\n",
" '''\n",
" \n",
" resolution = 0.5\n",
"\n",
"spatial_data = np.array((C1['x'], C1['y'], C1['z'])).T\n",
"\n",
"bins = np.rint((spatial_data.max(axis=0) - spatial_data.min(axis=0)) / resolution).astype(int)\n",
"display(bins)\n",
" "
"import importlib\n",
"sys.path.append(\"..\")\n",
"import rasterizer\n",
"importlib.reload(rasterizer)"
]
},
{
@ -411,7 +399,7 @@
"metadata": {},
"outputs": [],
"source": [
"rasterize griddata"
"myraster = rasterizer.rasterize(spatial_data, C1['intensity'], 1.)"
]
},
{
@ -420,7 +408,7 @@
"metadata": {},
"outputs": [],
"source": [
"infile = laspy.file.File('../Data/lidar/C1/272056_3289689.las')"
"myraster_r = rasterizer.rasterize(spatial_data, C1['intensity'], 1., reverse_alt=True)"
]
},
{
@ -429,7 +417,34 @@
"metadata": {},
"outputs": [],
"source": [
"infile.reader.get_dimension('X')"
"plt.imsave('../Res/myrstr.png', myraster, origin='upper')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imsave('../Res/myrstr_r.png', myraster_r, origin='upper')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mid_layer = myraster - myraster_r"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imsave('../Res/myrstr_layer_1.png', mid_layer, origin='upper')"
]
},
{
@ -459,6 +474,52 @@
"plt.show()\n",
"plt.imsave('../Res/dfc_c1.png', dfc_c1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def rasterize_cache(spatial, values, resolution=1., method='linear', reverse_alt=False, cache_dir='/tmp'):\n",
" \"\"\"Cache layer for rasterize\"\"\"\n",
" \n",
" cache_dir = Path(cache_dir)\n",
" name = '{}_{}_{}_{}{}'.format(data['name'], field, str(resolution).replace('.', '_'), method,\n",
" '_reversed' if reverse_alt else '')\n",
" png_file = cache_dir.joinpath(Path(name + '.png'))\n",
" tif_file = cache_dir.joinpath(Path(name + '.tif'))\n",
" \n",
" if tif_file.exists() :\n",
" print ('WARNING: Loading cached result {}'.format(tif_file))\n",
" raster = triskele.read(tif_file)\n",
" else: \n",
" raster = rasterizer.rasterize(spatial, values, resolution, method, reverse_alt)\n",
" triskele.write(tif_file, raster)\n",
" plt.imsave(png_file, raster)\n",
" \n",
" return raster\n",
"\n",
"rasterize_cache(spatial_data, C1['z'], 10, cache_dir='../Res/TMP')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"td = Path(Path('/tmp'))\n",
"tf = Path('test.png')\n",
"td.joinpath(tf).j"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {

View File

@ -0,0 +1,271 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Custom Raster Gen for LD2DAPs"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"sys.path.append(\"..\")\n",
"import rasterizer\n",
"import raster_assistant as ra\n",
"\n",
"sys.path.append('../triskele/python/')\n",
"import triskele\n",
"\n",
"figsize = np.array((16, 3)) * 1.5"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Load LAS data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"C1 = ra.bulk_load('../Data/lidar/C1', 'C1', filter_treshold=.5)\n",
"C2 = ra.bulk_load('../Data/lidar/C2', 'C1', filter_treshold=.5)\n",
"C3 = ra.bulk_load('../Data/lidar/C3', 'C1', filter_treshold=.5)\n",
"C123 = ra.bulk_load('../Data/lidar', 'C123', filter_treshold=.5)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Raster Validation\n",
"\n",
"### Rasterize some data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster = rasterizer.rasterize(C1.spatial, C1.intensity, 0.5)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster, origin='upper')\n",
"plt.show()\n",
"plt.imsave('../Res/raster_validation.png', raster)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.hist(raster.flatten(), bins=1000)\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Write TIFF file"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"triskele.write('../Res/validation.tiff', raster)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Compare with DFC dataset"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dfc = triskele.read('../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif')\n",
"our = triskele.read('../Res/validation.tiff')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Filter DFC with same parameters"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ra.auto_filter(dfc, treshold=0.5)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Display Stats"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dfc.shape, our.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dfc.dtype, our.dtype"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.hist(dfc.flatten(), bins=1000, label='DFC')\n",
"plt.hist(our.flatten(), bins=1000, label='Our', alpha=.8)\n",
"plt.legend()\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Display Rasters"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, axs = plt.subplots(2, figsize=figsize * 2)\n",
"\n",
"axs[0].imshow(dfc)\n",
"axs[0].set_title('DFC')\n",
"axs[1].imshow(our)\n",
"axs[1].set_title('Our')\n",
"\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Raster Pack"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from multiprocessing import Pool, Process, Queue\n",
"import multiprocessing as mp\n",
"#mp.set_start_method('spawn')\n",
"\n",
"def rasterize_cache_mp(data_var, field, res, method, reverse, cache):\n",
" if data_var == 'C1':\n",
" ra.rasterize_cache(C1, field, res, method, cache)\n",
" if data_var == 'C2':\n",
" ra.rasterize_cache(C2, field, res, method, cache)\n",
" if data_var == 'C3':\n",
" ra.rasterize_cache(C3, field, res, method, cache)\n",
" if data_var == 'C123':\n",
" ra.rasterize_cache(C123, field, res, method, cache)\n",
" \n",
"pool = Pool(processes=20)\n",
"\n",
"job_args = list()\n",
"for field in ('z', 'intensity', 'num_returns', 'gps_time'):\n",
" for data in ('C1', 'C2', 'C3', 'C123'):\n",
" job_args.append([data, field, .5, 'linear', False, '../Res/HVR/'])\n",
" \n",
"for i in pool.starmap(rasterize_cache_mp, job_args):\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@ -0,0 +1,60 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append(\"..\")\n",
"import raster_assistant as ra"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"C1 = ra.bulk_load('../Data/lidar/C1/272056_3289689.las', 'C1mini')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ra.rasterize_cache(C1, 'z', 10, cache_dir='../Res/TMP')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

96
raster_assistant.py Normal file
View File

@ -0,0 +1,96 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file raster_assistant.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 03 juil. 2018
#
# TODO details
import sys
from pathlib import Path
import numpy as np
import matplotlib.pyplot as plt
import laspy
sys.path.append('../triskele/python/')
import triskele
import rasterizer
def rasterize_cache(data, field, resolution=1., method='linear', reverse_alt=False, cache_dir='/tmp'):
    """Rasterize one *field* of *data*, caching the result on disk.

    The raster is stored as a GeoTIFF (read back on subsequent calls with
    the same parameters) plus a PNG preview.

    :param data: object exposing ``name``, ``spatial`` and the requested
        *field* as attributes (as returned by :func:`bulk_load`)
    :param field: name of the point attribute to rasterize (e.g. ``'z'``)
    :param resolution: raster resolution in meters, forwarded to
        :func:`rasterizer.rasterize`
    :param method: NoData interpolation method, forwarded to the rasterizer
    :param reverse_alt: stack voxels bottom-up instead of top-down
    :param cache_dir: directory holding the cached ``.tif``/``.png`` files
    :return: 2D ndarray raster
    """
    cache_dir = Path(cache_dir)
    # Robustness: triskele.write/plt.imsave would fail on a missing directory.
    cache_dir.mkdir(parents=True, exist_ok=True)

    # Cache key encodes every parameter that changes the result.
    name = '{}_{}_{}_{}{}'.format(data.name, field, str(resolution).replace('.', '_'), method,
                                  '_reversed' if reverse_alt else '')
    png_file = cache_dir / (name + '.png')
    tif_file = cache_dir / (name + '.tif')

    if tif_file.exists():
        print('WARNING: Loading cached result {}'.format(tif_file))
        raster = triskele.read(tif_file)
    else:
        raster = rasterizer.rasterize(data.spatial, getattr(data, field), resolution, method, reverse_alt)
        triskele.write(tif_file, raster)
        plt.imsave(png_file, raster)

    return raster
def find_las(path):
    """Recursively collect every ``.las`` file at or below *path*.

    :param path: file or directory to search (str or Path)
    :return: list of :class:`pathlib.Path` objects pointing to ``.las`` files
    """
    path = Path(path)
    if path.is_file() and path.suffix == '.las':
        return [path]
    if path.is_dir():
        # Depth-first descent, flattening each child's results.
        return [las for child in path.iterdir() for las in find_las(child)]
    return []
def auto_filter(data, treshold=.5):
    """Clamp outlier values of *data* in place.

    Values below the *treshold*-th percentile are raised to that percentile;
    values above the ``100 - treshold``-th percentile are lowered to it.

    :param data: ndarray, modified in place
    :param treshold: percentile (in percent) trimmed at each tail
    """
    low, high = np.percentile(data, (treshold, 100 - treshold))
    # putmask has the same casting semantics as boolean-mask assignment.
    np.putmask(data, data < low, low)
    np.putmask(data, data > high, high)
def bulk_load(path, name=None, filter_treshold=.1):
    """Load every ``.las`` file found under *path* into flat numpy arrays.

    Each point attribute is concatenated across all files, converted to an
    ndarray, then clamped with :func:`auto_filter` (except ``x`` and ``y``).
    Progress is reported on stdout with carriage-return updates.

    :param path: file or directory searched recursively for ``.las`` files
    :param name: label stored as ``.name`` (used by :func:`rasterize_cache`
        to build cache keys)
    :param filter_treshold: percentile passed to :func:`auto_filter`
    :return: a namespace object exposing ``file``, ``name``, the attribute
        arrays (``x``, ``y``, ``z``, ``intensity``, ``num_returns``) and
        ``spatial`` (an (n, 3) array of x, y, z columns) as attributes
    """
    data = {'file': path, 'name': name}
    # Trailing commented-out names are attributes deliberately not loaded here.
    attributes = ['x', 'y', 'z', 'intensity', 'num_returns']#, 'scan_angle_rank', 'pt_src_id', 'gps_time']
    for a in attributes:
        data[a] = list()

    print('Load data...')
    for f in find_las(path):
        print('{}: '.format(f), end='')
        infile = laspy.file.File(f)
        for i, a in enumerate(attributes):
            # '\r' rewrites the line in place to show per-file percentage.
            print('\r {}: [{:3d}%]'.format(f, int(i/len(attributes) * 100)), end='')
            data[a].extend(getattr(infile, a))
        infile.close()
        print('\r {}: [Done]'.format(f))

    print('Create matrices...', end='')
    for i, a in enumerate(attributes):
        print('\rCreate matrices: [{:3d}%]'.format(int(i/len(attributes) * 100)), end='')
        data[a] = np.array(data[a])
    print('\rCreate matrices: [Done]')

    print('Filter data...', end='')
    for i, a in enumerate(attributes):
        print('\rFilter data: [{:3d}%]'.format(int(i/len(attributes) * 100)), end='')
        # Coordinates are kept untouched; only value-like attributes are clamped.
        if a == 'x' or a == 'y':
            continue
        auto_filter(data[a], filter_treshold)
    print('\rFilter data: [Done]')

    # (n, 3) point-coordinate matrix expected by rasterizer.rasterize.
    data['spatial'] = np.array((data['x'], data['y'], data['z'])).T

    class TMPLAS(object):
        # Minimal namespace wrapper: exposes the dict entries as attributes.
        def __init__(self, d):
            self.__dict__.update(d)

    return TMPLAS(data)

71
rasterizer.py Normal file
View File

@ -0,0 +1,71 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file rasterizer.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 03 juil. 2018
#
# TODO details
import numpy as np
from scipy.interpolate import griddata
def rasterize(spatial, values, resolution=1., method='linear', reverse_alt=False):
    '''
    Rasterize spatialised data.

    :param spatial: Coordinates of the points [x, y, z] shape (n, 3)
    :type spatial: ndarray
    :param values: Values of the points
    :type values: ndarray
    :param resolution: Resolution of the raster in meters
    :type resolution: float
    :param method: Method of interpolation for NoData values {'linear', 'nearest', 'cubic'}
    :type method: string
    :param reverse_alt: If True, stack voxel layers bottom-up (lowest
        altitude wins) instead of the default top-down order
    :type reverse_alt: bool
    :return: 2D raster of the values along z axis
    :rtype: ndarray

    .. warning:: WIP
    '''
    # Number of cells along each axis at the requested resolution.
    bins = np.rint((spatial.max(axis=0) - spatial.min(axis=0)) / resolution).astype(int)

    print('Compute histogram...')
    voxels_histo, edges = np.histogramdd(spatial, bins=bins)

    print('Compute weighted histogram...')
    voxels_histo_v, edges = np.histogramdd(spatial, bins=bins, weights=values)

    layers = voxels_histo.shape[2]
    raster = np.full_like(voxels_histo.T[0], np.nan)

    print('Compute voxels value...')
    # Mean value per voxel. Empty voxels yield NaN (0/0) on purpose --
    # silence the otherwise-noisy divide/invalid RuntimeWarnings.
    with np.errstate(divide='ignore', invalid='ignore'):
        voxels = (voxels_histo_v / voxels_histo).T

    print('Stack voxels...')
    # Project the first non-empty voxel onto the raster, scanning layers
    # top-down (or bottom-up when reverse_alt is set).
    layer_order = range(layers) if reverse_alt else reversed(range(layers))
    for i in layer_order:
        nan_filter = np.isnan(raster)
        raster[nan_filter] = voxels[i][nan_filter]

    print('Interpolate NoData...')
    if method != 'nearest':
        raster = interpolate(raster, method)

    print('Final touches...')
    # Fill cells still empty after interpolation (e.g. outside the convex
    # hull of the data) with nearest-neighbour values.
    raster = interpolate(raster, 'nearest')

    return np.flip(raster, 0)
def interpolate(raster, method='linear'):
    """Fill NaN ("NoData") cells of *raster* in place from valid cells.

    :param raster: 2D ndarray; its NaN cells are overwritten in place
    :param method: griddata method {'linear', 'nearest', 'cubic'}
    :return: the same ndarray, NaN cells replaced where possible
    :raises ValueError: if *raster* contains no valid (non-NaN) cell
    """
    nan_filter = np.isnan(raster)
    if not nan_filter.any():
        # Nothing to fill: skip the pointless griddata call.
        return raster
    val_filter = np.logical_not(nan_filter)
    if not val_filter.any():
        # griddata would fail obscurely with zero input points.
        raise ValueError('interpolate: raster contains no valid values')
    coords = np.argwhere(val_filter)
    values = raster[val_filter]
    grid = np.argwhere(nan_filter)
    raster[nan_filter] = griddata(coords, values, grid, method=method)
    return raster