HVR Denoising

Florent Guiotte 2018-07-05 12:20:27 +02:00
parent 6b713f6323
commit 10aed66682
5 changed files with 485 additions and 46 deletions

400
Notebooks/HVR Noise.ipynb Normal file
View File

@@ -0,0 +1,400 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Setup\n",
"\n",
"## Create legacy HVR"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"sys.path.append(\"..\")\n",
"import rasterizer\n",
"import raster_assistant as ra\n",
"\n",
"sys.path.append('../triskele/python/')\n",
"import triskele\n",
"\n",
"figsize = np.array((16, 3)) * 1.5"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"data = ra.bulk_load('../Data/lidar/C3/', filter_treshold=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster = rasterizer.vhr(data.spatial, data.z, dtype=np.float32)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster)\n",
"plt.show()\n",
"plt.imsave('../Res/tmp.png', raster)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Noise Profile"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.hist(data.z, bins=1000)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.hist(data.intensity, bins=1000)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=(10,10))\n",
"plt.hist2d(data.intensity, data.z, bins=1000)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"treshold = .5\n",
"tresholds_i = np.percentile(data.intensity, [treshold, 100 - treshold])\n",
"tresholds_z = np.percentile(data.z, [treshold, 100 - treshold])\n",
"\n",
"intensity_filter = np.logical_or(data.intensity < tresholds_i[0], data.intensity > tresholds_i[1])\n",
"z_filter = np.logical_or(data.z < tresholds_z[0], data.z > tresholds_z[1])\n",
"all_filter = np.logical_or(intensity_filter, z_filter)\n",
"inter_filter = np.logical_and(z_filter, intensity_filter)\n",
"\n",
"data.x.size, intensity_filter.sum(), z_filter.sum(), all_filter.sum(), inter_filter.sum()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The z and intensity noise seems uncorrelated."
]
},
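{
"cell_type": "markdown",
"metadata": {},
"source": [
"A rough check of that claim (a minimal sketch reusing the filters computed above): under independence, the expected size of the intersection is about `P(intensity noise) * P(z noise) * N`, which can be compared with `inter_filter.sum()`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Expected intersection size if the intensity and z filters were independent,\n",
"# compared with the observed intersection size\n",
"n = data.x.size\n",
"expected_inter = intensity_filter.sum() / n * z_filter.sum() / n * n\n",
"expected_inter, inter_filter.sum()"
]
},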
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Noise rasters\n",
"\n",
"### Denoise altitude for voxels computation..."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"Z = data.spatial[:,2]\n",
"ra.auto_filter(Z)\n",
"data.spatial = np.hstack((data.spatial[:,:2], Z.reshape(-1, 1)))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Compute noise rasters"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_noise = rasterizer.rasterize(data.spatial, all_filter.astype(np.float16), .5, 'nearest', dtype=np.float16)\n",
"z_noise = rasterizer.rasterize(data.spatial, z_filter.astype(np.float16), .5, 'nearest', dtype=np.float16)\n",
"i_noise = rasterizer.rasterize(data.spatial, intensity_filter.astype(np.float16), .5, 'nearest', dtype=np.float16)\n",
"inter_noise = rasterizer.rasterize(data.spatial, inter_filter.astype(np.float16), .5, 'nearest', dtype=np.float16)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Display"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig, axs = plt.subplots(4, figsize=figsize * 4)\n",
"for i, fig_title in enumerate(zip((all_noise, z_noise, i_noise, inter_noise), ('All', 'Altitude', 'Intensity', 'Intersection'))):\n",
" axs[i].imshow(fig_title[0].astype(np.float32))\n",
" axs[i].set_title(fig_title[1])\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Z and Int cutoff visualisation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig, axs = plt.subplots(2, 2, figsize=(200,200))\n",
"for ax, filter_title in zip(axs.flatten(), zip((all_filter, z_filter, intensity_filter, inter_filter), ('All', 'Altitude', 'Intensity', 'Intersection'))):\n",
" ax.set_title(filter_title[1])\n",
" ax.hist2d(data.intensity[np.logical_not(filter_title[0])], data.z[np.logical_not(filter_title[0])], bins=1000)\n",
"fig.tight_layout()\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reconstruct denoised rasters\n",
"\n",
"#### All filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_all = rasterizer.vhr(data.spatial[np.logical_not(all_filter)], data.z[np.logical_not(all_filter)], dtype=np.float32)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster_all)\n",
"plt.show()\n",
"plt.imsave('../Res/tmp_all.png', raster_all)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Intensity filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_i = rasterizer.vhr(data.spatial[np.logical_not(intensity_filter)], data.z[np.logical_not(intensity_filter)], dtype=np.float32)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster_i)\n",
"plt.show()\n",
"plt.imsave('../Res/tmp_i.png', raster_i)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Z filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_z = rasterizer.vhr(data.spatial[np.logical_not(z_filter)], data.z[np.logical_not(z_filter)], dtype=np.float32)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster_z)\n",
"plt.show()\n",
"plt.imsave('../Res/tmp_z.png', raster_z)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Extremum clipping"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def extremum_filter(data, treshold=.5):\n",
" tresholds_i = np.percentile(data.intensity, [treshold, 100 - treshold])\n",
" tresholds_z = np.percentile(data.z, [treshold, 100 - treshold])\n",
"\n",
" intensity_filter = np.logical_or(data.intensity < tresholds_i[0], data.intensity > tresholds_i[1])\n",
" z_filter = np.logical_or(data.z < tresholds_z[0], data.z > tresholds_z[1])\n",
" all_filter = np.logical_or(intensity_filter, z_filter)\n",
" #inter_filter = np.logical_and(z_filter, intensity_filter)\n",
"\n",
" return all_filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"tresholds = (0., .1, .2, .5, 1.)\n",
"\n",
"fig, axs = plt.subplots(len(tresholds), figsize=figsize * len(tresholds))\n",
"\n",
"for ax, t in zip(axs, tresholds):\n",
" f = extremum_filter(data, t)\n",
" raster = rasterizer.vhr(data.spatial[np.logical_not(f)], data.z[np.logical_not(f)], dtype=np.float32)\n",
" ax.imshow(raster)\n",
" ax.set_title(str(t))\n",
" plt.imsave('../Res/extremum_clip_z_{}.png'.format(t), raster)\n",
"plt.show()\n",
" "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"0.1 % seems optimal."
]
},
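{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a usage sketch, the retained 0.1 % threshold could be applied directly to build and save a single denoised raster (the output file name below is only illustrative)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Apply the retained 0.1 % extremum clipping and rasterize the remaining points\n",
"f = extremum_filter(data, .1)\n",
"raster_clip = rasterizer.vhr(data.spatial[np.logical_not(f)], data.z[np.logical_not(f)], dtype=np.float32)\n",
"\n",
"plt.figure(figsize=figsize)\n",
"plt.imshow(raster_clip)\n",
"plt.show()\n",
"plt.imsave('../Res/extremum_clip_z_0.1_final.png', raster_clip)"
]
},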
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reproductivity"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"data2 = ra.bulk_load('../Data/lidar/C1/', filter_treshold=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"tresholds = (0., .1, .2, .5, 1.)\n",
"\n",
"fig, axs = plt.subplots(len(tresholds), figsize=figsize * len(tresholds))\n",
"\n",
"for ax, t in zip(axs, tresholds):\n",
" f = extremum_filter(data2, t)\n",
" raster = rasterizer.vhr(data2.spatial[np.logical_not(f)], data2.z[np.logical_not(f)], dtype=np.float32)\n",
" ax.imshow(raster)\n",
" ax.set_title(str(t))\n",
" plt.imsave('../Res/extremum_clip_z_{}.png'.format(t), raster)\n",
"plt.show()\n",
" "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -114,7 +114,7 @@
"metadata": {},
"outputs": [],
"source": [
"C1 = bulk_load('../Data/lidar/C1', 'C1')\n",
"C1 = bulk_load('../Data/lidar/C1/272056_3289689.las', 'C1')\n",
"#C2 = bulk_load('../Data/lidar/C2/', 'C2')\n",
"#C3 = bulk_load('../Data/lidar/C3/', 'C3')\n",
"#C123 = bulk_load('../Data/lidar/', 'C123') # Todo: Copy C1 C2 C3"
@@ -148,7 +148,8 @@
"outputs": [],
"source": [
"voxels_histo, edges = np.histogramdd(spatial_data, bins=bins)\n",
"voxels_histo.shape"
"voxels_histo = voxels_histo.astype(np.float32)\n",
"voxels_histo.shape, voxels_histo.dtype"
]
},
{
@@ -158,6 +159,7 @@
"outputs": [],
"source": [
"voxels_histo_i, edges = np.histogramdd(spatial_data, bins=bins, weights=C1['intensity'])\n",
"voxels_histo_i = voxels_histo_i.astype(np.float32)\n",
"voxels_histo_i.shape"
]
},
@@ -214,6 +216,15 @@
"raster"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"del voxels_histo, voxels_histo_i"
]
},
{
"cell_type": "code",
"execution_count": null,

View File

@@ -40,10 +40,10 @@
"metadata": {},
"outputs": [],
"source": [
"C1 = ra.bulk_load('../Data/lidar/C1', 'C1', filter_treshold=.5)\n",
"C2 = ra.bulk_load('../Data/lidar/C2', 'C1', filter_treshold=.5)\n",
"C3 = ra.bulk_load('../Data/lidar/C3', 'C1', filter_treshold=.5)\n",
"C123 = ra.bulk_load('../Data/lidar', 'C123', filter_treshold=.5)"
"C1 = ra.bulk_load('../Data/lidar/C1', 'C1', filter_treshold=.5, dtype=np.float32)\n",
"C2 = ra.bulk_load('../Data/lidar/C2', 'C2', filter_treshold=.5, dtype=np.float32)\n",
"C3 = ra.bulk_load('../Data/lidar/C3', 'C3', filter_treshold=.5, dtype=np.float32)\n",
"C123 = ra.bulk_load('../Data/lidar', 'C123', filter_treshold=.5, dtype=np.float32)"
]
},
{
@@ -61,7 +61,7 @@
"metadata": {},
"outputs": [],
"source": [
"raster = rasterizer.rasterize(C1.spatial, C1.intensity, 0.5)"
"raster = rasterizer.rasterize(C1.spatial, C1.intensity, 0.5, dtype=np.float32)"
]
},
{
@@ -207,7 +207,9 @@
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
"source": [
"C123.name"
]
},
{
"cell_type": "code",
@@ -221,21 +223,25 @@
"\n",
"def rasterize_cache_mp(data_var, field, res, method, reverse, cache):\n",
" if data_var == 'C1':\n",
" ra.rasterize_cache(C1, field, res, method, cache)\n",
" ra.rasterize_cache(C1, field, res, method, reverse, cache)\n",
" if data_var == 'C2':\n",
" ra.rasterize_cache(C2, field, res, method, cache)\n",
" ra.rasterize_cache(C2, field, res, method, reverse, cache)\n",
" if data_var == 'C3':\n",
" ra.rasterize_cache(C3, field, res, method, cache)\n",
" ra.rasterize_cache(C3, field, res, method, reverse, cache)\n",
" if data_var == 'C123':\n",
" ra.rasterize_cache(C123, field, res, method, cache)\n",
" ra.rasterize_cache(C123, field, res, method, reverse, cache)\n",
" \n",
"pool = Pool(processes=20)\n",
"pool = Pool(processes=5)\n",
"\n",
"job_args = list()\n",
"for field in ('z', 'intensity', 'num_returns', 'gps_time'):\n",
" for data in ('C1', 'C2', 'C3', 'C123'):\n",
" job_args.append([data, field, .5, 'linear', False, '../Res/HVR/'])\n",
" \n",
"\n",
"for res in (0.5, 1., 2., 3., 5., 10., .1):\n",
" for reverse in (False, True):\n",
" for inter in ('linear', 'nearest'):\n",
" for field in ('z', 'intensity', 'num_returns'):\n",
" for data in ('C1', 'C2', 'C3', 'C123'):\n",
" job_args.append([data, field, res, inter, reverse, '../Res/HVR/'])\n",
" \n",
"for i in pool.starmap(rasterize_cache_mp, job_args):\n",
" pass"
]

View File

@@ -30,7 +30,7 @@ def rasterize_cache(data, field, resolution=1., method='linear', reverse_alt=Fal
print ('WARNING: Loading cached result {}'.format(tif_file))
raster = triskele.read(tif_file)
else:
raster = rasterizer.rasterize(data.spatial, getattr(data, field), resolution, method, reverse_alt)
raster = rasterizer.rasterize(data.spatial, getattr(data, field), resolution, method, reverse_alt, np.float32)
triskele.write(tif_file, raster)
plt.imsave(png_file, raster)
@@ -49,13 +49,17 @@ def find_las(path):
    return las

def extremum_filter(data, treshold=.5):
    tresholds = np.percentile(data, [treshold, 100 - treshold])
    return np.logical_or(data < tresholds[0], data > tresholds[1])

def auto_filter(data, treshold=.5):
    tresholds = np.percentile(data, [treshold, 100 - treshold])
    data[data < tresholds[0]] = tresholds[0]
    data[data > tresholds[1]] = tresholds[1]

def bulk_load(path, name=None, filter_treshold=.1):
def bulk_load(path, name=None, filter_treshold=.1, dtype=None):
    data = {'file': path, 'name': name}
    attributes = ['x', 'y', 'z', 'intensity', 'num_returns']#, 'scan_angle_rank', 'pt_src_id', 'gps_time']
@@ -76,19 +80,24 @@ def bulk_load(path, name=None, filter_treshold=.1):
    print('Create matrices...', end='')
    for i, a in enumerate(attributes):
        print('\rCreate matrices: [{:3d}%]'.format(int(i/len(attributes) * 100)), end='')
        data[a] = np.array(data[a])
        data[a] = np.array(data[a], dtype=dtype)
    print('\rCreate matrices: [Done]')

    print('Filter data...', end='')
    Z = data['z'].copy()
    auto_filter(Z)
    data['spatial'] = np.array((data['x'], data['y'], Z)).T
    del Z

    t = .1
    efilter = np.logical_or(extremum_filter(data['intensity'], t), extremum_filter(data['z']))

    attributes.append('spatial')
    for i, a in enumerate(attributes):
        print('\rFilter data: [{:3d}%]'.format(int(i/len(attributes) * 100)), end='')
        if a == 'x' or a == 'y':
            continue
        auto_filter(data[a], filter_treshold)
        data[a] = data[a][np.logical_not(efilter)]
    print('\rFilter data: [Done]')
    data['spatial'] = np.array((data['x'], data['y'], data['z'])).T
class TMPLAS(object):
def __init__(self, d):
self.__dict__.update(d)

View File

@@ -11,7 +11,7 @@
import numpy as np
from scipy.interpolate import griddata
def rasterize(spatial, values, resolution=1., method='linear', reverse_alt=False):
def rasterize(spatial, values, resolution=1., method='linear', reverse_alt=False, dtype=None):
    '''
    Rasterize spatialised data.
@@ -29,27 +29,8 @@ def rasterize(spatial, values, resolution=1., method='linear', reverse_alt=False
    .. warning:: WIP
    '''
    bins = np.rint((spatial.max(axis=0) - spatial.min(axis=0)) / resolution).astype(int)
    print('Compute histogram...')
    voxels_histo, edges = np.histogramdd(spatial, bins=bins)
    raster = vhr(spatial, values, resolution, reverse_alt, dtype)
    print('Compute weighted histogram...')
    voxels_histo_v, edges = np.histogramdd(spatial, bins=bins, weights=values)
    layers = voxels_histo.shape[2]
    raster = np.zeros_like(voxels_histo.T[0])
    raster[:] = np.nan
    print('Compute voxels value...')
    voxels = (voxels_histo_v / voxels_histo).T
    print('Stack voxels...')
    for i in reversed(range(layers)) if not reverse_alt else range(layers):
        nan_filter = np.isnan(raster)
        raster[nan_filter] = voxels[i][nan_filter]
    print('Interpolate NoData...')
    if method != 'nearest':
        raster = interpolate(raster, method)
@@ -57,6 +38,38 @@ def rasterize(spatial, values, resolution=1., method='linear', reverse_alt=False
    print('Final touches...')
    raster = interpolate(raster, 'nearest')
    return raster
def vhr(spatial, values, resolution=1., reverse_alt=False, dtype=None):
    dtype = values.dtype if dtype is None else dtype

    bins = np.rint((spatial.max(axis=0) - spatial.min(axis=0)) / resolution).astype(int)

    print('Compute histogram...')
    voxels_histo, edges = np.histogramdd(spatial, bins=bins)
    voxels_histo = voxels_histo.astype(dtype)

    print('Compute weighted histogram...')
    voxels_histo_v, edges = np.histogramdd(spatial, bins=bins, weights=values)
    voxels_histo_v = voxels_histo_v.astype(dtype)

    layers = voxels_histo.shape[2]

    print('Compute voxels value...')
    voxels = (voxels_histo_v / voxels_histo).T

    raster = np.zeros_like(voxels_histo.T[0], dtype=dtype)
    raster[:] = np.nan
    del voxels_histo_v, voxels_histo

    print('Stack voxels...')
    for i in reversed(range(layers)) if not reverse_alt else range(layers):
        nan_filter = np.isnan(raster)
        raster[nan_filter] = voxels[i][nan_filter]
    del voxels

    return np.flip(raster, 0)
def interpolate(raster, method='linear'):