Merge branch 'develop' into 'master'

Merge v1.5

See merge request Florent/idefix!2
Florent Guiotte 2021-02-09 18:11:58 +00:00
commit 0906183e49
9 changed files with 246 additions and 22 deletions

View File

@@ -9,6 +9,6 @@ Utils and production pipelines for processing LiDAR point clouds.
"""
__all__ = ['utils', 'io', 'vxl']
__all__ = ['utils', 'io', 'vxl', 'helpers']
from . import utils, io, vxl
from . import utils, io, vxl, helpers

idefix/helpers.py Normal file (137 additions)
View File

@@ -0,0 +1,137 @@
#!/usr/bin/env python
# file helpers.py
# author Florent Guiotte <florent.guiotte@irisa.fr>
# version 0.0
# date 24 August 2020
"""High-level helper functions.
This module contains high-level helper functions. It also shows many
examples of how to use the idefix package together with other packages
(sap, rasterio, ...) to process point clouds.
"""
import numpy as np
from scipy.interpolate import griddata
from rasterio import fill
import sap
import higra as hg
from .vxl import get_grid, bin, squash
def interpolate(raster, method='linear'):
"""Interpolate masked raster.
Wrapper function to interpolate missing values in a masked raster.
The 'linear', 'nearest' and 'cubic' implementations are from `Scipy`_
while the 'idw' (inverse distance weighting) is provided by
`rasterio`_.
.. _Scipy: https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html
.. _rasterio: https://rasterio.readthedocs.io/en/latest/api/rasterio.fill.html
Parameters
----------
raster : masked ndarray
The raster with missing values masked.
method : str
Can be 'linear', 'nearest', 'cubic' or 'idw'.
Returns
-------
out : ndarray
The raster with filled missing values.
"""
if method == 'idw':
raster = fill.fillnodata(raster)
else:
coords = np.argwhere(~raster.mask)
values = raster.compressed()
grid = np.argwhere(raster.mask)
raster[raster.mask] = griddata(coords, values, grid, method=method)
if method != 'nearest':
raster.mask = np.isnan(raster)
raster = interpolate(raster, 'nearest')
raster = np.array(raster)
assert not np.isnan(raster).any()
return raster
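# Usage sketch for interpolate(): fill the masked cells of a small synthetic
# raster (the demo array below is an illustrative assumption, not repository data).
import numpy as np
from idefix import helpers

demo = np.ma.masked_invalid([[1., np.nan, 3.],
                             [4., 5., np.nan],
                             [7., 8., 9.]])
filled = helpers.interpolate(demo, method='linear')
# 'linear' and 'cubic' can leave NaN outside the convex hull of known cells,
# which is why interpolate() falls back to a 'nearest' pass before returning.
assert not np.isnan(filled).any()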
def dsm(pcloud, cell_size=1., last=False):
"""Create the digital surface model (DSM) of the point cloud.
Parameters
----------
pcloud : recarray
A point cloud loaded with :mod:`idefix.io`.
cell_size : scalar
The size of the cells in meters. Cells are square. Default is 1
meter.
last : bool
Specifies whether the first echo (`False`) or the last echo
(`True`) should be taken into account. Default is `False`.
Returns
-------
dsm : ndarray
The DSM of the point cloud.
"""
grid = get_grid(pcloud.spatial, cell_size)
vxlg = bin(grid, pcloud.spatial, pcloud.spatial[:,2], 'mean')
rstr = squash(vxlg, 'bottom' if last else 'top')
rstr = interpolate(rstr, 'idw')
return rstr
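# Usage sketch for dsm(): rasterize a point cloud at 50 cm resolution. The
# file name 'cloud.txt' and its column layout are assumptions for illustration;
# io.load_txt is the loader used in the test suite.
from idefix import io, helpers

pc = io.load_txt('cloud.txt', 'x y z i'.split())        # hypothetical x y z intensity file
surface = helpers.dsm(pc, cell_size=.5)                  # first-echo surface, IDW-filled
ground_like = helpers.dsm(pc, cell_size=.5, last=True)   # keep the bottom voxel of each cell (last echo)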
def dtm_dh_filter(dsm, sigma=.5, epsilon=20000, alpha=2):
"""Compute a digital terrain model (DTM) from a DSM.
Works best with a DSM of the last echo.
Parameters
----------
dsm : ndarray
The DSM.
sigma : scalar
The height threshold to trigger object detection. Default is
0.5 m.
epsilon : scalar
The area threshold for ground objects. All objects with a surface
greater than epsilon are forced to be ground. Default is 20 000 cells
(2 ha with 1 m cells).
alpha : scalar
The area threshold of the horizontal noise filter. Area variations
smaller than alpha are ignored when applying the height threshold
sigma. Default is 2 cells.
Returns
-------
dtm : ndarray
The DTM computed from the DSM.
"""
mt = sap.MaxTree(dsm)
area = mt.get_attribute('area')
# Largest child area of each node, to measure area variation between levels.
area_child = hg.accumulate_parallel(mt._tree, area, hg.Accumulators.max)
# Prune nodes whose area differs from their largest child by alpha or less.
pruned = (area - area_child) <= alpha
pruned_tree, pruned_map = hg.simplify_tree(mt._tree, pruned)
# Height step between each remaining node and its parent.
dh = mt._alt[pruned_map] - mt._alt[pruned_map][pruned_tree.parents()]
remove = dh > sigma
# Map each node of the original tree to its representative in the pruned tree.
original_map = np.zeros(mt.num_nodes(), dtype=int)
original_map[pruned_map] = np.arange(pruned_map.size)
original_map = hg.accumulate_and_max_sequential(mt._tree, original_map, np.arange(mt._tree.num_leaves()), hg.Accumulators.max)
# Remove objects (height step above sigma) whose area is below epsilon.
original_remove = remove[original_map] & (area < epsilon)
dtm = mt.reconstruct(original_remove, filtering='min')
return dtm
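# Usage sketch chaining dsm() and dtm_dh_filter() into a normalized height map
# (the file name and parameter values are illustrative assumptions).
from idefix import io, helpers

pc = io.load_txt('cloud.txt', 'x y z i'.split())
last_dsm = helpers.dsm(pc, last=True)                    # last-echo DSM, closer to the ground
dtm = helpers.dtm_dh_filter(last_dsm, sigma=.5, epsilon=20000, alpha=2)
ndsm = helpers.dsm(pc) - dtm                             # normalized DSM: object heights above ground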

View File

@@ -10,14 +10,14 @@ This module contains common utils for basic point cloud management and dataviz.
Notes
-----
Everything should be highly tested there.
Everything is well tested there.
"""
import numpy as np
import logging
log = logging.getLogger(__name__)
log = logging.getLogger(__name__)
def first(a):
"""Returns the inverse of the parameter.
@@ -39,7 +39,7 @@ def first(a):
def bbox(data):
"""Returns bounding box of data.
This function returns the lower and the upper points describing the
bounding box of the points contained in data.

View File

@@ -12,7 +12,6 @@ import logging
import numpy as np
import humanize
from .utils import bbox
import mayavi.mlab as mlab
log = logging.getLogger(__name__)
@@ -30,7 +29,7 @@ def _ui_step(step, spatial):
out_step = [step] * spatial.shape[-1]
for s in out_step:
if s <= 0:
if s and s <= 0:
msg = 'Step should be greater than 0, steps = \'{}\'.'.format(step)
log.error(msg)
raise ValueError(msg)
@@ -45,10 +44,12 @@ def get_grid(spatial, step):
Parameters
----------
spatial : array (m, n)
The spatial point cloud or the corresponding bounding box to grid.
The spatial point cloud or the corresponding bounding box to
grid.
step : number or array or tuple
The step of the grid, can be a number to get an isotropic grid, or an
iterable of size 3 (required) to get an anisotropic grid.
The step of the grid, can be a number to get an isotropic grid,
or an iterable of size 3 (required) to get an anisotropic grid.
Value can be `None` to define an undivided axis.
Returns
-------
@@ -63,8 +64,11 @@ def get_grid(spatial, step):
grid = []
for a_min, a_max, a_s in zip(bb[0], bb[1], step):
# Beware of float underflow
bins = np.trunc((a_max - a_min) / a_s).astype(int) + 1
grid += [np.linspace(a_min, a_min + bins * a_s, bins + 1)]
if a_s:
bins = np.trunc((a_max - a_min) / a_s).astype(int) + 1
grid += [np.linspace(a_min, a_min + bins * a_s, bins + 1)]
else:
grid += [np.array((a_min, a_max + 1))]
return grid
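# Quick sketch of the new `None` step handling in get_grid(): grid x and y
# every meter but keep z as a single undivided slab (the demo points below are
# an illustrative assumption).
import numpy as np
from idefix import vxl

pts = np.array([[0., 0., 0.],
                [2.5, 1., 3.],
                [1., 2., 7.]])
grid = vxl.get_grid(pts, [1., 1., None])
assert len(grid[2]) == 2     # undivided z axis: a single bin (z_min, z_max + 1)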
@@ -91,7 +95,7 @@ def bin(grid, spatial, feature=None, method='density'):
- 'mean': The mean of feature value in each cell.
- 'mode': The modal (most common) value in each cell. Designed for labels on
point clouds; can be slow with rich spectral data. If there is an
equal number of elements, then the smallest is returned.
equal number of elements, the smallest is returned.
The default is 'density'.
Returns
@@ -371,6 +375,8 @@ def plot(voxel_grid, vmin=None, vmax=None):
>>> mlab.savefig(fname, magnification=4)
>>> mlab.show()
"""
import mayavi.mlab as mlab
points = np.where(~voxel_grid.mask)
if vmin or vmax:

View File

@@ -8,14 +8,37 @@
#
# TODO details
from distutils.core import setup
import setuptools
setup(name='idefix',
version='1.4',
description='Utils and processing pipelines for LiDAR point clouds',
author='Florent Guiotte',
author_email='florent.guiotte@uhb.fr',
url='https://git.guiotte.fr/Florent/Idefix',
packages=['idefix', 'idefix.tools'],
entry_points = {'console_scripts':['txt2npz = idefix.tools.txt_to_npz:main',]},
with open('README.md', 'r') as fh:
long_description = fh.read()
setuptools.setup(
name='idefix',
version='0.1.5',
description='Utils and processing pipelines for LiDAR point clouds',
author='Florent Guiotte',
author_email='florent.guiotte@uhb.fr',
url='https://git.guiotte.fr/Florent/Idefix',
long_description=long_description,
long_description_content_type='text/markdown',
packages=['idefix', 'idefix.tools'],
entry_points = {'console_scripts':['txt2npz = idefix.tools.txt_to_npz:main',]},
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved',
'Operating System :: OS Independent',
],
python_requires='>=3.6',
install_requires=[
'numpy',
'sap',
'tqdm',
'matplotlib',
'pathlib',
'rasterio',
'laspy',
'humanize',
# 'mayavi',  # optional, only needed for vxl.plot()
],
)

test/test_helpers.py Normal file (46 additions)
View File

@@ -0,0 +1,46 @@
#!/usr/bin/env python
# file test_helpers.py
# author Florent Guiotte <florent.guiotte@irisa.fr>
# version 0.0
# date 24 August 2020
"""Abstract
doc.
"""
import numpy as np
import pytest
from idefix import helpers, io
@pytest.fixture
def ma_raster():
rs = np.random.RandomState(42)
raster = rs.random((10,10))
raster = np.ma.array(raster, mask=raster<.1)
return raster
@pytest.mark.parametrize('method',
['nearest', 'linear', 'cubic', 'idw'])
def test_interpolate(ma_raster, method):
helpers.interpolate(ma_raster, method)
def _data_pc(datadir, set_id):
path = datadir.join('pc{}.txt'.format(set_id))
data = io.load_txt(path, 'x y z i'.split())
return data
@pytest.mark.parametrize('params', [
{},
{'cell_size': 2.},
{'last': True}])
def test_dsm(datadir, params):
pc = _data_pc(datadir, 0)
dsm = helpers.dsm(pc, **params)
assert dsm is not None, 'Did not return anything...'
assert not np.isnan(dsm).any(), 'Some missing values in DSM'
def test_dtm(ma_raster):
dtm = helpers.dtm_dh_filter(ma_raster)
assert dtm is not None, 'Did not return anything...'

View File

@@ -0,0 +1,8 @@
# x y z feature
1 1 1 2
1 3 2 5
1 3 2 5
1 3 2 10
1 3 2 20
10 10 10 1
5 5 5 0

View File

@@ -75,6 +75,7 @@ def data_grid(datadir, set_id, step_id):
('0', .7, '0_7'),
('0', .15, '0_15'),
('0', [1.,1.,2.] , '1-1-2'),
('0', [1.,1.,None] , '1-1-n'),
])
def test_get_grid(datadir, set_id, step, grid_id):
spatial = data_pc(datadir, set_id).spatial

View File

@@ -0,0 +1,3 @@
1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0
1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0
1.0 11.0