Compare commits


17 Commits
v1.2 ... master

Author SHA1 Message Date
0906183e49 Merge branch 'develop' into 'master'
Merge v1.5

See merge request Florent/idefix!2
2021-02-09 18:11:58 +00:00
ae0308374c bump version 2021-02-09 19:09:39 +01:00
e0bcb484c5 Update get_grid doc 2020-08-30 15:36:52 +02:00
7996cd6fdb Add DTM filter and 2D grids 2020-08-30 15:33:04 +02:00
cd462fc5a4 Add helpers module with dsm() and interpolate() 2020-08-27 14:00:34 +02:00
01cf68ea8b New setup.py and add interpolate utils 2020-08-24 17:05:06 +02:00
8273de1c17 Merge branch 'develop' into 'master'
Add tool scripts

See merge request Florent/idefix!1
2020-08-19 13:32:52 +00:00
653fb1afa3 Bump version 2020-08-19 15:29:11 +02:00
2b912b7347 Update ignore, re-enable laz test 2020-08-19 15:27:55 +02:00
b00ebafd1b Workaround missing num_returns in LAS header 2020-05-28 18:16:26 +02:00
f2c0c6e8d6 Bin mean return feature dtype 2019-05-28 16:58:36 +02:00
b1a2d50a14 Refactor misc to tool, scripts to setup 2019-05-24 15:18:23 +02:00
a21662bf68 Add txt to npz script 2019-05-24 14:32:55 +02:00
95bdf3b724 Squash dtype 2019-05-24 14:31:09 +02:00
362276efd6 Dump and load point clouds passing tests 2019-04-16 18:53:00 +02:00
7bdad094b2 Add load point cloud in IO 2019-04-15 17:55:49 +02:00
3dcf9bec74 Fix count, allow point-cloud without feature 2019-04-11 15:47:46 +02:00
20 changed files with 524 additions and 38 deletions

.gitignore vendored (+1)

@@ -3,3 +3,4 @@ doc/_build
 doc/source
 __pycache__
 log/
+.coverage

idefix/__init__.py

@@ -9,6 +9,6 @@ Utils and production pipelines for processing LiDAR point clouds.
 """
-__all__ = ['utils', 'io', 'vxl']
+__all__ = ['utils', 'io', 'vxl', 'helpers']
-from . import utils, io, vxl
+from . import utils, io, vxl, helpers

idefix/helpers.py Normal file (+137)

@@ -0,0 +1,137 @@
#!/usr/bin/env python
# file helpers.py
# author Florent Guiotte <florent.guiotte@irisa.fr>
# version 0.0
# date 24 August 2020
"""High-level helper functions.

This module contains high-level helper functions. It shows many examples
of using the idefix package together with other packages (sap, rasterio,
...) to process point clouds.
"""

import numpy as np
from scipy.interpolate import griddata
from rasterio import fill
import sap
import higra as hg
from .vxl import get_grid, bin, squash


def interpolate(raster, method='linear'):
    """Interpolate masked raster.

    Wrapper function to interpolate missing values in a masked raster.
    The 'linear', 'nearest' and 'cubic' implementations are from `Scipy`_
    while 'idw' (inverse distance weighting) is provided by `rasterio`_.

    .. _Scipy: https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html
    .. _rasterio: https://rasterio.readthedocs.io/en/latest/api/rasterio.fill.html

    Parameters
    ----------
    raster : masked ndarray
        The raster with missing values masked.
    method : str
        Can be 'linear', 'nearest', 'cubic' or 'idw'.

    Returns
    -------
    out : ndarray
        The raster with missing values filled.
    """
    if method == 'idw':
        raster = fill.fillnodata(raster)
    else:
        coords = np.argwhere(~raster.mask)
        values = raster.compressed()
        grid = np.argwhere(raster.mask)
        raster[raster.mask] = griddata(coords, values, grid, method=method)
        if method != 'nearest':
            # 'linear' and 'cubic' leave NaN outside the convex hull:
            # fall back on nearest neighbour for those cells
            raster.mask = np.isnan(raster)
            raster = interpolate(raster, 'nearest')

    raster = np.array(raster)
    assert not np.isnan(raster).any()

    return raster


def dsm(pcloud, cell_size=1., last=False):
    """Create the digital surface model (DSM) of the point cloud.

    Parameters
    ----------
    pcloud : recarray
        A point cloud loaded with :mod:`idefix.io`.
    cell_size : scalar
        The size of the cells in meters. Cells are square. Default is
        1 meter.
    last : bool
        Specifies whether the first echo (`False`) or the last echo
        (`True`) should be taken into account. Default is `False`.

    Returns
    -------
    dsm : ndarray
        The DSM of the point cloud.
    """
    grid = get_grid(pcloud.spatial, cell_size)
    vxlg = bin(grid, pcloud.spatial, pcloud.spatial[:,2], 'mean')
    rstr = squash(vxlg, 'bottom' if last else 'top')
    rstr = interpolate(rstr, 'idw')

    return rstr


def dtm_dh_filter(dsm, sigma=.5, epsilon=20000, alpha=2):
    """Compute a digital terrain model (DTM) from a DSM.

    Works best with a last-echo DSM.

    Parameters
    ----------
    dsm : ndarray
        The DSM.
    sigma : scalar
        The height threshold to trigger object detection. Default is
        0.5 m.
    epsilon : scalar
        The area threshold for ground objects. All objects with a
        surface greater than epsilon are forced to be ground. Default
        is 20000 (2 ha with 1 m cells).
    alpha : scalar
        The area threshold for the horizontal noise filter. Area
        variations smaller than alpha are ignored for the detection of
        the height threshold sigma. Default is 2.

    Returns
    -------
    dtm : ndarray
        The DTM computed from the DSM.
    """
    mt = sap.MaxTree(dsm)
    area = mt.get_attribute('area')
    area_child = hg.accumulate_parallel(mt._tree, area, hg.Accumulators.max)
    pruned = (area - area_child) <= alpha
    pruned_tree, pruned_map = hg.simplify_tree(mt._tree, pruned)
    dh = mt._alt[pruned_map] - mt._alt[pruned_map][pruned_tree.parents()]
    remove = dh > sigma
    original_map = np.zeros(mt.num_nodes(), dtype=int)
    original_map[pruned_map] = np.arange(pruned_map.size)
    original_map = hg.accumulate_and_max_sequential(
        mt._tree, original_map, np.arange(mt._tree.num_leaves()),
        hg.Accumulators.max)
    original_remove = remove[original_map] & (area < epsilon)
    dtm = mt.reconstruct(original_remove, filtering='min')

    return dtm
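
Taken together, `dsm` and `dtm_dh_filter` form a complete ground-extraction pipeline. A minimal usage sketch, assuming a hypothetical input file `scene.las`:

```python
from idefix import io, helpers

pc = io.load_las('scene.las')  # hypothetical input point cloud

# Last-echo DSM at 1 m resolution, then max-tree based DTM filtering
dsm_last = helpers.dsm(pc, cell_size=1., last=True)
dtm = helpers.dtm_dh_filter(dsm_last)

# Heights above ground (normalized surface model), a common follow-up
nsm = helpers.dsm(pc) - dtm
```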

idefix/io.py

@@ -11,6 +11,7 @@ General functions to load and dump data in various formats.
 import logging
 from pathlib import Path
 import numpy as np
+import numpy.core.records as rcd
 from numpy.lib import recfunctions as rfn
 import laspy
@@ -36,11 +37,7 @@ def load_las(fname):
     [(spatial), (feature, [f1, f2, ..., fn])]
     '''
-    fname = Path(fname)
-    if not fname.is_file():
-        msg = 'No such file: \'{}\''.format(fname)
-        log.error(msg)
-        raise IOError(msg)
+    fname = _get_verify_path(fname)
 
     log.info('Loading LAS file \'{}\'...'.format(fname))
     try:
@@ -67,6 +64,14 @@ def load_las(fname):
         feature_data.append(att)
         feature_dtype.append((spec.name, att.dtype))
 
+    # XXX: Workaround missing num_returns in header
+    try:
+        att = infile.num_returns
+        feature_data.append(att)
+        feature_dtype.append(('num_returns', att.dtype))
+    except Exception:
+        pass
+
     log.debug('Create feature recarray')
     feature = np.core.records.fromarrays(feature_data, dtype=feature_dtype)
     del feature_data, feature_dtype
@@ -106,11 +111,7 @@ def load_txt(fname, header, delimiter=' ', dtype=None):
     [(spatial), (feature, [f1, f2, ..., fn])]
     '''
-    fname = Path(fname)
-    if not fname.is_file():
-        msg = 'No such file: \'{}\''.format(fname)
-        log.error(msg)
-        raise IOError(msg)
+    fname = _get_verify_path(fname)
 
     if dtype is not None:
         assert len(dtype) == len(header), 'dtype and header must be the same size'
@@ -143,6 +144,9 @@ def load_txt(fname, header, delimiter=' ', dtype=None):
     for i in ('x', 'y', 'z'):
         header_c.remove(i)
 
+    if not header_c:
+        return spatial
+
     log.debug('Create feature recarray')
     feature = raw_txt[header_c]
@@ -150,3 +154,72 @@ def load_txt(fname, header, delimiter=' ', dtype=None):
     pcloud = rfn.append_fields(spatial, 'feature', feature, usemask=False, asrecarray=True)
 
     return pcloud
+
+
+def _get_verify_path(fname):
+    fname = Path(fname)
+    if not fname.is_file():
+        msg = 'No such file: \'{}\''.format(fname)
+        log.error(msg)
+        raise IOError(msg)
+    return fname
+
+
+def _arr_to_rec(arr):
+    """Array to record array.
+
+    Used for point clouds, should work for everything else though...
+    """
+    arrays = []; dtypes = []
+    for k in arr.dtype.fields.keys():
+        arrays += [arr[k]]
+        dtypes += [(k, arr.dtype[k])]
+    return np.core.records.fromarrays(arrays, dtypes)
+
+
+def load_pc(fname):
+    """Load point cloud from file.
+
+    Loader for point clouds contained in compatible '.npz' files. This
+    "point cloud" format is based on NumPy files, with a small overhead
+    to manage record arrays and multispectral point clouds.
+
+    Parameters
+    ----------
+    fname : str, Path
+        Path to the point cloud file to load.
+
+    Returns
+    -------
+    point_cloud : recarray or tuple of recarray
+        The point cloud or tuple of point clouds (for multispectral
+        point cloud files).
+    """
+    log.info('Loading point cloud file \'{}\''.format(fname))
+    fname = _get_verify_path(fname)
+
+    archive = np.load(fname)
+
+    if len(archive.files) == 1:
+        return _arr_to_rec(archive[archive.files[0]])
+    else:
+        return tuple(_arr_to_rec(archive[arr]) for arr in archive.files)
+
+
+def dump_pc(fname, point_cloud, compress=False):
+    """Dump point cloud to file.
+
+    Write a point cloud (or several point clouds) in a '.npz' file.
+
+    Parameters
+    ----------
+    fname : str, Path
+        Path to the point cloud file to create.
+    point_cloud : recarray or tuple of recarray
+        The point cloud (or the tuple of point clouds) to dump.
+    compress : bool
+        Enable compression of the dumped file. Default is False.
+    """
+    if hasattr(point_cloud, 'spatial'):
+        point_cloud = (point_cloud, )
+
+    if compress:
+        np.savez_compressed(fname, *point_cloud)
+    else:
+        np.savez(fname, *point_cloud)
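
A round-trip sketch of the new `load_pc`/`dump_pc` pair, assuming hypothetical file names (`pc0.txt` mirrors the test data format further down):

```python
from idefix import io

pc = io.load_txt('pc0.txt', ['x', 'y', 'z', 'i'])  # hypothetical input

io.dump_pc('pc0.npz', pc, compress=True)   # one cloud -> one array in the archive
pc_back = io.load_pc('pc0.npz')            # returns a single recarray

io.dump_pc('multi.npz', (pc, pc_back))     # tuple -> multispectral file
c1, c2 = io.load_pc('multi.npz')           # returns a tuple of recarrays
```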

idefix/tools/txt_to_npz.py Executable file (+48)

@@ -0,0 +1,48 @@
#!/usr/bin/env python
# file txt_to_npz.py
# author Florent Guiotte <florent.guiotte@uhb.fr>
# version 0.0
# date 24 May 2019
"""Convert point clouds from text files to Idefix file format.

doc.
"""
import numpy as np
import idefix.io as io
from pathlib import Path
import argparse
from tqdm import tqdm


def txt_to_npy(fname, header, delimiter=None, dtype=None, compression=False):
    oname = fname.stem + '.npz'
    pc = io.load_txt(fname, header, delimiter, dtype)
    io.dump_pc(oname, pc, compression)


def main():
    parser = argparse.ArgumentParser(description='Convert point clouds from text files to Idefix file format.')
    parser.add_argument('file', type=str, help='file or dir to convert')
    parser.add_argument('header', type=str, help='field names of the data')
    parser.add_argument('--dtype', '-t', type=str, help='field data types')
    parser.add_argument('--delimiter', '-d', type=str, default=',', help='field data delimiter')
    parser.add_argument('--compress', '-c', action='store_true', default=False, help='enable data compression')
    args = parser.parse_args()

    header = args.header.split()
    dtype = [np.dtype(x) for x in args.dtype.split()] if args.dtype else None
    delimiter = args.delimiter
    compress = args.compress

    wd = Path(args.file)
    if wd.is_dir():
        files = wd.glob('*.txt')
    else:
        files = (wd,)

    pbar = tqdm(list(files))
    for f in pbar:
        pbar.write('Processing {}...'.format(f))
        txt_to_npy(f, header, delimiter, dtype, compress)


if __name__ == '__main__':
    main()
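
Besides the `txt2npz` console script registered in setup.py below, the conversion can be driven from Python. A sketch assuming a hypothetical space-delimited `points.txt` with columns x, y, z and i:

```python
from pathlib import Path
from idefix.tools.txt_to_npz import txt_to_npy

# Writes 'points.npz' in the current working directory
txt_to_npy(Path('points.txt'), ['x', 'y', 'z', 'i'],
           delimiter=' ', compression=True)
```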

idefix/utils.py

@@ -10,14 +10,14 @@ This module contains common utils for basic point cloud management and dataviz.
 
 Notes
 -----
-Everything should be highly tested there.
+Everything is well tested there.
 """
 
 import numpy as np
 import logging
 
-log = logging.getLogger(__name__)
+log = logging.getLogger(__name__)
 
 def first(a):
     """Returns the inverse of the parameter.
 
@@ -39,7 +39,7 @@ def first(a):
 def bbox(data):
     """Returns bounding box of data.
 
     This function returns the lower and the upper points describing the
     bounding box of the points contained in data.

idefix/vxl.py

@@ -12,7 +12,6 @@ import logging
 import numpy as np
 import humanize
 from .utils import bbox
-import ipdb
 
 log = logging.getLogger(__name__)
@@ -30,7 +29,7 @@ def _ui_step(step, spatial):
     out_step = [step] * spatial.shape[-1]
 
     for s in out_step:
-        if s <= 0:
+        if s and s <= 0:
             msg = 'Step should be greater than 0, steps = \'{}\'.'.format(step)
             log.error(msg)
             raise ValueError(msg)
@@ -45,10 +44,12 @@ def get_grid(spatial, step):
     Parameters
     ----------
     spatial : array (m, n)
-        The spatial point cloud or the corresponding bounding box to grid.
+        The spatial point cloud or the corresponding bounding box to
+        grid.
     step : number or array or tuple
-        The step of the grid, can be a number to get an isotropic grid, or an
-        iterable of size 3 (required) to get an anisotropic grid.
+        The step of the grid, can be a number to get an isotropic grid,
+        or an iterable of size 3 (required) to get an anisotropic grid.
+        Value can be `None` to define an undivided axis.
 
     Returns
     -------
@@ -63,8 +64,11 @@ def get_grid(spatial, step):
     grid = []
     for a_min, a_max, a_s in zip(bb[0], bb[1], step):
         # Beware of float underflow
-        bins = np.trunc((a_max - a_min) / a_s).astype(int) + 1
-        grid += [np.linspace(a_min, a_min + bins * a_s, bins + 1)]
+        if a_s:
+            bins = np.trunc((a_max - a_min) / a_s).astype(int) + 1
+            grid += [np.linspace(a_min, a_min + bins * a_s, bins + 1)]
+        else:
+            grid += [np.array((a_min, a_max + 1))]
 
     return grid
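
A short sketch of the new `None` step on a toy cloud; the undivided axis is kept as a single bin:

```python
import numpy as np
from idefix import vxl

spatial = np.random.random((1000, 3)) * 10  # toy cloud in a 10 m cube

# 1 m cells in x and y, z left undivided thanks to the None step
grid = vxl.get_grid(spatial, [1., 1., None])
# grid[2] holds only two edges covering the whole z range
```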
@@ -91,7 +95,7 @@ def bin(grid, spatial, feature=None, method='density'):
         - 'mean': The mean of feature value in each cell.
         - 'mode': The modal (most common) in each cell. Designed for labels on
           point cloud, can be long with rich spectral data. If there is an
-          equal number of elements, then the smallest is returned.
+          equal number of elements, the smallest is returned.
 
         The default is 'density'.
 
     Returns
@@ -131,7 +135,8 @@ def _bin_mean(grid, spatial, feature):
     density, edge = np.histogramdd(spatial, grid)
     weightd, edge = np.histogramdd(spatial, grid, weights=feature)
     mask = density == 0
-    return np.ma.masked_array(np.divide(weightd, density, where=~mask), mask)
+    return np.ma.masked_array(np.divide(weightd, density, where=~mask),
+                              mask, dtype=feature.dtype)
 
 def _bin_mode(grid, spatial, feature):
     '''Bin spatial in a grid, mode method.
@@ -237,7 +242,9 @@ def insight(grid, feature=None, method='density', mem_limit=None, verbose=False)
     print('\n'.join(lines))
 
     if mem_limit and mem_usage > mem_limit:
-        msg = 'The memory requirement is higher than allowed memory usage.'
+        msg = 'The memory requirement is higher than '\
+              'maximum authorized memory usage ({} needed).'.format(
+                  humanize.naturalsize(mem_usage, binary=True))
         log.error(msg)
         raise MemoryError(msg)
@@ -249,7 +256,7 @@ def _print_insight(grid, mem_usage, mem_limit):
         'Grid shape: \t{}'.format([x.size - 1 for x in grid]),
         'Number of cells:\t{}'.format(humanize.intword(_bin_insight(grid))),
         'Predicted RAM usage:\t{}'.format(humanize.naturalsize(mem_usage, binary=True)),
-        'Allowed max RAM usage:\t{}'.format(humanize.naturalsize(mem_limit, binary=True) if mem_limit else 'Not set'),
+        'Max allowed RAM usage:\t{}'.format(humanize.naturalsize(mem_limit, binary=True) if mem_limit else 'Not set'),
         '--------------------',]
 
     return print_lines
@@ -268,6 +275,9 @@ def _geo_to_np_coordinate(raster):
     '''
     return np.flip(np.swapaxes(raster, 0, 1), 0)
 
+def _np_to_geo_coordinate(raster):
+    return np.swapaxes(np.flip(raster, 0), 1, 0)
+
 def _squash_position(voxel_grid, method, axis):
     squash_mask = np.zeros_like(voxel_grid, dtype=np.int)
     mask_idx = (~voxel_grid.mask).nonzero()
@@ -284,7 +294,7 @@ def _squash_position(voxel_grid, method, axis):
     voxel_grid_where = list(xy_where)
     voxel_grid_where.insert(axis%(len(voxel_grid_where)+1), squash_id.compressed())
 
-    raster = np.zeros_like(squash_id)
+    raster = np.zeros_like(squash_id, dtype=voxel_grid.dtype)
     raster[xy_where] = voxel_grid[tuple(voxel_grid_where)]
 
     return raster
@@ -326,7 +336,7 @@ def squash(voxel_grid, method='top', axis=-1):
     if method in ('top', 'center', 'bottom'):
         return _squash_position(voxel_grid, method, axis)
     elif method == 'count':
-        return ~voxel_grid.mask.sum(axis=axis)
+        return np.sum(~voxel_grid.mask, axis=axis)
     elif method == 'mean':
         return voxel_grid.mean(axis=axis)
     elif method == 'median':
@@ -340,3 +350,39 @@ def squash(voxel_grid, method='top', axis=-1):
         raise NotImplementedError('Method \'{}\' does not exist.'.format(method))
+
+
+def plot(voxel_grid, vmin=None, vmax=None):
+    """Plot voxel grid with Mayavi.
+
+    Parameters
+    ----------
+    voxel_grid : masked array (3D)
+        The voxel grid to plot.
+    vmin, vmax : scalar, optional
+        Define the data range that the colormap covers.
+
+    Returns
+    -------
+    figure : mlab figure
+        The figure instance.
+
+    Examples
+    --------
+    >>> a = np.random.random((10,10,10))
+    >>> view = {}
+    >>> mlab.clf()
+    >>> vxl.plot(a)
+    >>> mlab.view(**view)
+    >>> mlab.savefig(fname, magnification=4)
+    >>> mlab.show()
+    """
+    import mayavi.mlab as mlab
+
+    points = np.where(~voxel_grid.mask)
+    if vmin or vmax:
+        disp_value = np.clip(voxel_grid[points], vmin, vmax)
+    else:
+        disp_value = voxel_grid[points]
+
+    voxels_display = mlab.points3d(*points, disp_value, mode='cube',
+                                   scale_factor=1, scale_mode='none',
+                                   opacity=1., colormap='viridis')
+
+    return voxels_display
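
The pieces above combine as follows; a toy sketch in the spirit of idefix_plot.py below (Mayavi is only needed for the last line):

```python
import numpy as np
from idefix import vxl

spatial = np.random.random((10000, 3))
feature = np.random.random(10000)

grid = vxl.get_grid(spatial, .1)
vg = vxl.bin(grid, spatial, feature, 'mean')  # masked 3D voxel grid

top = vxl.squash(vg, 'top')      # value of the highest filled voxel per column
count = vxl.squash(vg, 'count')  # number of filled voxels per column

vxl.plot(vg)                     # requires mayavi
```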

idefix_plot.py Normal file (+22)

@@ -0,0 +1,22 @@
#!/usr/bin/env python
# file mlab_test.py
# author Florent Guiotte <florent.guiotte@uhb.fr>
# version 0.0
# date 11 April 2019
"""Abstract

doc.
"""
from idefix import vxl
import mayavi.mlab as mlab
import numpy as np

spatial = np.random.random((10000, 3))
feature = np.random.random(10000)

grid = vxl.get_grid(spatial, .1)
vg = vxl.bin(grid, spatial, feature, 'mean')

vxl.plot(vg)
mlab.show()

pytest.ini Symbolic link (+1)

@@ -0,0 +1 @@
test/pytest.ini

setup.py

@@ -8,13 +8,37 @@
 #
 # TODO details
 
-from distutils.core import setup
+import setuptools
 
-setup(name='idefix',
-      version='0.0',
-      description='Utils and processing pipelines for LiDAR point clouds',
-      author='Florent Guiotte',
-      author_email='florent.guiotte@uhb.fr',
-      url='https://git.guiotte.fr/Florent/Idefix',
-      packages=['idefix'],
-      )
+with open('README.md', 'r') as fh:
+    long_description = fh.read()
+
+setuptools.setup(
+    name='idefix',
+    version='0.1.5',
+    description='Utils and processing pipelines for LiDAR point clouds',
+    author='Florent Guiotte',
+    author_email='florent.guiotte@uhb.fr',
+    url='https://git.guiotte.fr/Florent/Idefix',
+    long_description=long_description,
+    long_description_content_type='text/markdown',
+    packages=['idefix', 'idefix.tools'],
+    entry_points={'console_scripts': ['txt2npz = idefix.tools.txt_to_npz:main']},
+    classifiers=[
+        'Programming Language :: Python :: 3',
+        'License :: OSI Approved',
+        'Operating System :: OS Independent',
+    ],
+    python_requires='>=3.6',
+    install_requires=[
+        'numpy',
+        'sap',
+        'tqdm',
+        'matplotlib',
+        'pathlib',
+        'rasterio',
+        'laspy',
+        'humanize',
+        # 'mayavi',  # optional, for vxl.plot()
+    ],
+)

test/pytest.ini Normal file (+8)

@@ -0,0 +1,8 @@
[pytest]
filterwarnings =
ignore::DeprecationWarning:sqlalchemy.*:
ignore::DeprecationWarning:apptools.*:
ignore::DeprecationWarning:pyface.*:
ignore::DeprecationWarning:traits.*:
ignore::DeprecationWarning:traitsui.*:
ignore:.*escape sequence.*:DeprecationWarning

test/test_helpers.py Normal file (+46)

@@ -0,0 +1,46 @@
#!/usr/bin/env python
# file test_helpers.py
# author Florent Guiotte <florent.guiotte@irisa.fr>
# version 0.0
# date 24 August 2020
"""Abstract

doc.
"""
import numpy as np
import pytest
from idefix import helpers, io


@pytest.fixture
def ma_raster():
    rs = np.random.RandomState(42)
    raster = rs.random((10,10))
    raster = np.ma.array(raster, mask=raster<.1)
    return raster


@pytest.mark.parametrize('method',
        ['nearest', 'linear', 'cubic', 'idw'])
def test_interpolate(ma_raster, method):
    helpers.interpolate(ma_raster, method)


def _data_pc(datadir, set_id):
    path = datadir.join('pc{}.txt'.format(set_id))
    data = io.load_txt(path, 'x y z i'.split())
    return data


@pytest.mark.parametrize('params', [
    {},
    {'cell_size': 2.},
    {'last': True}])
def test_dsm(datadir, params):
    pc = _data_pc(datadir, 0)
    dsm = helpers.dsm(pc, **params)
    assert dsm is not None, 'Did not return anything...'
    assert not np.isnan(dsm).any(), 'Some missing values in DSM'


def test_dtm(ma_raster):
    dtm = helpers.dtm_dh_filter(ma_raster)
    assert dtm is not None, 'Did not return anything...'

test/test_helpers/pc0.txt Normal file

@@ -0,0 +1,8 @@
# x y z feature
1 1 1 2
1 3 2 5
1 3 2 5
1 3 2 10
1 3 2 20
10 10 10 1
5 5 5 0

test/test_io.py

@@ -15,8 +15,8 @@ from idefix import io
 @pytest.mark.parametrize('fname, exp_point_count, exp_field_count', [
     # TODO: test different LAS version
     # TODO: test LAS without field
-    ('test.las', 58629, 3, ),
-    #('test.laz', 58629, 3, ),
+    ('test.las', 58629, 4, ),
+    ('test.laz', 58629, 4, ),
 ])
 def test_load_las(datadir, fname, exp_point_count, exp_field_count):
     fname = datadir.join(fname)
@@ -85,3 +85,71 @@ def test_load_txt(datadir, fname, head, separator, exp_point_count, exp_field_co
     if dtype is not None:
         for feature_name, feature_dtype in zip(head[3:], dtype[3:]):
             assert result.feature[feature_name].dtype == feature_dtype, "Mismatch between specified dtype and returned feature dtype"
+
+
+@pytest.mark.parametrize('fname, exp_point_count, exp_field_count', [
+    ('test.npz', 58629, 2, ),
+    ('test_compressed.npz', 58629, 2,),
+    ('test_multi.npz', (100, 200), 2,),
+    ('test_multi_compressed.npz', (100, 200), 2,),
+])
+def test_load_pc(datadir, fname, exp_point_count, exp_field_count):
+    fname = datadir.join(fname)
+
+    # Raise "No such file"
+    with pytest.raises(IOError) as e_info:
+        io.load_pc('not_as_file.npz')
+
+    # Open file without exception
+    try:
+        result = io.load_pc(fname)
+    except IOError:
+        pytest.fail('Opening legit file without exception')
+
+    if isinstance(exp_point_count, tuple):
+        assert isinstance(result, tuple), "Multi point cloud file should return tuple of point cloud"
+        result = result[0]
+        exp_point_count = exp_point_count[0]
+
+    assert result.size == exp_point_count, "Return correct point count"
+    assert result['spatial'].shape[-1] == 3, "Return ndarray with spatial field"
+    assert result.spatial.shape[-1] == 3, "Returned array is not a recarray"
+    assert (result['spatial'] == result.spatial).all(), "Quick access with records array"
+    assert len(result['feature'].dtype) == exp_field_count, "Return ndarray with attribute fields"
+    assert result.spatial.dtype == np.float64, "Dtype of spatial is float64"
+
+
+@pytest.mark.parametrize('fname, compress', [
+    ('test.npz', False,),
+    ('test.npz', True,),
+    ('test_multi.npz', False,),
+    ('test_multi.npz', True,),
+])
+def test_dump_pc(datadir, fname, compress):
+    in_fname = datadir.join(fname)
+    pc = io.load_pc(in_fname)
+
+    out_fname = datadir / 'PYTEST_test.npz'
+    try:
+        io.dump_pc(out_fname, pc, compress)
+    except IOError:
+        pytest.fail('Dump file without exception')
+
+    assert out_fname.exists(), 'The dump file was not created'
+
+    in_out_pc = io.load_pc(out_fname)
+    assert len(in_out_pc) == len(pc), 'Mismatch of dumped point cloud'
+    if isinstance(pc, tuple):
+        assert in_out_pc[0].spatial.shape == pc[0].spatial.shape, 'Mismatch of dumped point cloud'
+        assert in_out_pc[0].spatial.dtype == pc[0].spatial.dtype, 'Mismatch of dumped point cloud'
+        assert in_out_pc[0].feature.dtype == pc[0].feature.dtype, 'Mismatch of dumped point cloud'
+    else:
+        assert in_out_pc.spatial.shape == pc.spatial.shape, 'Mismatch of dumped point cloud'
+        assert in_out_pc.spatial.dtype == pc.spatial.dtype, 'Mismatch of dumped point cloud'
+        assert in_out_pc.feature.dtype == pc.feature.dtype, 'Mismatch of dumped point cloud'

BIN test/test_io/test.npz Normal file (binary file not shown)
BIN test/test_io/test_compressed.npz Normal file (binary file not shown)
BIN test/test_io/test_multi.npz Normal file (binary file not shown)
BIN test/test_io/test_multi_compressed.npz Normal file (binary file not shown)

test/test_vxl.py

@@ -75,6 +75,7 @@ def data_grid(datadir, set_id, step_id):
     ('0', .7, '0_7'),
     ('0', .15, '0_15'),
     ('0', [1.,1.,2.] , '1-1-2'),
+    ('0', [1.,1.,None] , '1-1-n'),
 ])
 def test_get_grid(datadir, set_id, step, grid_id):
     spatial = data_pc(datadir, set_id).spatial

test/test_vxl (new data file: expected grid for the '1-1-n' case)

@@ -0,0 +1,3 @@
1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0
1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0
1.0 11.0