Add tool scripts #1
.gitignore (vendored) | 1 +

@@ -3,3 +3,4 @@ doc/_build
 doc/source
 __pycache__
 log/
+.coverage
@@ -64,6 +64,14 @@ def load_las(fname):
         feature_data.append(att)
         feature_dtype.append((spec.name, att.dtype))
 
+    # XXX: Workaround missing num_returns in header
+    try:
+        att = infile.num_returns
+        feature_data.append(att)
+        feature_dtype.append(('num_returns', att.dtype))
+    except Exception:
+        pass
+
     log.debug('Create feature recarray')
     feature = np.core.records.fromarrays(feature_data, dtype=feature_dtype)
     del feature_data, feature_dtype
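Review note on the hunk above: the try/except reuses the optional-field pattern already in place for the regular attributes. A minimal, self-contained sketch of that pattern; the field names and values below are made up for illustration and do not come from the LAS reader:

# Build per-field arrays, optionally append a field that may be missing,
# then assemble the record array exactly as the hunk does.
import numpy as np

feature_data, feature_dtype = [], []
for name, att in [('z', np.arange(5.0)), ('intensity', np.arange(5, dtype='u2'))]:
    feature_data.append(att)
    feature_dtype.append((name, att.dtype))

try:
    att = np.ones(5, dtype='u1')          # stands in for infile.num_returns
    feature_data.append(att)
    feature_dtype.append(('num_returns', att.dtype))
except Exception:
    pass

feature = np.core.records.fromarrays(feature_data, dtype=feature_dtype)
print(feature.dtype.names)                # ('z', 'intensity', 'num_returns')

If the attribute lookup is the only failure expected here, narrowing the bare except clause would keep the workaround from hiding unrelated read errors.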
idefix/tools/txt_to_npz.py (new, executable file) | 48 +

@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# file txt_to_npz.py
+# author Florent Guiotte <florent.guiotte@uhb.fr>
+# version 0.0
+# date 24 mai 2019
+"""Convert point clouds from text files to Idefix file format.
+
+doc.
+"""
+
+import numpy as np
+import idefix.io as io
+from pathlib import Path
+import argparse
+from tqdm import tqdm
+
+def txt_to_npy(fname, header, delimiter=None, dtype=None, compression=False):
+    oname = fname.stem + '.npz'
+    pc = io.load_txt(fname, header, delimiter, dtype)
+    io.dump_pc(oname, pc, compression)
+
+def main():
+    parser = argparse.ArgumentParser(description='Convert point clouds from text files to Idefix file format.')
+    parser.add_argument('file', type=str, help='file or dir to convert')
+    parser.add_argument('header', type=str, help='field names of the data')
+    parser.add_argument('--dtype', '-t', type=str, help='field data types')
+    parser.add_argument('--delimiter', '-d', type=str, default=',', help='field data delimiter')
+    parser.add_argument('--compress', '-c', action='store_true', default=False, help='enable data compression')
+
+    args = parser.parse_args()
+    header = args.header.split()
+    dtype = [np.dtype(x) for x in args.dtype.split()] if args.dtype else None
+    delimiter = args.delimiter
+    compress = args.compress
+    wd = Path(args.file)
+
+    if wd.is_dir():
+        files = wd.glob('*.txt')
+    else:
+        files = (wd,)
+
+    pbar = tqdm(list(files))
+    for f in pbar:
+        pbar.write('Processing {}...'.format(f))
+        txt_to_npy(f, header, delimiter, dtype, compress)
+
+if __name__ == '__main__':
+    main()
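For reviewers, one possible way to exercise the new tool. The input file name and column layout are made up, and the call assumes idefix.tools is importable as packaged in setup.py and that io.load_txt accepts the field-name list the way main() passes it:

# Hypothetical invocation of the new converter; 'tile_001.txt' and the column
# names are illustrative only.
from pathlib import Path
from idefix.tools.txt_to_npz import txt_to_npy

txt_to_npy(Path('tile_001.txt'),           # must be a Path: .stem builds the output name
           header=['x', 'y', 'z', 'intensity'],
           delimiter=',',
           dtype=None,                     # let io.load_txt pick the field types
           compression=True)               # output is 'tile_001.npz'; fname.stem drops the
                                           # input's parent directory from the output path

# Shell equivalent once the txt2npz entry point from setup.py is installed:
#   txt2npz tile_001.txt "x y z intensity" -d ',' -c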
@@ -131,7 +131,8 @@ def _bin_mean(grid, spatial, feature):
     density, edge = np.histogramdd(spatial, grid)
     weightd, edge = np.histogramdd(spatial, grid, weights=feature)
     mask = density == 0
-    return np.ma.masked_array(np.divide(weightd, density, where=~mask), mask)
+    return np.ma.masked_array(np.divide(weightd, density, where=~mask),
+                              mask, dtype=feature.dtype)
 
 def _bin_mode(grid, spatial, feature):
     '''Bin spatial in a grid, mode method.
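A small numeric illustration of what the added dtype argument does in _bin_mean. The bin counts and sums are made up, and uint8 simply stands in for an integer feature dtype:

# Illustrative values: 2 points in bin 0, none in bin 1, 1 point in bin 2.
import numpy as np

density = np.array([2.0, 0.0, 1.0])     # points per bin
weightd = np.array([7.0, 0.0, 5.0])     # summed feature values per bin
mask = density == 0
# out= keeps the empty bin at a defined value for this demo; the masked entry
# is never shown anyway.
mean = np.divide(weightd, density, out=np.zeros_like(weightd), where=~mask)

before = np.ma.masked_array(mean, mask)                   # stays float64
after = np.ma.masked_array(mean, mask, dtype=np.uint8)    # cast back to the feature dtype

print(before)   # [3.5 -- 5.0]
print(after)    # [3 -- 5]  (fractional means are truncated by the integer cast)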
@@ -289,7 +290,7 @@ def _squash_position(voxel_grid, method, axis):
     voxel_grid_where = list(xy_where)
     voxel_grid_where.insert(axis%(len(voxel_grid_where)+1), squash_id.compressed())
 
-    raster = np.zeros_like(squash_id)
+    raster = np.zeros_like(squash_id, dtype=voxel_grid.dtype)
     raster[xy_where] = voxel_grid[tuple(voxel_grid_where)]
 
     return raster
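The dtype argument matters here because zeros_like would otherwise inherit the dtype of squash_id, which is used as an index array and so is presumably integer-typed, silently truncating the voxel values copied into the raster. A standalone sketch with made-up values:

# Made-up data: integer indices on one side, float voxel contents on the other.
import numpy as np

squash_id = np.array([2, 0, 1])             # plays the role of the index array
voxel_vals = np.array([0.25, 0.75, 1.5])    # plays the role of voxel_grid values

before = np.zeros_like(squash_id)                          # int dtype inherited
before[:] = voxel_vals                                     # -> [0, 0, 1], values truncated

after = np.zeros_like(squash_id, dtype=voxel_vals.dtype)   # float dtype kept
after[:] = voxel_vals                                      # -> [0.25, 0.75, 1.5]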
setup.py | 5 +++--

@@ -11,10 +11,11 @@
 from distutils.core import setup
 
 setup(name='idefix',
-      version='0.0',
+      version='1.4',
       description='Utils and processing pipelines for LiDAR point clouds',
       author='Florent Guiotte',
       author_email='florent.guiotte@uhb.fr',
       url='https://git.guiotte.fr/Florent/Idefix',
-      packages=['idefix'],
+      packages=['idefix', 'idefix.tools'],
+      entry_points = {'console_scripts':['txt2npz = idefix.tools.txt_to_npz:main',]},
      )
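One optional follow-up rather than a blocker: entry_points is a setuptools option, so importing setup from setuptools, instead of relying on setuptools patching distutils.core at build time, makes generation of the txt2npz script unambiguous. A minimal sketch of the changed lines only, with the metadata kept as in the diff above:

# Suggested variant of the import and call; trimmed to the lines that change.
from setuptools import setup   # instead of: from distutils.core import setup

setup(name='idefix',
      version='1.4',
      packages=['idefix', 'idefix.tools'],
      entry_points={'console_scripts': ['txt2npz = idefix.tools.txt_to_npz:main']})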
@@ -15,8 +15,8 @@ from idefix import io
 @pytest.mark.parametrize('fname, exp_point_count, exp_field_count', [
     # TODO: test different LAS version
     # TODO: test LAS without field
-    ('test.las', 58629, 3, ),
-    #('test.laz', 58629, 3, ),
+    ('test.las', 58629, 4, ),
+    ('test.laz', 58629, 4, ),
     ])
 def test_load_las(datadir, fname, exp_point_count, exp_field_count):
     fname = datadir.join(fname)