Allow manual dtype, add Normalize node

This commit is contained in:
Florent Guiotte 2018-09-12 21:56:15 +02:00
parent 9d1360fbe6
commit fb0a423015
7 changed files with 112 additions and 112 deletions

View File

@ -29,7 +29,7 @@
"metadata": {},
"outputs": [],
"source": [
"n = 3; d = 0"
"n = 5; d = 0"
]
},
{
@ -63,10 +63,12 @@
"outputs": [],
"source": [
"load = ld2dap.LoadTIFF(layers_files)\n",
"#trsh = ld2dap.Treshold(1e4)\n",
"trsh = ld2dap.Treshold(1e4)\n",
"norm = ld2dap.Normalize(dtype=np.uint8)\n",
"rout = ld2dap.RawOutput()\n",
"\n",
"rout.input = load\n",
"rout.input = norm\n",
"norm.input = trsh\n",
"trsh.input = load\n",
"\n",
"rout.run()"
@ -120,7 +122,7 @@
"\n",
"for cut in cuts:\n",
" rinp = ld2dap.RawInput(cut, rout.metadata)\n",
" aps = ld2dap.AttributeProfiles(area=[100,1e3,1e4])\n",
" aps = ld2dap.SelfDualAttributeProfiles(area=[100,1e3,1e4,1e5], normalize_to_dtype=False)\n",
" vout = ld2dap.RawOutput()\n",
" \n",
" vout.input = aps\n",
@ -145,9 +147,10 @@
"outputs": [],
"source": [
"descriptors = np.zeros(rout.data.shape[:2] + (dcuts[0].shape[-1],))\n",
"view = np.moveaxis(descriptors, d, 0)\n",
"\n",
"for i, cut in enumerate(dcuts):\n",
" descriptors[i*step:(i+1)*step+1] = cut"
" view[i*step:(i+1)*step+1] = np.moveaxis(cut, 0, d)"
]
},
{
@ -164,110 +167,12 @@
"outputs": [],
"source": [
"t_inp = ld2dap.RawInput(descriptors, vout.metadata)\n",
"t_dsp = ld2dap.ShowFig(stack_id=3, symb=True, fname='../Res/bands.png')\n",
"t_dsp = ld2dap.ShowFig(stack_id=3, symb=False)\n",
"\n",
"t_dsp.input = t_inp\n",
"\n",
"t_dsp.run()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"descriptors.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dim = 5\n",
"A = np.arange(64).reshape((4, 8, 2))\n",
"A = A[:,:,np.newaxis] if A.ndim == 2 else A\n",
"B = np.zeros(A.shape[:2] + (A.shape[2] * dim,))\n",
"A.shape, B.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step = int(A.shape[0] / n)\n",
"cuts = list()\n",
"\n",
"for i in range(n):\n",
" cut = A[i*step:(i+1)*step+1]\n",
" cut = np.repeat(cut, dim, axis=2)\n",
" cuts.append(cut)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for i, cut in enumerate(cuts):\n",
" B[i*step:(i+1)*step+1] = cut\n",
"B"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cuts[0].shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"np.tile(A[:,:,np.newaxis], (1,1,10))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"np.repeat(A.reshape(, 2, axis=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {

View File

@ -42,6 +42,47 @@
"disp.run()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"aps = ld2dap.SelfDualAttributeProfiles(area=[100, 1e3], normalize_to_dtype=False)\n",
"\n",
"aps.input = rinp\n",
"disp.input = aps\n",
"\n",
"disp.run()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Try this new Normalize node !"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"load = ld2dap.LoadTIFF('../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif')\n",
"trsh = ld2dap.Treshold(1e4)\n",
"norm = ld2dap.Normalize(dtype=np.uint8)\n",
"aps = ld2dap.SelfDualAttributeProfiles(area=[100, 1e3], normalize_to_dtype=False)\n",
"disp = ld2dap.ShowFig(stack_id='all')\n",
"\n",
"disp.input = aps\n",
"aps.input = norm\n",
"norm.input = trsh\n",
"trsh.input = load\n",
"\n",
"disp.run()"
]
},
{
"cell_type": "code",
"execution_count": null,

View File

@ -14,12 +14,13 @@ import numpy as np
import triskele
class AttributeProfiles(Filter):
def __init__(self, area=None, sd=None, moi=None):
def __init__(self, area=None, sd=None, moi=None, normalize_to_dtype=True):
super().__init__(self.__class__.__name__)
self.logger.debug('Oh hi Mark!')
self.area = np.sort(area) if area is not None else None
self.sd = np.sort(sd) if sd is not None else None
self.moi = np.sort(moi) if moi is not None else None
self.normalize_to_dtype = normalize_to_dtype
def _process_desc(self):
att_desc = dict()
@ -64,13 +65,13 @@ class AttributeProfiles(Filter):
def _process(self, data, metadata):
self.logger.info('Compute Attribute Profiles on stack of size {}'.format(data.shape))
t = triskele.Triskele(data, verbose=False)
t = triskele.Triskele(data, verbose=False, normalize_to_dtype=self.normalize_to_dtype)
att_min = t.filter(tree='min-tree', area=self.area,
standard_deviation=self.sd,
moment_of_inertia=self.moi)
standard_deviation=self.sd,
moment_of_inertia=self.moi)
att_max = t.filter(tree='max-tree', area=self.area,
standard_deviation=self.sd,
moment_of_inertia=self.moi)
standard_deviation=self.sd,
moment_of_inertia=self.moi)
## Merge filtering as APs

49
ld2dap/Normalize.py Normal file
View File

@ -0,0 +1,49 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file Normalize.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 12 sept. 2018
#
# TODO details
from ld2dap.core import Filter
import numpy as np
class Normalize(Filter):
    """Normalize stream values.

    This node rescales each channel of the stream independently to the
    range [min, max], unless dtype is provided.

    If dtype is not None, min and max are set to the extremum values of
    that dtype (integer or floating-point).
    """
    def __init__(self, min=0., max=1., dtype=None):
        super().__init__(self.__class__.__name__)
        if dtype is not None:
            # np.iinfo only accepts integer dtypes; use np.finfo for
            # floating-point dtypes so e.g. dtype=np.float32 works too.
            if np.issubdtype(dtype, np.integer):
                info = np.iinfo(dtype)
            else:
                info = np.finfo(dtype)
            self.min = info.min
            self.max = info.max
        else:
            self.min = min
            self.max = max

    def _process(self, data, metadata):
        """Rescale each channel of data in place to [self.min, self.max].

        Returns the (mutated) data and the updated metadata.
        """
        self.logger.info('Filtering')
        # TODO: see TODO from Treshold _process
        ## Channel independent scale
        for i in range(data.shape[2]):
            cmin = data[:,:,i].min()
            cmax = data[:,:,i].max()
            if cmax == cmin:
                # Constant channel: avoid division by zero, map it to
                # the lower bound of the target range.
                data[:,:,i] = self.min
                continue
            # Shift to zero, scale to the target span, then offset to
            # self.min.  Offsetting AFTER scaling keeps the bounds exact
            # for any self.min, not only self.min == 0 (scaling after a
            # shift to self.min would multiply the offset too).
            data[:,:,i] -= cmin
            data[:,:,i] *= (self.max - self.min) / (cmax - cmin)
            data[:,:,i] += self.min
        for stack in metadata:
            for d, s in zip(stack.desc, stack.symb):
                d.append('normalize [{}, {}]'.format(self.min, self.max))
                # s.append('N_{{[{}, {}]}}'.format(self.min, self.max))
        return data, metadata

View File

@ -15,11 +15,12 @@ import triskele
class SelfDualAttributeProfiles(Filter):
def __init__(self, area=None, sd=None, moi=None):
def __init__(self, area=None, sd=None, moi=None, normalize_to_dtype=True):
super().__init__(self.__class__.__name__)
self.area = np.sort(area) if area is not None else None
self.sd = np.sort(sd) if sd is not None else None
self.moi = np.sort(moi) if moi is not None else None
self.normalize_to_dtype = normalize_to_dtype
def _process_desc(self):
att_desc = dict()
@ -58,7 +59,7 @@ class SelfDualAttributeProfiles(Filter):
return att_len, att_len_cs
def _process(self, data, metadata):
t = triskele.Triskele(data, verbose=False)
t = triskele.Triskele(data, verbose=False, normalize_to_dtype=self.normalize_to_dtype)
attributes = t.filter(tree='tos-tree', area=self.area,
standard_deviation=self.sd,
moment_of_inertia=self.moi)

View File

@ -21,6 +21,8 @@ class Treshold(Filter):
def _process(self, data, metadata):
# TODO: UPGRADE STACK DEPENDANCE
# TODO: Verify if the previous TODO is up to date
# TODO: It comes to mind that the hesitations expressed in the previous TODO are probably unfounded, as the previous previous TODO clearly refers to stacks, not independent rasters
# This filters each raster independently
self.logger.info('Filtering')

View File

@ -19,3 +19,4 @@ from .ShowFig import ShowFig
from .RawOutput import RawOutput
from .RawInput import RawInput
from .Differential import Differential
from .Normalize import Normalize