WIP on SDAPs

Florent Guiotte 2018-05-10 17:49:16 +02:00
parent 44e83910a6
commit db34096353
6 changed files with 343 additions and 35 deletions

Notebooks/Node SDAPs.ipynb (new file)

@@ -0,0 +1,285 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"from pathlib import Path\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import collections\n",
"\n",
"ld2dap_path = Path('../')\n",
"sys.path.append(str(ld2dap_path.resolve()))\n",
"import ld2dap\n",
"from ld2dap.core import Filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"layers_files = [\n",
" '../Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_TLI/UH17_GEG05_TR.tif',\n",
" '../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C2/UH17_GI2F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C3/UH17_GI3F051_TR.tif'\n",
"]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Node Streaming Self-Dual Attribute Profiles"
]
},
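{
"cell_type": "markdown",
"metadata": {},
"source": [
"SDAPs filter each raster on a tree of shapes (triskele's `tos-tree`), so each attribute contributes its origin image plus one filtered image per threshold value."
]
},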
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"l = ld2dap.LoadTIFF(layers_files[5:7])\n",
"t = ld2dap.Treshold(1e4)\n",
"a = ld2dap.SelfDualAttributeProfiles(area = [1e3, 1e6], sd = [.5])\n",
"d = ld2dap.ShowFig(stack_id='all', symb=True)\n",
"o = ld2dap.RawOutput()\n",
"\n",
"o.input = a\n",
"d.input = a\n",
"a.input = t\n",
"t.input = l\n",
"\n",
"d.run()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(o.metadata[1]), o.data.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Algorithm"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Metadata"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A = {'area': 2, 'sd': 3, 'moi': 2}\n",
"raster_count = 2\n",
"\n",
"att_len = list(A.values())\n",
"att_len = np.tile(att_len, raster_count)\n",
"display(att_len)\n",
"\n",
"start = np.cumsum(att_len)[:-1]\n",
"start = np.hstack(([0], start))\n",
"start\n"
]
},
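{
"cell_type": "markdown",
"metadata": {},
"source": [
"With these example values, `att_len` tiles to `[2, 3, 2, 2, 3, 2]` and `start` comes out as `[0, 2, 5, 7, 9, 12]`: the offset of each attribute block in the flattened stack."
]
},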
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data\n",
"\n",
"Duplicate origin in attributes to respect Stack construction\n",
"\n",
"#### Insert a raster in a stack"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"stack = o.data.copy()\n",
"raster = stack[:,:,0]\n",
"\n",
"stack.shape, raster.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"where = 3\n",
"nstack = np.insert(stack, where, raster, axis=2)\n",
"nstack.shape, (nstack[:,:,0] == nstack[:,:,where]).all()"
]
},
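{
"cell_type": "markdown",
"metadata": {},
"source": [
"`np.insert` places values *before* the given index, so the copy lands at index `where` and every original layer from `where` onward shifts right by one. A minimal 1-D check:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 9 is inserted before index 2, giving [0, 1, 9, 2, 3]\n",
"np.insert(np.array([0, 1, 2, 3]), 2, 9)"
]
},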
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Insert same raster in multiple places"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_broadcast = np.tile(raster, (2, 1, 1))\n",
"raster_broadcast = np.rollaxis(raster_broadcast, 0, 3)\n",
"raster_broadcast.shape, (raster_broadcast[:,:,1] == raster).all()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"nstack = np.insert(stack, (3), raster_broadcast, axis=2)\n",
"nstack.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Check if offset is ok:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"(stack[:,:,3] == nstack[:,:,4]).all()"
]
},
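{
"cell_type": "markdown",
"metadata": {},
"source": [
"Note: with a scalar index and two slices to insert, `np.insert` places both copies at position 3, so `nstack[:,:,3]` and `nstack[:,:,4]` are the two inserted copies and the original layer 3 shifts to `nstack[:,:,5]`. The check above therefore compares the original layer 3 with the second copy of layer 0, not with itself."
]
},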
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Repeat multiple origins"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_count = 2\n",
"att_len = {'area': 3, 'sd': 0, 'moi': 0}\n",
"# Ignore\n",
"where = np.array(list(att_len.values()))\n",
"where = where[where != 0] - 1\n",
"where[0] += 1\n",
"count = sum(where)\n",
"# /Ignore\n",
"origins_index = np.arange(raster_count) * count\n",
"display(origins_index)\n",
"origins = stack[:,:,origins_index]\n",
"origins.shape"
]
},
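{
"cell_type": "markdown",
"metadata": {},
"source": [
"With `att_len = {'area': 3}` each raster spans `count = 3` layers, so `origins_index` evaluates to `[0, 3]`: the origin layer of each input raster."
]
},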
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A = np.arange(16*9*4).reshape((16,9,4))\n",
"A[:,:,[0,1]].shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Index where origin should be placed"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_count = 4\n",
"\n",
"att_len = {'area': 3, 'sd': 2, 'moi': 0}\n",
"where = np.array(list(att_len.values()))\n",
"where = where[where != 0] - 1\n",
"\n",
"where[0] += 1\n",
"display(where)\n",
"count = sum(where)\n",
"display(count)\n",
"\n",
"where = np.cumsum(where[:-1])\n",
"display(where)\n",
"\n",
"offset = np.repeat(np.arange(raster_count) * count, where.size)\n",
"display(offset)\n",
"where = np.tile(where, (raster_count)) + offset\n",
"where"
]
},
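{
"cell_type": "markdown",
"metadata": {},
"source": [
"Worked through: `where` starts as `[3, 1]`, `count = 4`, the cumulative sum keeps `[3]`, the per-raster offsets are `[0, 4, 8, 12]`, and the final insert indices are `[3, 7, 11, 15]`: one origin copy at the start of the `sd` block of each raster."
]
},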
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A = list()\n",
"A.size"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

(empty notebook, file name not shown)

@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}

ld2dap/SelfDualAttributeProfiles.py

@@ -10,13 +10,14 @@
 from .core import Filter, Stack
-## TODO: dep
+# TODO: dep
 import sys
 import numpy as np
 sys.path.append('../triskele/python')
 import triskele
 
-class AttributeProfiles(Filter):
+class SelfDualAttributeProfiles(Filter):
     def __init__(self, area=None, sd=None, moi=None):
         super().__init__(self.__class__.__name__)
         self.area = np.sort(area) if area is not None else None
@@ -47,70 +48,83 @@ class AttributeProfiles(Filter):
     def _process_len(self):
         att_len = dict()
+        att_len_cs = dict()
+        cs = 0
         for att in ['area', 'sd', 'moi']:
             values = self.__getattribute__(att)
-            att_len[att] = len(values) if values is not None else 0
-        return att_len
+            al = len(values) + 1 if values is not None else 0
+            att_len[att] = al
+            att_len_cs[att] = cs
+            cs += al
+        self.logger.debug('Attribute length: {}'.format(att_len))
+        self.logger.debug('Attribute length CS: {}'.format(att_len_cs))
+        return att_len, att_len_cs
 
     def _process(self, data, metadata):
         t = triskele.Triskele(data, verbose=False)
-        att_min = t.filter(tree='tos-tree', area=self.area,
+        attributes = t.filter(tree='tos-tree', area=self.area,
                            standard_deviation=self.sd,
                            moment_of_inertia=self.moi)
 
-        ## Create new metadata
-        ### Pre-process descriptions
+        # Create new data and metadata
+        metadata_new = list()
+        # Pre-process descriptions
         att_desc = self._process_desc()
         att_symb = self._process_symb()
 
-        ### Compute stack offsets and att length
-        att_len = self._process_len()
+        # Compute stack offsets and att length
+        att_len, att_len_cs = self._process_len()
         raster_offset = sum(att_len.values())
 
-        ### Merge old and new descriptions
-        metadata_new = list()
-        ### Re-order to create original APs
-        raster_list = list()
+        # Duplicate origin in data to respect Stack structure
+        # Compute insert index in where
+        where = np.array(list(att_len.values()))
+        where = where[where != 0] - 1
+        where[0] += 1
+        count = sum(where)
+        where = np.cumsum(where[:-1])
+        offset = np.repeat(np.arange(len(metadata)) * count,
+                           where.size)  # Can't nest this
+        where = np.tile(where, len(metadata)) + offset
+        # TODO:
+        # - Repeat multiple origins
+        # - Duplicate origins to match where
+        # - Insert origins where
+        data_new = None
 
         for stack in metadata:
             if stack.end - stack.begin > 1:
-                self.logger.err('Nested filtering, raising error')
                 raise NotImplementedError('Nested filtering not implemented yet')
 
-            do = 0
-            sb = stack.begin * (raster_offset + 1)
             for att in ['area', 'sd', 'moi']:
-                if att_offset[att] == 0:
+                if att_len[att] == 0:
                     continue
-                al = att_len[att]
-                raster_list.append(att_min[:,:,sb+do+al:sb+do:-1])
-                raster_list.append(att_min[:,:,sb])
-                print('DEBUG: copying layer {}'.format(sb))
-                raster_list.append(att_max[:,:,sb+do+1:sb+do+al+1])
-                do += al
-                stack_new = Stack(dso + stack_offset * stack.begin, att_offset[att],
+                stack_new = Stack(raster_offset * stack.begin + att_len_cs[att],
+                                  att_len[att],
                                   stack.desc[0], stack.symb[0])
                 for old_desc, new_desc in zip(stack_new.desc, att_desc[att]):
-                    print('DESCRIPTION: {} > {}'.format(old_desc, new_desc))
+                    self.logger.debug('Desc: {} + {}'.format(old_desc, new_desc))
                     old_desc.append(new_desc)
                 for old_symb, new_symb in zip(stack_new.symb, att_symb[att]):
-                    print('symbRIPTION: {} > {}'.format(old_symb, new_symb))
+                    self.logger.debug('Symb: {} + {}'.format(old_symb, new_symb))
                     old_symb.append(new_symb)
                 metadata_new.append(stack_new)
-        data_new = np.dstack(raster_list)
         return data_new, metadata_new
 
 
 if __name__ == '__main__':
     area = [10, 100, 1000]
     sd = [.1, .9]
-    ap = AttributeProfiles(area, sd)
+    ap = SelfDualAttributeProfiles(area, sd)
     print(ap._process_desc())
-    print(ap._process_offset())
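
The TODO block above marks the origin-duplication step as unfinished. A minimal standalone sketch of that step, assuming the index arithmetic explored in the notebook (the function name and signature are hypothetical, not part of this commit):

import numpy as np

def duplicate_origins(stack, att_len, raster_count):
    # Sketch only: duplicate each raster's origin layer so that every
    # attribute block in the stack starts with its own copy of the origin.
    counts = np.array([v for v in att_len.values() if v != 0]) - 1
    counts[0] += 1                    # the first block keeps the shared origin
    per_raster = counts.sum()         # layers per input raster in `stack`
    where = np.cumsum(counts[:-1])    # insert points within a single raster
    offset = np.repeat(np.arange(raster_count) * per_raster, where.size)
    where = np.tile(where, raster_count) + offset
    # One origin copy per insert index, grouped per raster to match `where`
    origins = stack[:, :, np.arange(raster_count) * per_raster]
    dup = np.repeat(origins, where.size // raster_count, axis=2)
    return np.insert(stack, where, dup, axis=2)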

ld2dap/__init__.py

@@ -9,6 +9,7 @@
 # TODO details
 
 from .AttributeProfiles import AttributeProfiles
+from .SelfDualAttributeProfiles import SelfDualAttributeProfiles
 from .Treshold import Treshold
 from .LoadTIFF import LoadTIFF
 from .SaveFig import SaveFig

ld2dap/core/Input.py

@@ -10,6 +10,7 @@
 from .Node import Node
 
 class Input(Node):
     def __init__(self, name='__CHILD__'):
         super().__init__('I:{}'.format(name))

(test script, file name not shown)

@@ -11,6 +11,7 @@
 #from core import Input, Output, Filter
 from ld2dap import LoadTIFF, SaveFig, Treshold, ShowFig, Differential
 from ld2dap import AttributeProfiles as APs
+from ld2dap import SelfDualAttributeProfiles as SDAPs
 from ld2dap.core import logger
 
 import numpy as np
@@ -19,7 +20,7 @@ def main():
     logger.setup_logging()
 
     i = LoadTIFF(['Data/test.tiff', 'Data/test2.tiff'])
     t = Treshold(1e4)
-    ap = APs([100,1e3,1e4])
+    ap = SDAPs([100,1e3], [.5,.7,.9], [.1,.3])
     o = SaveFig('Res/test.png')
     s = ShowFig(0, True)
     d = Differential()