ld2daps/Notebooks/Node SDAPs.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import collections\n",
"\n",
"import ld2dap"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"layers_files = [\n",
" '../Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_TLI/UH17_GEG05_TR.tif',\n",
" '../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C2/UH17_GI2F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C3/UH17_GI3F051_TR.tif'\n",
"]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Node Streaming Self-Dual Attribute Profiles"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"l = ld2dap.LoadTIFF(layers_files[0])\n",
"t = ld2dap.Treshold(1e4)\n",
"a = ld2dap.SelfDualAttributeProfiles(area = [1e3, 1e6])#, sd=[.4,.6,.8], moi=[.5,.9])\n",
"f = ld2dap.Differential()\n",
"d = ld2dap.ShowFig(stack_id='all', symb=False)\n",
"o = ld2dap.RawOutput()\n",
"\n",
"o.input = a\n",
"d.input = f\n",
"f.input = a\n",
"a.input = t\n",
"t.input = l\n",
"\n",
"d.run()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"(o.data[:,:,0] == o.data[:,:,13]).all()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(o.metadata[1]), o.data.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Algorithm"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Metadata"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A = {'area': 2, 'sd': 3, 'moi': 2}\n",
"raster_count = 2\n",
"\n",
"att_len = list(A.values())\n",
"att_len = np.tile(att_len, raster_count)\n",
"display(att_len)\n",
"\n",
"start = np.cumsum(att_len)[:-1]\n",
"start = np.hstack(([0], start))\n",
"start\n"
]
},
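{
"cell_type": "markdown",
"metadata": {},
"source": [
"For illustration, each tiled attribute can be paired with the band range it occupies (`names` is just a hypothetical helper built from the variables above):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustration: map each (tiled) attribute name to its [start, stop) band range\n",
"names = np.tile(list(A.keys()), raster_count)\n",
"list(zip(names, start, start + att_len))"
]
},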
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data\n",
"\n",
"Duplicate origin in attributes to respect Stack construction\n",
"\n",
"#### Insert a raster in a stack"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"stack = o.data.copy()\n",
"raster = stack[:,:,0]\n",
"\n",
"stack.shape, raster.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"where = 3\n",
"nstack = np.insert(stack, where, raster, axis=2)\n",
"nstack.shape, (nstack[:,:,0] == nstack[:,:,where]).all()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Insert same raster in multiple places"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_broadcast = np.tile(raster, (2, 1, 1))\n",
"raster_broadcast = np.rollaxis(raster_broadcast, 0, 3)\n",
"raster_broadcast.shape, (raster_broadcast[:,:,1] == raster).all()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"nstack = np.insert(stack, (3,5), raster_broadcast, axis=2)\n",
"nstack.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Check if offset is ok:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"(stack[:,:,3] == nstack[:,:,4]).all()"
]
},
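{
"cell_type": "markdown",
"metadata": {},
"source": [
"The inserted copies themselves land at bands 3 and 6, since the second insertion index is shifted by the first one:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Both inserted slices are copies of the original raster\n",
"(nstack[:,:,3] == raster).all(), (nstack[:,:,6] == raster).all()"
]
},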
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Index where origin should be placed"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_count = 2\n",
"\n",
"att_len = {'area': 3, 'sd': 4, 'moi': 3}\n",
"where = np.array(list(att_len.values()))\n",
"where = where[where != 0] - 1\n",
"\n",
"where[0] += 1\n",
"display(where)\n",
"count = sum(where)\n",
"display(count)\n",
"\n",
"where = np.cumsum(where[:-1])\n",
"origins_dcount = where.size\n",
"display(where)\n",
"\n",
"offset = np.repeat(np.arange(raster_count) * count, where.size)\n",
"display(offset)\n",
"where = np.tile(where, (raster_count)) + offset\n",
"where"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Repeat multiple origins"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"origins_index = np.arange(raster_count) * count\n",
"display(origins_index)\n",
"origins = stack[:,:,origins_index]\n",
"origins.shape, (origins[:,:,0] == stack[:,:,0]).all(), (origins[:,:,1] == stack[:,:,8]).all()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"rorigins = np.repeat(origins, 2, axis=2)\n",
"rorigins.shape, \\\n",
"(rorigins[:,:,0] == rorigins[:,:,1]).all() and (origins[:,:,0] == rorigins[:,:,1]).all(), \\\n",
"(rorigins[:,:,2] == rorigins[:,:,3]).all() and (origins[:,:,1] == rorigins[:,:,2]).all()"
]
}
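,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Putting the pieces together\n",
"\n",
"A minimal sketch (not the library implementation): repeat each origin once per insertion position and insert the copies into the stack at the computed `where` indices. `full_stack` is a hypothetical name, and the indices assume the layout of the indexing example above together with a stack that has enough bands."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: duplicate the origins and insert them at the computed positions\n",
"# (uses `stack`, `where`, `origins` and `origins_dcount` from the cells above)\n",
"full_stack = np.insert(stack, where, np.repeat(origins, origins_dcount, axis=2), axis=2)\n",
"full_stack.shape, (full_stack[:,:,where[0]] == origins[:,:,0]).all()"
]
}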
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}