WORK IN PROGRESS: CVG

This commit is contained in:
Florent Guiotte 2018-07-06 11:55:11 +02:00
parent 3e4f5000f3
commit 27184025c9
9 changed files with 646 additions and 149 deletions

View File

@@ -10,7 +10,7 @@
 import numpy as np

-class CVG:
+class CVG_legacy:
     def __init__(self, attributes, ground_truth, n_test=2, order_dim=0):
         self._order = order_dim
         self._ntests = n_test
@@ -48,3 +48,11 @@ class CVG:
         self._actual_ntest += 1
         return (Xtrain, Xtest, Ytrain, Ytest, train_filter)
+
+
+class APsCVG:
+    """Cross Validation Generator for Attribute Profiles Descriptors"""
+    def __init__(self, ground_truth, attributes, cv_count=5, label_ignore=None):
+        d
+        return xtrain, xtest, ytrain, ytest, test_index
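
Note: the `APsCVG` body above is committed as a work-in-progress stub (the stray `d` and the unreachable `return` are in the commit as-is). For orientation only, a minimal sketch of what such a generator could look like, assuming the `(xtrain, xtest, ytrain, ytest, test_index)` yield signature suggested by the stub and a per-class contiguous split; everything below is our illustration, not the committed code:

```python
import numpy as np

class APsCVG_sketch:
    """Hypothetical CV generator for AP descriptors (illustration only).

    Takes the step-th contiguous chunk of each ground-truth class as the
    test set, so test pixels stay spatially grouped rather than randomly
    scattered across the raster.
    """
    def __init__(self, ground_truth, attributes, cv_count=5, label_ignore=None):
        self._y = ground_truth.reshape(-1)
        self._X = attributes.reshape(-1, attributes.shape[-1])
        self._cv_count = cv_count
        self._ignore = label_ignore

    def __iter__(self):
        for step in range(self._cv_count):
            test_index = np.zeros(self._y.size, dtype=bool)
            for label in np.unique(self._y):
                if label == self._ignore:
                    continue
                idx = np.nonzero(self._y == label)[0]
                chunk = idx.size // self._cv_count
                test_index[idx[step * chunk:(step + 1) * chunk]] = True
            train_index = ~test_index
            if self._ignore is not None:
                train_index &= self._y != self._ignore
            yield (self._X[train_index], self._X[test_index],
                   self._y[train_index], self._y[test_index], test_index)
```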

View File

@@ -1,5 +1,25 @@
 {
  "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Attribute Profiles Classification Prototype\n",
+    "\n",
+    "Ground classification (2D) of LiDAR data with Attribute Profiles (APs) on pre-calculated rasters from LiDAR point cloud processing (DEMs, intensity maps...).\n",
+    "\n",
+    "We will use the LD2DAPs package to compute profiles and try to generalize the process to automate the classification."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Setup\n",
+    "\n",
+    "### Packages"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -11,29 +31,34 @@
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
-   "triskele_path = Path('../triskele/python/')\n",
-   "sys.path.append(str(triskele_path.resolve()))\n",
+   "sys.path.append(str(Path('..').resolve()))\n",
+   "import ld2dap\n",
+   "\n",
+   "sys.path.append(str(Path('../triskele/').resolve()))\n",
    "import triskele"
   ]
  },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "### Functions and constants"
+  ]
+ },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
-   "def show(im):\n",
-   "    plt.figure(figsize=(16*2,3*2))\n",
-   "    plt.imshow(im)\n",
-   "    plt.colorbar()\n",
-   "    plt.show()"
+   "figsize=np.array([16,9]) * 1."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "## List raster files"
+   "## List of raster files"
   ]
  },
  {
@@ -50,7 +75,10 @@
    " '../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif',\n",
    " '../Data/phase1_rasters/Intensity_C2/UH17_GI2F051_TR.tif',\n",
    " '../Data/phase1_rasters/Intensity_C3/UH17_GI3F051_TR.tif',\n",
-   " #'../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif'\n",
+   " #'../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif',\n",
+   " #'../Res/HVR/C123_num_returns_0_5_nearest.tif',\n",
+   " '../Res/HVR noisy/C123_num_returns_0_5_nearest.tif'\n",
+   "\n",
    "]"
   ]
  },
@@ -58,111 +86,59 @@
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "## Define dataset dependent raster filtering"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "def DFC_filter(raster):\n",
-   "    ## Remove extreme values\n",
-   "    #raster[raster == raster.max()] = raster[raster != raster.max()].max()\n",
-   "    raster[raster > 1e4] = raster[raster < 1e4].max()\n",
-   "    #raster[raster == np.finfo(raster.dtype).max] = raster[raster != raster.max()].max()"
+   "**IDEA:** We could try to combine rasters into new ones (e.g. $R_{DSM} - R_{DTM}$ to obtain a tree and building height map)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "## Load rasters data"
+   "## Create the Profiles Pattern"
   ]
  },
  {
-  "cell_type": "code",
-  "execution_count": null,
+  "cell_type": "markdown",
   "metadata": {},
-  "outputs": [],
   "source": [
-   "layers = list()\n",
+   "Basic APs classification flow:\n",
    "\n",
-   "for file in layers_files:\n",
-   "    print('Loading {}'.format(file))\n",
-   "    layer = triskele.read(file)\n",
-   "    DFC_filter(layer)\n",
-   "    layers.append(layer)\n",
+   "- Load rasters\n",
+   "- Filter input rasters with a threshold value: for some reason the DFC rasters are noisy, with very high values\n",
+   "- Construct filtered rasters with basic attribute profiles\n",
+   "    + Area: [10, 100, 1e3, ..., 1e4]\n",
+   "    + ...\n"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "### Load and filter rasters"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "loader = ld2dap.LoadTIFF(layers_files)\n",
+   "dfc_filter = ld2dap.Treshold(1e4)\n",
+   "rasters_disp = ld2dap.ShowFig('all')\n",
    "\n",
-   "layers_stack = np.stack(layers, axis=2)"
-  ]
- },
- {
-  "cell_type": "markdown",
-  "metadata": {},
-  "source": [
-   "## Display rasters"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "for i in range(layers_stack.shape[2]):\n",
-   "    plt.figure(figsize=(16*2,3*2))\n",
-   "    plt.imshow(layers_stack[:,:,i])\n",
-   "    plt.colorbar()\n",
-   "    plt.title(layers_files[i])\n",
-   "    plt.show()"
-  ]
- },
- {
-  "cell_type": "markdown",
-  "metadata": {},
-  "source": [
-   "## Attributes filter with TRISKELE !"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "area = np.array([10, 100, 1e3, 1e4, 1e5])\n",
-   "sd = np.array([0.5,0.9,0.99,0.999,0.9999])#,1e4,1e5,5e5])\n",
-   "moi = np.array([0.01,0.02,0.03,0.04,0.05,0.06,0.07,0.08,0.09,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.99])\n",
+   "dfc_filter.input = loader\n",
+   "rasters_disp.input = dfc_filter\n",
    "\n",
-   "t = triskele.Triskele(layers_stack[:,:,:], verbose=False)\n",
-   "attributes = t.filter(tree='tos-tree',\n",
-   "                      area=area,\n",
-   "                      #standard_deviation=sd,\n",
-   "                      #moment_of_inertia=moi\n",
-   "                      )\n",
-   "attributes.shape"
+   "loader.run()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "for i in range(attributes.shape[2]-1):\n",
-   "    plt.figure(figsize=(16*2,3*2))\n",
-   "    plt.imshow(attributes[:,:,i])\n",
-   "    plt.colorbar()\n",
-   "    plt.show()\n",
-   "    plt.figure(figsize=(16*2,3*2))\n",
-   "    plt.imshow(attributes[:,:,i+1].astype(np.float) - attributes[:,:,i])\n",
-   "    plt.colorbar()\n",
-   "    #plt.title(layers_files[i])\n",
-   "plt.show()\n",
-   "plt.figure(figsize=(16*2,3*2))\n",
-   "plt.imshow(attributes[:,:,-1])\n",
-   "plt.colorbar()\n",
-   "plt.show()\n"
+   "### Compute APs\n",
+   "\n",
+   "Choose the area filter thresholds."
   ]
  },
 {
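
On the **IDEA** noted above (combining rasters, e.g. $R_{DSM} - R_{DTM}$): a minimal numpy sketch of such a derived layer, with hypothetical inputs (`dsm` and `dtm` standing for rasters already loaded as 2D arrays):

```python
import numpy as np

# Hypothetical inputs: DSM and DTM rasters already loaded as 2D float arrays.
dsm = np.array([[12.0, 15.0], [11.0, 30.0]])  # surface model (ground + objects)
dtm = np.array([[10.0, 10.5], [10.8, 11.0]])  # terrain model (bare ground)

# Normalized DSM: height of objects (trees, buildings) above the ground.
ndsm = dsm - dtm
print(ndsm)  # [[ 2.   4.5]
             #  [ 0.2 19. ]]
```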
@@ -171,18 +147,17 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "plt.imshow((attributes[:,:,4].astype(np.float) - attributes[:,:,3])>0)\n",
+   "areas = [10., 100.]\n",
+   "areas.extend([x * 1e3 for x in range(1,100,2)])\n",
+   "plt.plot(areas, '.')\n",
    "plt.show()"
   ]
  },
  {
-  "cell_type": "code",
-  "execution_count": null,
+  "cell_type": "markdown",
   "metadata": {},
-  "outputs": [],
   "source": [
-   "plt.imshow((attributes[:,:,4].astype(np.float) - attributes[:,:,3])<0)\n",
-   "plt.show()"
+   "Disable the previous display, then add the APs node and the vectors output to the flow."
   ]
  },
 {
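
A quick sanity check (ours, not a notebook cell) of the thresholds built above: [10, 100] followed by the 50 odd multiples of 1e3 up to 99e3, i.e. 52 values in total.

```python
areas = [10., 100.]
areas.extend([x * 1e3 for x in range(1, 100, 2)])  # 1e3, 3e3, ..., 99e3

assert len(areas) == 52
assert areas[:3] == [10., 100., 1e3] and areas[-1] == 99e3
```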
@@ -191,7 +166,15 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "show((attributes[:,:,6].astype(np.float) - attributes[:,:,5]))"
+   "rasters_disp.input = None\n",
+   "\n",
+   "aps = ld2dap.AttributeProfiles(area=areas)\n",
+   "aps.input = dfc_filter\n",
+   "\n",
+   "out_vectors = ld2dap.RawOutput()\n",
+   "out_vectors.input = aps\n",
+   "\n",
+   "out_vectors.run()"
   ]
  },
 {
{ {
@@ -200,14 +183,30 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "attributes[0,0,:] = 0"
+   "out_vectors.data.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "## Classification vectors"
+   "## Classification"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "- Concatenate the filtered rasters into pixel description vectors\n",
+   "- Split the vectors into train and test sets for cross validation with a spatial approach: random sampling is not good for spatial descriptors!\n",
+   "- Random forests"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "### Vectors"
   ]
  },
 {

View File

@@ -50,7 +50,7 @@
   "outputs": [],
   "source": [
    "plt.figure(figsize=fs)\n",
-   "plt.imshow(gt)\n",
+   "plt.imshow(gt, cmap=plt.get_cmap('GnBu'))\n",
    "plt.colorbar()\n",
    "plt.show()"
   ]
@@ -118,30 +118,49 @@
    "plt.show()"
   ]
  },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "## First Semantic Cross Validation Generator Prototype\n",
+   "\n",
+   "**WORK IN PROGRESS** you'll have fun"
+  ]
+ },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
-   "import matplotlib.pyplot as plt\n",
-   "import matplotlib.animation\n",
-   "import numpy as np\n",
+   "nb_split = 10\n",
+   "step = 0\n",
    "\n",
-   "t = np.linspace(0,2*np.pi)\n",
-   "x = np.sin(t)\n",
+   "test_part = 1 / nb_split\n",
    "\n",
-   "fig, ax = plt.subplots()\n",
-   "ax.axis([0,2*np.pi,-1,1])\n",
-   "l, = ax.plot([],[])\n",
+   "split = np.zeros_like(gt)\n",
    "\n",
-   "def animate(i):\n",
-   "    l.set_data(t[:i], x[:i])\n",
-   "\n",
-   "ani = matplotlib.animation.FuncAnimation(fig, animate, frames=len(t))\n",
-   "\n",
-   "from IPython.display import HTML\n",
-   "HTML(ani.to_jshtml())"
+   "for lbli, lblc in zip(count[0][1:], count[1][1:]):\n",
+   "    treshold = int(lblc * test_part)\n",
+   "    #print('lbli:{}, count:{}, train:{}'.format(lbli, lblc, treshold))\n",
+   "    f = np.nonzero(gt == lbli)\n",
+   "    train, test\n",
+   "    split[f[0][treshold * step:treshold * (step + 1)], f[1][:treshold]] = 2\n",
+   "    split[f[0][treshold:], f[1][treshold:]] = 1\n",
+   "    \n",
+   "plt.figure(figsize=fs)\n",
+   "plt.imshow(split)\n",
+   "#plt.imshow(split * (gt==1))\n",
+   "plt.show()"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "gt == lbli"
   ]
  },
 {
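
A note on the WIP split cell above: for `step > 0` it slices `f[0]` and `f[1]` with different ranges, so the marked pixels no longer form `(row, col)` pairs of the same label. A hedged sketch of our reading of the intended per-label split (illustration, not committed code):

```python
import numpy as np

def semantic_split(gt, nb_split=10, step=0):
    """Per-label contiguous train/test split (our corrected reading of the WIP cell).

    Marks 2 for test pixels and 1 for train pixels, taking the step-th
    contiguous chunk of each label as the test set.
    """
    split = np.zeros_like(gt)
    labels, counts = np.unique(gt, return_counts=True)
    for lbli, lblc in zip(labels[1:], counts[1:]):   # skip label 0 (unclassified)
        threshold = int(lblc / nb_split)
        rows, cols = np.nonzero(gt == lbli)
        test = slice(threshold * step, threshold * (step + 1))
        split[rows, cols] = 1                        # train everywhere...
        split[rows[test], cols[test]] = 2            # ...except the test chunk
    return split
```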
@@ -151,15 +170,6 @@
    "# Metaclasses "
   ]
  },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "gt"
-  ]
- },
  {
   "cell_type": "code",
   "execution_count": null,
@@ -200,15 +210,6 @@
    "plt.show()"
   ]
  },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "lbl"
-  ]
- },
  {
   "cell_type": "markdown",
   "metadata": {},
@@ -244,7 +245,81 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "all_meta_view[all_meta_view['metaclass_index'] == 0]['index']"
+   "np.array(all_meta_view[all_meta_view['metaclass_label'] == 'Roads']['index'])"
   ]
- }
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "plt.figure(figsize=fs)\n",
+   "plt.imshow(gt * np.isin(gt, np.array(all_meta_view[all_meta_view['metaclass_label'] == 'Roads']['index'])))\n",
+   "plt.show()"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "count = np.unique(gt, return_counts=True)\n",
+   "df_count = pd.DataFrame(np.array(count).T, columns=['index', 'count'])\n",
+   "df_count.head()"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "import seaborn as sns\n",
+   "\n",
+   "df_tmp = meta_idx.merge(df_count).merge(meta_lbl).merge(dfc_lbl).drop(index=0)\n",
+   "display(df_tmp.head())\n",
+   "\n",
+   "g = sns.barplot(data=df_tmp, x='label', y='count')\n",
+   "plt.xticks(rotation=80)\n",
+   "plt.show()"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "meta_count_view = pd.merge(meta_idx, df_count)\n",
+   "display(meta_count_view.head())\n",
+   "meta_count_view.drop('index', 1, inplace=True)\n",
+   "meta_count_view = meta_count_view.groupby('metaclass_index', as_index=False).sum()\n",
+   "display(meta_count_view.head())\n",
+   "\n",
+   "pd.merge(meta_lbl, meta_count_view)"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "meta_index = np.array(meta_idx['metaclass_index'])"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "plt.figure(figsize=fs)\n",
+   "plt.imshow(meta_index[gt])\n",
+   "plt.colorbar()\n",
+   "plt.show()"
+  ]
+ }
 ],
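
The last cells rely on numpy fancy indexing to remap every ground-truth pixel to its metaclass in one shot: `meta_index[gt]` looks up each class index in the lookup array. A tiny self-contained illustration (the mapping values here are made up, not the notebook's):

```python
import numpy as np

# Hypothetical lookup: entry i gives the metaclass of class index i (0..20).
meta_index = np.array([0, 1, 1, 1, 2, 2, 1, 3, 4, 4, 5, 5, 5, 5, 5, 6, 5, 5, 7, 7, 4])

# Toy ground-truth raster of class indices.
gt = np.array([[0, 1, 10],
               [8, 4, 20]])

print(meta_index[gt])   # fancy indexing relabels every pixel at once
# [[0 1 5]
#  [4 2 4]]
```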

View File

@ -0,0 +1,412 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"from pathlib import Path\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"triskele_path = Path('../triskele/python/')\n",
"sys.path.append(str(triskele_path.resolve()))\n",
"import triskele"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def show(im):\n",
" plt.figure(figsize=(16*2,3*2))\n",
" plt.imshow(im)\n",
" plt.colorbar()\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## List raster files"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"layers_files = [\n",
" '../Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif',\n",
" '../Data/phase1_rasters/DEM_C123_TLI/UH17_GEG05_TR.tif',\n",
" '../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C2/UH17_GI2F051_TR.tif',\n",
" '../Data/phase1_rasters/Intensity_C3/UH17_GI3F051_TR.tif',\n",
" #'../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif'\n",
"]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Define dataset dependent raster filtering"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def DFC_filter(raster):\n",
" ## Remove extrem values\n",
" #raster[raster == raster.max()] = raster[raster != raster.max()].max()\n",
" raster[raster > 1e4] = raster[raster < 1e4].max()\n",
" #raster[raster == np.finfo(raster.dtype).max] = raster[raster != raster.max()].max()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Load rasters data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"layers = list()\n",
"\n",
"for file in layers_files:\n",
" print('Loading {}'.format(file))\n",
" layer = triskele.read(file)\n",
" DFC_filter(layer)\n",
" layers.append(layer)\n",
"\n",
"layers_stack = np.stack(layers, axis=2)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Display rasters"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for i in range(layers_stack.shape[2]):\n",
" plt.figure(figsize=(16*2,3*2))\n",
" plt.imshow(layers_stack[:,:,i])\n",
" plt.colorbar()\n",
" plt.title(layers_files[i])\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Attributes filter with TRISKELE !"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"area = np.array([10, 100, 1e3, 1e4, 1e5])\n",
"sd = np.array([0.5,0.9,0.99,0.999,0.9999])#,1e4,1e5,5e5])\n",
"moi = np.array([0.01,0.02,0.03,0.04,0.05,0.06,0.07,0.08,0.09,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.99])\n",
"\n",
"t = triskele.Triskele(layers_stack[:,:,:], verbose=False)\n",
"attributes = t.filter(tree='tos-tree',\n",
" area=area,\n",
" #standard_deviation=sd,\n",
" #moment_of_inertia=moi\n",
" )\n",
"attributes.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"for i in range(attributes.shape[2]-1):\n",
" plt.figure(figsize=(16*2,3*2))\n",
" plt.imshow(attributes[:,:,i])\n",
" plt.colorbar()\n",
" plt.show()\n",
" plt.figure(figsize=(16*2,3*2))\n",
" plt.imshow(attributes[:,:,i+1].astype(np.float) - attributes[:,:,i])\n",
" plt.colorbar()\n",
" #plt.title(layers_files[i])\n",
"plt.show()\n",
"plt.figure(figsize=(16*2,3*2))\n",
"plt.imshow(attributes[:,:,-1])\n",
"plt.colorbar()\n",
"plt.show()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imshow((attributes[:,:,4].astype(np.float) - attributes[:,:,3])>0)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imshow((attributes[:,:,4].astype(np.float) - attributes[:,:,3])<0)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"show((attributes[:,:,6].astype(np.float) - attributes[:,:,5]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"attributes[0,0,:] = 0"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Classification vectors"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"X = attributes.reshape(-1, attributes.shape[2])\n",
"\n",
"(attributes[0,0] == X[0]).all()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"labels_file = Path('../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif')\n",
"labels = triskele.read(labels_file)\n",
"display(labels.shape)\n",
"\n",
"plt.figure(figsize=(16*2,3*2))\n",
"plt.imshow(labels)\n",
"plt.colorbar()\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"Y = labels.reshape(-1)\n",
"\n",
"X.shape, Y.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Random Forest Classifier"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import importlib\n",
"from sklearn import metrics\n",
"from sklearn.ensemble import RandomForestClassifier\n",
"import pickle\n",
"sys.path.insert(0, '..')\n",
"import CrossValidationGenerator as cvg"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"importlib.reload(cvg)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn import metrics\n",
"import pandas as pd\n",
"\n",
"\n",
"def scores(actual, prediction):\n",
" ct = pd.crosstab(prediction, actual,\n",
" rownames=['Prediction'], colnames=['Reference'],\n",
" margins=True, margins_name='Total',\n",
" normalize=False # all, index, columns\n",
" )\n",
" display(ct)\n",
" \n",
" scores = metrics.precision_recall_fscore_support(actual, prediction)\n",
" print(metrics.classification_report(actual, prediction)) "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cv_labels = np.zeros(labels[:].shape)\n",
"\n",
"for xtrain, xtest, ytrain, ytest, train_index in cvg.CVG_legacy(attributes[:], labels[:], 10, 1): \n",
" rfc = RandomForestClassifier(n_jobs=-1, random_state=0, n_estimators=100, verbose=True)\n",
" rfc.fit(xtrain, ytrain)\n",
" \n",
" ypred = rfc.predict(xtest)\n",
" \n",
" display(ytest.shape, ypred.shape)\n",
" \n",
" scores(ytest, ypred)\n",
" \n",
" cv_labels[:,train_index == False] = ypred.reshape(cv_labels.shape[0], -1)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"show(labels)\n",
"show(cv_labels)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imsave('../Res/labels.png', labels)\n",
"plt.imsave('../Res/prediction.png', cv_labels)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Scores"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"scores(actual=labels.reshape(-1), prediction=cv_labels.reshape(-1))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Labels\n",
"\n",
"\n",
" 0 Unclassified\n",
" 1 Healthy grass\n",
" 2 Stressed grass\n",
" 3 Artificial turf\n",
" 4 Evergreen trees\n",
" 5 Deciduous trees\n",
" 6 Bare earth\n",
" 7 Water\n",
" 8 Residential buildings\n",
" 9 Non-residential buildings\n",
" 10 Roads\n",
" 11 Sidewalks\n",
" 12 Crosswalks\n",
" 13 Major thoroughfares\n",
" 14 Highways\n",
" 15 Railways\n",
" 16 Paved parking lots\n",
" 17 Unpaved parking lots\n",
" 18 Cars\n",
" 19 Trains\n",
" 20 Stadium seats\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -48,9 +48,9 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "l = ld2dap.LoadTIFF(layers_files[5:7])\n",
+   "l = ld2dap.LoadTIFF(layers_files[0])\n",
    "t = ld2dap.Treshold(1e4)\n",
-   "a = ld2dap.SelfDualAttributeProfiles(area = [1e3, 1e6], sd=[.4,.6,.8], moi=[.5,.9])\n",
+   "a = ld2dap.SelfDualAttributeProfiles(area = [1e3, 1e6])#, sd=[.4,.6,.8], moi=[.5,.9])\n",
    "f = ld2dap.Differential()\n",
    "d = ld2dap.ShowFig(stack_id='all', symb=False)\n",
    "o = ld2dap.RawOutput()\n",

View File

View File

@@ -1,6 +0,0 @@
-{
- "cells": [],
- "metadata": {},
- "nbformat": 4,
- "nbformat_minor": 2
-}

View File

@@ -18,6 +18,9 @@ class Input(Node):
     def register(self, output):
         self.outputs.append(output)

+    def unregister(self, output):
+        self.outputs.remove(output)
+
     def process(self, data, metadata=None):
         """Override abstract method"""

View File

@@ -24,10 +24,16 @@ class Output(Node):
         self.__dict__[name] = value

     def _input(self, inode):
-        if not isinstance(inode, (Input)):
+        if not isinstance(inode, (Input)) and inode is not None:
             raise NotImplementedError('{} is not an Input'.format(inode))
+        if self.__dict__['input'] is not None:
+            self.__dict__['input'].unregister(self)
         self.__dict__['input'] = inode
-        inode.register(self)
+        if inode is not None:
+            inode.register(self)

     def _run(self):
         if self.input is None:
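
Taken together, these two hunks let `out.input = None` detach a node and let reassigning `out.input` rewire it without leaking stale observers. A minimal self-contained sketch of the pattern (simplified stand-ins, not the actual ld2dap Node classes):

```python
class Input:
    """Upstream node: keeps track of the outputs observing it."""
    def __init__(self):
        self.outputs = []

    def register(self, output):
        self.outputs.append(output)

    def unregister(self, output):
        self.outputs.remove(output)


class Output:
    """Downstream node: setting `input` (un)registers it upstream."""
    def __init__(self):
        self.__dict__['input'] = None   # bypass __setattr__ during init

    def __setattr__(self, name, value):
        if name == 'input':
            self._input(value)
        else:
            self.__dict__[name] = value

    def _input(self, inode):
        if not isinstance(inode, Input) and inode is not None:
            raise NotImplementedError('{} is not an Input'.format(inode))
        if self.__dict__['input'] is not None:
            self.__dict__['input'].unregister(self)   # drop the old edge
        self.__dict__['input'] = inode
        if inode is not None:
            inode.register(self)                      # observe the new source


src_a, src_b, out = Input(), Input(), Output()
out.input = src_a
out.input = src_b          # rewires: src_a no longer references out
assert src_a.outputs == [] and src_b.outputs == [out]
out.input = None           # detaches cleanly
assert src_b.outputs == []
```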