Legacy Supervisor

parent 238f29c4bc
commit fc891cdd4f
.gitignore (vendored) | 2
@@ -1,7 +1,9 @@
 [Rr]es/
 [Ll]ogs/
 [Dd]ata/
+Enrichment/
 *.pyc
 *.log
 .ipynb_checkpoints/
 triskele
+Untitled.ipynb
GroundTruth/jurse_meta_idx.csv | 22 (new file)
@@ -0,0 +1,22 @@
+index,metaclass_index
+0,0
+1,1
+2,1
+3,0
+4,2
+5,2
+6,0
+7,0
+8,3
+9,4
+10,5
+11,0
+12,0
+13,5
+14,5
+15,0
+16,0
+17,0
+18,6
+19,7
+20,0
GroundTruth/jurse_meta_lbl.csv | 9 (new file)
@@ -0,0 +1,9 @@
+metaclass_index,metaclass_label
+0,Unclassified
+1,Grass
+2,Trees
+3,Residential buildings
+4,Non-residential buildings
+5,Roads
+6,Cars
+7,Trains
GroundTruth/labels.csv | 22 (new file)
@@ -0,0 +1,22 @@
+index,label
+0,Unclassified
+1,Healthy grass
+2,Stressed grass
+3,Artificial turf
+4,Evergreen trees
+5,Deciduous trees
+6,Bare earth
+7,Water
+8,Residential buildings
+9,Non-residential buildings
+10,Roads
+11,Sidewalks
+12,Crosswalks
+13,Major thoroughfares
+14,Highways
+15,Railways
+16,Paved parking lots
+17,Unpaved parking lots
+18,Cars
+19,Trains
+20,Stadium seats
GroundTruth/metaclass_indexes-Copy1.csv | 22 (new file)
@@ -0,0 +1,22 @@
+index,metaclass_index
+0,0
+1,1
+2,1
+3,1
+4,2
+5,2
+6,3
+7,0
+8,4
+9,4
+10,5
+11,5
+12,5
+13,5
+14,5
+15,6
+16,5
+17,0
+18,7
+19,8
+20,9
GroundTruth/metaclass_labels.csv | 11 (new file)
@@ -0,0 +1,11 @@
+metaclass_index,metaclass_label
+0,Unclassified
+1,Grass
+2,Trees
+3,Bare earth
+4,Buildings
+5,Roads
+6,Railways
+7,Cars
+8,Trains
+9,Stadium
Notebooks/CrossVal spatial.ipynb | 277 (new file)
@@ -0,0 +1,277 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "sys.path.append('../triskele/python/')\n",
+    "import triskele\n",
+    "\n",
+    "figsize = np.array((16, 3)) * 2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Spatial Cross Validation Generator\n",
+    "\n",
+    "Due to the operating of attribute profiles, we need a spatial cross validation generator to ensure we do not train and test samples with the same root in the hierarchical representation used during the description process."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load ground truth & metadata"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gt = triskele.read('../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=figsize)\n",
+    "plt.imshow(gt, cmap=plt.get_cmap('GnBu'))\n",
+    "plt.colorbar()\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dfc_lbl = pd.read_csv('../GroundTruth/labels.csv')\n",
+    "meta_idx = pd.read_csv('../GroundTruth/jurse_meta_idx.csv')\n",
+    "meta_lbl = pd.read_csv('../GroundTruth/jurse_meta_lbl.csv')\n",
+    "\n",
+    "imap = np.array(meta_idx['metaclass_index'])\n",
+    "\n",
+    "all_meta_view = dfc_lbl.merge(meta_idx).merge(meta_lbl)\n",
+    "all_meta_view"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=figsize)\n",
+    "plt.imshow(imap[gt], cmap=plt.get_cmap('GnBu'))\n",
+    "plt.colorbar()\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Labels map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fig, axs = plt.subplots(dfc_lbl['label'].size, figsize=figsize * dfc_lbl['label'].size)\n",
+    "\n",
+    "for i, lbl in enumerate(dfc_lbl['label']):\n",
+    "    omap = gt == np.array(dfc_lbl[dfc_lbl['label'] == lbl ]['index'])\n",
+    "    axs[i].set_title(lbl)\n",
+    "    axs[i].imshow(1. * omap, cmap=plt.get_cmap('GnBu'))\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Meta labels map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fig, axs = plt.subplots(meta_lbl['metaclass_label'].size, figsize=figsize * meta_lbl['metaclass_label'].size)\n",
+    "\n",
+    "for i, lbl in enumerate(meta_lbl['metaclass_label']):\n",
+    "    omap = np.isin(gt, np.array(all_meta_view[all_meta_view['metaclass_label'] == lbl]['index']))\n",
+    "    axs[i].set_title(lbl)\n",
+    "    axs[i].imshow(1. * omap, cmap=plt.get_cmap('GnBu'))\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Split data "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "count"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "col, count = np.unique(cut, return_counts=True)\n",
+    "df = pd.DataFrame(count[np.newaxis,:], columns=col)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "count = 5\n",
+    "\n",
+    "step = int(gt.shape[0] / count)\n",
+    "\n",
+    "fig, axs = plt.subplots(count, figsize=figsize * count)\n",
+    "\n",
+    "col, count_all = np.unique(imap[gt], return_counts=True)\n",
+    "\n",
+    "metrics = pd.DataFrame(columns=col)\n",
+    "counts = list()\n",
+    "for i in range(count):\n",
+    "    cut = imap[gt][i*step:(i+1)*step+1]\n",
+    "    axs[i].imshow(cut, cmap=plt.get_cmap('GnBu'))\n",
+    "    axs[i].set_title('Cut {}'.format(i))\n",
+    "    \n",
+    "    col, count = np.unique(cut, return_counts=True)\n",
+    "    df = pd.DataFrame(count[np.newaxis,:], columns=col, index=[i])\n",
+    "    counts.append(df)\n",
+    "\n",
+    "plt.savefig('../Res/meta.png',bbox_inches='tight', transparent=\"True\", pad_inches=0)\n",
+    "plt.show()\n",
+    "\n",
+    "metrics = pd.concat(counts)\n",
+    "metrics / count_all * 100"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Try to remove filtered object"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append('..')\n",
+    "import ld2dap"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "tif = ld2dap.LoadTIFF('../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif')\n",
+    "trh = ld2dap.Treshold(70)\n",
+    "dsp = ld2dap.ShowFig(symb=True)\n",
+    "\n",
+    "aps = ld2dap.SelfDualAttributeProfiles(area=[100, 1000, 1e4, 1e5, 1e6, 1e7])\n",
+    "dif = ld2dap.Differential()\n",
+    "\n",
+    "dif.input = aps\n",
+    "aps.input = trh\n",
+    "dsp.input = dif\n",
+    "trh.input = tif\n",
+    "\n",
+    "dsp.run()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "tif = ld2dap.LoadTIFF('../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif')\n",
+    "trh = ld2dap.Treshold(1e4)\n",
+    "aps = ld2dap.SelfDualAttributeProfiles(area=[100, 1e3, 1e4])\n",
+    "dsp = ld2dap.ShowFig(symb=True)\n",
+    "dif = ld2dap.Differential()\n",
+    "ddsp = ld2dap.ShowFig(symb=True)\n",
+    "\n",
+    "ddsp.input = dif\n",
+    "dif.input = aps\n",
+    "dsp.input = aps\n",
+    "aps.input = trh\n",
+    "trh.input = tif\n",
+    "\n",
+    "tif.run()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
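The markdown cell above motivates the whole notebook: because attribute profiles describe a pixel through nodes of a hierarchical (tree) representation, pixels that share a root must not land on different sides of a train/test split, so folds have to be spatial blocks rather than random pixel subsets. The later cells cut the ground-truth raster into horizontal bands (count = 5, step = rows / count) and tabulate per-band class proportions. Below is a minimal, self-contained sketch of that banding idea; the function name and fold count are illustrative and this is not the project's final cross-validation generator.

import numpy as np

def band_folds(gt, n_folds=5):
    """Yield (train_mask, test_mask) pairs, one horizontal band of the raster per fold."""
    step = int(np.ceil(gt.shape[0] / n_folds))
    for i in range(n_folds):
        test_mask = np.zeros(gt.shape, dtype=bool)
        test_mask[i * step:(i + 1) * step] = True   # this band is held out
        yield ~test_mask, test_mask                 # train on everything else

# Example (with gt loaded as in the notebook): count labelled test pixels per fold,
# ignoring class 0, which is 'Unclassified'.
# for k, (train, test) in enumerate(band_folds(gt)):
#     print(k, np.count_nonzero((gt > 0) & test))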
@@ -56,6 +56,22 @@
 "dtm = ra.rasterize_cache('z', C12, .5, 'nearest', True, '../Res/enrichment_rasters/')"
 ]
 },
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"print('Hello world')"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"Ici nous avons lancé un print en Python."
+]
+},
 {
 "cell_type": "code",
 "execution_count": null,
@@ -122,8 +138,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"C1_raster = ra.rasterize_cache('intensity', C1, .5, 'linear', False, '../Res/enrichment_rasters/')\n",
-"C2_raster = ra.rasterize_cache('intensity', C2, .5, 'linear', False, '../Res/enrichment_rasters/')"
+"C1_raster = ra.rasterize_cache('intensity', C1, 1., 'linear', False, '../Res/enrichment_rasters/')\n",
+"C2_raster = ra.rasterize_cache('intensity', C2, 1., 'linear', False, '../Res/enrichment_rasters/')"
 ]
 },
 {
@@ -16,9 +16,12 @@
 "- [X] Write/move results\n",
 "- [X] Watch folder\n",
 "- [X] Main loop\n",
-"- [ ] Logs\n",
-"- [ ] Catch errors\n",
-"- [ ] Custom CVG\n",
+"- [X] Logs\n",
+"- [X] Catch errors\n",
+"- [X] Custom CVG\n",
+"- [ ] Use meta labelling\n",
+"- [ ] Create new workflow\n",
+"- [ ] Workflow selection in recipe\n",
 "\n",
 "\n",
 "## Init"
@@ -41,12 +44,7 @@
 "import datetime\n",
 "from sklearn import metrics\n",
 "from pathlib import Path\n",
-"\n",
-"from sklearn.ensemble import RandomForestClassifier\n",
-"\n",
-"sys.path.append('..')\n",
-"import Descriptors\n",
-"from CrossValidationGenerator import APsCVG\n",
+"import pandas as pd\n",
 "\n",
 "sys.path.append('../triskele/python')\n",
 "import triskele"
@@ -205,6 +203,36 @@
 "att.shape"
 ]
 },
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"### Meta labeling"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"def get_ground_truth(expe):\n",
+"    gt = triskele.read(expe['ground_truth'])\n",
+"    \n",
+"    # Meta labeling\n",
+"    idx_map = np.arange(gt.max() + 1)\n",
+"\n",
+"    if 'meta_labels' in expe:\n",
+"        meta_idx = pd.read_csv(expe['meta_labels'])\n",
+"        idx = np.array(meta_idx['index'])\n",
+"        midx = np.array(meta_idx['metaclass_index'])\n",
+"        idx_map[idx] = midx\n",
+"    \n",
+"    return idx_map[gt]\n",
+"\n",
+"expe_ground_truth(expe)"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},
@@ -221,8 +249,7 @@
 "def compute_classification(expe, att):\n",
 "    \"\"\"Read a standard expe recipe and attributes, return the result classification\"\"\"\n",
 "    # Ground truth\n",
-"    gt = triskele.read(expe['ground_truth'])\n",
-"\n",
+"    gt = get_ground_truth(expe)\n",
 "\n",
 "    # CrossVal and ML\n",
 "    cv = expe['cross_validation']\n",
@@ -304,6 +331,14 @@
 "    \n",
 "    ### Extensible: meta-classes\n",
 "    gt = triskele.read(expe['ground_truth'])\n",
+"    idx_map = np.arange(gt.max() + 1)\n",
+"\n",
+"    if 'meta_labels' in expe:\n",
+"        meta_idx = pd.read_csv(expe['meta_labels'])\n",
+"        idx = np.array(meta_idx['index'])\n",
+"        midx = np.array(meta_idx['metaclass_index'])\n",
+"        idx_map[idx] = midx\n",
+"\n",
 "    return compute_metrics(gt, classification)\n",
 "\n",
 "expe_results = run_metrics(expe, classification)\n",
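The meta-labeling cell added above relies on a small NumPy idiom: idx_map is a lookup table whose position i holds the metaclass of fine class i, so idx_map[gt] remaps every pixel of the raster in one vectorised step. A tiny self-contained illustration of that trick follows; the arrays are made up for the example, not taken from the dataset.

import numpy as np

# Hypothetical fine-class raster with labels in {0, 1, 2, 3}
gt = np.array([[0, 1, 2],
               [3, 3, 1]])

# Lookup table: position = fine class index, value = metaclass index
idx_map = np.arange(gt.max() + 1)   # identity mapping by default
idx_map[[2, 3]] = [1, 0]            # e.g. class 2 -> metaclass 1, class 3 -> metaclass 0

meta_gt = idx_map[gt]               # remaps every pixel at once
print(meta_gt)
# [[0 1 1]
#  [0 0 1]]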
@@ -190,11 +190,25 @@ def compute_descriptors(expe):
     return att
 
 
+def get_ground_truth(expe):
+    gt = triskele.read(expe['ground_truth'])
+
+    # Meta labeling
+    idx_map = np.arange(gt.max() + 1)
+
+    if 'meta_labels' in expe:
+        meta_idx = pd.read_csv(expe['meta_labels'])
+        idx = np.array(meta_idx['index'])
+        midx = np.array(meta_idx['metaclass_index'])
+        idx_map[idx] = midx
+
+    return idx_map[gt]
+
+
 def compute_classification(expe, descriptors):
     """Read a standard expe recipe and descriptors, return the result classification"""
     # Ground truth
-    gt = triskele.read(expe['ground_truth'])
-
+    gt = get_ground_truth(expe)
 
     # CrossVal and ML
     cv = expe['cross_validation']
@@ -234,7 +248,7 @@ def run_metrics(expe, classification, descriptors):
     """Compute the metrics from a standard expe recipe and an given classification"""
 
     ### Extensible: meta-classes
-    gt = triskele.read(expe['ground_truth'])
+    gt = get_ground_truth(expe)
     return compute_metrics(gt, classification, descriptors)
 
 
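get_ground_truth is driven by the same recipe dictionary that the YAML file further below is parsed into: it always reads the 'ground_truth' raster path and, when a 'meta_labels' CSV with index and metaclass_index columns is present, collapses the raw classes into metaclasses. A hedged usage sketch follows; the dictionary literal is illustrative and mirrors test.yml, whereas in the supervisor the recipe comes from a parsed YAML file.

# Illustrative only; in practice `expe` comes from a parsed recipe such as test.yml.
expe = {
    'ground_truth': '../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif',
    'meta_labels': '../GroundTruth/jurse_meta_idx.csv',  # optional: drop this key to keep the raw classes
}

gt = get_ground_truth(expe)   # array of metaclass indices (0-7 with jurse_meta_lbl.csv)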
test.py | 57
@@ -8,30 +8,47 @@
 #
 # TODO details
 
-#from core import Input, Output, Filter
-from ld2dap import LoadTIFF, SaveFig, Treshold, ShowFig, Differential
-from ld2dap import AttributeProfiles as APs
-from ld2dap import SelfDualAttributeProfiles as SDAPs
-from ld2dap.core import logger
-
 import numpy as np
+import ld2dap
+import logger
+
+
+def diff_test():
+    layers_files = [
+        '../Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif',
+        '../Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif',
+        '../Data/phase1_rasters/DEM_C123_TLI/UH17_GEG05_TR.tif',
+        '../Data/phase1_rasters/DSM_C12/UH17c_GEF051_TR.tif',
+        '../Data/phase1_rasters/Intensity_C1/UH17_GI1F051_TR.tif',
+        '../Data/phase1_rasters/Intensity_C2/UH17_GI2F051_TR.tif',
+        '../Data/phase1_rasters/Intensity_C3/UH17_GI3F051_TR.tif'
+    ]
+
+    l = ld2dap.LoadTIFF(layers_files[0:3])
+    t = ld2dap.Treshold(1e4)
+    p = ld2dap.SelfDualAttributeProfiles(area=[100,1e4])
+    f = ld2dap.Differential()
+    #p = ld2dap.AttributeProfiles(area=[100,1e4,1e6])
+    m = ld2dap.Merger()
+    a = ld2dap.LocalFeatures([np.std], 7)#, sd=[.4,.6,.8], moi=[.5,.9])
+    d = ld2dap.ShowFig(stack_id='all', symb=True)
+    o = ld2dap.RawOutput()
+
+    d.input = m
+    m.input = f
+    m.second.input = a
+    a.input = f
+    f.input = p
+    p.input = t
+    t.input = l
+
+    d.run()
+
+
+#from core import Input, Output, Filter
 
 def main():
     logger.setup_logging()
-    i = LoadTIFF(['Data/test.tiff', 'Data/test2.tiff'])
-    t = Treshold(1e4)
-    ap = SDAPs([100,1e3], [.5,.7,.9], [.1,.3])
-    o = SaveFig('Res/test.png')
-    s = ShowFig(0, False)
-    d = Differential()
-
-    t.input = i
-    ap.input = t
-    o.input = ap
-    d.input = ap
-    s.input = d
-
-    i.run()
+    diff_test()
 
 if __name__ == '__main__':
     main()
test.yml | 38 (new file)
@@ -0,0 +1,38 @@
+expe:
+  name: Première expérience
+  date: 9 juillet 2018
+  priority: 1
+  workflow: split-first # desc-first (default), split-first
+  ground_truth: ../Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif
+  meta_labels: ../GroundTruth/jurse_meta_idx.csv
+  descriptors_script:
+    name: Descriptors.dfc_sdaps
+    parameters:
+      areas:
+        - 100
+        - 1000
+        - 1e5
+      moi:
+        - 0.5
+        - 0.9
+      rasters:
+        - ./Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif
+        - ./Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif
+      treshold: 1e4
+#      features:
+#        - std
+#        - mean
+#      patch_size: 3
+  cross_validation:
+    package: CVGenerators
+    name: APsCVG
+    parameters:
+      n_test: 2
+  classifier:
+    package: sklearn.ensemble
+    name: RandomForestClassifier
+    parameters:
+      min_samples_leaf: 10
+      n_estimators: 50
+      n_jobs: -1
+      random_state: 0
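The cross_validation and classifier blocks each name a package, a class, and keyword parameters, which suggests the supervisor instantiates these objects dynamically from the recipe. That loader is not shown in this commit, so the following is only a plausible sketch of how such a recipe could be consumed, assuming PyYAML is installed and the named packages are importable.

import importlib
import yaml  # PyYAML, assumed available

def load_recipe(path):
    """Parse a recipe file like test.yml into a plain dictionary."""
    with open(path) as f:
        return yaml.safe_load(f)['expe']

def build_from_spec(spec):
    """Instantiate a {package, name, parameters} block, e.g. sklearn.ensemble.RandomForestClassifier."""
    module = importlib.import_module(spec['package'])
    cls = getattr(module, spec['name'])
    return cls(**spec.get('parameters', {}))

# expe = load_recipe('test.yml')
# clf = build_from_spec(expe['classifier'])  # RandomForestClassifier(min_samples_leaf=10, n_estimators=50, ...)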