Supervisor and Jurse passing tests

Florent Guiotte 2018-09-11 17:23:00 +02:00
parent 2943cc4a22
commit 314afce292
14 changed files with 346 additions and 29 deletions

1
.gitignore vendored

@@ -1,3 +1,4 @@
Enrichment/
__pycache__/
Logs/
[Dd]ata/

0
descriptors/__init__.py Normal file

30
descriptors/dfc_aps.py Normal file

@@ -0,0 +1,30 @@
import numpy as np
import yaml
import sys
sys.path.append('..')
import ld2dap
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None):
# Parse attribute type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
# APs Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
aps = ld2dap.AttributeProfiles(area=areas, sd=sd, moi=moi)
aps.input = dfc_filter
out_vectors = ld2dap.RawOutput()
out_vectors.input = aps
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'
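Each descriptor script in this commit exposes the same run()/version() pair, so the supervisor can swap them freely. As a hedged illustration only, calling this module directly might look like the sketch below; the raster path and parameter values echo the experiment YAML further down and are placeholders, not part of the commit.

import descriptors.dfc_aps as dfc_aps

# Sketch only: path and parameter values taken from the experiment YAML below.
rasters = ['./Data/dfc_rasters/DEM+B_C123/UH17_GEM051_TR.tif']
vectors = dfc_aps.run(rasters, treshold='1e4', areas=[100], sd=[0.5, 0.9])
print(dfc_aps.version(), type(vectors))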

34
descriptors/dfc_base.py Normal file

@@ -0,0 +1,34 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_base.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 27 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
def run(rasters, treshold=1e4):
# Parse parameters type
treshold = float(treshold)
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
out_vectors = ld2dap.RawOutput()
out_vectors.input = dfc_filter
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'
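The behaviour of ld2dap.Treshold is not part of this diff, so the following is only a guess at what a 1e4 threshold on LiDAR-derived rasters might do (for instance, capping outlier or no-data values). A self-contained numpy sketch of that assumed operation, not the library's code:

import numpy as np

def treshold_filter(raster, treshold=1e4):
    # Assumption: values above the threshold are capped; ld2dap.Treshold may differ.
    return np.minimum(np.asarray(raster, dtype=float), treshold)

print(treshold_filter([5.0, 2e4, 300.0], 1e4))   # second value capped at 10000.0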

41
descriptors/dfc_daps.py Normal file

@@ -0,0 +1,41 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_daps.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 27 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None):
# Parse parameters type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
aps = ld2dap.AttributeProfiles(area=areas, sd=sd, moi=moi)
aps.input = dfc_filter
differential = ld2dap.Differential()
differential.input = aps
out_vectors = ld2dap.RawOutput()
out_vectors.input = differential
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'
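The Differential stage that distinguishes this script from dfc_aps.py is not shown in the diff. Conceptually, differential attribute profiles are the differences between consecutive images of a profile stack; a minimal numpy sketch of that idea (an assumption, not ld2dap.Differential itself):

import numpy as np

def differential(profiles):
    # Differences between consecutive images of a profile stack (stack axis last).
    return np.diff(np.asarray(profiles), axis=-1)

stack = np.random.rand(4, 4, 5)      # toy stack of 5 filtered images
print(differential(stack).shape)     # (4, 4, 4)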

41
descriptors/dfc_dsdaps.py Normal file

@@ -0,0 +1,41 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_dsdaps.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 28 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None):
# Parse parameters type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
sdaps = ld2dap.SelfDualAttributeProfiles(area=areas, sd=sd, moi=moi)
sdaps.input = dfc_filter
differential = ld2dap.Differential()
differential.input = sdaps
out_vectors = ld2dap.RawOutput()
out_vectors.input = differential
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'

57
descriptors/dfc_lfaps.py Normal file

@@ -0,0 +1,57 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_lfaps.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 27 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
# TODO: Add param percentile?
dispatcher = {
'mean': np.mean, # Arithmetic mean
'median': np.median, # Median
'average': np.average, # Weighted average (=mean ?)
'std': np.std, # Standard deviation
'var': np.var, # Variance
'amax': np.amax, # Maximum
'amin': np.amin, # Minimum
'ptp': np.ptp, # Range of values (max - min)
}
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None, features=['mean'], patch_size=3):
# Parse parameters type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
patch_size = int(patch_size)
features = [dispatcher[x] for x in features]
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
aps = ld2dap.AttributeProfiles(area=areas, sd=sd, moi=moi)
aps.input = dfc_filter
local_features = ld2dap.LocalFeatures(features, patch_size)
local_features.input = aps
out_vectors = ld2dap.RawOutput()
out_vectors.input = local_features
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'
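The dispatcher maps feature names from the experiment file onto numpy reducers, which LocalFeatures presumably applies over each pixel's patch_size x patch_size neighbourhood. A naive standalone sketch of that idea (loop-based, borders ignored; not ld2dap's implementation):

import numpy as np

dispatcher = {'mean': np.mean, 'std': np.std, 'ptp': np.ptp}

def local_feature(image, name, patch_size=3):
    # Apply the chosen reducer over every interior pixel's square neighbourhood.
    reducer, r = dispatcher[name], patch_size // 2
    out = np.zeros_like(image, dtype=float)
    for i in range(r, image.shape[0] - r):
        for j in range(r, image.shape[1] - r):
            out[i, j] = reducer(image[i - r:i + r + 1, j - r:j + r + 1])
    return out

print(local_feature(np.random.rand(8, 8), 'mean').shape)   # (8, 8)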

57
descriptors/dfc_lfsdaps.py Normal file

@@ -0,0 +1,57 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_lfsdaps.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 28 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
# TODO: Add param percentile?
dispatcher = {
'mean': np.mean, # Arithmetic mean
'median': np.median, # Median
'average': np.average, # Weighted average (=mean ?)
'std': np.std, # Standard deviation
'var': np.var, # Variance
'amax': np.amax, # Maximum
'amin': np.amin, # Minimum
'ptp': np.ptp, # Range of values (max - min)
}
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None, features=['mean'], patch_size=3):
# Parse parameters type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
patch_size = int(patch_size)
features = [dispatcher[x] for x in features]
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
sdaps = ld2dap.SelfDualAttributeProfiles(area=areas, sd=sd, moi=moi)
sdaps.input = dfc_filter
local_features = ld2dap.LocalFeatures(features, patch_size)
local_features.input = sdaps
out_vectors = ld2dap.RawOutput()
out_vectors.input = local_features
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'

40
descriptors/dfc_sdaps.py Normal file

@@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file dfc_sdaps.py
# \brief TODO
# \author Florent Guiotte <florent.guiotte@gmail.com>
# \version 0.1
# \date 27 août 2018
#
# TODO details
import numpy as np
import sys
sys.path.append('..')
import ld2dap
def run(rasters, treshold=1e4, areas=None, sd=None, moi=None):
# Parse parameters type
treshold = float(treshold)
areas = None if areas is None else np.array(areas).astype(np.float).astype(np.int)
sd = None if sd is None else np.array(sd).astype(np.float)
moi = None if moi is None else np.array(moi).astype(np.float)
# Pipelines
loader = ld2dap.LoadTIFF(rasters)
dfc_filter = ld2dap.Treshold(treshold)
dfc_filter.input = loader
sdaps = ld2dap.SelfDualAttributeProfiles(area=areas, sd=sd, moi=moi)
sdaps.input = dfc_filter
out_vectors = ld2dap.RawOutput()
out_vectors.input = sdaps
# Compute vectors
out_vectors.run()
return out_vectors.data
def version():
return 'v0.0'

View File

@@ -14,9 +14,6 @@ from collections import OrderedDict
import numpy as np
import pandas as pd
from sklearn import metrics
# TODO: create package, use dev
import sys
sys.path.append('../triskele/python')
import triskele
from .protocol import Protocol, TestError
@@ -51,15 +48,27 @@ class Jurse(Protocol):
descriptors = self._compute_descriptors()
except Exception:
raise TestError('Error occured during description')
self._time('description')
self._log.info('Classify data')
try:
classification = self._compute_classificatin(descriptors)
classification = self._compute_classification(descriptors)
except Exception:
raise TestError('Error occured during classification')
self._time('classification')
self._log.info('Run metrics')
self._metrics = self._run_metrics(classification, descriptors)
metrics = self._run_metrics(classification, descriptors)
self._time('metrics')
cmap = str(self._results_base_name) + '.tif'
self._log.info('Saving classification map {}'.format(cmap))
triskele.write(cmap, classification)
results = OrderedDict()
results['classification'] = cmap
results['metrics'] = metrics
self._results = results
def _compute_descriptors(self):
script = self._expe['descriptors_script']
@@ -80,7 +89,7 @@ class Jurse(Protocol):
cross_val = getattr(importlib.import_module(cv['package']), cv['name'])
classifier = getattr(importlib.import_module(cl['package']), cl['name'])
prediction = np.zeros_like(gt)
prediction = np.zeros_like(gt, dtype=np.uint8)
for xt, xv, yt, yv, ti in cross_val(gt, descriptors, **cv['parameters']):
rfc = classifier(**cl['parameters'])
@@ -99,7 +108,7 @@ class Jurse(Protocol):
# Meta labeling
idx_map = np.arange(gt.max() + 1)
if 'meta_labels' in self._expe:
if 'meta_labels' in gt_expe:
meta_idx = pd.read_csv(gt_expe['meta_labels'])
idx = np.array(meta_idx['index'])
midx = np.array(meta_idx['metaclass_index'])
@@ -107,6 +116,9 @@ class Jurse(Protocol):
return idx_map[gt]
def _get_results(self):
return self._results
def _run_metrics(self, classification, descriptors):
gt = self._get_ground_truth()
@@ -115,7 +127,7 @@ class Jurse(Protocol):
gt = gt[f].ravel()
results = OrderedDict()
results['dimension'] = descriptors.shape[-1]
results['dimensions'] = descriptors.shape[-1]
results['overall_accuracy'] = float(metrics.accuracy_score(gt, pred))
results['cohen_kappa'] = float(metrics.cohen_kappa_score(gt, pred))
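The metrics block relies directly on scikit-learn: with ground truth and prediction flattened to 1-D label vectors, the two scores reported here can be reproduced in isolation (the toy labels below are invented for illustration):

import numpy as np
from sklearn import metrics

gt   = np.array([1, 1, 2, 2, 3, 3])   # toy ground-truth labels
pred = np.array([1, 2, 2, 2, 3, 1])   # toy predicted labels

results = {
    'overall_accuracy': float(metrics.accuracy_score(gt, pred)),
    'cohen_kappa': float(metrics.cohen_kappa_score(gt, pred)),
}
print(results)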

View File

@@ -19,22 +19,25 @@ class Protocol:
self._expe = expe
self._name = name
self._times = OrderedDict()
self._results_base_name = None
self._log.debug('expe loaded: {}'.format(self._expe))
def get_hashes(self):
self._log.info('Computing hashes')
return(self._get_hashes())
def set_results_base_name(self, base_name):
self._results_base_name = base_name
def run(self):
self._pt = time.process_time()
self._run()
# TODO: Strop process timer
def get_results(self):
self._get_results()
return self._get_results()
def get_process_time(self):
return self._times()
return self._times
def _time(self, process):
self._times[process] = time.process_time() - self._pt
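The timing helper simply stores the CPU time elapsed since run() started, keyed by stage name, so each recorded value is cumulative. Stripped of the class, the bookkeeping amounts to this sketch:

import time
from collections import OrderedDict

times = OrderedDict()
pt = time.process_time()                      # set when run() starts

# ... description stage ...
times['description'] = time.process_time() - pt

# ... classification stage ...
times['classification'] = time.process_time() - pt

print(times)    # cumulative CPU time recorded at the end of each stage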

View File

@@ -13,10 +13,9 @@ protocol:
expe:
ground_truth:
raster: ./Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif
meta_labels: ./Data/ground_truth/jurse_idx.csv
meta_labels: ./Data/ground_truth/jurse_meta_idx.csv
descriptors_script:
name: dfc_aps
package: descriptors
name: descriptors.dfc_aps
parameters:
areas:
- 100
@@ -25,8 +24,8 @@ expe:
- 0.5
- 0.9
rasters:
- ./Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif
- ./Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif
- ./Data/dfc_rasters/DEM+B_C123/UH17_GEM051_TR.tif
- ./Data/dfc_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif
treshold: 1e4
cross_validation:
name: APsCVG
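The config change replaces the separate name/package pair with a single dotted module path, which suggests the protocol now resolves the descriptor script with a plain importlib lookup. A hedged sketch of that resolution; the actual loading code is not shown in this diff and the parameter values are copied from the YAML above:

import importlib

descriptors_script = {
    'name': 'descriptors.dfc_aps',
    'parameters': {
        'rasters': ['./Data/dfc_rasters/DEM+B_C123/UH17_GEM051_TR.tif'],
        'treshold': '1e4',
        'areas': [100],
        'sd': [0.5, 0.9],
    },
}

module = importlib.import_module(descriptors_script['name'])
vectors = module.run(**descriptors_script['parameters'])
print(module.version())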

View File

@@ -76,8 +76,10 @@ def run(expe_file):
### Load protocol
try:
protocol = getattr(importlib.import_module(test['protocol']['package']),
test['protocol']['name'])
#protocol = getattr(importlib.import_module(test['protocol']['package']), test['protocol']['name'])
protocol_module = importlib.import_module(test['protocol']['package'])
importlib.reload(protocol_module)
protocol = getattr(protocol_module, test['protocol']['name'])
experience = protocol(test['expe'])
except Exception as e:
err = 'Could not load protocol from test {}'.format(expe_file)
@@ -91,6 +93,8 @@ def run(expe_file):
test['report'] = create_report(start_time)
expe_file.stage(test)
experience.set_results_base_name(expe_file.get_result_path())
### Run test
try:
experience.run()
@@ -105,15 +109,14 @@ def run(expe_file):
### Write complete report
report = create_report(start_time, end_time)
ressources = OrderedDict()
ressouces['ram'] = None
ressouces['proccess_time'] = experience.get_process_time()
report['ressources'] = ressouces
ressources['ram'] = None
ressources['proccess_time'] = experience.get_process_time()
report['ressources'] = ressources
test['report'] = report
### Write results
test['results'] = experience.get_results()
expe_file.result(test)
log.info('Additional results in {}'.format(expe_file.get_result_path()))
### End of test
log.info('Test complete')
@@ -274,7 +277,7 @@ def create_report(stime=None, etime=None):
def watch_folder():
log.info('Waiting for test')
while not list(TEST_DIR.glob('*.yml')):
time.sleep(10)
time.sleep(3)
class Kronos(object):
def __init__(self):
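The protocol-loading change above splits the single getattr call so the module can be reloaded first; in a long-running watcher this picks up on-disk edits to the protocol code between tests. A small sketch of the pattern (the package name 'protocols' below is an assumption, it is not visible in this diff):

import importlib

def load_protocol(package, name):
    # Reload so a long-running supervisor sees protocol edits made since the last test.
    module = importlib.import_module(package)
    importlib.reload(module)
    return getattr(module, name)

# Hypothetical usage; the real package/class names come from the test YAML.
# Jurse = load_protocol('protocols', 'Jurse')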

View File

@@ -13,10 +13,9 @@ protocol:
expe:
ground_truth:
raster: ./Data/ground_truth/2018_IEEE_GRSS_DFC_GT_TR.tif
meta_labels: ./Data/ground_truth/jurse_idx.csv
meta_labels: ./Data/ground_truth/jurse_meta_idx.csv
descriptors_script:
name: dfc_aps
package: descriptors
name: descriptors.dfc_aps
parameters:
areas:
- 100
@@ -25,8 +24,8 @@ expe:
- 0.5
- 0.9
rasters:
- ./Data/phase1_rasters/DEM+B_C123/UH17_GEM051_TR.tif
- ./Data/phase1_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif
- ./Data/dfc_rasters/DEM+B_C123/UH17_GEM051_TR.tif
- ./Data/dfc_rasters/DEM_C123_3msr/UH17_GEG051_TR.tif
treshold: 1e4
cross_validation:
name: APsCVG
@@ -38,6 +37,6 @@ expe:
package: sklearn.ensemble
parameters:
min_samples_leaf: 10
n_estimators: 50
n_estimators: 10
n_jobs: -1
random_state: 0
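The classifier block maps one-to-one onto a scikit-learn constructor. Given the sklearn.ensemble package and the rfc variable in the Jurse protocol, it is presumably a RandomForestClassifier, although the classifier name line itself sits outside this hunk. A sketch of what the supervisor effectively builds, under that assumption:

from sklearn.ensemble import RandomForestClassifier

# Parameters copied from the YAML above; RandomForestClassifier itself is an assumption.
clf = RandomForestClassifier(min_samples_leaf=10, n_estimators=10, n_jobs=-1,
                             random_state=0)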