#!/usr/bin/python
# -*- coding: utf-8 -*-
# \file SelfDualAttributeProfiles.py
# \brief Self-dual attribute profiles (SDAP) filter built on triskele's
#        tree-of-shapes filtering.
# \author Florent Guiotte
# \version 0.1
# \date 17 avril 2018
#
# TODO details

from .core import Filter, Stack
import numpy as np
import triskele


class SelfDualAttributeProfiles(Filter):
    """Self-dual attribute profiles computed with a tree of shapes.

    Filters the input raster with triskele's ``tos-tree`` for each
    requested attribute (area, standard deviation, moment of inertia)
    and rebuilds the stack metadata so that each active attribute
    series carries its own copy of the origin (unfiltered) band.
    """

    def __init__(self, area=None, sd=None, moi=None, normalize_to_dtype=True):
        """Store the sorted attribute thresholds.

        :param area: iterable of area thresholds, or None to disable
            this attribute.
        :param sd: iterable of standard-deviation thresholds, or None.
        :param moi: iterable of moment-of-inertia thresholds, or None.
        :param normalize_to_dtype: forwarded to ``triskele.Triskele``.
        """
        super().__init__(self.__class__.__name__)
        # Thresholds are sorted so profile bands come out in
        # monotonically increasing order.
        self.area = np.sort(area) if area is not None else None
        self.sd = np.sort(sd) if sd is not None else None
        self.moi = np.sort(moi) if moi is not None else None
        self.normalize_to_dtype = normalize_to_dtype

    def _process_desc(self):
        """Return per-attribute band descriptions.

        :return: dict mapping each attribute name to a list; active
            attributes get ``[None, 'Self-dual <att> <t>', ...]`` where
            the leading None is a placeholder for the origin band,
            inactive attributes get an empty list.
        """
        att_desc = dict()
        for att in ['area', 'sd', 'moi']:
            att_desc[att] = list()
            # getattr replaces the original double __getattribute__
            # call: idiomatic, and the lookup is done once per attribute.
            values = getattr(self, att)
            if values is not None:
                att_desc[att].append(None)
                att_desc[att].extend(
                    ['Self-dual {} {}'.format(att, x) for x in values])
        return att_desc

    def _process_symb(self):
        """Return per-attribute LaTeX band symbols.

        Same structure as :meth:`_process_desc`, with symbols of the
        form ``\\rho^{att}_{threshold}``.
        """
        att_symb = dict()
        for att in ['area', 'sd', 'moi']:
            att_symb[att] = list()
            values = getattr(self, att)
            if values is not None:
                att_symb[att].append(None)
                att_symb[att].extend(
                    ['\\rho^{{{}}}_{{{}}}'.format(att, x) for x in values])
        return att_symb

    def _process_len(self):
        """Return per-attribute band counts and their cumulated offsets.

        Each active attribute contributes ``len(thresholds) + 1`` bands
        (the +1 is the origin band); inactive attributes contribute 0.

        :return: tuple ``(att_len, att_len_cs)`` of dicts keyed by
            attribute name — counts and cumulated (prefix-sum) offsets.
        """
        att_len = dict()
        att_len_cs = dict()
        cs = 0
        for att in ['area', 'sd', 'moi']:
            values = getattr(self, att)
            al = len(values) + 1 if values is not None else 0
            att_len[att] = al
            att_len_cs[att] = cs
            cs += al
        self.logger.debug('Attribute length: {}'.format(att_len))
        self.logger.debug('Attribute length CS: {}'.format(att_len_cs))
        return att_len, att_len_cs

    def _process(self, data, metadata):
        """Filter *data* with a tree of shapes and rebuild stack metadata.

        :param data: input raster handed to triskele.
        :param metadata: list of ``Stack`` descriptors for the input bands.
        :return: tuple ``(data_new, metadata_new)`` where the origin band
            has been duplicated in front of each active attribute series.
        :raises NotImplementedError: when an input stack spans more than
            one band (nested filtering is not supported yet).
        """
        t = triskele.Triskele(data, verbose=False,
                              normalize_to_dtype=self.normalize_to_dtype)
        attributes = t.filter(tree='tos-tree', area=self.area,
                              standard_deviation=self.sd,
                              moment_of_inertia=self.moi)

        # Create new data and metadata
        metadata_new = list()

        # Pre-process descriptions
        att_desc = self._process_desc()
        att_symb = self._process_symb()

        # Compute stack offsets and att length
        att_len, att_len_cs = self._process_len()
        raster_offset = sum(att_len.values())

        # Data
        # Duplicate origin in data to respect Stack structure.
        # Compute insert indices: one origin copy is inserted in front of
        # every active attribute series except the first (which already
        # owns the shared origin produced by triskele).
        where = np.array(list(att_len.values()))
        where = where[where != 0] - 1
        where[0] += 1
        count = sum(where)
        where = np.cumsum(where[:-1])
        origins_dcount = where.size
        offset = np.repeat(np.arange(len(metadata)) * count,
                           where.size)  # Can't nest this
        where = np.tile(where, len(metadata)) + offset

        # Find origins
        origins_index = np.arange(len(metadata)) * count
        self.logger.debug('origins_index: {}'.format(origins_index))
        origins = attributes[:, :, origins_index]

        # Unfold origins to match insert pattern
        origins = np.repeat(origins, origins_dcount, axis=2)

        # Insert origins in data
        data_new = np.insert(attributes, where, origins, axis=2)

        # Metadata
        for stack in metadata:
            if stack.end - stack.begin > 1:
                # NOTE(review): original code called self.logger.err(),
                # which does not exist on stdlib loggers (the rest of the
                # file uses .debug()); .error() is the standard method —
                # confirm the Filter base class does not define a custom
                # logger with .err().
                self.logger.error('Nested filtering, raising error')
                raise NotImplementedError(
                    'Nested filtering not implemented yet')
            for att in ['area', 'sd', 'moi']:
                if att_len[att] == 0:
                    continue
                stack_new = Stack(
                    raster_offset * stack.begin + att_len_cs[att],
                    att_len[att], stack.desc[0], stack.symb[0])
                for old_desc, new_desc in zip(stack_new.desc, att_desc[att]):
                    self.logger.debug(
                        'Desc: {} + {}'.format(old_desc, new_desc))
                    old_desc.append(new_desc)
                for old_symb, new_symb in zip(stack_new.symb, att_symb[att]):
                    self.logger.debug(
                        'Symb: {} + {}'.format(old_symb, new_symb))
                    old_symb.append(new_symb)
                metadata_new.append(stack_new)

        return data_new, metadata_new


if __name__ == '__main__':
    # Quick smoke test of the description generation.
    area = [10, 100, 1000]
    sd = [.1, .9]
    ap = SelfDualAttributeProfiles(area, sd)
    print(ap._process_desc())