/usr/share/pyshared/mvpa/measures/splitmeasure.py is in python-mvpa 0.4.8-3.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""This is a `FeaturewiseDatasetMeasure` that uses another
`FeaturewiseDatasetMeasure` and runs it multiple times on different splits of
a `Dataset`.
"""
__docformat__ = 'restructuredtext'

import numpy as N

from mvpa.measures.base import FeaturewiseDatasetMeasure
from mvpa.datasets.splitters import NoneSplitter
from mvpa.misc.state import StateVariable
from mvpa.misc.transformers import FirstAxisMean

if __debug__:
    from mvpa.base import debug


class SplitFeaturewiseMeasure(FeaturewiseDatasetMeasure):
    """This is a `FeaturewiseDatasetMeasure` that uses another
    `FeaturewiseDatasetMeasure` and runs it multiple times on different
    splits of a `Dataset`.

    When called with a `Dataset` it returns the mean sensitivity maps of all
    data splits.

    Additionally this class supports the `State` interface. Several
    postprocessing functions can be specified to the constructor. The results
    of the functions specified in the `postproc` dictionary will be available
    via their respective keywords.
    """

    maps = StateVariable(enabled=False,
                         doc="To store maps per each split")

    def __init__(self, sensana,
                 splitter=NoneSplitter,
                 combiner=FirstAxisMean,
                 **kwargs):
        """Cheap initialization.

        :Parameters:
            sensana : FeaturewiseDatasetMeasure
                that shall be run on the `Dataset` splits.
            splitter : Splitter
                used to split the `Dataset`. By convention the first dataset
                in the tuple returned by the splitter on each iteration is used
                to compute the sensitivity map.
            combiner
                This functor will be called on an array of sensitivity maps
                and the result will be returned by __call__(). The result of
                a combiner must be a 1d ndarray.
        """
        # init base classes first
        FeaturewiseDatasetMeasure.__init__(self, **kwargs)

        self.__sensana = sensana
        """Sensitivity analyzer used to compute the sensitivity maps.
        """
        self.__splitter = splitter
        """Splitter instance used to split the datasets."""
        self.__combiner = combiner
        """Function to combine sensitivities to serve a result of
        __call__()"""

    def _call(self, dataset):
        """Compute sensitivity maps for all dataset splits and run the
        postprocessing functions afterward (if any).

        Returns a list of all computed sensitivity maps. Postprocessing
        results are available via the object's `State` interface.
        """
        maps = []

        # splitter
        for split in self.__splitter(dataset):
            # compute sensitivity using first dataset in split
            sensitivity = self.__sensana(split[0])
            maps.append(sensitivity)

        self.maps = maps
        """Store the maps across splits"""

        # return all maps
        return self.__combiner(maps)


class TScoredFeaturewiseMeasure(SplitFeaturewiseMeasure):
    """`SplitFeaturewiseMeasure` computing featurewise t-score of
    sensitivities across splits.
    """

    def __init__(self, sensana, splitter, noise_level=0.0, **kwargs):
        """Cheap initialization.

        :Parameters:
            sensana : SensitivityAnalyzer
                that shall be run on the `Dataset` splits.
            splitter : Splitter
                used to split the `Dataset`. By convention the first dataset
                in the tuple returned by the splitter on each iteration is used
                to compute the sensitivity map.
            noise_level : float
                Theoretical output of the respective `SensitivityAnalyzer`
                for a pure noise pattern. For most algorithms this is probably
                zero, hence the default.
        """
        # init base classes first
        #  - get full sensitivity maps from SplittingSensitivityAnalyzer
        #  - no postprocessing
        #  - leave States handling to base class
        SplitFeaturewiseMeasure.__init__(self,
                                         sensana,
                                         splitter,
                                         combiner=N.array,
                                         **kwargs)

        self.__noise_level = noise_level
        """Output of the sensitivity analyzer when there is no signal."""

    def _call(self, dataset, callables=[]):
        """Compute sensitivity maps for all dataset splits and return the
        featurewise t-score of them.
        """
        # let base class compute the sensitivity maps
        maps = SplitFeaturewiseMeasure._call(self, dataset)

        # feature wise mean
        m = N.mean(maps, axis=0)
        #m = N.min(maps, axis=0)
        # featurewise variance
        v = N.var(maps, axis=0)
        # degrees of freedom (n-1 for one-sample t-test)
        df = maps.shape[0] - 1

        # compute t-score
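        # (one-sample t statistic against the noise level:
        #  t = (mean - noise_level) / sqrt(var / n), where N.var() is the
        #  biased, ddof=0, variance estimate)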
        t = (m - self.__noise_level) / N.sqrt(v * (1.0 / maps.shape[0]))

        if __debug__:
            debug('SA', 'T-score sensitivities computed for %d maps ' %
                  maps.shape[0] +
                  'min=%f max=%f. mean(m)=%f mean(v)=%f Result min=%f max=%f mean(abs)=%f' %
                  (N.min(maps), N.max(maps), N.mean(m), N.mean(v), N.min(t),
                   N.max(t), N.mean(N.abs(t))))

        return t
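For orientation, a minimal usage sketch follows. It is not part of the packaged file and assumes the PyMVPA 0.4 API from elsewhere in the package: OneWayAnova (mvpa.measures.anova) as the featurewise measure, NFoldSplitter (mvpa.datasets.splitters) to generate leave-one-chunk-out splits, the Dataset constructor with samples/labels/chunks arguments, and the enable_states keyword for enabling the maps state.

import numpy as N

from mvpa.datasets import Dataset
from mvpa.datasets.splitters import NFoldSplitter
from mvpa.measures.anova import OneWayAnova
from mvpa.measures.splitmeasure import SplitFeaturewiseMeasure, \
     TScoredFeaturewiseMeasure

# small synthetic dataset: 40 samples, 20 features, 2 labels, 4 chunks
samples = N.random.normal(size=(40, 20))
dataset = Dataset(samples=samples,
                  labels=N.repeat([0, 1], 20),
                  chunks=N.tile(N.arange(4).repeat(5), 2))

# mean ANOVA F-score per feature across leave-one-chunk-out splits
smeasure = SplitFeaturewiseMeasure(OneWayAnova(),
                                   splitter=NFoldSplitter(cvtype=1),
                                   enable_states=['maps'])
mean_map = smeasure(dataset)    # one value per feature
per_split_maps = smeasure.maps  # list with one sensitivity map per split

# featurewise t-score of the per-split sensitivities against zero
tmeasure = TScoredFeaturewiseMeasure(OneWayAnova(),
                                     NFoldSplitter(cvtype=1))
t_map = tmeasure(dataset)

Because TScoredFeaturewiseMeasure fixes combiner=N.array, the base-class _call() hands it the full stack of per-split maps, from which the featurewise mean and variance above are computed.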