This file is indexed.

/usr/share/pyshared/mlpy/_spectralreg.py is in python-mlpy 2.2.0~dfsg1-2.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

## This code is written by Davide Albanese, <albanese@fbk.eu>
## (C) 2010 Fondazione Bruno Kessler - Via Santa Croce 77, 38100 Trento, ITALY.

## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.

## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.

## You should have received a copy of the GNU General Public License
## along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ["GradientDescent"]


import spectralreg as sr
import numpy as np

class GradientDescent(object):
    """Gradient Descent Method
    """

    def __init__(self, kernel, t, stepsize):
        """Initialization.

        :Parameters:
          kernel : kernel object
            kernel object providing matrix(x) and vector(xi, x) methods
          t : int (> 0)
            number of iterations
          stepsize : float
            step size of the gradient descent updates
        """

        self.t = t
        self.stepsize = stepsize
        self.kernel = kernel
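        # training inputs and regression coefficients; set by learn() and read by pred()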
        self.__x = None
        self.__c = None
        
        
    def learn(self, x, y):
        """Compute the regression coefficients.

        :Parameters:
          x : numpy 2d array (n x p)
            matrix of regressors
          y : numpy 1d array (n)
            response
        """
        

        if not isinstance(x, np.ndarray):
            raise ValueError("x must be a numpy 2d array")

        if not isinstance(y, np.ndarray):
            raise ValueError("y must be a numpy 1d array")

        if x.ndim > 2:
            raise ValueError("x must be a 2d array")

        if x.shape[0] != y.shape[0]:
            raise ValueError("x and y are not aligned")

        c = np.zeros(x.shape[0])
        k = self.kernel.matrix(x)
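        # k is the n x n Gram matrix built by the kernel object;
        # gradient_descent_steps lives in the compiled spectralreg extension and
        # presumably performs t gradient-descent (Landweber) updates of the
        # coefficients, roughly c <- c + stepsize * (y - np.dot(k, c))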
        self.__c = sr.gradient_descent_steps(c, k, y, self.stepsize, self.t)
        self.__x = x.copy()

    def pred(self, x):
        """Compute the predicted response.
        
        :Parameters:
          x : numpy 2d array (n x p)
            matrix of regressors
        
        :Returns:
          yp : 1d ndarray
             predicted response
        """


        if not isinstance(x, np.ndarray):
            raise ValueError("x must be a numpy 2d array")

        if x.ndim > 2:
            raise ValueError("x must be a 2d array")
       
        if self.__x is None:
            raise ValueError("no regression coefficients (call learn() first)")

        if x.shape[1] != self.__x.shape[1]:
            raise ValueError("x is not aligned")

        y = np.empty(x.shape[0])
        
        # predicted response for each test point:
        # y[i] = sum_j c[j] * K(x[i], X[j]), where X is the training matrix stored by learn()
        for i in range(x.shape[0]):
            k = self.kernel.vector(x[i], self.__x)
            y[i] = np.sum(self.__c * k)
            
        return y
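
For reference, here is a minimal usage sketch (not part of the packaged file above). It assumes that mlpy re-exports GradientDescent at package level, as the __all__ declaration suggests, and it uses a hypothetical LinearKernel class that implements only the two methods this module actually calls, kernel.matrix(x) and kernel.vector(xi, x); the kernel objects shipped with mlpy may expose a richer or different interface.

import numpy as np
import mlpy

class LinearKernel(object):
    """Hypothetical kernel object with the interface _spectralreg expects."""
    def matrix(self, x):
        # Gram matrix: K[i, j] = <x_i, x_j>
        return np.dot(x, x.T)
    def vector(self, xi, x):
        # kernel values between one test point xi and all training points in x
        return np.dot(x, xi)

x = np.random.randn(50, 3)                  # 50 samples, 3 regressors
y = np.dot(x, np.array([1.0, -2.0, 0.5]))   # linear response
gd = mlpy.GradientDescent(LinearKernel(), t=100, stepsize=0.01)  # step size may need tuning
gd.learn(x, y)                              # compute the regression coefficients
yp = gd.pred(x)                             # predicted response, shape (50,)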