/usr/include/shogun/regression/KernelRidgeRegression.h is in libshogun-dev 3.2.0-7.3build4.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Written (W) 2006 Mikio L. Braun
* Written (W) 1999-2009 Soeren Sonnenburg
* Copyright (C) 1999-2009 Fraunhofer Institute FIRST and Max-Planck-Society
*/
#ifndef _KERNELRIDGEREGRESSION_H__
#define _KERNELRIDGEREGRESSION_H__
#include <shogun/lib/config.h>
#include <shogun/regression/Regression.h>
#ifdef HAVE_LAPACK
#include <shogun/machine/KernelMachine.h>
namespace shogun
{
/** which training method to use for KRR */
enum ETrainingType
{
    /// via pseudo-inverse
    PINV=1,
    /// via the Gauss-Seidel iterative method
    GS=2
};
/** @brief Class KernelRidgeRegression implements Kernel Ridge Regression - a
 * regularized least squares method for classification and regression.
 *
 * It is similar to support vector machines (cf. CSVM). However, in contrast to
 * SVMs, a different objective is optimized, which leads to a dense solution
 * (so in the end not only a few support vectors are active, but all training
 * examples are). This makes the method applicable only to rather small
 * problems (a couple of thousand training examples). When a linear kernel is
 * used, KRR is closely related to Fisher's Linear Discriminant (cf. LDA).
*
 * Internally (for linear kernels) it is solved by minimizing the following objective
*
* \f[
* \frac{1}{2}\left(\sum_{i=1}^N(y_i-{\bf w}\cdot {\bf x}_i)^2 + \tau||{\bf w}||^2\right)
* \f]
*
 * which, by setting the gradient with respect to \f${\bf w}\f$ to zero, boils down to solving the linear system
*
* \f[
* {\bf w} = \left(\tau {\bf I}+ \sum_{i=1}^N{\bf x}_i{\bf x}_i^T\right)^{-1}\left(\sum_{i=1}^N y_i{\bf x}_i\right)
* \f]
*
* and in the kernel case
* \f[
* {\bf \alpha}=\left({\bf K}+\tau{\bf I}\right)^{-1}{\bf y}
* \f]
 * where \f${\bf K}\f$ is the kernel matrix and \f${\bf y}\f$ the vector of
 * labels. The resulting solution can again be written as a linear combination
 * of kernels (cf. CKernelMachine) with bias \f$b=0\f$.
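 *
 * A minimal usage sketch (illustrative, not taken from this header; feat_matrix
 * and label_vector are assumed to be an SGMatrix<float64_t> and an
 * SGVector<float64_t> prepared by the caller, and the kernel width 2.0 is an
 * arbitrary example value):
 *
 * \code
 * CDenseFeatures<float64_t>* feats=new CDenseFeatures<float64_t>(feat_matrix);
 * CRegressionLabels* labels=new CRegressionLabels(label_vector);
 * CGaussianKernel* kernel=new CGaussianKernel(feats, feats, 2.0);
 * CKernelRidgeRegression* krr=new CKernelRidgeRegression(1e-6, kernel, labels);
 * krr->train();
 * CRegressionLabels* predictions=krr->apply_regression(feats);
 * \endcode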
*/
class CKernelRidgeRegression : public CKernelMachine
{
public:
    /** problem type */
    MACHINE_PROBLEM_TYPE(PT_REGRESSION);

    /** default constructor */
    CKernelRidgeRegression();

    /** constructor
     *
     * @param tau regularization constant tau
     * @param k kernel
     * @param lab labels
     * @param m training method: PINV (pseudo-inverse, the default) or GS
     */
    CKernelRidgeRegression(float64_t tau, CKernel* k, CLabels* lab, ETrainingType m=PINV);

    /** default destructor */
    virtual ~CKernelRidgeRegression() {}
    /** set regularization constant
     *
     * @param tau new tau
     */
    inline void set_tau(float64_t tau) { m_tau = tau; }

    /** set convergence precision for the Gauss-Seidel method
     *
     * @param epsilon new epsilon
     */
    inline void set_epsilon(float64_t epsilon) { m_epsilon = epsilon; }
    /** load regression from file
     *
     * @param srcfile file to load from
     * @return whether loading was successful
     */
    virtual bool load(FILE* srcfile);

    /** save regression to file
     *
     * @param dstfile file to save to
     * @return whether saving was successful
     */
    virtual bool save(FILE* dstfile);

    /** get classifier type
     *
     * @return classifier type CT_KERNELRIDGEREGRESSION
     */
    virtual EMachineType get_classifier_type()
    {
        return CT_KERNELRIDGEREGRESSION;
    }

    /** @return object name */
    virtual const char* get_name() const { return "KernelRidgeRegression"; }
protected:
    /** train regression
     *
     * @param data training data (parameter can be omitted if distance- or
     * kernel-based regressors are used and the distance/kernel was
     * initialized with the training data)
     *
     * @return whether training was successful
     */
    virtual bool train_machine(CFeatures* data=NULL);
private:
    void init();

    /** train regression using the Gauss-Seidel iterative method
     *
     * @return whether training was successful
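     *
     * A sketch of the intended update (assuming a standard Gauss-Seidel sweep
     * over \f$({\bf K}+\tau{\bf I}){\bf \alpha}={\bf y}\f$; illustrative, not
     * a verbatim description of the implementation):
     * \f[
     * \alpha_i \leftarrow \frac{y_i-\sum_{j\neq i}K_{ij}\alpha_j}{K_{ii}+\tau}
     * \f]
     * repeated over all \f$i\f$ until the largest change in \f${\bf \alpha}\f$
     * falls below m_epsilon.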
     */
    bool train_machine_gs();
    /** train regression using the pseudo-inverse (PINV)
     *
     * @return whether training was successful
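     *
     * Presumably this computes
     * \f${\bf \alpha}=({\bf K}+\tau{\bf I})^{-1}{\bf y}\f$ directly via LAPACK
     * (this header is only compiled when HAVE_LAPACK is defined); the exact
     * routine used is an implementation detail of the .cpp file.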
     */
    bool train_machine_pinv();
private:
    /** regularization parameter tau */
    float64_t m_tau;

    /** convergence precision (epsilon) for the Gauss-Seidel method */
    float64_t m_epsilon;

    /** training method (PINV or GS) */
    ETrainingType m_train_func;
};
}
#endif // HAVE_LAPACK
#endif // _KERNELRIDGEREGRESSION_H__