/usr/include/root/TMVA/TSynapse.h is in libroot-tmva-dev 5.34.19+dfsg-1.2.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
// @(#)root/tmva $Id$
// Author: Matt Jachowski
/**********************************************************************************
* Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
* Package: TMVA *
* Class : TMVA::TSynapse *
* Web : http://tmva.sourceforge.net *
* *
* Description: *
* Synapse class for use in derivatives of MethodANNBase *
* *
* Authors (alphabetical): *
* Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA *
* *
* Copyright (c) 2005: *
* CERN, Switzerland *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted according to the terms listed in LICENSE *
* (http://tmva.sourceforge.net/LICENSE) *
**********************************************************************************/
#ifndef ROOT_TMVA_TSynapse
#define ROOT_TMVA_TSynapse
//////////////////////////////////////////////////////////////////////////
// //
// TSynapse //
// //
// Synapse used by derivatives of MethodANNBase //
// //
//////////////////////////////////////////////////////////////////////////
#ifndef ROOT_TString
#include "TString.h"
#endif
#ifndef ROOT_TFormula
#include "TFormula.h"
#endif
namespace TMVA {

   class TNeuron;
   class MsgLogger;

   class TSynapse : public TObject {

   public:

      TSynapse();
      virtual ~TSynapse();

      // set the weight of the synapse
      void SetWeight(Double_t weight);

      // get the weight of the synapse
      Double_t GetWeight() { return fWeight; }

      // set the learning rate
      void SetLearningRate(Double_t rate) { fLearnRate = rate; }

      // get the learning rate
      Double_t GetLearningRate() { return fLearnRate; }

      // decay the learning rate
      void DecayLearningRate(Double_t rate) { fLearnRate *= (1-rate); }

      // set the pre-neuron
      void SetPreNeuron(TNeuron* pre) { fPreNeuron = pre; }

      // set the post-neuron
      void SetPostNeuron(TNeuron* post) { fPostNeuron = post; }

      // get the weighted output of the pre-neuron
      Double_t GetWeightedValue();

      // get the weighted error field of the post-neuron
      Double_t GetWeightedDelta();

      // force the synapse to adjust its weight according to its error field
      void AdjustWeight();

      // calculate the error field of the synapse
      void CalculateDelta();

      // initialize the error field of the synapse to 0
      void InitDelta() { fDelta = 0.0; fCount = 0; }

      void SetDEDw(Double_t DEDw) { fDEDw = DEDw; }
      Double_t GetDEDw() { return fDEDw; }
      Double_t GetDelta() { return fDelta; }

   private:

      Double_t fWeight;       // weight of the synapse
      Double_t fLearnRate;    // learning rate parameter
      Double_t fDelta;        // local error field
      Double_t fDEDw;         // sum of deltas
      Int_t    fCount;        // number of updates contributing to error field
      TNeuron* fPreNeuron;    // pointer to pre-neuron
      TNeuron* fPostNeuron;   // pointer to post-neuron

      static MsgLogger* fgLogger;   //! message logger, static to save resources
      MsgLogger& Log() const { return *fgLogger; }

      ClassDef(TSynapse,0) // Synapse class used by MethodANNBase and derivatives
   };

} // namespace TMVA
#endif
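
As a usage illustration (not part of the packaged file): the sketch below exercises only the inline accessors declared in this header. Wiring the synapse to TNeuron objects and calling GetWeightedValue(), CalculateDelta() or AdjustWeight() is normally driven by MethodANNBase and its derived network implementations during training, so those calls are omitted. The file name, build command and numeric values are assumptions chosen for illustration; compiling and linking requires a ROOT installation with the TMVA library.

// synapse_sketch.cxx -- illustrative sketch only; assumes ROOT + TMVA are installed.
// Possible build line: g++ synapse_sketch.cxx $(root-config --cflags --libs) -lTMVA
#include <iostream>
#include "TMVA/TSynapse.h"

int main()
{
   TMVA::TSynapse synapse;

   synapse.SetWeight(0.5);            // weight of the synapse
   synapse.SetLearningRate(0.1);      // learning rate parameter

   // DecayLearningRate(r) multiplies the rate by (1 - r), per the inline
   // definition in the header: 0.1 * (1 - 0.05) = 0.095.
   synapse.DecayLearningRate(0.05);

   // InitDelta() resets the local error field and the update counter
   // before a new accumulation pass.
   synapse.InitDelta();

   std::cout << "weight        = " << synapse.GetWeight()       << "\n"
             << "learning rate = " << synapse.GetLearningRate() << "\n"
             << "delta         = " << synapse.GetDelta()        << std::endl;
   return 0;
}

In TMVA itself, synapses like this are created and updated inside the neural-network method classes rather than directly by user code.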