/usr/include/ITK-4.9/itkRBFBackPropagationLearningFunction.hxx is in libinsighttoolkit4-dev 4.9.0-4ubuntu1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkRBFBackPropagationLearningFunction_hxx
#define itkRBFBackPropagationLearningFunction_hxx
#include "itkRBFBackPropagationLearningFunction.h"
namespace itk
{
namespace Statistics
{
/** Constructor: seed the three learning rates with their default values
 *  (rate 1 for the output layer, rates 2 and 3 for the RBF centers and
 *  widths respectively). */
template<typename LayerType, typename TTargetVector>
RBFBackPropagationLearningFunction<LayerType,TTargetVector>
::RBFBackPropagationLearningFunction() :
  m_LearningRate1( 0.05 ),
  m_LearningRate2( 3 ),
  m_LearningRate3( 0.75 )
{
}
/** Update the input weight set of \a layer from its accumulated deltas.
 *
 *  For an output layer the weight deltas (DW) and bias deltas (DB) are both
 *  scaled by the caller-supplied learning rate \a lr (back-propagation step).
 *  For any other layer the deltas are scaled by the member rates instead:
 *  m_LearningRate2 for DW and m_LearningRate3 for DB (gradient descent on
 *  the RBF centers/widths); \a lr is intentionally unused on that path. */
template<typename LayerType, typename TTargetVector>
void
RBFBackPropagationLearningFunction<LayerType,TTargetVector>
::Learn(LayerType* layer,ValueType lr)
{
  typename LayerType::WeightSetType::Pointer inputweightset = layer->GetModifiableInputWeightSet();

  // View the accumulated weight deltas as a (outputs x inputs) matrix.
  typename LayerType::ValuePointer currentdeltavalues = inputweightset->GetTotalDeltaValues();
  vnl_matrix<ValueType> DW_temp(currentdeltavalues,
                                inputweightset->GetNumberOfOutputNodes(),
                                inputweightset->GetNumberOfInputNodes());

  // Copy the bias deltas into a vector. copy_in() overwrites every element,
  // so no preliminary fill(0) is needed.
  typename LayerType::ValuePointer DBValues = inputweightset->GetDeltaBValues();
  vnl_vector<ValueType> DB;
  DB.set_size(inputweightset->GetNumberOfOutputNodes());
  DB.copy_in(DBValues);

  if(layer->GetLayerTypeCode() == LayerInterfaceType::OUTPUTLAYER)
    {
    // Output layer: back-propagation scaled by the passed learning rate.
    DW_temp *= lr;
    DB *= lr;
    }
  else
    {
    // Hidden layer: update centers (DW) and widths (DB) with the member
    // learning rates via gradient descent.
    DW_temp *= m_LearningRate2;
    DB *= m_LearningRate3;
    }

  // Write the scaled deltas back (DW first, then DB, as before).
  inputweightset->SetDWValues(DW_temp.data_block());
  inputweightset->SetDBValues(DB.data_block());
}
/** Three-argument Learn overload (layer, target errors, learning rate).
 *  Deliberately left as a no-op in this class: all parameters are marked
 *  itkNotUsed and the body is empty. Presumably this overload exists only
 *  to satisfy the base learning-function interface — confirm against the
 *  corresponding .h declaration. */
template<typename LayerType, typename TTargetVector>
void
RBFBackPropagationLearningFunction<LayerType,TTargetVector>
::Learn(LayerType* itkNotUsed(layer), TTargetVector itkNotUsed(errors), ValueType itkNotUsed(lr))
{
}
/** Print the object's state (learning rates and cached output errors).
 *
 *  Follows the ITK PrintSelf convention of delegating to the superclass
 *  first so inherited state appears before this class's own members
 *  (the original called Superclass::PrintSelf last). */
template<typename LayerType, typename TTargetVector>
void
RBFBackPropagationLearningFunction<LayerType,TTargetVector>
::PrintSelf( std::ostream& os, Indent indent ) const
{
  Superclass::PrintSelf( os, indent );
  os << indent << "RBFBackPropagationLearningFunction(" << this << ")" << std::endl;
  os << indent << "m_LearningRate1 = " << m_LearningRate1 << std::endl;
  os << indent << "m_LearningRate2 = " << m_LearningRate2 << std::endl;
  os << indent << "m_LearningRate3 = " << m_LearningRate3 << std::endl;
  os << indent << "m_OutputErrors = " << m_OutputErrors << std::endl;
}
} // end namespace Statistics
} // end namespace itk
#endif