/usr/include/shark/Models/Softmax.h is in libshark-dev 3.1.3+ds1-2.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
//===========================================================================
/*!
*
*
* \brief Soft-max transformation.
*
*
*
* \author O. Krause, T. Glasmachers
* \date 2010-2011
*
*
* \par Copyright 1995-2015 Shark Development Team
*
* <BR><HR>
* This file is part of Shark.
* <http://image.diku.dk/shark/>
*
* Shark is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Shark is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Shark. If not, see <http://www.gnu.org/licenses/>.
*
*/
//===========================================================================
#ifndef SHARK_MODELS_SOFTMAX_H
#define SHARK_MODELS_SOFTMAX_H
#include <shark/Core/DLLSupport.h>
#include <shark/Models/AbstractModel.h>
namespace shark {
///
/// \brief Softmax function
///
/// \par
/// Squash an n-dimensional real vector space
/// to the (n-1)-dimensional probability simplex:
/// \f[
/// f_i(x) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}
/// \f]
/// In other words, the input is exponentiated component-wise and then normalized to unit sum.
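/// For example, the input \f$ x = (0, \ln 2, \ln 3) \f$ is mapped to \f$ (1/6, 2/6, 3/6) \f$,
/// since \f$ \exp(0) + \exp(\ln 2) + \exp(\ln 3) = 1 + 2 + 3 = 6 \f$.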
///
/// In the case of n=1, the output is
/// \f[
///     f_i(x) = \frac{\exp((2i-1)x)}{\exp(x)+\exp(-x)}, \qquad i \in \{0,1\},
/// \f]
/// and the output dimension is 2.
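/// Equivalently, for a scalar input x the two outputs are the logistic sigmoid
/// \f$ f_1(x) = 1/(1+\exp(-2x)) \f$ and its complement \f$ f_0(x) = 1 - f_1(x) \f$.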
///
/// This convention ensures that all models that are trained via CrossEntropy
/// can be used as input to this model and the output will be the probability
/// of the labels.
class Softmax : public AbstractModel<RealVector,RealVector>
{
private:
    struct InternalState : public State{
        RealMatrix results;

        void resize(std::size_t numPatterns,std::size_t inputs){
            results.resize(numPatterns,inputs);
        }
    };

public:
    /// Constructor
    SHARK_EXPORT_SYMBOL Softmax(size_t inputs);
    /// Constructor
    SHARK_EXPORT_SYMBOL Softmax();

    /// \brief From INameable: return the class name.
    std::string name() const
    { return "Softmax"; }

    /// The transformation has no parameters, so the parameter vector is empty.
    RealVector parameterVector()const{
        return RealVector();
    }
    void setParameterVector(RealVector const& newParameters){
        SIZE_CHECK(newParameters.size()==0);
    }

    size_t inputSize()const{
        return m_inputSize;
    }
    /// A one-dimensional input yields a two-dimensional output (see class description).
    size_t outputSize()const{
        return m_inputSize==1?2:m_inputSize;
    }
    size_t numberOfParameters()const{
        return 0;
    }

    boost::shared_ptr<State> createState()const{
        return boost::shared_ptr<State>(new InternalState());
    }

    /// Evaluates the softmax transformation for a batch of patterns.
    SHARK_EXPORT_SYMBOL void eval(BatchInputType const& patterns,BatchOutputType& output)const;
    /// Evaluates the transformation and stores intermediate results for the derivatives in the state.
    SHARK_EXPORT_SYMBOL void eval(BatchInputType const& patterns,BatchOutputType& output, State & state)const;
    using AbstractModel<RealVector,RealVector>::eval;

    /// Weighted derivative of the outputs with respect to the (empty) parameter vector.
    SHARK_EXPORT_SYMBOL void weightedParameterDerivative(
        BatchInputType const& patterns, BatchOutputType const& coefficients, State const& state, RealVector& gradient
    )const;
    /// Weighted derivative of the outputs with respect to the inputs.
    SHARK_EXPORT_SYMBOL void weightedInputDerivative(
        BatchInputType const& patterns, RealMatrix const& coefficients, State const& state, BatchOutputType& gradient
    )const;

    /// Sets the input dimension of the model.
    void setStructure(std::size_t inputSize){
        m_inputSize = inputSize;
    }

    /// From ISerializable, reads a model from an archive
    SHARK_EXPORT_SYMBOL void read( InArchive & archive );
    /// From ISerializable, writes a model to an archive
    SHARK_EXPORT_SYMBOL void write( OutArchive & archive ) const;

private:
    std::size_t m_inputSize;
};
}
#endif
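
Below is a minimal, illustrative usage sketch; it is not part of the packaged header. It relies only on the declarations shown above, namely the Softmax(size_t) constructor and the batch eval() overload, and assumes that the batch type of RealVector is RealMatrix (as in Shark 3.x); the program must be linked against the Shark library.

#include <shark/Models/Softmax.h>

int main(){
    shark::Softmax model(3);                  // softmax over three real-valued inputs
    shark::RealMatrix patterns(1, 3);         // a batch with one pattern per row
    patterns(0,0) = 1.0; patterns(0,1) = 2.0; patterns(0,2) = 3.0;
    shark::RealMatrix probabilities(1, 3);    // receives one probability vector per row
    model.eval(patterns, probabilities);      // each output row is non-negative and sums to 1
    return 0;
}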