This file is indexed.

/usr/include/shark/Models/RecurrentStructure.h is in libshark-dev 3.0.1+ds1-2ubuntu1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
//===========================================================================
/*! \brief Offers a basic structure for recurrent networks
 * 
 *  \author  O. Krause
 *   \date    2011
 * 
 * \par Copyright 1995-2015 Shark Development Team
 * 
 * <BR><HR>
 * This file is part of Shark.
 * <http://image.diku.dk/shark/>
 * 
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published 
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * 
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 * 
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark.  If not, see <http://www.gnu.org/licenses/>.
 *
 */
#ifndef SHARK_ML_MODEL_RECURENTNETWORK_H
#define SHARK_ML_MODEL_RECURENTNETWORK_H

#include <shark/Core/DLLSupport.h>
#include <shark/LinAlg/Base.h>
#include <shark/Core/ISerializable.h>
namespace shark{
//!  \brief Offers a basic structure for recurrent networks.
//!
//! it is possible to define the type of sigmoids and the form of the connection matrix.
//! this structure can be shared between different types of networks like the RNNet and the OnlineRNNet
class RecurrentStructure: public ISerializable
{
public:
	//! Creates an empty recurrent neural network.
	//! A call to setStructure is needed afterwards to configure the topology of the network
	SHARK_EXPORT_SYMBOL RecurrentStructure();


	//! type enum for the different variants of sigmoids
	enum SigmoidType{
		///f(x) = x
		Linear,
		///f(x) = 1/(1+exp(-x))
		Logistic,
		///f(x) = tanh(x)
		Tanh,
		/// f(x) = x/(1+|x|)
		FastSigmoid 
	};

	//!returns the connection matrix of the network.
	//!
	//!The format is best described with an example:
	//!given a network with 2 inputs, 2 outputs and 2 hidden units,
	//!where the inputs are only connected to the hidden units.
	//!The corresponding matrix looks like this:
	//!
	//!1 2 3 4 5 6 7
	//!1 1 0 1 1 1 1 first hidden
	//!1 1 0 1 1 1 1 second hidden
	//!0 0 0 1 1 1 1 first output
	//!0 0 0 1 1 1 1 second output
	//!
	//!The ith row stands for the ith neuron of the network and when an element
	//!(i,j) is 1, the ith unit will receive input from unit j.
	//! the first j=0,...,inputs-1 columns are the input neurons, followed by the column of the bias,
	//! which is completely zero in this example (i.e. no bias is used)
	//!if j is a hidden or output neuron, the activation from the PREVIOUS time step
	//!is used. if j is an input neuron, the current input is used.
	//!input neurons can't receive activation. This is no limitation, since the hidden
	//!layer can be subdivided in arbitrary sublayers when the right topology is used.
	//! the column directly after the inputs is reserved for the bias unit, so the matrix has size
	//! NxN+1 for N = number of hidden + output neurons
	const IntMatrix& connections()const{
		return m_connectionMatrix;
	}
	//! returns whether the connection between neuron i and unit j exists
	//! (nonzero entry in the connection matrix, see #connections for the layout)
	bool connection(std::size_t i, std::size_t j)const{
		return m_connectionMatrix(i,j);
	}

	//!returns the current weight matrix; same layout as #connections,
	//!entries are zero wherever the corresponding connection does not exist
	const RealMatrix& weights()const{
		return m_weights;
	}
	//! returns the weight of the connection between neuron i and unit j
	double weight(std::size_t i, std::size_t j)const{
		return m_weights(i,j);
	}

	//!returns the type of sigmoid used in this network
	SigmoidType sigmoidType() const {
		return m_sigmoidType;
	}

	//!sets the type of sigmoid used in this network
	//!\param sigmoidType the type of sigmoid
	void setSigmoidType(SigmoidType sigmoidType){
		m_sigmoidType = sigmoidType;
	}

	//!Sets the weight matrix. It is not allowed that elements are non-zero
	//!when the element in the connection matrix is 0!
	//!\param weights the new weight matrix
	SHARK_EXPORT_SYMBOL void setWeights(const RealMatrix& weights);

	//!  \brief Based on a given connection matrix a network is created.
	//!
	//!  This method needs to know how many inputs and outputs the network has
	//!  and how the units are connected.
	//!
	//!  If a standard structure is needed, see the other version of this method.
	//!  Also see #connections for a quick explanation of the matrix format
	//!
	//! After this operation, all weights are initialized to 0.
	//!
	//!
	//! \param inputs number of input neurons of the network
	//! \param outputs number of output neurons of the network
	//! \param connections the connection matrix defining the topology, see #connections for the format
	//! \param sigmoidType the type of the sigmoid to be used. the default is the Logistic function
	SHARK_EXPORT_SYMBOL void setStructure(std::size_t inputs, std::size_t outputs, const IntMatrix& connections, SigmoidType sigmoidType = Logistic);


	//! \brief Creates a fully connected topology for the network with optional bias
	//!
	//!  After a call, the network will have hidden+out units.
	//!
	//!
	//! \param in number of input neurons
	//! \param hidden number of hidden neurons
	//! \param out number of output neurons
	//! \param bias enables bias neuron, default is true
	//! \param sigmoidType the type of the sigmoid to be used. the default is the Logistic function
	SHARK_EXPORT_SYMBOL void setStructure(std::size_t in, std::size_t hidden, std::size_t out, bool bias = true, SigmoidType sigmoidType = Logistic);

	//! get internal parameters of the model
	SHARK_EXPORT_SYMBOL RealVector parameterVector() const;
	
	//! set internal parameters of the model
	SHARK_EXPORT_SYMBOL void setParameterVector(RealVector const& newParameters);

	//! From ISerializable, reads the Network from an archive
	SHARK_EXPORT_SYMBOL void read( InArchive & archive );

	//! From ISerializable, writes the Network to an archive
	SHARK_EXPORT_SYMBOL void write( OutArchive & archive ) const;

	//! The number of input neurons of the network
	std::size_t inputs()const{
		return m_inputNeurons;
	}
	//! The number of output neurons of the network
	std::size_t outputs()const{
		return m_outputNeurons;
	}
	//! The total number of neurons of the network (input, hidden and output)
	std::size_t numberOfNeurons()const{
		return m_numberOfNeurons;
	}
	//! The total number of units of the network (neurons plus the bias unit)
	std::size_t numberOfUnits()const{
		return m_numberOfUnits;
	}

	//! The index of the bias unit
	std::size_t bias()const{
		return m_bias;
	}

	//! number of parameters of the network
	std::size_t parameters()const{
		return m_numberOfParameters;
	}

	//! Activation function for a neuron.
	SHARK_EXPORT_SYMBOL double neuron(double activation);

	//! Computes the derivative of the neuron.
	SHARK_EXPORT_SYMBOL double neuronDerivative(double activation);

protected:

	//================Convenience index variables=====================
	//! The total number of neurons of the network (input, output and hidden).
	std::size_t m_numberOfNeurons;

	//! total number units of the network (input, output, hidden and bias)
	std::size_t m_numberOfUnits;

	//! The number of input neurons of the network
	std::size_t m_inputNeurons;
	//! The number of output neurons of the network
	std::size_t m_outputNeurons;
	//! The number of hidden neurons of the network
	std::size_t m_hidden;

	//! index of the bias unit
	std::size_t m_bias;

	//! type of Sigmoid used by the network
	SigmoidType m_sigmoidType;

	//===================network variables========================
	//! The absolute number of parameters of the network
	std::size_t m_numberOfParameters;

	//! stores the topology of the network.
	//! Element (i,j) is 1 if the ith neuron receives input from unit j.
	//! The data for neuron i is stored in the ith row of the matrix.
	IntMatrix m_connectionMatrix;

	//! stores the weights of the network; zero wherever the corresponding
	//! entry of m_connectionMatrix is zero. The weights for neuron i are
	//! stored in the ith row of the matrix.
	RealMatrix m_weights;
};
}

#endif //SHARK_ML_MODEL_RECURENTNETWORK_H