/usr/lib/python3/dist-packages/keras/activations.py is in python3-keras 2.1.1-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
from __future__ import absolute_import
import six
import warnings
from . import backend as K
from .utils.generic_utils import deserialize_keras_object
from .engine import Layer


def softmax(x, axis=-1):
    """Softmax activation function.

    # Arguments
        x: Tensor.
        axis: Integer, axis along which the softmax normalization is applied.

    # Returns
        Tensor, output of softmax transformation.

    # Raises
        ValueError: In case `dim(x) == 1`.
    """
    ndim = K.ndim(x)
    if ndim == 2:
        return K.softmax(x)
    elif ndim > 2:
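        # For higher-rank tensors, compute softmax manually along `axis`;
        # subtracting the max before exp() improves numerical stability.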
        e = K.exp(x - K.max(x, axis=axis, keepdims=True))
        s = K.sum(e, axis=axis, keepdims=True)
        return e / s
    else:
        raise ValueError('Cannot apply softmax to a tensor that is 1D')


def elu(x, alpha=1.0):
    return K.elu(x, alpha)


def selu(x):
    """Scaled Exponential Linear Unit. (Klambauer et al., 2017)

    # Arguments
        x: A tensor or variable to compute the activation function for.

    # References
        - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
    """
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946
    return scale * K.elu(x, alpha)


def softplus(x):
    return K.softplus(x)


def softsign(x):
    return K.softsign(x)


def relu(x, alpha=0., max_value=None):
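    # alpha gives the slope for x < 0 (leaky ReLU when non-zero);
    # max_value, if set, clips the output from above.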
    return K.relu(x, alpha=alpha, max_value=max_value)


def tanh(x):
    return K.tanh(x)


def sigmoid(x):
    return K.sigmoid(x)


def hard_sigmoid(x):
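    # Piecewise-linear approximation of sigmoid; cheaper to evaluate,
    # with the exact segments defined by the backend.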
    return K.hard_sigmoid(x)


def linear(x):
    return x


def serialize(activation):
    return activation.__name__


def deserialize(name, custom_objects=None):
    return deserialize_keras_object(name,
                                    module_objects=globals(),
                                    custom_objects=custom_objects,
                                    printable_module_name='activation function')


def get(identifier):
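    # Resolve an activation: None maps to the identity (linear), strings are
    # looked up among the functions in this module, and callables (including
    # custom functions) are returned unchanged.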
    if identifier is None:
        return linear
    if isinstance(identifier, six.string_types):
        identifier = str(identifier)
        return deserialize(identifier)
    elif callable(identifier):
        if isinstance(identifier, Layer):
            warnings.warn((
                'Do not pass a layer instance (such as {identifier}) as the '
                'activation argument of another layer. Instead, advanced '
                'activation layers should be used just like any other '
                'layer in a model.'
            ).format(identifier=identifier.__class__.__name__))
        return identifier
    else:
        raise ValueError('Could not interpret '
                         'activation function identifier:', identifier)
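
For reference, a minimal sketch of how this module resolves activation
identifiers, assuming Keras 2.1.x with a working backend is installed (the
NumPy input below is purely illustrative):

    import numpy as np
    from keras import activations
    from keras import backend as K

    # Strings and None are resolved by get(); callables pass through.
    assert activations.get('softmax') is activations.softmax
    assert activations.get(None) is activations.linear
    assert activations.get(activations.relu) is activations.relu

    # serialize() records only the function's name.
    assert activations.serialize(activations.tanh) == 'tanh'

    # Applying softmax to a 2D variable: each row sums to 1.
    x = K.variable(np.array([[1.0, 2.0, 3.0]]))
    print(K.eval(activations.softmax(x)))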