// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Benoit Steiner <benoit.steiner.goog@gmail.com>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H
#define EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H
namespace Eigen {
/** scalar_sigmoid_fast_derivative_op
 * \ingroup CXX11_NeuralNetworks_Module
 * \brief Template functor to compute the fast derivative of a sigmoid
 *
 * The input y is expected to be the output of the sigmoid itself
 * (y = sigmoid(x)); the derivative is then evaluated as y * (1 - y),
 * which avoids re-computing the sigmoid.
 *
 * \sa class CwiseUnaryOp, Cwise::sigmoid_fast_derivative()
 */
template <typename T>
struct scalar_sigmoid_fast_derivative_op {
  EIGEN_EMPTY_STRUCT_CTOR(scalar_sigmoid_fast_derivative_op)
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T operator()(const T& y) const {
    const T one = T(1);
    return (one - y) * y;
  }
  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet packetOp(const Packet& y) const {
    const Packet one = internal::pset1<Packet>(T(1));
    return internal::pmul(internal::psub(one, y), y);
  }
};
namespace internal {
template <typename T>
struct functor_traits<scalar_sigmoid_fast_derivative_op<T> > {
  enum {
    Cost = NumTraits<T>::AddCost * 2 + NumTraits<T>::MulCost,
    PacketAccess = packet_traits<T>::HasSub && packet_traits<T>::HasMul
  };
};
} // namespace internal
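// Usage sketch (illustrative only, not part of the module): applying the
// functor through the Tensor API, assuming <unsupported/Eigen/CXX11/Tensor>
// has been included and y already holds sigmoid outputs.
//
//   Eigen::Tensor<float, 1> y(3);
//   y.setValues({0.1f, 0.5f, 0.9f});
//   Eigen::Tensor<float, 1> dy =
//       y.unaryExpr(Eigen::scalar_sigmoid_fast_derivative_op<float>());
//   // dy(i) == (1 - y(i)) * y(i)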
/** scalar_tanh_fast_derivative_op
 * \ingroup CXX11_NeuralNetworks_Module
 * \brief Template functor to compute the fast derivative of a tanh
 *
 * The input y is expected to be the output of the tanh itself
 * (y = tanh(x)); the derivative is then evaluated as 1 - y * y,
 * which avoids re-computing the tanh.
 *
 * \sa class CwiseUnaryOp, Cwise::tanh_fast_derivative()
 */
template <typename T>
struct scalar_tanh_fast_derivative_op {
  EIGEN_EMPTY_STRUCT_CTOR(scalar_tanh_fast_derivative_op)
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T operator()(const T& y) const {
    const T one = T(1);
    return one - (y * y);
  }
  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet packetOp(const Packet& y) const {
    const Packet one = internal::pset1<Packet>(T(1));
    return internal::psub(one, internal::pmul(y, y));
  }
};
namespace internal {
template <typename T>
struct functor_traits<scalar_tanh_fast_derivative_op<T> > {
  enum {
    Cost = NumTraits<T>::AddCost * 2 + NumTraits<T>::MulCost,
    PacketAccess = packet_traits<T>::HasSub && packet_traits<T>::HasMul
  };
};
} // namespace internal
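// Usage sketch (illustrative only, not part of the module): same pattern as
// above, assuming <unsupported/Eigen/CXX11/Tensor> has been included and y
// already holds tanh outputs.
//
//   Eigen::Tensor<float, 1> y(3);
//   y.setValues({-0.9f, 0.0f, 0.7f});
//   Eigen::Tensor<float, 1> dy =
//       y.unaryExpr(Eigen::scalar_tanh_fast_derivative_op<float>());
//   // dy(i) == 1 - y(i) * y(i)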
/** scalar_clip_op
 * \ingroup CXX11_NeuralNetworks_Module
 * \brief Template functor to clip the magnitude of the first scalar.
 *
 * The result is the first argument a clamped to the range [-b, b],
 * where b is the second argument.
 *
 * \sa class CwiseBinaryOp, MatrixBase::Clip
 */
template <typename Scalar>
struct scalar_clip_op {
  EIGEN_EMPTY_STRUCT_CTOR(scalar_clip_op)
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar
  operator()(const Scalar& a, const Scalar& b) const {
    return numext::mini(numext::maxi(a, -b), b);
  }
  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Packet
  packetOp(const Packet& a, const Packet& b) const {
    return internal::pmin(internal::pmax(a, internal::pnegate(b)), b);
  }
};
namespace internal {
template <typename Scalar>
struct functor_traits<scalar_clip_op<Scalar> > {
  enum {
    Cost = NumTraits<Scalar>::AddCost * 3,
    PacketAccess = packet_traits<Scalar>::HasMax &&
                   packet_traits<Scalar>::HasMin &&
                   packet_traits<Scalar>::HasNegate
  };
};
} // namespace internal
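// Usage sketch (illustrative only, not part of the module): element-wise
// clipping through the Tensor API, assuming <unsupported/Eigen/CXX11/Tensor>
// has been included. The tensor name "bounds" is just an example holding the
// per-element clipping magnitudes.
//
//   Eigen::Tensor<float, 1> a(3), bounds(3);
//   a.setValues({-2.0f, 0.3f, 5.0f});
//   bounds.setValues({1.0f, 1.0f, 1.0f});
//   Eigen::Tensor<float, 1> clipped =
//       a.binaryExpr(bounds, Eigen::scalar_clip_op<float>());
//   // clipped(i) == min(max(a(i), -bounds(i)), bounds(i))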
} // end namespace Eigen
#endif // EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H