pt_selu_activation_layer.h
/*
 * pocket-tensor (c) 2018 Gustavo Valiente gustavo.valiente.m@gmail.com
 * Kerasify (c) 2016 Robert W. Rose
 *
 * MIT License, see LICENSE file.
 */

#ifndef PT_SELU_ACTIVATION_LAYER_H
#define PT_SELU_ACTIVATION_LAYER_H

#include "pt_tensor.h"
#include "pt_activation_layer.h"

namespace pt
{

class SeluActivationLayer : public ActivationLayer
{

public:
    using ActivationLayer::apply;

    SeluActivationLayer() = default;

    void apply(Tensor& out) const final
    {
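        // SELU constants from Klambauer et al., "Self-Normalizing Neural
        // Networks" (2017).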
        constexpr auto alpha = FloatType(1.6732632423543772848170429916717);
        constexpr auto scale = FloatType(1.0507009873554804934193349852946);

        for(FloatType& value : out)
        {
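            // selu(x) = scale * x                    for x >= 0
            //         = scale * alpha * (e^x - 1)    for x < 0
            // std::expm1(x) computes e^x - 1 accurately for x near zero.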
            if(value < 0)
            {
                value = alpha * std::expm1(value);
            }

            value *= scale;
        }
    }
};

}

#endif
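
As a quick sanity check of the transform above, here is a minimal, self-contained sketch (standard C++ only; it deliberately avoids the pt::Tensor API, and applySelu is a hypothetical helper mirroring SeluActivationLayer::apply):

#include <cmath>
#include <cstdio>
#include <vector>

// Hypothetical standalone helper applying the same SELU math as
// SeluActivationLayer::apply, but over a plain std::vector<float>.
void applySelu(std::vector<float>& values)
{
    constexpr float alpha = 1.6732632423543772848170429916717f;
    constexpr float scale = 1.0507009873554804934193349852946f;

    for(float& value : values)
    {
        if(value < 0)
        {
            value = alpha * std::expm1(value);
        }

        value *= scale;
    }
}

int main()
{
    std::vector<float> values{-2.0f, -0.5f, 0.0f, 0.5f, 2.0f};
    applySelu(values);

    for(float value : values)
    {
        // Expected output includes e.g. selu(0.5) ~= 0.525350 and
        // selu(-0.5) ~= -0.691758.
        std::printf("%f\n", value);
    }
}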