{- | Provides activation functions - unary functions that are differentiable almost everywhere and so can be used in backpropagation of the loss.
-}


module Synapse.NN.Layers.Activations
    ( -- * 'ActivationFn' type alias and 'Activation' newtype
      ActivationFn
    , activateScalar
    , activateMat

    , Activation (Activation, unActivation)
    , layerActivation

      -- * Activation functions
    , relu
    , sigmoid
    ) where


import Synapse.NN.Layers.Layer (AbstractLayer(..), LayerConfiguration)

import Synapse.Tensors (ElementwiseScalarOps((+.), (/.)), SingletonOps(unSingleton))

import Synapse.Tensors.Mat (Mat)
import qualified Synapse.Tensors.Mat as M

import Synapse.Autograd (Symbol(unSymbol), SymbolMat, Symbolic, constSymbol)


-- | 'ActivationFn' is a type alias that represents unary functions that are differentiable almost everywhere.
type ActivationFn a = SymbolMat a -> SymbolMat a


-- | Applies activation function to a scalar to produce a new scalar.
activateScalar :: Symbolic a => ActivationFn a -> a -> a
activateScalar fn = unSingleton . unSymbol . fn . constSymbol . M.singleton
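
{- A quick sanity check of 'activateScalar' (illustrative only - it assumes the scalar
type used, e.g. 'Double', has a 'Symbolic' instance):

@
activateScalar relu (-3.0)   -- 0.0
activateScalar sigmoid 0.0   -- 0.5
@
-}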

-- | Applies activation function to a matrix to produce a new matrix.
activateMat :: Symbolic a => ActivationFn a -> Mat a -> Mat a
activateMat fn = unSymbol . fn . constSymbol
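
{- 'activateMat' performs the same round trip for whole matrices: the input is wrapped
in a constant symbol, applied, and unwrapped, so no gradients are recorded. A minimal
sketch using only 'M.singleton' from this module's imports (again assuming a
'Symbolic' instance for 'Double'):

@
activateMat relu (M.singleton (-3.0))   -- 1x1 matrix holding 0.0
@
-}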


{- | 'Activation' newtype wraps 'ActivationFn's - unary functions that can be thought of as activation functions for neural network layers.

Any activation function must be differentiable almost everywhere, and so
it must be a function that operates on 'Synapse.Autograd.Symbol's, which allows the function to be differentiated when needed.
-}
newtype Activation a = Activation
    { unActivation :: ActivationFn a  -- ^ Unwraps 'Activation' newtype.
    }
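
{- For example, wrapping 'relu' (defined below) yields a value that can be passed
wherever an 'Activation' layer is expected:

@
reluActivation :: (Symbolic a, Fractional a) => Activation a
reluActivation = Activation relu
@
-}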

instance AbstractLayer Activation where
    inputSize _ = Nothing
    outputSize _ = Nothing

    nParameters _ = 0
    getParameters _ _ = []
    updateParameters = const

    symbolicForward _ input (Activation fn) = (fn input, M.singleton 0)
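
{- Note on the instance: an activation layer is stateless - it imposes no size
constraints and holds no trainable parameters, so 'getParameters' returns an empty
list and 'updateParameters' ignores the update. The second component of the pair
returned by 'symbolicForward' is a singleton zero matrix, presumably the layer's
(always zero) regularization loss contribution.
-}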

-- | Creates a configuration for an activation layer.
layerActivation :: Activation a -> LayerConfiguration (Activation a)
layerActivation = const
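
{- Judging by its instantiation with 'const' here, a 'LayerConfiguration' is a function
from the layer's input size, which an activation layer does not depend on. A sketch of
building one:

@
sigmoidLayer :: (Symbolic a, Floating a) => LayerConfiguration (Activation a)
sigmoidLayer = layerActivation (Activation sigmoid)
@
-}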


-- Activation functions


-- | ReLU function.
relu :: (Symbolic a, Fractional a) => ActivationFn a
relu x = (x + abs x) /. 2.0
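
{- The definition relies on the identity max(0, x) = (x + |x|) / 2, which uses only the
'Num' operations available on symbols and is differentiable everywhere except x = 0
(hence "almost everywhere"). For example, relu(-3) = (-3 + 3) / 2 = 0 and
relu(3) = (3 + 3) / 2 = 3.
-}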

-- | Sigmoid function.
sigmoid :: (Symbolic a, Floating a) => ActivationFn a
sigmoid x = recip $ exp (negate x) +. 1.0
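
{- 'sigmoid' is the standard logistic function sigmoid(x) = 1 / (1 + exp (-x)), written
with 'recip' and the elementwise scalar '+.' so the whole expression stays within
operations defined on symbols. For example, sigmoid(0) = 1 / (1 + 1) = 0.5, and
outputs always lie in the open interval (0, 1).
-}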