object ActivationFun
The ActivationFun object contains common activation functions and provides
both scalar and vector versions.
- See also
en.wikipedia.org/wiki/Activation_function
Naming convention:
  fun    scalar version of activation function (e.g., sigmoid)
  funV   vector version of activation function (e.g., sigmoidV)
  funM   matrix version of activation function (e.g., sigmoidM)
  funDV  vector version of derivative (e.g., sigmoidDV)
  funDM  matrix version of derivative (e.g., sigmoidDM)
------------------------------------------------------------------------------
Supports: id, reLU, tanh, sigmoid, gaussian, softmax
Related functions: logistic, logit
- Alphabetic
- By Inheritance
- ActivationFun
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
gaussian(t: Double): Double
Compute the value of the 'gaussian' function at scalar 't'.
Compute the value of the 'gaussian' function at scalar 't'.
- t
the gaussian function argument
-
def
gaussianDM(yp: MatriD, tt: MatriD): MatriD
Compute the derivative matrix for 'gaussian' function at matrix 'yp' where 'yp' is pre-computed by 'yp = gaussianM (tt)'.
Compute the derivative matrix for 'gaussian' function at matrix 'yp' where 'yp' is pre-computed by 'yp = gaussianM (tt)'.
- yp
the derivative function matrix argument
- tt
the domain value for the function
-
def
gaussianDV(yp: VectoD, tt: VectoD): VectoD
Compute the derivative vector for 'gaussian' function at vector 'yp' where 'yp' is pre-computed by 'yp = gaussianV (tt)'.
Compute the derivative vector for 'gaussian' function at vector 'yp' where 'yp' is pre-computed by 'yp = gaussianV (tt)'.
- yp
the derivative function vector argument
- tt
the domain value for the function
- val gaussianM: FunctionM_2M
- val gaussianV: FunctionV_2V
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
id(t: Double): Double
Compute the value of the identity 'id' function at scalar 't'.
Compute the value of the identity 'id' function at scalar 't'.
- t
the id function argument
- val idDM: FunctionM_2M
-
def
idDV(yp: VectoD): VectoD
Compute the derivative vector for 'id' function at vector 'yp' where 'yp' is pre-computed by 'yp = idV (tt)'.
Compute the derivative vector for 'id' function at vector 'yp' where 'yp' is pre-computed by 'yp = idV (tt)'.
- yp
the derivative function vector argument
- def idM(tt: MatriD): MatriD
- def idV(tt: VectoD): VectoD
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
logistic(t: Double, a: Double = 1.0, b: Double = 1.0, c: Double = 1.0): Double
Compute the value of the 'logistic' function at scalar 't'.
Compute the value of the 'logistic' function at scalar 't'. With the default settings, it is identical to 'sigmoid'. Note, it is not typically used as an activation function
- t
the logistic function argument
- a
the shift parameter (1 => mid at 0, <1 => mid shift left, >1 => mid shift right)
- b
the spread parameter (1 => sigmoid rate, <1 => slower than, >1 => faster than); although typically positive, a negative b will cause the function to decrease
- c
the scale parameter (range is 0 to c)
- See also
www.cs.xu.edu/math/math120/01f/logistic.pdf
- def logisticV(tt: VectoD, a: Double = 1.0, b: Double = 1.0, c: Double = 1.0): VectoD
-
def
logit(p: Double): Double
Compute the log of the odds of an event occurring (e.g., success, 1).
Compute the log of the odds of an event occurring (e.g., success, 1). The inverse of the 'logit' function is the standard logistic function (sigmoid function). Note, it is not typically used as an activation function
- p
the probability, a number between 0 and 1.
- val logitV: FunctionV_2V
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
reLU(t: Double): Double
Compute the value of the rectified linear unit 'reLU' function at scalar 't'.
Compute the value of the rectified linear unit 'reLU' function at scalar 't'.
- t
the reLU function argument
- val reLUDM: FunctionM_2M
-
def
reLUDV(yp: VectoD): VectoD
Compute the derivative vector for 'reLU' function at vector 'yp' where 'yp' is pre-computed by 'yp = reLUV (tt)'.
Compute the derivative vector for 'reLU' function at vector 'yp' where 'yp' is pre-computed by 'yp = reLUV (tt)'.
- yp
the derivative function vector argument
- val reLUM: FunctionM_2M
- val reLUV: FunctionV_2V
-
def
sigmoid(t: Double): Double
Compute the value of the 'sigmoid' function at 't'.
Compute the value of the 'sigmoid' function at 't'. This is a special case of the logistic function, where 'a = 1', 'b = 1' and 'c = 1' (the logistic defaults). It is also referred to as the standard logistic function. It is also the inverse of the logit function.
- t
the sigmoid function argument
- val sigmoidDM: FunctionM_2M
-
def
sigmoidDV(yp: VectoD): VectoD
Compute the derivative vector for 'sigmoid' function at vector 'yp' where 'yp' is pre-computed by 'yp = sigmoidV (tt)'.
Compute the derivative vector for 'sigmoid' function at vector 'yp' where 'yp' is pre-computed by 'yp = sigmoidV (tt)'.
- yp
the derivative function vector argument
- val sigmoidM: FunctionM_2M
- val sigmoidV: FunctionV_2V
-
def
softmaxDM(yp: VectoD): MatriD
Compute the derivative matrix for 'softmax' function at vector 'yp' where 'yp' is pre-computed by 'yp = softmaxV (tt)'.
Compute the derivative matrix for 'softmax' function at vector 'yp' where 'yp' is pre-computed by 'yp = softmaxV (tt)'.
- yp
the derivative function vector argument
- val softmaxM: FunctionM_2M
-
def
softmaxV(tt: VectoD): VectoD
Compute the vector of values of the 'softmax' function applied to vector 'tt'.
Compute the vector of values of the 'softmax' function applied to vector 'tt'.
- tt
the softmax function vector argument
- See also
https://en.wikipedia.org/wiki/Softmax_function Note, scalar function version is not needed.
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
- val tanhDM: FunctionM_2M
-
def
tanhDV(yp: VectoD): VectoD
Compute the derivative vector for 'tanh' function at vector 'yp' where 'yp' is pre-computed by 'yp = tanhV (tt)'.
Compute the derivative vector for 'tanh' function at vector 'yp' where 'yp' is pre-computed by 'yp = tanhV (tt)'.
- yp
the derivative function vector argument
- val tanhM: FunctionM_2M
-
def
tanhV(tt: VectoD): VectoD
Compute the vector of values of the 'tanh' function applied to vector 'tt'.
Compute the vector of values of the 'tanh' function applied to vector 'tt'.
- tt
the tanh function vector argument
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )