c# derivative neural-network activation-function
I can find lists of activation functions in math form, but not in code. So I figured, if there should be such a list, this would be the right place for it in code, starting from translations of the algorithms in these two links: https://en.wikipedia.org/wiki/Activation_function and https://stats.stackexchange.com/questions/115258/comprehensive-list-of-activation-functions-in-neural-networks-with-pros-cons
The goal is to have an Activation class (with the functions and their derivatives) that is easily accessible through a UI.
Edit: my attempt
using UnityEngine;
using System.Collections;
using System;
///<summary>
///Activation Functions from:
///https://en.wikipedia.org/wiki/Activation_function
///https://stats.stackexchange.com/questions/115258/comprehensive-list-of-activation-functions-in-neural-networks-with-pros-cons
///A "D" in front of a name means the derivative of that function.
///x is the input of one perceptron; a is the alpha parameter some functions need.
///</summary>
[System.Serializable]
public class Activation
{
public ActivationType activationType;
public Activation(ActivationType type)
{
activationType = type;
}
public double AFunction(double x)
{
switch(activationType)
{
case ActivationType.Identity:
return Identity(x);
case ActivationType.BinaryStep:
return BinaryStep(x);
case ActivationType.Logistic:
return Logistic(x);
case ActivationType.Tanh:
return Tanh(x);
case ActivationType.ArcTan:
return ArcTan(x);
case ActivationType.ReLU:
return ReLU(x);
case ActivationType.SoftPlus:
return SoftPlus(x);
case ActivationType.BentIdentity:
return BentIdentity(x);
case ActivationType.Sinusoid:
return Sinusoid(x);
case ActivationType.Sinc:
return Sinc(x);
case ActivationType.Gaussian:
return Gaussian(x);
case ActivationType.Bipolar:
return Bipolar(x);
case ActivationType.BipolarSigmoid:
return BipolarSigmoid(x);
}
return 0;
}
public double ActivationDerivative(double x)
{
switch(activationType)
{
case ActivationType.Logistic:
return DLogistic(x);
case ActivationType.Tanh:
return DTanh(x);
case ActivationType.ArcTan:
return DArcTan(x);
case ActivationType.ReLU:
return DReLU(x);
case ActivationType.SoftPlus:
return DSoftPlus(x);
case ActivationType.BentIdentity:
return DBentIdentity(x);
case ActivationType.Sinusoid:
return DSinusoid(x);
case ActivationType.Sinc:
return DSinc(x);
case ActivationType.Gaussian:
return DGaussian(x);
case ActivationType.BipolarSigmoid:
return DBipolarSigmoid(x);
}
return 0;
}
public double AFunction(double x, double a)
{
switch(activationType)
{
case ActivationType.PReLU:
return PReLU(x,a);
case ActivationType.ELU:
return ELU(x,a);
}
return 0;
}
public double ActivationDerivative(double x, double a)
{
switch(activationType)
{
case ActivationType.PReLU:
return DPReLU(x,a);
case ActivationType.ELU:
return DELU(x,a);
}
return 0;
}
public double Identity(double x)
{
return x;
}
public double BinaryStep(double x)
{
return x < 0 ? 0 : 1;
}
public double Logistic(double x)
{
return 1/(1+Math.Exp(-x));
}
public double DLogistic(double x)
{
return Logistic(x)*(1-Logistic(x));
}
public double Tanh(double x)
{
return 2/(1+Math.Exp(-2*x))-1;
}
public double DTanh(double x)
{
return 1-Math.Pow(Tanh(x),2);
}
public double ArcTan(double x)
{
return Math.Atan(x);
}
public double DArcTan(double x)
{
return 1/(Math.Pow(x,2)+1); // d/dx atan(x) = 1/(x^2 + 1)
}
//Rectified Linear Unit
public double ReLU(double x)
{
return Math.Max(0,x);// x < 0 ? 0 : x;
}
public double DReLU(double x)
{
return x < 0 ? 0 : 1; // gradient is 0 for negative inputs, 1 otherwise
}
//Parametric Rectified Linear Unit
public double PReLU(double x, double a)
{
return x < 0 ? a*x : x;
}
public double DPReLU(double x, double a)
{
return x < 0 ? a : 1;
}
//Exponential Linear Unit
public double ELU(double x, double a)
{
return x < 0 ? a*(Math.Exp(x) - 1) : x;
}
public double DELU(double x, double a)
{
return x < 0 ? ELU(x, a) + a : 1; // a*e^x == ELU(x, a) + a
}
public double SoftPlus(double x)
{
return Math.Log(Math.Exp(x)+1);
}
public double DSoftPlus(double x)
{
return Logistic(x);
}
public double BentIdentity(double x)
{
return (((Math.Sqrt(Math.Pow(x,2)+1))-1)/2)+x;
}
public double DBentIdentity(double x)
{
return (x/(2*Math.Sqrt(Math.Pow(x,2)+1)))+1;
}
// public float SoftExponential(float x)
// {
//
// }
public double Sinusoid(double x)
{
return Math.Sin(x);
}
public double DSinusoid(double x)
{
return Math.Cos(x);
}
public double Sinc(double x)
{
return x == 0 ? 1 : Math.Sin(x)/x;
}
public double DSinc(double x)
{
return x == 0 ? 0 : (Math.Cos(x)/x)-(Math.Sin(x)/Math.Pow(x,2));
}
public double Gaussian(double x)
{
return Math.Exp(-Math.Pow(x, 2)); // e^(-x^2)
}
public double DGaussian(double x)
{
return -2 * x * Math.Exp(-Math.Pow(x, 2));
}
public double Bipolar(double x)
{
return x < 0 ? -1:1;
}
public double BipolarSigmoid(double x)
{
return (1-Math.Exp(-x))/(1+Math.Exp(-x));
}
public double DBipolarSigmoid(double x)
{
return 0.5 * (1 + BipolarSigmoid(x)) * (1 - BipolarSigmoid(x));
}
public double Scaler(double x, double min, double max)
{
return (x - min) / (max - min);
}
}
public enum ActivationType
{
Identity,
BinaryStep,
Logistic,
Tanh,
ArcTan,
ReLU,
PReLU,
ELU,
SoftPlus,
BentIdentity,
Sinusoid,
Sinc,
Gaussian,
Bipolar,
BipolarSigmoid
}
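Since the class is marked [System.Serializable] and exposes a public ActivationType field, it can live on a MonoBehaviour and be picked from the Inspector, which covers the "accessible via UI" goal. A minimal usage sketch (the NeuronDemo name and the test values are my own, purely for illustration):

using UnityEngine;

public class NeuronDemo : MonoBehaviour
{
    // Serialized, so the activation type shows up as a dropdown in the Inspector
    public Activation activation = new Activation(ActivationType.Tanh);

    void Start()
    {
        double x = 0.5;
        double y = activation.AFunction(x);             // forward value
        double dy = activation.ActivationDerivative(x); // slope used in backprop
        Debug.Log("f(x) = " + y + ", f'(x) = " + dy);
    }
}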
I don't know whether my math is correct, so I'm not posting this as an answer. If someone is willing to error-check it, I'll turn it into one.
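One way to error-check the derivatives without redoing the algebra is to compare each D-function against a central finite difference. A small sketch of that idea (the DerivativeCheck name and step size are my own, not part of the question's code):

using System;

public static class DerivativeCheck
{
    // Compares the analytic derivative against (f(x+h) - f(x-h)) / 2h.
    public static double Check(Activation act, double x, double h = 1e-6)
    {
        double numeric = (act.AFunction(x + h) - act.AFunction(x - h)) / (2 * h);
        double analytic = act.ActivationDerivative(x);
        return Math.Abs(analytic - numeric); // should be close to 0
    }
}

// Example: Check(new Activation(ActivationType.Gaussian), 0.7) should return a tiny value.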
I found this: the Soft Exponential activation function.

C# conversion:

public double SoftExponential(double x, double alpha = 0.0, double max_value = 0.0)
{
    // Soft Exponential activation function by Godfrey and Gashler
    // See: https://arxiv.org/pdf/1602.01321.pdf
    // α == 0: f(α, x) = x
    // α > 0:  f(α, x) = (exp(α*x) - 1) / α + α
    // α < 0:  f(α, x) = -ln(1 - α*(x + α)) / α
    // Note: max_value is unused; it is left over from the signature this was ported from.

    if (alpha == 0)
        return x;
    else if (alpha > 0)
        return alpha + (Math.Exp(alpha * x) - 1.0) / alpha;
    else
        return -Math.Log(1 - alpha * (x + alpha)) / alpha;
}
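To fit the class's convention of a D-prefixed derivative, differentiating the three cases above gives the following; this is my own derivation, so it is worth double-checking against the paper:

public double DSoftExponential(double x, double alpha = 0.0)
{
    // α == 0: f'(x) = 1
    // α > 0:  f'(x) = exp(α*x)
    // α < 0:  f'(x) = 1 / (1 - α*(x + α))
    if (alpha == 0)
        return 1;
    else if (alpha > 0)
        return Math.Exp(alpha * x);
    else
        return 1 / (1 - alpha * (x + alpha));
}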