I can find lists of activation functions in math notation, but not in code.
So I thought that if there is going to be such a list, this would be the right place for it in code.
Starting with conversions of the algorithms from these two links:
https://en.wikipedia.org/wiki/Activation_function
https://stats.stackexchange.com/questions/115258/comprehensive-list-of-activation-functions-in-neural-networks-with-pros-cons
The goal is an Activation class (holding the functions and their derivatives) that is easily accessible through the UI.
Edit:
My attempt:
using UnityEngine;
using System.Collections;
using System;

/// <summary>
/// Activation functions from:
/// https://en.wikipedia.org/wiki/Activation_function
/// https://stats.stackexchange.com/questions/115258/comprehensive-list-of-activation-functions-in-neural-networks-with-pros-cons
/// A leading "D" means the derivative of the function.
/// x is the input of one perceptron; a is the alpha value some functions need.
/// </summary>
[System.Serializable]
public class Activation
{
    public ActivationType activationType;

    public Activation(ActivationType type)
    {
        activationType = type;
    }

    public double AFunction(double x)
    {
        switch (activationType)
        {
            case ActivationType.Identity:       return Identity(x);
            case ActivationType.BinaryStep:     return BinaryStep(x);
            case ActivationType.Logistic:       return Logistic(x);
            case ActivationType.Tanh:           return Tanh(x);
            case ActivationType.ArcTan:         return ArcTan(x);
            case ActivationType.ReLU:           return ReLU(x);
            case ActivationType.SoftPlus:       return SoftPlus(x);
            case ActivationType.BentIdentity:   return BentIdentity(x);
            case ActivationType.Sinusoid:       return Sinusoid(x);
            case ActivationType.Sinc:           return Sinc(x);
            case ActivationType.Gaussian:       return Gaussian(x);
            case ActivationType.Bipolar:        return Bipolar(x);
            case ActivationType.Bipolarsigmoid: return Bipolarsigmoid(x);
        }
        return 0;
    }

    public double ActivationDerivative(double x)
    {
        switch (activationType)
        {
            case ActivationType.Logistic:       return DLogistic(x);
            case ActivationType.Tanh:           return DTanh(x);
            case ActivationType.ArcTan:         return DArcTan(x);
            case ActivationType.ReLU:           return DReLU(x);
            case ActivationType.SoftPlus:       return DSoftPlus(x);
            case ActivationType.BentIdentity:   return DBentIdentity(x);
            case ActivationType.Sinusoid:       return DSinusoid(x);
            case ActivationType.Sinc:           return DSinc(x);
            case ActivationType.Gaussian:       return DGaussian(x);
            case ActivationType.Bipolarsigmoid: return DBipolarsigmoid(x);
        }
        return 0;
    }

    // Overloads for the functions that take an alpha parameter.
    public double AFunction(double x, double a)
    {
        switch (activationType)
        {
            case ActivationType.PReLU: return PReLU(x, a);
            case ActivationType.ELU:   return ELU(x, a);
        }
        return 0;
    }

    public double ActivationDerivative(double x, double a)
    {
        switch (activationType)
        {
            case ActivationType.PReLU: return DPReLU(x, a);
            case ActivationType.ELU:   return DELU(x, a);
        }
        return 0;
    }

    public double Identity(double x) { return x; }

    public double BinaryStep(double x) { return x < 0 ? 0 : 1; }

    public double Logistic(double x) { return 1 / (1 + Math.Exp(-x)); }
    public double DLogistic(double x) { return Logistic(x) * (1 - Logistic(x)); }

    public double Tanh(double x) { return 2 / (1 + Math.Exp(-2 * x)) - 1; } // equivalent to Math.Tanh(x)
    public double DTanh(double x) { return 1 - Math.Pow(Tanh(x), 2); }

    public double ArcTan(double x) { return Math.Atan(x); } // Math.Atan, not Math.atan
    public double DArcTan(double x) { return 1 / (Math.Pow(x, 2) + 1); } // parentheses matter: 1/(x² + 1)

    // Rectified Linear Unit
    public double ReLU(double x) { return Math.Max(0, x); } // x < 0 ? 0 : x
    public double DReLU(double x) { return x < 0 ? 0 : 1; } // was Math.Max(0,1), which is always 1

    // Parametric Rectified Linear Unit
    public double PReLU(double x, double a) { return x < 0 ? a * x : x; }
    public double DPReLU(double x, double a) { return x < 0 ? a : 1; }

    // Exponential Linear Unit
    public double ELU(double x, double a) { return x < 0 ? a * (Math.Exp(x) - 1) : x; }
    public double DELU(double x, double a) { return x < 0 ? ELU(x, a) + a : 1; } // a*e^x == ELU(x,a) + a for x < 0

    public double SoftPlus(double x) { return Math.Log(Math.Exp(x) + 1); }
    public double DSoftPlus(double x) { return Logistic(x); }

    public double BentIdentity(double x) { return ((Math.Sqrt(Math.Pow(x, 2) + 1) - 1) / 2) + x; }
    public double DBentIdentity(double x) { return (x / (2 * Math.Sqrt(Math.Pow(x, 2) + 1))) + 1; }

    // public double SoftExponential(double x) { } // still missing here; see the conversion further down

    public double Sinusoid(double x) { return Math.Sin(x); }
    public double DSinusoid(double x) { return Math.Cos(x); }

    public double Sinc(double x) { return x == 0 ? 1 : Math.Sin(x) / x; }
    public double DSinc(double x) { return x == 0 ? 0 : (Math.Cos(x) / x) - (Math.Sin(x) / Math.Pow(x, 2)); }

    public double Gaussian(double x) { return Math.Exp(-Math.Pow(x, 2)); } // e^(-x²); Math.Pow(-x, 2) would lose the sign
    public double DGaussian(double x) { return -2 * x * Math.Exp(-Math.Pow(x, 2)); } // -2x·e^(-x²)

    public double Bipolar(double x) { return x < 0 ? -1 : 1; }

    public double Bipolarsigmoid(double x) { return (1 - Math.Exp(-x)) / (1 + Math.Exp(-x)); }
    public double DBipolarsigmoid(double x) { return 0.5 * (1 + Bipolarsigmoid(x)) * (1 - Bipolarsigmoid(x)); }

    public double Scaler(double x, double min, double max) { return (x - min) / (max - min); }
}

public enum ActivationType
{
    Identity, BinaryStep, Logistic, Tanh, ArcTan, ReLU, PReLU, ELU,
    SoftPlus, BentIdentity, Sinusoid, Sinc, Gaussian, Bipolar, Bipolarsigmoid
}
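For reference, a minimal sketch of how the class would be called from a perceptron's forward and backward pass; the surrounding network code (the weighted sum, the training loop) is assumed, not part of the class:

    Activation act = new Activation(ActivationType.Logistic);

    double sum = 0.42;                             // weighted input of one perceptron (hypothetical value)
    double output = act.AFunction(sum);            // forward pass
    double grad = act.ActivationDerivative(sum);   // used during backpropagation

    // Parametric functions take alpha through the two-argument overloads:
    Activation elu = new Activation(ActivationType.ELU);
    double y  = elu.AFunction(sum, 0.1);
    double dy = elu.ActivationDerivative(sum, 0.1);

Because the class is [System.Serializable] and activationType is a public enum field, Unity's Inspector should show it as a dropdown wherever the class appears as a serialized field of a MonoBehaviour, which covers the "accessible through the UI" goal.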
I'm not sure my math is correct, so I won't post this as an answer.
If someone is willing to error-check it, I can turn it into an answer.
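One way to do that error-checking without re-deriving everything by hand is to compare each D-function against a central-difference approximation. A sketch, assuming the same usings as the class above (the step size and tolerance are guesses, not tuned values):

    // (f(x+h) - f(x-h)) / (2h) should be close to Df(x) wherever f is smooth.
    public static bool DerivativeLooksRight(Func<double, double> f, Func<double, double> df,
                                            double x, double h = 1e-5, double tol = 1e-4)
    {
        double numeric = (f(x + h) - f(x - h)) / (2 * h);
        return Math.Abs(numeric - df(x)) < tol;
    }

    // e.g. for the Gaussian:
    // var a = new Activation(ActivationType.Gaussian);
    // Debug.Log(DerivativeLooksRight(a.Gaussian, a.DGaussian, 0.5)); // should print True

This catches exactly the kind of bug the original draft had in DReLU and DGaussian, though it can't check non-differentiable points like x = 0 for ReLU.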
Solution
I found this:
Soft Exponential activation function
C# conversion:
public double SoftExponential(double x, double alpha = 0.0, double max_value = 0.0)
{
    // Soft Exponential activation function by Godfrey and Gashler
    // See: https://arxiv.org/pdf/1602.01321.pdf
    // α == 0: f(α,x) = x
    // α  > 0: f(α,x) = (exp(αx) - 1) / α + α
    // α  < 0: f(α,x) = -ln(1 - α(x + α)) / α
    if (alpha == 0)
        return x;
    else if (alpha > 0)
        return alpha + (Math.Exp(alpha * x) - 1.0) / alpha;
    else
        return -Math.Log(1 - alpha * (x + alpha)) / alpha;
}
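To plug this into the Activation class's derivative switch it also needs a D-function. The following is my own derivation from the piecewise formulas above, not part of the found conversion, so it deserves the same error-checking as the rest:

    // Derivative of Soft Exponential with respect to x:
    // α >= 0: f'(α,x) = exp(αx)            (reduces to 1 when α == 0)
    // α <  0: f'(α,x) = 1 / (1 - α(x + α))
    public double DSoftExponential(double x, double alpha = 0.0)
    {
        if (alpha >= 0)
            return Math.Exp(alpha * x);
        else
            return 1.0 / (1.0 - alpha * (x + alpha));
    }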