@@ -15,49 +15,84 @@ import java.util.*;
 public abstract class Nerve {
     private List<Nerve> son = new ArrayList<>();//neurons in the next layer, connected through the axon
     private List<Nerve> fathor = new ArrayList<>();//neurons in the previous layer, connected through the dendrites
-    private Map<Integer, Double> dendrites = new HashMap<>();//weights from the previous layer
+    protected Map<Integer, Double> dendrites = new HashMap<>();//weights from the previous layer
+    protected Map<Integer, Double> wg = new HashMap<>();//product of each previous-layer weight and the gradient
     private int id;//index of this neuron among its peers; the ID must be unique within the layer
     protected int upNub;//number of neurons in the previous layer
+    protected int downNub;//number of neurons in the next layer
     protected Map<Long, List<Double>> features = new HashMap<>();
     static final Logger logger = LogManager.getLogger(Nerve.class);
-    private double threshold;//threshold of this neuron
+    protected double threshold;//threshold of this neuron
     protected ActiveFunction activeFunction = new ActiveFunction();
     protected String name;//type this neuron belongs to
     protected double outNub;//output value (note: only stored in training mode)
+    protected double E;//expected value from the template
+    protected double gradient;//current gradient
+    protected final double studyPoint = 0.1;
+    protected double sigmaW;//sum of the products of the previous-layer weights and gradients
+    private int backNub = 0;//number of times this node has received backpropagation
 
-    protected Nerve(int id, int upNub, String name) {//index of this neuron within its layer
+    protected Nerve(int id, int upNub, String name, int downNub) {//index of this neuron within its layer
         this.id = id;
         this.upNub = upNub;
         this.name = name;
+        this.downNub = downNub;
         initPower();//generate random weights
     }
 
-    public void sendMessage(long enevtId, double parameter, boolean isStudy) throws Exception {
+    public void sendMessage(long enevtId, double parameter, boolean isStudy, double E) throws Exception {
         if (son.size() > 0) {
             for (Nerve nerve : son) {
-                nerve.input(enevtId, parameter, isStudy);
+                nerve.input(enevtId, parameter, isStudy, E);
             }
         } else {
             throw new Exception("this layer is lastIndex");
         }
     }
 
-    public void backSendMessage(double parameter) throws Exception {//backpropagation
+    private void backSendMessage(long eventId) throws Exception {//backpropagation
         if (fathor.size() > 0) {
-            for (Nerve nerve : fathor) {
-                nerve.backGetMessage(parameter);
+            for (int i = 0; i < fathor.size(); i++) {
+                fathor.get(i).backGetMessage(wg.get(i + 1), eventId);
             }
-        } else {
-            throw new Exception("this layer is firstIndex");
         }
     }
 
-    protected void input(long eventId, double parameter, boolean isStudy) throws Exception {//input
+    protected void input(long eventId, double parameter, boolean isStudy, double E) throws Exception {//input
 
     }
 
-    private void backGetMessage(double parameter) {//backpropagation
-
+    private void backGetMessage(double parameter, long eventId) throws Exception {//backpropagation
+        backNub++;
+        sigmaW = ArithUtil.add(sigmaW, parameter);
+        if (backNub == downNub) {//perform a new gradient calculation
+            backNub = 0;
+            gradient = ArithUtil.mul(activeFunction.sigmoidG(outNub), sigmaW);
+            updatePower(eventId);//update the threshold
+        }
     }
 
+    protected void updatePower(long eventId) throws Exception {//update the threshold
+        double h = ArithUtil.mul(gradient, studyPoint);//gradient descent
+        threshold = ArithUtil.sub(threshold, h);//update the threshold
+        updateW(h, eventId);
+        sigmaW = 0;//reset the accumulated sum to zero
+        backSendMessage(eventId);
+    }
+
+    private void updateW(double h, long eventId) {//h is the learning rate * the current gradient g
+        List<Double> list = features.get(eventId);
+        for (Map.Entry<Integer, Double> entry : dendrites.entrySet()) {
+            int key = entry.getKey();//index of the previous hidden-layer neuron
+            double w = entry.getValue();//weight for the previous hidden-layer neuron with index key
+            double bn = list.get(key - 1);//input received from the previous hidden-layer neuron with index key
+            double wp = ArithUtil.mul(bn, h);//change in the weight of the previous hidden-layer neuron with index key
+            w = ArithUtil.add(w, wp);//corrected weight of the previous hidden-layer neuron with index key
+            double dm = ArithUtil.mul(w, gradient);//value returned to the corresponding neuron
+            wg.put(key, dm);//store the product of the previous-layer weight and the gradient
+            dendrites.put(key, w);//store the corrected weight
+        }
+        features.remove(eventId); //clear the cached previous-layer input parameters for this event
+    }
+
     protected boolean insertParameter(long eventId, double parameter) {//add a parameter
@@ -100,7 +135,7 @@ public abstract class Nerve {
             dendrites.put(i, random.nextDouble());
         }
         //generate a random threshold
-        threshold = ArithUtil.mul(random.nextDouble(), 10);
+        threshold = random.nextDouble();
     }
 }
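
For reference, the arithmetic this patch introduces in backGetMessage, updatePower, and updateW reduces to: gradient = sigmoidG(outNub) * sigmaW, threshold = threshold - studyPoint * gradient, and, for each incoming weight, w = w + input * studyPoint * gradient, with w * gradient handed back to the previous layer through wg. Below is a minimal standalone sketch of that update step, not part of the patch: it uses plain double arithmetic instead of ArithUtil, assumes sigmoidG(out) is the sigmoid derivative expressed through the stored output (out * (1 - out)), and the class name GradientStepSketch is invented for the example.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GradientStepSketch {

    // Assumed behaviour of ActiveFunction.sigmoidG: derivative of the sigmoid
    // expressed through its own output value.
    static double sigmoidG(double out) {
        return out * (1.0 - out);
    }

    public static void main(String[] args) {
        double studyPoint = 0.1;   // learning rate, as in the patch
        double outNub = 0.6;       // stored output of this neuron
        double sigmaW = 0.25;      // accumulated w * gradient from the next layer
        double threshold = 0.4;    // current threshold

        Map<Integer, Double> dendrites = new HashMap<>(); // weights from the previous layer
        dendrites.put(1, 0.5);
        dendrites.put(2, -0.3);
        List<Double> inputs = List.of(1.0, 2.0);          // cached inputs for this event

        // backGetMessage: gradient = sigmoid'(out) * sigmaW
        double gradient = sigmoidG(outNub) * sigmaW;

        // updatePower: h = gradient * studyPoint; the threshold moves against the gradient
        double h = gradient * studyPoint;
        threshold -= h;

        // updateW: w = w + input * h, and pass w * gradient back to the previous layer
        Map<Integer, Double> wg = new HashMap<>();
        for (Map.Entry<Integer, Double> entry : dendrites.entrySet()) {
            int key = entry.getKey();
            double w = entry.getValue() + inputs.get(key - 1) * h;
            wg.put(key, w * gradient);
            dendrites.put(key, w);
        }
        System.out.println("threshold=" + threshold);
        System.out.println("dendrites=" + dendrites);
        System.out.println("wg=" + wg);
    }
}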