apida大改

pull/2/head
lidapeng 5 years ago
parent 5a26118ccf
commit b809a4a60f

@ -2,6 +2,9 @@ package org.wlld.MatrixTools;
import org.wlld.tools.ArithUtil;
import java.util.ArrayList;
import java.util.List;
public class MatrixOperation {
private MatrixOperation() {
@ -370,4 +373,22 @@ public class MatrixOperation {
}
}
}
// Converts a 1×n row-vector Matrix into a List<Double>, preserving column order.
public static List<Double> rowVectorToList(Matrix matrix) throws Exception {
    int columns = matrix.getY();
    List<Double> values = new ArrayList<>(columns);
    for (int col = 0; col < columns; col++) {
        values.add(matrix.getNumber(0, col));
    }
    return values;
}
// Converts a List<Double> back into a 1×n row-vector Matrix (inverse of rowVectorToList).
public static Matrix listToRowVector(List<Double> list) throws Exception {
    Matrix rowVector = new Matrix(1, list.size());
    int index = 0;
    for (double value : list) {
        rowVector.setNub(0, index++, value);
    }
    return rowVector;
}
}

@ -8,4 +8,6 @@ package org.wlld.config;
// Learning-mode constants selecting how a template is trained.
public class StudyPattern {
public static final int Speed_Pattern = 1;//speed-oriented learning mode
public static final int Accuracy_Pattern = 2;//accuracy-oriented learning mode
public static final int Cover_Pattern = 3;//coverage mode (learns from local windows of an image)
}

@ -96,19 +96,34 @@ public class Convolution {
if (isOnce) {
return null;
} else {
return late(myMatrix);
return late(myMatrix, 2);
}
}
private Matrix late(Matrix matrix) throws Exception {//迟化处理
// Convolves the matrix with an edge-detection kernel and keeps only positive
// responses as edge evidence; non-positive cells are left at the Matrix
// default of 0. The "-2" output size assumes a 3x3 kernel — TODO confirm.
public Matrix getBorder(Matrix matrix, Matrix kernel) throws Exception {
    int rows = matrix.getX() - 2;//row count after the convolution
    int cols = matrix.getY() - 2;//column count after the convolution
    Matrix edges = new Matrix(rows, cols);
    for (int r = 0; r < rows; r++) {
        for (int c = 0; c < cols; c++) {
            double response = MatrixOperation.convolution(matrix, kernel, r, c);
            if (response > 0) {//positive response means an edge is present
                edges.setNub(r, c, response);
            }
        }
    }
    return edges;
}
protected Matrix late(Matrix matrix, int size) throws Exception {//迟化处理
int xn = matrix.getX();
int yn = matrix.getY();
int x = xn / 2;//求导后矩阵的行数
int y = yn / 2;//求导后矩阵的列数
int x = xn / size;//求导后矩阵的行数
int y = yn / size;//求导后矩阵的列数
Matrix myMatrix = new Matrix(x, y);//迟化后的矩阵
for (int i = 0; i < xn - 2; i += 2) {
for (int j = 0; j < yn - 2; j += 2) {
Matrix matrix1 = matrix.getSonOfMatrix(i, j, 2, 2);
for (int i = 0; i < xn - size; i += size) {
for (int j = 0; j < yn - size; j += size) {
Matrix matrix1 = matrix.getSonOfMatrix(i, j, size, size);
double maxNub = 0;
for (int t = 0; t < matrix1.getX(); t++) {
for (int k = 0; k < matrix1.getY(); k++) {
@ -119,7 +134,7 @@ public class Convolution {
}
}
//迟化的最大值是 MAXNUB
myMatrix.setNub(i / 2, j / 2, maxNub);
myMatrix.setNub(i / size, j / size, maxNub);
}
}
return myMatrix;

@ -4,6 +4,7 @@ package org.wlld.imageRecognition;
import org.wlld.MatrixTools.Matrix;
import org.wlld.MatrixTools.MatrixOperation;
import org.wlld.config.Classifier;
import org.wlld.config.Kernel;
import org.wlld.config.StudyPattern;
import org.wlld.function.Sigmod;
import org.wlld.i.OutBack;
@ -26,9 +27,11 @@ public class Operation {//进行计算
private MatrixBack matrixBack = new MatrixBack();
private ImageBack imageBack = new ImageBack();
private OutBack outBack;
private double avg;//feature mean learned by coverStudy; used to center features in coverPoint
public Operation(TempleConfig templeConfig) {
this.templeConfig = templeConfig;
avg = templeConfig.getAvg();//restore the stored feature mean from the template config
}
public Operation(TempleConfig templeConfig, OutBack outBack) {
@ -57,6 +60,83 @@ public class Operation {//进行计算
return sub(matrix1);
}
// Folds every value of every inner list onto the running total sigma and
// returns the result (sum via ArithUtil for the project's arithmetic rules).
private double one(List<List<Double>> rightLists, double sigma) {
    double total = sigma;
    for (List<Double> row : rightLists) {
        for (double value : row) {
            total = ArithUtil.add(value, total);
        }
    }
    return total;
}
// Centers the features in place: subtracts avg from every element of every inner list.
private void lastOne(List<List<Double>> rightLists, double avg) {
    for (List<Double> row : rightLists) {
        for (int idx = 0; idx < row.size(); idx++) {
            row.set(idx, ArithUtil.sub(row.get(idx), avg));
        }
    }
}
/**
 * Cover-pattern learning from local windows of one positive and one negative
 * image. Both images are edge-filtered and 2x2 max-pooled, split into 3x3
 * tile features, centered on their joint mean, then fed to the DNN for three
 * epochs.
 *
 * @param matrixRight image containing the target (positive sample)
 * @param matrixWrong image without the target (negative sample)
 * @param right       expected DNN output map for positive windows
 * @param wrong       expected DNN output map for negative windows
 * @throws Exception when the template is not in Cover_Pattern mode
 */
public void coverStudy(Matrix matrixRight, Map<Integer, Double> right
        , Matrix matrixWrong, Map<Integer, Double> wrong) throws Exception {
    if (templeConfig.getStudyPattern() == StudyPattern.Cover_Pattern) {
        //edge-filter then 2x2 max-pool both images first
        matrixRight = convolution.late(convolution.getBorder(matrixRight, Kernel.ALL_Two), 2);
        matrixWrong = convolution.late(convolution.getBorder(matrixWrong, Kernel.ALL_Two), 2);
        List<List<Double>> rightLists = getFeatures(matrixRight);
        List<List<Double>> wrongLists = getFeatures(matrixWrong);
        //mean over BOTH feature sets; each feature list holds 9 values (one 3x3 tile)
        int nub = (rightLists.size() + wrongLists.size()) * 9;
        double sigma = one(rightLists, 0);
        //FIX: the second pass previously re-summed rightLists, so wrongLists
        //never contributed to the mean even though nub counted both sets
        sigma = one(wrongLists, sigma);
        avg = ArithUtil.div(sigma, nub);
        templeConfig.setAvg(avg);//persist so coverPoint can center the same way
        lastOne(rightLists, avg);
        lastOne(wrongLists, avg);
        //features are ready; guard against unequal list sizes before paired iteration
        int size = Math.min(rightLists.size(), wrongLists.size());
        for (int j = 0; j < 3; j++) {//three training epochs
            for (int i = 0; i < size; i++) {
                List<Double> rightList = rightLists.get(i);
                List<Double> wrongList = wrongLists.get(i);
                intoDnnNetwork(1, rightList, templeConfig.getSensoryNerves(), true, right, null);
                intoDnnNetwork(1, wrongList, templeConfig.getSensoryNerves(), true, wrong, null);
            }
        }
    } else {
        throw new Exception("PATTERN IS NOT COVER");
    }
}
/**
 * Scores how well the cover model recognizes the image as class rightId:
 * the fraction of local feature windows the DNN classifies as rightId.
 *
 * @throws Exception when the template is not in Cover_Pattern mode
 */
public double coverPoint(Matrix matrix, int rightId) throws Exception {
    if (templeConfig.getStudyPattern() != StudyPattern.Cover_Pattern) {
        throw new Exception("PATTERN IS NOT COVER");
    }
    //same preprocessing as coverStudy: edge filter then 2x2 max-pool
    matrix = convolution.late(convolution.getBorder(matrix, Kernel.ALL_Two), 2);
    List<List<Double>> lists = getFeatures(matrix);
    lastOne(lists, avg);//center with the mean learned during coverStudy
    int right = 0;
    int all = 0;
    for (List<Double> list : lists) {
        MaxPoint maxPoint = new MaxPoint();
        long pid = IdCreator.get().nextId();
        intoDnnNetwork(pid, list, templeConfig.getSensoryNerves(), false, null, maxPoint);
        if (maxPoint.getId() == rightId) {
            right++;
        }
        all++;
    }
    return ArithUtil.div(right, all);
}
//模板学习
public void study(Matrix matrix, Map<Integer, Double> tagging) throws Exception {
if (templeConfig.getStudyPattern() == StudyPattern.Speed_Pattern) {
@ -135,19 +215,6 @@ public class Operation {//进行计算
Map<Integer, Double> map = new HashMap<>();
map.put(tagging, 1.0);
intoDnnNetwork(1, featureALL, templeConfig.getSensoryNerves(), true, map, null);
// int classifier = templeConfig.getClassifier();
// switch (classifier) {
// case Classifier.DNN:
// dnn(tagging, myMatrixR);
// break;
// case Classifier.LVQ:
// lvq(tagging, myMatrixR);
// break;
// case Classifier.VAvg:
// vectorAvg(tagging, myMatrixR);
// break;
// }
}
}
@ -214,7 +281,7 @@ public class Operation {//进行计算
Map<Integer, Double> map = new HashMap<>();
map.put(tagging, 1.0);
List<Double> feature = getFeature(myMatrix);
System.out.println(feature);
//System.out.println(feature);
intoDnnNetwork(1, feature, templeConfig.getSensoryNerves(), true, map, null);
}
@ -227,6 +294,7 @@ public class Operation {//进行计算
private void lvq(int tagging, Matrix myMatrix) throws Exception {//LVQ学习
LVQ lvq = templeConfig.getLvq();
Matrix vector = MatrixOperation.matrixToVector(myMatrix, true);
System.out.println(vector.getString());
MatrixBody matrixBody = new MatrixBody();
matrixBody.setMatrix(vector);
matrixBody.setId(tagging);
@ -237,12 +305,11 @@ public class Operation {//进行计算
List<Double> list = new ArrayList<>();
Normalization normalization = templeConfig.getNormalization();
double middle = normalization.getAvg();
//double sub = ArithUtil.sub(normalization.getMax(), normalization.getMin());
for (int i = 0; i < matrix.getX(); i++) {
for (int j = 0; j < matrix.getY(); j++) {
double nub = matrix.getNumber(i, j);
if (nub != 0) {
nub = ArithUtil.sub(nub, middle);//middle
nub = ArithUtil.sub(nub, middle);
list.add(nub);
} else {
list.add(0.0);
@ -253,6 +320,32 @@ public class Operation {//进行计算
return list;
}
// Splits the matrix into non-overlapping 3x3 tiles (row-major) and extracts
// one scaled feature list per tile via getListFeature.
// NOTE(review): the bounds use `i < x` with x = getX() - 3, so when a
// dimension is an exact multiple of 3 the final tile row/column is skipped.
// The pooling code elsewhere uses the same pattern — confirm it is intended.
private List<List<Double>> getFeatures(Matrix matrix) throws Exception {
List<List<Double>> lists = new ArrayList<>();
int x = matrix.getX() - 3;//row bound for a tile's top-left corner
int y = matrix.getY() - 3;//column bound for a tile's top-left corner
for (int i = 0; i < x; i += 3) {//walk tile origins down the rows
for (int j = 0; j < y; j += 3) {//walk tile origins across the columns
Matrix myMatrix = matrix.getSonOfMatrix(i, j, 3, 3);
lists.add(getListFeature(myMatrix));
}
}
return lists;
}
// Flattens a sub-matrix row-major into a feature list, scaling each value
// by 1/300 (presumably to normalize the edge-response range — confirm).
private List<Double> getListFeature(Matrix matrix) throws Exception {
    int rows = matrix.getX();
    int cols = matrix.getY();
    List<Double> feature = new ArrayList<>(rows * cols);
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < cols; col++) {
            feature.add(matrix.getNumber(row, col) / 300);
        }
    }
    return feature;
}
//图像视觉 speed 模式
public void look(Matrix matrix, long eventId) throws Exception {
if (templeConfig.getStudyPattern() == StudyPattern.Speed_Pattern) {
@ -295,8 +388,19 @@ public class Operation {//进行计算
Matrix myMatrix = matrixBack.getMatrix();
//卷积层输出即边框回归的输入的特征向量
frameBody.setEndMatrix(myMatrix);
//Matrix vector = MatrixOperation.matrixToVector(myMatrix, true);
int id = getClassificationIdByLVQ(myMatrix);
int classifier = templeConfig.getClassifier();
int id = 0;
switch (classifier) {
case Classifier.LVQ:
id = getClassificationIdByLVQ(myMatrix);
break;
case Classifier.DNN:
id = getClassificationIdByDnn(myMatrix);
break;
case Classifier.VAvg:
id = getClassificationIdByVag(myMatrix);
break;
}
frameBody.setId(id);
}
return toPosition(frameBodies, frame.getWidth(), frame.getHeight());

File diff suppressed because it is too large Load Diff

@ -8,7 +8,7 @@ import java.util.*;
public class VectorK {
private Map<Integer, List<Matrix>> matrixMap = new HashMap<>();
private Map<Integer, Matrix> matrixK = new HashMap<>();
private Map<Integer, Matrix> matrixK = new HashMap<>();//这个作为模型拿出来
private int length;
public VectorK(int length) {
@ -43,27 +43,6 @@ public class VectorK {
return matrix;
}
/**
 * Builds the per-dimension median vector of matrixList (row vectors of
 * width {@code length}). For even-sized lists the upper median is taken.
 *
 * @param matrixList row vectors to aggregate (one per sample)
 * @return a 1 x length row vector of column-wise medians
 */
private Matrix mind(List<Matrix> matrixList) throws Exception {//take the median
    Matrix matrix = new Matrix(1, length);
    List<List<Double>> lists = new ArrayList<>();
    //transpose: collect each column's values across all sample vectors
    for (Matrix matrix1 : matrixList) {
        for (int i = 0; i < matrix1.getY(); i++) {
            if (lists.size() <= i) {
                lists.add(new ArrayList<>());
            }
            lists.get(i).add(matrix1.getNumber(0, i));
        }
    }
    for (int i = 0; i < length; i++) {
        List<Double> list = lists.get(i);
        //FIX: sort once per dimension here instead of after every single
        //insert above — same final order, O(n log n) instead of O(n^2 log n)
        Collections.sort(list);
        int index = list.size() / 2;
        matrix.setNub(0, i, list.get(index));
    }
    return matrix;
}
public void study() throws Exception {
for (Map.Entry<Integer, List<Matrix>> entry : matrixMap.entrySet()) {
List<Matrix> matrixList = entry.getValue();
@ -71,4 +50,20 @@ public class VectorK {
matrixK.put(entry.getKey(), matrix);
}
}
// Exports the learned mean vectors as plain lists keyed by class id
// (serializable form of matrixK; inverse of insertKMatrix).
public Map<Integer, List<Double>> getKMatrix() throws Exception {
    Map<Integer, List<Double>> export = new HashMap<>();
    for (Map.Entry<Integer, Matrix> entry : matrixK.entrySet()) {
        export.put(entry.getKey(), MatrixOperation.rowVectorToList(entry.getValue()));
    }
    return export;
}
// Restores the mean vectors from their serialized list form into matrixK
// (inverse of getKMatrix).
public void insertKMatrix(Map<Integer, List<Double>> matrixList) throws Exception {
    for (Map.Entry<Integer, List<Double>> entry : matrixList.entrySet()) {
        matrixK.put(entry.getKey(), MatrixOperation.listToRowVector(entry.getValue()));
    }
}
}

@ -86,6 +86,8 @@ public class LVQ {
long type = matrixBody.getId();//类别
double distEnd = 0;
int id = 0;
double dis0 = 0;
double dis1 = 1;
for (int i = 0; i < typeNub; i++) {
MatrixBody modelBody = model[i];
Matrix modelMatrix = modelBody.getMatrix();
@ -95,10 +97,16 @@ public class LVQ {
id = modelBody.getId();
distEnd = dist;
}
if (i == 0) {
dis0 = dist;
} else {
dis1 = dist;
}
}
MatrixBody modelBody = model[id];
Matrix modelMatrix = modelBody.getMatrix();
boolean isRight = id == type;
System.out.println("type==" + type + ",dis0==" + dis0 + ",dis1==" + dis1);
Matrix matrix1 = op(matrix, modelMatrix, isRight);
modelBody.setMatrix(matrix1);
}

@ -10,36 +10,26 @@ import org.wlld.tools.ArithUtil;
* @date 1:29 2020/3/15
*/
public class Normalization {
private double max;//largest feature value seen so far
private double min;//smallest non-zero feature value seen so far
private long number;//count of non-zero features accumulated by putFeature
private double avg;//mean of the non-zero features (computed by avg())
private double sigma;//running sum of the non-zero features
public double getMax() {
return max;
}
public double getMin() {
return min;
}
public double getAvg() {
return avg;
}
public void setAvg(double avg) {
this.avg = avg;
}
/**
 * Finalizes the running statistics: sets avg to sigma / number, the mean of
 * all non-zero feature values accumulated by putFeature.
 */
public void avg() {
    //FIX: removed the dead branch `if (avg == 0) avg = ArithUtil.div(sigma, number);`
    //which recomputed the identical deterministic expression (a no-op),
    //and the stray debug println (this commit comments out printlns elsewhere)
    avg = ArithUtil.div(sigma, number);
}
public void putFeature(double nub) {
if (nub > max) {
max = nub;
}
if (min == 0 || (nub != 0 && nub < min)) {
min = nub;
}
if (nub != 0) {
sigma = ArithUtil.add(sigma, nub);
number++;

@ -1,5 +1,6 @@
package org.wlld.nerveEntity;
import org.wlld.MatrixTools.Matrix;
import org.wlld.imageRecognition.border.Frame;
import org.wlld.imageRecognition.modelEntity.KBorder;
import org.wlld.imageRecognition.modelEntity.LvqModel;
@ -22,7 +23,34 @@ public class ModelParameter {
private DymNerveStudy dymOutNerveStudy = new DymNerveStudy();//dynamic-nerve output layer
private Map<Integer, KBorder> borderMap = new HashMap<>();//frame-distance (border regression) model
private LvqModel lvqModel;//LVQ model
private Map<Integer, List<Double>> matrixK = new HashMap<>();//mean feature vectors keyed by class id
private Frame frame;//prior frame
private double avg;//feature mean used by the cover pattern
private double dnnAvg;//feature mean used by the DNN classifier
public double getDnnAvg() {
return dnnAvg;
}
public void setDnnAvg(double dnnAvg) {
this.dnnAvg = dnnAvg;
}
public double getAvg() {
return avg;
}
public Map<Integer, List<Double>> getMatrixK() {
return matrixK;
}
public void setMatrixK(Map<Integer, List<Double>> matrixK) {
this.matrixK = matrixK;
}
public void setAvg(double avg) {
this.avg = avg;
}
public Frame getFrame() {
return frame;

@ -45,7 +45,7 @@ public class OutNerve extends Nerve {
} else {
this.E = 0;
}
System.out.println("E===" + this.E + ",out==" + out+",nerveId=="+getId());
System.out.println("E===" + this.E + ",out==" + out+",nerveId=="+getId());
gradient = outGradient();//当前梯度变化
//调整权重 修改阈值 并进行反向传播
updatePower(eventId);
@ -66,8 +66,8 @@ public class OutNerve extends Nerve {
Matrix myMatrix = dynamicNerve(matrix, eventId, isKernelStudy);
if (isKernelStudy) {//回传
Matrix matrix1 = matrixMapE.get(E);
//System.out.println("E================" + E);
//System.out.println(myMatrix.getString());
System.out.println("E================" + E);
System.out.println(myMatrix.getString());
if (matrix1.getX() <= myMatrix.getX() && matrix1.getY() <= myMatrix.getY()) {
double g = getGradient(myMatrix, matrix1);
backMatrix(g, eventId);

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long
Loading…
Cancel
Save