I am writing a neural network based on this guide (https://learn.microsoft.com/en-us/archive/msdn-magazine/2012/october/test-run-neural-network-back-propagation-for-programmers), and no matter what the inputs, desired outputs, or layer sizes are, it always outputs one:
using System;
namespace Neural_Network_Test
{
internal class Program
{
private int numInput;
private int numHidden;
private int numOutput;
private float[,] inputNodes;
private float[,] weightsIH;
private float[,] weightsHO;
private float[,] outputsIH;
private float[,] outputsHO;
private float[,] sumIH;
private float[,] sumHO;
private float[,] biasIH;
private float[,] biasHO;
private float[,] gradientsO;
private float[,] gradientsH;
private float[,] prevWeightsDeltaIH;
private float[,] prevBiasesDeltaIH;
private float[,] prevWeightsDeltaHO;
private float[,] prevBiasesDeltaHO;
private float[,] rawIH;
private float[,] desiredOutputs;
private float eta = 0.9f;
private float momentum = 0.4f;
private int iteration = 0;
private static void Main(string[] args)
{
Program program = new Program();
program.numInput = 3;
program.numHidden = 4;
program.numOutput = 2;
program.desiredOutputs = new float[program.numOutput, 1];
for (int i = 0; i < program.numOutput; i++)
{
Console.Write("Output " + (i + 1).ToString() + ": ");
program.desiredOutputs[i, 0] = Convert.ToInt32(Console.ReadLine());
}
Console.Write("Enter Iterations: ");
int numIterations = 0;
numIterations = Convert.ToInt32(Console.ReadLine());
program.SetMisc();
program.SetNodes();
program.SetWeights();
for (int i = 0; i < numIterations; i++)
{
program.ComputeOutputs();
program.BackPropagation();
program.AddDeltas();
Console.WriteLine("Outputs: ");
for (int j = 0; j < program.numOutput; j++)
{
Console.WriteLine("Output " + (j + 1).ToString() + ": " + program.outputsHO[j, 0].ToString());
}
float cost = 0f;
for (int w = 0; w < program.numOutput; w++)
{
cost += 0.5f * (float)Math.Pow(program.desiredOutputs[w, 0] - program.outputsHO[w, 0], 2);
}
Console.WriteLine("Cost: " + cost.ToString());
program.iteration++;
}
}
private void SetNodes()
{
Random rnd = new Random();
for (int i = 0; i < numInput; i++)
{
inputNodes[i, 0] = (float)rnd.NextDouble();
}
}
private void SetWeights()
{
Random rnd = new Random();
weightsIH = new float[numInput, numHidden];
for(int i = 0; i < numInput; i++)
{
for(int j = 0; j < numHidden; j++)
{
if (rnd.NextDouble() > 0.5)
{
weightsIH[i, j] = (float)rnd.NextDouble();
}
else
{
weightsIH[i, j] = (float)rnd.NextDouble() * -1;
}
weightsIH[i, j] = 1f;
}
}
weightsHO = new float[numHidden, numOutput];
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numOutput; j++)
{
if (rnd.NextDouble() > 0.5)
{
weightsHO[i, j] = (float)rnd.NextDouble();
}
else
{
weightsHO[i, j] = (float)rnd.NextDouble() * -1;
}
weightsHO[i, j] = 1f;
}
}
for(int i = 0; i < numHidden; i++)
{
biasIH[i, 0] = rnd.Next(20) - 10;
}
for (int i = 0; i < numOutput; i++)
{
biasHO[i, 0] = rnd.Next(20) - 10;
}
for(int i = 0; i < numHidden; i++)
{
biasIH[i, 0] = 0;
}
for(int i = 0; i < numOutput; i++)
{
biasHO[i, 0] = 0;
}
weightsIH = new float[numInput, numHidden];
weightsHO = new float[numHidden, numOutput];
float value = 0.1f;
for (int i = 0; i < numInput; i++)
{
for (int j = 0; j < numHidden; j++)
{
weightsIH[i, j] = (float)Math.Round(value, 2);
value += 0.1f;
}
}
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numOutput; j++)
{
weightsHO[i, j] = value;
value += 0.1f;
}
}
}
private void SetMisc()
{
sumIH = new float[numHidden, 1];
biasIH = new float[numHidden, 1];
sumHO = new float[numOutput, 1];
biasHO = new float[numOutput, 1];
gradientsO = new float[numOutput, 1];
gradientsH = new float[numHidden, 1];
prevWeightsDeltaIH = new float[numInput, numHidden];
prevWeightsDeltaHO = new float[numHidden, numOutput];
prevBiasesDeltaIH = new float[numHidden, 1];
prevBiasesDeltaHO = new float[numOutput, 1];
outputsIH = new float[numHidden, 1];
outputsHO = new float[numOutput, 1];
rawIH = new float[numHidden, 1];
inputNodes = new float[numHidden, 1];
}
private void ComputeOutputs()
{
//Hidden
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numInput; j++)
{
sumIH[i, 0] += inputNodes[j, 0] * weightsIH[j, i]; //
}
sumIH[i, 0] += biasIH[i, 0];
rawIH[i, 0] = sumIH[i, 0];
outputsIH[i, 0] = SigmoidFunction(sumIH[i, 0]);
}
//Output
for (int i = 0; i < numOutput; i++)
{
for (int j = 0; j < numHidden; j++)
{
sumHO[i, 0] += outputsIH[j, 0] * weightsHO[j, i];
}
sumHO[i, 0] += biasHO[i, 0];
outputsHO[i, 0] = HyperTanFunction(sumHO[i, 0]);
}
}
private void BackPropagation()
{
//Outputs
for (int i = 0; i < numOutput; i++)
{
gradientsO[i, 0] = (desiredOutputs[i, 0] - outputsHO[i, 0]) * (1 - outputsHO[i, 0]) * (1 + outputsHO[i, 0]);
}
//Hidden
for (int i = 0; i < numHidden; i++)
{
float outputComp = 0;
for (int j = 0; j < numOutput; j++)
{
outputComp += gradientsO[j, 0] * weightsHO[i, j];
}
gradientsH[i, 0] = (outputsIH[i, 0]) * (1 - outputsIH[i, 0]) * outputComp;
}
//Delta IH
for (int i = 0; i < numInput; i++)
{
for (int j = 0; j < numHidden; j++)
{
prevWeightsDeltaIH[i, j] = eta * gradientsH[j, 0] * inputNodes[i, 0];
}
}
//Delta HO
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numOutput; j++)
{
}
}
//Bias IH
for (int i = 0; i < numHidden; i++)
{
biasIH[i, 0] = eta * gradientsH[i, 0] * 1;
}
//Bias HO
for (int i = 0; i < numOutput; i++)
{
biasHO[i, 0] = eta * gradientsO[i, 0] * 1;
}
}
private void AddDeltas()
{
if (iteration > 0)
{
for (int i = 0; i < numInput; i++)
{
for (int j = 0; j < numHidden; j++)
{
weightsIH[i, j] += prevWeightsDeltaIH[i, j] + (prevWeightsDeltaIH[i, j] * momentum);
}
}
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numOutput; j++)
{
weightsHO[i, j] += prevWeightsDeltaHO[i, j] + (prevWeightsDeltaHO[i, j] * momentum);
}
}
for (int i = 0; i < numHidden; i++)
{
biasIH[i, 0] += prevBiasesDeltaIH[i, 0] + (prevBiasesDeltaIH[i, 0] * momentum);
}
for (int i = 0; i < numOutput; i++)
{
biasHO[i, 0] += prevBiasesDeltaHO[i, 0] + (prevBiasesDeltaHO[i, 0] * momentum);
}
}
else
{
for (int i = 0; i < numInput; i++)
{
for (int j = 0; j < numHidden; j++)
{
weightsIH[i, j] += prevWeightsDeltaIH[i, j];
}
}
for (int i = 0; i < numHidden; i++)
{
for (int j = 0; j < numOutput; j++)
{
weightsHO[i, j] += prevWeightsDeltaHO[i, j];
}
}
for (int i = 0; i < numHidden; i++)
{
biasIH[i, 0] += prevBiasesDeltaIH[i, 0];
}
for (int i = 0; i < numOutput; i++)
{
biasHO[i, 0] += prevBiasesDeltaHO[i, 0];
}
}
}
private static float SigmoidFunction(float x)
{
if (x < -45.0f) return 0.0f;
else if (x > 45.0) return 1.0f;
else return (float)(1.0f / (1.0f + Math.Exp(-x)));
}
private static float HyperTanFunction(float x)
{
if (x < -10.0) return -1.0f;
else if (x > 10.0) return 1.0f;
else return (float)(Math.Tanh(x));
}
}
}
I have tried changing the values of the input layer, the desired values, layer sizes, checked the backpropagation code, and many other things, but it always outputs one.
Related
I'm trying to implement minimax algorithm, but it isn't working, the computer is just making moves randomly. Here is the method Minimax and ComputerLogic.
static int MiniMax(int[,] board, int depth, bool isMax)
{
int len = 0;
for (int i = 0; i < board.GetLength(0); i++)
{
for (int j = 0; j < board.GetLength(1); j++)
{
if (board[i, j] == 0) len++;
}
}
int best_score, score;
if (WinVariants(board, 1))
{
best_score = -10;
return best_score;
}
else if (WinVariants(board, 2))
{
best_score = 10;
return best_score;
}
else if (len == 0)
{
best_score = 0;
return best_score;
}
if (isMax)
{
best_score = -int.MaxValue;
for (int i = 0; i < board.GetLength(0); i++)
{
for (int j = 0; j < board.GetLength(1); j++)
{
if (board[i, j] == 0)
{
board[i, j] = 1;
score = MiniMax(board, depth + 1, false);
board[i, j] = 0;
best_score = Math.Max(best_score, score);
}
}
}
}
else
{
best_score = int.MaxValue;
for (int i = 0; i < board.GetLength(0); i++)
{
for (int j = 0; j < board.GetLength(1); j++)
{
if (board[i, j] == 0)
{
board[i, j] = 2;
score = MiniMax(board, depth + 1, true);
board[i, j] = 0;
best_score = Math.Min(best_score, score);
}
}
}
}
return best_score;
}
static void ComputerLogic(int[,] grid, int VariantMove)
{
int[,] board = Copy(grid);
int bestScore = -int.MaxValue, score;
int len = 0;
Random rnd = new Random();
if (len <= 2)
{
while (true)
{
int x = rnd.Next(0, 7);
int y = rnd.Next(0, 7);
len++;
if (grid[x, y] == 0)
{
grid[x, y] = 1;
break;
}
}
}
else
{
while (true)
{
for (int i = 0; i < board.GetLength(0); i++)
{
for (int j = 0; j < board.GetLength(1); j++)
{
if (board[i, j] == 0)
{
board[i, j] = 1;
score = MiniMax(board, 0, false);
board[i, j] = 0;
if (score > bestScore)
{
bestScore = score;
grid[i, j] = 1;
break;
}
}
}
}
}
}
}
Too, i have method for Drawing my grid view 2d array.
If I choose the PC to go first, then it goes randomly. Otherwise, my mesh just isn't drawing.
I am guessing that I am not filling my main grid array correctly. Its copy is used for verification - board.
Can please help me with this problem?
I have this code and I work finding squares of "0" in 2d arrays.
How can I make find rectangles and not only squares?
I'm having some difficulty doing this.
// Set first column of S[,]
for (i = 0; i < R; i++)
{
S[i, 0] = M[i, 0];
}
// Set first row of S[][]
for (j = 0; j < C; j++)
{
S[0, j] = M[0, j];
}
// Construct other entries of S[,]
for (i = 1; i < R; i++)
{
for (j = 1; j < C; j++)
{
if (M[i, j] == 0)
S[i, j] = Math.Min(S[i, j - 1], Math.Min(S[i - 1, j], S[i - 1, j - 1])) + 1;
}
}
// Find the maximum entry, and indexes of maximum entry in S[,]
max_of_s = S[0, 0];
max_i = 0;
max_j = 0;
for (i = 0; i < R; i++)
{
for (j = 0; j < C; j++)
{
if (max_of_s < S[i, j])
{
max_of_s = S[i, j];
max_i = i;
max_j = j;
}
}
}
Console.WriteLine("Maximum size sub-matrix is: ");
for (i = max_i; i > max_i - max_of_s; i--)
{
for (j = max_j; j > max_j - max_of_s; j--)
{
M[i, j] = 2;
Console.Write(M[i, j] + " ");
}
Console.WriteLine();
}
Find any size rectangles and not only squares.
I try to solve pricing problem using Gurobi in C#. It's already worked on AMPL and LINGO but got infeasible when I try in C#. Maybe I wrong in indexing or something, please help me
this is my code in C#
static void Main()
{
try
{
// Model
GRBEnv env = new GRBEnv();
GRBModel model = new GRBModel(env);
model.ModelName = "OLRPTW Subproblem";
GRBVar[,] x = new GRBVar[DC1, DC1]; //variable of route
GRBVar[] D = new GRBVar[DC1]; //variable of duration
for (int i = 0; i < DC1; i++)
{
for (int j = 0; j < DC1; j++)
{
x[i, j] = model.AddVar(0.0, 1.0, 0.0, GRB.BINARY, "x" + i + "," + j);
}
}
for (int i = 0; i < DC1; i++)
{
D[i] = model.AddVar(A[i], B[i], 0.0, GRB.CONTINUOUS, "D" + i);
}
for (int i = 0; i < DC1; i++)
{
for (int j = 0; j < DC1; j++)
{
time[i, j] = Distance[i, j] + servicetime[i];
}
}
for (int i = 0; i < DC1; i++)
{
for (int j = 0; j < DC1; j++)
{
ReducedCost[i,j] = Distance[i, j]-DualU[i] + Demand[i]*DualV;
}
}
// Integrate new variables
model.Update();
//objective function
GRBLinExpr sumRC = 0.0;
for (int i = 0; i < DC1; i++)
{
for (int j = 0; j < DC1; j++)
{
sumRC += ReducedCost[i, j] * x[i, j];
}
}
model.SetObjective(sumRC, GRB.MINIMIZE);
//constraint 1
GRBLinExpr sumI = 0.0;
for (int i = 0; i < DC1; i++)
{
int j = 0;
sumI += x[i, j];
}
model.AddConstr(sumI, GRB.EQUAL, 1.0, "C1.1");
GRBLinExpr sumJ = 0.0;
for (int j = 0; j < DC1; j++)
{
int i = 0;
sumI += x[i, j];
}
model.AddConstr(sumJ, GRB.EQUAL,1.0, "C1.2");
//constraint 2
for (int h = 0; h < C; h++)
{
GRBLinExpr sumIH = 0.0;
GRBLinExpr sumHJ = 0.0;
for (int i = 0; i < DC1; i++)
{
sumIH += x[i, h];
}
for (int j = 0; j < DC1; j++)
{
sumHJ += x[h, j];
}
model.AddConstr(sumIH - sumHJ, GRB.EQUAL, 0.0, "C2");
}
//constraint 3
GRBLinExpr sumCap = 0.0;
for (int i = 0; i < DC1; i++)
{
for (int j = 0; j < DC1; j++)
{
sumCap += Demand[i] * x[i, j];
}
}
model.AddConstr(sumCap, GRB.LESS_EQUAL, Capacity, "Cap");
//constraint 4
for (int i = 0; i < C; i++)
{
for (int j = 0; j < C; j++)
if(i!=j)
{
GRBLinExpr Time = D[i] + time[i, j] + D[j];
GRBLinExpr M = 10000 * (1 - x[i, j]);
model.AddConstr(Time <= M, "Time" + i + j);
}
}
for (int i = 0; i < DC1; i++)
{
model.AddConstr(x[i, i], GRB.EQUAL, 0,"C*");
}
// Solve
model.Optimize();
I have a function to calculate the costMatrix like this code below.
public double[,] makeCostMatrixClassic(List<PointD> firstSeq, List<PointD> secondSeq)
{
double[,] costMatrix = new double[firstSeq.Count, secondSeq.Count];
costMatrix[0, 0] = Math.Round(this.getEuclideanDistance(firstSeq[0], secondSeq[0]), 3);
// For D(n,1)
for (int i = 0; i < firstSeq.Count; i++)
{
for (int j = 0; j <= i; j++)
{
costMatrix[i, 0] += Math.Round(this.getEuclideanDistance(firstSeq[j], secondSeq[0]), 3);
}
}
// For D(1,m)
for (int i = 0; i < secondSeq.Count; i++)
{
for (int j = 0; j <= i; j++)
{
costMatrix[0, i] += Math.Round(this.getEuclideanDistance(firstSeq[0], secondSeq[j]), 3);
}
}
// For D(n,m)
for (int i = 1; i < firstSeq.Count; i++)
{
for (int j = 1; j < secondSeq.Count; j++)
{
double min = this.findMin(costMatrix[i - 1, j - 1], costMatrix[i - 1, j], costMatrix[i, j - 1]);
costMatrix[i, j] = min + Math.Round(this.getEuclideanDistance(firstSeq[i], secondSeq[j]), 3);
}
}
return costMatrix;
}
For the 3rd loop (n,m), how could i improve its performance if the "count" of each Sequence is 1 million points.
I have encountered some difficulties while training my nn. When I use, lets say, 10 training sets, at the end of training procces neural network is trained just for the last two. I'm entering same values that I have used to train network and I am getting wrong results save for the last two. It seems to me that new nn memory suppresses older memory. I'm using 64 input neurons, 42 neurons in hidden layer and one output neuron. Sigmoid function is used for activating neurons. Training inputs and expected outputs are in 0 to 1 range. Does anyone have any clue what might be causing the problem?
Neuron b = new Neuron();
Fft f = new Fft();
float e = 2.71828f;
float eta = 0.05f;
float alpha = 0.05f;
float[] saw = new float[42];
float[] dh = new float[42];
float error = 0;
float dto = 0;
Random broj = new Random();
TextReader br = new StreamReader("d:/trening.txt");
TextReader ir = new StreamReader("d:\\input.txt");
float NextFloat(Random rng, float min, float max)
{
return (float)(min + (rng.NextDouble() * (max - min)));
}
public void load()//load memory
{
int i, j;
byte[] floatBytes;
BinaryReader br = new BinaryReader(File.Open("d:/memorija.txt", FileMode.Open));
for (j = 0; j <= 41; j++)
{
for (i = 0; i <= 64; i++)
{
floatBytes = br.ReadBytes(4);
b.w12[i][j] = BitConverter.ToSingle(floatBytes, 0);
}
}
for (j = 0; j <= 1; j++)
{
for (i = 0; i <= 41; i++)
{
floatBytes = br.ReadBytes(4);
b.w23[i][j] = BitConverter.ToSingle(floatBytes, 0);
}
}
br.Close();
}
public void trening()//Get training inputs and expected outputs
{ //Calls process methode
int i, n,ct=0;
using (TextReader tr = new StreamReader("d:/trening.txt"))
{
do
{
ct++;
} while (tr.ReadLine() != null);
tr.Close();
}
for (n = 0; n < (ct-1)/65; n++)
{
for (i = 1; i <= 65; i++)
b.input[i] = Convert.ToSingle(br.ReadLine());
process(b.input[65]);
target.Text = ((b.input[65]).ToString());
}
}
public void process(double t)//Trains nn using backpropagation
{
error = 0;
do
{
int i, j, k;
BinaryWriter bw = new BinaryWriter(File.Open("d:\\memorija.txt", FileMode.Create));
i = k = j = 0;
for (j = 1; j <= 41; j++)
{
b.ulaz2[j] = b.w12[0][j];
for (i = 1; i <= 64; i++)
{
b.ulaz2[j] += b.input[i] * b.w12[i][j];
} b.izlaz2[j] = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz2[j])));
if (b.izlaz2[j] < 0)
MessageBox.Show(b.izlaz2[j].ToString());
}
for (k = 1; k <= 1; k++)
{
b.ulaz3 = b.w23[0][k];
for (j = 1; j <= 41; j++)
{
b.ulaz3 += b.izlaz2[j] * b.w23[j][k];
} b.izlaz = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz3)));
error += (float)(0.5 * (t - b.izlaz) * (t - b.izlaz));
dto = (float)(t - b.izlaz) * b.izlaz * (1 - b.izlaz);
}
for (j = 1; j <= 41; j++)
{
saw[j] = 0;
for (k = 1; k <= 1; k++)
{
saw[j] += dto * b.izlaz2[j];
} dh[j] = saw[j] * b.izlaz2[j] * (1 - b.izlaz2[j]);
}
for (j = 1; j <= 41; j++)
{
b.w12d[0][j] = eta * dh[j] + alpha * b.w12d[0][j];
b.w12[0][j] += b.w12d[0][j];
for (i = 1; i <= 64; i++)
{
b.w12d[i][j] = eta * b.input[i] * dh[j] + alpha * b.w12d[i][j];
b.w12[i][j] += b.w12d[i][j];
}
}
for (k = 1; k <= 1; k++)
{
b.w23d[0][k] = eta * dto + alpha * b.w23d[0][k];
b.w23[0][k] += b.w23d[0][k];
for (j = 1; j <= 41; j++)
{
b.w23d[j][k] = eta * b.izlaz2[j] * dto + alpha * b.w23d[j][k];
b.w23[j][k] += b.w23d[j][k];
}
}
for (j = 0; j <= 41; j++)
{
for (i = 0; i <= 64; i++)
bw.Write(b.w12[i][j]);
}
for (j = 0; j <= 1; j++)
{
for (i = 0; i <= 41; i++)
bw.Write(b.w23[i][j]);
}
bw.Close();
izlazb.Text = Convert.ToString(b.izlaz);
errorl.Text = Convert.ToString(Math.Abs(b.izlaz - b.input[64]));
} while (Math.Abs(b.izlaz - t) > 0.03);
}
public void test()//This methode gets input values and gives output based on previous training
{
int i = 0, j = 0, k = 0;
for (i = 1; i < 65; i++)
b.input[i] = (float)Convert.ToDouble(ir.ReadLine());
for (j = 1; j <= 41; j++)
{
b.ulaz2[j] = b.w12[0][j];
for (i = 1; i <= 64; i++)
{
b.ulaz2[j] += b.input[i] * b.w12[i][j];
} b.izlaz2[j] = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz2[j])));
}
for (k = 1; k <= 1; k++)
{
b.ulaz3 = b.w23[0][k];
for (j = 1; j <= 41; j++)
{
b.ulaz3 += b.izlaz2[j] * b.w23[j][k];
} b.izlaz = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz3)));
} izlazb.Text = Convert.ToString(b.izlaz);
target.Text = "/";
errorl.Text = "/";
}
public void reset()//Resets memory
{
BinaryWriter fw = new BinaryWriter(File.Open("d:\\memorija.txt", FileMode.Create));
int i = 0;
int j = 0;
Random broj = new Random();
for (j = 0; j <= 41; j++)
{
for (i = 0; i <= 64; i++)
{
b.w12[i][j] = 0;
b.w12[i][j] = 2 * (NextFloat(broj, -0.5f, 0.5f));
fw.Write(b.w12[i][j]);
}
}
for (j = 0; j <= 1; j++)
{
for (i = 0; i <= 41; i++)
{
b.w23[i][j] = 0;
b.w23[i][j] = 2 * (NextFloat(broj, -0.5f, 0.5f));
fw.Write(b.w23[i][j]);
}
}
fw.Close();
}
}
}
And neuron class
public class Neuron
{
public float[][] w12 = new float[65][];//(65, 42);
public float[][] w12d = new float[65][];//(65, 42);
public float[][] w23 = new float[42][];//(42,2);
public float[][] w23d = new float[42][];//(42, 2);
public float[] ulaz2 = new float[42];
public float[] izlaz2 = new float[42];
public float ulaz3;
public float[] input =new float[66];
public static float[] ioutput;
public float izlaz;
public void arrayInit()
{
int i, j;
for (i = 0; i <=64; i++)
{
w12[i] = new float[42];
w12d[i] = new float[42];
}
for (i = 0; i <42; i++)
{
w23[i] = new float[2];
w23d[i] = new float[2];
}
for (j = 0; j < 42; j++)
for (i = 0; i <=64; i++)
{
w12[i][j] = 0;
w12d[i][j] = 0;
}
for (j = 0; j < 2; j++)
for (i = 0; i < 42; i++)
{
w23[i][j] = 0;
w23d[i][j] = 0;
}
}
}
I found out what the problem was. I didn't mix training arrays, I was introducing one array to nn until it was trained for it, instead of introducing all arrays in cyclic manner. I hope this will be useful for someone.