How to apply weights in a neural network? - C#

I am attempting to learn how to implement neural networks. The one thing I can't seem to find anywhere is how to apply and store the weights. I got this to work in a simple [2 input, 4 output] network by manually doing all of the multiplication for each output. But now I have 4 inputs, 4 hidden neurons in 2 layers, and 4 outputs, and I am storing the weights in a 4x4x2 array. I can't seem to grasp how to apply them using for loops. The code below shows what I currently have; the parts commented as "Neural Network" are what I am trying to figure out.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
//Import SFML
using SFML.Graphics;
using SFML.System;
using SFML.Window;
namespace GeneticAlgorithims
{
class Program
{
//Neural Network Variables
public static int Inputs = 4;
public static int Outputs = 4;
public static int Hiddens = 2;
//End Variables
public static Random rand = new Random();
public static List<Organism> organisms = new List<Organism>();
public static RenderWindow window = new RenderWindow(new VideoMode(800,600), "Neural Networks", Styles.Default, new ContextSettings(512, 512, 16));
static void Main(string[] args)
{
Generate();
while(window.IsOpen)
{
window.DispatchEvents();
Update();
window.Clear(Color.Cyan);
Draw();
window.Display();
}
}
static void Generate()
{
for (int x = 0; x < 2; x++)
{
organisms.Add(new Organism(new Vector2f(rand.Next(0, (int)window.Size.X), rand.Next(0, (int)window.Size.Y)), new Matrix(new double[Inputs, Inputs, Hiddens])));
}
foreach (Organism organism in organisms)
{
organism.color = new Color((byte)rand.Next(0, 255),(byte)rand.Next(0, 255),(byte)rand.Next(0, 255));
organism.rotation = rand.Next(1, 360);
}
}
static void Update()
{
//Image map = window.Capture();
foreach(Organism organism in organisms)
{
//Get Near Organism
Organism near = new Organism();
foreach (Organism check in organisms)
{
if (check != organism)
{
if (Math.Abs(check.position.X - organism.position.X) < 128)
{
if (Math.Abs(check.position.Y - organism.position.Y) < 128)
{
near = check;
}
}
}
}
//Begin Neural Network
int CurrentColor = near.color.R;
float DistanceX = (near.position != null) ? Math.Abs(near.position.X - organism.position.X) : 0;
float DistanceY = (near.position != null) ? Math.Abs(near.position.Y - organism.position.Y) : 0;
float Health = organism.Health;
Console.WriteLine(CurrentColor);
for (int A = 0; A < Inputs; A++)
{
for (int B = 0; B < Outputs; B += 4)
{
for (int C = 0; C < Hiddens; C++)
{
organism.rotation +=
}
}
}
//End Neural Network
organism.Age++;
organism.position.X += organism.Speed * (float)Math.Cos(organism.rotation * 0.0174f);
organism.position.Y += organism.Speed * (float)Math.Sin(organism.rotation * 0.0174f);
float X = organism.position.X;
float Y = organism.position.Y;
if (organism.position.X > window.Size.X) organism.position.X = 0;
if (organism.position.X < 0) organism.position.X = window.Size.X;
if (organism.position.Y > window.Size.Y) organism.position.Y = 0;
if (organism.position.Y < 0) organism.position.Y = window.Size.Y;
if(organism.Age % 2500 == 0)
{
Organism newOrganism = new Organism(organism.position, organism.brain);
newOrganism.rotation = rand.Next(0, 360);
newOrganism.color = organism.color;
//organisms.Add(newOrganism);
//return;
}
if (organism.Age > 3000)
{
//organisms.Remove(organism);
//return;
}
}
}
static void Draw()
{
Sprite bufferSprite = new Sprite(new Texture("img.png"));
foreach (Organism organism in organisms)
{
Vertex[] line = new Vertex[2];
line[0] = new Vertex(organism.position, Color.Red);
line[1] = new Vertex(new Vector2f(organism.position.X + (float)(120 * Math.Cos((organism.rotation - 30) * 0.0174)), organism.position.Y + (float)(120 * Math.Sin((organism.rotation - 30) * 0.0174))), Color.Red);
window.Draw(line, PrimitiveType.Lines);
line[0] = new Vertex(organism.position, Color.Red);
line[1] = new Vertex(new Vector2f(organism.position.X + (float)(120 * Math.Cos((organism.rotation + 30) * 0.0174)), organism.position.Y + (float)(120 * Math.Sin((organism.rotation + 30) * 0.0174))), Color.Red);
window.Draw(line, PrimitiveType.Lines);
bufferSprite.Origin = new Vector2f(8, 8);
bufferSprite.Color = organism.color;
bufferSprite.Rotation = organism.rotation;
bufferSprite.Position = organism.position;
window.Draw(bufferSprite);
}
}
}
}

As I said in the comments, you'd have a layer class holding the weights and a network class holding the layers. There's no need to break things up any further than that in most cases.
So for the layer class, each output neuron has one weight per input plus a bias:
class Layer
{
    double[] weights;
    int nOut, nIn;

    public Layer(int inputs, int outputs)
    {
        nIn = inputs;
        nOut = outputs;
        weights = new double[(inputs + 1) * outputs]; // assume random weights chosen
    }

    public double[] ProcessInput(double[] input)
    {
        double[] output = new double[nOut]; // initialised to zero
        for (int i = 0; i < nOut; i++)
        {
            output[i] = weights[i * (nIn + 1)]; // bias always present
            for (int j = 0; j < nIn; j++)
            {
                output[i] += input[j] * weights[i * (nIn + 1) + j + 1]; // skips the bias slot
            }
            output[i] = ActivationFunction(output[i]);
        }
        return output;
    }

    // e.g. a sigmoid; any squashing non-linearity will do here
    static double ActivationFunction(double x) => 1.0 / (1.0 + Math.Exp(-x));
}
Then for the network class:
class Network
{
    List<Layer> layers = new List<Layer>();
    // skipping adding and setting up layers

    public double[] ProcessInput(double[] input)
    {
        double[] temp = input;
        foreach (Layer layer in layers)
        {
            temp = layer.ProcessInput(temp);
            // the input for the next layer is the output from the previous one
        }
        return temp;
    }
}
Hopefully that's clear
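For the 4-input, two 4-neuron hidden layer, 4-output network in the question, the wiring might look like the sketch below. This assumes the Network class gets some way to add layers (an AddLayer method, say, which is skipped above), and it reuses the input variables from the question's Update() loop purely for illustration:
// Hypothetical set-up for a 4-4-4-4 network using the Layer/Network sketch above.
// AddLayer is an assumed helper; the layer set-up code was skipped in the answer.
var net = new Network();
net.AddLayer(new Layer(4, 4)); // inputs   -> hidden layer 1
net.AddLayer(new Layer(4, 4)); // hidden 1 -> hidden layer 2
net.AddLayer(new Layer(4, 4)); // hidden 2 -> outputs

double[] inputs = { CurrentColor, DistanceX, DistanceY, Health };
double[] outputs = net.ProcessInput(inputs);
// Interpret the outputs however the simulation needs, e.g. steer by the first output:
organism.rotation += (float)outputs[0];
With this layout the weights live inside each Layer, so there is no need for the 4x4x2 array or the triple for loop from the question: the loops over inputs and outputs happen inside ProcessInput.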

Related

System.StackOverflowException was thrown during debugging, but the line it breaks on is different each debug session

I'm creating a fairly simple C# MonoGame project using Visual Studio 2019. I am getting a stack overflow error, but it breaks on a different, seemingly random line each time I debug; sometimes it breaks on nothing more than a "{". I've had weird errors like this before that seemed like something was messed up in lower-level code I don't have access to. I've already tried cleaning the solution, rebuilding the project, closing and reopening the solution, unloading/reloading the project, and restarting my PC. Can anyone help me out?
I managed to track down which code block is breaking:
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using System;
namespace Project1
{
class Map
{
public Vector2 Player;
public Vector2 Destination;
public GameState gamestate = new GameState();
public int Rows { get; set; }
public int Cols { get; set; }
private SpriteBatch spriteBatch;
private Texture2D texture;
private Color[] colors;
private bool[,] state;
private bool[,] next_state;
private Random random = new Random();
public Map(GraphicsDevice graphicsDevice, SpriteBatch spriteBatch, int rows, int cols)
{
texture = new Texture2D(graphicsDevice, cols, rows);
this.spriteBatch = spriteBatch;
colors = new Color[rows * cols];
state = new bool[rows, cols];
next_state = new bool[rows, cols];
this.Rows = rows;
this.Cols = cols;
Player = new Vector2(random.Next(Cols), random.Next(Rows));
Destination = new Vector2(random.Next(Cols), random.Next(Rows));
gamestate.Reset();
}
public void Randomize()
{
Player = new Vector2(random.Next(Cols), random.Next(Rows));
Destination = new Vector2(random.Next(Cols), random.Next(Rows));
for (int r = 0; r < Rows; r++)
{
for (int c = 0; c < Cols; c++)
{
int ran = random.Next(3);
if (ran == 1) next_state[r, c] = true;
else next_state[r, c] = false;
}
}
next_state[(int)Player.Y, (int)Player.X] = true;
next_state[(int)Destination.Y, (int)Destination.X] = true;
Apply();
}
int neighbors;
public void Step()
{
for (int r = 0; r < Rows; r++)
{
for (int c = 0; c < Cols; c++)
{
neighbors = 0;
if (r < Rows - 1 && state[r + 1, c]) neighbors++;
if (c < Cols - 1 && state[r, c + 1]) neighbors++;
if (r > 0 && state[r - 1, c]) neighbors++;
if (c > 0 && state[r, c - 1]) neighbors++;
if (r < Rows - 1 && c < Cols - 1 && state[r + 1, c + 1]) neighbors++;
if (r > 0 && c > 0 && state[r - 1, c - 1]) neighbors++;
if (r < Rows - 1 && c > 0 && state[r + 1, c - 1]) neighbors++;
if (r > 0 && c < Cols - 1 && state[r - 1, c + 1]) neighbors++;
if (state[r, c] == true)
{
if (neighbors < 2 || neighbors > 3)
next_state[r, c] = false;
}
else
{
if (neighbors == 3) next_state[r, c] = true;
}
}
}
gamestate.Moves++;
Apply();
}
public void Apply()
{
int i = 0;
state = next_state;
for (int r = 0; r < Rows; r++)
{
for (int c = 0; c < Cols; c++)
{
if (state[r, c] == true) colors[i] = Color.Black;
else colors[i] = Color.White;
if (state[r, c] == true && Player == new Vector2(c, r)) colors[i] = Color.Red;
else if (state[r, c] == false && Player == new Vector2(c, r)) colors[i] = Color.Green;
if (Destination == new Vector2(c, r)) colors[i] = Color.Blue;
i++;
}
}
texture.SetData(colors);
if (Player == Destination)
{
gamestate.Level++;
gamestate.SaveState();
gamestate.Reset();
Randomize();
}
if (state[(int)Player.Y, (int)Player.X])
{
gamestate.Level = 0;
Randomize();
}
}
Vector2 size_to_draw;
Vector2 position_to_draw;
Rectangle rectangle_to_draw;
public void Draw()
{
size_to_draw = new Vector2(Cols * 15, Rows * 15);
position_to_draw = new Vector2((Game1.screen.X - size_to_draw.X) / 2, (Game1.screen.Y - size_to_draw.Y) / 2);
rectangle_to_draw = new Rectangle((int)position_to_draw.X, (int)position_to_draw.Y, (int)size_to_draw.X, (int)size_to_draw.Y);
spriteBatch.Draw(texture, rectangle_to_draw, Color.White);
gamestate.DrawData(spriteBatch);
}
}
}
When I take out
if (Player == Destination)
{
gamestate.Level++;
gamestate.SaveState();
gamestate.Reset();
Randomize();
}
if (state[(int)Player.Y, (int)Player.X])
{
gamestate.Level = 0;
Randomize();
}
from the Apply() method, the program runs.
Here is the GameState class:
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using System.Collections.Generic;
namespace Project1
{
class GameState
{
public float Clock { get; set; }
public int Level { get; set; }
public int Moves { get; set; }
public List<SaveState> saves = new List<SaveState>();
public void SaveState()
{
saves.Add(new SaveState(Level, Moves, Clock));
}
public void Reset()
{
Clock = 0;
Moves = 0;
}
public void DrawData(SpriteBatch spriteBatch)
{
for (int i = 0; i < saves.Count; i++)
{
spriteBatch.DrawString(Game1.font, saves[(saves.Count - 1) - i].String, new Vector2(10, 10 + (30 * i)), Color.White);
}
}
}
struct SaveState
{
public SaveState(int level, int moves, float time)
{
Level = level;
Moves = moves;
Timestamp = time;
String = " Level: " + Level + " Moves: " + Moves + " Time: " + Timestamp;
}
int Level;
int Moves;
float Timestamp;
public string String;
}
}
Randomize() calls Apply(), and the code you removed from Apply() to make it work calls Randomize() again.
Since each of those (infinitely nested) method calls pushes a new frame onto the stack, you eventually get a stack overflow.
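One way to break the cycle (a sketch, not the only possible fix) is to keep Apply() purely about copying the state into the texture and move the win/lose checks into a helper that Randomize() never re-enters:
// Sketch: Apply() no longer calls Randomize(); the checks run once per Step() instead.
// CheckPlayerState is a hypothetical helper added for illustration.
public void Step()
{
    // ... existing neighbour logic ...
    gamestate.Moves++;
    Apply();            // only copies state and updates the texture now
    CheckPlayerState(); // win/lose check happens once per step
}

private void CheckPlayerState()
{
    if (Player == Destination)
    {
        gamestate.Level++;
        gamestate.SaveState();
        gamestate.Reset();
        Randomize(); // Randomize -> Apply, but Apply no longer calls back into this method
    }
    else if (state[(int)Player.Y, (int)Player.X])
    {
        gamestate.Level = 0;
        Randomize();
    }
}
Note that Randomize() also sets the player's cell to true, so with the original code the state[(int)Player.Y, (int)Player.X] check in Apply() re-triggered Randomize() on every call, which is exactly the infinite call chain described above.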

My neural network works well only with no hidden layers

You may think I am crazy, but I decided to write a neural network from scratch in C# for study purposes. Please be patient, I still have little experience. English is not my first language, so I apologise in advance for any mistakes.
I started with a program for handwritten digit recognition using the MNIST database. I read through a book covering the algorithms involved and wrote this code.
public class NeuralNetwork
{
private List<Matrix<double>> weights = new List<Matrix<double>>();
private List<Vector<double>> biases = new List<Vector<double>>();
private Random random = new Random();
private List<Image> test_data;
private int[] layer_sizes;
public NeuralNetwork(params int[] layers)
{
layer_sizes = layers;
for (int i = 0; i < layers.Length - 1; i++)
{
var weigthLayer = Matrix<double>.Build.Dense(layers[i + 1], layers[i], (k, j) => random.NextDouble());
weights.Add(weigthLayer);
}
for (int i = 1; i < layers.Length; i++)
{
var biasesLayer = Vector<double>.Build.Dense(layers[i], (k) => random.NextDouble());
biases.Add(biasesLayer);
}
}
public Vector<double> FeedForward(Vector<double> a)
{
for (int i = 0; i < weights.Count; i++)
{
a = Sigmoid(weights[i].Multiply(a) + biases[i]);
}
return Sigmoid(a);
}
public void SGD(ITrainingDataProvider dataProvider, int epochs, int chuck_size, double eta)
{
test_data = new MnistReader().ReadTestData();
Console.WriteLine("SGD algorithm started");
var training_data = dataProvider.ReadTrainingData();
Console.WriteLine("Training data has beeen read");
Console.WriteLine($"Training data test: {Test(training_data)}%");
Console.WriteLine($"Test data test: {Test(test_data)}%");
for (int epoch = 0; epoch < epochs; epoch++)
{
training_data = training_data.OrderBy(item => random.Next()).ToList();
List<List<Image>> chunks = training_data.ChunkBy(chuck_size);
foreach (List<Image> chunk in chunks)
{
ProcessChunk(chunk, eta);
}
Console.WriteLine($"Epoch: {epoch + 1}/{epochs}");
Console.WriteLine($"Training data test: {Test(training_data)}%");
Console.WriteLine($"Test data test: {Test(test_data)}%");
}
Console.WriteLine("Done!");
}
private double Test(List<Image> data)
{
int count = 0;
foreach (Image im in data)
{
var output = FeedForward(im.DataToVector());
int number = output.MaximumIndex();
if (number == (int)im.Label)
{
count++;
}
}
return (double)count / data.Count * 100;
}
private void ProcessChunk(List<Image> chunk, double eta)
{
Delta[] deltas = new Delta[chunk.Count];
for (int i = 0; i < chunk.Count; i++)
{
Image image = chunk[i];
var input = image.DataToVector();
var desired_output = Vector<double>.Build.Dense(layer_sizes[layer_sizes.Length - 1]);
desired_output[(int)image.Label] = 1;
Delta delta = BackPropagation(input, desired_output);
deltas[i] = delta;
}
Delta sum = deltas[0];
for (int i = 1; i < deltas.Length; i++)
{
sum += deltas[i];
}
Delta average_delta = sum / deltas.Length;
for (int i = 0; i < layer_sizes.Length - 1; i++)
{
weights[i] += average_delta.d_weights[i].Multiply(eta);
biases[i] += average_delta.d_biases[i].Multiply(eta);
}
}
private Delta BackPropagation(Vector<double> input, Vector<double> desired_output)
{
List<Vector<double>> activations = new List<Vector<double>>();
List<Vector<double>> zs = new List<Vector<double>>();
Vector<double> a = input;
activations.Add(input);
for (int i = 0; i < layer_sizes.Length - 1; i++)
{
var z = weights[i].Multiply(a) + biases[i];
zs.Add(z);
a = Sigmoid(z);
activations.Add(a);
}
List<Vector<double>> errors = new List<Vector<double>>();
List<Matrix<double>> delta_weights = new List<Matrix<double>>();
List<Vector<double>> delta_biases = new List<Vector<double>>();
var error = CDerivative(activations[activations.Count - 1], desired_output).HProd(SigmoidDerivative(zs[^1]));
errors.Add(error);
int steps = 0;
for (int i = layer_sizes.Length - 2; i >= 1; i--)
{
var layer_error = weights[i].Transpose().Multiply(errors[steps]).HProd(SigmoidDerivative(zs[i - 1]));
errors.Add(layer_error);
steps++;
}
errors.Reverse();
for (int i = layer_sizes.Length - 1; i >= 1; i--)
{
var delta_layer_weights = (errors[i - 1].ToColumnMatrix() * activations[i - 1].ToRowMatrix()).Multiply(-1);
delta_weights.Add(delta_layer_weights);
var delta_layer_biases = errors[i - 1].Multiply(-1);
delta_biases.Add(delta_layer_biases);
}
delta_biases.Reverse();
delta_weights.Reverse();
return new Delta { d_weights = delta_weights, d_biases = delta_biases };
}
private Vector<double> CDerivative(Vector<double> x, Vector<double> y)
{
return x - y;
}
private Vector<double> Sigmoid(Vector<double> x)
{
for (int i = 0; i < x.Count; i++)
{
x[i] = 1.0 / (1.0 + Math.Exp(-x[i]));
}
return x;
}
private Vector<double> SigmoidDerivative(Vector<double> x)
{
for (int i = 0; i < x.Count; i++)
{
x[i] = Math.Exp(-x[i]) / Math.Pow(1.0 + Math.Exp(-x[i]), 2);
}
return x;
}
}
The Delta class is a simple DTO that stores the weight and bias changes in a single object.
public class Delta
{
public List<Matrix<double>> d_weights { get; set; }
public List<Vector<double>> d_biases { get; set; }
public static Delta operator +(Delta d1, Delta d2)
{
Delta result = d1;
for (int i = 0; i < d2.d_weights.Count; i++)
{
result.d_weights[i] += d2.d_weights[i];
}
for (int i = 0; i < d2.d_biases.Count; i++)
{
result.d_biases[i] += d2.d_biases[i];
}
return result;
}
public static Delta operator /(Delta d1, double d)
{
Delta result = d1;
for (int i = 0; i < d1.d_weights.Count; i++)
{
result.d_weights[i] /= d;
}
for (int i = 0; i < d1.d_biases.Count; i++)
{
result.d_biases[i] /= d;
}
return result;
}
}
Everything ended up working, however, networks with one or more hidden layers don't show any significant results. At best they reach about 70% accuracy, and then the learning curve drops and the accuracy falls back to 20-30%. Typically the accuracy graph looks like a square root function, but in my case it looks more like an upside-down parabola (see the attached graph of my attempts with different numbers of neurons in the first hidden layer).
After a few tries, I found out that without any hidden layers the algorithm works just fine: it learns up to 90% accuracy and the graph never falls. Apparently the bug is somewhere in the back-propagation algorithm. It doesn't cause any problems with only input and output layers, but it does as soon as I add a hidden layer.
I have been trying to find the problem for a long time and I hope that someone, smarter than me, will be able to help.
Thanks in advance!

C# multi-threading only using 25% of CPU

I have a process for a sort of genetic nesting algorithm that I am trying to multi-thread. The process looks something like the following.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace ConsoleApp1
{
class Program
{
static void Main(string[] args)
{
CurrentNest = new CuttingRun();
for (int i = 0; i < 80; i++)
{
double w = GetRandomNumber(24, 50);
double h = GetRandomNumber(10, 15);
CurrentNest.PartList.Add(new LwCube { Width = w, Height = h, PartID = i });
}
//Task.Run(() =>
//{
// Parallel.For(0, 64, (i) => Parallel_Nest());
//});
while (true)
{
Parallel_Nest();
}
//Console.ReadKey();
}
public static double GetRandomNumber(double minimum, double maximum)
{
Random random = new Random();
return random.NextDouble() * (maximum - minimum) + minimum;
}
public static CuttingRun CurrentNest { get; set; }
public static void Parallel_Nest()
{
Random random = new Random();
int randomNumber = random.Next(2000, 10000);
var retVal = Nester.Nest_Parts(CurrentNest, randomNumber);
CurrentNest.Iterations++;
if (CurrentNest.SavedList.Count > 0)
{
if (retVal.Count < CurrentNest.SavedList.Count)
{
CurrentNest.SavedList = retVal;
}
}
else
{
CurrentNest.SavedList = retVal;
}
Console.WriteLine(CurrentNest.Iterations.ToString() + " " + CurrentNest.SavedList.Count.ToString());
if (CurrentNest.SavedList != retVal)
{
retVal.Clear();
}
}
}
//Models
public class LwSheet
{
public LwSheet(double width, double height)
{
SheetWidth = width;
SheetHeight = height;
FreeRectangles.Add(new LwCube { Width = width, Height = height, X = 0, Y = 0 });
}
public List<LwCube> UsedRectangles = new List<LwCube>();
public List<LwCube> FreeRectangles = new List<LwCube>();
public double SheetWidth { get; set; }
public double SheetHeight { get; set; }
public double TotalUsed { get; set; }
public bool Place_Part(LwCube prt)
{
bool retVal = false;
LwCube bestNode = FindPositionForBestAreaFit(prt);
//if the bestNode has a height then add our parts to the list
if (bestNode.Height > 0)
{
bestNode.PartID = prt.PartID;
int numRectanglesToProcess = FreeRectangles.Count;
for (int i = 0; i < numRectanglesToProcess; ++i)
{
if (SplitFreeNode(FreeRectangles[i], ref bestNode))
{
FreeRectangles.RemoveAt(i);
--i;
--numRectanglesToProcess;
}
}
PruneFreeList();
UsedRectangles.Add(bestNode);
retVal = true;
}
return retVal;
}
bool SplitFreeNode(LwCube freeNode, ref LwCube usedNode)
{
// Test with SAT if the rectangles even intersect.
if (usedNode.X >= freeNode.X + freeNode.Width || usedNode.X + usedNode.Width <= freeNode.X ||
usedNode.Y >= freeNode.Y + freeNode.Height || usedNode.Y + usedNode.Height <= freeNode.Y)
return false;
if (usedNode.X < freeNode.X + freeNode.Width && usedNode.X + usedNode.Width > freeNode.X)
{
// New node at the top side of the used node.
if (usedNode.Y > freeNode.Y && usedNode.Y < freeNode.Y + freeNode.Height)
{
LwCube newNode = new LwCube { Width = freeNode.Width, X = freeNode.X, Y = freeNode.Y };
newNode.Height = usedNode.Y - newNode.Y;
FreeRectangles.Add(newNode);
}
// New node at the bottom side of the used node.
if (usedNode.Y + usedNode.Height < freeNode.Y + freeNode.Height)
{
LwCube newNode = new LwCube { Width = freeNode.Width, X = freeNode.X };
newNode.Y = usedNode.Y + usedNode.Height;
newNode.Height = freeNode.Y + freeNode.Height - (usedNode.Y + usedNode.Height);
FreeRectangles.Add(newNode);
}
}
if (usedNode.Y < freeNode.Y + freeNode.Height && usedNode.Y + usedNode.Height > freeNode.Y)
{
// New node at the left side of the used node.
if (usedNode.X > freeNode.X && usedNode.X < freeNode.X + freeNode.Width)
{
LwCube newNode = new LwCube { Height = freeNode.Height, X = freeNode.X, Y = freeNode.Y };
newNode.Width = usedNode.X - newNode.X;
FreeRectangles.Add(newNode);
}
// New node at the right side of the used node.
if (usedNode.X + usedNode.Width < freeNode.X + freeNode.Width)
{
LwCube newNode = new LwCube { Height = freeNode.Height, Y = freeNode.Y };
newNode.X = usedNode.X + usedNode.Width;
newNode.Width = freeNode.X + freeNode.Width - (usedNode.X + usedNode.Width);
FreeRectangles.Add(newNode);
}
}
return true;
}
void PruneFreeList()
{
for (int i = 0; i < FreeRectangles.Count; ++i)
for (int j = i + 1; j < FreeRectangles.Count; ++j)
{
if (IsContainedIn(FreeRectangles[i], FreeRectangles[j]))
{
FreeRectangles.RemoveAt(i);
--i;
break;
}
if (IsContainedIn(FreeRectangles[j], FreeRectangles[i]))
{
FreeRectangles.RemoveAt(j);
--j;
}
}
}
bool IsContainedIn(LwCube a, LwCube b)
{
return a.X >= b.X && a.Y >= b.Y
&& a.X + a.Width <= b.X + b.Width
&& a.Y + a.Height <= b.Y + b.Height;
}
LwCube FindPositionForBestAreaFit(LwCube prt)
{
LwCube bestNode = new LwCube();
var bestAreaFit = SheetWidth * SheetHeight;
for (int i = 0; i < FreeRectangles.Count; ++i)
{
double areaFit = FreeRectangles[i].Width * FreeRectangles[i].Height - prt.Width * prt.Height;
// Try to place the rectangle in upright (non-flipped) orientation.
if (FreeRectangles[i].Width >= prt.Width && FreeRectangles[i].Height >= prt.Height)
{
if (areaFit < bestAreaFit)
{
bestNode.X = FreeRectangles[i].X;
bestNode.Y = FreeRectangles[i].Y;
bestNode.Height = prt.Height;
bestNode.Width = prt.Width;
bestAreaFit = areaFit;
}
}
}
return bestNode;
}
}
public class LwCube
{
public int PartID { get; set; }
public double Width { get; set; }
public double Height { get; set; }
public double X { get; set; }
public double Y { get; set; }
}
public class CuttingRun
{
public List<LwCube> PartList = new List<LwCube>();
public List<LwSheet> SavedList = new List<LwSheet>();
public List<LwSheet> Sheets = new List<LwSheet>();
public int Iterations { get; set; }
}
//Actions
public static class Nester
{
public static List<LwSheet> Nest_Parts(CuttingRun cuttingRun, int loopCount)
{
var SheetList = new List<LwSheet>();
List<LwCube> partList = new List<LwCube>();
partList.AddRange(cuttingRun.PartList);
while (partList.Count > 0)
{
LwSheet newScore = new LwSheet(97, 49);
List<LwCube> addingParts = new List<LwCube>();
foreach (var prt in partList)
{
addingParts.Add(new LwCube { Width = prt.Width, Height = prt.Height, PartID = prt.PartID });
}
if (addingParts.Count > 0)
{
var sheets = new ConcurrentBag<LwSheet>();
Parallel.For(0, loopCount, (i) =>
{
var hmr = new LwSheet(97, 49);
Add_Parts_To_Sheet(hmr, addingParts);
sheets.Add(hmr);
});
//for (int i = 0; i < loopCount; i++)
//{
// var hmr = new LwSheet(97, 49);
// Add_Parts_To_Sheet(hmr, addingParts, addToLarge, addToMedium);
// sheets.Add(hmr);
//}
addingParts.Clear();
var bestSheet = sheets.Where(p => p != null).OrderByDescending(p => p.TotalUsed).First();
sheets = null;
newScore = bestSheet;
foreach (var ur in newScore.UsedRectangles)
{
partList.Remove(partList.Single(p => p.PartID == ur.PartID));
}
SheetList.Add(newScore);
}
}
return SheetList;
}
public static void Add_Parts_To_Sheet(LwSheet sh, List<LwCube> parts)
{
var myList = new List<LwCube>();
myList.AddRange(parts);
myList.Shuffle();
foreach (var prt in myList)
{
sh.Place_Part(prt);
}
myList.Clear();
foreach (var ur in sh.UsedRectangles)
{
sh.TotalUsed += ur.Width * ur.Height;
}
}
[ThreadStatic] private static Random Local;
public static Random ThisThreadsRandom
{
get { return Local ?? (Local = new Random(unchecked(Environment.TickCount * 31 + System.Threading.Thread.CurrentThread.ManagedThreadId))); }
}
public static void Shuffle<T>(this IList<T> list)
{
int n = list.Count;
while (n > 1)
{
n--;
int k = ThisThreadsRandom.Next(n + 1);
T value = list[k];
list[k] = list[n];
list[n] = value;
}
}
}
}
I have tried using parallel for loops on each of the loops to try to speed up the process. I have also tried changing them to tasks and using Task.WhenAll. However, I am only able to use around 25% of my CPU. If I start the program 4 different times, I can use 100%.
I am wondering if anyone has any ideas on how I could use 100% of my CPU without starting the program more than once?
EDIT: I added a scaled-down working version above. I also commented out one of the parallel loops and one of the normal loops to show where I put them in the code.
However I am only able to use around 25% of my CPU. If I start the program 4 different times, I can use 100%. I am wondering if anyone has any ideas on how I could use 100% of my CPU without starting the program more than once?
Your code appears to be a mixture of asynchronous (presumably I/O-bound) and parallel (presumably CPU-bound) portions. I say "appears to be" because we can't say for sure where the problem is since this is not a minimal reproducible example.
But, if that assumption is correct, then the reason your CPU is underutilized is simple: the parallel CPU-bound portions are waiting for their input data from the asynchronous I/O-bound portions. The only way to fix that is to run the I/O-bound portions concurrently. Move your I/O-bound code as early in the pipeline as possible, and then be sure to run the I/O-bound portions as concurrently as possible. E.g., if you have to call a WebApi for each item, call it as soon as you have the item; or if you're reading items from a database, try to read as many in a batch as possible. This is to minimize the amount of time that the CPU-bound portions have to wait for their data.
"Asynchronous Parallel ForEach" is rarely a good tool for this kind of problem. I would either look into TPL Dataflow or build your own pipeline using Channels.
At the end of the day, it is possible that the algorithm as a whole is I/O-bound. In that case, there isn't a whole lot you can do: only one CPU would be used because the I/O couldn't even keep up with that single CPU, and in that case using more CPUs wouldn't provide any benefit.
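For the "build your own pipeline using Channels" suggestion, a minimal sketch could look like the following. This is generic illustration, not the question's code; the call to Nester.Nest_Parts appears only in a comment, and the item type and counts are placeholders:
// Minimal System.Threading.Channels pipeline sketch:
// one producer queues work items, one consumer per core does the CPU-bound nesting.
using System;
using System.Threading.Channels;
using System.Threading.Tasks;

static class PipelineSketch
{
    public static async Task RunAsync()
    {
        var channel = Channel.CreateBounded<int>(100);

        // Producer: push work items (here just random loop counts) as soon as they exist.
        var producer = Task.Run(async () =>
        {
            var random = new Random();
            for (int i = 0; i < 1000; i++)
                await channel.Writer.WriteAsync(random.Next(2000, 10000));
            channel.Writer.Complete();
        });

        // Consumers: one per logical core, each pulling items and doing CPU-bound work.
        var consumers = new Task[Environment.ProcessorCount];
        for (int c = 0; c < consumers.Length; c++)
        {
            consumers[c] = Task.Run(async () =>
            {
                await foreach (int loopCount in channel.Reader.ReadAllAsync())
                {
                    // CPU-bound work goes here, e.g. Nester.Nest_Parts(CurrentNest, loopCount),
                    // with results merged under a lock or sent on through another channel.
                }
            });
        }

        await producer;
        await Task.WhenAll(consumers);
    }
}
The bounded channel keeps the producer from racing ahead of the consumers, while one consumer task per core keeps every core busy as long as items keep arriving.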

Generating a maze in C# - it doesn't show the maze

Although there is a maze.Display() call in this code, no maze appears in the console of my console application. It just says "Press any key to continue..."
The source is http://rosettacode.org/wiki/Maze_generation
Here is the code:
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Drawing;
namespace MazeGeneration
{
public static class Extensions
{
public static IEnumerable<T> Shuffle<T>(this IEnumerable<T> source, Random rng)
{
var e = source.ToArray();
for (var i = e.Length - 1; i >= 0; i--)
{
var swapIndex = rng.Next(i + 1);
yield return e[swapIndex];
e[swapIndex] = e[i];
}
}
public static CellState OppositeWall(this CellState orig)
{
return (CellState)(((int)orig >> 2) | ((int)orig << 2)) & CellState.Initial;
}
}
[Flags]
public enum CellState
{
Top = 1,
Right = 2,
Bottom = 4,
Left = 8,
Visited = 128,
Initial = Top | Right | Bottom | Left,
}
public struct RemoveWallAction
{
public Point Neighbour;
public CellState Wall;
}
public class Maze
{
private readonly CellState[,] _cells;
private readonly int _width;
private readonly int _height;
private readonly Random _rng;
public Maze(int width, int height)
{
_width = width;
_height = height;
_cells = new CellState[width, height];
for (var x = 0; x < width; x++)
for (var y = 0; y < height; y++)
_cells[x, y] = CellState.Initial;
_rng = new Random();
VisitCell(_rng.Next(width), _rng.Next(height));
}
public CellState this[int x, int y]
{
get { return _cells[x, y]; }
set { _cells[x, y] = value; }
}
public IEnumerable<RemoveWallAction> GetNeighbours(Point p)
{
if (p.X > 0) yield return new RemoveWallAction { Neighbour = new Point(p.X - 1, p.Y), Wall = CellState.Left };
if (p.Y > 0) yield return new RemoveWallAction { Neighbour = new Point(p.X, p.Y - 1), Wall = CellState.Top };
if (p.X < _width - 1) yield return new RemoveWallAction { Neighbour = new Point(p.X + 1, p.Y), Wall = CellState.Right };
if (p.Y < _height - 1) yield return new RemoveWallAction { Neighbour = new Point(p.X, p.Y + 1), Wall = CellState.Bottom };
}
public void VisitCell(int x, int y)
{
this[x, y] |= CellState.Visited;
foreach (var p in GetNeighbours(new Point(x, y)).Shuffle(_rng).Where(z => !(this[z.Neighbour.X, z.Neighbour.Y].HasFlag(CellState.Visited))))
{
this[x, y] -= p.Wall;
this[p.Neighbour.X, p.Neighbour.Y] -= p.Wall.OppositeWall();
VisitCell(p.Neighbour.X, p.Neighbour.Y);
}
}
public void Display()
{
var firstLine = string.Empty;
for (var y = 0; y < _height; y++)
{
var sbTop = new StringBuilder();
var sbMid = new StringBuilder();
for (var x = 0; x < _width; x++)
{
sbTop.Append(this[x, y].HasFlag(CellState.Top) ? "+--" : "+ ");
sbMid.Append(this[x, y].HasFlag(CellState.Left) ? "| " : " ");
}
if (firstLine == string.Empty)
firstLine = sbTop.ToString();
Debug.WriteLine(sbTop + "+");
Debug.WriteLine(sbMid + "|");
Debug.WriteLine(sbMid + "|");
}
Debug.WriteLine(firstLine);
}
}
class Program
{
static void Main(string[] args)
{
var maze = new Maze(20, 20);
maze.Display();
}
}
}
Maybe try changing Debug.WriteLine to Console.WriteLine
The output isn't going to the console, it is going to the output window in Visual Studio. If your output window isn't visible, enable it with Ctrl+W, O
If you really do want the output to go to the console, replace the instances of Debug.WriteLine with Console.WriteLine.
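For example, the question's Display method with the Debug.WriteLine calls swapped for Console.WriteLine (the three-character cell padding inside the string literals is an assumption so that the walls line up):
public void Display()
{
    var firstLine = string.Empty;
    for (var y = 0; y < _height; y++)
    {
        var sbTop = new StringBuilder();
        var sbMid = new StringBuilder();
        for (var x = 0; x < _width; x++)
        {
            sbTop.Append(this[x, y].HasFlag(CellState.Top) ? "+--" : "+  ");
            sbMid.Append(this[x, y].HasFlag(CellState.Left) ? "|  " : "   ");
        }
        if (firstLine == string.Empty)
            firstLine = sbTop.ToString();
        Console.WriteLine(sbTop + "+"); // was Debug.WriteLine
        Console.WriteLine(sbMid + "|"); // was Debug.WriteLine
        Console.WriteLine(sbMid + "|"); // was Debug.WriteLine
    }
    Console.WriteLine(firstLine); // was Debug.WriteLine
}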

Neural network, getting output less than 1

I am trying to create a neural network for the function y = e^(-(x-u)^2 / (2*o^2)) where u = 50 and o = 15.
I must train my neural network so I can find the two x values for each y. I have created the following code. It seems to learn nicely, but once I test it, the outputs I get are only numbers around 0.99 to 1 where I should get roughly 25 and 75, and I just can't see why. My best guess is that my error correction is wrong, but I can't find the error. The neural network uses back-propagation.
The test code and training set
class Program
{
static void Main(string[] args)
{
args = new string[] {
"c:\\testTrain.csv",
"c:\\testValues.csv"
};
// Output File
string fileTrainPath = null;
string fileValuesPath = null;
if (args.Length > 0)
{
fileTrainPath = args[0];
if (File.Exists(fileTrainPath))
File.Delete(fileTrainPath);
fileValuesPath = args[1];
if (File.Exists(fileValuesPath))
File.Delete(fileValuesPath);
}
double learningRate = 0.1;
double u = 50;
double o = 15;
Random rand = new Random();
Network net = new Network(1, 8, 4, 2);
NetworkTrainer netTrainer = new NetworkTrainer(learningRate, net);
List<TrainerSet> TrainerSets = new List<TrainerSet>();
for(int i = 0; i <= 20; i++)
{
double random = rand.NextDouble();
TrainerSets.Add(new TrainerSet(){
Inputs = new double[] { random },
Outputs = getX(random, u, o)
});
}
// Train Network
string fileTrainValue = String.Empty;
for (int i = 0; i <= 10000; i++)
{
if (i == 5000)
{ }
double error = netTrainer.RunEpoch(TrainerSets);
Console.WriteLine("Epoch " + i + ": Error = " + error);
if(fileTrainPath != null)
fileTrainValue += i + "," + learningRate + "," + error + "\n";
}
if (fileTrainPath != null)
File.WriteAllText(fileTrainPath, fileTrainValue);
// Test Network
string fileValuesValue = String.Empty;
for (int i = 0; i <= 100; i++)
{
double y = rand.NextDouble();
double[] dOutput = getX(y, u, o);
double[] Output = net.Compute(new double[] { y });
if (fileValuesPath != null)
fileValuesValue += i + "," + y + "," + dOutput[0] + "," + dOutput[1] + "," + Output[0] + "," + Output[1] + "\n";
}
if (fileValuesPath != null)
File.WriteAllText(fileValuesPath, fileValuesValue);
}
public static double getResult(int x, double u, double o)
{
return Math.Exp(-Math.Pow(x-u,2)/(2*Math.Pow(o,2)));
}
public static double[] getX(double y, double u, double o)
{
return new double[] {
u + Math.Sqrt(2 * Math.Pow(o, 2) * Math.Log(1/y)),
u - Math.Sqrt(2 * Math.Pow(o, 2) * Math.Log(1/y)),
};
}
}
The code behind the network
public class Network
{
protected int inputsCount;
protected int layersCount;
protected NetworkLayer[] layers;
protected double[] output;
public int Count
{
get
{
return layers.Count();
}
}
public NetworkLayer this[int index]
{
get { return layers[index]; }
}
public Network(int inputsCount, params int[] neuronsCount)
{
this.inputsCount = Math.Max(1, inputsCount);
this.layersCount = Math.Max(1, neuronsCount.Length);
layers = new NetworkLayer[neuronsCount.Length];
for (int i = 0; i < layersCount; i++)
layers[i] = new NetworkLayer(neuronsCount[i],
(i == 0) ? inputsCount : neuronsCount[i - 1]);
}
public virtual double[] Compute(double[] input)
{
output = input;
foreach (NetworkLayer layer in layers)
output = layer.Compute(output);
return output;
}
}
public class NetworkLayer
{
protected int inputsCount = 0;
protected int neuronsCount = 0;
protected Neuron[] neurons;
protected double[] output;
public Neuron this[int index]
{
get { return neurons[index]; }
}
public int Count
{
get { return neurons.Length; }
}
public int Inputs
{
get { return inputsCount; }
}
public double[] Output
{
get { return output; }
}
public NetworkLayer(int neuronsCount, int inputsCount)
{
this.inputsCount = Math.Max( 1, inputsCount );
this.neuronsCount = Math.Max( 1, neuronsCount );
neurons = new Neuron[this.neuronsCount];
output = new double[this.neuronsCount];
// create each neuron
for (int i = 0; i < neuronsCount; i++)
neurons[i] = new Neuron(inputsCount);
}
public virtual double[] Compute(double[] input)
{
// compute each neuron
for (int i = 0; i < neuronsCount; i++)
output[i] = neurons[i].Compute(input);
return output;
}
}
public class Neuron
{
protected static Random rand = new Random((int)DateTime.Now.Ticks);
public int Inputs;
public double[] Input;
public double[] Weights;
public double Output = 0;
public double Threshold;
public double Error;
public Neuron(int inputs)
{
this.Inputs = inputs;
Weights = new double[inputs];
for (int i = 0; i < inputs; i++)
Weights[i] = rand.NextDouble() * 0.5;
}
public double Compute(double[] inputs)
{
Input = inputs;
double e = 0.0;
for (int i = 0; i < inputs.Length; i++)
e += Weights[i] * inputs[i];
e -= Threshold;
return (Output = sigmoid(e));
}
private double sigmoid(double value)
{
return (1 / (1 + Math.Exp(-1 * value)));
//return 1 / (1 + Math.Exp(-value));
}
}
My Trainer
public class NetworkTrainer
{
private Network network;
private double learningRate = 0.1;
public NetworkTrainer(double a, Network network)
{
this.network = network;
this.learningRate = a;
}
public double Run(double[] input, double[] output)
{
network.Compute(input);
return CorrectErrors(output);
}
public double RunEpoch(List<TrainerSet> sets)
{
double error = 0.0;
for (int i = 0, n = sets.Count; i < n; i++)
error += Run(sets[i].Inputs, sets[i].Outputs);
// return summary error
return error;
}
private double CorrectErrors(double[] desiredOutput)
{
double[] errorLast = new double[desiredOutput.Length];
NetworkLayer lastLayer = network[network.Count - 1];
for (int i = 0; i < desiredOutput.Length; i++)
{
// S(p)=y(p)*[1-y(p)]*(yd(p)-y(p))
lastLayer[i].Error = lastLayer[i].Output * (1-lastLayer[i].Output)*(desiredOutput[i] - lastLayer[i].Output);
errorLast[i] = lastLayer[i].Error;
}
// Calculate errors
for (int l = network.Count - 2; l >= 0; l--)
{
for (int n = 0; n < network[l].Count; n++)
{
double newError = 0;
for (int np = 0; np < network[l + 1].Count; np++)
{
newError += network[l + 1][np].Weights[n] * network[l + 1][np].Error;
}
network[l][n].Error = newError;
}
}
// Update Weights
// w = w + (a * input * error)
for (int l = network.Count - 1; l >= 0; l--)
{
for (int n = 0; n < network[l].Count; n++)
{
for (int i = 0; i < network[l][n].Inputs; i++)
{
// deltaW = a * y(p) * s(p)
double deltaW = learningRate * network[l][n].Output * network[l][n].Error;
network[l][n].Weights[i] += deltaW;
}
}
}
double returnError = 0;
foreach (double e in errorLast)
returnError += e;
return returnError;
}
}
For regression problems your output layer should have the identity (or at least a linear) activation function. That way you don't have to scale your output. The derivative of the identity function is 1, so the error term dE/da_i for the output layer is simply y - t (lastLayer[i].Output - desiredOutput[i]).
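As a rough sketch of what that would mean in the question's code (hypothetical: only the output layer changes, and the trainer would also need to know which layer is linear):
// Sketch: a linear (identity) output neuron, adapted from the question's Neuron class.
// Hidden layers keep the sigmoid; only the last layer would use this.
public double ComputeLinear(double[] inputs)
{
    Input = inputs;
    double e = 0.0;
    for (int i = 0; i < inputs.Length; i++)
        e += Weights[i] * inputs[i];
    e -= Threshold;
    return (Output = e); // no squashing, so targets like 25 and 75 are reachable
}

// In CorrectErrors, the output-layer error term then drops the sigmoid-derivative factor:
// lastLayer[i].Error = desiredOutput[i] - lastLayer[i].Output;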
