Network differences

As mentioned earlier, the only difference between our networks is the set of layers that are created and added to the network object. In an LSTM we will add LSTM layers; in a GRU, unsurprisingly, we will add GRU layers; and so forth. All four creation functions are displayed as follows for you to compare:

/// <summary>
/// Builds an LSTM network: <paramref name="hiddenLayers"/> stacked LSTM layers
/// topped by a feed-forward decoder layer.
/// </summary>
/// <param name="inputDimension">Width of the network input.</param>
/// <param name="hiddenDimension">Width of every hidden (LSTM) layer.</param>
/// <param name="hiddenLayers">Number of stacked LSTM layers.</param>
/// <param name="outputDimension">Width of the network output.</param>
/// <param name="decoderUnit">Nonlinearity applied by the decoder layer.</param>
/// <param name="initParamsStdDev">Standard deviation used for weight initialization.</param>
/// <param name="rng">Random source for weight initialization.</param>
/// <returns>A <see cref="NeuralNetwork"/> wrapping the constructed layer stack.</returns>
public static NeuralNetwork MakeLstm(int inputDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity decoderUnit, double initParamsStdDev, Random rng)
{
    var layers = new List<ILayer>();
    for (int layerIndex = 0; layerIndex < hiddenLayers; layerIndex++)
    {
        // Only the first layer reads the raw input; the rest are hidden-to-hidden.
        int inDim = layerIndex == 0 ? inputDimension : hiddenDimension;
        layers.Add(new LstmLayer(inDim, hiddenDimension, initParamsStdDev, rng));
    }
    // Decoder projects the final hidden state down to the output dimension.
    layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng));
    return new NeuralNetwork(layers);
}


/// <summary>
/// Builds a plain feed-forward network: <paramref name="hiddenLayers"/> stacked
/// feed-forward layers using <paramref name="hiddenUnit"/>, topped by a decoder layer.
/// </summary>
/// <param name="inputDimension">Width of the network input.</param>
/// <param name="hiddenDimension">Width of every hidden layer.</param>
/// <param name="hiddenLayers">Number of stacked hidden layers.</param>
/// <param name="outputDimension">Width of the network output.</param>
/// <param name="hiddenUnit">Nonlinearity applied by each hidden layer.</param>
/// <param name="decoderUnit">Nonlinearity applied by the decoder layer.</param>
/// <param name="initParamsStdDev">Standard deviation used for weight initialization.</param>
/// <param name="rng">Random source for weight initialization.</param>
/// <returns>A <see cref="NeuralNetwork"/> wrapping the constructed layer stack.</returns>
// FIX: the parameter was declared as "inthiddenLayers" (missing space), which does not compile.
public static NeuralNetwork MakeFeedForward(int inputDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity hiddenUnit, INonlinearity decoderUnit, double initParamsStdDev, Random rng)
{
    var layers = new List<ILayer>();
    for (int h = 0; h < hiddenLayers; h++)
    {
        // Only the first layer reads the raw input; the rest are hidden-to-hidden.
        layers.Add(h == 0
            ? new FeedForwardLayer(inputDimension, hiddenDimension, hiddenUnit, initParamsStdDev, rng)
            : new FeedForwardLayer(hiddenDimension, hiddenDimension, hiddenUnit, initParamsStdDev, rng));
    }
    // Decoder projects the final hidden state down to the output dimension.
    layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng));
    return new NeuralNetwork(layers);
}


/// <summary>
/// Builds a GRU network: <paramref name="hiddenLayers"/> stacked GRU layers
/// topped by a feed-forward decoder layer.
/// </summary>
/// <param name="inputDimension">Width of the network input.</param>
/// <param name="hiddenDimension">Width of every hidden (GRU) layer.</param>
/// <param name="hiddenLayers">Number of stacked GRU layers.</param>
/// <param name="outputDimension">Width of the network output.</param>
/// <param name="decoderUnit">Nonlinearity applied by the decoder layer.</param>
/// <param name="initParamsStdDev">Standard deviation used for weight initialization.</param>
/// <param name="rng">Random source for weight initialization.</param>
/// <returns>A <see cref="NeuralNetwork"/> wrapping the constructed layer stack.</returns>
public static NeuralNetwork MakeGru(int inputDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity decoderUnit, double initParamsStdDev, Random rng)
{
    var layers = new List<ILayer>();
    for (int layerIndex = 0; layerIndex < hiddenLayers; layerIndex++)
    {
        // Only the first layer reads the raw input; the rest are hidden-to-hidden.
        int inDim = layerIndex == 0 ? inputDimension : hiddenDimension;
        layers.Add(new GruLayer(inDim, hiddenDimension, initParamsStdDev, rng));
    }
    // Decoder projects the final hidden state down to the output dimension.
    layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng));
    return new NeuralNetwork(layers);
}


/// <summary>
/// Builds a vanilla RNN: <paramref name="hiddenLayers"/> stacked recurrent layers
/// using <paramref name="hiddenUnit"/>, topped by a feed-forward decoder layer.
/// </summary>
/// <param name="inputDimension">Width of the network input.</param>
/// <param name="hiddenDimension">Width of every hidden (recurrent) layer.</param>
/// <param name="hiddenLayers">Number of stacked recurrent layers.</param>
/// <param name="outputDimension">Width of the network output.</param>
/// <param name="hiddenUnit">Nonlinearity applied by each recurrent layer.</param>
/// <param name="decoderUnit">Nonlinearity applied by the decoder layer.</param>
/// <param name="initParamsStdDev">Standard deviation used for weight initialization.</param>
/// <param name="rng">Random source for weight initialization.</param>
/// <returns>A <see cref="NeuralNetwork"/> wrapping the constructed layer stack.</returns>
public static NeuralNetwork MakeRnn(int inputDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity hiddenUnit, INonlinearity decoderUnit, double initParamsStdDev, Random rng)
{
    var layers = new List<ILayer>();
    for (int layerIndex = 0; layerIndex < hiddenLayers; layerIndex++)
    {
        // Only the first layer reads the raw input; the rest are hidden-to-hidden.
        int inDim = layerIndex == 0 ? inputDimension : hiddenDimension;
        layers.Add(new RnnLayer(inDim, hiddenDimension, hiddenUnit, initParamsStdDev, rng));
    }
    // Decoder projects the final hidden state down to the output dimension.
    layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng));
    return new NeuralNetwork(layers);
}
..................Content has been hidden....................

You can't read the all page of ebook, please click here login for view all page.
Reset