
I'm having an issue with my neural network where the error on the input nodes grows enormously negative (into the negative thousands). The network can learn a single training example (e.g. 1+3=4) and will output 4 for inputs 1 and 3, but it can't learn the general pattern from larger datasets. My friend has taken a look at it and can't see the issue. Any help appreciated.
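For reference, the training data is just parallel arrays of inputs and targets. The values below are made up purely for illustration (hypothetical; the real arrays are the tr_inp and tr_out used in the loop below):

    double[][] tr_inp =
    {
        new double[] { 1, 3 },
        new double[] { 2, 2 },
        new double[] { 0, 5 },
        new double[] { 4, 1 }
    };
    double[][] tr_out =
    {
        new double[] { 4 },   // 1 + 3
        new double[] { 4 },   // 2 + 2
        new double[] { 5 },   // 0 + 5
        new double[] { 5 }    // 4 + 1
    };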

The training loop:

        for (int j = 0; j < 3000; j++)
        {
            for (int i = 0; i < tr_inp.Length; i++)
            {
                nn.inputs = tr_inp[i];
                nn.desired = tr_out[i];
                nn.FeedForward(tr_inp[i]);
                nn.Backpropagate(tr_out[i]);
            }
        }

    public void FeedForward(double[] inputs)
    {
        this.inputs = inputs;
        //set the input nodes' outputs to their input weight
        for (int i = 0; i < nodes[0].Count; i++)
        {
            nodes[0][i].output = nodes[0][i].weights[0];
        }
        //set hidden layers outputs to dot product
        for (int i = 0; i < nodes[1].Count; i++)
        {
            double sum = 0;
            for (int j = 0; j < nodes[1][i].weights.Length; j++)
            {
                sum += nodes[1][i].weights[j] * nodes[0][j].output;
            }
            nodes[1][i].output = Normalization.Logistic(sum);
        }
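        //set output layer outputs to the dot product of hidden outputs and weights, squashed by the logistic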
        for (int i = 0; i < output; i++)
        {
            double sum = 0;
            for (int j = 0; j < hidden; j++)
            {
                sum += nodes[2][i].weights[j] * nodes[1][j].output;
            }
            nodes[2][i].output = Normalization.Logistic(sum);
        }

    }
    public void initilizeError()
    {
        for (int j = 0; j < hidden; j++)
        {

            nodes[1][j].error = 0;
        }
        for (int j = 0; j < input; j++)
        {

            nodes[0][j].error = 0;
        }
    }
    public void Backpropagate(double[] desired)
    {
        #region error calculations
        this.desired = desired;
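        // error terms: output error = (desired - output); hidden and input errors are
        // accumulated as weighted sums of the errors from the layer above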

        for (int j = 0; j < output; j++)
        {
            nodes[2][j].error = (desired[j] - nodes[2][j].output);
        }

        for (int j = 0; j < hidden; j++)
        {

           // nodes[1][j].error = 0;
        }


        for (int i = 0; i < output; i++)
        {
            for (int j = 0; j < hidden; j++)
            {

                nodes[1][j].error += nodes[2][i].weights[j] * nodes[2][i].error;
            }
        }

        for (int j = 0; j < input; j++)
        {

          //  nodes[0][j].error = 0;
        }

        for (int i = 0; i < hidden; i++)
        {
            for (int j = 0; j < input; j++)
            {

                nodes[0][j].error += nodes[1][i].weights[j] * nodes[1][i].error;
            }
        }
        #endregion
        #region Backpropagation
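        // weight updates: each weight is nudged by error * incoming activation * derivative of the logistic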


        for (int i = 0; i < input; i++)
        {
            var Dx = Normalization.Dx_Logistic(nodes[0][i].output);
            for (int j = 0; j < input; j++)
            {
                nodes[0][i].weights[0] += nodes[0][i].error * inputs[j]*Dx;
            }
        }
        for (int i = 0; i < hidden; i++)
        {
            var Dx = Normalization.Dx_Logistic(nodes[1][i].output);
            for (int j = 0; j < input; j++)
            {
                nodes[1][i].weights[j] += nodes[1][i].error * nodes[0][j].output * Dx;
            }
        }
        for (int i = 0; i < output; i++)
        {
            var Dx = Normalization.Dx_Logistic(nodes[2][i].output);
            for (int j = 0; j < hidden; j++)
            {
                nodes[2][i].weights[j] += nodes[2][i].error * nodes[1][j].output * Dx;
            }
        }

        #endregion
    }
}
BinkyNichols
  • I think you need to describe your neural network a bit. Doesn't look like you are using the inputs in your feed forward function. Only in back prop. Also, you typically only need hidden nodes for complex neural nets. 1+3 is not complex. – Mike Aug 27 '15 at 23:08
  • the desired outputs ("tr_out") are given to the backprop; the inputs ("tr_inp") are fed to the feedforward – BinkyNichols Aug 28 '15 at 14:49
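Following up on the first comment: in FeedForward the inputs argument is stored but never read again, and the input nodes' outputs are set from weights[0]. As a point of comparison only, here is a minimal sketch (not the posted code, and assuming one input node per input value) of what that first loop would look like if the raw inputs were passed straight through the input layer:

    // Hypothetical variant of the first FeedForward loop:
    // copy the raw inputs into the input layer's outputs instead of reading weights[0].
    for (int i = 0; i < nodes[0].Count; i++)
    {
        nodes[0][i].output = inputs[i];
    }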
