Embedding C/C++ into Yazoo: A Neural Network Tutorial

C implementation

NN.c


#include "NN.h"
#include <math.h>


int main(int argc, char **argv)
{
   neural_network myNN;
   double *inputs, step_size, *target_outputs, learning_rate;
   long i, inputs_num, outputs_num;


   /* ----- set up data types, etc. ----- */


       // load the input values into the first neurons and zero the rest of the network
   for (i = 0; i < inputs_num; i++)
       *(myNN.activity + i) = *(inputs + i);
   for (i = inputs_num; i < myNN.neurons_num; i++)
       *(myNN.activity + i) = 0;

   if ( argc == 6 )   {     // i.e. if we're in training mode

           // free phase: relax with only the inputs clamped, then unlearn the network's own answer
       if (GetSteadyState(myNN, inputs_num, step_size) != 0) return 1;
       Train(myNN, -learning_rate);
       
           // clamped phase: fix the output neurons to their target values as well
       for (i = 0; i < outputs_num; i++)
           *(myNN.activity + inputs_num + i) = *(target_outputs + i);

       if (GetSteadyState(myNN, inputs_num+outputs_num, step_size) != 0)
           return 1;
       Train(myNN, learning_rate);        }

   else if (GetSteadyState(myNN, inputs_num, step_size) != 0) return 1;
   

   /* ----- save results ----- */
   
   
   return 0;
}


// Evolves a network to the self-consistent state x_i = f( W_ij x_j ).

int GetSteadyState(neural_network NN, long num_clamped, double StepSize)
{
   const double max_mean_sq_diff = 0.001;
   const long MaxIterations = 1000;

   double diff, sq_diff, input, new_output;
   unsigned long Iteration, i, j;
   
   if (num_clamped == NN.neurons_num) return 0;
   
       // iterate until the network is at a steady state
   for (Iteration = 1; Iteration <= MaxIterations; Iteration++)   {
       sq_diff = 0;

       for (i = num_clamped; i < NN.neurons_num; i++) {
           input = 0;
           for (j = 0; j < NN.neurons_num; j++)  {
           if (i != j)  {
               input += (*(NN.activity + j)) *
                               (*(NN.weights + i*NN.neurons_num + j));
           }}
           new_output = 1./(1 + exp(-input));
           
           diff = (new_output - *(NN.activity + i));
           sq_diff += diff*diff;
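                // step the neuron a fraction StepSize of the way toward its new output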
           *(NN.activity + i) *= 1-StepSize;
           *(NN.activity + i) += StepSize * new_output;
       }
       
           // converged?  (mean squared change per free neuron is below threshold)
       if (sq_diff < max_mean_sq_diff * (NN.neurons_num - num_clamped))
           return 0;
   }
   
   return 1;        // failed to reach a steady state within MaxIterations
}


// Updates the weights and biases with the Hebbian rule.

void Train(neural_network NN, double LearningRate)
{
   unsigned long i, j;
   
   for (i = 0; i < NN.neurons_num; i++)   {
   for (j = 0; j < NN.neurons_num; j++)   {
   if (i != j)   {
       *(NN.weights + i*NN.neurons_num + j) += LearningRate *
               (*(NN.activity + i)) * (*(NN.activity + j));
   }}}
}

NN.h


typedef struct {
   unsigned long neurons_num;    // 'N'

   double *weights;              // N x N array of incoming synapses
   double *activity;             // length-N vector
} neural_network;

extern int GetSteadyState(neural_network, long, double);
extern void Train(neural_network, double);
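

To try out GetSteadyState() and Train() on their own, before anything is wired into Yazoo, one could drop them into a small standalone test such as the sketch below (built, say, as gcc test_NN.c NN.c -lm after commenting out the main() in NN.c). The hypothetical file name, the three-neuron network, the random initial weights, the clamped inputs, and the step and learning rates are all arbitrary illustrative choices, not part of the tutorial.


/* test_NN.c -- an illustrative test of GetSteadyState() and Train();
   not part of the tutorial files */

#include "NN.h"
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
   neural_network testNN;
   unsigned long i, N = 3;

       // allocate a tiny fully-connected 3-neuron network
   testNN.neurons_num = N;
   testNN.weights = (double *) calloc(N*N, sizeof(double));
   testNN.activity = (double *) calloc(N, sizeof(double));
   if (testNN.weights == NULL || testNN.activity == NULL) return 1;

       // small random initial weights
   for (i = 0; i < N*N; i++)
       *(testNN.weights + i) = 0.1*((double) rand()/RAND_MAX - 0.5);

       // clamp the first two neurons as inputs and relax the third
   *(testNN.activity + 0) = 1;
   *(testNN.activity + 1) = 0;
   if (GetSteadyState(testNN, 2, 0.1) != 0) return 1;

   printf("steady-state output:  %g\n", *(testNN.activity + 2));

   Train(testNN, 0.05);      // one Hebbian update at this steady state

   free(testNN.weights);
   free(testNN.activity);
   return 0;
}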


Prev: Background: neural networks   Next: Wiring the C into Yazoo


Last update: July 28, 2013
