FANN lib train and test
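This MQL4 indicator is a small demo of the Fann2MQL wrapper for the FANN neural-network library: it builds a 3-8-5-1 feed-forward network, trains it on hand-made three-number patterns (target 1 when the last move is up, 0 when it is down), saves the trained network to disk, then reloads it and runs a few of the patterns through it as a test.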

//+------------------------------------------------------------------+

//|                                                    FannLib_2.mq4 |

//|                      Copyright © 2009, MetaQuotes Software Corp. |

//|                                        http://www.metaquotes.net |

//+------------------------------------------------------------------+

#property copyright "Copyright © 2009, MetaQuotes Software Corp."

#property link      "http://www.metaquotes.net"

#property indicator_separate_window

#property  indicator_buffers 0

#include <Fann2MQL.mqh>

// Global defines

#define ANN_PATH "C:\\ANN\\"
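// note: this directory should already exist on disk before the trained network is saved there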

#define nn_layer 4   // The total number of layers: one input layer, 2 hidden layers and one output layer = 4 layers.

#define nn_input 3   // Number of input neurones. Our pattern is made of 3 numbers, so that means 3 input neurones.

#define nn_hidden1 8 // Number of neurones on the first hidden layer

#define nn_hidden2 5 // Number of neurones on the second hidden layer

#define nn_output 1  // Number of output neurones

#define nn_input_nn_output 4 // IMPORTANT! size = nn_input + nn_output
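// Resulting topology: nn_input (3) -> nn_hidden1 (8) -> nn_hidden2 (5) -> nn_output (1)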

bool training = true;

bool testing  = true;

// trainingData[][] will contain the examples we are going to use to teach the rules to the neurones.

double      trainingData[][nn_input_nn_output]; 

int maxTraining = 500;  // maximum number of times we will train the neurones on the examples

double targetMSE = 0.00002; // the Mean-Square Error we want the neurones to reach (at most) before we stop training (explained further down in the code)
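// (the MSE is the average of the squared differences between the desired outputs and the outputs the network actually produced for the examples it was shown)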

int ann1,ann2; // These variables will hold the identifiers of the neural networks.

//+------------------------------------------------------------------+

//| Custom indicator deinitialization function                       |

//+------------------------------------------------------------------+

int deinit()

  {

//----

      f2M_destroy_all_anns();

//----

   return(0);

  }

  

//+------------------------------------------------------------------+

//| Custom indicator initialization function                         |

//+------------------------------------------------------------------+

int init()

  {

   int i;

   double MSE;  

//---- indicators

   IndicatorBuffers(0);

   IndicatorDigits(6);

   string AnnPath = ANN_PATH+"FANN_NET.net";   

   

   if(training==true)

   {

     Print("##### TRAINING #####");   

     ArrayResize(trainingData,1);    // start with a single empty row; prepareTrainingData() grows the array as examples are added

     // We create a new neural network

     ann1 = f2M_create_standard(nn_layer, nn_input, nn_hidden1, nn_hidden2, nn_output);

     // We set the activation function. Don't worry about that. Just do it.

     f2M_set_act_function_hidden (ann1, FANN_SIGMOID_SYMMETRIC_STEPWISE);

     f2M_set_act_function_output (ann1, FANN_SIGMOID_SYMMETRIC_STEPWISE);

     // Some studies show that, statistically, the best results are reached with this range; but you can try to change it and see if it gets better or worse

     f2M_randomize_weights (ann1, -0.77, 0.77);

     // UP UP = UP / if a < b && b < c then output = 1

     prepareTrainingData(1,2,3,1);

     prepareTrainingData(8,12,20,1);

     prepareTrainingData(4,6,8,1);

     prepareTrainingData(0,5,11,1);

     // UP DOWN = DOWN / if a < b && b > c then output = 0

     prepareTrainingData(1,2,1,0);

     prepareTrainingData(8,10,7,0);

     prepareTrainingData(7,10,7,0);

     prepareTrainingData(2,3,1,0);

     // DOWN DOWN = DOWN / if a > b && b > c then output = 0

     prepareTrainingData(8,7,6,0);

     prepareTrainingData(20,10,1,0);

     prepareTrainingData(3,2,1,0);

     prepareTrainingData(9,4,3,0);

     prepareTrainingData(7,6,5,0);

     // DOWN UP = UP / if a > b && b < c then output = 1

     prepareTrainingData(5,4,5,1);

     prepareTrainingData(2,1,6,1);

     prepareTrainingData(20,12,18,1);

     prepareTrainingData(8,2,10,1);      

     printDataArray();  

     for (i=0;i<maxTraining;i++) 

     {

        MSE = teach(ann1,false); // every time the loop runs, teach() trains the network on all examples once. See the comments on this function for details.

        if (MSE < targetMSE) 

        { // if the MSE is lower than the target we defined (here targetMSE = 0.00002)

           debug("training finished. Trainings ",i+1); // then we print to the console how many training passes it took

           break; // and we leave the loop

        }

     } 

     // we print the final MSE value to the console once training is completed

     debug("MSE",f2M_get_MSE(ann1));     

     //save network   

     debug("FANN file:",AnnPath); 

     f2M_save (ann1, AnnPath);  

   }  

   if(testing==true)

   {

     Print("##### TESTING #####");  

     ann2 = f2M_create_from_file(AnnPath);

     // all UP patterns, trained above with output = 1

     prepareTestingData(ann2,1,2,3);

     prepareTestingData(ann2,8,12,20);

     prepareTestingData(ann2,4,6,8);

     prepareTestingData(ann2,0,5,11);
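
     // Only the UP patterns are tested in the original example. As an optional addition
     // (not part of the original code), the DOWN patterns from the training set could be
     // checked the same way; their outputs should come back close to 0:
     //prepareTestingData(ann2,8,10,7);
     //prepareTestingData(ann2,20,10,1);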

   }

//----

   return(0);

  }  

//+------------------------------------------------------------------+

//| Custom indicator iteration function                              |

//+------------------------------------------------------------------+

int start()

  {

   int    counted_bars=IndicatorCounted();

//----

   

//----

   return(0);

  }

//+------------------------------------------------------------------+

/*************************

** teach()

** Get all the training data and use them to train the neurones one time.

** In order to properly train the neurones, you need to run this function many times,

** until the Mean-Square Error gets low enough.

*************************/

double teach(int ann, bool fast) {

   int i,j;

   double MSE;

   double inputVector[];

   double outputVector[];

   // we resize the arrays to the right size

   ArrayResize(inputVector,nn_input);

   ArrayResize(outputVector,nn_output);

   int call;

   int bufferSize = ArraySize(trainingData)/nn_input_nn_output-1; // number of examples stored (the last row of trainingData is always left empty)

   for (i=0;i<bufferSize;i++) {

     // copy the i-th training example into the input and output vectors
     for (j=0;j<nn_input;j++) {

       inputVector[j]=trainingData[i][j];

     }

     for (j=0;j<nn_output;j++) {

       outputVector[j]=trainingData[i][nn_input+j];

     }

      // f2M_train() shows the neurones one example at a time.

      if(fast)

        call = f2M_train_fast(ann, inputVector, outputVector);

      else

        call = f2M_train(ann, inputVector, outputVector);

   }

   // Once we have shown them all the examples, we check how well they learned by reading their MSE. The lower it is, the better they learned.

   MSE = f2M_get_MSE(ann);

   return(MSE);

}

/*************************

** prepareTrainingData()

** Prepare the data for training.

*************************/

void prepareTrainingData(double input1, double input2, double input3, double output) {

   int i,j;

   double inputVector[];

   double outputVector[];

   // we resize the arrays to the right size

   ArrayResize(inputVector,nn_input);

   ArrayResize(outputVector,nn_output);

   

   inputVector[0]  = input1;

   inputVector[1]  = input2;

   inputVector[2]  = input3;

   outputVector[0] = output;

   int bufferSize = ArraySize(trainingData)/nn_input_nn_output-1; // index of the last (still empty) row, where this example will be stored

      

   //register the input data to the main array

   for (j=0;j<nn_input;j++) {

      trainingData[bufferSize][j] = inputVector[j];

   }

   for (j=0;j<nn_output;j++) {

      trainingData[bufferSize][nn_input+j] = outputVector[j];

   }

   

   ArrayResize(trainingData,bufferSize+2); // grow the array by one row so an empty row is ready for the next example

}

/*************************

** prepareTestingData()

** Prepare the data for testing.

*************************/

double prepareTestingData(int ann, double input1, double input2, double input3) {

   int i,j;

   int out;

   double output;

   

   double inputVector[];

   // we resize the arrays to the right size

   ArrayResize(inputVector,nn_input);

   

   inputVector[0]  = input1;

   inputVector[1]  = input2;

   inputVector[2]  = input3;

   // We send new data to the neurones.

   out = f2M_run(ann, inputVector);

   // and check what they say about it using f2M_get_output().

   output = f2M_get_output(ann, 0);

   debug("Testing()",output);

   return(output);

}
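
/*************************
** signalFromOutput(): not part of the original example, added as a possible helper.
** With the symmetric sigmoid activation the network's output is a continuous value
** (roughly between -1 and 1), not an exact 0 or 1. One simple way to turn it into a
** binary up/down signal is to threshold it; the 0.5 threshold below is an assumption,
** chosen halfway between the two trained targets (0 and 1).
*************************/
int signalFromOutput(double output) {
   if (output > 0.5) return(1); // anything above the threshold is treated as "up"
   return(0);                   // otherwise "down"
}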

/*************************

** printDataArray()

** Print the data used for training the neurones.

** Not needed by the indicator; created for debugging purposes only.

*************************/

void printDataArray() {

   int i,j;

   int bufferSize = ArraySize(trainingData)/nn_input_nn_output-1;

   string lineBuffer = "";

        

   for (i=0;i<bufferSize;i++) {

      for (j=0;j<nn_input_nn_output;j++) {

         lineBuffer = StringConcatenate(lineBuffer, trainingData[i][j], ",");

      }

      debug("DataArray["+i+"]", lineBuffer);      

      lineBuffer = "";

   }

}

/*************************

** debug()

** Print data to the console

*************************/

void debug(string a, string b) {

   Print(a+" ==> "+b);

}