// SOM — neural-network indicator example
//
// Source article: http://articles.mql4.com/783#8558

//+------------------------------------------------------------------+

//|                                                    !NeuroInd.mq4 |

//|                                Copyright © 2008, Sergeev Alexey  |

//+------------------------------------------------------------------+

#property indicator_separate_window

#property indicator_buffers 1

#property indicator_color1 LimeGreen

#property indicator_width1 1

// geometry of the MA "fan" fed to the network: NUM_MA averages sampled at DEPTH_MA history steps
int NUM_MA=10;

int DEPTH_MA=11;

double Target[];// current output vector - is drawn on the indicator

int nIN=0; // dimensionality of the input vector (read from the weights file)

int nOUT=0; // dimensionality of the output vector (read from the weights file)

int nLayer=0; // number of layers (read from the weights file; must be <= 10)

int nNeuron[10]; // number of neurons in each layer (must be <= 100 per layer)

// layers

double W[10][100][100]; // weights of a layer: W[layer][neuron][input]

double Thresh[10][100]; // threshold (bias) of each neuron: Thresh[layer][neuron]

double Out[10][100]; // output of each neuron: Out[layer][neuron]; Out[0][] is the input vector

// patterns for the network

double MaIn[10][11]; // MA fan values per bar: MaIn[ma][depth] - source of the input vector

//------------------------------------------------------------------ Sigmoid

// Logistic activation function: maps any real x into the open interval (0,1).
double Sigmoid(double x)
{  // BUGFIX: the opening brace was missing, so the function body did not parse
double rez=1/(1+MathExp(-x));
return(rez);
}

//------------------------------------------------------------------ init

// Indicator initialization: wire buffer 0 to Target[] and draw it as a line.
int init()
{
   SetIndexStyle(0, DRAW_LINE);
   SetIndexBuffer(0, Target);
   return(0);
}

//------------------------------------------------------------------ deinit

int deinit() { return(0); }

//------------------------------------------------------------------ GetWeight

//------------------------------------------------------------------
// Reads the trained network topology and weights from a binary file.
// File layout: nIN, nOUT, nLayer (ints); nNeuron[0..nLayer-1] (ints);
// then for each layer k>=1, for each neuron: nNeuron[k-1] weights + 1 threshold (doubles).
void GetWeight()
{
   int j, i, k;
   string FileName = "eurusd_15.wgh";
   int hFile = FileOpen(FileName, FILE_READ|FILE_BIN);
   // BUGFIX: guard against a missing/unreadable weights file instead of
   // reading from an invalid handle
   if (hFile < 0) { Print("GetWeight: cannot open ", FileName); return; }
   FileSeek(hFile, 0, SEEK_SET);
   nIN = FileReadInteger(hFile);    // dimensionality of the input data vector
   nOUT = FileReadInteger(hFile);   // dimensionality of the output vector
   nLayer = FileReadInteger(hFile); // number of layers in the network
   // reading the dimensionality (neuron count) of each layer
   for (k=0; k<nLayer; k++) nNeuron[k]=FileReadInteger(hFile);
   // reading weights and thresholds of each layer
   for (k=1; k<nLayer; k++)  // layers start from L[1] (L[0] is the input data vector)
   for (i=0; i<nNeuron[k]; i++)
   {
      for (j=0; j<nNeuron[k-1]; j++) W[k][i][j]=FileReadDouble(hFile);
      Thresh[k][i]=FileReadDouble(hFile);
   }
   FileClose(hFile);
}

//------------------------------------------------------------------ CalculateNetwork

//------------------------------------------------------------------
// Forward pass: propagates Out[0][] (the input vector) through every layer,
// leaving the network response in Out[nLayer-1][].
void CalculateNetwork()
{
   int i, j, k;
   double sum=0;
   for (k=1; k<nLayer; k++)   // layers start from L[1] (L[0] is the input data vector)
   for (i=0; i<nNeuron[k]; i++)
   {  // BUGFIX: this brace was missing, so only "sum=0;" was the i-loop body and
      // the weighted sum / activation ran once per layer instead of once per neuron
      sum=0;
      for (j=0; j<nNeuron[k-1]; j++) sum+=W[k][i][j]*Out[k-1][j];
      Out[k][i]=Sigmoid(sum+Thresh[k][i]);
   }
}

//------------------------------------------------------------------ start

//------------------------------------------------------------------
// Main entry: for each of the last 5000 bars, builds the MA-fan input vector,
// runs the network forward pass, and plots the first output neuron in Target[].
int start()
{
   Print("Taking weights");
   GetWeight(); // load trained topology and weights from the file
   Print("Calculating the network");
   // feeding the network its inputs
   // Attention!!! Give only 50 elements, because it was calculated for this number!!!
   int i, ma, depth, k;
   double op;
   for (i=5000; i>0; i--) // walk through the bars, collecting MA fan values
   {
      // sample the fan of EMAs (periods 2, 5, 8, ...) at DEPTH_MA history steps
      for (depth=0; depth<DEPTH_MA; depth++)
      for (ma=0; ma<NUM_MA; ma++)
         MaIn[ma][depth]=iMA(NULL, 0, 2+ma*3, 0, MODE_EMA, PRICE_MEDIAN, i+depth*3);
      // vertical differences: offset each fan value by the bar's median price
      for (depth=0; depth<DEPTH_MA; depth++)
      {
         op=(High[i+depth*3]+Low[i+depth*3])/2.0;
         for (ma=0; ma<NUM_MA; ma++) MaIn[ma][depth]-=op;
      }
      k=0;
      // horizontal differences become the network input vector Out[0][]
      for (ma=0; ma<NUM_MA; ma++)
      {
         for (depth=0; depth<DEPTH_MA-1; depth++)
         {
            MaIn[ma][depth]-=MaIn[ma][depth+1];
            // squash the slope (as an angle) into (0,1) to match the sigmoid range
            op=MathArctan(MaIn[ma][depth]/(5*Point))/3.141592653589793238642463+0.5;
            Out[0][k]=op; k++; // assign to the input data vector
            if (k>=nNeuron[0]) break;
         }
         if (k>=nNeuron[0]) break;
      }
      CalculateNetwork();
      // BUGFIX: the first index of Out[] is the LAYER index; the last layer is
      // nLayer-1. The original indexed with nNeuron[nLayer-1] (a neuron COUNT),
      // reading a wrong - likely out-of-range - row of Out[].
      Target[i]=Out[nLayer-1][0];
   }
   return(0);
}