Data de publicació: May 15, 2017 7:56:29 PM
Makefile
D_I = include
PROGRAMES = test_vsann.o vsann.o
test_vsann : $(PROGRAMES)
gcc -o test_vsann $(PROGRAMES) -lm
test_vsann.o : test_vsann.c $(D_I)/vsann.h
gcc -c $<
vsann.o : vsann.c $(D_I)/vsann.h
gcc -c $<
.PHONY : netejar
net :
$(RM) *.o
vsann.h
/******************************************************************************
Network: vsANN ( very simple Artificial Neural Network )
Backpropagation Network with Bias Terms per each Node.
Application: Recognizing handwritten digits.
License: GPLv3
Author: Gac Genetic
Date: May 2017
Reference: Guineueta. Barcelona (Catalonia)
******************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <stdarg.h>
//Max size of training set
#define _MAX_TRAINING_SET
//Type of activation function
#define _EQUAL -1
#define _BINARY_STEP 0
#define _SIGMOID 1
#define LR 0.1
//Node vsANN.
//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//Correcció a fer. La capa 0 només fa cas al camp INPUT. Es una capa especial. Per altra banda, z serà el camp input a cada node de cada capa!!!!!!!!!
typedef struct _Node{
double bias;
//z
double input;
//gz
double output;
int n_weights;
double *weight;
double delta;
} t_Node;
//Layer, also Inputs as first layer, and Outputs as the last one
typedef struct _Layer{
int n_nodes;
int a_function;
t_Node *node;
} t_Layer;
//Very Simple Artificial Neural Network
typedef struct _vsANN{
int n_bytes;
double LearningRate;
double error;
int n_inputs;
int n_outputs;
int n_layers;
t_Layer *layer;
} t_vsANN;
typedef char* t_input_training_SET;
int LAYER_INPUT;
int LAYER_OUTPUT;
//a_function, is the type of activation function
//inputs, the number of inputs
//outputs, the number of outputs
//layers, the number of layers including the input and output layers
//... the numbers of nodes of each hidden layer
//By default all weights initialized with a random number between -1 and 1
void _vsANN_create (t_vsANN *vsANN, int a_function, int inputs, int outputs, int layers, ... );
//Compute the output for the n_input array of inputs.
int _vsANN_run(t_vsANN *vsANN, double n_input[], double n_output[]);
//Compute the output taking the inputs from a file. Same format as a train_file
int _vsANN_run_from_file(t_vsANN *vsANN, char* _training_file);
//Print vsANN
void _vsANN_print(t_vsANN *vsANN);
//void _vsANN_print_connections(t_vsANN *vsANN);
//Give each connection a random weight between min_weight and max_weight
//From the beginning the weights are random between -1.0 and 1.0
void _vsANN_rand_weights(t_vsANN *vsANN, long int min_weight, long int max_weight);
/****************************************************************************************************
Load from file the training input-outputs and train vsANN
The file must be formatted like
num_train_data num_input num_output
inputdata separated by space
outputdata separated by space
.
.
.
inputdata seperated by space
outputdata seperated by space
Obviously the number of inputs and outputs must be coincident ( file == vsANN )
******************************************************************************************************/
void _vsANN_train_on_file(t_vsANN *vsANN, char* _test_file, int max_epochs, double desired_error);
double _random_weight( long int min, long int max );
int _function_binary_step(double z);
double _function_sigmoid(double z);
void ComputeOutputOutputError(t_vsANN *vsANN, double* outputs_target);
void BackPropagateNET( t_vsANN *vsANN );
void UpdateWeights(t_vsANN *vsANN);
vsann.c
#include "include/vsann.h"
void _vsANN_create (t_vsANN *vsANN, int a_function, int inputs, int outputs, int layers, ... ){
va_list ap;
int layer, node, weight;
srand(time(NULL));
vsANN->n_inputs = inputs;
vsANN->n_outputs = outputs;
vsANN->n_layers = layers;
vsANN->LearningRate = LR;
vsANN->n_bytes = sizeof(t_vsANN);
vsANN->layer=(t_Layer *)(malloc(sizeof(t_Layer) * layers));
vsANN->n_bytes +=sizeof(t_Layer) * layers;
//Inicialitza nodes per layer. Nombre de paràmetres variable ...
va_start(ap, layers);
LAYER_INPUT = 0;
LAYER_OUTPUT = layers - 1;
vsANN->layer[LAYER_INPUT].n_nodes = inputs;
vsANN->layer[LAYER_OUTPUT].n_nodes = outputs;
for (layer = 1; layer < layers - 1; layer++) vsANN->layer[layer].n_nodes = va_arg(ap, int);
//LAYER_INPUT, same type node as the other layers but the output is just the input. Nothing else.
vsANN->layer[LAYER_INPUT].node=(t_Node *)(malloc(sizeof(t_Node) * vsANN->layer[LAYER_INPUT].n_nodes));
vsANN->n_bytes += sizeof(t_Node) * vsANN->layer[LAYER_INPUT].n_nodes;
for ( node = 0; node < vsANN->layer[LAYER_INPUT].n_nodes; node++ ){
vsANN->layer[LAYER_INPUT].node[node].input = 0;
vsANN->layer[LAYER_INPUT].node[node].output = 0;
}
//Ara inicialitzem weights de totes les layers des de la 1 a la LAYER_OUTPUT
for ( layer = 1; layer < layers; layer++ ){
vsANN->layer[layer].a_function = a_function;
vsANN->layer[layer].node=(t_Node *)(malloc(sizeof(t_Node) * vsANN->layer[layer].n_nodes));
vsANN->n_bytes += sizeof(t_Node) * vsANN->layer[layer].n_nodes;
for ( node = 0; node < vsANN->layer[layer].n_nodes; node++ ){
vsANN->layer[layer].node[node].bias = _random_weight(-1,1);
vsANN->layer[layer].node[node].n_weights = vsANN->layer[layer - 1].n_nodes;
vsANN->layer[layer].node[node].weight=(double *)(malloc(sizeof(double) * vsANN->layer[layer - 1].n_nodes));
for ( weight = 0; weight < vsANN->layer[layer].node[node].n_weights; weight++ )
vsANN->layer[layer].node[node].weight[weight] = _random_weight(-1,1);
vsANN->layer[layer].node[node].input = 0;
vsANN->layer[layer].node[node].output = 0;
}
}
}
int _vsANN_run(t_vsANN *vsANN, double n_input[], double n_output[]){
int n_inputs, n_outputs;
int input, output, node;
int nodes_layer_input, nodes_layer_act, nodes_layer_ant;
int n_layers, layer;
double gz_node;
double z_node;
n_inputs = vsANN->n_inputs;
n_outputs = vsANN->n_outputs;
//First layer
for ( node = 0; node < vsANN->layer[LAYER_INPUT].n_nodes; node++ ){
vsANN->layer[LAYER_INPUT].node[node].input = n_input[node];
vsANN->layer[LAYER_INPUT].node[node].output = n_input[node];
}
//Process all the layers including the output layer
n_layers = vsANN->n_layers;
for ( layer = 1; layer < n_layers; layer++ ){
nodes_layer_ant = vsANN->layer[layer-1].n_nodes;
nodes_layer_act = vsANN->layer[layer].n_nodes;
for ( node = 0; node < nodes_layer_act; node++ ){
//Each node
z_node = vsANN->layer[layer].node[node].bias;
for ( input = 0; input < nodes_layer_ant; input++ )
z_node += vsANN->layer[layer].node[node].weight[input] * vsANN->layer[layer-1].node[input].output;
vsANN->layer[layer].node[node].input = gz_node;
switch(vsANN->layer[layer].a_function){
case _BINARY_STEP: gz_node = _function_binary_step(z_node);
break;
case _SIGMOID: gz_node = _function_sigmoid(z_node);
break;
default: break;
}
vsANN->layer[layer].node[node].output = gz_node;
}
}
//Returns the calculated outputs
for ( output = 0; output < n_outputs; output++) n_output[output] = vsANN->layer[n_layers-1].node[output].output;
}
int _vsANN_run_from_file(t_vsANN *vsANN, char* _test_file){
FILE *ft;
int n_tests, n_inputs, n_outputs;
long test, input, output;
double *inputs, *outputs_target, *outputs_computed;
int i;
ft = fopen(_test_file, "r");
if (ft == NULL) exit(EXIT_FAILURE);
//First line: num training, num inputs, num outputs
if ( fscanf(ft, "%d %d %d", &n_tests, &n_inputs, &n_outputs) != EOF );
//The number of inputs and outputs must be coincident
if ( ( vsANN->n_inputs != n_inputs ) || ( vsANN->n_outputs != n_outputs ) ) {
printf("\nImpossible to test a trained vsANN with this file: n_inputs or n_outputs not coincident \n");
exit(-1);
}
inputs = ( double *)(malloc(sizeof(double) * n_inputs));
outputs_target = ( double *)(malloc(sizeof(double) * n_outputs));
outputs_computed = ( double *)(malloc(sizeof(double) * n_outputs));
for ( test = 0; test < n_tests; test++ ){
for ( input = 0; input < n_inputs; input++ ) fscanf(ft, "%lf", &(inputs[input]));
for ( output = 0; output < n_outputs; output++ ) fscanf(ft, "%lf", &(outputs_target[output]));
_vsANN_run(vsANN, inputs, outputs_computed);
// printf("\n INPUT %d", test);
// printf("\nInputs %d ", n_inputs);
// for ( input = 0; input < n_inputs; input++ ) printf("%f ", inputs[input]);
printf("\nOutputs (1 Target, 2 Computed) %d \n", n_outputs);
for ( output = 0; output < n_outputs; output++ ) printf("%f ", outputs_target[output]);
printf("\n");
for ( output = 0; output < n_outputs; output++ ) printf("%f ", outputs_computed[output]);
// getchar();
}
fclose(ft);
free(inputs);
free(outputs_target);
free(outputs_computed);
}
void _vsANN_random_weights(t_vsANN *vsANN, long int min_weight, long int max_weight){
int layer, node, output, weight;
//Initializing all layers weights
for ( layer = 0; layer < vsANN->n_layers; layer ++ ){
for ( node = 0; node < vsANN->layer[layer].n_nodes; node++ ){
vsANN->layer[layer].node[node].bias = _random_weight( min_weight, max_weight);
for ( weight = 0; weight < vsANN->layer[layer].node[node].n_weights; weight++ )
vsANN->layer[layer].node[node].weight[weight] = _random_weight( min_weight, max_weight);
vsANN->layer[layer].node[node].output = 0;
}
}
}
int _function_equal(double z){
return (z);
}
int _function_binary_step(double z){
return (z >= 0 ) ? 1 : -1;
}
double _function_sigmoid(double z){
return ( 1 / ( 1 + powf(M_E, -z ) ) );
}
//Returns a random double number between min and max
double _random_weight( long int min, long int max ){
double nr;
if ( max < min ) return 0;
nr = (double)rand() / (double)RAND_MAX;
nr = nr * ( max - min);
return min + nr;
}
void _vsANN_print(t_vsANN *vsANN){
int n_inputs, n_layers, n_outputs, n_weights;
int n_nodes_layer;
int i, j, k;
printf("\n***************************************************************************");
printf("\nvsANN %d BYTES", sizeof(t_vsANN));
printf("\nvsANN %d BYTES", vsANN->n_bytes);
n_inputs = vsANN->n_inputs;
n_layers = vsANN->n_layers;
n_outputs = vsANN->n_outputs;
printf("\nInputs %d Layers %d Outputs %d", n_inputs, n_layers, n_outputs);
printf("\n***************************************************************************");
printf("\nLAYERS ...");
for ( i = 0; i < n_layers; i++ ){
n_nodes_layer = vsANN->layer[i].n_nodes;
printf("\n\nLayer %d de n_layers %d amb n_nodes_layer %d a_function %d", i + 1, n_layers, n_nodes_layer, vsANN->layer[i].a_function );
for ( j = 0; j < n_nodes_layer; j++ ){
printf("\nNode %d de Layer %d ", j, i);
printf("\n\tDelta: %lf", vsANN->layer[i].node[j].delta );
printf("\n\tBias: %lf", vsANN->layer[i].node[j].bias );
n_weights = vsANN->layer[i].node[j].n_weights;
printf("\n\tWeights: ");
for ( k = 0; k < n_weights; k++ ) printf("%lf ", vsANN->layer[i].node[j].weight[k]);
printf("\n\tInput: %lf", vsANN->layer[i].node[j].input );
printf("\n\tOutput: %lf", vsANN->layer[i].node[j].output );
}
}
printf("\n");
}
void _vsANN_train_on_file(t_vsANN *vsANN, char* _training_file, int max_epochs, double desired_error){
FILE *ft;
int n_train, n_inputs, n_outputs;
long epoch, train, input, output;
double *inputs, *outputs_target, *outputs_computed;
int i;
ft = fopen(_training_file, "r");
if (ft == NULL) exit(EXIT_FAILURE);
//First line: num training, num inputs, num outputs
if ( fscanf(ft, "%d %d %d", &n_train, &n_inputs, &n_outputs) != EOF );
//The number of inputs amnd outputs must be coincident
if ( ( vsANN->n_inputs != n_inputs ) || ( vsANN->n_outputs != n_outputs ) ) {
printf("\nImpossible to train vsANN with this file: n_inputs or n_outputs not coincident \n");
exit(-1);
}
printf("\nn_train %d n_inputs %d n_outputs %d\n", n_train, n_inputs, n_outputs );
inputs = ( double *)(malloc(sizeof(double) * n_inputs));
outputs_target = ( double *)(malloc(sizeof(double) * n_outputs));
outputs_computed = ( double *)(malloc(sizeof(double) * n_outputs));
epoch = 0;
vsANN->error = 10;
while ( ( epoch < max_epochs ) && ( vsANN->error > desired_error ) ){
vsANN->error = 10;
for ( train = 0; train < n_train; train++ ){
for ( input = 0; input < n_inputs; input++ ) fscanf(ft, "%lf", &(inputs[input]));
for ( output = 0; output < n_outputs; output++ ) fscanf(ft, "%lf", &(outputs_target[output]));
//for ( input = 0; input < n_inputs; input++ ) printf("train %d \n%lf", train, inputs[input]);
//for ( output = 0; output < n_outputs; output++ ) printf("train %d \n%lf", train, outputs_target[output]); getchar();
_vsANN_run(vsANN, inputs, outputs_computed);
//Compute the error
ComputeOutputOutputError(vsANN, outputs_target);
BackPropagateNET(vsANN);
UpdateWeights(vsANN);
}
//File init again. Not 0 but 1 position, second line.
fseek( ft, 0, SEEK_SET );
if ( fscanf(ft, "%d %d %d", &n_train, &n_inputs, &n_outputs) != EOF );
vsANN->error = vsANN->error / n_train;
epoch++;
printf("\nEpoch %ld Error %2.10lf Desired error %lf", epoch, vsANN->error, desired_error);
}
printf("\n******************Error %lf Desired error %lf", vsANN->error, desired_error);
fclose(ft);
free(inputs);
free(outputs_target);
free(outputs_computed);
}
void ComputeOutputOutputError(t_vsANN *vsANN, double* outputs_target)
{
int i, output_layer;
double output, error = 0;
output_layer = vsANN->n_layers - 1;
for ( i = 0; i < vsANN->n_outputs; i++ ){
output = vsANN->layer[output_layer].node[i].output;
error = outputs_target[i] - output;
vsANN->layer[output_layer].node[i].delta = error * output * ( 1 - output );
vsANN->error += 0.5 * pow ( error, 2 );
//printf("\nComputeOutput Output %d Target %lf Output %lf Error %lf Delta %lf", i, outputs_target[i], output, vsANN->error, vsANN->layer[output_layer].node[i].delta );
}
}
void BackPropagateNET( t_vsANN *vsANN ){
int i, j;
float delta, output;
int layer;
//printf("\nBackpropagateNET A");
for (layer = vsANN->n_layers-2; layer > LAYER_INPUT; layer--) {
for ( i = 0; i < vsANN->layer[layer].n_nodes; i++ ){
output = vsANN->layer[layer].node[i].output;
delta = 0;
for ( j = 0; j < vsANN->layer[layer+1].n_nodes; j++ ){
delta += vsANN->layer[layer+1].node[j].weight[i] * vsANN->layer[layer+1].node[j].delta;
}
vsANN->layer[layer].node[i].delta = delta * output * (1-output);
}
}
//printf("\nBackpropagateNET B");
}
void UpdateWeights(t_vsANN *vsANN){
int layer, node_i, node_j;
float output, delta;
//printf("\nUpdateWeights A");
for ( layer = 0; layer < vsANN->n_layers-1; layer++ ){
for ( node_i = 0; node_i < vsANN->layer[layer].n_nodes; node_i++ ){
if ( layer < vsANN->n_layers-1 ){
output = vsANN->layer[layer].node[node_i].output;
for ( node_j = 0; node_j < vsANN->layer[layer+1].n_nodes; node_j++ ){
delta = vsANN->layer[layer+1].node[node_j].delta;
vsANN->layer[layer+1].node[node_j].weight[node_i] += vsANN->LearningRate * delta * output;
vsANN->layer[layer+1].node[node_j].bias += vsANN->LearningRate * delta;
}
}
}
}
//printf("\nUpdateWeights B");
}
test_vsann.c
#include "include/vsann.h"
int main(){
t_vsANN vsANN;
FILE *_training_file;
t_input_training_SET training_input;
double test;
int i;
long int min, max;
_vsANN_create(&vsANN, _SIGMOID, 256, 9, 6, 64, 128, 128, 64);
_vsANN_train_on_file(&vsANN,"nums.txt", 10000, 0.005);
_vsANN_run_from_file(&vsANN,"nums_meus.txt");
}