Publication date: Mar 21, 2017 9:58:4 PM
// Dedicated to Llorenç Magraner
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

// Learning rate and maximum number of epochs; both can be tuned for the problem at hand
#define LR 0.1
#define MAX_EPOCHS 5000
#define MAX_SAMPLES 300

// Pseudo-random float in [0, 1]
float randomFloat() {
    return (float)rand() / (float)RAND_MAX;
}

// Perceptron activation: sign of the weighted sum (W[0] is the bias)
int _compute_Output(float W[], float x, float y) {
    return (W[0] + x * W[1] + y * W[2] >= 0) ? 1 : -1;
}

int main(int argc, char *argv[]) {
    srand(time(NULL));
    float W[3];
    float x[MAX_SAMPLES], y[MAX_SAMPLES];
    int outputs[MAX_SAMPLES];
    float localError, globalError;
    int numberTests, i, p, epoch, output_calc;
    FILE *fp;

    if ((fp = fopen("test_1Q3Q.txt", "r")) == NULL) {
        printf("No datafile found.\n");
        exit(1);
    }
    // Read up to MAX_SAMPLES training points: x, y and the desired output (+1 or -1)
    i = 0;
    while (i < MAX_SAMPLES && fscanf(fp, "%f %f %d", &x[i], &y[i], &outputs[i]) == 3)
        i++;
    fclose(fp);
    numberTests = i;
    printf("Number of training samples: %d\n", numberTests);
    getchar();  // Pause so the sample count can be checked before training starts

    // Bias (threshold)
    W[0] = randomFloat();
    // Weights
    W[1] = randomFloat();
    W[2] = randomFloat();

    epoch = 0;
    do {
        epoch++;
        globalError = 0;
        for (p = 0; p < numberTests; p++) {
            output_calc = _compute_Output(W, x[p], y[p]);
            localError = outputs[p] - output_calc;
            // Bias update
            W[0] += LR * localError;
            // Weight updates
            W[1] += LR * localError * x[p];
            W[2] += LR * localError * y[p];
            globalError += localError * localError;
        }
        globalError = globalError / numberTests;
        printf("Epoch %d: GlobalError: %.4f sqrt(GlobalError) = %.4f\n",
               epoch, globalError, sqrt(globalError));
    } while (globalError != 0 && epoch <= MAX_EPOCHS);

    printf("\nSeparating line: %.2f*x + %.2f*y + %.2f = 0\n", W[1], W[2], W[0]);
    return 0;
}
Compile with: gcc perceptron.c -o perceptron -lm
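The program expects a file named test_1Q3Q.txt in the working directory, with one training sample per line: two coordinates followed by the desired output (+1 or -1), as implied by the fscanf format string. Since the original post does not include the data file, here is a minimal, hypothetical generator sketch, assuming the goal suggested by the file name is to separate first-quadrant points (+1) from third-quadrant points (-1); the sample count and coordinate ranges are my own choices.

/* Hypothetical generator for test_1Q3Q.txt: labels first-quadrant points
   as +1 and third-quadrant points as -1. Sample count and ranges are
   assumptions, not part of the original post. */
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(void) {
    srand(time(NULL));
    FILE *fp = fopen("test_1Q3Q.txt", "w");
    if (fp == NULL) {
        printf("Cannot create datafile.\n");
        return 1;
    }
    for (int i = 0; i < 100; i++) {
        float x = (float)rand() / (float)RAND_MAX;   /* in [0, 1] */
        float y = (float)rand() / (float)RAND_MAX;
        if (i % 2 == 0)
            fprintf(fp, "%f %f %d\n",  x,  y,  1);   /* first quadrant -> +1 */
        else
            fprintf(fp, "%f %f %d\n", -x, -y, -1);   /* third quadrant -> -1 */
    }
    fclose(fp);
    return 0;
}

Compile and run this sketch first (for example: gcc gen_data.c -o gen_data && ./gen_data), then run ./perceptron; the two quadrants are linearly separable, so the training loop should reach zero global error in a few epochs.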
A great sequence of seven videos explaining what a neural network is and how it works.