// Backpropagation
// See https://www.miximum.fr/blog/introduction-au-deep-learning-2/
// Build : cc -o backprop backprop.c -lm

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

typedef long double real;

/*
// Alternative normal sampler (Marsaglia polar method), unused:
real sampleNormal() {
    real u = ((real) rand() / (RAND_MAX)) * 2 - 1;
    real v = ((real) rand() / (RAND_MAX)) * 2 - 1;
    real r = u * u + v * v;
    if (r == 0 || r > 1) return sampleNormal();
    real c = sqrt(-2 * log(r) / r);
    return u * c;
}
*/

int randomLessThan (int n) { return rand() % n; }

// Return a uniformly distributed random number in (0, 1]
real RandomGenerator() {
    return ( (real)(rand()) + 1. ) / ( (real)(RAND_MAX) + 1. );
}

// Return a normally distributed random number (Box-Muller transform)
real normalRandom() {
    real y1 = RandomGenerator();
    real y2 = RandomGenerator();
    return cos(2*3.1415926*y2) * sqrt(-2.*log(y1));
}

// Sigmoid activation function
real sigma (real x) { return 1 / (1 + exp(-x)); }

// Derivative of the sigmoid: sigma'(x) = sigma(x) * (1 - sigma(x))
real sigmaprime (real x) { return sigma(x) * (1 - sigma(x)); }

#define nl 5           // Number of layers
#define npl 6          // Number of neurons per layer
#define n (nl * npl)   // Total number of neurons
#define p 10
#define alpha 3        // Learning rate
#define nX 3           // Number of inputs

real W[n][n];        // Matrix of connection weights:
                     // element at i-th row, j-th column = weight of the connection from neuron j to neuron i
real B[n];           // Biases
real X[n][nX];       // Inputs, i-th column = vector representing the i-th input
real T[n][nX];       // Expected outputs
real Z[n][nX];       // Aggregations (weighted inputs, before activation)
real A[n][nX];       // Activations
real delta[n][nX];   // Error terms for backpropagation
real delta1[n][nX];
real avgdelta[n];
real GW[n][n];       // Gradient of weights
real GB[n];          // Gradient of biases

void init (void) {
    srand(time(NULL));
    int l, i, j;
    // for (i=0; i<10; i++) printf("%d\n", randomLessThan(p));
    // for (i=0; i<10; i++) printf("%Lf\n", normalRandom());
    for (i=0; i