/**
 * backprop.c
 *
 * Implements a simple backpropagation neural network.
 * See backprop.h for more information on using the backprop network.
 *
 *
 * written by
 * Joshua Petitt
 * Center for Intelligent Information Processing (CIIPS)
 * University of Western Australia
 * 2003
 */

#include "backprop.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>


/***************************************************************
 * LOCAL FUNCTIONS
 **************************************************************/

double sigmoid(double x)
{
	return 1.0/(1.0+exp(-x));	// the sigmoid function
}


/************************************
 * Layer Manipulation functions
 ************************************/

// initialize all weights of a layer with random values in [-1,1]
void RandomizeLayer(layer_t* l)
{
	int i;
	float* W;

	W = l->W;
	for(i=0;i<(l->depth)*(l->width);i++)
	{
		*W = (float) (2.0*(((double)rand())/RAND_MAX)-1.0);
		W++;
	}
}

// copy an input vector into the layer
void InputToLayer(layer_t* l, float* values)
{
	int i;
	float *x;

	x = l->x;

	for(i=0;i<l->width;i++)
	{
		x[i]=values[i];
	}
}

// compute the layer output from its current input
void ActivateLayer(layer_t* l)
{
	int i,j;
	float sum;
	float* W;

	// for each neuron in layer
	W = l->W;
	for(i=0;i<l->depth;i++)
	{
		// calculate weighted input
		sum = 0;
		for(j=0;j<l->width;j++)
		{
			sum += (*W)*l->x[j];
			W++;
		}

		// compute activation function and save output of layer
		l->y[i] = (float) sigmoid(sum);
	}
}

// back-propagate the layer gradients: weighted sum of gradients per input
void WeightedGradient(layer_t* l, float* Wg)
{
	int i,j;

	for(i=0;i<l->width;i++)
	{
		Wg[i]=0;
		for(j=0;j<l->depth;j++)
		{
			Wg[i] += (*(l->W+j*l->width+i))*(l->g[j]);
		}
	}
}

// print the output vector of a layer
void PrintLayerOutput(layer_t* l)
{
	int i;

	for(i=0;i<l->depth;i++)
	{
		printf("%4.3f ",l->y[i]);
	}
	printf("\n");
}

// write the raw layer weights to an open file
void SaveLayerWeights(layer_t* l, FILE *fp)
{
	if(fwrite(l->W,(sizeof(float)*(l->depth)*(l->width)),1,fp)!=1)
	{
		printf("error writing to file\n");
		exit(1);
	}
}

// write the layer weights as a C initializer (HDT format)
void SaveLayerWeightsHDT(layer_t* l, FILE *fp)
{
	int i,j;
	float* W;

	W = l->W;

	fprintf(fp,"{\n");
	for(i=0;i<l->depth;i++)
	{
		fprintf(fp,"{");
		for(j=0;j<l->width;j++)
		{
			fprintf(fp,"%f",*W);
			if(j<l->width-1)
			{
				fprintf(fp,", ");
			}
			W++;
		}
		if(i<l->depth-1)
		{
			fprintf(fp,"},\n");
		}
		else
		{
			fprintf(fp,"}\n");
		}
	}
	fprintf(fp,"}");
}

// read raw layer weights from an open file
void LoadLayerWeights(layer_t* l, FILE *fp)
{
	if(fread(l->W,(sizeof(float)*(l->depth)*(l->width)),1,fp)!=1)
	{
		printf("error reading from file\n");
		exit(1);
	}
}


/************************************
 * Network Manipulation functions
 ************************************/

// copy an input vector into the first layer of the network
void InputToNetwork(network_t* n, float* values)
{
	InputToLayer(&n->layers[0],values);
}

// copy the output of the last layer into a vector
void OutputFromNetwork(network_t* n, float* values)
{
	int i;
	layer_t* pl;

	pl = &n->layers[n->size-1];

	for(i=0;i<pl->depth;i++)
	{
		values[i]=pl->y[i];
	}
}

// propagate the current input forward through all layers
void ActivateNetwork(network_t* n)
{
	int i;

	for(i=0;i<n->size-1;i++)
	{
		ActivateLayer(&n->layers[i]);
		InputToLayer(&n->layers[i+1],n->layers[i].y);
	}
	ActivateLayer(&n->layers[n->size-1]);
}

// initialize all layers of the network with random weights
void RandomizeNetwork(network_t* n)
{
	int i;

	for(i=0;i<n->size;i++)
	{
		RandomizeLayer(&n->layers[i]);
	}
}

// adapt the weights for one desired output vector (backpropagation);
// returns the averaged output error of the activated network
float TrainNetwork(network_t* n, float *yd)
{
	int i,j,k;
	float error;
	float *W;
	layer_t* pl;
	layer_t* pl_next;

	// update the output layer
	error = 0;
	pl = &n->layers[n->size-1];
	W = pl->W;

	for(i=0;i<pl->depth;i++)
	{
		pl->g[i] = pl->y[i]*(1 - pl->y[i])	// local gradient
			*(yd[i] - pl->y[i]);		// output error

		error += (yd[i] - pl->y[i])*(yd[i] - pl->y[i]);	// squared error

		// update the layer weights
		for(j=0;j<pl->width;j++)
		{
			(*W) += (n->lr)*	// learning rate
				(pl->g[i])*	// gradient
				(pl->x[j]);	// signal
			W++;
		}
	}

	error = sqrt(error)/(pl->depth);	// compute average error

	// compute error and update weights
	for(k=n->size-2;k>=0;k--)	// for each layer in the network
	{
		pl = &n->layers[k];
		pl_next = &n->layers[k+1];

		// calculate weighted gradient of next layer
		WeightedGradient(pl_next,pl->g);
		// calculate the local gradient and update the layer weights
		W = pl->W;
		for(i=0;i<pl->depth;i++)
		{
			pl->g[i] *= pl->y[i]*(1 - pl->y[i]);	// local gradient

			for(j=0;j<pl->width;j++)
			{
				(*W) += (n->lr)*	// learning rate
					(pl->g[i])*	// gradient
					(pl->x[j]);	// signal
				W++;
			}
		}
	}

	return error;
}


#ifdef LINUX

// print the outputs of all layers
void PrintNetwork(network_t* n)
{
	int i;

	for(i=0;i<n->size;i++)
	{
		PrintLayerOutput(&n->layers[i]);
	}
	printf("\n");
}

// print the output of the last layer only
void PrintNetworkOutput(network_t* n)
{
	PrintLayerOutput(&n->layers[n->size-1]);
}

// save all layer weights to a binary file
void SaveNetworkWeights(network_t* n, const char* filename)
{
	FILE *fp;
	int i;

	if((fp=fopen(filename,"w+b"))==NULL)
	{
		printf("Cannot open %s\n",filename);
		exit(1);
	}

	for(i=0;i<n->size;i++)
	{
		SaveLayerWeights(&n->layers[i],fp);
	}

	fclose(fp);
}

// load all layer weights from a binary file
void LoadNetworkWeights(network_t* n, const char* filename)
{
	FILE *fp;
	int i;

	if((fp=fopen(filename,"rb"))==NULL)
	{
		printf("Cannot open %s\n",filename);
		exit(1);
	}

	for(i=0;i<n->size;i++)
	{
		LoadLayerWeights(&n->layers[i],fp);
	}

	fclose(fp);
}

// save all layer weights as a C source file (HDT format)
void SaveNetworkWeightsHDT(network_t* n, const char* filename)
{
	FILE *fp;
	int i;

	if((fp=fopen(filename,"w+"))==NULL)
	{
		printf("Cannot open %s\n",filename);
		exit(1);
	}

	fprintf(fp,"#include \"eyebot_neural_types.h\"\n\nnetwork_weight_data_t W=\n{\n");

	for(i=0;i<n->size;i++)
	{
		fprintf(fp,"// LAYER %d\n",i);
		SaveLayerWeightsHDT(&n->layers[i],fp);
		if(i<n->size-1)
		{
			fprintf(fp,",");
		}
		fprintf(fp,"\n");
	}
	fprintf(fp,"};\n\n");

	fclose(fp);
}

#endif


#ifdef EYEBOT
#include "eyebot.h"
#include "eyebot_neural_types.h"
#include <string.h>	// for memcpy()

// load the hidden and output layer weights from the EyeBot HDT
void LoadNetworkWeightsHDT(network_t* n, DeviceSemantics device)
{
	network_weight_data_t *W;

	W = (network_weight_data_t*) HDTFindEntry(NEURALNET,device);
	if(W==NULL)
		OSPanic("Can't find\n neural data\n");

	memcpy(n->layers[0].W,W->W_HID,sizeof(W->W_HID));
	memcpy(n->layers[1].W,W->W_OUT,sizeof(W->W_OUT));
}
#endif
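
#ifdef BACKPROP_EXAMPLE
/*
 * Minimal usage sketch (not part of the original library).
 *
 * Shows one way the functions above could be wired together to build and
 * train a small two-layer network. It assumes layer_t exposes W, x, y, g,
 * depth and width, and that network_t exposes layers, size and lr, exactly
 * as they are referenced in this file; the authoritative definitions live
 * in backprop.h and may differ (e.g. layers could be a fixed-size array
 * rather than a pointer, making the assignment below unnecessary). The
 * helper AllocLayer and the BACKPROP_EXAMPLE guard are illustrative
 * additions only. stdio.h and stdlib.h are already included above.
 */

static void AllocLayer(layer_t* l, int width, int depth)
{
	l->width = width;	// number of inputs to each neuron
	l->depth = depth;	// number of neurons in the layer
	l->W = (float*) malloc(sizeof(float)*width*depth);	// weight matrix
	l->x = (float*) malloc(sizeof(float)*width);		// input vector
	l->y = (float*) malloc(sizeof(float)*depth);		// output vector
	l->g = (float*) malloc(sizeof(float)*depth);		// gradient vector
}

int main(void)
{
	network_t n;
	layer_t layers[2];
	float in[2]     = {0.0f, 1.0f};
	float target[1] = {1.0f};
	float out[1];
	int i;

	AllocLayer(&layers[0], 2, 3);	// hidden layer: 2 inputs, 3 neurons
	AllocLayer(&layers[1], 3, 1);	// output layer: 3 inputs, 1 neuron

	n.layers = layers;	// assumes a layer_t* member; see note above
	n.size   = 2;
	n.lr     = 0.5f;	// learning rate

	RandomizeNetwork(&n);

	// repeatedly present the pattern and adapt the weights
	for(i=0;i<1000;i++)
	{
		InputToNetwork(&n, in);
		ActivateNetwork(&n);
		TrainNetwork(&n, target);
	}

	OutputFromNetwork(&n, out);
	printf("output after training: %4.3f\n", out[0]);

	return 0;
}
#endif /* BACKPROP_EXAMPLE */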