OCR project
NeuralNetwork/Load.c (new file, 171 lines)
@@ -0,0 +1,171 @@
#include "Load.h"

/* Nearest-neighbour resize of a w x h matrix to x x y (used here with x == y). */
double *resizearray(double *img, int w, int h, int x, int y)
{
    double *image = (double *)calloc(x * y, sizeof(double));
    float ratioX = (float)w / x;
    float ratioY = (float)h / y;
    for (int i = 0; i < x; ++i)
    {
        for (int j = 0; j < y; ++j)
        {
            image[i * x + j] =
                img[((int)(i * ratioY)) * w + ((int)(j * ratioX))];
        }
    }
    return image;
}

int FindBlackPixelrow(double *img, int w, int x)
{
    int found = 0;
    /* Whether row x contains at least one black pixel. */
    double pixel;

    for (int i = 0; i < w; i++)
    {
        pixel = img[x * w + i];
        if (pixel == 1)
        {
            found = 1;
            break;
        }
    }
    return found;
}

int FindBlackPixelcol(double *img, int w, int h, int x)
{
    int found = 0;
    /* Whether column x contains at least one black pixel. */
    double pixel;

    for (int i = 0; i < h; i++)
    {
        pixel = img[i * w + x];
        if (pixel == 1)
        {
            found = 1;
            break;
        }
    }
    return found;
}

/* Binarize the surface: dark pixels become 1.0, light pixels 0.0. */
void get_binerize_matrix(SDL_Surface *img, double *image)
{
    /* Variables */
    Uint32 pixel;
    Uint8 r;
    Uint8 g;
    Uint8 b;
    int w = img->w;
    int h = img->h;

    for (int i = 0; i < h; i++)
    {
        for (int j = 0; j < w; j++)
        {
            pixel = getpixel(img, j, i);
            SDL_GetRGB(pixel, img->format, &r, &g, &b);

            Uint32 average = (r + b + g) / 3;

            if (average > 150) /* fixed threshold; an image-wide average could be used instead */
                image[i * w + j] = 0.0;
            else
                image[i * w + j] = 1.0;
        }
    }
}

/* Crop the glyph's bounding box, pad it to a square and rescale to size x size. */
double *resizechar(SDL_Surface *img, int size)
{
    int startcol = 0;
    int endcol = 0;
    int startrow = 0;
    int endrow = 0;

    int img_w = img->w;
    int img_h = img->h;

    double *img_array = (double *)malloc(img_h * img_w * sizeof(double));

    get_binerize_matrix(img, img_array);

    for (int i = 0; i < img_w; i++)
    {
        if (FindBlackPixelcol(img_array, img_w, img_h, i))
        {
            startcol = i;
            break;
        }
    }
    for (int i = img_w - 1; i >= 0; i--)
    {
        if (FindBlackPixelcol(img_array, img_w, img_h, i))
        {
            endcol = i + 1;
            break;
        }
    }
    for (int i = 0; i < img_h; i++)
    {
        if (FindBlackPixelrow(img_array, img_w, i))
        {
            startrow = i;
            break;
        }
    }
    for (int i = img_h - 1; i >= 0; i--)
    {
        if (FindBlackPixelrow(img_array, img_w, i))
        {
            endrow = i + 1;
            break;
        }
    }

    double *img_carre;
    int img_carre_size;
    int lencol = endcol - startcol;
    int lenrow = endrow - startrow;

    if (lencol > lenrow)
    {
        img_carre_size = lencol;
        img_carre = (double *)calloc(lencol * lencol, sizeof(double));
        int start = lencol / 2 - lenrow / 2;
        for (int k = startrow; k < endrow; k++)
        {
            for (int z = startcol; z < endcol; z++)
            {
                img_carre[(k - startrow + start) * lencol + z - startcol] =
                    img_array[k * img_w + z];
            }
        }
    }
    else
    {
        img_carre_size = lenrow;
        img_carre = (double *)calloc(lenrow * lenrow, sizeof(double));
        int start = lenrow / 2 - lencol / 2;
        for (int k = startrow; k < endrow; k++)
        {
            for (int z = startcol; z < endcol; z++)
            {
                img_carre[(k - startrow) * lenrow + z - startcol + start] =
                    img_array[k * img_w + z];
            }
        }
    }

    double *image;
    image = resizearray(img_carre, img_carre_size, img_carre_size, size, size);

    SDL_FreeSurface(img);
    free(img_array);
    free(img_carre);

    return image;
}
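For reference, a minimal usage sketch of the loader above (not part of the commit): it loads a glyph bitmap, lets resizechar() crop, square and rescale it to a 20x20 binary matrix, and dumps the matrix as ASCII art. The file name example_glyph.bmp is hypothetical, and the sketch assumes an SDL2 setup where SDL_LoadBMP can be called directly.

#include "Load.h"

int main(void)
{
    /* Hypothetical input file; any small dark-on-light glyph works. */
    SDL_Surface *glyph = SDL_LoadBMP("example_glyph.bmp");
    if (!glyph)
        errx(1, "SDL_LoadBMP failed: %s", SDL_GetError());

    /* resizechar() binarizes, crops to the glyph, pads it to a square and
     * rescales to size x size; it also frees the surface. */
    double *matrix = resizechar(glyph, 20);

    for (int i = 0; i < 20; i++)
    {
        for (int j = 0; j < 20; j++)
            putchar(matrix[i * 20 + j] == 1.0 ? '#' : '.');
        putchar('\n');
    }

    free(matrix);
    return 0;
}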
NeuralNetwork/Load.h (new file, 21 lines)
@@ -0,0 +1,21 @@
#ifndef LOAD_H
#define LOAD_H

#include <SDL2/SDL.h>
#include <err.h>
#include <stdio.h>
#include <stdlib.h>

#include "../ImageTreatment/Tools/tools.h"

int FindBlackPixelrow(double *img, int w, int x);

int FindBlackPixelcol(double *img, int w, int h, int x);

double *resizechar(SDL_Surface *img, int size);

double *resizearray(double *img, int w, int h, int x, int y);

void get_binerize_matrix(SDL_Surface *img, double *image);

#endif
NeuralNetwork/structure.c (new file, 135 lines)
@@ -0,0 +1,135 @@
#include "structure.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include "toolsnetworks.h"
#include "training.h"
#include "traitement.h"

// Definition of the neural network structures.

Neural_Network_Cell Create_Cell(int nb_weight)
{
    Neural_Network_Cell cell;
    cell.nb_weight = nb_weight;
    cell.biais = 0;
    cell.output = 0;
    cell.weights = (double *)malloc(nb_weight * sizeof(double));
    cell.previous_dError = (double *)malloc(nb_weight * sizeof(double));
    return cell;
}

Neural_Network_Layer Create_Layer(int nb_cell, int nb_weight)
{
    Neural_Network_Layer layer;
    layer.nb_cells = nb_cell;
    layer.cells =
        (Neural_Network_Cell *)malloc(nb_cell * sizeof(Neural_Network_Cell));

    for (int i = 0; i < nb_cell; i++)
    {
        *(layer.cells + i) = Create_Cell(nb_weight);
    }
    return layer;
}

void Free_Network(Neural_Network *network)
{
    for (int i = 0; i < network->nb_layers; i++)
    {
        for (int j = 0; j < network->layers[i].nb_cells; j++)
        {
            free(network->layers[i].cells[j].weights);
            free(network->layers[i].cells[j].previous_dError);
        }
        free(network->layers[i].cells);
    }
    free(network->layers);
    free(network);
}

int getIndiceMax(Neural_Network *network)
{
    Neural_Network_Layer layer = network->layers[network->nb_layers - 1];
    int i_max = 0;
    for (int i = 0; i < layer.nb_cells; i++)
    {
        if (layer.cells[i].output > layer.cells[i_max].output)
            i_max = i;
    }
    return i_max;
}

int Save_Network(Neural_Network *network, char *filename)
{
    FILE *file;
    char path[100];
    sprintf(path, "src/SaveNeuralNetwork/%s", filename);
    file = fopen(path, "w");

    if (!file)
        return 0;

    for (int i = 0; i < network->nb_layers; i++)
    {
        int nb_c = network->layers[i].nb_cells;

        for (int j = 0; j < nb_c; j++)
        {
            int nb_w = network->layers[i].cells[j].nb_weight;

            for (int k = 0; k < nb_w; k++)
            {
                fprintf(file, "%f\n", network->layers[i].cells[j].weights[k]);
            }

            fprintf(file, "%f\n", network->layers[i].cells[j].biais);
        }
    }

    fclose(file);
    return 1;
}

int Load_Network(Neural_Network *network, char *filename)
{
    FILE *file;
    char path[100];
    sprintf(path, "src/SaveNeuralNetwork/%s", filename);
    file = fopen(path, "r");

    if (!file)
        return 0;

    char *cvalue = calloc(128, sizeof(char));
    double value;
    char *ptr;

    for (int i = 0; i < network->nb_layers; i++)
    {
        int nb_c = network->layers[i].nb_cells;

        for (int j = 0; j < nb_c; j++)
        {
            int nb_w = network->layers[i].cells[j].nb_weight;

            for (int k = 0; k < nb_w; k++)
            {
                fgets(cvalue, 128, file);
                value = strtod(cvalue, &ptr);
                network->layers[i].cells[j].weights[k] = value;
            }

            fgets(cvalue, 128, file);
            value = strtod(cvalue, &ptr);
            network->layers[i].cells[j].biais = value;
        }
    }

    fclose(file);
    free(cvalue);
    return 1;
}
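As a quick illustration of the plain-text format written by Save_Network (one "%f" per line: all weights of a cell followed by its bias, cells and layers in order), here is a hedged round-trip sketch, not part of the commit. It assumes Initialisation() from traitement.c is used to build both networks and that the src/SaveNeuralNetwork/ directory exists; the file name test.txt is made up.

#include <stdlib.h>
#include <time.h>

#include "structure.h"
#include "traitement.h"

int main(void)
{
    srand(time(NULL)); /* my_rand() used by Initialisation() relies on rand() */

    Neural_Network *a = malloc(sizeof(Neural_Network));
    Neural_Network *b = malloc(sizeof(Neural_Network));
    Initialisation(a); /* random weights to save */
    Initialisation(b); /* values will be overwritten by Load_Network */

    if (!Save_Network(a, "test.txt") || !Load_Network(b, "test.txt"))
        return 1;

    /* b now holds the same weights and biases as a, up to the "%f"
     * precision used when saving. */
    Free_Network(a);
    Free_Network(b);
    return 0;
}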
NeuralNetwork/structure.h (new file, 36 lines)
@@ -0,0 +1,36 @@
#ifndef STRUCTURE_H
#define STRUCTURE_H

typedef struct
{
    int nb_weight;
    double *weights;
    double *previous_dError;
    double biais;
    double output;
} Neural_Network_Cell;

typedef struct
{
    int nb_cells;
    Neural_Network_Cell *cells;
} Neural_Network_Layer;

typedef struct
{
    int nboutput;
    int nb_layers;
    Neural_Network_Layer *layers;
    double output;
} Neural_Network;

Neural_Network_Cell Create_Cell(int nb_weight);
Neural_Network_Layer Create_Layer(int nb_cell, int nb_weight);

void Free_Network(Neural_Network *network);

int getIndiceMax(Neural_Network *network);
int Save_Network(Neural_Network *network, char *filename);
int Load_Network(Neural_Network *network, char *filename);

#endif
NeuralNetwork/toolsnetworks.c (new file, 98 lines)
@@ -0,0 +1,98 @@
#include "toolsnetworks.h"

#include <SDL2/SDL_image.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <time.h>

#include "Load.h"
#include "structure.h"
#include "training.h"
#include "traitement.h"

// Utility and math helper functions.

float my_rand(void)
{
    // Pseudo-random value approximately uniform in [-1, 1).
    return ((float)(rand() % 10000) / 5000) - 1;
}

double sigmoid(double val)
{
    return (1.0 / (1.0 + exp(-1.0 * val)));
}

void softmax(Neural_Network_Layer *layer)
{
    // Shift the outputs by their maximum before exponentiating: softmax is
    // shift-invariant, and this keeps exp() from overflowing.
    double max = 0;
    for (int i = 0; i < layer->nb_cells; i++)
    {
        if (layer->cells[i].output > max)
            max = layer->cells[i].output;
    }

    double somme = 0;
    for (int i = 0; i < layer->nb_cells; i++)
    {
        layer->cells[i].output -= max;
        somme += exp(layer->cells[i].output);
    }

    for (int i = 0; i < layer->nb_cells; i++)
    {
        layer->cells[i].output = (exp(layer->cells[i].output) / somme);
    }
}

char indiceToChar(int indice)
{
    if (indice < 26)
        return indice + 97; // 0..25  -> 'a'..'z'
    else if (indice < 52)
        return indice + 39; // 26..51 -> 'A'..'Z'
    else if (indice < 62)
        return indice - 4;  // 52..61 -> '0'..'9'
    else
    {
        if (indice == 62)
            return 46;      // '.'
        else if (indice == 63)
            return 44;      // ','
        else
            return 39;      // '\''
    }
}

double *imagetomatrix(char *str, int size)
{
    SDL_Surface *loadedImage = 0;
    loadedImage = SDL_LoadBMP(str);
    double *img = NULL;

    if (!loadedImage)
    {
        printf("Can't find the bmp file, %s\n", str);
        return img;
    }

    img = resizechar(loadedImage, size);
    return img;
}

double *segmentationtomatrix(SDL_Surface *loadedImage, int size)
{
    double *img = NULL;

    if (!loadedImage)
    {
        printf("Can't find the bmp file\n");
        return img;
    }

    img = resizechar(loadedImage, size);

    return img;
}
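The branches of indiceToChar map the 65 output indices onto ASCII: 0-25 to 'a'-'z', 26-51 to 'A'-'Z', 52-61 to '0'-'9', then '.', ',' and '\'' for 62-64. A small sketch (not in the commit) that prints the full mapping, handy for sanity-checking the offsets:

#include <stdio.h>

#include "toolsnetworks.h"

int main(void)
{
    // Print every class index next to the character it decodes to.
    for (int i = 0; i < 65; i++)
        printf("%2d -> %c\n", i, indiceToChar(i));
    return 0;
}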
NeuralNetwork/toolsnetworks.h (new file, 19 lines)
@@ -0,0 +1,19 @@
#ifndef TOOLS__NETWORKS_H
#define TOOLS__NETWORKS_H

#include <SDL2/SDL.h>

#include "structure.h"

float my_rand(void);

void softmax(Neural_Network_Layer *layer);

char indiceToChar(int indice);

double sigmoid(double val);

double *imagetomatrix(char *str, int size);
double *segmentationtomatrix(SDL_Surface *loadedImage, int size);

#endif
NeuralNetwork/training.c (new file, 87 lines)
@@ -0,0 +1,87 @@
#include "training.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include "structure.h"
#include "toolsnetworks.h"
#include "traitement.h"

#define NB_OUTPUT 65

void training(Neural_Network *network, int nb_repetition)
{
    int nbchar = 65;
    int nbimageschar = 53;
    int datasetsize = nbchar * nbimageschar;

    // Build the input array (one entry per dataset sample) and, at the same
    // time, initialise the expected output to 0 for every sample
    // (datasetsize arrays of NB_OUTPUT values, all set to 0).
    double **input = (double **)malloc(datasetsize * sizeof(double *));
    double **cost = (double **)malloc(datasetsize * sizeof(double *));

    char str[100];

    for (int i = 0; i < datasetsize; i++)
    {
        cost[i] = (double *)calloc(NB_OUTPUT, sizeof(double));
    }

    for (int i = 0; i < nbimageschar; i++)
    {
        for (int j = 0; j < nbchar; j++)
        {
            sprintf(str, "src/Dataset/image-%d-%d.bmp", j, i + 1);
            input[i * nbchar + j] = imagetomatrix(str, 20);
            cost[i * nbchar + j][j] = 1;
        }
    }

    double err = 1.0;

    for (int i = 0; i < nb_repetition; i++)
    {
        err = 0;

        for (int j = 0; j < datasetsize; j++)
        {
            // Run a forward pass on this sample, i.e. see what the network
            // returns for the pixel matrix of the loaded image.
            ForwardPass(input[j], network);

            double tmp_err = 0.0;

            for (int k = 0; k < NB_OUTPUT; k++)
            {
                tmp_err += (cost[j][k] - network->layers[1].cells[k].output)
                    * (cost[j][k] - network->layers[1].cells[k].output);
            }

            tmp_err /= NB_OUTPUT;
            err += tmp_err;

            BackwardPass(cost[j], input[j], network);
        }

        err /= datasetsize;
        printf("Error: %f\n", err);
    }

    for (int i = 0; i < datasetsize; i++)
    {
        free(input[i]);
    }

    for (int i = 0; i < datasetsize; i++)
    {
        free(cost[i]);
    }

    free(input);
    free(cost);
}
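A hedged sketch of how training() is presumably driven (no main() is part of this commit): seed the PRNG behind my_rand(), build the two-layer network with Initialisation(), train for some number of epochs and persist the result with Save_Network(). The epoch count and the file name are placeholders, and the dataset is expected under src/Dataset/ as image-<class>-<n>.bmp with class in 0..64 and n in 1..53, as the sprintf in training() implies.

#include <stdlib.h>
#include <time.h>

#include "structure.h"
#include "training.h"
#include "traitement.h"

int main(void)
{
    srand(time(NULL)); /* my_rand() relies on rand() */

    Neural_Network *network = malloc(sizeof(Neural_Network));
    Initialisation(network);

    training(network, 50);            /* placeholder epoch count */
    Save_Network(network, "ocr.txt"); /* placeholder save-file name */

    Free_Network(network);
    return 0;
}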
NeuralNetwork/training.h (new file, 8 lines)
@@ -0,0 +1,8 @@
#ifndef TRAINING_H
#define TRAINING_H

#include "structure.h"

void training(Neural_Network *network, int nb_repetition);

#endif
NeuralNetwork/traitement.c (new file, 129 lines)
@@ -0,0 +1,129 @@
#include "traitement.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#include "structure.h"
#include "toolsnetworks.h"
#include "training.h"

#define NB_PIXEL 400
#define NB_OUTPUT 65
#define DELTA 0.2

// Processing functions of the neural network: initialisation, forward pass
// and backpropagation.

void Initialisation(Neural_Network *network)
{
    network->nb_layers = 2;
    network->layers = (Neural_Network_Layer *)malloc(
        network->nb_layers * sizeof(Neural_Network_Layer));
    network->layers[0] = Create_Layer((int)NB_PIXEL * 2 / 3, NB_PIXEL);
    network->layers[1] = Create_Layer(NB_OUTPUT, (int)NB_PIXEL * 2 / 3);
    network->output = 0.0;
    network->nboutput = NB_OUTPUT;

    for (int i = 0; i < network->nb_layers; i++)
    {
        for (int j = 0; j < network->layers[i].nb_cells; j++)
        {
            network->layers[i].cells[j].biais = my_rand();
            for (int k = 0; k < network->layers[i].cells[j].nb_weight; k++)
            {
                network->layers[i].cells[j].weights[k] = my_rand();
            }
        }
    }
}

void ForwardPass(double entries[], Neural_Network *network)
{
    // Going from the XOR network to the OCR one, the first layer is handled
    // the same way.
    Neural_Network_Layer layer, previous_layer;

    // First layer treatment
    layer = (*network).layers[0];
    for (int i = 0; i < layer.nb_cells; i++)
    {
        Neural_Network_Cell cell = layer.cells[i];
        double tmp = cell.biais;
        for (int j = 0; j < cell.nb_weight; j++)
        {
            tmp += cell.weights[j] * entries[j];
        }

        (*network).layers[0].cells[i].output = sigmoid(tmp);
    }

    // This time there is more than one output node, so network.output is no
    // longer a single int result; the index of the winning output cell is
    // stored instead, and it can be mapped to a char to read the answer
    // returned by the network.

    // Output layer treatment
    layer = (*network).layers[(*network).nb_layers - 1];
    previous_layer = (*network).layers[0];

    for (int i = 0; i < layer.nb_cells; i++)
    {
        Neural_Network_Cell cell = layer.cells[i];
        double tmp = cell.biais;

        for (int k = 0; k < cell.nb_weight; k++)
        {
            tmp += cell.weights[k] * previous_layer.cells[k].output;
        }

        (*network).layers[1].cells[i].output = tmp;
    }

    // layer.cells aliases network->layers[1].cells, so this updates the
    // network's output cells in place.
    softmax(&layer);
    (*network).output = getIndiceMax(network);
}

void BackwardPass(double *expected, double *entries, Neural_Network *network)
{
    for (int i = 0; i < network->nboutput; i++)
    {
        double cell_output = (*network).layers[1].cells[i].output;
        double dCell_output = cell_output * (1 - cell_output);
        double dError = (expected[i] - cell_output);

        for (int j = 0; j < (*network).layers[1].cells[i].nb_weight; j++)
        {
            double f = (*network).layers[0].cells[j].output;

            (*network).layers[1].cells[i].previous_dError[j] =
                (*network).layers[1].cells[i].weights[j] * dCell_output
                * dError;

            (*network).layers[1].cells[i].weights[j] +=
                DELTA * f * dCell_output * dError;
        }

        (*network).layers[1].cells[i].biais += DELTA * dCell_output * dError;
    }

    for (int i = 0; i < (*network).layers[0].nb_cells; i++)
    {
        double cell_output = (*network).layers[0].cells[i].output;
        double dg = cell_output * (1 - cell_output);
        double dError = 0;

        for (int j = 0; j < (*network).layers[1].nb_cells; j++)
        {
            dError += (*network).layers[1].cells[j].previous_dError[i];
        }

        for (int j = 0; j < (*network).layers[0].cells[i].nb_weight; j++)
        {
            double f = entries[j];
            (*network).layers[0].cells[i].weights[j] += DELTA * f * dg * dError;
        }

        (*network).layers[0].cells[i].biais += DELTA * dg * dError;
    }
}
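To read a prediction back out of the network, the pieces above chain as follows (a sketch under the assumption that a previously saved network exists; the file and image names are hypothetical): imagetomatrix() produces the NB_PIXEL-long input, ForwardPass() fills the output layer, and getIndiceMax() plus indiceToChar() turn the winning output cell into a character.

#include <stdio.h>
#include <stdlib.h>

#include "structure.h"
#include "toolsnetworks.h"
#include "traitement.h"

int main(void)
{
    Neural_Network *network = malloc(sizeof(Neural_Network));
    Initialisation(network);
    if (!Load_Network(network, "ocr.txt")) /* hypothetical save file */
        return 1;

    /* 20 x 20 = 400 pixels, matching NB_PIXEL. */
    double *pixels = imagetomatrix("letter.bmp", 20); /* hypothetical image */
    if (!pixels)
        return 1;

    ForwardPass(pixels, network);
    printf("Predicted character: %c\n", indiceToChar(getIndiceMax(network)));

    free(pixels);
    Free_Network(network);
    return 0;
}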
NeuralNetwork/traitement.h (new file, 11 lines)
@@ -0,0 +1,11 @@
#ifndef TRAITEMENT_H
#define TRAITEMENT_H

#include "structure.h"

void Initialisation(Neural_Network *network);

void ForwardPass(double entries[], Neural_Network *network);

void BackwardPass(double *expected, double *entries, Neural_Network *network);

#endif