Mirror of https://github.com/codeplea/genann.git (synced 2025-10-03 08:42:44 +00:00)
Changed name case, code style.
README.md (32 lines changed)
@@ -1,6 +1,6 @@
-#GENANN
+#Genann
 
-GENANN is a very minimal library for training and using feedforward artificial neural
+Genann is a very minimal library for training and using feedforward artificial neural
 networks (ANN) in C. Its primary focus is on being simple, fast, and hackable. It achieves
 this by providing only the necessary functions and little extra.
 
@@ -39,7 +39,7 @@ double **training_data_input, **training_data_output, **test_data_input;
 /* New network with 5 inputs,
  * 2 hidden layer of 10 neurons each,
  * and 1 output. */
-GENANN *ann = genann_init(5, 2, 10, 1);
+genann *ann = genann_init(5, 2, 10, 1);
 
 /* Learn on the training set. */
 for (i = 0; i < 300; ++i) {
@@ -63,23 +63,23 @@ prevent over-fitting.
 
 ###Creating and Freeing ANNs
 ```C
-GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
-GENANN *genann_copy(GENANN const *ann);
-void genann_free(GENANN *ann);
+genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
+genann *genann_copy(genann const *ann);
+void genann_free(genann *ann);
 ```
 
 Creating a new ANN is done with the `genann_init()` function. It's arguments
 are the number of inputs, the number of hidden layers, the number of neurons in
-each hidden layer, and the number of outputs. It returns a `GENANN` struct pointer.
+each hidden layer, and the number of outputs. It returns a `genann` struct pointer.
 
-Calling `genann_copy()` will create a deep-copy of an existing GENANN struct.
+Calling `genann_copy()` will create a deep-copy of an existing `genann` struct.
 
 Call `genann_free()` when you're finished with an ANN returned by `genann_init()`.
 
 
 ###Training ANNs
 ```C
-void genann_train(GENANN const *ann, double const *inputs,
+void genann_train(genann const *ann, double const *inputs,
     double const *desired_outputs, double learning_rate);
 ```
 
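For readers following the README changes above, here is a complete create/train/run/free cycle written against the renamed lowercase API. It is a minimal sketch modelled on the XOR example the README refers to; the data, loop counts, and learning rate are illustrative rather than taken from this diff.

```C
#include <stdio.h>
#include "genann.h"

int main(void) {
    /* Four labeled XOR data points (illustrative values). */
    const double in[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
    const double out[4] = {0, 1, 1, 0};
    int i, j;

    /* 2 inputs, 1 hidden layer of 2 neurons, 1 output. */
    genann *ann = genann_init(2, 1, 2, 1);
    if (!ann) return 1;

    /* genann_train() does a single backprop update per call,
     * so loop over the data set many times. */
    for (i = 0; i < 500; ++i) {
        for (j = 0; j < 4; ++j) {
            genann_train(ann, in[j], out + j, 3);
        }
    }

    printf("XOR(1, 0) = %f\n", *genann_run(ann, in[2]));

    genann_free(ann);
    return 0;
}
```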
@@ -88,14 +88,14 @@ should be called by passing in an array of inputs, an array of expected output,
 and a learning rate. See *example1.c* for an example of learning with
 backpropogation.
 
-A primary design goal of GENANN was to store all the network weights in one
+A primary design goal of Genann was to store all the network weights in one
 contigious block of memory. This makes it easy and efficient to train the
 network weights using direct-search numeric optimizion algorthims,
 such as [Hill Climbing](https://en.wikipedia.org/wiki/Hill_climbing),
 [the Genetic Algorithm](https://en.wikipedia.org/wiki/Genetic_algorithm), [Simulated
 Annealing](https://en.wikipedia.org/wiki/Simulated_annealing), etc.
 These methods can be used by searching on the ANN's weights directly.
-Every `GENANN` struct contains the members `int total_weights;` and
+Every `genann` struct contains the members `int total_weights;` and
 `double *weight;`. `*weight` points to an array of `total_weights`
 size which contains all weights used by the ANN. See *example2.c* for
 an example of training using random hill climbing search.
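Because the weights sit in one array of `total_weights` doubles, a direct-search optimizer only has to perturb that buffer and measure the resulting error. The sketch below shows one way to do random hill climbing on those members; the `mse()` helper, the dataset shape, and the step size are hypothetical, while the `genann` calls and the `total_weights`/`weight` members come from the library.

```C
#include <stdlib.h>
#include <string.h>
#include "genann.h"

/* Hypothetical mean-squared-error over a small 2-input dataset. */
static double mse(genann const *ann, const double (*in)[2], const double *out, int n) {
    double err = 0;
    int i;
    for (i = 0; i < n; ++i) {
        double d = *genann_run(ann, in[i]) - out[i];
        err += d * d;
    }
    return err / n;
}

void hill_climb(genann *ann, const double (*in)[2], const double *out, int n, int iters) {
    double best = mse(ann, in, out, n);
    int it, i;

    for (it = 0; it < iters; ++it) {
        /* Snapshot the current weights, then perturb every weight a little. */
        genann *save = genann_copy(ann);
        for (i = 0; i < ann->total_weights; ++i) {
            ann->weight[i] += ((double)rand() / RAND_MAX - 0.5) * 0.5;
        }

        double e = mse(ann, in, out, n);
        if (e < best) {
            best = e;  /* keep the improved weights */
        } else {
            /* Worse: restore the saved weights into the contiguous buffer. */
            memcpy(ann->weight, save->weight, sizeof(double) * ann->total_weights);
        }
        genann_free(save);
    }
}
```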
@@ -103,16 +103,16 @@ an example of training using random hill climbing search.
 ###Saving and Loading ANNs
 
 ```C
-GENANN *genann_read(FILE *in);
-void genann_write(GENANN const *ann, FILE *out);
+genann *genann_read(FILE *in);
+void genann_write(genann const *ann, FILE *out);
 ```
 
-GENANN provides the `genann_read()` and `genann_write()` functions for loading or saving an ANN in a text-based format.
+Genann provides the `genann_read()` and `genann_write()` functions for loading or saving an ANN in a text-based format.
 
 ###Evaluating
 
 ```C
-double const *genann_run(GENANN const *ann, double const *inputs);
+double const *genann_run(genann const *ann, double const *inputs);
 ```
 
 Call `genann_run()` on a trained ANN to run a feed-forward pass on a given set of inputs. `genann_run()`
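The read/write pair above round-trips a network through a plain text file, and `genann_run()` returns a pointer into the network's own output array. A short sketch of that round trip; the filename and the minimal error handling are illustrative.

```C
#include <stdio.h>
#include "genann.h"

/* Save ann to a text file, load it back, and run the reloaded copy. */
int save_and_reload(genann const *ann, double const *sample_inputs) {
    FILE *out = fopen("net.txt", "w");
    if (!out) return -1;
    genann_write(ann, out);
    fclose(out);

    FILE *in = fopen("net.txt", "r");
    if (!in) return -1;
    genann *loaded = genann_read(in);
    fclose(in);
    if (!loaded) return -1;

    /* The returned pointer refers to the ANN's internal output buffer. */
    printf("first output of reloaded net: %f\n", *genann_run(loaded, sample_inputs));

    genann_free(loaded);
    return 0;
}
```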
@@ -134,4 +134,4 @@ If you're looking for a heavier, more opinionated neural network library in C,
 I highly recommend the [FANN library](http://leenissen.dk/fann/wp/). Another
 good library is Peter van Rossum's [Lightweight Neural
 Network](http://lwneuralnet.sourceforge.net/), which despite its name, is
-heavier and has more features than GENANN.
+heavier and has more features than Genann.
@@ -14,7 +14,7 @@ int main(int argc, char *argv[])
     /* New network with 2 inputs,
      * 1 hidden layer of 2 neurons,
      * and 1 output. */
-    GENANN *ann = genann_init(2, 1, 2, 1);
+    genann *ann = genann_init(2, 1, 2, 1);
 
     /* Train on the four labeled data points many times. */
     for (i = 0; i < 300; ++i) {
@@ -16,7 +16,7 @@ int main(int argc, char *argv[])
     /* New network with 2 inputs,
      * 1 hidden layer of 2 neurons,
      * and 1 output. */
-    GENANN *ann = genann_init(2, 1, 2, 1);
+    genann *ann = genann_init(2, 1, 2, 1);
 
     double err;
     double last_err = 1000;
@@ -29,7 +29,7 @@ int main(int argc, char *argv[])
        genann_randomize(ann);
     }
 
-    GENANN *save = genann_copy(ann);
+    genann *save = genann_copy(ann);
 
     /* Take a random guess at the ANN weights. */
     for (i = 0; i < ann->total_weights; ++i) {
@@ -16,7 +16,7 @@ int main(int argc, char *argv[])
        exit(1);
     }
 
-    GENANN *ann = genann_read(saved);
+    genann *ann = genann_read(saved);
     fclose(saved);
 
     if (!ann) {
@@ -78,7 +78,7 @@ int main(int argc, char *argv[])
      * 1 hidden layer(s) of 4 neurons.
      * 3 outputs (1 per class)
      */
-    GENANN *ann = genann_init(4, 1, 4, 3);
+    genann *ann = genann_init(4, 1, 4, 3);
 
     int i, j;
     int loops = 5000;
genann.c (34 lines changed)
@@ -75,7 +75,7 @@ double genann_act_threshold(double a) {
 }
 
 
-GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
+genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
     if (hidden_layers < 0) return 0;
     if (inputs < 1) return 0;
     if (outputs < 1) return 0;
@@ -89,8 +89,8 @@ GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
     const int total_neurons = (inputs + hidden * hidden_layers + outputs);
 
     /* Allocate extra size for weights, outputs, and deltas. */
-    const int size = sizeof(GENANN) + sizeof(double) * (total_weights + total_neurons + (total_neurons - inputs));
-    GENANN *ret = malloc(size);
+    const int size = sizeof(genann) + sizeof(double) * (total_weights + total_neurons + (total_neurons - inputs));
+    genann *ret = malloc(size);
     if (!ret) return 0;
 
     ret->inputs = inputs;
@@ -102,7 +102,7 @@ GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
     ret->total_neurons = total_neurons;
 
     /* Set pointers. */
-    ret->weight = (double*)((char*)ret + sizeof(GENANN));
+    ret->weight = (double*)((char*)ret + sizeof(genann));
     ret->output = ret->weight + ret->total_weights;
     ret->delta = ret->output + ret->total_neurons;
 
@@ -115,11 +115,11 @@ GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
 }
 
 
-GENANN *genann_read(FILE *in) {
+genann *genann_read(FILE *in) {
     int inputs, hidden_layers, hidden, outputs;
     fscanf(in, "%d %d %d %d", &inputs, &hidden_layers, &hidden, &outputs);
 
-    GENANN *ann = genann_init(inputs, hidden_layers, hidden, outputs);
+    genann *ann = genann_init(inputs, hidden_layers, hidden, outputs);
 
     int i;
     for (i = 0; i < ann->total_weights; ++i) {
@@ -130,15 +130,15 @@ GENANN *genann_read(FILE *in) {
 }
 
 
-GENANN *genann_copy(GENANN const *ann) {
-    const int size = sizeof(GENANN) + sizeof(double) * (ann->total_weights + ann->total_neurons + (ann->total_neurons - ann->inputs));
-    GENANN *ret = malloc(size);
+genann *genann_copy(genann const *ann) {
+    const int size = sizeof(genann) + sizeof(double) * (ann->total_weights + ann->total_neurons + (ann->total_neurons - ann->inputs));
+    genann *ret = malloc(size);
     if (!ret) return 0;
 
     memcpy(ret, ann, size);
 
     /* Set pointers. */
-    ret->weight = (double*)((char*)ret + sizeof(GENANN));
+    ret->weight = (double*)((char*)ret + sizeof(genann));
     ret->output = ret->weight + ret->total_weights;
     ret->delta = ret->output + ret->total_neurons;
 
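The `genann_init()` and `genann_copy()` hunks above show that the struct header, the weights, the neuron outputs, and the deltas all live in one `malloc()` block, with `weight`, `output`, and `delta` pointing at consecutive slices of it. That is why `genann_copy()` can `memcpy()` the whole block and then only re-anchor the three pointers. A small hypothetical helper that asserts those layout invariants:

```C
#include <assert.h>
#include "genann.h"

/* Hypothetical sanity check of the single-block layout set up above. */
static void check_layout(genann const *ann) {
    /* Weights begin immediately after the struct header. */
    assert((char const *)ann->weight == (char const *)ann + sizeof(genann));

    /* Outputs follow the weights; deltas follow the outputs. */
    assert(ann->output == ann->weight + ann->total_weights);
    assert(ann->delta == ann->output + ann->total_neurons);
}
```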
@@ -146,7 +146,7 @@ GENANN *genann_copy(GENANN const *ann) {
 }
 
 
-void genann_randomize(GENANN *ann) {
+void genann_randomize(genann *ann) {
     int i;
     for (i = 0; i < ann->total_weights; ++i) {
         double r = GENANN_RANDOM();
@@ -156,13 +156,13 @@ void genann_randomize(GENANN *ann) {
 }
 
 
-void genann_free(GENANN *ann) {
+void genann_free(genann *ann) {
     /* The weight, output, and delta pointers go to the same buffer. */
     free(ann);
 }
 
 
-double const *genann_run(GENANN const *ann, double const *inputs) {
+double const *genann_run(genann const *ann, double const *inputs) {
     double const *w = ann->weight;
     double *o = ann->output + ann->inputs;
     double const *i = ann->output;
@@ -173,8 +173,8 @@ double const *genann_run(GENANN const *ann, double const *inputs) {
 
     int h, j, k;
 
-    const GENANN_ACTFUN act = ann->activation_hidden;
-    const GENANN_ACTFUN acto = ann->activation_output;
+    const genann_actfun act = ann->activation_hidden;
+    const genann_actfun acto = ann->activation_output;
 
     /* Figure hidden layers, if any. */
     for (h = 0; h < ann->hidden_layers; ++h) {
@@ -217,7 +217,7 @@ double const *genann_run(GENANN const *ann, double const *inputs) {
 }
 
 
-void genann_train(GENANN const *ann, double const *inputs, double const *desired_outputs, double learning_rate) {
+void genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate) {
     /* To begin with, we must run the network forward. */
     genann_run(ann, inputs);
 
@@ -334,7 +334,7 @@ void genann_train(GENANN const *ann, double const *inputs, double const *desired
 }
 
 
-void genann_write(GENANN const *ann, FILE *out) {
+void genann_write(genann const *ann, FILE *out) {
     fprintf(out, "%d %d %d %d", ann->inputs, ann->hidden_layers, ann->hidden, ann->outputs);
 
     int i;
genann.h (26 lines changed)
@@ -40,18 +40,18 @@ extern "C" {
 #endif
 
 
-typedef double (*GENANN_ACTFUN)(double a);
+typedef double (*genann_actfun)(double a);
 
 
-typedef struct GENANN {
+typedef struct genann {
     /* How many inputs, outputs, and hidden neurons. */
     int inputs, hidden_layers, hidden, outputs;
 
     /* Which activation function to use for hidden neurons. Default: gennann_act_sigmoid_cached*/
-    GENANN_ACTFUN activation_hidden;
+    genann_actfun activation_hidden;
 
     /* Which activation function to use for output. Default: gennann_act_sigmoid_cached*/
-    GENANN_ACTFUN activation_output;
+    genann_actfun activation_output;
 
     /* Total number of weights, and size of weights buffer. */
     int total_weights;
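Since `activation_hidden` and `activation_output` are plain `genann_actfun` pointers in the struct, callers can swap activations per network after `genann_init()`, as the test.c hunks later in this commit do with `genann_act_threshold`. A minimal sketch; the network size here is arbitrary.

```C
#include "genann.h"

/* Build a small network that uses the step/threshold activation instead of
 * the sigmoid default noted in the struct comments above. */
genann *make_threshold_net(void) {
    genann *ann = genann_init(2, 1, 2, 1);
    if (!ann) return 0;

    ann->activation_hidden = genann_act_threshold;
    ann->activation_output = genann_act_threshold;
    return ann;
}
```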
@@ -68,33 +68,33 @@ typedef struct GENANN {
     /* Stores delta of each hidden and output neuron (total_neurons - inputs long). */
     double *delta;
 
-} GENANN;
+} genann;
 
 
 
 /* Creates and returns a new ann. */
-GENANN *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
+genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
 
 /* Creates ANN from file saved with genann_write. */
-GENANN *genann_read(FILE *in);
+genann *genann_read(FILE *in);
 
 /* Sets weights randomly. Called by init. */
-void genann_randomize(GENANN *ann);
+void genann_randomize(genann *ann);
 
 /* Returns a new copy of ann. */
-GENANN *genann_copy(GENANN const *ann);
+genann *genann_copy(genann const *ann);
 
 /* Frees the memory used by an ann. */
-void genann_free(GENANN *ann);
+void genann_free(genann *ann);
 
 /* Runs the feedforward algorithm to calculate the ann's output. */
-double const *genann_run(GENANN const *ann, double const *inputs);
+double const *genann_run(genann const *ann, double const *inputs);
 
 /* Does a single backprop update. */
-void genann_train(GENANN const *ann, double const *inputs, double const *desired_outputs, double learning_rate);
+void genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate);
 
 /* Saves the ann. */
-void genann_write(GENANN const *ann, FILE *out);
+void genann_write(genann const *ann, FILE *out);
 
 
 double genann_act_sigmoid(double a);
test.c (20 lines changed)
@@ -32,7 +32,7 @@
 
 
 void basic() {
-    GENANN *ann = genann_init(1, 0, 0, 1);
+    genann *ann = genann_init(1, 0, 0, 1);
 
     lequal(ann->total_weights, 2);
     double a;
@@ -67,7 +67,7 @@ void basic() {
 
 
 void xor() {
-    GENANN *ann = genann_init(2, 1, 2, 1);
+    genann *ann = genann_init(2, 1, 2, 1);
     ann->activation_hidden = genann_act_threshold;
     ann->activation_output = genann_act_threshold;
 
@@ -102,7 +102,7 @@ void xor() {
 
 
 void backprop() {
-    GENANN *ann = genann_init(1, 0, 0, 1);
+    genann *ann = genann_init(1, 0, 0, 1);
 
     double input, output;
     input = .5;
@@ -121,7 +121,7 @@ void train_and() {
     double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
     double output[4] = {0, 0, 0, 1};
 
-    GENANN *ann = genann_init(2, 0, 0, 1);
+    genann *ann = genann_init(2, 0, 0, 1);
 
     int i, j;
 
@@ -145,7 +145,7 @@ void train_or() {
     double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
     double output[4] = {0, 1, 1, 1};
 
-    GENANN *ann = genann_init(2, 0, 0, 1);
+    genann *ann = genann_init(2, 0, 0, 1);
     genann_randomize(ann);
 
     int i, j;
@@ -171,7 +171,7 @@ void train_xor() {
     double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
     double output[4] = {0, 1, 1, 0};
 
-    GENANN *ann = genann_init(2, 1, 2, 1);
+    genann *ann = genann_init(2, 1, 2, 1);
 
     int i, j;
 
@@ -194,7 +194,7 @@ void train_xor() {
 
 
 void persist() {
-    GENANN *first = genann_init(1000, 5, 50, 10);
+    genann *first = genann_init(1000, 5, 50, 10);
 
     FILE *out = fopen("persist.txt", "w");
     genann_write(first, out);
@@ -202,7 +202,7 @@ void persist() {
 
 
     FILE *in = fopen("persist.txt", "r");
-    GENANN *second = genann_read(in);
+    genann *second = genann_read(in);
     fclose(out);
 
     lequal(first->inputs, second->inputs);
@@ -222,9 +222,9 @@ void persist() {
 
 
 void copy() {
-    GENANN *first = genann_init(1000, 5, 50, 10);
+    genann *first = genann_init(1000, 5, 50, 10);
 
-    GENANN *second = genann_copy(first);
+    genann *second = genann_copy(first);
 
     lequal(first->inputs, second->inputs);
     lequal(first->hidden_layers, second->hidden_layers);