|
Answer» The following code for a neural network, for some infuriating reason, outputs 50 if I use any number above two. Is this just my computer, or my code?
// Code: [Select]
#pragma region "Stuff behind the scenes that I don't feel like looking at"

// Logistic sigmoid shifted right by 5: returns 1 / (1 + e^(5 - input)).
// sigmoid(5) == 0.5; the output saturates toward 1 for large inputs and
// toward 0 for strongly negative ones.
inline double sigmoid(double input) {
    // exp() is both faster and more accurate than pow() with a
    // hand-typed approximation of e.
    return 1.0 / (1.0 + exp(-(input - 5.0)));
}

// A single neuron: holds only its accumulated activation value.
class neuron {
public:
    double current_value; // running sum of weighted inputs fed into this neuron
};
// One network layer: owns a heap-allocated array of `neurons_length` neurons.
class layer {
public:
    // BUG FIX: was declared `NEURONS` (forum auto-capitalization?) while the
    // destructor and every use site say `neurons` -- that does not compile.
    neuron *neurons = nullptr;
    int neurons_length = 0;

    // The neuron array is allocated with new[] (see main), so it must be
    // released with the array form of delete; the original used scalar
    // delete, which is undefined behavior on a new[] allocation.
    ~layer() { delete[] neurons; }
};
// A weighted edge between two neurons.
//
// BUG FIX: the class was declared `CONNECTION` while every use site
// (ReDimPreserve, neural_network) says `connection`, and its destructor
// deleted `previous`/`next` even though those pointers are NON-owning
// views into neuron arrays owned elsewhere (the network's inputs/outputs
// and each layer).  Deleting them is a scalar delete of array interior
// elements (UB) and a double free once connections are copied during
// resizing, so the destructor must do nothing.
class connection {
public:
    neuron *previous = nullptr; // upstream neuron (not owned)
    neuron *next = nullptr;     // downstream neuron (not owned)
    double weight = 0.0;        // multiplier applied to the upstream activation
};
// Grow a new[]-allocated connection array from `size` to `newsize`
// elements, preserving the first `size` entries (a manual "realloc").
// Safe to call with ptr == nullptr when size == 0.
connection *ReDimPreserve(connection *ptr, int size, int newsize) {
    connection *grown = new connection[newsize];
    for (int i = 0; i < size; i++) {
        grown[i] = ptr[i];
    }
    // BUG FIX: the original never released the old buffer, leaking one
    // array per resize.  (Requires connection's destructor to be
    // non-owning, since the copies above alias its neuron pointers.)
    delete[] ptr;
    return grown;
}
class neural_network { public: int inputs_length, outputs_length, layers_length, connections_length; layer *layers; connection *connections; neuron *inputs; neuron *outputs; neural_network() {} neural_network(int inputs_size, int outputs_size, int layers_size, layer *lyrs, neuron *inpts, neuron *outpts, double initial_connection) { this->inputs_length = inputs_size; this->outputs = outpts; this->outputs_length = outputs_size; for (int i = 0; i < outputs_length; i++) { outputs[i].current_value = 0; } this->layers_length = layers_size; this->layers = lyrs; for (int i = 0; i < layers_length; i++) { for (int B = 0; b < layers[i].neurons_length; b++) { layers[i].neurons[b].current_value = 0; } } this->inputs = inpts; this->connections_length = 0; int counter = 0; //connections for (int i = 0; i < layers_length + 1; i++) { if (i == 0) { if (layers_length != 0) { int tmp = inputs_length * layers->neurons_length; connections = new connection[tmp]; for (int b = 0; b < tmp + connections_length; b++) { (this->connections + b)->previous = inputs + (b % inputs_length); (this->connections + b)->next = layers[i].neurons + (b % layers[i].neurons_length); (this->connections + b)->weight = initial_connection; } this->connections_length = tmp; } } else if(i == layers_length) { int tmp = (layers + layers_length - 1)->neurons_length * outputs_length; connections = ReDimPreserve(connections, this->connections_length, tmp + this->connections_length); for (int b = connections_length; b < tmp + connections_length; b++) { (this->connections + b)->previous = layers[i - 1].neurons + (b % layers[i - 1].neurons_length); (this->connections + b)->next = outputs + (b % outputs_length); (this->connections + b)->weight = initial_connection; } this->connections_length += tmp; } else { int tmp = layers[i - 1].neurons_length * layers[i].neurons_length; connections = ReDimPreserve(connections, this->connections_length, tmp + this->connections_length); for (int b = connections_length; b < tmp + 
connections_length; b++) { (this->connections + b)->previous = layers[i - 1].neurons + (b % layers[i - 1].neurons_length); (this->connections + b)->next = layers[i].neurons + (b % layers[i].neurons_length); (this->connections + b)->weight = initial_connection; } this->connections_length += tmp; } } } inline double sigmoid(double input) { double tmp = 0; double in = -1 * (input - 5); double e = 2.718281828459045; tmp = pow(e, in); tmp++; tmp = 1 / tmp; return tmp; } void run() { double previous_one = 0; for (int i = 0; i < this->connections_length; i++) { double tmp = 0; tmp = sigmoid(connections[i].previous->current_value) * connections[i].weight; connections[i].next->current_value += tmp; } } private: ~neural_network() { delete inputs; delete outputs; delete connections; delete layers; } }; #pragma endregion
int main() { layer *layers = new layer[2]; neuron *l1neurons = new neuron[50]; neuron *l2neurons = new neuron[50]; neuron *in = new neuron[60]; neuron *out = new neuron[3]; layers->neurons = l1neurons; (layers + 1)->neurons = l2neurons; layers->neurons_length = 50; (layers + 1)->neurons_length = 50; for (int i = 0; i < 60; i++) { (in + i)->current_value = 10000000; } neural_network *stockpicker = new neural_network(60, 3, 2, layers, in, out, 1); stockpicker->run(); cout << out->current_value << ", " << (out + 1)->current_value << ", " << (out + 2)->current_value; cin.ignore(); return 0; }Its the code! .... but I haven't found where the typo is yet. You have a few locations where variable is sent to = 50. So this is a good place to start and work backwards from.
Quote layers->neurons_length = 50; (layers + 1)->neurons_length = 50; Well, that's the section where I chose arbitrary values just to test it. For some reason, it works perfectly with small numbers of neurons, but not with larger ones. I found it. The limits of double (and long double) cause the sigmoid function to output 1 for large inputs, which occurs in the run function when all the inputs from previous neurons are summed and run through. Thanks, anyway.
|