In Files

Parent

Files

Class Index [+]

Quicksearch

Neuro::Network

Public Class Methods

load(string) click to toggle source

Creates a Network object (including its learned state) from the Marshal-dumped string given as string, and returns it.

/*
 * call-seq: load(string)
 *
 * Class method: reconstructs a Network instance from a Marshal-dumped
 * string (as produced by _dump/dump). The string is unmarshalled into a
 * Hash; the scalar entries are validated as Fixnums and the layer entries
 * as Arrays before being copied into a freshly allocated C struct, which
 * is finally wrapped as a Ruby object of class +klass+.
 */
static VALUE rb_network_load(VALUE klass, VALUE string)
{
    VALUE input_size, hidden_size, output_size, learned,
        hidden_layer, output_layer, pair[2];
    Network *network;
    /* Unmarshal the dump back into the Hash written by Network_to_hash. */
    VALUE hash = rb_marshal_load(string);
    input_size = rb_hash_aref(hash, SYM("input_size"));
    hidden_size = rb_hash_aref(hash, SYM("hidden_size"));
    output_size = rb_hash_aref(hash, SYM("output_size"));
    learned = rb_hash_aref(hash, SYM("learned"));
        Check_Type(input_size, T_FIXNUM);
        Check_Type(hidden_size, T_FIXNUM);
        Check_Type(output_size, T_FIXNUM);
        Check_Type(learned, T_FIXNUM);
    /* Allocate and size the C-side network before filling in weights. */
    network = Network_allocate();
    Network_init(network, NUM2INT(input_size), NUM2INT(hidden_size),
            NUM2INT(output_size), NUM2INT(learned));
    hidden_layer = rb_hash_aref(hash, SYM("hidden_layer"));
    output_layer = rb_hash_aref(hash, SYM("output_layer"));
    Check_Type(hidden_layer, T_ARRAY);
    Check_Type(output_layer, T_ARRAY);
    /* pair[0] is the destination layer; pair[1] is presumably a running
     * node index consumed by setup_layer_i — confirm against its
     * definition elsewhere in this file. */
    pair[0] = (VALUE) network->hidden_layer;
    pair[1] = (VALUE) 0;
    rb_iterate(rb_each, hidden_layer, setup_layer_i, (VALUE) pair);
    pair[0] = (VALUE) network->output_layer;
    pair[1] = (VALUE) 0;
    rb_iterate(rb_each, output_layer, setup_layer_i, (VALUE) pair);
    /* NOTE(review): the mark function is NULL, so any VALUE stored inside
     * the struct later (e.g. network->debug, see debug=) is not protected
     * from GC — verify this is safe. */
    return Data_Wrap_Struct(klass, NULL, rb_network_free, network);
}
load(string) click to toggle source

Creates a Network object (including its learned state) from the Marshal-dumped string given as string, and returns it.

/*
 * call-seq: load(string)
 *
 * Class method: reconstructs a Network instance from a Marshal-dumped
 * string (as produced by _dump/dump). The string is unmarshalled into a
 * Hash; the scalar entries are validated as Fixnums and the layer entries
 * as Arrays before being copied into a freshly allocated C struct, which
 * is finally wrapped as a Ruby object of class +klass+.
 *
 * NOTE(review): this listing shows the same definition twice (generated
 * documentation artifact); the two copies are byte-identical.
 */
static VALUE rb_network_load(VALUE klass, VALUE string)
{
    VALUE input_size, hidden_size, output_size, learned,
        hidden_layer, output_layer, pair[2];
    Network *network;
    /* Unmarshal the dump back into the Hash written by Network_to_hash. */
    VALUE hash = rb_marshal_load(string);
    input_size = rb_hash_aref(hash, SYM("input_size"));
    hidden_size = rb_hash_aref(hash, SYM("hidden_size"));
    output_size = rb_hash_aref(hash, SYM("output_size"));
    learned = rb_hash_aref(hash, SYM("learned"));
        Check_Type(input_size, T_FIXNUM);
        Check_Type(hidden_size, T_FIXNUM);
        Check_Type(output_size, T_FIXNUM);
        Check_Type(learned, T_FIXNUM);
    /* Allocate and size the C-side network before filling in weights. */
    network = Network_allocate();
    Network_init(network, NUM2INT(input_size), NUM2INT(hidden_size),
            NUM2INT(output_size), NUM2INT(learned));
    hidden_layer = rb_hash_aref(hash, SYM("hidden_layer"));
    output_layer = rb_hash_aref(hash, SYM("output_layer"));
    Check_Type(hidden_layer, T_ARRAY);
    Check_Type(output_layer, T_ARRAY);
    /* pair[0] is the destination layer; pair[1] is presumably a running
     * node index consumed by setup_layer_i — confirm against its
     * definition elsewhere in this file. */
    pair[0] = (VALUE) network->hidden_layer;
    pair[1] = (VALUE) 0;
    rb_iterate(rb_each, hidden_layer, setup_layer_i, (VALUE) pair);
    pair[0] = (VALUE) network->output_layer;
    pair[1] = (VALUE) 0;
    rb_iterate(rb_each, output_layer, setup_layer_i, (VALUE) pair);
    /* NOTE(review): mark function is NULL — VALUEs later stored in the
     * struct (e.g. network->debug) are not GC-marked; verify safety. */
    return Data_Wrap_Struct(klass, NULL, rb_network_free, network);
}
new(input_size, hidden_size, output_size) click to toggle source

Returns a Neuro::Network instance of the given size specification.

/*
 * call-seq: new(input_size, hidden_size, output_size)
 *
 * Initializes a freshly allocated Network with the given layer sizes
 * (all three must be Fixnums), randomizes its weights and returns self.
 * The learned counter starts at 0.
 */
static VALUE rb_network_initialize(int argc, VALUE *argv, VALUE self)
{
    VALUE in_size, hid_size, out_size;
    Network *net;

    rb_scan_args(argc, argv, "3", &in_size, &hid_size, &out_size);
    Check_Type(in_size, T_FIXNUM);
    Check_Type(hid_size, T_FIXNUM);
    Check_Type(out_size, T_FIXNUM);
    Data_Get_Struct(self, Network, net);
    Network_init(net, NUM2INT(in_size), NUM2INT(hid_size),
        NUM2INT(out_size), 0);
    Network_init_weights(net);
    return self;
}

Public Instance Methods

_dump(...) click to toggle source

Returns the serialized data for this Network instance for the Marshal module.

/*
 * call-seq: _dump(...)
 *
 * Serializes this Network for the Marshal module: converts the C-side
 * state into a Hash and Marshal-dumps it. An optional argument is passed
 * through to rb_marshal_dump as the port (defaults to nil).
 */
static VALUE rb_network_dump(int argc, VALUE *argv, VALUE self)
{
    Network *net;
    VALUE port = Qnil;

    rb_scan_args(argc, argv, "01", &port);
    Data_Get_Struct(self, Network, net);
    return rb_marshal_dump(Network_to_hash(net), port);
}
debug() click to toggle source

Returns nil if debugging is switched off. Returns the IO object that is used for debugging output if debugging is switched on.

/*
 * Reader for the debugging sink: returns whatever VALUE is currently
 * stored in network->debug (nil when debugging is off).
 */
static VALUE rb_network_debug(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return net->debug;
}
debug=(io) click to toggle source

Switches debugging on, if io is an IO object. If it is nil, debugging is switched off.

/*
 * call-seq: debug=(io)
 *
 * Stores io as the debugging sink: an IO object switches debugging on,
 * nil switches it off. Returns io. No type check is performed here.
 *
 * NOTE(review): io is stored as a raw VALUE inside the C struct; with the
 * NULL mark function seen in rb_network_load's Data_Wrap_Struct it would
 * not be GC-marked — confirm the wrapper used by the allocator marks it.
 */
static VALUE rb_network_debug_set(VALUE self, VALUE io)
{
    Network *network;

    Data_Get_Struct(self, Network, network);
    network->debug = io;
    return io;
}
debug_step() click to toggle source

Returns the Integer number of steps, that are done during learning, before a debugging message is printed to debug.

/*
 * Returns the number of learning steps between two debugging messages
 * as a Ruby Integer.
 */
static VALUE rb_network_debug_step(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->debug_step);
}
debug_step=(step) click to toggle source

Sets to step the number of steps that are done during learning before a debugging message is printed. If step is equal to or less than 0, the default value (=1000) is set.

/*
 * call-seq: debug_step=(step)
 *
 * Sets the number of learning steps between debugging messages. step
 * must be a Fixnum; non-positive values fall back to the default
 * (DEFAULT_DEBUG_STEP). Returns step unchanged.
 */
static VALUE rb_network_debug_step_set(VALUE self, VALUE step)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    Check_Type(step, T_FIXNUM);
    net->debug_step = NUM2INT(step);
    /* Clamp: non-positive values select the built-in default. */
    if (net->debug_step <= 0)
        net->debug_step = DEFAULT_DEBUG_STEP;
    return step;
}
decide(data) click to toggle source

The network is given the Array data (size has to be == input_size), and it responds with another Array (size == output_size) by returning it.

/*
 * call-seq: decide(data)
 *
 * Feeds the Array data (its length must equal input_size) forward through
 * the network and returns the resulting Array of output_size Floats, one
 * per output node.
 */
static VALUE rb_network_decide(VALUE self, VALUE data)
{
    Network *network;
    VALUE result;
    int i;

    Data_Get_Struct(self, Network, network);

        Check_Type(data, T_ARRAY);
    if (RARRAY(data)->len != network->input_size)
        rb_raise(rb_cNeuroError, "size of data != input_size");
    /* Copy the Ruby Array into the C-side input buffer. */
    transform_data(network->tmp_input, data);
    /* feed is a macro defined elsewhere in this file; presumably it runs
     * the forward pass that fills each node's ->output — confirm. */
    feed;
    /* Collect the output-layer activations into a fresh Ruby Array. */
    result = rb_ary_new2(network->output_size);
    for (i = 0; i < network->output_size; i++) {
        rb_ary_store(result, i,
            rb_float_new(network->output_layer[i]->output));
    }
    return result;
}
dump(...) click to toggle source

Returns the serialized data for this Network instance for the Marshal module.

/*
 * call-seq: dump(...)
 *
 * Serializes this Network for the Marshal module: converts the C-side
 * state into a Hash and Marshal-dumps it. An optional argument is passed
 * through to rb_marshal_dump as the port (defaults to nil).
 */
static VALUE rb_network_dump(int argc, VALUE *argv, VALUE self)
{
    Network *net;
    VALUE port = Qnil;

    rb_scan_args(argc, argv, "01", &port);
    Data_Get_Struct(self, Network, net);
    return rb_marshal_dump(Network_to_hash(net), port);
}
hidden_size() click to toggle source

Returns the hidden_size of this Network as an Integer. This is the number of nodes in the hidden layer.

/*
 * Returns the number of nodes in the hidden layer as a Ruby Integer.
 */
static VALUE rb_network_hidden_size(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->hidden_size);
}
input_size() click to toggle source

Returns the input_size of this Network as an Integer. This is the number of weights, that are connected to the input of the hidden layer.

/*
 * Returns the input_size of this Network as a Ruby Integer — the number
 * of weights connected to the input of each hidden-layer node.
 */
static VALUE rb_network_input_size(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->input_size);
}
learn(data, desired, max_error, eta) click to toggle source

The network should respond with the Array desired (size == output_size), if it was given the Array data (size == input_size). The learning process ends, if the resulting error sinks below max_error and convergence is assumed. A lower eta parameter leads to slower learning, because of low weight changes. A too high eta can lead to wildly oscillating weights, and result in slower learning or no learning at all. The last two parameters should be chosen appropriately to the problem at hand. ;)

The return value is an Integer value, that denotes the number of learning steps, which were necessary, to learn the data, or max_iterations, if the data couldn’t be learned.

/*
 * call-seq: learn(data, desired, max_error, eta)
 *
 * Trains the network by plain back-propagation until the summed squared
 * output error drops below the convergence threshold or max_iterations
 * is reached. data must have input_size elements, desired output_size
 * elements; max_error and eta must be positive numbers. Returns the
 * number of iterations performed.
 *
 * NOTE(review): the fall-through path after the loop (no convergence)
 * also reaches CONVERGED and increments network->learned — confirm this
 * is intended.
 */
static VALUE rb_network_learn(VALUE self, VALUE data, VALUE desired, VALUE
        max_error, VALUE eta)
{
    Network *network;
    double max_error_float, eta_float, error, sum,
        *output_delta, *hidden_delta;
    int i, j, count;

    Data_Get_Struct(self, Network, network);

        Check_Type(data, T_ARRAY);
    if (RARRAY(data)->len != network->input_size)
        rb_raise(rb_cNeuroError, "size of data != input_size");
    transform_data(network->tmp_input, data);

        Check_Type(desired, T_ARRAY);
    if (RARRAY(desired)->len != network->output_size)
        rb_raise(rb_cNeuroError, "size of desired != output_size");
    transform_data(network->tmp_output, desired);
    /* Coerce thresholds to Floats and validate them. */
    CAST2FLOAT(max_error);
    max_error_float = RFLOAT(max_error)->value;
    if (max_error_float <= 0) rb_raise(rb_cNeuroError, "max_error <= 0");
    /* The loop compares the summed squared error against 2 * max_error
     * (not max_error squared) — the doubling happens here. */
    max_error_float *= 2.0;
    CAST2FLOAT(eta);
    eta_float = RFLOAT(eta)->value;
    if (eta_float <= 0) rb_raise(rb_cNeuroError, "eta <= 0");

    /* Stack-allocated per-node delta buffers for this call. */
    output_delta = ALLOCA_N(double, network->output_size);
    hidden_delta = ALLOCA_N(double, network->hidden_size);
    for(count = 0; count < network->max_iterations; count++) {
        /* Forward pass (macro defined elsewhere in this file). */
        feed;

        /* Compute output weight deltas and current error */
        error = 0.0;    
        for (i = 0; i < network->output_size; i++) {
            output_delta[i] = network->tmp_output[i] -
                network->output_layer[i]->output;
            error += output_delta[i] * output_delta[i];
            output_delta[i] *= network->output_layer[i]->output *
                (1.0 - network->output_layer[i]->output);
            /* diff * (sigmoid' = 2 * output  * beta * (1 - output)) */

        }

        if (count % network->debug_step == 0)
            Network_debug_error(network, count, error, max_error_float);

        /* Converged: summed squared error fell below 2 * max_error. */
        if (error < max_error_float) goto CONVERGED;

        /* Compute hidden weight deltas */
        
                for (i = 0; i < network->hidden_size; i++) {
            sum = 0.0;
                        for (j = 0; j < network->output_size; j++)
                                sum += output_delta[j] *
                    network->output_layer[j]->weights[i];
                        hidden_delta[i] = sum * network->hidden_layer[i]->output *
                (1.0 - network->hidden_layer[i]->output);
            /* sum * (sigmoid' = 2 * output  * beta * (1 - output)) */
                }
        
        /* Adjust weights */

                for (i = 0; i < network->output_size; i++)
                        for (j = 0; j < network->hidden_size; j++)
                network->output_layer[i]->weights[j] +=
                    eta_float * output_delta[i] *
                    network->hidden_layer[j]->output;

                for (i = 0; i < network->hidden_size; i++)
                        for (j = 0; j < network->input_size; j++)
                                network->hidden_layer[i]->weights[j] += eta_float *
                    hidden_delta[i] * network->tmp_input[j];
    }
    Network_debug_bail_out(network);
CONVERGED:
    network->learned++;
    return INT2NUM(count);
}
learned() click to toggle source

Returns the number of calls to learn as an integer.

/*
 * Returns the number of completed learn calls as a Ruby Integer.
 */
static VALUE rb_network_learned(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->learned);
}
max_iterations() click to toggle source

Returns the maximal number of iterations, that are done before learn gives up and returns without having learned the given data.

/*
 * Returns the maximal number of learning iterations before learn gives
 * up, as a Ruby Integer.
 */
static VALUE rb_network_max_iterations(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->max_iterations);
}
max_iterations=(iterations) click to toggle source

Sets the maximal number of iterations, that are done before learn gives up and returns without having learned the given data, to iterations. If iterations is equal to or less than 0, the default value (=10_000) is set.

/*
 * call-seq: max_iterations=(iterations)
 *
 * Sets the iteration cap used by learn. iterations must be a Fixnum;
 * non-positive values fall back to DEFAULT_MAX_ITERATIONS. Returns
 * iterations unchanged.
 */
static VALUE rb_network_max_iterations_set(VALUE self, VALUE iterations)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    Check_Type(iterations, T_FIXNUM);
    net->max_iterations = NUM2INT(iterations);
    /* Clamp: non-positive values select the built-in default. */
    if (net->max_iterations <= 0)
        net->max_iterations = DEFAULT_MAX_ITERATIONS;
    return iterations;
}
output_size() click to toggle source

Returns the output_size of this Network as an Integer. This is the number of nodes in the output layer.

/*
 * Returns the number of nodes in the output layer as a Ruby Integer.
 */
static VALUE rb_network_output_size(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return INT2NUM(net->output_size);
}
to_h() click to toggle source

Returns the state of the network as a Hash.

/*
 * Returns the complete state of the network as a Ruby Hash
 * (delegates to Network_to_hash).
 */
static VALUE rb_network_to_h(VALUE self)
{
    Network *net;

    Data_Get_Struct(self, Network, net);
    return Network_to_hash(net);
}
to_s() click to toggle source

Returns a short string for the network.

/*
 * Returns a short description string of the form
 * "#<ClassName:input,hidden,output>" built via rb_f_sprintf.
 */
static VALUE rb_network_to_s(VALUE self)
{
    Network *net;
    VALUE fmt_args[5];

    Data_Get_Struct(self, Network, net);
    fmt_args[0] = rb_str_new2("#<%s:%u,%u,%u>");
    /* self.class.name supplies the %s part of the format. */
    fmt_args[1] = rb_funcall(rb_funcall(self, id_class, 0, 0),
        id_name, 0, 0);
    fmt_args[2] = INT2NUM(net->input_size);
    fmt_args[3] = INT2NUM(net->hidden_size);
    fmt_args[4] = INT2NUM(net->output_size);
    return rb_f_sprintf(5, fmt_args);
}

Disabled; run with --debug to generate this.

[Validate]

Generated with the Darkfish Rdoc Generator 1.1.6.