Skip to content

Instantly share code, notes, and snippets.

@radarek
Created February 22, 2009 00:22
Show Gist options
  • Save radarek/68248 to your computer and use it in GitHub Desktop.
diff --git a/ext/ruby_fann/fann_augment.h b/ext/ruby_fann/fann_augment.h
index a51be4c..2c977c5 100644
--- a/ext/ruby_fann/fann_augment.h
+++ b/ext/ruby_fann/fann_augment.h
@@ -19,9 +19,9 @@ FANN_EXTERNAL struct fann_train_data * FANN_API fann_create_train_from_rb_ary(
unsigned int i, j;
fann_type *data_input, *data_output;
struct fann_train_data *data = (struct fann_train_data *)malloc(sizeof(struct fann_train_data));
- unsigned int num_input = RARRAY(RARRAY(inputs)->ptr[0])->len;
- unsigned int num_output =RARRAY(RARRAY(outputs)->ptr[0])->len;
- unsigned int num_data = RARRAY(inputs)->len;
+ unsigned int num_input = RARRAY_LEN(RARRAY_PTR(inputs)[0]);
+ unsigned int num_output =RARRAY_LEN(RARRAY_PTR(outputs)[0]);
+ unsigned int num_data = RARRAY_LEN(inputs);
if(data == NULL) {
fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
@@ -72,29 +72,29 @@ FANN_EXTERNAL struct fann_train_data * FANN_API fann_create_train_from_rb_ary(
data->input[i] = data_input;
data_input += num_input;
- inputs_i = RARRAY(inputs)->ptr[i];
- outputs_i = RARRAY(outputs)->ptr[i];
+ inputs_i = RARRAY_PTR(inputs)[i];
+ outputs_i = RARRAY_PTR(outputs)[i];
- if(RARRAY(inputs_i)->len != num_input)
+ if(RARRAY_LEN(inputs_i) != num_input)
{
rb_raise (
rb_eRuntimeError,
"Number of inputs at [%d] is inconsistent: (%d != %d)",
- i, RARRAY(inputs_i)->len, num_input);
+ i, RARRAY_LEN(inputs_i), num_input);
}
- if(RARRAY(outputs_i)->len != num_output)
+ if(RARRAY_LEN(outputs_i) != num_output)
{
rb_raise (
rb_eRuntimeError,
"Number of outputs at [%d] is inconsistent: (%d != %d)",
- i, RARRAY(outputs_i)->len, num_output);
+ i, RARRAY_LEN(outputs_i), num_output);
}
for(j = 0; j != num_input; j++)
{
- data->input[i][j]=NUM2DBL(RARRAY(inputs_i)->ptr[j]);
+ data->input[i][j]=NUM2DBL(RARRAY_PTR(inputs_i)[j]);
}
data->output[i] = data_output;
@@ -102,9 +102,9 @@ FANN_EXTERNAL struct fann_train_data * FANN_API fann_create_train_from_rb_ary(
for(j = 0; j != num_output; j++)
{
- data->output[i][j]=NUM2DBL(RARRAY(outputs_i)->ptr[j]);
+ data->output[i][j]=NUM2DBL(RARRAY_PTR(outputs_i)[j]);
}
}
return data;
-}
\ No newline at end of file
+}
diff --git a/ext/ruby_fann/neural_network.c b/ext/ruby_fann/neural_network.c
index bf32dbf..18d934e 100644
--- a/ext/ruby_fann/neural_network.c
+++ b/ext/ruby_fann/neural_network.c
@@ -275,7 +275,7 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
Check_Type(num_outputs, T_FIXNUM);
// Initialize layers:
- unsigned int num_layers=RARRAY(hidden_neurons)->len + 2; // NUM2INT(num_inputs) + NUM2INT(num_outputs) + RARRAY(hidden_neurons)->len;
+ unsigned int num_layers=RARRAY_LEN(hidden_neurons) + 2; // NUM2INT(num_inputs) + NUM2INT(num_outputs) + RARRAY_LEN(hidden_neurons);
unsigned int layers[num_layers];
// Input:
@@ -285,7 +285,7 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
// Hidden:
int i;
for (i=1; i<=num_layers-2; i++) {
- layers[i]=NUM2UINT(RARRAY(hidden_neurons)->ptr[i-1]);
+ layers[i]=NUM2UINT(RARRAY_PTR(hidden_neurons)[i-1]);
}
ann = fann_create_standard_array(num_layers, layers);
@@ -352,19 +352,19 @@ static VALUE fann_train_data_initialize(VALUE self, VALUE hash)
rb_raise (rb_eRuntimeError, "[desired_outputs] must be present when [inputs] used.");
}
- if (RARRAY(inputs)->len < 1)
+ if (RARRAY_LEN(inputs) < 1)
{
rb_raise (rb_eRuntimeError, "[inputs/desired_outputs] must contain at least one value.");
}
// The data is here, start constructing:
- if(RARRAY(inputs)->len != RARRAY(desired_outputs)->len)
+ if(RARRAY_LEN(inputs) != RARRAY_LEN(desired_outputs))
{
rb_raise (
rb_eRuntimeError,
"Number of inputs must match number of outputs: (%d != %d)",
- RARRAY(inputs)->len,
- RARRAY(desired_outputs)->len);
+ RARRAY_LEN(inputs),
+ RARRAY_LEN(desired_outputs));
}
train_data = fann_create_train_from_rb_ary(inputs, desired_outputs);
@@ -1057,11 +1057,11 @@ static VALUE run (VALUE self, VALUE inputs)
fann_type* outputs;
// Convert inputs to type needed for NN:
- unsigned int len = RARRAY(inputs)->len;
+ unsigned int len = RARRAY_LEN(inputs);
fann_type fann_inputs[len];
for (i=0; i<len; i++)
{
- fann_inputs[i] = NUM2DBL(RARRAY(inputs)->ptr[i]);
+ fann_inputs[i] = NUM2DBL(RARRAY_PTR(inputs)[i]);
}
@@ -1379,12 +1379,12 @@ static VALUE set_cascade_activation_functions(VALUE self, VALUE cascade_activati
struct fann* f;
Data_Get_Struct (self, struct fann, f);
- unsigned int cnt = RARRAY(cascade_activation_functions)->len;
+ unsigned int cnt = RARRAY_LEN(cascade_activation_functions);
enum fann_activationfunc_enum fann_activation_functions[cnt];
int i;
for (i=0; i<cnt; i++)
{
- fann_activation_functions[i] = sym_to_activation_function(RARRAY(cascade_activation_functions)->ptr[i]);
+ fann_activation_functions[i] = sym_to_activation_function(RARRAY_PTR(cascade_activation_functions)[i]);
}
fann_set_cascade_activation_functions(f, fann_activation_functions, cnt);
@@ -1441,12 +1441,12 @@ static VALUE set_cascade_activation_steepnesses(VALUE self, VALUE cascade_activa
struct fann* f;
Data_Get_Struct (self, struct fann, f);
- unsigned int cnt = RARRAY(cascade_activation_steepnesses)->len;
+ unsigned int cnt = RARRAY_LEN(cascade_activation_steepnesses);
fann_type fann_activation_steepnesses[cnt];
int i;
for (i=0; i<cnt; i++)
{
- fann_activation_steepnesses[i] = NUM2DBL(RARRAY(cascade_activation_steepnesses)->ptr[i]);
+ fann_activation_steepnesses[i] = NUM2DBL(RARRAY_PTR(cascade_activation_steepnesses)[i]);
}
fann_set_cascade_activation_steepnesses(f, fann_activation_steepnesses, cnt);
diff --git a/lib/ruby_fann/neurotica.rb b/lib/ruby_fann/neurotica.rb
index 4452433..4a0b88c 100644
--- a/lib/ruby_fann/neurotica.rb
+++ b/lib/ruby_fann/neurotica.rb
@@ -48,9 +48,9 @@ module RubyFann
fillcolor = "transparent" # : "khaki3"
layer = neuron[:layer]
fillcolor = case layer
- when 0: @input_layer_color
- when max_layer: @output_layer_color
- else; @hidden_layer_colors[(layer-1) % @hidden_layer_colors.length]
+ when 0 then @input_layer_color
+ when max_layer then @output_layer_color
+ else @hidden_layer_colors[(layer-1) % @hidden_layer_colors.length]
end
#puts "adding neuron with #{neuron[:value]}"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment