@hollance
Last active April 21, 2023 17:13
Playing with BNNS on macOS 10.12. The "hello world" of neural networks.

/*
The "hello world" of neural networks: a simple 3-layer feed-forward
network that implements an XOR logic gate.

The first layer is the input layer. It has two neurons a and b, which
are the two inputs to the XOR gate.

The middle layer is the hidden layer. This has two neurons h1, h2 that
will learn what it means to be an XOR gate.

Neuron a is connected to h1 and h2. Neuron b is also connected to h1
and h2. Each of these four connections has its own weight. You learn
these weights by training the network (not done in this demo program).

The final layer is the output layer. This has a single neuron o. Its
value is either "high" or "low", just like the output of an XOR gate.
Both h1 and h2 are connected to the o neuron.

  +---+      +----+
  | a |      | h1 |
  +---+      +----+      +---+
                         | o |
  +---+      +----+      +---+
  | b |      | h2 |
  +---+      +----+

The expected output is:

  predict(0, 0) should give 0
  predict(0, 1) should give 1
  predict(1, 0) should give 1
  predict(1, 1) should give 0
*/
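
/*
Added note (not part of the original comment): with a sigmoid activation,
each fully connected layer below computes out = sigmoid(W * in + bias), so
the whole network evaluates o = sigmoid(W2 * sigmoid(W1 * [a, b] + b1) + b2).
*/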
#include <Accelerate/Accelerate.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
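
// Build (typical invocation, not part of the original gist; assumes the
// source is saved as xor.c):
//
//   clang xor.c -framework Accelerate -o xor

// The BNNS filters that implement the hidden and output layers. They are
// created in create_network() and used by predict().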
BNNSFilter hidden_layer;
BNNSFilter output_layer;

bool create_network(void) {
    BNNSFilterParameters filter_params;
    bzero(&filter_params, sizeof(filter_params));
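    // An all-zero BNNSFilterParameters struct asks BNNS for its default
    // behavior (default memory allocator, BNNS chooses the thread count).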

    BNNSActivation activation;
    bzero(&activation, sizeof(activation));
    activation.function = BNNSActivationFunctionSigmoid;

    // These weights and bias values were found by training the network
    // (using a different program). These numbers represent what the net
    // has learned, in this case the proper response of an XOR gate.
    float input_to_hidden_weights[] = { 54.0f, 14.0f, 17.0f, 14.0f };
    float input_to_hidden_bias[] = { -8.0f, -20.0f };
    float hidden_to_output_weights[] = { 92.0f, -98.0f };
    float hidden_to_output_bias[] = { -48.0f };
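
    // Added note: assuming BNNS stores fully connected weights row-major as
    // out_size rows of in_size values, these numbers encode
    //
    //     h1 = sigmoid(54*a + 14*b -  8)
    //     h2 = sigmoid(17*a + 14*b - 20)
    //     o  = sigmoid(92*h1 - 98*h2 - 48)
    //
    // which drives the sigmoids to ~0 or ~1 and reproduces the XOR truth table.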

    BNNSFullyConnectedLayerParameters input_to_hidden_params;
    bzero(&input_to_hidden_params, sizeof(input_to_hidden_params));
    input_to_hidden_params.in_size = 2;
    input_to_hidden_params.out_size = 2;
    input_to_hidden_params.activation = activation;
    input_to_hidden_params.weights.data = input_to_hidden_weights;
    input_to_hidden_params.weights.data_type = BNNSDataTypeFloat32;
    input_to_hidden_params.bias.data = input_to_hidden_bias;
    input_to_hidden_params.bias.data_type = BNNSDataTypeFloat32;

    BNNSFullyConnectedLayerParameters hidden_to_output_params;
    bzero(&hidden_to_output_params, sizeof(hidden_to_output_params));
    hidden_to_output_params.in_size = 2;
    hidden_to_output_params.out_size = 1;
    hidden_to_output_params.activation = activation;
    hidden_to_output_params.weights.data = hidden_to_output_weights;
    hidden_to_output_params.weights.data_type = BNNSDataTypeFloat32;
    hidden_to_output_params.bias.data = hidden_to_output_bias;
    hidden_to_output_params.bias.data_type = BNNSDataTypeFloat32;

    BNNSVectorDescriptor input_desc;
    bzero(&input_desc, sizeof(input_desc));
    input_desc.size = 2;
    input_desc.data_type = BNNSDataTypeFloat32;

    BNNSVectorDescriptor hidden_desc;
    bzero(&hidden_desc, sizeof(hidden_desc));
    hidden_desc.size = 2;
    hidden_desc.data_type = BNNSDataTypeFloat32;

    hidden_layer = BNNSFilterCreateFullyConnectedLayer(&input_desc, &hidden_desc, &input_to_hidden_params, &filter_params);
    if (hidden_layer == NULL) {
        fprintf(stderr, "BNNSFilterCreateFullyConnectedLayer failed for hidden_layer\n");
        return false;
    }

    BNNSVectorDescriptor output_desc;
    bzero(&output_desc, sizeof(output_desc));
    output_desc.size = 1;
    output_desc.data_type = BNNSDataTypeFloat32;

    output_layer = BNNSFilterCreateFullyConnectedLayer(&hidden_desc, &output_desc, &hidden_to_output_params, &filter_params);
    if (output_layer == NULL) {
        fprintf(stderr, "BNNSFilterCreateFullyConnectedLayer failed for output_layer\n");
        return false;
    }

    return true;
}

float predict(float a, float b) {
    // These arrays hold the inputs and outputs to and from the layers.
    float input[] = { a, b };
    float hidden[] = { 0.0f, 0.0f };
    float output[] = { 0.0f };

    int status = BNNSFilterApply(hidden_layer, input, hidden);
    if (status != 0) {
        fprintf(stderr, "BNNSFilterApply failed on hidden_layer\n");
    }

    status = BNNSFilterApply(output_layer, hidden, output);
    if (status != 0) {
        fprintf(stderr, "BNNSFilterApply failed on output_layer\n");
    }
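
    // The sigmoid output is a value in (0, 1); read it as a logic level by
    // thresholding at 0.5 (close to 1 means "high", close to 0 means "low").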
printf("Predict %f, %f = %f\n", a, b, output[0]);
return output[0];
}
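
/*
Added sketch (not part of the original gist): the same forward pass written in
plain C, useful as a cross-check of what the two BNNS filters compute. It
assumes the row-major weight layout described above and is not called from
main().
*/
#include <math.h> // for expf(); included here to keep the sketch self-contained

static float sigmoidf(float x) {
    return 1.0f / (1.0f + expf(-x));
}

static float predict_reference(float a, float b) {
    float h1 = sigmoidf(54.0f * a + 14.0f * b -  8.0f);
    float h2 = sigmoidf(17.0f * a + 14.0f * b - 20.0f);
    return sigmoidf(92.0f * h1 - 98.0f * h2 - 48.0f);
}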

void destroy_network(void) {
    BNNSFilterDestroy(hidden_layer);
    BNNSFilterDestroy(output_layer);
}

int main(int argc, const char * argv[]) {
    if (create_network()) {
        printf("Making predictions for XOR gate:\n");
        predict(0, 0);
        predict(0, 1);
        predict(1, 0);
        predict(1, 1);
        destroy_network();
    }
    return 0;
}