#include <neuralnet/neuralnet.h>  // Public API; header path assumed.

#include "activation.h"
#include "neuralnet_impl.h"
#include <neuralnet/matrix.h>  // nnMatrix helpers; header path assumed.

#include "test.h"
#include "test_util.h"

#include <assert.h>
#include <stdio.h>

TEST_CASE(neuralnet_perceptron_test) {
  const int num_layers = 2;
  const int input_size = 1;

  const R weights[] = {0.3};
  const R biases[]  = {0.0};

  const nnLayer layers[] = {
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(1, 1, weights),
            .biases  = nnMatrixFromArray(1, 1, biases)}},
      {.type = nnSigmoid},
  };

  nnNeuralNetwork* net = nnMakeNet(layers, num_layers, input_size);
  assert(net);

  nnQueryObject* query = nnMakeQueryObject(net, 1);

  const R input[] = {0.9};
  R       output[1];
  nnQueryArray(net, query, input, output);

  // A single linear unit followed by a sigmoid: y = sigmoid(w * x).
  const R expected_output = sigmoid(input[0] * weights[0]);
  printf(
      "\n[neuralnet_perceptron_test] Output: %f, Expected: %f\n", output[0],
      expected_output);
  TEST_TRUE(double_eq(output[0], expected_output, EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}

TEST_CASE(neuralnet_xor_test) {
  // First (hidden) layer.
  const R weights0[] = {1, 1, 1, 1};
  const R biases0[]  = {0, -1};

  // Second (output) layer. Together with the hidden layer above, the network
  // computes ReLU(x0 + x1) - 2 * ReLU(x0 + x1 - 1), which is XOR on {0,1}^2.
  const R weights1[] = {1, -2};
  const R biases1[]  = {0};

  // Network.
  const int     num_layers = 3;
  const int     input_size = 2;
  const nnLayer layers[]   = {
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(2, 2, weights0),
            .biases  = nnMatrixFromArray(1, 2, biases0)}},
      {.type = nnRelu},
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(2, 1, weights1),
            .biases  = nnMatrixFromArray(1, 1, biases1)}},
  };

  nnNeuralNetwork* net = nnMakeNet(layers, num_layers, input_size);
  assert(net);

  // First layer weights.
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.weights, 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.weights, 0, 1), 1);
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.weights, 0, 2), 1);
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.weights, 0, 3), 1);

  // Second linear layer (third layer) weights.
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.weights, 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.weights, 0, 1), -2);

  // First layer biases.
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.biases, 0, 0), 0);
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.biases, 0, 1), -1);

  // Second linear layer (third layer) biases.
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.biases, 0, 0), 0);

  // Test.
#define M 4

  nnQueryObject* query = nnMakeQueryObject(net, M);

  const R test_inputs[M][2] = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
  nnMatrix test_inputs_matrix = nnMatrixMake(M, 2);
  nnMatrixInit(&test_inputs_matrix, (const R*)test_inputs);
  nnQuery(net, query, &test_inputs_matrix);

  const R expected_outputs[M] = {0., 1., 1., 0.};
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    printf(
        "\nInput: (%f, %f), Output: %f, Expected: %f\n", test_inputs[i][0],
        test_inputs[i][1], test_output, expected_outputs[i]);
  }
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    TEST_TRUE(double_eq(test_output, expected_outputs[i], OUTPUT_EPS));
  }

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}