// NOTE: the original angle-bracket include targets were lost in formatting;
// the library header paths below are assumed from the nn* API used in this test.
#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>
#include <neuralnet/train.h>

#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>
#include <stdio.h>

TEST_CASE(neuralnet_train_sigmoid_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnSigmoid };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);

  // Train.

  // Try to learn the sigmoid function: sample N points of sigmoid(x) over
  // [-1, +1] and use them as the training set.
  #define N 3
  R inputs[N];
  R targets[N];
  for (int i = 0; i < N; ++i) {
    inputs[i]  = lerp(-1, +1, (R)i / (R)(N - 1));
    targets[i] = sigmoid(inputs[i]);
  }

  nnMatrix inputs_matrix  = nnMatrixMake(N, 1);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
      .learning_rate  = 0.9,
      .max_iterations = 100,
      .seed           = 0,
      .weight_init    = nnWeightInit01,
      .debug          = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  // A 1x1 sigmoid network computes sigmoid(w * x), so to reproduce
  // sigmoid(x) the trained weight should converge to 1.
  const R weight          = nnMatrixAt(&net->weights[0], 0, 0);
  const R expected_weight = 1.0;
  printf("\nTrained network weight: %f, Expected: %f\n", weight, expected_weight);
  TEST_TRUE(double_eq(weight, expected_weight, WEIGHT_EPS));

  // Test.

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R test_input[] = { 0.3 };
  R test_output[1];
  nnQueryArray(net, query, test_input, test_output);

  const R expected_output = 0.574442516811659;  // sigmoid(0.3)
  printf("Output: %f, Expected: %f\n", test_output[0], expected_output);
  TEST_TRUE(double_eq(test_output[0], expected_output, OUTPUT_EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}