#include <neuralnet/train.h>

#include "activation.h"
#include "neuralnet_impl.h"
#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>

#include "test.h"
#include "test_util.h"

#include <assert.h>
#include <stdio.h>

TEST_CASE(neuralnet_train_sigmoid_test) {
  const int num_layers = 2;
  const int input_size = 1;

  // A single linear neuron followed by a sigmoid activation.
  const nnLayer layers[] = {
      {.type = nnLinear, .linear = {.input_size = 1, .output_size = 1}},
      {.type = nnSigmoid},
  };

  nnNeuralNetwork* net = nnMakeNet(layers, num_layers, input_size);
  assert(net);

  // Train.

  // Try to learn the sigmoid function: the targets are sigmoid(x) for N
  // inputs evenly spaced in [-1, +1].
#define N 3
  R inputs[N];
  R targets[N];
  for (int i = 0; i < N; ++i) {
    inputs[i] = lerp(-1, +1, (R)i / (R)(N - 1));
    targets[i] = sigmoid(inputs[i]);
  }

  nnMatrix inputs_matrix = nnMatrixMake(N, 1);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
      .learning_rate = 0.9,
      .max_iterations = 100,
      .seed = 0,
      .weight_init = nnWeightInit01,
      .debug = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  // The network computes sigmoid(w*x + b) while the targets are sigmoid(x),
  // so the trained weight should converge to 1.
  const R weight = nnMatrixAt(&net->layers[0].linear.weights, 0, 0);
  const R expected_weight = 1.0;
  printf(
      "\nTrained network weight: %f, Expected: %f\n", weight, expected_weight);
  TEST_TRUE(double_eq(weight, expected_weight, WEIGHT_EPS));

  // Test.
  nnQueryObject* query = nnMakeQueryObject(net, 1);

  const R test_input[] = {0.3};
  R test_output[1];
  nnQueryArray(net, query, test_input, test_output);

  const R expected_output = 0.574442516811659;  // sigmoid(0.3)
  printf("Output: %f, Expected: %f\n", test_output[0], expected_output);
  TEST_TRUE(double_eq(test_output[0], expected_output, OUTPUT_EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}