/**
 * @file test_DnnlActivation.cpp
 *
 * @brief Tests for Dnnl activation layers
 *
 * @author David Tellenbach
 */

#include "doctest/doctest.h"
#include <cmath>
#include <random>

#include "DataContainer.h"
#include "DnnlActivationLayer.h"
#include "VolumeDescriptor.h"

using namespace elsa;
using namespace elsa::ml::detail;
using namespace doctest;

TEST_SUITE_BEGIN("ml-dnnl");

// TODO(dfrank): remove and replace with proper doctest usage of test cases
#define SECTION(name) DOCTEST_SUBCASE(name)

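// Elementwise check of a forward pass: `test` (the layer's output) must match
// the reference activation `func` applied to every coefficient of `input`.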
      24             : template <typename F, typename data_t>
      25           8 : static void testActivation(F func, data_t alpha, data_t beta, const DataContainer<data_t>& input,
      26             :                            const DataContainer<data_t>& test)
      27             : {
      28           8 :     REQUIRE(input.getSize() == test.getSize());
      29             : 
      30        6728 :     for (int i = 0; i < input.getSize(); ++i) {
      31        6720 :         REQUIRE(func(input[i], alpha, beta) == Approx(test[i]));
      32             :     }
      33           8 : }
      34             : 
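// Elementwise check of a backward pass: by the chain rule, the input gradient
// `test` must equal f'(input[i]) * outGrad[i] for every coefficient.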
      35             : template <typename F, typename data_t>
      36           8 : static void testActivationDerivative(F func, data_t alpha, data_t beta,
      37             :                                      const DataContainer<data_t>& input,
      38             :                                      const DataContainer<data_t>& outGrad,
      39             :                                      const DataContainer<data_t>& test)
      40             : {
      41           8 :     REQUIRE(input.getSize() == test.getSize());
      42             : 
      43        6728 :     for (int i = 0; i < input.getSize(); ++i) {
      44        6720 :         REQUIRE(func(input[i], alpha, beta) * outGrad[i] == Approx(test[i]));
      45             :     }
      46           8 : }
      47             : 
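// Generic harness for all activation layers: build a random 4-dimensional
// input volume, run the layer's forward and backward passes through a Dnnl
// stream, and compare the results against the reference functors f and f_der.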
template <typename LayerType, typename Func, typename FuncDer>
void testActivationLayer(Func f, FuncDer f_der)
{
    std::mt19937 mt(123); // random number generator with a deterministic seed
    std::uniform_real_distribution<float> dist(-1, 1);
    std::uniform_int_distribution<index_t> distIdx(1, 10);

    // Create random input
    IndexVector_t inputVec(4);
    for (int i = 0; i < inputVec.size(); ++i)
        inputVec[i] = distIdx(mt);

    VolumeDescriptor inputDesc(inputVec);

    DataContainer<float> input(inputDesc);
    for (auto& coeff : input)
        coeff = dist(mt);

    // Create random output-gradient
    DataContainer<float> outputGradient(inputDesc);
    for (auto& coeff : outputGradient)
        coeff = dist(mt);

    // Construct layer and set parameters
    LayerType layer(inputDesc);
    float alpha = dist(mt);
    layer.setAlpha(alpha);

    float beta = dist(mt);
    layer.setBeta(beta);

    // Set input and compile layer
    layer.setInput(input);
    layer.compile(elsa::ml::PropagationKind::Full);

    // Get Dnnl execution stream
    auto engine = layer.getEngine();
    dnnl::stream s(*engine);

    SECTION("Basics")
    {
        REQUIRE(layer.getInputDescriptor() == inputDesc);
        REQUIRE(layer.getOutputDescriptor() == inputDesc);
    }
    SECTION("Forward")
    {
        layer.forwardPropagate(s);
        auto output = layer.getOutput();

        testActivation(f, alpha, beta, input, output);
    }

    SECTION("Backward")
    {
        layer.setOutputGradient(outputGradient);
        layer.backwardPropagate(s);
        auto inputGradient = layer.getInputGradient();

        testActivationDerivative(f_der, alpha, beta, input, outputGradient, inputGradient);
    }
}

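// ReLU with a configurable negative slope: f(x) = alpha * x for x <= 0,
// f(x) = x otherwise (plain ReLU for alpha == 0).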
TEST_CASE("DnnlRelu")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) {
        if (coeff <= 0)
            return alpha * coeff;
        else
            return coeff;
    };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) {
        if (coeff <= 0)
            return alpha;
        else
            return 1.f;
    };
    testActivationLayer<DnnlRelu<float>>(f, f_der);
}

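// Absolute value: f(x) = |x|; the reference derivative at x == 0 is taken as 0.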
TEST_CASE("DnnlAbs")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return std::abs(coeff); };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) {
        if (coeff == 0)
            return 0.f;
        else if (coeff < 0)
            return -1.f;
        else
            return 1.f;
    };
    testActivationLayer<DnnlAbs<float>>(f, f_der);
}

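// ELU: f(x) = alpha * (exp(x) - 1) for x <= 0, f(x) = x otherwise.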
TEST_CASE("DnnlElu")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) {
        if (coeff <= 0)
            return alpha * (std::exp(coeff) - 1);
        else
            return coeff;
    };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) {
        if (coeff <= 0)
            return alpha * std::exp(coeff);
        else
            return 1.f;
    };
    testActivationLayer<DnnlElu<float>>(f, f_der);
}

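// Linear: f(x) = alpha * x + beta, with constant derivative alpha.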
TEST_CASE("DnnlLinear")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return alpha * coeff + beta; };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) { return alpha; };
    testActivationLayer<DnnlLinear<float>>(f, f_der);
}

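// Tanh: f(x) = tanh(x), f'(x) = 1 - tanh^2(x).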
TEST_CASE("DnnlTanh")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return std::tanh(coeff); };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) {
        return (1 - std::tanh(coeff) * std::tanh(coeff));
    };
    testActivationLayer<DnnlTanh<float>>(f, f_der);
}

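// Logistic sigmoid: f(x) = 1 / (1 + exp(-x)), f'(x) = f(x) * (1 - f(x)).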
TEST_CASE("DnnlLogistic")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return 1.f / (1.f + std::exp(-1.f * coeff)); };

    auto f_der = [&f]([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                      [[maybe_unused]] float beta) {
        return f(coeff, alpha, beta) * (1.f - f(coeff, alpha, beta));
    };
    testActivationLayer<DnnlLogistic<float>>(f, f_der);
}

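// Exponential: f(x) = exp(x), which is its own derivative.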
TEST_CASE("DnnlExp")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return std::exp(coeff); };

    auto f_der = [&f]([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                      [[maybe_unused]] float beta) { return f(coeff, alpha, beta); };
    testActivationLayer<DnnlExp<float>>(f, f_der);
}

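// SoftRelu (softplus): f(x) = log(1 + exp(x)), f'(x) = 1 / (1 + exp(-x)).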
TEST_CASE("DnnlSoftRelu")
{
    auto f = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                [[maybe_unused]] float beta) { return std::log(1 + std::exp(coeff)); };

    auto f_der = []([[maybe_unused]] const auto& coeff, [[maybe_unused]] float alpha,
                    [[maybe_unused]] float beta) { return 1.f / (1.f + std::exp(-1.f * coeff)); };
    testActivationLayer<DnnlSoftRelu<float>>(f, f_der);
}

     220             : TEST_SUITE_END();
