CMSIS-DSP: Added Naive Gaussian Bayesian Estimator

pull/19/head
Christophe Favergeon 7 years ago
parent 302ada6633
commit 94b63664f2

@ -279,6 +279,13 @@
* @defgroup groupSVM SVM Functions * @defgroup groupSVM SVM Functions
*/ */
/**
* @defgroup groupBayes Bayesian estimators
*
*/
#ifndef _ARM_MATH_H #ifndef _ARM_MATH_H
#define _ARM_MATH_H #define _ARM_MATH_H
@ -7139,6 +7146,35 @@ void arm_svm_sigmoid_predict_f32(const arm_svm_sigmoid_instance_f32 *S,
int * pResult); int * pResult);
/**
 * @brief Instance structure for Naive Gaussian Bayesian estimator.
 *
 * theta and sigma are laid out as numberOfClasses consecutive rows of
 * vectorDimension values (one Gaussian per class and per dimension).
 * classPriors holds numberOfClasses values. epsilon is added to every
 * variance before it is used (regularization).
 */
typedef struct
{
uint32_t vectorDimension; /**< Dimension of vector space */
uint32_t numberOfClasses; /**< Number of different classes */
const float32_t *theta; /**< Mean values for the Gaussians */
const float32_t *sigma; /**< Variances for the Gaussians */
const float32_t *classPriors; /**< Class prior probabilities */
float32_t epsilon; /**< Additive value to variances */
} arm_gaussian_naive_bayes_instance_f32;
/**
 * @brief Naive Gaussian Bayesian Estimator
 *
 * @param[in]  *S       points to a naive bayes instance structure
 * @param[in]  *in      points to the elements of the input vector.
 * @param[out] *pBuffer points to a scratch buffer of length numberOfClasses;
 *                      on return it holds the per-class (unnormalized)
 *                      log posterior values
 * @return The predicted class (index of the maximum buffer value)
 *
 */
uint32_t arm_gaussian_naive_bayes_predict_f32(const arm_gaussian_naive_bayes_instance_f32 *S,
const float32_t * in,
float32_t *pBuffer);
/** /**
* @ingroup groupInterpolation * @ingroup groupInterpolation
*/ */

@ -0,0 +1,19 @@
# Build script for the CMSIS-DSP Bayesian estimator functions library.
cmake_minimum_required (VERSION 3.6)
project(CMSISDSPBayes)
# Shared CMSIS-DSP build-configuration helpers (provide configLib/configDsp).
include(config)
include(configDsp)
# All implementation sources in this directory (arm_*_*.c naming pattern).
file(GLOB SRC "./*_*.c")
add_library(CMSISDSPBayes STATIC ${SRC})
configLib(CMSISDSPBayes ${ROOT})
configDsp(CMSISDSPBayes ${ROOT})
### Includes
target_include_directories(CMSISDSPBayes PUBLIC "${DSP}/Include")

@ -0,0 +1,294 @@
/* ----------------------------------------------------------------------
* Project: CMSIS DSP Library
* Title: arm_naive_gaussian_bayes_predict_f32
* Description: Naive Gaussian Bayesian Estimator
*
*
* Target Processor: Cortex-M cores
* -------------------------------------------------------------------- */
/*
* Copyright (C) 2010-2019 ARM Limited or its affiliates. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "arm_math.h"
#include <limits.h>
#include <math.h>
/**
* @addtogroup groupBayes
* @{
*/
/**
* @brief Naive Gaussian Bayesian Estimator
*
* @param[in] *S points to a naive bayes instance structure
* @param[in] *in points to the elements of the input vector.
* @param[in] *pBuffer points to a buffer of length numberOfClasses
* @return The predicted class
*
*/
#define PI_F 3.1415926535897932384626433832795f
#define DPI_F (2*3.1415926535897932384626433832795f)
#if defined(ARM_MATH_NEON)
#include "NEMath.h"
/*
 * NEON implementation.
 *
 * For each class c this computes the (unnormalized) log posterior
 *     log(classPriors[c])
 *   - 0.5 * sum_d log(2*pi*(sigma[c][d] + epsilon))
 *   - 0.5 * sum_d (in[d] - theta[c][d])^2 / (sigma[c][d] + epsilon)
 * into pBuffer[c], then returns the index of the maximum via arm_max_f32.
 *
 * Classes are processed two at a time: pTheta/pSigma walk the row of
 * class 2k while pTheta1/pSigma1 walk the row of class 2k+1. Each inner
 * sum is vectorized four dimensions at a time, with scalar tail loops
 * for the remaining (vectorDimension & 3) dimensions and a final scalar
 * class loop when numberOfClasses is odd.
 */
uint32_t arm_gaussian_naive_bayes_predict_f32(const arm_gaussian_naive_bayes_instance_f32 *S,
const float32_t * in,
float32_t *pBuffer)
{
int nbClass;   /* unused in this NEON path */
int nbDim;     /* unused in this NEON path */
const float32_t *pPrior = S->classPriors;
const float32_t *pTheta = S->theta;
const float32_t *pSigma = S->sigma;
/* Second set of pointers, offset by one row, for the odd class of each pair. */
const float32_t *pTheta1 = S->theta + S->vectorDimension;
const float32_t *pSigma1 = S->sigma + S->vectorDimension;
float32_t *buffer = pBuffer;
const float32_t *pIn=in;
float32_t result;
float32_t sigma,sigma1;
float32_t tmp,tmp1;    /* scalar log-posterior accumulators (class 2k, class 2k+1) */
uint32_t index;
uint32_t vecBlkCnt;
uint32_t classBlkCnt;
float32x4_t epsilonV;
float32x4_t sigmaV,sigmaV1;
float32x4_t tmpV,tmpVb,tmpV1;
float32x2_t tmpV2;
float32x4_t thetaV,thetaV1;
float32x4_t inV;
epsilonV = vdupq_n_f32(S->epsilon);
/* Main loop: pairs of classes. */
classBlkCnt = S->numberOfClasses >> 1;
while(classBlkCnt > 0)
{
pIn = in;
/* Start both accumulators at the class log prior. */
tmp = log(*pPrior++);
tmp1 = log(*pPrior++);
tmpV = vdupq_n_f32(0.0);
tmpV1 = vdupq_n_f32(0.0);
/* Vectorized part: 4 dimensions per iteration. */
vecBlkCnt = S->vectorDimension >> 2;
while(vecBlkCnt > 0)
{
sigmaV = vld1q_f32(pSigma);
thetaV = vld1q_f32(pTheta);
sigmaV1 = vld1q_f32(pSigma1);
thetaV1 = vld1q_f32(pTheta1);
inV = vld1q_f32(pIn);
/* Regularize the variances with epsilon. */
sigmaV = vaddq_f32(sigmaV, epsilonV);
sigmaV1 = vaddq_f32(sigmaV1, epsilonV);
/* tmpV -= 0.5 * log(2*pi*sigma) for class 2k. */
tmpVb = vmulq_n_f32(sigmaV,DPI_F);
tmpVb = vlogq_f32(tmpVb);
tmpV = vmlsq_n_f32(tmpV,tmpVb,0.5);
/* Same normalization term for class 2k+1. */
tmpVb = vmulq_n_f32(sigmaV1,DPI_F);
tmpVb = vlogq_f32(tmpVb);
tmpV1 = vmlsq_n_f32(tmpV1,tmpVb,0.5);
/* tmpV -= 0.5 * (x - theta)^2 / sigma for class 2k. */
tmpVb = vsubq_f32(inV,thetaV);
tmpVb = vmulq_f32(tmpVb,tmpVb);
tmpVb = vmulq_f32(tmpVb, vinvq_f32(sigmaV));
tmpV = vmlsq_n_f32(tmpV,tmpVb,0.5);
/* Same quadratic term for class 2k+1. */
tmpVb = vsubq_f32(inV,thetaV1);
tmpVb = vmulq_f32(tmpVb,tmpVb);
tmpVb = vmulq_f32(tmpVb, vinvq_f32(sigmaV1));
tmpV1 = vmlsq_n_f32(tmpV1,tmpVb,0.5);
pIn += 4;
pTheta += 4;
pSigma += 4;
pTheta1 += 4;
pSigma1 += 4;
vecBlkCnt--;
}
/* Horizontal reduction of the two vector accumulators. */
tmpV2 = vpadd_f32(vget_low_f32(tmpV),vget_high_f32(tmpV));
tmp += tmpV2[0] + tmpV2[1];
tmpV2 = vpadd_f32(vget_low_f32(tmpV1),vget_high_f32(tmpV1));
tmp1 += tmpV2[0] + tmpV2[1];
/* Scalar tail: remaining vectorDimension & 3 dimensions. */
vecBlkCnt = S->vectorDimension & 3;
while(vecBlkCnt > 0)
{
sigma = *pSigma + S->epsilon;
sigma1 = *pSigma1 + S->epsilon;
tmp -= 0.5*log(2.0 * PI_F * sigma);
tmp -= 0.5*(*pIn - *pTheta) * (*pIn - *pTheta) / sigma;
tmp1 -= 0.5*log(2.0 * PI_F * sigma1);
tmp1 -= 0.5*(*pIn - *pTheta1) * (*pIn - *pTheta1) / sigma1;
pIn++;
pTheta++;
pSigma++;
pTheta1++;
pSigma1++;
vecBlkCnt--;
}
*buffer++ = tmp;
*buffer++ = tmp1;
/* Each pointer ended at the start of the partner's row: skip one more
   row so both pairs advance by two classes. */
pSigma += S->vectorDimension;
pTheta += S->vectorDimension;
pSigma1 += S->vectorDimension;
pTheta1 += S->vectorDimension;
classBlkCnt--;
}
/* Tail: last class when numberOfClasses is odd (same math, one class). */
classBlkCnt = S->numberOfClasses & 1;
while(classBlkCnt > 0)
{
pIn = in;
tmp = log(*pPrior++);
tmpV = vdupq_n_f32(0.0);
vecBlkCnt = S->vectorDimension >> 2;
while(vecBlkCnt > 0)
{
sigmaV = vld1q_f32(pSigma);
thetaV = vld1q_f32(pTheta);
inV = vld1q_f32(pIn);
sigmaV = vaddq_f32(sigmaV, epsilonV);
tmpVb = vmulq_n_f32(sigmaV,DPI_F);
tmpVb = vlogq_f32(tmpVb);
tmpV = vmlsq_n_f32(tmpV,tmpVb,0.5);
tmpVb = vsubq_f32(inV,thetaV);
tmpVb = vmulq_f32(tmpVb,tmpVb);
tmpVb = vmulq_f32(tmpVb, vinvq_f32(sigmaV));
tmpV = vmlsq_n_f32(tmpV,tmpVb,0.5);
pIn += 4;
pTheta += 4;
pSigma += 4;
vecBlkCnt--;
}
tmpV2 = vpadd_f32(vget_low_f32(tmpV),vget_high_f32(tmpV));
tmp += tmpV2[0] + tmpV2[1];
vecBlkCnt = S->vectorDimension & 3;
while(vecBlkCnt > 0)
{
sigma = *pSigma + S->epsilon;
tmp -= 0.5*log(2.0 * PI_F * sigma);
tmp -= 0.5*(*pIn - *pTheta) * (*pIn - *pTheta) / sigma;
pIn++;
pTheta++;
pSigma++;
vecBlkCnt--;
}
*buffer++ = tmp;
classBlkCnt--;
}
/* Predicted class = index of the largest log posterior. */
arm_max_f32(pBuffer,S->numberOfClasses,&result,&index);
return(index);
}
#else
/*
 * Scalar implementation.
 *
 * For each class c this computes the (unnormalized) log posterior
 *     log(classPriors[c])
 *   - 0.5 * sum_d log(2*pi*(sigma[c][d] + epsilon))
 *   - 0.5 * sum_d (in[d] - theta[c][d])^2 / (sigma[c][d] + epsilon)
 * into pBuffer[c] and returns the index of the maximum.
 *
 * Fixes vs. previous revision:
 *  - removed the dead store `tmp = log(*pPrior)` whose value was always
 *    overwritten before use (the prior is added once, at the end);
 *  - removed the redundant re-initialization of pTheta/pSigma;
 *  - loop counters are uint32_t to match the instance fields (no
 *    signed/unsigned comparison);
 *  - single-precision logf and float constants in this f32 kernel.
 */
uint32_t arm_gaussian_naive_bayes_predict_f32(const arm_gaussian_naive_bayes_instance_f32 *S,
const float32_t * in,
float32_t *pBuffer)
{
    uint32_t nbClass;
    uint32_t nbDim;
    const float32_t *pPrior = S->classPriors;
    const float32_t *pTheta = S->theta;   /* walks the means, row per class  */
    const float32_t *pSigma = S->sigma;   /* walks the variances, row per class */
    float32_t *buffer = pBuffer;
    const float32_t *pIn = in;
    float32_t result;
    float32_t sigma;
    float32_t acc1, acc2;
    uint32_t index;

    for (nbClass = 0; nbClass < S->numberOfClasses; nbClass++)
    {
        pIn = in;

        /* acc1 accumulates the Gaussian normalization terms,
           acc2 the variance-scaled squared distances. */
        acc1 = 0.0f;
        acc2 = 0.0f;
        for (nbDim = 0; nbDim < S->vectorDimension; nbDim++)
        {
            /* epsilon regularizes the variance (avoids division by zero). */
            sigma = *pSigma + S->epsilon;
            acc1 += logf(2.0f * PI_F * sigma);
            acc2 += (*pIn - *pTheta) * (*pIn - *pTheta) / sigma;
            pIn++;
            pTheta++;
            pSigma++;
        }

        /* Unnormalized log posterior for this class. */
        *buffer++ = logf(*pPrior++) - 0.5f * acc1 - 0.5f * acc2;
    }

    /* Predicted class = index of the largest log posterior. */
    arm_max_f32(pBuffer, S->numberOfClasses, &result, &index);

    return (index);
}
#endif
/**
* @} end of groupBayes group
*/

@ -28,6 +28,7 @@ option(STATISTICS "Statistics Functions" ON)
option(SUPPORT "Support Functions" ON) option(SUPPORT "Support Functions" ON)
option(TRANSFORM "Transform Functions" ON) option(TRANSFORM "Transform Functions" ON)
option(SVM "Support Vector Machine Functions" ON) option(SVM "Support Vector Machine Functions" ON)
option(BAYES "Bayesian Estimators" ON)
# When OFF it is the default behavior : all tables are included. # When OFF it is the default behavior : all tables are included.
option(CONFIGTABLE "Configuration of table allowed" OFF) option(CONFIGTABLE "Configuration of table allowed" OFF)
@ -230,6 +231,11 @@ if (SVM)
target_link_libraries(CMSISDSP INTERFACE CMSISDSPSVM) target_link_libraries(CMSISDSP INTERFACE CMSISDSPSVM)
endif() endif()
if (BAYES)
add_subdirectory(BayesFunctions)
target_link_libraries(CMSISDSP INTERFACE CMSISDSPBayes)
endif()
### Includes ### Includes
target_include_directories(CMSISDSP INTERFACE "${DSP}/Include") target_include_directories(CMSISDSP INTERFACE "${DSP}/Include")

@ -84,6 +84,7 @@ set(TESTSRC testmain.cpp
Source/BasicMathsBenchmarksQ15.cpp Source/BasicMathsBenchmarksQ15.cpp
Source/BasicMathsBenchmarksQ7.cpp Source/BasicMathsBenchmarksQ7.cpp
Source/SVMF32.cpp Source/SVMF32.cpp
Source/BayesF32.cpp
Source/FullyConnected.cpp Source/FullyConnected.cpp
Source/FullyConnectedBench.cpp Source/FullyConnectedBench.cpp
GeneratedSource/TestDesc.cpp GeneratedSource/TestDesc.cpp

@ -0,0 +1,15 @@
/* Test entry point for the f32 Gaussian naive Bayes predictor. */
void test_gaussian_naive_bayes_predict_f32();
// Pattern IDs (indices of the input/reference pattern files)
static const int DIMS1_S16_ID=0;
static const int INPUTS1_F32_ID=1;
static const int PARAMS1_F32_ID=2;
static const int PROBAS1_F32_ID=3;
static const int PREDICTS1_S16_ID=4;
// Output IDs (indices of the dump/output patterns)
static const int OUT_PROBA_F32_ID=0;
static const int OUT_PREDICT_S16_ID=1;
// Test IDs
static const int TEST_GAUSSIAN_NAIVE_BAYES_PREDICT_F32_1=1;

File diff suppressed because it is too large Load Diff

@ -1,121 +1,32 @@
#include "Test.h" #include "Test.h"
#include "Pattern.h" #include "Pattern.h"
#include "BasicTestsF32.h" #include "BayesF32.h"
#include "SVMF32.h" class BayesTests : public Client::Group
#include "BasicMathsBenchmarksF32.h"
#include "BasicMathsBenchmarksQ31.h"
#include "BasicMathsBenchmarksQ15.h"
#include "BasicMathsBenchmarksQ7.h"
#include "FullyConnected.h"
#include "FullyConnectedBench.h"
class BasicTests : public Client::Group
{ {
public: public:
BasicTests(Testing::testID_t id):Client::Group(id) BayesTests(Testing::testID_t id):Client::Group(id)
,BasicTestsF32Var(1) ,BayesF32Var(1)
{ {
this->addContainer(&BasicTestsF32Var); this->addContainer(&BayesF32Var);
} }
private: private:
BasicTestsF32 BasicTestsF32Var; BayesF32 BayesF32Var;
;
};
class SVMTests : public Client::Group
{
public:
SVMTests(Testing::testID_t id):Client::Group(id)
,SVMF32Var(1)
{
this->addContainer(&SVMF32Var);
}
private:
SVMF32 SVMF32Var;
; ;
}; };
class DSPTests : public Client::Group class DSPTests : public Client::Group
{ {
public: public:
DSPTests(Testing::testID_t id):Client::Group(id) DSPTests(Testing::testID_t id):Client::Group(id)
,BasicTestsVar(1) ,BayesTestsVar(3)
,SVMTestsVar(2)
{
this->addContainer(&BasicTestsVar);
this->addContainer(&SVMTestsVar);
}
private:
BasicTests BasicTestsVar;
SVMTests SVMTestsVar;
;
};
class BasicBenchmarks : public Client::Group
{
public:
BasicBenchmarks(Testing::testID_t id):Client::Group(id)
,BasicMathsBenchmarksF32Var(1)
,BasicMathsBenchmarksQ31Var(2)
,BasicMathsBenchmarksQ15Var(3)
,BasicMathsBenchmarksQ7Var(4)
{
this->addContainer(&BasicMathsBenchmarksF32Var);
this->addContainer(&BasicMathsBenchmarksQ31Var);
this->addContainer(&BasicMathsBenchmarksQ15Var);
this->addContainer(&BasicMathsBenchmarksQ7Var);
}
private:
BasicMathsBenchmarksF32 BasicMathsBenchmarksF32Var;
BasicMathsBenchmarksQ31 BasicMathsBenchmarksQ31Var;
BasicMathsBenchmarksQ15 BasicMathsBenchmarksQ15Var;
BasicMathsBenchmarksQ7 BasicMathsBenchmarksQ7Var;
;
};
class DSPBenchmarks : public Client::Group
{
public:
DSPBenchmarks(Testing::testID_t id):Client::Group(id)
,BasicBenchmarksVar(1)
{ {
this->addContainer(&BasicBenchmarksVar); this->addContainer(NULL);this->addContainer(NULL);this->addContainer(&BayesTestsVar);
} }
private: private:
BasicBenchmarks BasicBenchmarksVar; BayesTests BayesTestsVar;
;
};
class NNTests : public Client::Group
{
public:
NNTests(Testing::testID_t id):Client::Group(id)
,FullyConnectedVar(1)
{
this->addContainer(&FullyConnectedVar);
}
private:
FullyConnected FullyConnectedVar;
;
};
class NNBenchmarks : public Client::Group
{
public:
NNBenchmarks(Testing::testID_t id):Client::Group(id)
,FullyConnectedBenchVar(1)
{
this->addContainer(&FullyConnectedBenchVar);
}
private:
FullyConnectedBench FullyConnectedBenchVar;
; ;
}; };
class Root : public Client::Group class Root : public Client::Group
@ -123,21 +34,12 @@ class Root : public Client::Group
public: public:
Root(Testing::testID_t id):Client::Group(id) Root(Testing::testID_t id):Client::Group(id)
,DSPTestsVar(1) ,DSPTestsVar(1)
,DSPBenchmarksVar(2)
,NNTestsVar(3)
,NNBenchmarksVar(4)
{ {
this->addContainer(&DSPTestsVar); this->addContainer(&DSPTestsVar);
this->addContainer(&DSPBenchmarksVar); this->addContainer(NULL);this->addContainer(NULL);this->addContainer(NULL);
this->addContainer(&NNTestsVar);
this->addContainer(&NNBenchmarksVar);
} }
private: private:
DSPTests DSPTestsVar; DSPTests DSPTestsVar;
DSPBenchmarks DSPBenchmarksVar;
NNTests NNTestsVar;
NNBenchmarks NNBenchmarksVar;
; ;
}; };

@ -8,614 +8,29 @@ __ALIGNED(8) const char testDesc[]={
1,0,0,0, 1,0,0,0,
'n','y','D','S','P','\0', 'n','y','D','S','P','\0',
3,0,0,0, 3,0,0,0,
1,0,0,0,
'n','y','B','a','s','i','c','M','a','t','h','s','\0',
2,0,0,0,
1,0,0,0,
'n','y','B','a','s','i','c','M','a','t','h','s','F','3','2','\0',
0,0,0,0,
12,0,0,0,
0,0,0,0,
0,1,0,0,
0,4,0,0,
0,1,0,0,
0,8,0,0,
0,1,0,0,
0,12,0,0,
0,1,0,0,
0,16,0,0,
0,1,0,0,
0,20,0,0,
0,1,0,0,
0,24,0,0,
0,1,0,0,
0,28,0,0,
0,1,0,0,
0,32,0,0,
1,0,0,0,
8,32,0,0,
1,0,0,0,
16,32,0,0,
1,0,0,0,
24,32,0,0,
0,1,0,0,
2,0,0,0,
'O','u','t','p','u','t','\0',
'S','t','a','t','e','\0',
0,0,0,0,
1,0,0,0,
1,0,0,0,
'n','n',
1,0,0,0,
2,0,0,0,
'n','n',
1,0,0,0,
3,0,0,0, 3,0,0,0,
'n','n', 'n','y','B','a','y','e','s','\0',
1,0,0,0,
4,0,0,0,
'n','n',
1,0,0,0,
5,0,0,0,
'n','n',
1,0,0,0,
6,0,0,0,
'n','n',
1,0,0,0,
7,0,0,0,
'n','n',
1,0,0,0,
8,0,0,0,
'n','n',
1,0,0,0,
9,0,0,0,
'n','n',
1,0,0,0,
10,0,0,0,
'n','n',
1,0,0,0,
11,0,0,0,
'n','n',
1,0,0,0,
12,0,0,0,
'n','n',
1,0,0,0,
13,0,0,0,
'n','n',
1,0,0,0,
14,0,0,0,
'n','n',
1,0,0,0,
15,0,0,0,
'n','n',
1,0,0,0,
16,0,0,0,
'n','n',
1,0,0,0,
17,0,0,0,
'n','n',
1,0,0,0,
18,0,0,0,
'n','n',
1,0,0,0,
19,0,0,0,
'n','n',
1,0,0,0,
20,0,0,0,
'n','n',
1,0,0,0,
21,0,0,0,
'n','n',
1,0,0,0,
22,0,0,0,
'n','n',
1,0,0,0,
23,0,0,0,
'n','n',
1,0,0,0,
24,0,0,0,
'n','n',
3,0,0,0,
2,0,0,0,
'n','y','S','V','M','\0',
2,0,0,0, 2,0,0,0,
1,0,0,0, 1,0,0,0,
'n','y','S','V','M','F','3','2','\0', 'n','y','B','a','y','e','s','F','3','2','\0',
0,0,0,0, 0,0,0,0,
20,0,0,0,
24,36,0,0,
232,3,0,0,
184,51,0,0,
67,0,0,0,
200,52,0,0,
6,0,0,0,
216,52,0,0,
100,0,0,0,
104,54,0,0,
232,3,0,0,
8,70,0,0,
113,0,0,0,
208,71,0,0,
7,0,0,0,
224,71,0,0,
100,0,0,0,
112,73,0,0,
232,3,0,0,
16,89,0,0,
112,0,0,0,
208,90,0,0,
6,0,0,0,
224,90,0,0,
100,0,0,0,
112,92,0,0,
232,3,0,0,
16,108,0,0,
113,0,0,0,
216,109,0,0,
6,0,0,0,
232,109,0,0,
100,0,0,0,
120,111,0,0,
232,3,0,0,
24,127,0,0,
46,0,0,0,
208,127,0,0,
6,0,0,0,
224,127,0,0,
100,0,0,0,
1,0,0,0,
'O','u','t','p','u','t','\0',
0,0,0,0,
1,0,0,0,
1,0,0,0,
'n','n',
1,0,0,0,
2,0,0,0,
'n','n',
1,0,0,0,
3,0,0,0,
'n','n',
1,0,0,0,
4,0,0,0,
'n','n',
1,0,0,0,
5,0,0,0,
'n','n',
3,0,0,0,
2,0,0,0,
'n','y','D','S','P','\0',
3,0,0,0,
1,0,0,0,
'n','y','B','a','s','i','c','M','a','t','h','s','\0',
2,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'y','B','a','s','i','c','M','a','t','h','s','F','3','2','\0',
1,0,0,0,
2,0,0,0,
112,129,0,0,
0,1,0,0,
112,133,0,0,
0,1,0,0,
1,0,0,0,
'O','u','t','p','u','t','\0',
1,0,0,0,
'g',1,0,0,0,
6,0,0,0,
5,0,0,0,
1,0,0,0,
5,0,0,0,
16,0,0,0,
32,0,0,0,
64,0,0,0,
128,0,0,0,
0,1,0,0,
1,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
2,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
3,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
4,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
5,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
6,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
7,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
8,0,0,0,
'y',0,0,0,0,
'n',
2,0,0,0,
2,0,0,0,
'y',0,0,0,0,
'y','B','a','s','i','c','M','a','t','h','s','Q','3','1','\0',
1,0,0,0,
2,0,0,0,
112,137,0,0,
0,1,0,0,
112,141,0,0,
0,1,0,0,
1,0,0,0,
'O','u','t','p','u','t','\0',
1,0,0,0,
'g',1,0,0,0,
6,0,0,0,
5,0,0,0,
1,0,0,0,
5,0,0,0,
16,0,0,0,
32,0,0,0,
64,0,0,0,
128,0,0,0,
0,1,0,0,
1,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
2,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
3,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
4,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
5,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
6,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
7,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
8,0,0,0,
'y',0,0,0,0,
'n',
2,0,0,0,
3,0,0,0,
'y',0,0,0,0,
'y','B','a','s','i','c','M','a','t','h','s','Q','1','5','\0',
1,0,0,0,
2,0,0,0,
112,145,0,0,
0,1,0,0,
112,147,0,0,
0,1,0,0,
1,0,0,0,
'O','u','t','p','u','t','\0',
1,0,0,0,
'g',1,0,0,0,
6,0,0,0,
5,0,0,0, 5,0,0,0,
1,0,0,0,
5,0,0,0,
16,0,0,0,
32,0,0,0,
64,0,0,0,
128,0,0,0,
0,1,0,0,
1,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
2,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
3,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
4,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
5,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
6,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
7,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
8,0,0,0,
'y',0,0,0,0,
'n',
2,0,0,0,
4,0,0,0,
'y',0,0,0,0,
'y','B','a','s','i','c','M','a','t','h','s','Q','7','\0',
1,0,0,0,
2,0,0,0,
112,149,0,0,
0,1,0,0,
112,150,0,0,
0,1,0,0,
1,0,0,0,
'O','u','t','p','u','t','\0',
1,0,0,0,
'g',1,0,0,0,
6,0,0,0,
5,0,0,0,
1,0,0,0,
5,0,0,0,
16,0,0,0,
32,0,0,0,
64,0,0,0,
128,0,0,0,
0,1,0,0,
1,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
2,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
3,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
4,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
5,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
6,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
7,0,0,0,
'y',0,0,0,0,
'n',
1,0,0,0,
8,0,0,0,
'y',0,0,0,0,
'n',
3,0,0,0,
3,0,0,0,
'n','y','N','N','\0',
2,0,0,0,
1,0,0,0,
'n','y','F','u','l','l','y','C','o','n','n','e','c','t','e','d','\0',
0,0,0,0, 0,0,0,0,
60,0,0,0, 3,0,0,0,
112,151,0,0,
10,0,0,0,
128,151,0,0,
4,0,0,0,
136,151,0,0,
40,0,0,0,
176,151,0,0,
4,0,0,0,
184,151,0,0,
8,0,0,0,
192,151,0,0,
9,0,0,0,
208,151,0,0,
72,0,0,0,
24,152,0,0,
9,0,0,0,
40,152,0,0,
10,0,0,0,
56,152,0,0,
4,0,0,0,
64,152,0,0,
40,0,0,0,
104,152,0,0,
4,0,0,0,
112,152,0,0,
9,0,0,0,
128,152,0,0,
1,0,0,0,
136,152,0,0,
9,0,0,0,
152,152,0,0,
1,0,0,0,
160,152,0,0,
8,0,0,0,
168,152,0,0,
8,0,0,0,
176,152,0,0,
64,0,0,0,
240,152,0,0,
8,0,0,0,
248,152,0,0,
54,0,0,0,
48,153,0,0,
1,0,0,0,
56,153,0,0,
6,0,0,0,
64,153,0,0,
9,0,0,0,
80,153,0,0,
64,0,0,0,
144,153,0,0,
1,0,0,0,
152,153,0,0,
8,0,0,0,
160,153,0,0,
8,0,0,0, 8,0,0,0,
168,153,0,0, 140,0,0,0,
40,0,0,0, 56,2,0,0,
208,153,0,0, 146,0,0,0,
1,0,0,0, 128,4,0,0,
216,153,0,0, 50,0,0,0,
72,5,0,0,
10,0,0,0, 10,0,0,0,
232,153,0,0,
4,0,0,0,
240,153,0,0,
54,0,0,0,
40,154,0,0,
1,0,0,0,
48,154,0,0,
6,0,0,0,
56,154,0,0,
9,0,0,0,
72,154,0,0,
40,0,0,0,
112,154,0,0,
1,0,0,0,
120,154,0,0,
10,0,0,0,
136,154,0,0,
4,0,0,0,
144,154,0,0,
64,0,0,0,
208,154,0,0,
1,0,0,0,
216,154,0,0,
8,0,0,0,
224,154,0,0,
8,0,0,0,
232,154,0,0,
72,0,0,0,
48,155,0,0,
4,0,0,0,
56,155,0,0,
32,0,0,0,
88,155,0,0,
36,0,0,0,
128,155,0,0,
64,0,0,0,
192,155,0,0,
5,0,0,0,
200,155,0,0,
40,0,0,0,
240,155,0,0,
40,0,0,0,
24,156,0,0,
28,0,0,0,
56,156,0,0,
3,0,0,0,
64,156,0,0,
21,0,0,0,
88,156,0,0,
12,0,0,0,
104,156,0,0,
56,0,0,0,
160,156,0,0,
4,0,0,0,
168,156,0,0,
28,0,0,0,
200,156,0,0,
32,0,0,0,
2,0,0,0, 2,0,0,0,
'O','u','t','p','u','t','\0', 'P','r','o','b','a','s','\0',
'T','e','m','p','\0', 'P','r','e','d','i','c','t','s','\0',
0,0,0,0, 0,0,0,0,
1,0,0,0, 1,0,0,0,
1,0,0,0, 1,0,0,0,
'n','n', 'n','n',
1,0,0,0,
2,0,0,0,
'n','n',
1,0,0,0,
3,0,0,0,
'n','n',
1,0,0,0,
4,0,0,0,
'n','n',
1,0,0,0,
5,0,0,0,
'n','n',
1,0,0,0,
6,0,0,0,
'n','n',
1,0,0,0,
7,0,0,0,
'n','n',
1,0,0,0,
8,0,0,0,
'n','n',
1,0,0,0,
9,0,0,0,
'n','n',
1,0,0,0,
10,0,0,0,
'n','n',
1,0,0,0,
11,0,0,0,
'n','n',
1,0,0,0,
12,0,0,0,
'n','n',
1,0,0,0,
13,0,0,0,
'n','n',
1,0,0,0,
14,0,0,0,
'n','n',
1,0,0,0,
15,0,0,0,
'n','n',
3,0,0,0,
4,0,0,0,
'n','y','N','N','\0',
2,0,0,0,
1,0,0,0,
'n','y','F','u','l','l','y','C','o','n','n','e','c','t','e','d','\0',
1,0,0,0,
4,0,0,0,
232,156,0,0,
10,0,0,0,
248,156,0,0,
4,0,0,0,
0,157,0,0,
40,0,0,0,
40,157,0,0,
4,0,0,0,
2,0,0,0,
'O','u','t','p','u','t','\0',
'T','e','m','p','\0',
1,0,0,0,
'g',1,0,0,0,
5,0,0,0,
4,0,0,0,
1,0,0,0,
4,0,0,0,
10,0,0,0,
20,0,0,0,
100,0,0,0,
200,0,0,0,
1,0,0,0,
1,0,0,0,
'y',0,0,0,0,
'n',
}; };
#endif #endif

@ -1,126 +1,8 @@
#include "Test.h" #include "Test.h"
#include "BasicTestsF32.h" #include "BayesF32.h"
BasicTestsF32::BasicTestsF32(Testing::testID_t id):Client::Suite(id) BayesF32::BayesF32(Testing::testID_t id):Client::Suite(id)
{ {
this->addTest(1,(Client::test)&BasicTestsF32::test_add_f32); this->addTest(1,(Client::test)&BayesF32::test_gaussian_naive_bayes_predict_f32);
this->addTest(2,(Client::test)&BasicTestsF32::test_add_f32);
this->addTest(3,(Client::test)&BasicTestsF32::test_add_f32);
this->addTest(4,(Client::test)&BasicTestsF32::test_sub_f32);
this->addTest(5,(Client::test)&BasicTestsF32::test_sub_f32);
this->addTest(6,(Client::test)&BasicTestsF32::test_sub_f32);
this->addTest(7,(Client::test)&BasicTestsF32::test_mult_f32);
this->addTest(8,(Client::test)&BasicTestsF32::test_mult_f32);
this->addTest(9,(Client::test)&BasicTestsF32::test_mult_f32);
this->addTest(10,(Client::test)&BasicTestsF32::test_negate_f32);
this->addTest(11,(Client::test)&BasicTestsF32::test_negate_f32);
this->addTest(12,(Client::test)&BasicTestsF32::test_negate_f32);
this->addTest(13,(Client::test)&BasicTestsF32::test_offset_f32);
this->addTest(14,(Client::test)&BasicTestsF32::test_offset_f32);
this->addTest(15,(Client::test)&BasicTestsF32::test_offset_f32);
this->addTest(16,(Client::test)&BasicTestsF32::test_scale_f32);
this->addTest(17,(Client::test)&BasicTestsF32::test_scale_f32);
this->addTest(18,(Client::test)&BasicTestsF32::test_scale_f32);
this->addTest(19,(Client::test)&BasicTestsF32::test_dot_prod_f32);
this->addTest(20,(Client::test)&BasicTestsF32::test_dot_prod_f32);
this->addTest(21,(Client::test)&BasicTestsF32::test_dot_prod_f32);
this->addTest(22,(Client::test)&BasicTestsF32::test_abs_f32);
this->addTest(23,(Client::test)&BasicTestsF32::test_abs_f32);
this->addTest(24,(Client::test)&BasicTestsF32::test_abs_f32);
}
#include "SVMF32.h"
SVMF32::SVMF32(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&SVMF32::test_svm_linear_predict_f32);
this->addTest(2,(Client::test)&SVMF32::test_svm_polynomial_predict_f32);
this->addTest(3,(Client::test)&SVMF32::test_svm_rbf_predict_f32);
this->addTest(4,(Client::test)&SVMF32::test_svm_sigmoid_predict_f32);
this->addTest(5,(Client::test)&SVMF32::test_svm_rbf_predict_f32);
}
#include "BasicMathsBenchmarksF32.h"
BasicMathsBenchmarksF32::BasicMathsBenchmarksF32(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&BasicMathsBenchmarksF32::vec_mult_f32);
this->addTest(2,(Client::test)&BasicMathsBenchmarksF32::vec_add_f32);
this->addTest(3,(Client::test)&BasicMathsBenchmarksF32::vec_sub_f32);
this->addTest(4,(Client::test)&BasicMathsBenchmarksF32::vec_abs_f32);
this->addTest(5,(Client::test)&BasicMathsBenchmarksF32::vec_negate_f32);
this->addTest(6,(Client::test)&BasicMathsBenchmarksF32::vec_offset_f32);
this->addTest(7,(Client::test)&BasicMathsBenchmarksF32::vec_scale_f32);
this->addTest(8,(Client::test)&BasicMathsBenchmarksF32::vec_dot_f32);
}
#include "BasicMathsBenchmarksQ31.h"
BasicMathsBenchmarksQ31::BasicMathsBenchmarksQ31(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&BasicMathsBenchmarksQ31::vec_mult_q31);
this->addTest(2,(Client::test)&BasicMathsBenchmarksQ31::vec_add_q31);
this->addTest(3,(Client::test)&BasicMathsBenchmarksQ31::vec_sub_q31);
this->addTest(4,(Client::test)&BasicMathsBenchmarksQ31::vec_abs_q31);
this->addTest(5,(Client::test)&BasicMathsBenchmarksQ31::vec_negate_q31);
this->addTest(6,(Client::test)&BasicMathsBenchmarksQ31::vec_offset_q31);
this->addTest(7,(Client::test)&BasicMathsBenchmarksQ31::vec_scale_q31);
this->addTest(8,(Client::test)&BasicMathsBenchmarksQ31::vec_dot_q31);
}
#include "BasicMathsBenchmarksQ15.h"
BasicMathsBenchmarksQ15::BasicMathsBenchmarksQ15(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&BasicMathsBenchmarksQ15::vec_mult_q15);
this->addTest(2,(Client::test)&BasicMathsBenchmarksQ15::vec_add_q15);
this->addTest(3,(Client::test)&BasicMathsBenchmarksQ15::vec_sub_q15);
this->addTest(4,(Client::test)&BasicMathsBenchmarksQ15::vec_abs_q15);
this->addTest(5,(Client::test)&BasicMathsBenchmarksQ15::vec_negate_q15);
this->addTest(6,(Client::test)&BasicMathsBenchmarksQ15::vec_offset_q15);
this->addTest(7,(Client::test)&BasicMathsBenchmarksQ15::vec_scale_q15);
this->addTest(8,(Client::test)&BasicMathsBenchmarksQ15::vec_dot_q15);
}
#include "BasicMathsBenchmarksQ7.h"
BasicMathsBenchmarksQ7::BasicMathsBenchmarksQ7(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&BasicMathsBenchmarksQ7::vec_mult_q7);
this->addTest(2,(Client::test)&BasicMathsBenchmarksQ7::vec_add_q7);
this->addTest(3,(Client::test)&BasicMathsBenchmarksQ7::vec_sub_q7);
this->addTest(4,(Client::test)&BasicMathsBenchmarksQ7::vec_abs_q7);
this->addTest(5,(Client::test)&BasicMathsBenchmarksQ7::vec_negate_q7);
this->addTest(6,(Client::test)&BasicMathsBenchmarksQ7::vec_offset_q7);
this->addTest(7,(Client::test)&BasicMathsBenchmarksQ7::vec_scale_q7);
this->addTest(8,(Client::test)&BasicMathsBenchmarksQ7::vec_dot_q7);
}
#include "FullyConnected.h"
FullyConnected::FullyConnected(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(2,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(3,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(4,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(5,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(6,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(7,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(8,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(9,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(10,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(11,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(12,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(13,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(14,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
this->addTest(15,(Client::test)&FullyConnected::test_fully_connected_tflite_s8);
}
#include "FullyConnectedBench.h"
FullyConnectedBench::FullyConnectedBench(Testing::testID_t id):Client::Suite(id)
{
this->addTest(1,(Client::test)&FullyConnectedBench::test_fully_connected_tflite_s8);
} }

@ -0,0 +1,31 @@
#include "Test.h"
#include "Pattern.h"
// Test suite for the f32 Gaussian naive Bayes predictor.
class BayesF32:public Client::Suite
{
public:
BayesF32(Testing::testID_t id);
void setUp(Testing::testID_t,std::vector<Testing::param_t>& paramsArgs,Client::PatternMgr *mgr);
void tearDown(Testing::testID_t,Client::PatternMgr *mgr);
private:
#include "BayesF32_decl.h"
// Input patterns: test vectors, trained classifier parameters, dimensions.
Client::Pattern<float32_t> input;
Client::Pattern<float32_t> params;
Client::Pattern<int16_t> dims;
// Output patterns written by the test (per-class probabilities, predictions).
Client::LocalPattern<float32_t> outputProbas;
Client::LocalPattern<int16_t> outputPredicts;
// Reference patterns are not loaded when we are in dump mode
Client::RefPattern<float32_t> probas;
Client::RefPattern<int16_t> predicts;
// Decoded test configuration and classifier parameters.
int nbPatterns,classNb,vecDim;
const float32_t *theta;
const float32_t *sigma;
const float32_t *classPrior;
float32_t epsilon;
// Instance passed to arm_gaussian_naive_bayes_predict_f32.
arm_gaussian_naive_bayes_instance_f32 bayes;
};

@ -0,0 +1,10 @@
0x00000003
0x00000001
0x00000000
0x00000000
0x00000000
0x00000001
0x00000004
0x00000002
0x00000002
0x00000003

@ -0,0 +1,50 @@
0xc3017842
0xc36b5d8b
0xc2f2803d
0x415bb06f
0xc34a93d9
0xc3452b02
0x41236ac1
0xc30cc120
0xc33b4cac
0xc45d14e6
0x40b780c4
0xc2e83cdc
0xc385f13e
0xc35d1ff9
0xc30a7811
0x4001fb63
0xc30b3a53
0xc37a2b18
0xc357eae2
0xc301a4de
0x41001b9f
0xc2c75454
0xc3560891
0xc34a8caa
0xc2d9f406
0xc3407daf
0xc1bdcce3
0xc32b4608
0xc33e42f2
0xc473ac4d
0xc32ff4eb
0xc4ab3cff
0xc3779197
0xc3a0b3d2
0xbebb3f33
0xc32fcd25
0xc2d752b7
0x4165385a
0xc2cf8a8c
0xc3aaa905
0xc3050f3e
0xc2aef081
0x3f72c76f
0xc2812084
0xc343f541
0xc30e386f
0xc369727a
0xc3107f09
0x418753bd
0xc3385deb

@ -0,0 +1,147 @@
import os.path
import itertools
import Tools
import random
import numpy as np
from sklearn.naive_bayes import GaussianNB
def printS(a):
    # Emit one scalar as a Wolfram-language Interpreter["Number"] expression.
    print("Interpreter[\"Number\"][\"%.9g\"]" % a, end="")


def printV(v):
    # Emit a vector as a brace-delimited, comma-separated list of scalars.
    print("{", end="")
    for i, r in enumerate(v):
        if i > 0:
            print(",", end="")
        printS(r)
    print("}", end="")


def printM(v):
    # Emit a matrix as a brace-delimited, comma-separated list of row vectors.
    print("{", end="")
    for i, row in enumerate(v):
        if i > 0:
            print(",", end="")
        printV(row)
    print("}", end="")
NBTESTSAMPLES = 10
VECDIM = [12, 14, 20]
BAYESCLASSES = [3, 5, 4]
NBTRAININGSAMPLES = 30

# Distance between the two centers (training vectors are gaussianly
# distributed around the centers)
CENTER_DISTANCE = 1


# Generate one random point distributed around one cluster center.
# Cluster centers sit on the axes like (1,0,0,0), (0,1,0,0), (0,0,1,0) etc ...
def newRandomVector(nbClasses, vecDim):
    noise = np.random.randn(vecDim)
    noise = noise * CENTER_DISTANCE / 2.0 / 6.0
    label = np.random.choice(range(0, nbClasses))
    base = np.zeros(vecDim)
    center = np.copy(base)
    center[label] = base[0] + CENTER_DISTANCE
    return ((noise + center).tolist(), label)
def trainGaussian(nbClasses, vecDim):
    # Build a random labelled training set and fit a scikit-learn
    # Gaussian naive Bayes classifier on it.
    samples = []
    labels = []
    for _ in range(0, NBTRAININGSAMPLES):
        v, c = newRandomVector(nbClasses, vecDim)
        samples.append(v)
        labels.append(c)
    gnb = GaussianNB()
    gnb.fit(samples, labels)
    return (gnb)
def generateNewTest(config, nb):
    # Train one Gaussian naive Bayes classifier and dump its parameters,
    # a batch of test vectors, and the reference outputs (log likelihoods
    # and predicted classes) into the pattern files.
    dims = []
    inputs = []
    referenceproba = []
    referencepredict = []
    params = []

    dims.append(NBTESTSAMPLES)
    classNb = BAYESCLASSES[nb % len(BAYESCLASSES)]
    vecDim = VECDIM[nb % len(VECDIM)]
    dims.append(classNb)
    dims.append(vecDim)

    # Train a classifier for a given vector dimension and
    # given number of classes
    clf = trainGaussian(classNb, vecDim)

    # Flatten the trained parameters: means, variances, priors, epsilon.
    params += list(np.reshape(clf.theta_, np.size(clf.theta_)))
    params += list(np.reshape(clf.sigma_, np.size(clf.sigma_)))
    params += list(np.reshape(clf.class_prior_, np.size(clf.class_prior_)))
    params.append(clf.epsilon_)

    for _ in range(0, NBTESTSAMPLES):
        # Generate a test pattern for this classifier
        v, c = newRandomVector(classNb, vecDim)
        inputs += v
        y_pred = clf.predict([v])
        referencepredict.append(y_pred[0])
        probas = clf._joint_log_likelihood([v])
        probas = probas[0]
        referenceproba += list(probas)

    inputs = np.array(inputs)
    params = np.array(params)
    referenceproba = np.array(referenceproba)
    referencepredict = np.array(referencepredict)
    dims = np.array(dims)

    config.writeInput(nb, inputs, "Inputs")
    config.writeInputS16(nb, dims, "Dims")
    config.writeReference(nb, referenceproba, "Probas")
    config.writeReferenceS16(nb, referencepredict, "Predicts")
    config.writeReference(nb, params, "Params")
def writeTests(config):
    # A single f32 test configuration is generated.
    generateNewTest(config, 1)


PATTERNDIR = os.path.join("Patterns", "DSP", "Bayes", "Bayes")
PARAMDIR = os.path.join("Parameters", "DSP", "Bayes", "Bayes")

configf32 = Tools.Config(PATTERNDIR, PARAMDIR, "f32")
writeTests(configf32)

@ -0,0 +1,8 @@
H
3
// 10
0x000A
// 5
0x0005
// 14
0x000E

@ -0,0 +1,282 @@
W
140
// 0.082526
0x3da9035c
// -0.122715
0xbdfb5235
// 0.089570
0x3db770a5
// 0.984410
0x3f7c024a
// -0.053754
0xbd5c2cc3
// 0.105550
0x3dd82a93
// -0.069425
0xbd8e2ed6
// -0.054680
0xbd5ff873
// 0.017222
0x3c8d1478
// -0.051895
0xbd548fab
// 0.061678
0x3d7ca19d
// 0.094514
0x3dc19076
// 0.039464
0x3d21a4e9
// -0.047258
0xbd41911b
// -0.072871
0xbd953d88
// 0.959565
0x3f75a606
// 0.010759
0x3c304642
// -0.112127
0xbde5a2a1
// 0.057484
0x3d6b74aa
// 0.040273
0x3d24f5bb
// -0.016785
0xbc898139
// -0.035562
0xbd11a950
// -0.153482
0xbe1d2a73
// 0.238650
0x3e746095
// 0.067390
0x3d8a03b1
// 0.103171
0x3dd34b47
// -0.053730
0xbd5c13c6
// 0.015834
0x3c81b72e
// 1.043882
0x3f859de9
// 0.116564
0x3deeb952
// -0.029548
0xbcf20f78
// 0.103864
0x3dd4b698
// -0.067622
0xbd8a7d50
// -0.127750
0xbe02d0c7
// 0.000239
0x397a9d04
// 0.099036
0x3dcad399
// 0.116481
0x3dee8da6
// -0.014558
0xbc6e8694
// 0.004565
0x3b9593e2
// -0.007607
0xbbf94179
// -0.122853
0xbdfb9a56
// 0.011683
0x3c3f6926
// 1.005850
0x3f80bfae
// -0.065132
0xbd8563eb
// -0.073726
0xbd96fdac
// 0.060707
0x3d78a7e4
// -0.076584
0xbd9cd7d6
// 0.007135
0x3be9cdd1
// -0.028107
0xbce64108
// 0.044492
0x3d363db1
// 0.014834
0x3c730987
// -0.034259
0xbd0c52b7
// -0.078544
0xbda0db9b
// 0.009751
0x3c1fc37b
// -0.147147
0xbe16ada4
// -0.172388
0xbe308683
// 0.940812
0x3f70d90e
// 0.026363
0x3cd7f691
// 0.002243
0x3b12fddc
// -0.108973
0xbddf2d36
// -0.029423
0xbcf1079d
// 0.013915
0x3c63fa40
// 0.042297
0x3d2d3ff4
// -0.010303
0xbc28cc87
// 0.008521
0x3c0b9c2e
// 0.148936
0x3e1882c7
// -0.031127
0xbcfefe0e
// 0.035383
0x3d10edf8
// -0.121857
0xbdf99012
// -0.033660
0xbd09df09
// 0.009797
0x3c20851c
// 1.019472
0x3f827e0e
// -0.115635
0xbdecd1de
// 0.127865
0x3e02eef5
// 0.169645
0x3e2db76a
// -0.094044
0xbdc099f2
// -0.018883
0xbc9ab145
// 0.180708
0x3e390b75
// 0.078155
0x3da00f8b
// 0.059824
0x3d7509fc
// 0.029541
0x3cf1ff6a
// -0.125417
0xbe006d4e
// -0.093060
0xbdbe966d
// -0.067684
0xbd8a9e1c
// -0.010691
0xbc2f27ce
// -0.034047
0xbd0b74b7
// 0.000919
0x3a71054c
// 0.055346
0x3d62b2cc
// 1.119284
0x3f8f44b4
// 0.109775
0x3de0d1eb
// -0.054989
0xbd613bdf
// -0.022375
0xbcb74b12
// -0.102981
0xbdd2e7c3
// -0.055496
0xbd634f4d
// -0.070032
0xbd8f6d10
// 0.026004
0x3cd50655
// -0.082682
0xbda95538
// -0.063371
0xbd81c8d1
// -0.091020
0xbdba68a3
// 0.024148
0x3cc5d21e
// 1.106444
0x3f8d9ff1
// 0.076550
0x3d9cc610
// 0.077076
0x3d9dda01
// 0.026431
0x3cd885b3
// -0.057218
0xbd6a5d4f
// -0.119965
0xbdf5b055
// 0.007755
0x3bfe1e1f
// -0.016146
0xbc8444d1
// 0.029628
0x3cf2b64f
// -0.104383
0xbdd5c6cc
// -0.077044
0xbd9dc957
// -0.091904
0xbdbc37f3
// -0.061612
0xbd7c5ce5
// 0.028507
0x3ce98786
// 0.776034
0x3f46aa30
// -0.000410
0xb9d6e8ce
// 0.025324
0x3ccf74da
// -0.074543
0xbd98aa1e
// 0.066389
0x3d87f700
// 0.165833
0x3e29d021
// 0.127817
0x3e02e27c
// -0.056254
0xbd666a7a
// 0.091243
0x3dbadd7d
// -0.037609
0xbd1a0c55
// 0.061451
0x3d7bb3f4
// -0.073591
0xbd96b6c1
// 0.060130
0x3d764a9e
// -0.026010
0xbcd51281
// 0.022482
0x3cb82cc8
// 1.065327
0x3f885c9f
// 0.032134
0x3d039eae
// 0.015043
0x3c76776a
// -0.054310
0xbd5e73d8
// -0.082411
0xbda8c74e
// 0.071073
0x3d918eab
// -0.030931
0xbcfd62ed
// -0.045203
0xbd3926b8
// -0.040157
0xbd247b9e
// 0.042060
0x3d2c4744
// -0.049357
0xbd4a2abd

@ -0,0 +1,294 @@
W
146
// 1.029801
0x3f83d086
// 0.013457
0x3c5c79b6
// -0.031914
0xbd02b7c5
// -0.008120
0xbc05080f
// -0.030088
0xbcf67bcb
// -0.020092
0xbca497e0
// -0.008601
0xbc0ce97e
// 0.000164
0x392be656
// 0.025882
0x3cd40654
// 0.017745
0x3c915d5b
// 0.056998
0x3d6976db
// 0.044577
0x3d369628
// 0.085947
0x3db004c7
// -0.003667
0xbb705432
// -0.004632
0xbb97c9b5
// 1.028538
0x3f83a725
// -0.020162
0xbca52a8d
// 0.009027
0x3c13e64a
// 0.042113
0x3d2c7f34
// -0.009137
0xbc15b55c
// 0.052249
0x3d560343
// -0.053155
0xbd59b934
// -0.007010
0xbbe5b39b
// -0.006648
0xbbd9da7e
// 0.020218
0x3ca5a02c
// -0.039231
0xbd20b061
// -0.017365
0xbc8e40b2
// 0.029855
0x3cf4921b
// 0.034447
0x3d0d18a8
// 0.034614
0x3d0dc75b
// 0.975048
0x3f799cc5
// 0.011264
0x3c388c5f
// -0.002365
0xbb1af758
// 0.036047
0x3d13a68a
// -0.026534
0xbcd95e82
// 0.013960
0x3c64b70d
// -0.030861
0xbcfcd07c
// -0.012269
0xbc49038e
// 0.003020
0x3b45e658
// 0.003311
0x3b58f81b
// -0.023674
0xbcc1eefd
// -0.046168
0xbd3d1abc
// -0.024420
0xbcc80c93
// 0.012581
0x3c4e1ee2
// -0.027579
0xbce1ec6e
// 0.955489
0x3f749af4
// -0.016625
0xbc8831a6
// -0.022825
0xbcbafc88
// 0.014667
0x3c704fa7
// 0.014142
0x3c67b347
// 0.026414
0x3cd86230
// -0.002667
0xbb2ec1ba
// 0.034394
0x3d0ce06b
// 0.033885
0x3d0acb33
// 0.053964
0x3d5d09de
// 0.017966
0x3c932c79
// 0.018307
0x3c95f7b0
// 0.003610
0x3b6c948f
// -0.011409
0xbc3aeee9
// -0.027032
0xbcdd728e
// 1.048357
0x3f863091
// -0.074478
0xbd9887c4
// -0.007954
0xbc02534a
// -0.026101
0xbcd5d171
// 0.038192
0x3d1c6f76
// 0.007041
0x3be6b440
// 0.047545
0x3d42bf03
// 0.017187
0x3c8ccbba
// 0.009586
0x3c1d0f9e
// -0.048627
0xbd472d99
// 0.005179
0x3ba9b6ff
// 0.005278
0x3bacf685
// 0.010055
0x3c24bcf7
// 0.008276
0x3c079a15
// 0.008336
0x3c08953f
// 0.009200
0x3c16bc67
// 0.003820
0x3b7a5350
// 0.005601
0x3bb78b4f
// 0.002147
0x3b0cb90c
// 0.008073
0x3c044440
// 0.009211
0x3c16eada
// 0.007205
0x3bec1977
// 0.002050
0x3b06538c
// 0.003448
0x3b61f3d3
// 0.009982
0x3c238caa
// 0.008721
0x3c0ee130
// 0.010367
0x3c29d875
// 0.002999
0x3b449340
// 0.000442
0x39e77f72
// 0.001196
0x3a9cbe95
// 0.002232
0x3b1246b6
// 0.001798
0x3aebaa71
// 0.024016
0x3cc4bcd5
// 0.004937
0x3ba1c362
// 0.005719
0x3bbb6764
// 0.010487
0x3c2bd32e
// 0.000595
0x3a1c0e54
// 0.002536
0x3b263a43
// 0.002672
0x3b2f160d
// 0.006096
0x3bc7c386
// 0.006101
0x3bc7e6b8
// 0.006418
0x3bd25053
// 0.003347
0x3b5b5a53
// 0.003444
0x3b61bb86
// 0.004540
0x3b94c203
// 0.008730
0x3c0f0876
// 0.001083
0x3a8df210
// 0.006019
0x3bc53d1f
// 0.010820
0x3c31474c
// 0.006311
0x3bcecb67
// 0.004254
0x3b8b64b0
// 0.004957
0x3ba26f10
// 0.002931
0x3b400e48
// 0.003189
0x3b50f88e
// 0.008320
0x3c084fba
// 0.010628
0x3c2e21e7
// 0.002205
0x3b107e25
// 0.005055
0x3ba5a7f9
// 0.003927
0x3b80af71
// 0.006345
0x3bcfe96d
// 0.003697
0x3b7249aa
// 0.005524
0x3bb5013b
// 0.002320
0x3b180692
// 0.004196
0x3b898286
// 0.001478
0x3ac1b313
// 0.008951
0x3c12a673
// 0.007850
0x3c009dea
// 0.000553
0x3a10f224
// 0.002123
0x3b0b2396
// 0.004335
0x3b8e0a1f
// 0.008151
0x3c058da3
// 0.001092
0x3a8f207d
// 0.003715
0x3b737a06
// 0.003483
0x3b643d46
// 0.002256
0x3b13d170
// 0.006939
0x3be35e81
// 0.003748
0x3b75a570
// 0.001610
0x3ad2ff40
// 0.020283
0x3ca62780
// 0.004336
0x3b8e1725
// 0.266667
0x3e888889
// 0.133333
0x3e088889
// 0.266667
0x3e888889
// 0.200000
0x3e4ccccd
// 0.133333
0x3e088889
// 0.000000
0x2f665fae

@ -0,0 +1,22 @@
H
10
// 3
0x0003
// 1
0x0001
// 0
0x0000
// 0
0x0000
// 0
0x0000
// 1
0x0001
// 4
0x0004
// 2
0x0002
// 2
0x0002
// 3
0x0003

@ -0,0 +1,102 @@
W
50
// -129.469765
0xc3017843
// -235.365395
0xc36b5d8b
// -121.250445
0xc2f2803a
// 13.730572
0x415bb06d
// -202.577566
0xc34a93db
// -197.167996
0xc3452b02
// 10.213562
0x41236ac1
// -140.754396
0xc30cc120
// -187.299545
0xc33b4caf
// -884.326939
0xc45d14ed
// 5.734467
0x40b780c1
// -116.118883
0xc2e83cde
// -267.884718
0xc385f13e
// -221.124912
0xc35d1ffa
// -138.469027
0xc30a7812
// 2.030967
0x4001fb5d
// -139.227829
0xc30b3a53
// -250.168324
0xc37a2b17
// -215.917581
0xc357eae7
// -129.644014
0xc301a4de
// 8.006742
0x41001b9d
// -99.664704
0xc2c75454
// -214.033486
0xc3560893
// -202.549478
0xc34a8cab
// -108.976609
0xc2d9f406
// -192.490910
0xc3407dac
// -23.725058
0xc1bdcceb
// -171.273581
0xc32b4609
// -190.261554
0xc33e42f5
// -974.692509
0xc473ac52
// -175.956716
0xc32ff4eb
// -1369.906651
0xc4ab3d03
// -247.568709
0xc3779197
// -321.404806
0xc3a0b3d1
// -0.365720
0xbebb3fa5
// -175.801369
0xc32fcd27
// -107.661560
0xc2d752b8
// 14.326256
0x41653858
// -103.770617
0xc2cf8a8e
// -341.320548
0xc3aaa908
// -133.059540
0xc3050f3e
// -87.469737
0xc2aef081
// 0.948356
0x3f72c778
// -64.563516
0xc2812085
// -195.958042
0xc343f542
// -142.220445
0xc30e386f
// -233.447227
0xc369727d
// -144.496210
0xc3107f08
// 16.915887
0x418753bd
// -184.366865
0xc3385deb

@ -0,0 +1,87 @@
#include "BayesF32.h"
#include "Error.h"
#include "arm_math.h"
#include "Test.h"
#include <cstdio>
void BayesF32::test_gaussian_naive_bayes_predict_f32()
{
const float32_t *inp = input.ptr();
float32_t *bufp = outputProbas.ptr();
int16_t *p = outputPredicts.ptr();
for(int i=0; i < this->nbPatterns ; i ++)
{
*p = arm_gaussian_naive_bayes_predict_f32(&bayes,
inp,
bufp);
inp += this->vecDim;
bufp += this->classNb;
p++;
}
ASSERT_NEAR_EQ(outputProbas,probas,(float32_t)1e-3);
ASSERT_EQ(outputPredicts,predicts);
}
void BayesF32::setUp(Testing::testID_t id,std::vector<Testing::param_t>& paramsArgs,Client::PatternMgr *mgr)
{
    (void)paramsArgs; // no per-test parameters are used by this suite

    switch(id)
    {
       case BayesF32::TEST_GAUSSIAN_NAIVE_BAYES_PREDICT_F32_1:
       // Braces required: the case body declares and initializes locals,
       // and declarations directly under a case label are fragile if more
       // labels are ever added below them.
       {
          input.reload(BayesF32::INPUTS1_F32_ID,mgr);
          params.reload(BayesF32::PARAMS1_F32_ID,mgr);
          dims.reload(BayesF32::DIMS1_S16_ID,mgr);

          const int16_t *dimsp=dims.ptr();
          const float32_t *paramsp = params.ptr();

          // Dims pattern layout: [nbPatterns, classNb, vecDim]
          this->nbPatterns=dimsp[0];
          this->classNb=dimsp[1];
          this->vecDim=dimsp[2];

          // Params pattern layout (flattened, in order):
          //   theta       : classNb * vecDim means
          //   sigma       : classNb * vecDim variances
          //   classPriors : classNb prior probabilities
          //   epsilon     : 1 additive variance term
          this->theta=paramsp;
          this->sigma=paramsp + (this->classNb * this->vecDim);
          this->classPrior=paramsp + 2*(this->classNb * this->vecDim);
          this->epsilon=paramsp[this->classNb + 2*(this->classNb * this->vecDim)];

          // Reference patterns are not loaded when we are in dump mode
          probas.reload(BayesF32::PROBAS1_F32_ID,mgr);
          predicts.reload(BayesF32::PREDICTS1_S16_ID,mgr);

          // One probability row (classNb values) and one class id per pattern.
          outputProbas.create(this->nbPatterns*this->classNb,BayesF32::OUT_PROBA_F32_ID,mgr);
          outputPredicts.create(this->nbPatterns,BayesF32::OUT_PREDICT_S16_ID,mgr);

          // Fill the CMSIS-DSP bayes instance consumed by the kernel under test.
          bayes.vectorDimension=this->vecDim;
          bayes.numberOfClasses=this->classNb;
          bayes.theta=this->theta;
          bayes.sigma=this->sigma;
          bayes.classPriors=this->classPrior;
          bayes.epsilon=this->epsilon;
       }
       break;

       default:
          // Unknown test id: nothing to set up.
          break;
    }
}
void BayesF32::tearDown(Testing::testID_t id,Client::PatternMgr *mgr)
{
    (void)id; // single test case in this suite: teardown is id-independent

    // Dump the computed outputs so they can be inspected (or recorded as
    // references when the framework runs in dump mode).
    outputProbas.dump(mgr);
    outputPredicts.dump(mgr);
}

@ -5,528 +5,25 @@ n
n n
y y
DSP DSP
3 1
n
y
BasicMaths
2 1
n
y
BasicMathsF32
0
12
Input1_f32.txt
Input2_f32.txt
Reference1_f32.txt
Reference2_f32.txt
Reference3_f32.txt
Reference4_f32.txt
Reference5_f32.txt
Reference6_f32.txt
Reference7_f32.txt
Reference8_f32.txt
Reference9_f32.txt
Reference10_f32.txt
2
Output
State
0
1 1
n
n
1 2
n
n
1 3
n
n
1 4
n
n
1 5
n
n
1 6
n
n
1 7
n
n
1 8
n
n
1 9
n
n
1 10
n
n
1 11
n
n
1 12
n
n
1 13
n
n
1 14
n
n
1 15
n
n
1 16
n
n
1 17
n
n
1 18
n
n
1 19
n
n
1 20
n
n
1 21
n
n
1 22
n
n
1 23
n
n
1 24
n
n
3 2
n
y
SVM
2 1
n
y
SVMF32
0
20
Samples1_f32.txt
Params1_f32.txt
Dims1_s16.txt
Reference1_s32.txt
Samples2_f32.txt
Params2_f32.txt
Dims2_s16.txt
Reference2_s32.txt
Samples3_f32.txt
Params3_f32.txt
Dims3_s16.txt
Reference3_s32.txt
Samples4_f32.txt
Params4_f32.txt
Dims4_s16.txt
Reference4_s32.txt
Samples5_f32.txt
Params5_f32.txt
Dims5_s16.txt
Reference5_s32.txt
1
Output
0
1 1
n
n
1 2
n
n
1 3
n
n
1 4
n
n
1 5
n
n
3 2
n
y
DSP
3 1
n
y
BasicMaths
2 1
y
0
y
BasicMathsF32
1
2
Input1_f32.txt
Input2_f32.txt
1
Output
1
g
1
6
5
1
5
16
32
64
128
256
1 1
y
0
n
1 2
y
0
n
1 3
y
0
n
1 4
y
0
n
1 5
y
0
n
1 6
y
0
n
1 7
y
0
n
1 8
y
0
n
2 2
y
0
y
BasicMathsQ31
1
2
Input1_q31.txt
Input2_q31.txt
1
Output
1
g
1
6
5
1
5
16
32
64
128
256
1 1
y
0
n
1 2
y
0
n
1 3
y
0
n
1 4
y
0
n
1 5
y
0
n
1 6
y
0
n
1 7
y
0
n
1 8
y
0
n
2 3
y
0
y
BasicMathsQ15
1
2
Input1_q15.txt
Input2_q15.txt
1
Output
1
g
1
6
5
1
5
16
32
64
128
256
1 1
y
0
n
1 2
y
0
n
1 3
y
0
n
1 4
y
0
n
1 5
y
0
n
1 6
y
0
n
1 7
y
0
n
1 8
y
0
n
2 4
y
0
y
BasicMathsQ7
1
2
Input1_q7.txt
Input2_q7.txt
1
Output
1
g
1
6
5
1
5
16
32
64
128
256
1 1
y
0
n
1 2
y
0
n
1 3
y
0
n
1 4
y
0
n
1 5
y
0
n
1 6
y
0
n
1 7
y
0
n
1 8
y
0
n
3 3 3 3
n n
y y
NN Bayes
2 1 2 1
n n
y y
FullyConnected BayesF32
0 0
60 5
TestCase_1_10_4_input_1.txt Dims1_s16.txt
TestCase_1_10_4_bias_1.txt Inputs1_f32.txt
TestCase_1_10_4_weights_1.txt Params1_f32.txt
TestCase_1_10_4_output_1.txt Probas1_f32.txt
TestCase_1_8_9_input_2.txt Predicts1_s16.txt
TestCase_1_8_9_bias_2.txt
TestCase_1_8_9_weights_2.txt
TestCase_1_8_9_output_2.txt
TestCase_1_10_4_input_3.txt
TestCase_1_10_4_bias_3.txt
TestCase_1_10_4_weights_3.txt
TestCase_1_10_4_output_3.txt
TestCase_1_9_1_input_4.txt
TestCase_1_9_1_bias_4.txt
TestCase_1_9_1_weights_4.txt
TestCase_1_9_1_output_4.txt
TestCase_1_8_8_input_5.txt
TestCase_1_8_8_bias_5.txt
TestCase_1_8_8_weights_5.txt
TestCase_1_8_8_output_5.txt
TestCase_9_6_1_input_6.txt
TestCase_9_6_1_bias_6.txt
TestCase_9_6_1_weights_6.txt
TestCase_9_6_1_output_6.txt
TestCase_8_8_1_input_7.txt
TestCase_8_8_1_bias_7.txt
TestCase_8_8_1_weights_7.txt
TestCase_8_8_1_output_7.txt
TestCase_4_10_1_input_8.txt
TestCase_4_10_1_bias_8.txt
TestCase_4_10_1_weights_8.txt
TestCase_4_10_1_output_8.txt
TestCase_9_6_1_input_9.txt
TestCase_9_6_1_bias_9.txt
TestCase_9_6_1_weights_9.txt
TestCase_9_6_1_output_9.txt
TestCase_4_10_1_input_10.txt
TestCase_4_10_1_bias_10.txt
TestCase_4_10_1_weights_10.txt
TestCase_4_10_1_output_10.txt
TestCase_8_8_1_input_11.txt
TestCase_8_8_1_bias_11.txt
TestCase_8_8_1_weights_11.txt
TestCase_8_8_1_output_11.txt
TestCase_9_8_4_input_12.txt
TestCase_9_8_4_bias_12.txt
TestCase_9_8_4_weights_12.txt
TestCase_9_8_4_output_12.txt
TestCase_8_8_5_input_13.txt
TestCase_8_8_5_bias_13.txt
TestCase_8_8_5_weights_13.txt
TestCase_8_8_5_output_13.txt
TestCase_4_7_3_input_14.txt
TestCase_4_7_3_bias_14.txt
TestCase_4_7_3_weights_14.txt
TestCase_4_7_3_output_14.txt
TestCase_8_7_4_input_15.txt
TestCase_8_7_4_bias_15.txt
TestCase_8_7_4_weights_15.txt
TestCase_8_7_4_output_15.txt
2 2
Output Probas
Temp Predicts
0 0
1 1 1 1
n n
n n
1 2
n
n
1 3
n
n
1 4
n
n
1 5
n
n
1 6
n
n
1 7
n
n
1 8
n
n
1 9
n
n
1 10
n
n
1 11
n
n
1 12
n
n
1 13
n
n
1 14
n
n
1 15
n
n
3 4
n
y
NN
2 1
n
y
FullyConnected
1
4
TestCase_1_10_4_input_1.txt
TestCase_1_10_4_bias_1.txt
TestCase_1_10_4_weights_1.txt
TestCase_1_10_4_output_1.txt
2
Output
Temp
1
g
1
5
4
1
4
10
20
100
200
1 1
y
0
n

@ -1,2 +1,2 @@
OPTIMIZED,HARDFP,FASTMATH,NEON,UNROLL,ROUNDING,PLATFORM,CORE,COMPILER,VERSION OPTIMIZED,HARDFP,FASTMATH,NEON,UNROLL,ROUNDING,PLATFORM,CORE,COMPILER,VERSION
1,1,1,0,1,0,FVP,ARMCM7_DP,AC6,6120001 1,1,1,1,1,0,FVP,ARMCA5,AC6,6120001

1 OPTIMIZED HARDFP FASTMATH NEON UNROLL ROUNDING PLATFORM CORE COMPILER VERSION
2 1 1 1 0 1 1 0 FVP ARMCM7_DP ARMCA5 AC6 6120001

@ -110,6 +110,29 @@ group Root {
} }
} }
} }
group Bayes Tests {
class = BayesTests
folder = Bayes
suite Bayes F32 {
class = BayesF32
folder = BayesF32
Pattern DIMS1_S16_ID : Dims1_s16.txt
Pattern INPUTS1_F32_ID : Inputs1_f32.txt
Pattern PARAMS1_F32_ID : Params1_f32.txt
Pattern PROBAS1_F32_ID : Probas1_f32.txt
Pattern PREDICTS1_S16_ID : Predicts1_s16.txt
Output OUT_PROBA_F32_ID : Probas
Output OUT_PREDICT_S16_ID : Predicts
Functions {
arm_gaussian_naive_bayes_predict_f32:test_gaussian_naive_bayes_predict_f32
}
}
}
} }
group DSP Benchmarks { group DSP Benchmarks {

Loading…
Cancel
Save