CMSIS-DSP: Added example for the naive Gaussian Bayes classifier
parent 585137ad16
commit 88d1328ee4
@@ -0,0 +1,45 @@
cmake_minimum_required (VERSION 3.6)

project (arm_bayes_example VERSION 0.1)


# Needed to include the configBoot module
# Define the path to CMSIS-DSP (ROOT is defined on command line when using cmake)
set(ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../..)
set(DSP ${ROOT}/CMSIS/DSP)

# Add DSP folder to module path
list(APPEND CMAKE_MODULE_PATH ${DSP})

###################################
#
# LIBRARIES
#
###################################

###########
#
# CMSIS DSP
#

add_subdirectory(../../../Source bin_dsp)


###################################
#
# TEST APPLICATION
#
###################################


add_executable(arm_bayes_example)


include(config)
configApp(arm_bayes_example ${ROOT})

target_sources(arm_bayes_example PRIVATE arm_bayes_example_f32.c)

### Sources and libs

target_link_libraries(arm_bayes_example PRIVATE CMSISDSP)
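# A possible way to configure and build this example (a sketch, not part of this
# commit; the toolchain file and paths are assumptions that depend on your setup):
#   cmake -DROOT=/path/to/CMSIS_5 -DCMAKE_TOOLCHAIN_FILE=<your-toolchain>.cmake <path/to/this/example>
#   make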
@@ -0,0 +1,146 @@
/* ----------------------------------------------------------------------
 * Copyright (C) 2019-2020 ARM Limited. All rights reserved.
 *
 * $Date:        09. December 2019
 * $Revision:    V1.0.0
 *
 * Project:      CMSIS DSP Library
 * Title:        arm_bayes_example_f32.c
 *
 * Description:  Example code demonstrating how to use the Bayes functions.
 *
 * Target Processor: Cortex-M/Cortex-A
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   - Neither the name of ARM LIMITED nor the names of its contributors
 *     may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * -------------------------------------------------------------------- */

/**
 * @ingroup groupExamples
 */

/**
 * @defgroup BayesExample Bayes Example
 *
 * \par Description:
 * \par
 * Demonstrates the use of Bayesian classifier functions. It complements the
 * tutorial about classical ML with CMSIS-DSP and the Python library scikit-learn.
 *
 */

/** \example arm_bayes_example_f32.c
 */

#include <math.h>
#include <stdio.h>
#include "arm_math.h"

/*
  The classifier parameters below can be generated with the Python library
  scikit-learn (see the accompanying training script in this example).
*/
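/*
  These values correspond to the attributes of a trained scikit-learn GaussianNB
  model as printed by that script: theta[] holds gnb.theta_ (the Gaussian means),
  sigma[] holds gnb.sigma_ (the Gaussian variances), classPriors[] holds
  gnb.class_prior_, and S.epsilon holds gnb.epsilon_, scikit-learn's additive
  variance smoothing term.
*/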
arm_gaussian_naive_bayes_instance_f32 S;

#define NB_OF_CLASSES 3
#define VECTOR_DIMENSION 2

const float32_t theta[NB_OF_CLASSES*VECTOR_DIMENSION] = {
  1.4539529436590528f, 0.8722776016801852f,
  -1.5267934452462473f, 0.903204577814203f,
  -0.15338006360932258f, -2.9997913665803964f
}; /**< Mean values for the Gaussians */

const float32_t sigma[NB_OF_CLASSES*VECTOR_DIMENSION] = {
  1.0063470889514925f, 0.9038018246524426f,
  1.0224479953244736f, 0.7768764290432544f,
  1.1217662403241206f, 1.2303890106020325f
}; /**< Variances for the Gaussians */

const float32_t classPriors[NB_OF_CLASSES] = {
  0.3333333333333333f, 0.3333333333333333f, 0.3333333333333333f
}; /**< Class prior probabilities */


int32_t main(void)
{
  /* Array of input data */
  float32_t in[2];

  /* Result of the classifier */
  float32_t result[NB_OF_CLASSES];
  float32_t maxProba;
  uint32_t index;

  S.vectorDimension = VECTOR_DIMENSION;
  S.numberOfClasses = NB_OF_CLASSES;
  S.theta = theta;
  S.sigma = sigma;
  S.classPriors = classPriors;
  S.epsilon = 4.328939296523643e-09f;

  in[0] = 1.5f;
  in[1] = 1.0f;

  arm_gaussian_naive_bayes_predict_f32(&S, in, result);
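  /* result[] holds one value per class; the class with the maximum value is the
     prediction. arm_max_f32 returns that maximum in maxProba and its position
     in index. */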
  arm_max_f32(result,
              NB_OF_CLASSES,
              &maxProba,
              &index);

  printf("Class = %d\n", index);

  in[0] = -1.5f;
  in[1] = 1.0f;

  arm_gaussian_naive_bayes_predict_f32(&S, in, result);

  arm_max_f32(result,
              NB_OF_CLASSES,
              &maxProba,
              &index);

  printf("Class = %d\n", index);

  in[0] = 0.0f;
  in[1] = -3.0f;

  arm_gaussian_naive_bayes_predict_f32(&S, in, result);

  arm_max_f32(result,
              NB_OF_CLASSES,
              &maxProba,
              &index);

  printf("Class = %d\n", index);
}
@@ -0,0 +1,74 @@
from sklearn.naive_bayes import GaussianNB
import random
import numpy as np
import math

from pylab import scatter, figure, clf, plot, xlabel, ylabel, xlim, ylim, title, grid, axes, show, semilogx, semilogy
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties

# Generation of the data used to train the classifier.
# 100 vectors are generated per cluster. Vectors have dimension 2, so they can
# be represented as points.
NBVECS = 100
VECDIM = 2

# 3 clusters of points are generated
ballRadius = 1.0
x1 = [1.5, 1] + ballRadius * np.random.randn(NBVECS, VECDIM)
x2 = [-1.5, 1] + ballRadius * np.random.randn(NBVECS, VECDIM)
x3 = [0, -3] + ballRadius * np.random.randn(NBVECS, VECDIM)

# All points are concatenated
X_train = np.concatenate((x1, x2, x3))

# The classes are 0, 1 and 2
Y_train = np.concatenate((np.zeros(NBVECS), np.ones(NBVECS), 2 * np.ones(NBVECS)))

gnb = GaussianNB()
gnb.fit(X_train, Y_train)

print("Testing")
y_pred = gnb.predict([[1.5, 1.0]])
print(y_pred)

y_pred = gnb.predict([[-1.5, 1.0]])
print(y_pred)

y_pred = gnb.predict([[0, -3.0]])
print(y_pred)

# Dump of the parameters needed by CMSIS-DSP

print("Parameters")
# Gaussian means
print("Theta = ", list(np.reshape(gnb.theta_, np.size(gnb.theta_))))

# Gaussian variances
print("Sigma = ", list(np.reshape(gnb.sigma_, np.size(gnb.sigma_))))

# Class priors
print("Prior = ", list(np.reshape(gnb.class_prior_, np.size(gnb.class_prior_))))

print("Epsilon = ", gnb.epsilon_)
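# Optional helper (a sketch, not part of the original script): print the trained
# parameters as C array initializers that can be pasted into arm_bayes_example_f32.c.
def dump_c_array(name, values):
    # Flatten to 1D and format each value as a float32 literal.
    values = np.reshape(values, np.size(values))
    body = ",\n    ".join("%.16gf" % v for v in values)
    print("const float32_t %s[] = {\n    %s\n};" % (name, body))

dump_c_array("theta", gnb.theta_)
dump_c_array("sigma", gnb.sigma_)
dump_c_array("classPriors", gnb.class_prior_)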
# Some bounds are computed for the graphical representation
x_min = X_train[:, 0].min()
x_max = X_train[:, 0].max()
y_min = X_train[:, 1].min()
y_max = X_train[:, 1].max()

font = FontProperties()
font.set_size(20)

r = plt.figure()
plt.axis('off')
plt.text(1.5, 1.0, "A", verticalalignment='center', horizontalalignment='center', fontproperties=font)
plt.text(-1.5, 1.0, "B", verticalalignment='center', horizontalalignment='center', fontproperties=font)
plt.text(0, -3, "C", verticalalignment='center', horizontalalignment='center', fontproperties=font)
scatter(x1[:, 0], x1[:, 1], s=1.0, color='#FF6B00')
scatter(x2[:, 0], x2[:, 1], s=1.0, color='#95D600')
scatter(x3[:, 0], x3[:, 1], s=1.0, color='#00C1DE')
#r.savefig('fig.jpeg')
#plt.close(r)
show()