From 9f95ff5b6ba01db09552b84a0ab79607060a2666 Mon Sep 17 00:00:00 2001
From: Ali Labbene
Date: Wed, 11 Dec 2019 08:59:21 +0100
Subject: Official ARM version: v5.4.0

Add CMSIS V5.4.0; please refer to index.html, available under the \docs folder.

Note: the content of \CMSIS\Core\Include has been copied under \Include to keep the same structure used in existing projects, and thus avoid a mass update of those projects.

Note: the following components have been removed from the original ARM delivery (as they are not used in ST packages):
- CMSIS_EW2018.pdf
- .gitattributes
- .gitignore
- \Device
- \CMSIS
- \CoreValidation
- \DAP
- \Documentation
- \DoxyGen
- \Driver
- \Pack
- \RTOS\CMSIS_RTOS_Tutorial.pdf
- \RTOS\RTX
- \RTOS\Template
- \RTOS2\RTX
- \Utilities
- All ARM/GCC project files are deleted from \DSP, \RTOS and \RTOS2

Change-Id: Ia026c3f0f0d016627a4fb5a9032852c33d24b4d3
---
 docs/NN/html/group__Acti.html | 327 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 327 insertions(+)
 create mode 100644 docs/NN/html/group__Acti.html

diff --git a/docs/NN/html/group__Acti.html b/docs/NN/html/group__Acti.html
new file mode 100644
index 0000000..fa8cb48
--- /dev/null
+++ b/docs/NN/html/group__Acti.html
@@ -0,0 +1,327 @@
CMSIS-NN  Version 1.1.0
CMSIS NN Software Library

Neural Network Activation Functions
Functions

void arm_nn_activations_direct_q15 (q15_t *data, uint16_t size, uint16_t int_width, arm_nn_activation_type type)
    Q15 neural network activation function using direct table look-up.

void arm_nn_activations_direct_q7 (q7_t *data, uint16_t size, uint16_t int_width, arm_nn_activation_type type)
    Q7 neural network activation function using direct table look-up.

void arm_relu_q15 (q15_t *data, uint16_t size)
    Q15 ReLU function.

void arm_relu_q7 (q7_t *data, uint16_t size)
    Q7 ReLU function.
Description

These functions perform activation layers, including ReLU (Rectified Linear Unit), sigmoid, and tanh.

Function Documentation
void arm_nn_activations_direct_q15 (q15_t *data, uint16_t size, uint16_t int_width, arm_nn_activation_type type)

Parameters
    [in,out]  data       pointer to input
    [in]      size       number of elements
    [in]      int_width  bit-width of the integer part, assumed to be smaller than 3
    [in]      type       type of activation function

Returns
    none.

This is the direct table look-up approach.

The integer part of the fixed-point representation is assumed here to be <= 3. Using more than 3 integer bits makes little sense: after saturation, any of these activation functions produce essentially the same result.

References ARM_SIGMOID, ARM_TANH, sigmoidTable_q15, and tanhTable_q15.

Referenced by gru_example().
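A minimal usage sketch, assuming the public CMSIS-NN header arm_nnfunctions.h; the buffer name, its contents, and the interpretation of int_width = 3 as "3 integer bits" are illustrative assumptions, not taken from this page:

    #include "arm_nnfunctions.h"

    /* Illustrative buffer: 8 samples treated as fixed-point with 3 integer bits. */
    static q15_t buf_q15[8] = { -24576, -8192, 0, 4096, 8192, 12288, 16384, 24576 };

    void activation_q15_example(void)
    {
        /* In-place sigmoid via the Q15 look-up table; int_width must match
           the integer bit-width of the data (see the note above). */
        arm_nn_activations_direct_q15(buf_q15, 8, 3, ARM_SIGMOID);
    }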
void arm_nn_activations_direct_q7 (q7_t *data, uint16_t size, uint16_t int_width, arm_nn_activation_type type)

Parameters
    [in,out]  data       pointer to input
    [in]      size       number of elements
    [in]      int_width  bit-width of the integer part, assumed to be smaller than 3
    [in]      type       type of activation function

Returns
    none.

This is the direct table look-up approach.

The integer part of the fixed-point representation is assumed here to be <= 3. Using more than 3 integer bits makes little sense: after saturation, any of these activation functions produce essentially the same result.

References ARM_SIGMOID, ARM_TANH, sigmoidTable_q7, and tanhTable_q7.
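The Q7 variant follows the same pattern; a sketch under the same assumptions (header name and buffer are illustrative), here selecting tanh instead of sigmoid:

    #include "arm_nnfunctions.h"

    /* Illustrative buffer: 8 samples treated as fixed-point with 3 integer bits. */
    static q7_t buf_q7[8] = { -96, -32, -8, 0, 8, 16, 48, 96 };

    void activation_q7_example(void)
    {
        /* In-place tanh via the Q7 look-up table. */
        arm_nn_activations_direct_q7(buf_q7, 8, 3, ARM_TANH);
    }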
void arm_relu_q15 (q15_t *data, uint16_t size)

Parameters
    [in,out]  data   pointer to input
    [in]      size   number of elements

Returns
    none.

Optimized ReLU with QSUB instructions.
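A minimal calling sketch; the header name and buffer are assumptions for illustration, and the operation works in place:

    #include "arm_nnfunctions.h"

    static q15_t layer_out_q15[16];   /* e.g. output of a fully-connected layer */

    void relu_q15_example(void)
    {
        /* Negative elements are clamped to zero in place. */
        arm_relu_q15(layer_out_q15, 16);
    }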
void arm_relu_q7 (q7_t *data, uint16_t size)

Parameters
    [in,out]  data   pointer to input
    [in]      size   number of elements

Returns
    none.

Optimized ReLU with QSUB instructions.

Referenced by main().
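The Q7 variant is called the same way; a sketch under the same illustrative assumptions:

    #include "arm_nnfunctions.h"

    static q7_t layer_out_q7[32];   /* e.g. output buffer of a convolution layer */

    void relu_q7_example(void)
    {
        /* Negative elements are clamped to zero in place. */
        arm_relu_q7(layer_out_q7, 32);
    }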