Fix regression introduced by V1.1.37 update (#275)

Signed-off-by: xiang.zhang <xiang.zhang@verisilicon.com>
This commit is contained in:
Sven 2022-01-18 11:36:09 +08:00 committed by GitHub
parent 04cd392b7e
commit a02900d135
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 16 additions and 16 deletions

View File

@@ -34,6 +34,7 @@
#include "vsi_nn_log.h"
#include "utils/vsi_nn_util.h"
#include "utils/vsi_nn_link_list.h"
#include "kernel/vsi_nn_kernel.h"
#define MAX_SOFTMAX_BATCH 65520
@@ -96,7 +97,7 @@ static vsi_status vsi_nn_softmax_compute
memset(&paramExt, 0, sizeof(vx_nn_softmax_params_ext_t));
paramExt.base.beta = self->nn_param.softmax_internal.beta;
paramExt.axis = 0;
paramExt.axis = self->nn_param.softmax_internal.axis;
size = sizeof(vx_nn_softmax_params_ext_t);
#else
@@ -108,10 +109,7 @@ static vsi_status vsi_nn_softmax_compute
#endif
status = VSI_FAILURE;
status = VSI_FAILURE;
if(param->beta == 0.f)
if (param->beta == 0.f)
{
VSILOGW("Softmax's beta is 0. Set beta to 1");
/* FIXME: Compatible with old case generated by Acuity */
@@ -141,23 +139,25 @@ static vsi_status vsi_nn_softmax_compute
}
else
{
#ifdef VX_SOFTMAX_AXIS_PARAMETER_SUPPORT
if ( inputs[0]->attr.dim_num > 2 )
{
paramExt.axis = 2;
}
#endif
float beta = param->beta;
int32_t axis = self->nn_param.softmax_internal.axis;
vsi_nn_kernel_param_t * kernel_param = NULL;
self->n = vxSoftmaxLayer2( self->graph->g,
inputs[0]->t,
param,
size,
outputs[0]->t);
kernel_param = vsi_nn_kernel_param_create();
vsi_nn_kernel_param_add_float32( kernel_param, "beta", beta );
vsi_nn_kernel_param_add_int32( kernel_param, "axis", axis );
self->n = (vx_node)vsi_nn_kernel_selector( self->graph,
"softmax",
inputs, 1,
outputs, 1, kernel_param );;
if( NULL != self->n )
{
status = VSI_SUCCESS;
}
vsi_nn_kernel_param_release( &kernel_param );
}
return status;