Support alpha in elu (#354)

https://github.com/VeriSilicon/TIM-VX/issues/305

Signed-off-by: Chen Xin <jack.chen@verisilicon.com>
Author: chxin66, 2022-04-11 19:04:30 +08:00, committed via GitHub
Commit: d0af7ae8df (parent: b4091318ea)
3 changed files with 67 additions and 24 deletions
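
In short: Elu moves out of the no-parameter activation macro family and becomes a hand-written DirectMapOp subclass whose constructor takes an explicit alpha, forwarded into the underlying node parameters. Caller-side, the op is then created like the other parameterized activations. A minimal sketch (the alpha value 0.5f is illustrative; tensor setup follows the test added below):

    auto ctx = tim::vx::Context::Create();
    auto graph = ctx->CreateGraph();
    // alpha is now a real constructor parameter rather than a fixed default:
    auto elu = graph->CreateOperation<tim::vx::ops::Elu>(0.5f);
    (*elu).BindInputs({input_tensor}).BindOutputs({output_tensor});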

File 1 of 3 (activation op declarations, header):

@@ -79,7 +79,6 @@ namespace ops {
 DECLARE_NO_PARAMETER_ACTIVATION(Relu)
 DECLARE_NO_PARAMETER_ACTIVATION(Relu1)
 DECLARE_NO_PARAMETER_ACTIVATION(Relu6)
-DECLARE_NO_PARAMETER_ACTIVATION(Elu)
 DECLARE_NO_PARAMETER_ACTIVATION(Tanh)
 DECLARE_NO_PARAMETER_ACTIVATION(Sigmoid)
 DECLARE_NO_PARAMETER_ACTIVATION(Swish)
@@ -89,6 +88,16 @@ DECLARE_NO_PARAMETER_ACTIVATION(SoftRelu)
 #undef DEFINE_NO_PARAMETER_ACTIVATION
 
+class Elu : public DirectMapOp {
+ public:
+  Elu(Graph* graph, float alpha);
+
+  std::shared_ptr<Operation> Clone(
+      std::shared_ptr<Graph>& graph) const override;
+
+ protected:
+  float alpha_;
+};
 class Prelu : public DirectMapOp {
  public:
  Prelu(Graph* graph, int axis);
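
Why the macro line had to go: DECLARE_NO_PARAMETER_ACTIVATION presumably expands to a class whose constructor takes only the Graph*, leaving no slot for alpha. A sketch of the assumed expansion (inferred from the macro's name and the surrounding declarations, not its actual body):

    class Tanh : public DirectMapOp {
     public:
      Tanh(Graph* graph);  // no room for an alpha argument
      std::shared_ptr<Operation> Clone(
          std::shared_ptr<Graph>& graph) const override;
    };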

File 2 of 3 (activation op implementations):

@@ -40,14 +40,21 @@ namespace ops {
 DEFINE_NO_PARAMETER_ACTIVATION(Relu, VSI_NN_OP_RELU)
 DEFINE_NO_PARAMETER_ACTIVATION(Relu1, VSI_NN_OP_RELU1)
 DEFINE_NO_PARAMETER_ACTIVATION(Relu6, VSI_NN_OP_RELU6)
-DEFINE_NO_PARAMETER_ACTIVATION(Elu, VSI_NN_OP_ELU)
 DEFINE_NO_PARAMETER_ACTIVATION(Sigmoid, VSI_NN_OP_SIGMOID)
 DEFINE_NO_PARAMETER_ACTIVATION(Mish, VSI_NN_OP_MISH)
 DEFINE_NO_PARAMETER_ACTIVATION(SoftRelu, VSI_NN_OP_SOFTRELU)
 #undef DEFINE_NO_PARAMETER_ACTIVATION
 
+Elu::Elu(Graph* graph, float alpha)
+    : DirectMapOp(graph, VSI_NN_OP_ELU), alpha_(alpha) {
+  this->impl()->node()->nn_param.elu.alpha = alpha_;
+}
+
+std::shared_ptr<Operation> Elu::Clone(std::shared_ptr<Graph>& graph) const {
+  return graph->CreateOperation<Elu>(this->alpha_);
+}
 
 HardSwish::HardSwish(Graph* graph) : DirectMapOp(graph, VSI_NN_OP_SWISH) {
   this->impl()->node()->nn_param.swish.type = VSI_NN_HSWISH;
   this->impl()->node()->nn_param.swish.beta = 1.0f;
@@ -63,8 +70,7 @@ Swish::Swish(Graph* graph) : DirectMapOp(graph, VSI_NN_OP_SWISH) {
   this->impl()->node()->nn_param.swish.beta = 1.0f;
 }
 
-std::shared_ptr<Operation> Swish::Clone(
-    std::shared_ptr<Graph>& graph) const {
+std::shared_ptr<Operation> Swish::Clone(std::shared_ptr<Graph>& graph) const {
   return graph->CreateOperation<Swish>();
 }
@@ -83,7 +89,8 @@ HardSigmoid::HardSigmoid(Graph* graph, float alpha, float beta)
   this->impl()->node()->nn_param.hard_sigmoid.beta = beta_;
 }
 
-std::shared_ptr<Operation> HardSigmoid::Clone(std::shared_ptr<Graph>& graph) const {
+std::shared_ptr<Operation> HardSigmoid::Clone(
+    std::shared_ptr<Graph>& graph) const {
   return graph->CreateOperation<HardSigmoid>(this->alpha_, this->beta_);
 }
@@ -122,7 +129,8 @@ Gelu::Gelu(Graph* graph, bool approximate)
 }
 
 std::shared_ptr<Operation> Gelu::Clone(std::shared_ptr<Graph>& graph) const {
-  return graph->CreateOperation<Gelu>(this->impl()->node()->nn_param.gelu.approximate);
+  return graph->CreateOperation<Gelu>(
+      this->impl()->node()->nn_param.gelu.approximate);
 }
 
 }  // namespace ops
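
Note the two Clone idioms that now coexist in this file: Elu::Clone replays the cached member alpha_, while Gelu::Clone reads its parameter back out of the live node (nn_param.gelu.approximate). Both rebuild the op with its original configuration; caching the value in a member, as Elu does, avoids reaching back into node state.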

File 3 of 3 (activation op tests):

@@ -207,17 +207,14 @@ TEST(HardSigmoid, shape_5_1_uint8_Quantized) {
   auto input_tensor = graph->CreateTensor(input_spec);
   auto output_tensor = graph->CreateTensor(output_spec);
 
-  std::vector<uint8_t> in_data = {65, 255, 140, 92, 142,
-                                  122, 117, 167, 132, 117,
-                                  44, 99, 109, 96, 216,
-                                  222, 135, 126, 113, 100};
-  std::vector<uint8_t> golden_data = {222, 240, 229, 225, 229,
-                                      227, 227, 232, 228, 227,
-                                      220, 225, 226, 225, 236,
-                                      237, 229, 228, 227, 225};
+  std::vector<uint8_t> in_data = {65, 255, 140, 92, 142, 122, 117,
+                                  167, 132, 117, 44, 99, 109, 96,
+                                  216, 222, 135, 126, 113, 100};
+  std::vector<uint8_t> golden_data = {222, 240, 229, 225, 229, 227, 227,
+                                      232, 228, 227, 220, 225, 226, 225,
+                                      236, 237, 229, 228, 227, 225};
 
-  EXPECT_TRUE(
-      input_tensor->CopyDataToTensor(in_data.data(), in_data.size()));
+  EXPECT_TRUE(input_tensor->CopyDataToTensor(in_data.data(), in_data.size()));
 
   auto op = graph->CreateOperation<tim::vx::ops::HardSigmoid>(0.2, 0.5);
   (*op).BindInput(input_tensor).BindOutput(output_tensor);
@@ -228,3 +225,32 @@ TEST(HardSigmoid, shape_5_1_uint8_Quantized) {
   EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
   EXPECT_TRUE(ArraysMatch(golden_data, output, (uint8_t)1));
 }
+
+TEST(Elu, shape_5_1_fp32) {
+  auto ctx = tim::vx::Context::Create();
+  auto graph = ctx->CreateGraph();
+  tim::vx::ShapeType io_shape({5, 1});
+  tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32, io_shape,
+                                 tim::vx::TensorAttribute::INPUT);
+  tim::vx::TensorSpec output_spec(tim::vx::DataType::FLOAT32, io_shape,
+                                  tim::vx::TensorAttribute::OUTPUT);
+  auto input_tensor = graph->CreateTensor(input_spec);
+  auto output_tensor = graph->CreateTensor(output_spec);
+
+  std::vector<float> in_data = {-2.5, -0.1, 0, 0.55, 99};
+  std::vector<float> golden = {-0.458957, -0.0475813, 0, 0.55, 99};
+
+  EXPECT_TRUE(
+      input_tensor->CopyDataToTensor(in_data.data(), in_data.size() * 4));
+  auto op = graph->CreateOperation<tim::vx::ops::Elu>(0.5);
+  (*op).BindInputs({input_tensor}).BindOutputs({output_tensor});
+
+  EXPECT_TRUE(graph->Compile());
+  EXPECT_TRUE(graph->Run());
+
+  std::vector<float> output(5, 0);
+  EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
+  EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
+}
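
A sanity check on the golden vector (not part of the commit): with alpha = 0.5 and the standard ELU definition, ELU(x) = x for x > 0 and alpha * (exp(x) - 1) otherwise, the two negative inputs reproduce the goldens above. A standalone sketch:

    // elu_check.cc: recompute the golden values used in TEST(Elu, shape_5_1_fp32).
    #include <cmath>
    #include <cstdio>

    static float elu(float x, float alpha) {
      return x > 0.0f ? x : alpha * (std::exp(x) - 1.0f);
    }

    int main() {
      for (float x : {-2.5f, -0.1f, 0.0f, 0.55f, 99.0f})
        std::printf("elu(%g) = %g\n", x, elu(x, 0.5f));
      // -2.5 -> -0.458957 and -0.1 -> -0.0475813; 0, 0.55, 99 pass through.
      return 0;
    }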