
Commit a2a5f6c

luweizhou2016 authored and rnugmanx committed Aug 26, 2021
Revise swish (openvinotoolkit#5983)
* Update Swish OP description. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Use RTTI to declare/define NGraph Swish OP. Add input element type check when constructing Swish OP. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Add Swish into activation serialization test list. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Add Swish into IE CPU plugin activation single layer test suit. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Add Swish NGraph backend and visitor API tests. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Add Swish unsupported parameter data type test cases. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
* Update the Swish OP visistor API to use typed test. Signed-off-by: Luwei Zhou <luwei.zhou@intel.com>
1 parent 6752adb · commit a2a5f6c

File tree: 9 files changed, +115 −5 lines changed
 

docs/ops/activation/Swish_4.md (+2 −1)

@@ -9,7 +9,8 @@
 **Detailed description**

 *Swish* operation is introduced in this [article](https://arxiv.org/abs/1710.05941).
-It performs element-wise activation function on a given input tensor, based on the following mathematical formula:
+
+*Swish* is a smooth, non-monotonic function. The non-monotonicity property of *Swish* distinguishes itself from most common activation functions. It performs element-wise activation function on a given input tensor, based on the following mathematical formula:

 \f[
 Swish(x) = x\cdot \sigma(\beta x) = x \left(1 + e^{-(\beta x)}\right)^{-1}
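 \f]

For reference, a minimal standalone sketch of the formula above (not part of the commit): it assumes the single-input case behaves as if beta were 1.0, which is how the spec describes the default.

#include <cmath>

// Reference Swish: x * sigmoid(beta * x) = x / (1 + exp(-beta * x))
inline float swish_ref(float x, float beta = 1.0f)
{
    return x / (1.0f + std::exp(-beta * x));
}

For example, swish_ref(1.0f) is about 0.731f and swish_ref(-1.0f) is about -0.269f.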

inference-engine/tests/functional/inference_engine/serialization/single_layer/activation.cpp (+1)

@@ -52,6 +52,7 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
 {Ceiling, {}},
 {Mish, {}},
 {HSwish, {}},
+{Swish, {{0.3f}}},
 {SoftPlus, {}},
 {HSigmoid, {}},
 {RoundHalfToEven, {}},

inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp (+2 −1)

@@ -59,7 +59,8 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
 {RoundHalfAwayFromZero, {}},
 {Erf, {}},
 {GeluErf, {}},
-{GeluTanh, {}}
+{GeluTanh, {}},
+{Swish, {{0.4f}}}
 };

 // List of operations that should be tested also with integer precision

ngraph/core/include/ngraph/op/swish.hpp (+1 −2)

@@ -20,8 +20,7 @@ namespace ngraph
 class NGRAPH_API Swish : public ngraph::op::Op
 {
 public:
-    static constexpr NodeTypeInfo type_info{"Swish", 4};
-    const NodeTypeInfo& get_type_info() const override { return type_info; }
+    NGRAPH_RTTI_DECLARATION;
     Swish() = default;

     /// \brief Constructs an Swish operation.

ngraph/core/src/op/swish.cpp (+7 −1)

@@ -14,7 +14,7 @@
 using namespace std;
 using namespace ngraph;

-constexpr NodeTypeInfo op::v4::Swish::type_info;
+NGRAPH_RTTI_DEFINITION(op::v4::Swish, "Swish", 4);

 op::v4::Swish::Swish(const Output<Node>& arg)
     : Op({arg})

@@ -43,6 +43,12 @@ void op::v4::Swish::validate_and_infer_types()
                           "Swish must have 1 or 2 inputs, but it has: ",
                           inputs_count);

+    NODE_VALIDATION_CHECK(this,
+                          get_input_element_type(0).is_real(),
+                          "Swish input tensor must be floating point type(",
+                          get_input_element_type(0),
+                          ").");
+
     if (inputs_count == 2)
     {
         NODE_VALIDATION_CHECK(this,
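Together, the header and source changes above switch Swish to the NGRAPH_RTTI_DECLARATION/NGRAPH_RTTI_DEFINITION pair and reject non-floating-point data at construction time. A minimal usage sketch (not part of the commit; it assumes only the public nGraph builder API that the tests below also use) showing where the new check applies:

#include "ngraph/ngraph.hpp"

using namespace ngraph;

std::shared_ptr<Function> make_swish_function()
{
    // An f32 Parameter satisfies the new is_real() check; an integer or boolean
    // data type here would make the constructor throw NodeValidationFailure.
    auto data = std::make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto beta = op::Constant::create(element::f32, Shape{}, {0.6f}); // optional scalar beta input
    auto swish = std::make_shared<op::v4::Swish>(data, beta);
    return std::make_shared<Function>(swish, ParameterVector{data});
}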

ngraph/test/CMakeLists.txt (+2)

@@ -300,6 +300,7 @@ set(SRC
 visitors/op/squeeze.cpp
 visitors/op/sqrt.cpp
 visitors/op/strided_slice.cpp
+visitors/op/swish.cpp
 visitors/op/tanh.cpp
 visitors/op/topk.cpp
 visitors/op/transpose.cpp

@@ -476,6 +477,7 @@ set(MULTI_TEST_SRC
 backend/squared_difference.in.cpp
 backend/squeeze.in.cpp
 backend/subtract.in.cpp
+backend/swish.in.cpp
 backend/tan.in.cpp
 backend/tanh.in.cpp
 backend/tile.in.cpp

ngraph/test/backend/swish.in.cpp (+75)

@@ -0,0 +1,75 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+#include "gtest/gtest.h"
+#include "ngraph/ngraph.hpp"
+#include "util/engine/test_engines.hpp"
+#include "util/test_case.hpp"
+#include "util/test_control.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+static string s_manifest = "${MANIFEST}";
+using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
+
+NGRAPH_TEST(${BACKEND_NAME}, swish_2D_with_beta0_6)
+{
+    Shape in_shape{2, 4};
+    element::Type et = element::f32;
+    auto beta = 0.6f;
+
+    auto args0 = make_shared<op::Parameter>(et, in_shape);
+    auto args1 = make_shared<op::Parameter>(et, Shape{});
+    auto swish = make_shared<op::v4::Swish>(args0, args1);
+    auto f = make_shared<Function>(swish, ParameterVector{args0, args1});
+
+    vector<vector<float>> in_vec{vector<float>{0.4, -5.7, -6, 3, -0.9, 23, 5, 3.3}, vector<float>{beta}};
+    vector<float> out_vec{in_vec[0]};
+    std::transform(out_vec.begin(), out_vec.end(), out_vec.begin(), [&beta](float x) -> float { return (x / (1.0f + std::exp(x * beta * -1.0f))); });
+
+    auto test_case = test::TestCase<TestEngine>(f);
+    test_case.add_multiple_inputs<float>(in_vec);
+    test_case.add_expected_output<float>(in_shape, out_vec);
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, swish_2D_without_beta)
+{
+    Shape in_shape{2, 3};
+    element::Type et = element::f32;
+
+    auto args0 = make_shared<op::Parameter>(et, in_shape);
+    auto swish = make_shared<op::v4::Swish>(args0);
+    auto f = make_shared<Function>(swish, ParameterVector{args0});
+
+    vector<float> in_vec{1, 8, -8, 17, -0.5, -1};
+    vector<float> out_vec{in_vec};
+    std::transform(out_vec.begin(), out_vec.end(), out_vec.begin(), [](float x) -> float { return (x / (1.0f + std::exp(x * -1.0f))); });
+
+    auto test_case = test::TestCase<TestEngine>(f);
+    test_case.add_input<float>(in_vec);
+    test_case.add_expected_output<float>(in_shape, out_vec);
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, swish_4D_with_beta0_33)
+{
+    Shape in_shape{2, 2, 1, 2};
+    element::Type et = element::f32;
+    auto beta = 0.33f;
+
+    auto args0 = make_shared<op::Parameter>(et, in_shape);
+    auto args1 = make_shared<op::Parameter>(et, Shape{});
+    auto swish = make_shared<op::v4::Swish>(args0, args1);
+    auto f = make_shared<Function>(swish, ParameterVector{args0, args1});
+
+    vector<vector<float>> in_vec{vector<float>{0.1, 0.6, 20, -7, -5.3, 3.5, -9, 11}, vector<float>{beta}};
+    vector<float> out_vec{in_vec[0]};
+    std::transform(out_vec.begin(), out_vec.end(), out_vec.begin(), [&beta](float x) -> float { return (x / (1.0f + std::exp(x * beta * -1.0f))); });
+
+    auto test_case = test::TestCase<TestEngine>(f);
+    test_case.add_multiple_inputs<float>(in_vec);
+    test_case.add_expected_output<float>(in_shape, out_vec);
+    test_case.run();
+}

ngraph/test/type_prop/swish.cpp (+14)

@@ -81,3 +81,17 @@ TEST(type_prop, swish_2_inputs)
     ASSERT_TRUE(swish_func->get_output_partial_shape(0).same_scheme(data->get_output_shape(0)));
     ASSERT_TRUE(swish_func->get_output_partial_shape(0).rank().is_static());
 }
+
+TEST(type_prop, swish_incompatible_type_boolean)
+{
+    auto data = make_shared<op::Parameter>(element::boolean, Shape{1, 3, 6});
+    auto beta = make_shared<op::Parameter>(element::f32, Shape{});
+    ASSERT_THROW(make_shared<op::v4::Swish>(data, beta);, ngraph::NodeValidationFailure);
+}
+
+TEST(type_prop, swish_incompatible_types_u32)
+{
+    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6});
+    auto beta = make_shared<op::Parameter>(element::u32, Shape{});
+    ASSERT_THROW(make_shared<op::v4::Swish>(data, beta);, ngraph::NodeValidationFailure);
+}

ngraph/test/visitors/op/swish.cpp (+11)

@@ -0,0 +1,11 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+#include "unary_ops.hpp"
+
+using Type = ::testing::Types<UnaryOperatorType<ngraph::op::v4::Swish, element::f32>>;
+
+INSTANTIATE_TYPED_TEST_CASE_P(visitor_without_atrribute,
+                              UnaryOperatorVisitor,
+                              Type,
+                              UnaryOperatorTypeName);
