 #include "caffe/layers/prelu_layer.hpp"
 #include "caffe/layers/relu_layer.hpp"
 #include "caffe/layers/sigmoid_layer.hpp"
+#include "caffe/layers/swish_layer.hpp"
 #include "caffe/layers/tanh_layer.hpp"
 #include "caffe/layers/threshold_layer.hpp"
 
@@ -344,6 +345,84 @@ TYPED_TEST(NeuronLayerTest, TestSigmoidGradient) {
       this->blob_top_vec_);
 }
 
+TYPED_TEST(NeuronLayerTest, TestSwish) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SwishLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_FLOAT_EQ(top_data[i], bottom_data[i] / (1. + exp(-bottom_data[i])));
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestSwishWithBeta) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "swish_param { beta: 1.5 }", &layer_param));
+  SwishLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_FLOAT_EQ(top_data[i], bottom_data[i] / (1. + exp(-1.5 *
+        bottom_data[i])));
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestSwishAsLinear) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "swish_param { beta: 0.0 }", &layer_param));
+  SwishLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_FLOAT_EQ(top_data[i], bottom_data[i] / 2.0);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestSwishGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SwishLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestSwishWithBetaGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "swish_param { beta: 1.5 }", &layer_param));
+  SwishLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestSwishAsLinearGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "swish_param { beta: 0.0 }", &layer_param));
+  SwishLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
 TYPED_TEST(NeuronLayerTest, TestTanH) {
   typedef typename TypeParam::Dtype Dtype;
   LayerParameter layer_param;
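
Note on the expected values: the forward tests above check the Swish activation y = x * sigmoid(beta * x) = x / (1 + exp(-beta * x)). With the default beta = 1 this is TestSwish, with beta = 1.5 it is TestSwishWithBeta, and with beta = 0 the sigmoid term is a constant 0.5, so the layer reduces to y = x / 2 (TestSwishAsLinear). Below is a minimal standalone sketch of that reference computation; swish_reference is an illustrative helper for this note, not part of the Caffe sources or of this diff.

// Reference-only sketch: mirrors the expressions used in the
// EXPECT_FLOAT_EQ checks above (swish_reference is a hypothetical helper).
#include <cmath>
#include <cstdio>

static double swish_reference(double x, double beta) {
  // x * sigmoid(beta * x), written the same way as in the tests
  return x / (1.0 + std::exp(-beta * x));
}

int main() {
  const double x = 2.0;
  std::printf("beta=1.0: %f\n", swish_reference(x, 1.0));  // default Swish (TestSwish)
  std::printf("beta=1.5: %f\n", swish_reference(x, 1.5));  // TestSwishWithBeta
  std::printf("beta=0.0: %f\n", swish_reference(x, 0.0));  // equals x / 2 (TestSwishAsLinear)
  return 0;
}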