pytorch/caffe2/python/operator_test/selu_op_test.py
Ansha Yu 3b1a5a1b8a Refactor tests part 2 (#11811)
Summary:
Follow-up to the [first refactor](https://github.com/pytorch/pytorch/pull/11350). Increases test coverage.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/11811

Reviewed By: houseroad

Differential Revision: D9923074

Pulled By: ajyu

fbshipit-source-id: 0f899bb9e9a75bf7ed939e06cc9b028daa7f6bd9
2018-09-19 10:09:28 -07:00

98 lines
3.2 KiB
Python

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
import unittest
class TestSelu(serial.SerializedTestCase):
    """Tests for the Caffe2 Selu operator and its gradient.

    Selu(x) = scale * (x            if x > 0
                       alpha * (e^x - 1) otherwise)

    Each test draws a random tensor via hypothesis, nudges values away
    from the kink at 0 (see fix0), and runs device, gradient, and
    reference checks against a NumPy implementation (selu_ref).
    """

    def _run_selu_checks(self, X, gc, dc, engine, alpha, scale, **grad_kwargs):
        # Shared body for the parameterized tests below: build the op,
        # move X off the non-differentiable point, then run all checks.
        # grad_kwargs lets callers loosen gradient-check tolerances.
        op = core.CreateOperator("Selu", ["X"], ["Y"],
                                 alpha=alpha, scale=scale, engine=engine)
        X = TestSelu.fix0(X)
        self.assertDeviceChecks(dc, op, [X], [0])
        self.assertGradientChecks(gc, op, [X], 0, [0], **grad_kwargs)
        self.assertReferenceChecks(
            gc, op, [X], lambda x: TestSelu.selu_ref(x, alpha=alpha, scale=scale)
        )

    @serial.given(X=hu.tensor(),
                  engine=st.sampled_from(["", "CUDNN"]),
                  **hu.gcs)
    def test_selu_1(self, X, gc, dc, engine):
        # Round alpha/scale values.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.0, scale=2.0)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    def test_selu_2(self, X, gc, dc, engine):
        # Canonical SELU constants (Klambauer et al.); gradient check
        # uses looser tolerances for these non-round values.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.6732, scale=1.0507,
                              stepsize=1e-2, threshold=1e-2)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    def test_selu_3(self, X, gc, dc, engine):
        # Arbitrary non-canonical constants.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.3, scale=1.1)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    def test_selu_inplace(self, X, gc, dc, engine):
        """Check the forward op and its gradient when output aliases input."""
        alpha = 1.3
        scale = 1.1
        # Forward pass writing Y into the X blob.
        op = core.CreateOperator("Selu", ["X"], ["X"],
                                 alpha=alpha, scale=scale, engine=engine)
        X = TestSelu.fix0(X)
        self.assertDeviceChecks(dc, op, [X], [0])

        # In-place gradient: SeluGradient overwrites its dX input.
        Y = TestSelu.selu_ref(X, alpha=alpha, scale=scale)
        dX = np.ones_like(X)
        op2 = core.CreateOperator("SeluGradient", ["Y", "dX"], ["dX"],
                                  alpha=alpha, scale=scale, engine=engine)
        self.assertDeviceChecks(dc, op2, [Y, dX], [0])

    @staticmethod
    def fix0(X):
        """Push every element of X away from 0.

        Selu has a kink at the origin, where numerical gradient checking
        is unreliable; shift values outward by 0.02 (exact zeros are
        bumped to +0.02, since sign(0) == 0 leaves them unchanged).
        Note: mutates X in place and also returns it.
        """
        X += 0.02 * np.sign(X)
        X[X == 0.0] += 0.02
        return X

    @staticmethod
    def selu_ref(x, scale, alpha):
        """NumPy reference: scale * (x if x > 0 else alpha*(e^x - 1)).

        Returns a single-element list, matching the operator's output
        list convention expected by assertReferenceChecks.
        """
        ret = scale * ((x > 0) * x + (x <= 0) * (alpha * (np.exp(x) - 1)))
        return [ret]
# Allow running this test module directly (e.g. `python selu_op_test.py`).
if __name__ == "__main__":
    unittest.main()