pytorch/caffe2/python/operator_test/selu_op_test.py
Bugra Akyildiz 27c7158166 Remove __future__ imports for legacy Python 2 support (#45033)
Summary:
The `2to3` tool has a `future` fixer that can be targeted specifically to remove these imports; the `caffe2` directory has the most redundant ones:

```2to3 -f future -w caffe2```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/45033

Reviewed By: seemethere

Differential Revision: D23808648

Pulled By: bugra

fbshipit-source-id: 38971900f0fe43ab44a9168e57f2307580d36a38
2020-09-23 17:57:02 -07:00

100 lines
3.2 KiB
Python

from caffe2.python import core
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
import unittest
class TestSelu(serial.SerializedTestCase):
    """Tests for the Selu operator: y = scale * (x if x > 0 else alpha * (exp(x) - 1))."""

    def _run_selu_checks(self, X, gc, dc, engine, alpha, scale,
                         grad_stepsize=None, grad_threshold=None):
        """Build a Selu op and run device, gradient, and reference checks.

        Args:
            X: input tensor generated by hypothesis.
            gc, dc: gradient-check device option and device-check list from hu.gcs.
            engine: operator engine ("" or "CUDNN").
            alpha, scale: Selu parameters under test.
            grad_stepsize, grad_threshold: optional overrides forwarded to
                assertGradientChecks; None keeps that check's defaults.
        """
        op = core.CreateOperator(
            "Selu", ["X"], ["Y"], alpha=alpha, scale=scale, engine=engine
        )
        X = TestSelu.fix0(X)
        self.assertDeviceChecks(dc, op, [X], [0])
        # Only pass tolerance overrides when explicitly requested, so the
        # other tests keep the framework defaults exactly as before.
        grad_kwargs = {}
        if grad_stepsize is not None:
            grad_kwargs["stepsize"] = grad_stepsize
        if grad_threshold is not None:
            grad_kwargs["threshold"] = grad_threshold
        self.assertGradientChecks(gc, op, [X], 0, [0], **grad_kwargs)
        self.assertReferenceChecks(
            gc, op, [X], lambda x: TestSelu.selu_ref(x, alpha=alpha, scale=scale)
        )

    @serial.given(X=hu.tensor(),
                  engine=st.sampled_from(["", "CUDNN"]),
                  **hu.gcs)
    def test_selu_1(self, X, gc, dc, engine):
        # Round-number parameters.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.0, scale=2.0)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    @settings(deadline=1000)
    def test_selu_2(self, X, gc, dc, engine):
        # Canonical SELU constants; the gradient check uses looser
        # step/threshold here, as in the original test.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.6732, scale=1.0507,
                              grad_stepsize=1e-2, grad_threshold=1e-2)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    @settings(deadline=1000)
    def test_selu_3(self, X, gc, dc, engine):
        # Arbitrary non-default parameters.
        self._run_selu_checks(X, gc, dc, engine, alpha=1.3, scale=1.1)

    @given(X=hu.tensor(),
           engine=st.sampled_from(["", "CUDNN"]),
           **hu.gcs)
    def test_selu_inplace(self, X, gc, dc, engine):
        """Check Selu and SeluGradient with output aliased to an input."""
        alpha = 1.3
        scale = 1.1
        op = core.CreateOperator("Selu", ["X"], ["X"],
                                 alpha=alpha, scale=scale, engine=engine)
        X = TestSelu.fix0(X)
        self.assertDeviceChecks(dc, op, [X], [0])
        # inplace gradient: SeluGradient writes its result over dX.
        Y = TestSelu.selu_ref(X, alpha=alpha, scale=scale)
        dX = np.ones_like(X)
        op2 = core.CreateOperator("SeluGradient", ["Y", "dX"], ["dX"],
                                  alpha=alpha, scale=scale, engine=engine)
        self.assertDeviceChecks(dc, op2, [Y, dX], [0])

    @staticmethod
    def fix0(X):
        """Return a copy of X with every value pushed away from zero.

        Moving values off the origin avoids the kink in Selu at x == 0,
        which would make numeric gradient checks unreliable.
        """
        # Bug fix: copy first so the caller's array is not mutated in place
        # (every call site reassigns the result, so behavior is unchanged).
        X = X.copy()
        X += 0.02 * np.sign(X)
        X[X == 0.0] += 0.02
        return X

    @staticmethod
    def selu_ref(x, scale, alpha):
        """NumPy reference Selu; returns the result as a one-element list,
        matching the output-list convention of assertReferenceChecks."""
        ret = scale * ((x > 0) * x + (x <= 0) * (alpha * (np.exp(x) - 1)))
        return [ret]
# Allow running this test module directly (e.g. `python selu_op_test.py`).
if __name__ == "__main__":
    unittest.main()