pytorch/caffe2/python/ideep/dropout_op_test.py
Gu, Jinghui dbab9b73b6 separate mkl, mklml, and mkldnn (#12170)
Summary:
1. Remove avx2 support in mkldnn
2. Separate mkl, mklml, and mkldnn
3. Fix convfusion test case
Pull Request resolved: https://github.com/pytorch/pytorch/pull/12170

Reviewed By: yinghai

Differential Revision: D10207126

Pulled By: orionr

fbshipit-source-id: 1e62eb47943f426a89d57e2d2606439f2b04fd51
2018-10-29 10:52:55 -07:00

61 lines
2.2 KiB
Python

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from hypothesis import assume, given
import hypothesis.strategies as st
import numpy as np
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.ideep_test_util as mu
@unittest.skipIf(not workspace.C.use_mkldnn, "No MKLDNN support.")
class DropoutTest(hu.HypothesisTestCase):
    """Hypothesis-driven tests for the IDEEP/MKL-DNN Dropout operator.

    Both tests compare the IDEEP operator against a pure-NumPy reference
    implementation via ``assertReferenceChecks`` and run device-consistency
    checks via ``assertDeviceChecks``.
    """

    @given(X=hu.tensor(),
           in_place=st.booleans(),
           ratio=st.floats(0, 0.999),
           **mu.gcs)
    def test_dropout_is_test(self, X, in_place, ratio, gc, dc):
        """Test with is_test=True for a deterministic reference impl."""
        op = core.CreateOperator('Dropout', ['X'],
                                 ['X' if in_place else 'Y'],
                                 ratio=ratio, is_test=True)
        self.assertDeviceChecks(dc, op, [X], [0])

        # No sense in checking gradients for test phase
        def reference_dropout_test(x):
            # In test mode dropout is the identity; the mask is all-ones.
            # NOTE: the np.bool alias was removed in NumPy 1.24 -- use the
            # builtin bool dtype, which is what np.bool aliased anyway.
            return x, np.ones(x.shape, dtype=bool)

        self.assertReferenceChecks(
            gc, op, [X], reference_dropout_test,
            # The 'mask' output may be uninitialized
            outputs_to_check=[0])

    @given(X=hu.tensor(),
           in_place=st.booleans(),
           output_mask=st.booleans(),
           **mu.gcs)
    @unittest.skipIf(True, "Skip due to different rand seed.")
    def test_dropout_ratio0(self, X, in_place, output_mask, gc, dc):
        """Test with ratio=0 for a deterministic reference impl."""
        # With no mask output the op must run in test mode (and vice versa).
        is_test = not output_mask
        op = core.CreateOperator('Dropout', ['X'],
                                 ['X' if in_place else 'Y'] +
                                 (['mask'] if output_mask else []),
                                 ratio=0.0, is_test=is_test)
        self.assertDeviceChecks(dc, op, [X], [0])

        def reference_dropout_ratio0(x):
            # ratio=0 keeps every element; the mask (when requested) is
            # all-ones.  See the NumPy note above re: dtype=bool.
            return (x,) if is_test else (x, np.ones(x.shape, dtype=bool))

        self.assertReferenceChecks(
            gc, op, [X], reference_dropout_ratio0, outputs_to_check=[0])
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()