#include <catch.hpp>
#include <torch/torch.h>
using namespace torch;
using namespace torch::nn;
// Miscellaneous behavioral checks: autograd suppression and CPU RNG determinism.
TEST_CASE("misc") {
  SECTION("no_grad") {
    // While a no_grad_guard is alive, backward() must not record gradients.
    no_grad_guard guard;
    auto net = make(Linear(5, 2));
    auto input = Var(at::CPU(at::kFloat).randn({10, 5}), true);
    auto output = net->forward({input})[0];
    Variable total = output.sum();
    backward(total);
    // The gradient tensor must remain undefined after the (suppressed) backward pass.
    REQUIRE(!net->parameters()["weight"].grad().defined());
  }
  SECTION("CPU random seed") {
    // Re-seeding the RNG with the same value must reproduce the identical sample.
    const int size = 100;
    setSeed(7);
    auto first = Var(at::CPU(at::kFloat).randn({size}));
    setSeed(7);
    auto second = Var(at::CPU(at::kFloat).randn({size}));
    // Compare via the L-infinity norm of the element-wise difference.
    auto diff = first.data() - second.data();
    auto max_abs_diff = diff.abs().max().toCFloat();
    REQUIRE(max_abs_diff < 1e-10);
  }
}
// CUDA counterpart of the RNG determinism check; tagged [cuda] so it can be
// filtered out on machines without a GPU.
TEST_CASE("misc_cuda", "[cuda]") {
  SECTION("CUDA random seed") {
    // Re-seeding the RNG with the same value must reproduce the identical sample.
    const int size = 100;
    setSeed(7);
    auto first = Var(at::CUDA(at::kFloat).randn({size}));
    setSeed(7);
    auto second = Var(at::CUDA(at::kFloat).randn({size}));
    // Compare via the L-infinity norm of the element-wise difference.
    auto diff = first.data() - second.data();
    auto max_abs_diff = diff.abs().max().toCFloat();
    REQUIRE(max_abs_diff < 1e-10);
  }
}