pytorch/caffe2/core/timer_test.cc
Nikita Shulga 16774f7353 Increase TimerTest tolerance to 20% on Windows (#35818)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/35818

Test Plan: CI

Differential Revision: D20798424

Pulled By: malfet

fbshipit-source-id: 57e8d9c6b93903a6632168a4a35bf946d8c518aa
2020-04-01 14:29:05 -07:00

65 lines
1.6 KiB
C++

#include <chrono>
#include <iostream>
#include <thread>
#include "caffe2/core/timer.h"
#include <gtest/gtest.h>
namespace caffe2 {
namespace {
TEST(TimerTest, Test) {
  // A Timer begins measuring from the moment it is constructed, so even a
  // minimal sleep must produce a strictly positive reading.
  Timer timer;
  std::this_thread::sleep_for(std::chrono::microseconds(1));
  EXPECT_GT(timer.NanoSeconds(), 0);
  // Restart the timer, sleep ~100ms, then read the elapsed time in all
  // three supported units.
  timer.Start();
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
  const float elapsed_ns = timer.NanoSeconds();
  const float elapsed_us = timer.MicroSeconds();
  const float elapsed_ms = timer.MilliSeconds();
  // Scheduler jitter means sleep_for is imprecise; allow +-10% slack
  // (+-20% on Windows, whose sleep granularity is coarser).
#ifndef _WIN32
  EXPECT_NEAR(elapsed_ns, 100000000, 10000000);
  EXPECT_NEAR(elapsed_us, 100000, 10000);
  EXPECT_NEAR(elapsed_ms, 100, 10);
#else
  EXPECT_NEAR(elapsed_ns, 100000000, 20000000);
  EXPECT_NEAR(elapsed_us, 100000, 20000);
  EXPECT_NEAR(elapsed_ms, 100, 20);
#endif
  // Calling Start() again resets the clock: immediately afterwards the
  // elapsed time must be far below the 100ms we just slept.
  timer.Start();
  EXPECT_LT(timer.MicroSeconds(), 1000);
}
TEST(TimerTest, TestLatency) {
  // Reports (does not assert) the average overhead of Start() plus one
  // elapsed-time query, averaged over `iter` iterations, for each of the
  // three time units. The three original copy-pasted loops are factored
  // into a single generic helper.
  constexpr int iter = 1000;
  Timer timer;
  // Runs Start() + read_elapsed() `iter` times and returns the mean of the
  // values read. `read_elapsed` is any callable returning a float.
  const auto average_latency = [&](auto read_elapsed) {
    float total = 0;
    for (int i = 0; i < iter; ++i) {
      timer.Start();
      total += read_elapsed();
    }
    return total / iter;
  };
  std::cout << "Average nanosecond latency is: "
            << average_latency([&] { return timer.NanoSeconds(); })
            << std::endl;
  std::cout << "Average microsecond latency is: "
            << average_latency([&] { return timer.MicroSeconds(); })
            << std::endl;
  std::cout << "Average millisecond latency is: "
            << average_latency([&] { return timer.MilliSeconds(); })
            << std::endl;
}
} // namespace
} // namespace caffe2