pytorch/test/cpp/api/misc.cpp
Kurt Mohler 124cdf2290 Add experimental deterministic flag (#38683)
Summary:
Adds a `torch.experimental.deterministic` flag to enforce deterministic algorithms across all of PyTorch.
Adds `torch.experimental.deterministic_error_level` to let users choose between raising an error, emitting a warning, or staying silent when a deterministic implementation of an operation is not available.
Adds `torch.experimental.alert_not_deterministic()`, which should be called from within operations that are not deterministic.
Offers both Python and ATen interfaces.
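
As a rough sketch of the ATen-side interface (the operator below is hypothetical and not part of this change; globalContext(), setDeterministic(), deterministic(), and alertNotDeterministic() are the calls exercised by the DeterministicTest cases at the bottom of this file), a kernel without a deterministic implementation would raise the alert through the global context before running:

    #include <ATen/ATen.h>

    // Hypothetical operator used only for illustration.
    at::Tensor my_nondeterministic_op(const at::Tensor& input) {
      // Errors, warns, or stays silent depending on the user's
      // determinism settings described above.
      at::globalContext().alertNotDeterministic("my_nondeterministic_op");
      // ... nondeterministic kernel would run here ...
      return input.clone();
    }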

Issue https://github.com/pytorch/pytorch/issues/15359
Pull Request resolved: https://github.com/pytorch/pytorch/pull/38683

Differential Revision: D21998093

Pulled By: ezyang

fbshipit-source-id: 23aabbddd20f6199d846f97764ff24d728163737
2020-06-12 08:44:06 -07:00


#include <gtest/gtest.h>
#include <torch/torch.h>

#include <test/cpp/api/support.h>

#include <functional>

using namespace torch::test;

void torch_warn_once_A() {
  TORCH_WARN_ONCE("warn once");
}

void torch_warn_once_B() {
  TORCH_WARN_ONCE("warn something else once");
}

void torch_warn() {
  TORCH_WARN("warn multiple times");
}

TEST(UtilsTest, WarnOnce) {
  {
    WarningCapture warnings;

    torch_warn_once_A();
    torch_warn_once_A();
    torch_warn_once_B();
    torch_warn_once_B();

    ASSERT_EQ(count_substr_occurrences(warnings.str(), "warn once"), 1);
    ASSERT_EQ(
        count_substr_occurrences(warnings.str(), "warn something else once"),
        1);
  }
  {
    WarningCapture warnings;

    torch_warn();
    torch_warn();
    torch_warn();

    ASSERT_EQ(
        count_substr_occurrences(warnings.str(), "warn multiple times"), 3);
  }
}

TEST(NoGradTest, SetsGradModeCorrectly) {
  torch::manual_seed(0);
  torch::NoGradGuard guard;
  torch::nn::Linear model(5, 2);
  auto x = torch::randn({10, 5}, torch::requires_grad());
  auto y = model->forward(x);
  torch::Tensor s = y.sum();

  // Mimicking python API behavior:
  ASSERT_THROWS_WITH(
      s.backward(),
      "element 0 of tensors does not require grad and does not have a grad_fn")
}

struct AutogradTest : torch::test::SeedingFixture {
  AutogradTest() {
    x = torch::randn({3, 3}, torch::requires_grad());
    y = torch::randn({3, 3});
    z = x * y;
  }
  torch::Tensor x, y, z;
};

TEST_F(AutogradTest, CanTakeDerivatives) {
  z.backward(torch::ones_like(z));
  ASSERT_TRUE(x.grad().allclose(y));
}

TEST_F(AutogradTest, CanTakeDerivativesOfZeroDimTensors) {
  z.sum().backward();
  ASSERT_TRUE(x.grad().allclose(y));
}

TEST_F(AutogradTest, CanPassCustomGradientInputs) {
  z.sum().backward(torch::ones({}) * 2);
  ASSERT_TRUE(x.grad().allclose(y * 2));
}
TEST(DeterministicTest, CanSetDeterministic) {
  auto context = &at::globalContext();

  // The global deterministic flag should round-trip through the context
  for (bool deterministic : {true, false}) {
    context->setDeterministic(deterministic);
    ASSERT_TRUE(context->deterministic() == deterministic);
  }
}

TEST(DeterministicTest, CanAlertNotDeterministic) {
  auto context = &at::globalContext();

  // With the deterministic setting turned on, alerting must throw
  context->setDeterministic(true);
  ASSERT_ANY_THROW(context->alertNotDeterministic("test"));

  context->setDeterministic(false);
  // Should not throw error if deterministic setting is turned off
  context->alertNotDeterministic("test");
}