2018-09-21 21:12:37 -07:00
|
|
|
#include <gtest/gtest.h>
|
2018-04-30 18:36:35 -07:00
|
|
|
|
2018-07-12 18:45:48 -07:00
|
|
|
#include <torch/nn/init.h>
|
2018-05-30 08:55:34 -07:00
|
|
|
#include <torch/nn/modules/linear.h>
|
2018-11-06 14:28:20 -08:00
|
|
|
#include <torch/types.h>
|
2018-05-30 08:55:34 -07:00
|
|
|
#include <torch/utils.h>
|
2018-04-30 18:36:35 -07:00
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
#include <test/cpp/api/support.h>
|
2018-05-24 12:46:51 -07:00
|
|
|
|
2019-08-27 21:41:15 -07:00
|
|
|
#include <functional>
|
|
|
|
|
|
|
|
|
|
using namespace torch::test;
|
|
|
|
|
|
|
|
|
|
// Helper that emits warning "A" via TORCH_WARN_ONCE; repeated calls should
// produce the message only once (verified by UtilsTest.WarnOnce below).
void torch_warn_once_A() {
  TORCH_WARN_ONCE("warn once");
}
|
|
|
|
|
|
|
|
|
|
// Helper that emits warning "B" via TORCH_WARN_ONCE; a distinct call site from
// torch_warn_once_A, so each message is deduplicated independently.
void torch_warn_once_B() {
  TORCH_WARN_ONCE("warn something else once");
}
|
|
|
|
|
|
|
|
|
|
// Helper that emits a warning via plain TORCH_WARN; unlike TORCH_WARN_ONCE,
// every call should produce the message again.
void torch_warn() {
  TORCH_WARN("warn multiple times");
}
|
|
|
|
|
|
|
|
|
|
// Checks the warning-deduplication contract: TORCH_WARN_ONCE emits each
// message exactly once per call site, while TORCH_WARN emits every time.
TEST(UtilsTest, WarnOnce) {
  // Runs `body` with std::cerr redirected into a local buffer and returns
  // everything that was written while it ran.
  const auto capture_warnings = [](const auto& body) {
    std::stringstream buffer;
    CerrRedirect cerr_redirect(buffer.rdbuf());
    body();
    return buffer.str();
  };

  const auto deduped = capture_warnings([] {
    torch_warn_once_A();
    torch_warn_once_A();
    torch_warn_once_B();
    torch_warn_once_B();
  });
  // Two calls each, but TORCH_WARN_ONCE suppresses the repeats.
  ASSERT_EQ(count_substr_occurrences(deduped, "warn once"), 1);
  ASSERT_EQ(count_substr_occurrences(deduped, "warn something else once"), 1);

  const auto repeated = capture_warnings([] {
    torch_warn();
    torch_warn();
    torch_warn();
  });
  // Plain TORCH_WARN fires on every call.
  ASSERT_EQ(count_substr_occurrences(repeated, "warn multiple times"), 3);
}
|
|
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
// Verifies that NoGradGuard disables gradient recording: a forward pass run
// under the guard produces an output with no grad_fn, so backward() throws.
TEST(NoGradTest, SetsGradModeCorrectly) {
  torch::manual_seed(0);
  torch::NoGradGuard guard;
  torch::nn::Linear model(5, 2);
  torch::Tensor input = torch::randn({10, 5}, torch::requires_grad());
  torch::Tensor output = model->forward(input);
  auto loss = output.sum();

  // Mimicking python API behavior:
  ASSERT_THROWS_WITH(loss.backward(),
    "element 0 of tensors does not require grad and does not have a grad_fn")
}
|
2018-05-17 17:10:15 -04:00
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
struct AutogradTest : torch::test::SeedingFixture {
|
|
|
|
|
AutogradTest() {
|
|
|
|
|
x = torch::randn({3, 3}, torch::requires_grad());
|
|
|
|
|
y = torch::randn({3, 3});
|
|
|
|
|
z = x * y;
|
2018-05-24 17:31:41 -07:00
|
|
|
}
|
2018-09-21 21:12:37 -07:00
|
|
|
torch::Tensor x, y, z;
|
|
|
|
|
};
|
2018-05-24 17:31:41 -07:00
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
// For z = x * y, dz/dx is y, so backpropagating ones through z must leave
// x.grad() equal to y.
TEST_F(AutogradTest, CanTakeDerivatives) {
  const auto grad_output = torch::ones_like(z);
  z.backward(grad_output);
  ASSERT_TRUE(x.grad().allclose(y));
}
|
|
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
// backward() on a zero-dim (scalar) tensor needs no explicit grad output;
// the gradient of sum(x * y) w.r.t. x is still y.
TEST_F(AutogradTest, CanTakeDerivativesOfZeroDimTensors) {
  auto scalar_loss = z.sum();
  scalar_loss.backward();
  ASSERT_TRUE(x.grad().allclose(y));
}
|
2018-05-24 12:46:51 -07:00
|
|
|
|
2018-09-21 21:12:37 -07:00
|
|
|
// A custom upstream gradient scales the result: backpropagating 2 instead of
// 1 through sum(x * y) doubles x.grad().
TEST_F(AutogradTest, CanPassCustomGradientInputs) {
  const auto custom_grad = torch::ones({}) * 2;
  z.sum().backward(custom_grad);
  ASSERT_TRUE(x.grad().allclose(y * 2));
}
|