Autogenerated contiguous memory format for old *_like calls

Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/29227

Test Plan: Imported from OSS

Differential Revision: D18330969

Pulled By: VitalyFedyunin

fbshipit-source-id: 54d75c025b40520866b2480ce86e6483e2dcb002
commit 81bf73643b
parent cc1c0120bc
Author:    Vitaly Fedyunin
Date:      2019-11-06 07:20:46 -08:00
Committer: Facebook Github Bot

3 changed files with 5 additions and 5 deletions
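The change mechanically updates `at::ones_like` call sites to pass an explicit `at::MemoryFormat::Contiguous`, pinning the historical behavior (the result always has default contiguous strides) while the `*_like` factories migrate toward memory-format-aware signatures. Below is a minimal sketch of the distinction; it is not part of this commit and assumes a libtorch build where `at::MemoryFormat::Preserve` and `Tensor::contiguous(MemoryFormat)` are available.

    #include <ATen/ATen.h>

    int main() {
      // A 4-D tensor carried with channels-last (NHWC) strides.
      at::Tensor x =
          at::rand({2, 3, 4, 5}).contiguous(at::MemoryFormat::ChannelsLast);

      // Pinning the format keeps the old behavior: the result has default
      // (NCHW-style) contiguous strides regardless of x's layout.
      at::Tensor a = at::ones_like(x, at::MemoryFormat::Contiguous);

      // Asking to preserve the format copies x's channels-last strides instead.
      at::Tensor b = at::ones_like(x, at::MemoryFormat::Preserve);

      bool ok = a.is_contiguous() &&
                b.is_contiguous(at::MemoryFormat::ChannelsLast);
      return ok ? 0 : 1;
    }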


@@ -26,7 +26,7 @@ namespace native {
 static void make_offset2bag(const Tensor &offsets, const Tensor &indices, Tensor& offset2bag) {
   offset2bag.index_add_(
-      0, offsets, at::ones_like(offsets)); // offset2bag = [1 0 1 0 1]
+      0, offsets, at::ones_like(offsets, at::MemoryFormat::Contiguous)); // offset2bag = [1 0 1 0 1]
   offset2bag[0] -= 1;                // offset2bag = [0 0 1 0 1]
   offset2bag = offset2bag.cumsum(0); // offset2bag = [0 0 1 1 2]
 }
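The inline comments in this hunk trace the trick: scatter a 1 at each bag's starting offset, cancel the leading 1, then take a cumulative sum so that position i holds the bag id of index i. A standalone reproduction, with offsets chosen as [0, 2, 4] over five indices to match the comments above:

    #include <torch/torch.h>

    int main() {
      torch::Tensor offsets = torch::tensor({0, 2, 4}, torch::kLong);
      torch::Tensor offset2bag = torch::zeros({5}, torch::kLong);

      // Mark each bag's first index with a 1.
      offset2bag.index_add_(
          0, offsets, at::ones_like(offsets, at::MemoryFormat::Contiguous));
      // offset2bag = [1 0 1 0 1]
      offset2bag[0] -= 1;                 // offset2bag = [0 0 1 0 1]
      offset2bag = offset2bag.cumsum(0);  // offset2bag = [0 0 1 1 2]
      return 0;
    }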
@@ -220,7 +220,7 @@ static Tensor apply_bag_size(const Tensor &offsets, const Tensor &indices,
     auto bag_size_ = std::max(indices.size(0), static_cast<int64_t>(1));
     output /= bag_size_;
   } else {
-    auto bag_size_ = at::max(bag_size, at::ones_like(bag_size))
+    auto bag_size_ = at::max(bag_size, at::ones_like(bag_size, at::MemoryFormat::Contiguous))
                          .to(output.options())
                          .unsqueeze(1)
                          .expand_as(output);
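In this mean-pooling path an empty bag has bag_size 0, so the element-wise max with a ones tensor clamps the divisor to at least 1 and keeps the division well-defined (an empty bag's output stays 0). A sketch of the same broadcast divide with hypothetical values, not part of this commit:

    #include <torch/torch.h>

    int main() {
      torch::Tensor output = torch::ones({3, 4});  // per-bag sums
      torch::Tensor bag_size = torch::tensor({2, 0, 1}, torch::kLong);

      auto bag_size_ =
          at::max(bag_size, at::ones_like(bag_size, at::MemoryFormat::Contiguous))
              .to(output.options())  // match output's dtype/device
              .unsqueeze(1)          // shape [3] -> [3, 1]
              .expand_as(output);    // broadcast across the feature dim
      output /= bag_size_;           // per-bag mean; an empty bag divides by 1
      return 0;
    }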


@@ -27,7 +27,7 @@ variable_list _make_grads(
       TORCH_CHECK(
           output.numel() == 1,
           "grad can be implicitly created only for scalar outputs");
-      new_grads.emplace_back(at::ones_like(output));
+      new_grads.emplace_back(at::ones_like(output, at::MemoryFormat::Contiguous));
     }
   }
 } else {
@@ -45,7 +45,7 @@ variable_list _make_grads(
       TORCH_CHECK(
           output.numel() == 1,
           "grad can be implicitly created only for scalar outputs");
-      new_grads.emplace_back(at::ones_like(output));
+      new_grads.emplace_back(at::ones_like(output, at::MemoryFormat::Contiguous));
     }
   } else {
     // grad output is defined, just append to the new_grads
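`_make_grads` is what lets `backward()` run without an explicit gradient argument: for a scalar output it seeds the graph with a ones tensor, the `at::ones_like` call patched in these two hunks. A usage sketch via the libtorch C++ frontend:

    #include <torch/torch.h>

    int main() {
      torch::Tensor x = torch::ones({2, 2}, torch::requires_grad());
      torch::Tensor loss = (x * x).sum();  // scalar output

      // No gradient argument: autograd implicitly seeds the backward pass
      // with ones_like(loss) via _make_grads.
      loss.backward();

      // x.grad() now holds 2 * x.
      return x.grad().defined() ? 0 : 1;
    }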


@@ -49,7 +49,7 @@ void DistEngine::validateRootsAndRetrieveEdges(
     // Compute the root edges and generate the appropriate gradients.
     rootEdges.push_back(root.gradient_edge());
-    grads.push_back(at::ones_like(root));
+    grads.push_back(at::ones_like(root, at::MemoryFormat::Contiguous));
   }
   // Validate rootEdges and grads.