Headers under torch/csrc/distributed may be referenced with a relative path, e.g., "<c10d/...>". However, relative paths cannot be handled gracefully by Meta's internal build when the NCCL ProcessGroup is hipified to support AMD/RCCL, because the hipified header files are generated in other directories. Moreover, absolute header paths are already the convention in most components of PyTorch. This patch therefore refactors all header paths in torch/csrc/distributed to be absolute. See D39835774 for more details about the Meta-internal complication.

**How to test**: commit 9e5d199 removes -I./torch/csrc/distributed from the compile options; build with it to verify that no relative-path uses of torch/csrc/distributed headers were missed.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/85780
Approved by: https://github.com/kumpera, https://github.com/huydhn
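For illustration, a minimal before/after sketch of the include-path change this patch applies (this particular header pairing is an assumed example; the full set of edited files is in the PR diff):

// Before: relative include, resolved only because the build passed
// -I./torch/csrc/distributed; this breaks when hipified copies of the
// headers are generated under a different directory.
#include <c10d/ParamCommsUtils.hpp>

// After: absolute include rooted at the repository top level, matching
// the convention used by most other PyTorch components.
#include <torch/csrc/distributed/c10d/ParamCommsUtils.hpp>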
26 lines
697 B
C++
// (c) Facebook, Inc. and its affiliates. Confidential and proprietary.

#include <torch/csrc/distributed/c10d/ParamCommsUtils.hpp>

namespace torch {

// Name under which parameter-communication events are recorded by the
// profiler.
extern const std::string kParamCommsCallName = "record_param_comms";
// Captures per-collective debug metadata: the caller's rank, the collective
// name, input/output message sizes, the element dtype, and the per-rank
// split sizes used by uneven collectives.
ParamCommsDebugInfo::ParamCommsDebugInfo(
    int rank,
    std::string&& colName,
    int inSize,
    int outSize,
    at::ScalarType dType,
    std::vector<int64_t> inSplitSizes,
    std::vector<int64_t> outSplitSizes)
    : rank_(rank),
      columnName_(std::move(colName)), // colName is an rvalue reference; move it
      inMessageSize_(inSize),
      outMessageSize_(outSize),
      dType_(dType),
      inputSplitSizes_(std::move(inSplitSizes)),
      outputSplitSizes_(std::move(outSplitSizes)) {}
} // namespace torch
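
For context, a minimal usage sketch of the constructor defined above; the call site, function name, and argument values are hypothetical (real call sites live in the c10d process-group implementations):

#include <torch/csrc/distributed/c10d/ParamCommsUtils.hpp>

#include <memory>
#include <vector>

// Hypothetical call site: record metadata for an allreduce of 1024 float
// elements on rank 0, with no uneven input/output splits.
void exampleCallSite() {
  auto debugInfo = std::make_shared<torch::ParamCommsDebugInfo>(
      /*rank=*/0,
      /*colName=*/std::string("allreduce"),
      /*inSize=*/1024,
      /*outSize=*/1024,
      /*dType=*/at::kFloat,
      /*inSplitSizes=*/std::vector<int64_t>{},
      /*outSplitSizes=*/std::vector<int64_t>{});
  // The object would typically be installed as thread-local debug info so
  // the profiler can attach it to the recorded collective (assumption).
  (void)debugInfo;
}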