4 changes: 2 additions & 2 deletions paddle/fluid/distributed/ps/service/ps_client.h
@@ -233,8 +233,8 @@ class PSClient {
   // client2client message handling: std::function<int32_t (int, int, const std::string&)
   // -> ret (msg_type, from_client_id, msg)
   typedef std::function<int32_t(int, int, const std::string &)> MsgHandlerFunc;
-  virtual int RegisteClient2ClientMsgHandler(int msg_type,
-                                             MsgHandlerFunc handler) {
+  virtual int RegisterClient2ClientMsgHandler(int msg_type,
+                                              MsgHandlerFunc handler) {
     _msg_handler_map[msg_type] = handler;
     return 0;
   }
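
As context for the rename: MsgHandlerFunc is the callback type a client registers per message type, and RegisterClient2ClientMsgHandler simply stores it in _msg_handler_map. A minimal sketch of registering a handler through the corrected name follows; kDemoMsgType, the lambda body, and the wrapping function are illustrative assumptions, not code from this PR.

    // Sketch only: registering a client-to-client handler via the renamed
    // method. kDemoMsgType and the handler body are hypothetical; the callback
    // signature is the MsgHandlerFunc typedef shown in the hunk above.
    void RegisterDemoHandler(paddle::distributed::PSClient* client) {
      constexpr int kDemoMsgType = 1;  // hypothetical message type id
      client->RegisterClient2ClientMsgHandler(
          kDemoMsgType,
          [](int msg_type, int from_client_id, const std::string& msg) -> int32_t {
            // from_client_id identifies the sender; msg carries the payload.
            return 0;  // returning 0 reports the message as handled
          });
    }
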
6 changes: 3 additions & 3 deletions paddle/fluid/distributed/ps/wrapper/fleet.cc
@@ -483,7 +483,7 @@ void FleetWrapper::PushDenseVarsAsync(

     ::paddle::distributed::Region reg(g, tensor->numel());
     regions.emplace_back(std::move(reg));
-    VLOG(3) << "FleetWrapper::PushDenseVarsAsync Var " << t << " talbe_id "
+    VLOG(3) << "FleetWrapper::PushDenseVarsAsync Var " << t << " table_id "
             << table_id << " Temp_data[0] " << g[0] << " Temp_data[-1] "
             << g[tensor->numel() - 1];
   }
@@ -813,7 +813,7 @@ void FleetWrapper::ShrinkDenseTable(int table_id,
   push_status.wait();
   auto status = push_status.get();
   if (status != 0) {
-    // PADDLE_THORW(platform::errors::Fatal(
+    // PADDLE_THROW(platform::errors::Fatal(
     //    "push shrink dense param failed, status is [%d].", status));
     sleep(sleep_seconds_before_fail_exit_);
     exit(-1);
@@ -839,7 +839,7 @@ int FleetWrapper::RegisterClientToClientMsgHandler(int msg_type,
     VLOG(0) << "FleetWrapper::Client is null";
     return -1;
   } else {
-    return worker_ptr_->RegisteClient2ClientMsgHandler(msg_type, handler);
+    return worker_ptr_->RegisterClient2ClientMsgHandler(msg_type, handler);
   }
 }

2 changes: 1 addition & 1 deletion paddle/fluid/distributed/rpc/rpc_agent.cc
@@ -138,7 +138,7 @@ void RpcAgent::SetAgentInstance(std::shared_ptr<RpcAgent> agent) {
       rpc_agent_instance_,
       nullptr,
       platform::errors::Fatal(
-          "RpcAgent has been set, please don't set rpc agent repeatly."));
+          "RpcAgent has been set, please don't set rpc agent repeatedly."));
   rpc_agent_instance_ = agent;
 }
 }  // namespace distributed
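
The hunk above is a set-once guard: the agent may be installed exactly once, and a second call is a fatal error rather than a silent overwrite. A self-contained sketch of the same pattern in plain C++, with the Paddle enforce macro replaced by a throw (an assumption for illustration, not Paddle API):

    #include <memory>
    #include <stdexcept>
    #include <utility>

    struct Agent {};  // stand-in for RpcAgent

    static std::shared_ptr<Agent> agent_instance;

    // Mirrors SetAgentInstance: refuse to install the singleton twice.
    void SetAgentInstance(std::shared_ptr<Agent> agent) {
      if (agent_instance != nullptr) {
        // Plain-C++ stand-in for PADDLE_ENFORCE_EQ(..., nullptr, ...).
        throw std::runtime_error(
            "RpcAgent has been set, please don't set rpc agent repeatedly.");
      }
      agent_instance = std::move(agent);
    }
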
@@ -192,7 +192,7 @@ void RunBrpcPushDense() {
   auto ph_host = paddle::distributed::PSHost(ip_, port_, 0);
   host_sign_list_.push_back(ph_host.SerializeToString());
 
-  // Srart Server
+  // Start Server
   std::thread server_thread(RunServer);
   sleep(1);

@@ -203,7 +203,7 @@ void RunBrpcPushSparse() {
   auto ph_host = paddle::distributed::PSHost(ip_, port_, 0);
   host_sign_list_.push_back(ph_host.SerializeToString());
 
-  // Srart Server
+  // Start Server
   std::thread server_thread(RunServer);
   sleep(1);

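
Both RunBrpcPushDense and RunBrpcPushSparse start the server on a second thread and then sleep(1) to give it time to come up. As a side note, a promise/future handshake removes that fixed wait; the sketch below is an assumption about how such a fixture could signal readiness, not code from this PR.

    #include <chrono>
    #include <future>
    #include <thread>

    // Hypothetical variant of the fixture: the server thread signals
    // readiness instead of the client sleeping for a fixed second.
    void RunServerAndSignal(std::promise<void>* ready) {
      std::this_thread::sleep_for(std::chrono::milliseconds(50));  // fake boot
      ready->set_value();  // "listening now"
    }

    int main() {
      std::promise<void> ready;
      std::future<void> up = ready.get_future();
      std::thread server_thread(RunServerAndSignal, &ready);
      up.wait();  // blocks exactly until startup, no arbitrary sleep(1)
      server_thread.join();
      return 0;
    }
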
@@ -60,9 +60,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,

   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
-    VLOG(5) << "got different data type, run type protmotion automatically.";
+    VLOG(5) << "got different data type, run type promotion automatically.";
     LOG_FIRST_N(WARNING, 1)
-        << "got different data type, run type protmotion "
+        << "got different data type, run type promotion "
            "automatically, this may cause data type been changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
@@ -407,9 +407,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,

   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
-    VLOG(5) << "got different data type, run type protmotion automatically.";
+    VLOG(5) << "got different data type, run type promotion automatically.";
     LOG_FIRST_N(WARNING, 1)
-        << "got different data type, run type protmotion "
+        << "got different data type, run type promotion "
            "automatically, this may cause data type been changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
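
Both hunks touch the same guarded-promotion pattern: when the operands' dtypes differ, the op looks up a common dtype and casts both sides before running the kernel. A sketch of that flow follows; it reuses only the phi calls quoted in the hunks, while PromoteOperands and the paddle::experimental::cast call are assumptions for illustration.

    #include <utility>

    // Sketch of the promotion step the hunks log about. The phi::* calls are
    // the ones quoted in the diff; paddle::experimental::cast is assumed to
    // be the casting entry point here.
    std::pair<paddle::Tensor, paddle::Tensor> PromoteOperands(
        const paddle::Tensor& x, const paddle::Tensor& y) {
      if (!phi::NeedTypePromotion(x.dtype(), y.dtype())) {
        return {x, y};  // dtypes already agree; nothing to promote
      }
      auto op_name = phi::TransToFluidOpName("multiply");
      auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
      // e.g. float32 * float64 would promote both operands to float64.
      return {paddle::experimental::cast(x, promotion_type),
              paddle::experimental::cast(y, promotion_type)};
    }
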
@@ -82,7 +82,7 @@ paddle::Tensor reshard_ad_function(
 #else
   PADDLE_THROW(phi::errors::Unavailable(
       "Reshard is not supported in this version of Paddle. Try to recompile it "
-      "with WITH_DISTRIBTUE=ON and reinstall this package."));
+      "with WITH_DISTRIBUTE=ON and reinstall this package."));
   return paddle::Tensor();
 #endif
 }
@@ -99,7 +99,7 @@ ReshardGradNode::operator()(
 #else
   PADDLE_THROW(phi::errors::Unavailable(
       "ReshardGrad is not supported in this version of Paddle. Try to "
-      "recompile it with WITH_DISTRIBTUE=ON and reinstall this package."));
+      "recompile it with WITH_DISTRIBUTE=ON and reinstall this package."));
   return paddle::small_vector<std::vector<paddle::Tensor>,
                               egr::kSlotSmallVectorSize>(1);
 #endif
4 changes: 2 additions & 2 deletions paddle/fluid/eager/auto_code_generator/eager_generator.cc
@@ -586,7 +586,7 @@ static bool CheckOpProto(proto::OpProto* op_proto) {
   }
   const std::string& op_type = op_proto->type();
 
-  // Skip ooerator which is not inherit form OperatorWithKernel, like while,
+  // Skip operator which is not inherit form OperatorWithKernel, like while,
   // since only OperatorWithKernel can run in dygraph mode.
   auto& all_kernels = paddle::framework::OperatorWithKernel::AllOpKernels();
   if (!all_kernels.count(op_type) &&
@@ -2702,7 +2702,7 @@ static std::string GenerateGradNodeCCContents(
   // This is a Copy
   auto op_base_infos = bwd_info.GetOpBaseInfos();
 
-  /* Special Case: ops such as sum_grad_op is implemented abnormaly,
+  /* Special Case: ops such as sum_grad_op is implemented abnormally,
      where it unpacked duplicable GradX and created one OpBase
      corresponds to each member of GradX[i]
   */
10 changes: 5 additions & 5 deletions paddle/fluid/eager/custom_operator/custom_operator_utils.cc
@@ -213,7 +213,7 @@ static std::vector<std::vector<phi::DDim>> RunInferShapeFunc(
"Custom operator only supports `paddle::Vec(...)` inputs and "
"cannot support `paddle::Vec(...)` output without setting "
"InplaceMap. If you have to use `paddle::Vec(...)` output, "
"please indicate it by setting InplaceMap manully."));
"please indicate it by setting InplaceMap manually."));
std::vector<phi::DDim> shapes;
auto duplicable_input_pair = ctx.InputRangeAt(inplace_reverse_map[i]);
for (size_t j = duplicable_input_pair.first;
@@ -570,7 +570,7 @@ std::vector<std::vector<phi::DDim>> RunInferShapeFn(
         out_dims.size(),
         ctx.OutputRange().size(),
         phi::errors::InvalidArgument(
-            "Custome op infer_shape return size should be %d, but got %d.",
+            "Custom op infer_shape return size should be %d, but got %d.",
             ctx.OutputRange().size(),
             out_dims.size()));

@@ -603,7 +603,7 @@ std::vector<std::vector<phi::DataType>> RunInferDtypeFn(
       out_dtypes.size(),
       ctx.OutputRange().size(),
       phi::errors::InvalidArgument(
-          "Custome op infer_dtype return size should be %d, but got %d.",
+          "Custom op infer_dtype return size should be %d, but got %d.",
           ctx.OutputRange().size(),
           out_dtypes.size()));
   return out_dtypes;
@@ -677,15 +677,15 @@ std::
     PADDLE_ENFORCE_EQ(
         out_dim.size(),
         pair.second - pair.first,
-        phi::errors::InvalidArgument("custome op infer_shape result[%d]'s "
+        phi::errors::InvalidArgument("custom op infer_shape result[%d]'s "
                                      "size should be %d, but got %d.",
                                      i,
                                      pair.second - pair.first,
                                      out_dim.size()));
     PADDLE_ENFORCE_EQ(
         out_dtype.size(),
         pair.second - pair.first,
-        phi::errors::InvalidArgument("custome op infer_shape result[%d]'s "
+        phi::errors::InvalidArgument("custom op infer_shape result[%d]'s "
                                      "size should be %d, but got %d.",
                                      i,
                                      pair.second - pair.first,
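
The checks being reworded here enforce the custom-op contract: infer_shape and infer_dtype must return exactly one entry per declared output, or the PADDLE_ENFORCE_EQ calls above fire. A minimal sketch of a custom op satisfying that contract, following the documented paddle/extension.h registration style; the op name and the identity shape/dtype rules are illustrative assumptions.

    #include "paddle/extension.h"

    // One declared output ("Out"), so each infer fn returns exactly one entry.
    std::vector<paddle::Tensor> IdentityKernel(const paddle::Tensor& x) {
      return {x};
    }

    std::vector<std::vector<int64_t>> IdentityInferShape(
        std::vector<int64_t> x_shape) {
      return {x_shape};  // one shape per output
    }

    std::vector<paddle::DataType> IdentityInferDtype(paddle::DataType x_dtype) {
      return {x_dtype};  // one dtype per output
    }

    PD_BUILD_OP(custom_identity)
        .Inputs({"X"})
        .Outputs({"Out"})
        .SetKernelFn(PD_KERNEL(IdentityKernel))
        .SetInferShapeFn(PD_INFER_SHAPE(IdentityInferShape))
        .SetInferDtypeFn(PD_INFER_DTYPE(IdentityInferDtype));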