diff --git a/paddle/fluid/eager/auto_code_generator/eager_generator.cc b/paddle/fluid/eager/auto_code_generator/eager_generator.cc
index 7c26bcf7effae5..adeacdd1958211 100644
--- a/paddle/fluid/eager/auto_code_generator/eager_generator.cc
+++ b/paddle/fluid/eager/auto_code_generator/eager_generator.cc
@@ -23,7 +23,7 @@
 #include "paddle/fluid/framework/operator.h"
 #include "paddle/fluid/framework/program_desc.h"
 #include "paddle/fluid/framework/variable.h"
-#include "paddle/fluid/pybind/op_function_generator.h"
+#include "paddle/fluid/pybind/eager_op_function_generator.h"
 #include "paddle/fluid/pybind/pybind.h"
 #include "paddle/fluid/string/string_helper.h"
diff --git a/paddle/fluid/pybind/CMakeLists.txt b/paddle/fluid/pybind/CMakeLists.txt
index eebb8dd84349ef..55e676f62f77da 100755
--- a/paddle/fluid/pybind/CMakeLists.txt
+++ b/paddle/fluid/pybind/CMakeLists.txt
@@ -295,8 +295,6 @@ if(WITH_PYTHON)
     list(APPEND OP_FUNCTION_GENERETOR_DEPS ${PYTHON_LIBRARIES})
   endif()
 
-  add_executable(op_function_generator op_function_generator.cc)
-  target_link_libraries(op_function_generator ${OP_FUNCTION_GENERETOR_DEPS})
   add_executable(eager_legacy_op_function_generator
                  eager_legacy_op_function_generator.cc)
   target_link_libraries(eager_legacy_op_function_generator
@@ -308,11 +306,9 @@ if(WITH_PYTHON)
   endif()
   get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
-  target_link_libraries(op_function_generator ${os_dependency_modules})
   target_link_libraries(eager_legacy_op_function_generator
                         ${os_dependency_modules})
   if(WITH_ROCM)
-    target_link_libraries(op_function_generator ${ROCM_HIPRTC_LIB})
     target_link_libraries(eager_legacy_op_function_generator
                           ${ROCM_HIPRTC_LIB})
     target_link_libraries(kernel_signature_generator ${ROCM_HIPRTC_LIB})
   endif()
@@ -346,7 +342,6 @@ if(WITH_PYTHON)
     "${PADDLE_SOURCE_DIR}/paddle/fluid/pybind/"
     "${CODE_GEN_SPLIT_FILE_COUNT}")
 
-  set(OP_IMPL_DEPS op_function_generator)
   set(EAGER_OP_IMPL_DEPS eager_legacy_op_function_generator
                          eager_python_c_codegen)
 
@@ -357,25 +352,6 @@ if(WITH_PYTHON)
       set(op_impl_path "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}")
     endif()
 
-    file(
-      WRITE
-      ${CMAKE_BINARY_DIR}/paddle/fluid/pybind/op_function_generator_retry.bat
-      ""
-      "set build_times=1\n"
-      ":retry\n"
-      "ECHO op_function_generator run %build_times% time\n"
-      "taskkill /f /im op_function_generator.exe 2>NUL\n"
-      "${op_impl_path}/op_function_generator.exe ${op_function_output_path} ${CODE_GEN_SPLIT_FILE_COUNT}\n"
-      "if %ERRORLEVEL% NEQ 0 (\n"
-      "  set /a build_times=%build_times%+1\n"
-      "  if %build_times% GEQ 10 (\n"
-      "    exit /b 1\n"
-      "  ) else (\n"
-      "    goto :retry\n"
-      "  )\n"
-      ")\n"
-      "exit /b 0")
-
     file(
       WRITE
       ${CMAKE_BINARY_DIR}/paddle/fluid/pybind/eager_legacy_op_function_generator_retry.bat
@@ -441,8 +417,6 @@ if(WITH_PYTHON)
 
     add_custom_command(
       OUTPUT op_function
-      COMMAND
-        ${CMAKE_BINARY_DIR}/paddle/fluid/pybind/op_function_generator_retry.bat
       COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_impl_file1}
               ${impl_file1}
       COMMENT "copy_if_different ${tmp_impl_file1} to ${impl_file1}"
@@ -526,10 +500,6 @@ if(WITH_PYTHON)
     endif()
     add_custom_command(
       OUTPUT op_function
-      COMMAND
-        ${CMAKE_COMMAND} -E env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}:."
-        "${CMAKE_CURRENT_BINARY_DIR}/op_function_generator"
-        "${op_function_output_path}" "${CODE_GEN_SPLIT_FILE_COUNT}"
      COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_impl_file1}
              ${impl_file1}
      COMMENT "copy_if_different ${tmp_impl_file1} to ${impl_file1}"
@@ -570,7 +540,6 @@ if(WITH_PYTHON)
         VERBATIM)
     endif()
   endif()
-  add_custom_target(op_function_generator_cmd ALL DEPENDS op_function)
   if(NOT ((NOT WITH_PYTHON) AND ON_INFER))
     add_custom_target(eager_legacy_op_function_generator_cmd ALL
                       DEPENDS ${eager_impl_file})
@@ -647,6 +616,5 @@ if(WITH_PYTHON)
 
   get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
   target_link_libraries(${SHARD_LIB_NAME} ${os_dependency_modules})
-  add_dependencies(${SHARD_LIB_NAME} op_function_generator_cmd)
 
 endif()
diff --git a/paddle/fluid/pybind/eager_legacy_op_function_generator.cc b/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
index bc5eeeea875cb0..9d0a9f2140912d 100644
--- a/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
+++ b/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
@@ -31,7 +31,7 @@
 #ifdef PADDLE_WITH_ASCEND_CL
 #include "paddle/fluid/framework/fleet/ascend_wrapper.h"
 #endif
-#include "paddle/fluid/pybind/op_function_generator.h"
+#include "paddle/fluid/pybind/eager_op_function_generator.h"
 
 // phi
 #include "paddle/phi/kernels/declarations.h"
diff --git a/paddle/fluid/pybind/op_function_generator.h b/paddle/fluid/pybind/eager_op_function_generator.h
similarity index 100%
rename from paddle/fluid/pybind/op_function_generator.h
rename to paddle/fluid/pybind/eager_op_function_generator.h
diff --git a/paddle/fluid/pybind/op_function_generator.cc b/paddle/fluid/pybind/op_function_generator.cc
deleted file mode 100644
index f2d784f6d5e86d..00000000000000
--- a/paddle/fluid/pybind/op_function_generator.cc
+++ /dev/null
@@ -1,602 +0,0 @@
-// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/fluid/pybind/op_function_generator.h"
-
-#include <algorithm>
-#include <fstream>
-#include <iostream>
-#include <string>
-#ifndef _WIN32
-#include <unistd.h>
-#endif
-
-#include "paddle/fluid/framework/op_info.h"
-#include "paddle/fluid/framework/op_registry.h"
-#include "paddle/fluid/framework/operator.h"
-#include "paddle/fluid/framework/variable.h"
-#include "paddle/fluid/pybind/pybind.h"
-#include "paddle/fluid/string/string_helper.h"
-#ifdef PADDLE_WITH_ASCEND_CL
-#include "paddle/fluid/framework/fleet/ascend_wrapper.h"
-#endif
-
-// phi
-#include "paddle/phi/kernels/declarations.h"
-
-static std::string LegalizeVarName(const std::string& var_name) {
-  std::string ret = var_name;
-  std::replace(ret.begin(), ret.end(), '@', '_');  // replace all '-' to '_'
-  return ret;
-}
-
-// NOTE(pangyoki): Inplace OP with duplicable input.
-// The set includes inplace ops that have duplicable input.
-// The first Varbase in input needs to be specified for the inplace strategy
-// and share Varbase with the output.
-std::set<std::string> inplace_op_duplicable_ins_set = {
-    "sum",
-};
-
-// clang-format off
-const char* OUT_INITIALIZER_TEMPLATE =
-    R"({"%s", {std::shared_ptr<imperative::VarBase>(new imperative::VarBase("auto_"+std::to_string(VarBaseUniqueNameID++)+"_"))}})";
-const char* OUT_DUPLICABLE_INITIALIZER_TEMPLATE = R"({"%s", ConstructDuplicableOutput(%s)})";
-
-const char* INPUT_INITIALIZER_TEMPLATE = R"({"%s", {%s}})";
-const char* INPUT_LIST_INITIALIZER_TEMPLATE = R"({"%s", %s})";
-
-const char* INPUT_INITIALIZER_TEMPLATE_WITH_NULL = R"(
-    if (%s != nullptr) {
-      ins["%s"] = {%s};
-    }
-)";
-
-const char* INPUT_INITIALIZER_TEMPLATE_WITH_NULL_LIST = R"(
-    if (%s.size() != 0) {
-      ins["%s"] = %s;
-    }
-)";
-
-const char* OUTPUT_INITIALIZER_TEMPLATE_WITH_NULL = R"(
-    outs["%s"] = {%s};
-)";
-
-const char* OUTPUT_INITIALIZER_TEMPLATE_WITH_NULL_LIST = R"(
-    outs["%s"] = %s;
-)";
-// if inputs is list, no need {}
-const char* ARG_OUT_NUM = R"(%sNum)";
-const char* ARG_OUT_NUM_TYPE = R"(size_t )";
-
-const char* IN_VAR_TYPE = R"(py::handle)";
-const char* IN_VAR_LIST_TYPE = R"(py::handle)";
-
-const char* OUT_VAR_TYPE = R"(std::shared_ptr<imperative::VarBase>)";
-const char* OUT_VAR_LIST_TYPE = R"(std::vector<std::shared_ptr<imperative::VarBase>>)";
-
-const char* CAST_VAR_TEMPLATE = R"(
-    auto %s = GetVarBaseFromArgs(op_type, "%s", args, %d, %s);)";
-
-const char* CAST_VAR_LIST_TEMPLATE = R"(
-    auto %s = GetVarBaseListFromArgs(op_type, "%s", args, %d, %s);)";
-
-const char* CAST_SIZE_T_TEMPLATE = R"(
-    auto %s = GetUnsignedLongFromArgs(op_type, "%s", args, %d, %s);)";
-
-const char* ARG_TEMPLATE = R"(const %s& %s)";
-
-const char* RETURN_TUPLE_TYPE = R"(std::tuple<%s>)";
-const char* RETURN_TUPLE_TEMPLATE = R"(std::make_tuple(%s))";
-const char* RETURN_LIST_TEMPLATE = R"(outs["%s"])";
-const char* RETURN_TEMPLATE = R"(outs["%s"][0])";
-
-const char* FUNCTION_ARGS = R"(%s, const py::args& args)";
-const char* FUNCTION_ARGS_NO_INPUT = R"(const py::args& args)";
-
-const char* HANDLE_VIEW_BETWEEN_INPUT_AND_OUTPUT = R"(
-    if (ins.count("%s") && outs.count("%s")) {
-      HandleViewBetweenInputAndOutput(ins["%s"][0], outs["%s"][0]);
-    })";
-
-const char* INPLACE_DUPLICABLE_INPUT = R"([0])";
-
-const char* INPLACE_LEAF_ERROR_MESSAGE = R"(Leaf Var (%s) that doesn't stop gradient can't use inplace strategy.)";
-
-const char* INPLACE_STRATEGY_TEMPLATE =
-R"(
-    PADDLE_ENFORCE_EQ(
-      %s->IsLeaf() && !%s->OverridedStopGradient(), false,
-      platform::errors::InvalidArgument("%s", %s->Name()));
-    %s->BumpInplaceVersion();
-    VLOG(3) << "Var(" << %s->Name() << ") uses Inplace Strategy.";
-)";
-
-const char* INPLACE_MAPPING_TEMPLATE = R"({"%s", "%s"})";
-
-const char* OP_FUNCTION_TEMPLATE =
-R"(
-static PyObject * %s(PyObject *self, PyObject *args, PyObject *kwargs)
-{
-  PyThreadState *tstate = nullptr;
-  try
-  {
-    std::string op_type = "%s";
-    platform::RecordEvent op_type_record_event("%s pybind_imperative_func");
-    %s
-    framework::AttributeMap attrs;
-    ConstructAttrMapFromPyArgs(op_type, args, %d, PyTuple_GET_SIZE(args) , attrs);
-    tstate = PyEval_SaveThread();
-    %s
-    imperative::NameVarBaseMap outs = %s;
-    imperative::NameVarBaseMap ins = %s;
-    %s
-    imperative::GetCurrentTracer()->TraceOp(op_type, ins, outs, attrs, {%s});
-    PyEval_RestoreThread(tstate);
-    tstate = nullptr;
-    %s
-  }
-  catch(...) {
-    if (tstate) {
-      PyEval_RestoreThread(tstate);
-    }
-    ThrowExceptionToPython(std::current_exception());
-    return nullptr;
-  }
-})";
-
-const char* PYBIND_ITEM_TEMPLATE = R"(  {"%s", (PyCFunction)(void(*)(void))%s, METH_VARARGS | METH_KEYWORDS, "C++ interface function for %s in dygraph."},)";
-
-// clang-format on
-static inline bool FindInsMap(const std::string& op_type,
-                              const std::string& in_name) {
-  return op_ins_map[op_type].count(in_name);
-}
-
-static inline bool FindOutsMap(const std::string& op_type,
-                               const std::string& out_name) {
-  return op_outs_map[op_type].count(out_name);
-}
-
-static inline bool FindPassingOutsMap(const std::string& op_type,
-                                      const std::string& out_name) {
-  return op_passing_outs_map[op_type].count(out_name);
-}
-
-static inline bool FindDuplicableInputInplaceOpSet(const std::string& op_type) {
-  return inplace_op_duplicable_ins_set.count(op_type);
-}
-
-static inline bool FindViewOpMap(const std::string& op_type) {
-  return view_op_map.count(op_type);
-}
-
-static inline std::string TempName(const std::string& name) {
-  return name + '_';
-}
-
-std::string GenerateOpFunctionsBody(
-    const paddle::framework::proto::OpProto* op_proto,
-    std::string func_name,
-    bool use_inplace_strategy = false,
-    std::map<std::string, std::string> inplace_map = {}) {
-  auto& op_type = op_proto->type();
-  std::string input_args = "";
-  std::string ins_initializer = "{";
-  std::string ins_initializer_with_null = "";
-  std::string py_arg = "";
-  int arg_idx = 0;
-  int input_args_num = 0;
-  std::string ins_cast_str = "";
-  std::string view_strategy_str = "";
-  std::string inplace_strategy_str = "";
-  for (auto& input : op_proto->inputs()) {
-    auto& in_name = input.name();
-    // skip those dispensable inputs, like ResidualData in conv2d
-    if (input.dispensable() && !FindInsMap(op_type, in_name)) {
-      continue;
-    }
-    const auto in_type = input.duplicable() ? IN_VAR_LIST_TYPE : IN_VAR_TYPE;
-    auto input_arg = paddle::string::Sprintf(
-        ARG_TEMPLATE, in_type, LegalizeVarName(TempName(in_name)));
-    input_args += input_arg;
-    input_args += ",";
-    input_args_num++;
-    const auto in_cast_type =
-        input.duplicable() ? CAST_VAR_LIST_TEMPLATE : CAST_VAR_TEMPLATE;
-    auto dispensable = input.dispensable() ? "true" : "false";
-    ins_cast_str += paddle::string::Sprintf(in_cast_type,
-                                            LegalizeVarName(in_name),
-                                            in_name,
-                                            arg_idx++,
-                                            dispensable);
-
-    if (input.dispensable()) {
-      const auto in_template = input.duplicable()
-                                   ? INPUT_INITIALIZER_TEMPLATE_WITH_NULL_LIST
-                                   : INPUT_INITIALIZER_TEMPLATE_WITH_NULL;
-      ins_initializer_with_null +=
-          paddle::string::Sprintf(in_template,
-                                  LegalizeVarName(in_name),
-                                  in_name,
-                                  LegalizeVarName(in_name));
-    } else {
-      const auto in_template = input.duplicable()
-                                   ? INPUT_LIST_INITIALIZER_TEMPLATE
-                                   : INPUT_INITIALIZER_TEMPLATE;
-      ins_initializer += paddle::string::Sprintf(
-          in_template, in_name, LegalizeVarName(in_name));
-      ins_initializer += ",";
-    }
-  }
-  if (ins_initializer.back() == ',') {
-    ins_initializer.pop_back();
-  }
-  ins_initializer += "}";
-
-  if (!input_args.empty() && input_args.back() == ',') {
-    input_args.pop_back();
-  }
-
-  // Generate outs initializer
-  std::string outs_initializer = "{";
-  std::string outs_initializer_with_null = "";
-  std::string inplace_mapping_str = "";
-  std::string return_str = "";
-
-  int outs_num = 0;
-  for (auto& output : op_proto->outputs()) {
-    auto& out_name = output.name();
-
-    // skip those dispensable oututs
-    if (output.dispensable() && !FindOutsMap(op_type, out_name)) {
-      continue;
-    }
-    const auto out_type =
-        output.duplicable() ? OUT_VAR_LIST_TYPE : OUT_VAR_TYPE;
-    const auto return_template =
-        output.duplicable() ? RETURN_LIST_TEMPLATE : RETURN_TEMPLATE;
-
-    if (FindPassingOutsMap(op_type, out_name)) {
-      if (input_args != "") {
-        input_args += ",";
-      }
-      input_args += out_type;
-      input_args += LegalizeVarName(out_name);
-      input_args_num++;
-
-      if (output.dispensable()) {
-        const auto out_template =
-            output.duplicable() ? OUTPUT_INITIALIZER_TEMPLATE_WITH_NULL_LIST
-                                : OUTPUT_INITIALIZER_TEMPLATE_WITH_NULL;
-        outs_initializer_with_null +=
-            paddle::string::Sprintf(out_template, out_name, out_name);
-      } else {
-        const auto out_template = output.duplicable()
-                                      ? INPUT_LIST_INITIALIZER_TEMPLATE
-                                      : INPUT_INITIALIZER_TEMPLATE;
-        outs_initializer += paddle::string::Sprintf(
-            out_template, out_name, LegalizeVarName(out_name));
-        outs_initializer += ",";
-      }
-
-      const auto in_cast_type =
-          output.duplicable() ? CAST_VAR_LIST_TEMPLATE : CAST_VAR_TEMPLATE;
-      auto dispensable = output.dispensable() ? "true" : "false";
-      ins_cast_str += paddle::string::Sprintf(in_cast_type,
-                                              LegalizeVarName(out_name),
-                                              out_name,
-                                              arg_idx++,
-                                              dispensable);
-    } else if (use_inplace_strategy && inplace_map.count(out_name)) {
-      PADDLE_ENFORCE_NE(
-          inplace_map[out_name],
-          "",
-          paddle::platform::errors::InvalidArgument(
-              "Inplace op %s has no input corresponding to output %s.",
-              op_type,
-              out_name));
-
-      // TODO(pangyoki): Inplace op don't have duplicable output in temporary,
-      // so don't support duplicable output now.
-      const auto out_template = INPUT_INITIALIZER_TEMPLATE;
-
-      auto inplace_input_name = inplace_map[out_name];
-      inplace_mapping_str += paddle::string::Sprintf(
-          INPLACE_MAPPING_TEMPLATE, inplace_input_name, out_name);
-      inplace_mapping_str += ",";
-
-      // If inplace op has duplicable input, the first Varbase in input will
-      // share Varbase with output.
-      if (FindDuplicableInputInplaceOpSet(op_type)) {
-        inplace_input_name += INPLACE_DUPLICABLE_INPUT;
-      }
-
-      // Leaf Var that doesn't stop gradient can't use inplace strategy.
-      // Increase inplace_version.
-      inplace_strategy_str +=
-          paddle::string::Sprintf(INPLACE_STRATEGY_TEMPLATE,
-                                  LegalizeVarName(inplace_input_name),
-                                  LegalizeVarName(inplace_input_name),
-                                  INPLACE_LEAF_ERROR_MESSAGE,
-                                  LegalizeVarName(inplace_input_name),
-                                  LegalizeVarName(inplace_input_name),
-                                  LegalizeVarName(inplace_input_name));
-      outs_initializer += paddle::string::Sprintf(
-          out_template, out_name, LegalizeVarName(inplace_input_name));
-      outs_initializer += ",";
-    } else {
-      // There are few Operators that have duplicable output, like `Out` in
-      // split op. We need to specify the number of variables for the
-      // duplicable output, as the argument OutNum;
-      if (output.duplicable()) {
-        if (input_args != "") {
-          input_args += ",";
-        }
-        auto out_num_str =
-            paddle::string::Sprintf(ARG_OUT_NUM, LegalizeVarName(out_name));
-        input_args += ARG_OUT_NUM_TYPE;
-        input_args += out_num_str;
-        input_args_num++;
-        outs_initializer += paddle::string::Sprintf(
-            OUT_DUPLICABLE_INITIALIZER_TEMPLATE, out_name, out_num_str);
-
-        auto dispensable = output.dispensable() ? "true" : "false";
-        ins_cast_str += paddle::string::Sprintf(CAST_SIZE_T_TEMPLATE,
-                                                out_num_str,
-                                                out_num_str,
-                                                arg_idx++,
-                                                dispensable);
-      } else {
-        outs_initializer +=
-            paddle::string::Sprintf(OUT_INITIALIZER_TEMPLATE, out_name);
-      }
-      outs_initializer += ",";
-    }
-
-    return_str += paddle::string::Sprintf(return_template, out_name);
-    return_str += ",";
-    outs_num += 1;
-  }
-  if (outs_initializer.back() == ',') {
-    outs_initializer.pop_back();
-    return_str.pop_back();
-  }
-  outs_initializer += "}";
-  if (!inplace_mapping_str.empty() && inplace_mapping_str.back() == ',') {
-    inplace_mapping_str.pop_back();
-  }
-  if (!use_inplace_strategy && FindViewOpMap(op_type)) {
-    std::string viwe_input_name = view_op_map[op_type].first;
-    std::string viwe_output_name = view_op_map[op_type].second;
-    view_strategy_str +=
-        paddle::string::Sprintf(HANDLE_VIEW_BETWEEN_INPUT_AND_OUTPUT,
-                                viwe_input_name,
-                                viwe_output_name,
-                                viwe_input_name,
-                                viwe_output_name);
-  }
-  if (outs_num == 0) {
-    return_str = "RETURN_PY_NONE";
-  } else if (outs_num == 1) {
-    return_str = "return MakeReturnPyObject(" + return_str + ");";
-  } else {
-    return_str = "return MakeReturnPyObject(" +
-                 paddle::string::Sprintf(RETURN_TUPLE_TEMPLATE, return_str) +
-                 ");";
-  }
-  std::string function_args = "";
-  if (input_args == "") {
-    function_args = FUNCTION_ARGS_NO_INPUT;
-  } else {
-    function_args = paddle::string::Sprintf(FUNCTION_ARGS, input_args);
-  }
-
-  // generate op funtcion body
-  auto op_function_str = paddle::string::Sprintf(
-      OP_FUNCTION_TEMPLATE,
-      func_name,
-      op_type,
-      op_type,
-      ins_cast_str,
-      input_args_num,
-      inplace_strategy_str,
-      outs_initializer,
-      ins_initializer,
-      ins_initializer_with_null + outs_initializer_with_null +
-          view_strategy_str,
-      inplace_mapping_str,
-      return_str);
-
-  return op_function_str;
-}
-
-static std::vector<
    std::tuple<std::vector<std::string>, std::vector<std::string>>>
-GenerateOpFunctions(int split_count) {
-  auto& op_info_map = paddle::framework::OpInfoMap::Instance().map();
-  std::vector<std::tuple<std::vector<std::string>, std::vector<std::string>>>
-      result;
-  std::vector<std::string> op_function_list, bind_function_list;
-  auto& all_kernels = paddle::framework::OperatorWithKernel::AllOpKernels();
-
-  paddle::flat_hash_map<std::string, paddle::framework::OpInfo>
-      op_info_map_need_gen;
-  for (auto& pair : op_info_map) {
-    auto& op_info = pair.second;
-    auto op_proto = op_info.proto_;
-    if (op_proto == nullptr) {
-      continue;
-    }
-    auto& op_type = op_proto->type();
-    // Skip operator which is not inherit form OperatorWithKernel, like while,
-    // since only OperatorWithKernel can run in dygraph mode.
-    // if the phi lib contains op kernel, we still generate ops method
-    if (!all_kernels.count(op_type) &&
-        !phi::KernelFactory::Instance().HasCompatiblePhiKernel(op_type)) {
-      continue;
-    }
-    // Skip the sparse op
-    if (op_type.compare(0, 7, "sparse_") == 0 && op_type != "sparse_momentum" &&
-        op_type != "sparse_attention") {
-      continue;
-    }
-
-    op_info_map_need_gen.emplace(pair);
-  }
-
-  int cc_file_api_size = op_info_map_need_gen.size() / split_count;
-  if (op_info_map_need_gen.size() % split_count != 0) {
-    cc_file_api_size++;
-  }
-  int api_index = 0;
-  int file_index = 0;
-
-  for (auto& pair : op_info_map_need_gen) {
-    auto& op_info = pair.second;
-    auto op_proto = op_info.proto_;
-
-    auto& op_type = op_proto->type();
-
-    // NOTE(pangyoki): Inplace Strategy.
-    // In this case, output will reuse input varbase.
-    // Dygraph mode needs to be aligned with the in-place strategy in static
-    // mode, and the mapping relationships between output and input that have
-    // been defined in static graph mode should be used in dygraph mode.
-    // Find which ops need to use Inplace strategy in static graph mode, and get
-    // the mapping relationship between Inplace output and input.
-    auto& infer_inplace =
-        paddle::framework::OpInfoMap::Instance().Get(op_type).infer_inplace_;
-    std::map<std::string, std::string> inplace_map;
-    if (infer_inplace) {
-      auto in_to_outs = infer_inplace(true);
-      for (auto& inplace_pair : in_to_outs) {
-        inplace_map[inplace_pair.second] = inplace_pair.first;
-      }
-    }
-
-    std::string func_name = "imperative_" + op_type;
-    std::string op_function_str = GenerateOpFunctionsBody(op_proto, func_name);
-
-    // generate pybind item
-    auto bind_function_str = paddle::string::Sprintf(
-        PYBIND_ITEM_TEMPLATE, op_type, func_name, op_type);
-
-    op_function_list.emplace_back(std::move(op_function_str));
-    bind_function_list.emplace_back(std::move(bind_function_str));
-
-    if (infer_inplace) {
-      // Reuse Varbase Inplace OP: op_type_.
-      // The inplace OP needs a new implementation method.
-      std::string inplace_op_type = op_type + "_";
-      std::string inplace_func_name = "imperative_" + inplace_op_type;
-      std::string inplace_op_function_str = GenerateOpFunctionsBody(
-          op_proto, inplace_func_name, true, inplace_map);
-
-      // generate pybind item
-      auto inplace_bind_function_str =
-          paddle::string::Sprintf(PYBIND_ITEM_TEMPLATE,
-                                  inplace_op_type,
-                                  inplace_func_name,
-                                  inplace_op_type);
-
-      op_function_list.emplace_back(std::move(inplace_op_function_str));
-      bind_function_list.emplace_back(std::move(inplace_bind_function_str));
-    }
-
-    api_index++;
-    if (api_index / cc_file_api_size > file_index) {
-      file_index++;
-      result.push_back(std::make_tuple(op_function_list, bind_function_list));
-      op_function_list.clear();
-      bind_function_list.clear();
-    }
-  }
-
-  result.push_back(std::make_tuple(op_function_list, bind_function_list));
-
-  return result;
-}
-
-int main(int argc, char* argv[]) {
-  if (argc != 3) {
-    std::cerr << "argc must be 3" << std::endl;
-    return -1;
-  }
-
-#ifdef PADDLE_WITH_ASCEND_CL
-  auto ascend_ptr = paddle::framework::AscendInstance::GetInstance();
-  ascend_ptr->InitGEForUT();
-#endif
-
-  std::vector<std::string> headers{"\"paddle/fluid/imperative/tracer.h\"",
-                                   "\"paddle/fluid/platform/profiler.h\"",
-                                   "\"pybind11/numpy.h\"",
-                                   "\"pybind11/pybind11.h\"",
-                                   "\"pybind11/detail/common.h\"",
-                                   "\"paddle/fluid/pybind/eager_utils.h\"",
-                                   "\"paddle/fluid/pybind/op_function.h\"",
-                                   "<Python.h>"};
-
-  std::string path = argv[1];
-  int split_count = atoi(argv[2]);
-
-  auto op_funcs = GenerateOpFunctions(split_count);
-
-  for (size_t i = 0; i < op_funcs.size(); i++) {
-    std::ofstream out(path + "op_function" + std::to_string(i + 1) + ".cc.tmp",
-                      std::ios::out);
-
-    out << "#if defined(_MSC_VER)\n"
-        << "#include <BaseTsd.h>\n"
-        << "typedef SSIZE_T ssize_t;\n"
-        << "#endif\n";
-
-    for (auto& header : headers) {
-      out << "#include " + header + "\n";
-    }
-
-    out << "\n\n";
-
-    out << "namespace paddle {\n"
-        << "namespace pybind {\n\n";
-    out << "extern std::atomic<int> VarBaseUniqueNameID;\n";
-    out << paddle::string::join_strings(std::get<0>(op_funcs[i]), '\n');
-    out << "\n\n";
-
-    out << "static PyMethodDef ExtestMethods[] = {\n"
-        << paddle::string::join_strings(std::get<1>(op_funcs[i]), '\n')
-        << "\n  {nullptr,nullptr,0,nullptr}"
-        << "};\n\n";
-
-    out << "void BindOpFunctions" << i + 1 << "(pybind11::module *module) {\n"
-        << "  auto m = module->def_submodule(\"ops\");\n"
-        << "  if (PyModule_AddFunctions(m.ptr(), ExtestMethods) < 0) {\n"
-        << "    PADDLE_THROW(platform::errors::Fatal (\"Add functions to " "core.ops failed!\"));\n"
-        << "  }\n\n"
-        << "  InitOpsAttrTypeMap();"
-        << "}\n\n"
-        << "} // namespace pybind\n"
-        << "} // namespace paddle\n";
-
-    out.close();
-  }
-
-#ifdef PADDLE_WITH_ASCEND_CL
-  ge::GEFinalize();
-#endif
-
-  return 0;
-}
diff --git a/paddle/scripts/paddle_build.bat b/paddle/scripts/paddle_build.bat
index 0163946682400a..4e65a747794a3a 100644
--- a/paddle/scripts/paddle_build.bat
+++ b/paddle/scripts/paddle_build.bat
@@ -41,10 +41,8 @@ taskkill /f /im python.exe /t 2>NUL
 taskkill /f /im nvcc.exe /t 2>NUL
 taskkill /f /im cicc.exe /t 2>NUL
 taskkill /f /im ptxas.exe /t 2>NUL
-taskkill /f /im op_function_generator.exe /t 2>NUL
 taskkill /f /im eager_generator.exe /t 2>NUL
 taskkill /f /im eager_legacy_op_function_generator.exe /t 2>NUL
-wmic process where name="op_function_generator.exe" call terminate 2>NUL
 wmic process where name="eager_generator.exe" call terminate 2>NUL
 wmic process where name="eager_legacy_op_function_generator.exe" call terminate 2>NUL
 wmic process where name="cvtres.exe" call terminate 2>NUL
@@ -533,10 +531,8 @@ taskkill /f /im csc.exe /t 2>NUL
 taskkill /f /im nvcc.exe /t 2>NUL
 taskkill /f /im cicc.exe /t 2>NUL
 taskkill /f /im ptxas.exe /t 2>NUL
-taskkill /f /im op_function_generator.exe /t 2>NUL
 taskkill /f /im eager_generator.exe /t 2>NUL
 taskkill /f /im eager_legacy_op_function_generator.exe /t 2>NUL
-wmic process where name="op_function_generator.exe" call terminate 2>NUL
 wmic process where name="eager_generator.exe" call terminate 2>NUL
 wmic process where name="eager_legacy_op_function_generator.exe" call terminate 2>NUL
 wmic process where name="cmake.exe" call terminate 2>NUL
@@ -936,10 +932,8 @@ taskkill /f /im python.exe /t 2>NUL
 taskkill /f /im nvcc.exe /t 2>NUL
 taskkill /f /im cicc.exe /t 2>NUL
 taskkill /f /im ptxas.exe /t 2>NUL
-taskkill /f /im op_function_generator.exe /t 2>NUL
 taskkill /f /im eager_generator.exe /t 2>NUL
 taskkill /f /im eager_legacy_op_function_generator.exe /t 2>NUL
-wmic process where name="op_function_generator.exe" call terminate 2>NUL
 wmic process where name="eager_generator.exe" call terminate 2>NUL
 wmic process where name="eager_legacy_op_function_generator.exe" call terminate 2>NUL
 wmic process where name="cvtres.exe" call terminate 2>NUL