/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#ifdef PADDLE_WITH_ASCEND_CL
#include <map>
#include <memory>
#include <string>

#include "paddle/fluid/operators/cast_op.h"
#include "paddle/fluid/operators/npu_op_runner.h"

namespace paddle {
namespace operators {

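// Maps Paddle variable dtypes to the corresponding ACL dtypes expected by
// the Ascend "Cast" operator.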
static std::map<framework::proto::VarType::Type, aclDataType>
    DTYPE_2_ACL_DTYPE = {
        {framework::proto::VarType::BOOL, ACL_BOOL},
        {framework::proto::VarType::INT16, ACL_INT16},
        {framework::proto::VarType::INT32, ACL_INT32},
        {framework::proto::VarType::INT64, ACL_INT64},
        {framework::proto::VarType::FP16, ACL_FLOAT16},
        {framework::proto::VarType::FP32, ACL_FLOAT},
        {framework::proto::VarType::FP64, ACL_DOUBLE},
};

using Tensor = framework::Tensor;

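// Casts the input tensor "X" to the dtype named by the "out_dtype" attribute
// by dispatching the ACL "Cast" operator on the NPU stream.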
template <typename DeviceContext, typename T>
class CastNPUKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext& ctx) const override {
    auto* x = ctx.Input<Tensor>("X");
    int dtype = ctx.Attr<int>("out_dtype");

    auto* out = ctx.Output<Tensor>("Out");

    auto place = ctx.GetPlace();

    // Look up the ACL dtype; fail loudly rather than dereferencing end()
    // when an unsupported dtype is requested.
    auto iter = DTYPE_2_ACL_DTYPE.find(
        static_cast<framework::proto::VarType::Type>(dtype));
    PADDLE_ENFORCE_NE(iter, DTYPE_2_ACL_DTYPE.end(),
                      platform::errors::Unimplemented(
                          "The cast NPU kernel does not support dtype %d.",
                          dtype));
    aclDataType aclDtype = iter->second;

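    // Allocate the output buffer with the requested element type.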
    if (dtype == framework::proto::VarType::FP32) {
      out->mutable_data<float>(place);
    } else if (dtype == framework::proto::VarType::FP16) {
      out->mutable_data<paddle::platform::float16>(place);
    } else if (dtype == framework::proto::VarType::INT16) {
      out->mutable_data<int16_t>(place);
    } else if (dtype == framework::proto::VarType::INT32) {
      out->mutable_data<int32_t>(place);
    } else if (dtype == framework::proto::VarType::INT64) {
      out->mutable_data<int64_t>(place);
    } else if (dtype == framework::proto::VarType::FP64) {
      out->mutable_data<double>(place);
    } else if (dtype == framework::proto::VarType::BOOL) {
      out->mutable_data<bool>(place);
    }
| 67 | + |
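    // Obtain the device stream on which the ACL op will be launched.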
    auto stream =
        ctx.template device_context<paddle::platform::NPUDeviceContext>()
            .stream();
| 71 | + |
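    // Run the ACL "Cast" op; "dst_type" selects the target ACL dtype.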
    auto runner = NpuOpRunner("Cast", {*x}, {*out},
                              {{"dst_type", static_cast<int32_t>(aclDtype)}});
    runner.Run(stream);
  }
};
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

REGISTER_OP_NPU_KERNEL(
    cast,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, int16_t>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, int32_t>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, int64_t>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, bool>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, double>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext, float>,
    ops::CastNPUKernel<paddle::platform::NPUDeviceContext,
                       paddle::platform::float16>);
#endif