@@ -146,26 +146,6 @@ Sigmoid Activation
 
 )DOC";
 
-UNUSED constexpr char SiluDoc[] = R"DOC(
-Silu Activation Operator
-
-$$out = x * \\frac{1}{1 + e^{-x}}$$
-)DOC";
-
-UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
-Logsigmoid Activation Operator
-
-$$out = \\log \\frac{1}{1 + e^{-x}}$$
-
-)DOC";
-
-UNUSED constexpr char Expm1Doc[] = R"DOC(
-Expm1 Operator. Computes expm1 of x element-wise with a natural number :math:`e` as the base.
-
-$$out = e^x - 1$$
-
-)DOC";
-
 UNUSED constexpr char ReluDoc[] = R"DOC(
 Relu Activation Operator.
 
@@ -206,43 +186,6 @@ Please make sure input is legal in case of numeric errors.
 
 )DOC";
 
-UNUSED constexpr char CeilDoc[] = R"DOC(
-Ceil Operator. Computes ceil of x element-wise.
-
-.. math::
-    out = \left \lceil x \right \rceil
-
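-For example, ceil(1.2) = 2, ceil(-0.9) = 0, and integer inputs stay unchanged.
-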
-)DOC";
-
-UNUSED constexpr char FloorDoc[] = R"DOC(
-Floor Activation Operator. Computes floor of x element-wise.
-
-$$out = \\lfloor x \\rfloor$$
-
-)DOC";
-
-UNUSED constexpr char RoundDoc[] = R"DOC(
-The OP rounds the values in the input to the nearest integer value.
-
-.. code-block:: text
-
-  input:
-    x.shape = [4]
-    x.data = [1.2, -0.9, 3.4, 0.9]
-
-  output:
-    out.shape = [4]
-    out.data = [1., -1., 3., 1.]
-
-)DOC";
-
-UNUSED constexpr char ReciprocalDoc[] = R"DOC(
-Reciprocal Activation Operator.
-
-$$out = \\frac{1}{x}$$
-
-)DOC";
-
 UNUSED constexpr char LogDoc[] = R"DOC(
 Log Activation Operator.
 
@@ -252,33 +195,6 @@ Natural logarithm of x.
 
 )DOC";
 
-UNUSED constexpr char Log2Doc[] = R"DOC(
-Log2 Activation Operator.
-
-$$out = \log_2 x$$
-
-Base-2 logarithm of x.
-
-)DOC";
-
-UNUSED constexpr char Log10Doc[] = R"DOC(
-Log10 Activation Operator.
-
-$$out = \log_{10} x$$
-
-Base-10 logarithm of x.
-
-)DOC";
-
-UNUSED constexpr char Log1pDoc[] = R"DOC(
-Log1p Activation Operator.
-
-$$out = \ln(x + 1)$$
-
-Natural logarithm of x plus 1.
-
-)DOC";
-
 UNUSED constexpr char SquareDoc[] = R"DOC(
 The OP squares each element of the input.
 
@@ -356,28 +272,6 @@ class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
   }
 };
 
-class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput("X", "Input of HardShrink operator");
-    AddOutput("Out", "Output of HardShrink operator");
-    AddAttr<float>("threshold",
-                   "The value of threshold for HardShrink. [default: 0.5]")
-        .SetDefault(0.5f);
-    AddComment(R"DOC(
-:strong:`HardShrink activation operator`
-
-.. math::
-    out = \begin{cases}
-            x, \text{if } x > \lambda \\
-            x, \text{if } x < -\lambda \\
-            0, \text{otherwise}
-          \end{cases}
-
-)DOC");
-  }
-};
-
 class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
  public:
   void Make() override {
@@ -454,39 +348,6 @@ class ELUGradOpMaker : public framework::SingleGradOpMaker<T> {
   }
 };
 
-class LogitOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput("X", "Input of Logit operator");
-    AddOutput("Out", "Output of Logit operator");
-    AddAttr<float>("eps",
-                   "(float, default 1e-6f) the epsilon for input clamp bound")
-        .SetDefault(1e-6f);
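-    // "eps" is the clamp bound for the input; clamping (presumably to
-    // [eps, 1 - eps]) keeps the log in the formula below finite at 0 and 1.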
-    AddComment(R"DOC(
-Logit Operator.
-
-This function is defined as follows:
-$ logit = \ln\left( \frac{x}{1 - x} \right) $
-
-)DOC");
-  }
-};
-
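-// LogitGradOpMaker wires the backward op: logit_grad reads X and the gradient
-// of Out, writes the gradient of X, and forwards the forward op's attributes.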
-template <typename T>
-class LogitGradOpMaker : public framework::SingleGradOpMaker<T> {
- public:
-  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
-
- protected:
-  void Apply(GradOpPtr<T> grad_op) const override {
-    grad_op->SetType("logit_grad");
-    grad_op->SetInput("X", this->Input("X"));
-    grad_op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
-    grad_op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
-    grad_op->SetAttrMap(this->Attrs());
-  }
-};
-
 class CELUOpMaker : public framework::OpProtoAndCheckerMaker {
  public:
   void Make() override {
@@ -591,31 +452,6 @@ class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
   }
 };
 
-class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput("X", "An N-D Tensor with data type float32, float64.");
-    AddOutput("Out", "A Tensor with the same shape as input.");
-    AddAttr<float>("slope",
-                   "The slope of the linear approximation of sigmoid. Its "
-                   "value MUST BE positive. Default is 0.2.")
-        .SetDefault(0.2f);
-    AddAttr<float>(
-        "offset",
-        "The offset of the linear approximation of sigmoid. Default is 0.5.")
-        .SetDefault(0.5f);
-    AddComment(R"DOC(
-HardSigmoid Activation Operator.
-
-A 3-part piecewise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
-which is much faster than sigmoid.
-
-$$out = \max(0, \min(1, slope * x + offset))$$
-
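-With the defaults (slope = 0.2, offset = 0.5), x = -3 maps to
-max(0, min(1, -0.1)) = 0, x = 0 maps to 0.5, and x = 3 maps to min(1, 1.1) = 1.
-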
-)DOC");
-  }
-};
-
 class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
  public:
   void Make() override {
@@ -684,22 +520,12 @@ It is recommended to use the defaults for this activation.
 };
 
 REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
-REGISTER_ACTIVATION_OP_MAKER(Silu, SiluDoc);
-REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
-REGISTER_ACTIVATION_OP_MAKER(Expm1, Expm1Doc);
 REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
 REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
 REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
 REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
 REGISTER_ACTIVATION_OP_MAKER(Rsqrt, RsqrtDoc);
-REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
-REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
-REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
-REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
 REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
-REGISTER_ACTIVATION_OP_MAKER(Log2, Log2Doc);
-REGISTER_ACTIVATION_OP_MAKER(Log10, Log10Doc);
-REGISTER_ACTIVATION_OP_MAKER(Log1p, Log1pDoc);
 REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
 REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
 
@@ -1093,73 +919,6 @@ DECLARE_INPLACE_OP_INFERER(ActivationDoubleGradOpInplaceInferer,
 DECLARE_INPLACE_OP_INFERER(ActivationTripleGradOpInplaceInferer,
                            {"DDX", "D_DOut"});
 
-class LogitOp : public framework::OperatorWithKernel {
- public:
-  LogitOp(const std::string& type,
-          const framework::VariableNameMap& inputs,
-          const framework::VariableNameMap& outputs,
-          const framework::AttributeMap& attrs)
-      : OperatorWithKernel(type, inputs, outputs, attrs) {}
-
-  void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("X"),
-                      true,
-                      platform::errors::InvalidArgument(
-                          "Input(%s) of LogitOp should not be null.", "X"));
-    PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"),
-                      true,
-                      platform::errors::InvalidArgument(
-                          "Output(%s) of LogitOp should not be null.", "Out"));
-
-    ctx->ShareDim("X", /*->*/ "Out");
-    ctx->ShareLoD("X", /*->*/ "Out");
-  }
-
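-  // The kernel's data type follows input X; library and layout are left at
-  // their plain / any-layout defaults.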
- protected:
-  framework::OpKernelType GetExpectedKernelType(
-      const framework::ExecutionContext& ctx) const override {
-    framework::LibraryType library{framework::LibraryType::kPlain};
-    phi::DataLayout layout = phi::DataLayout::kAnyLayout;
-    auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "X");
-
-    return framework::OpKernelType(data_type, ctx.GetPlace(), layout, library);
-  }
-};
-
-class LogitGradOp : public framework::OperatorWithKernel {
- public:
-  using framework::OperatorWithKernel::OperatorWithKernel;
-
-  void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE_EQ(
-        ctx->HasInput(framework::GradVarName("Out")),
-        true,
-        platform::errors::InvalidArgument(
-            "Input(%s) of LogitGradOp should not be null.", "DOut"));
-    PADDLE_ENFORCE_EQ(ctx->HasInput("X"),
-                      true,
-                      platform::errors::InvalidArgument(
-                          "Input(%s) of LogitGradOp should not be null.", "X"));
-    PADDLE_ENFORCE_EQ(
-        ctx->HasOutput(framework::GradVarName("X")),
-        true,
-        platform::errors::InvalidArgument(
-            "Output(%s) of LogitGradOp should not be null.", "DX"));
-    auto x_grad_name = framework::GradVarName("X");
-    ctx->SetOutputDim(x_grad_name, ctx->GetInputDim("X"));
-    ctx->ShareLoD("X", /*->*/ x_grad_name);
-  }
-
- protected:
-  framework::OpKernelType GetExpectedKernelType(
-      const framework::ExecutionContext& ctx) const override {
-    framework::LibraryType library{framework::LibraryType::kPlain};
-    phi::DataLayout layout = phi::DataLayout::kAnyLayout;
-    auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "X");
-    return framework::OpKernelType(data_type, ctx.GetPlace(), layout, library);
-  }
-};
-
 template <typename T>
 class PowGradOpMaker : public framework::SingleGradOpMaker<T> {
  public:
@@ -1273,10 +1032,6 @@ REGISTER_ACTIVATION_OP(thresholded_relu,
                         ThresholdedReluFunctor,
                         ThresholdedReluGradFunctor);
 REGISTER_ACTIVATION_OP(relu6, Relu6, Relu6Functor, Relu6GradFunctor);
-REGISTER_ACTIVATION_OP(hard_shrink,
-                       HardShrink,
-                       HardShrinkFunctor,
-                       HardShrinkGradFunctor);
 REGISTER_ACTIVATION_OP(softshrink,
                        SoftShrink,
                        SoftShrinkFunctor,
@@ -1285,42 +1040,21 @@ REGISTER_ACTIVATION_OP(tanh_shrink,
                        TanhShrink,
                        TanhShrinkFunctor,
                        TanhShrinkGradFunctor);
-REGISTER_ACTIVATION_OP(silu, Silu, SiluFunctor, SiluGradFunctor);
 REGISTER_ACTIVATION_OP(softsign,
                        Softsign,
                        SoftsignFunctor,
                        SoftsignGradFunctor);
-REGISTER_ACTIVATION_OP(hard_sigmoid,
-                       HardSigmoid,
-                       HardSigmoidFunctor,
-                       HardSigmoidGradFunctor);
-REGISTER_ACTIVATION_OP(logsigmoid,
-                       LogSigmoid,
-                       LogSigmoidFunctor,
-                       LogSigmoidGradFunctor);
-REGISTER_ACTIVATION_OP(expm1, Expm1, Expm1Functor, Expm1GradFunctor);
 REGISTER_ACTIVATION_OP(softplus,
                        Softplus,
                        SoftplusFunctor,
                        SoftplusGradFunctor);
 REGISTER_ACTIVATION_OP(mish, Mish, MishFunctor, MishGradFunctor);
 REGISTER_ACTIVATION_OP(stanh, STanh, STanhFunctor, STanhGradFunctor);
-REGISTER_ACTIVATION_OP(reciprocal,
-                       Reciprocal,
-                       ReciprocalFunctor,
-                       ReciprocalGradFunctor);
-
-REGISTER_ACTIVATION_OP(log2, Log2, Log2Functor, Log2GradFunctor);
-REGISTER_ACTIVATION_OP(log10, Log10, Log10Functor, Log10GradFunctor);
-REGISTER_ACTIVATION_OP(log1p, Log1p, Log1pFunctor, Log1pGradFunctor);
 REGISTER_ACTIVATION_OP(hard_swish,
                        HardSwish,
                        HardSwishFunctor,
                        HardSwishGradFunctor);
 REGISTER_ACTIVATION_OP(swish, Swish, SwishFunctor, SwishGradFunctor);
-REGISTER_ACTIVATION_OP(round, Round, RoundFunctor, ZeroGradFunctor);
-REGISTER_ACTIVATION_OP(floor, Floor, FloorFunctor, ZeroGradFunctor);
-REGISTER_ACTIVATION_OP(ceil, Ceil, CeilFunctor, ZeroGradFunctor);
 
 /* ========================== sigmoid register =============================
  */
@@ -1459,17 +1193,6 @@ REGISTER_OPERATOR(
 
 /* ========================================================================== */
 
-/* ======================== logit register ============================
-   */
-REGISTER_OPERATOR(logit,
-                  ops::LogitOp,
-                  ops::LogitOpMaker,
-                  ops::LogitGradOpMaker<paddle::framework::OpDesc>,
-                  ops::LogitGradOpMaker<paddle::imperative::OpBase>);
-REGISTER_OPERATOR(logit_grad, ops::LogitGradOp);
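-// Note: unlike the activations above, logit is registered with its own Op,
-// Maker, and GradOpMaker classes rather than the REGISTER_ACTIVATION_OP macro.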
-
-/* ========================================================================== */
-
 /* ======================== celu register ============================
    */
 REGISTER_OPERATOR(