Skip to content

Commit 4794e8e

Browse files
authored
replace cross_entropy in python/paddle/fluid/tests/unittests/*/*.py except unittests/*.py (PaddlePaddle#48920)
1 parent 5bc27b6 commit 4794e8e

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

51 files changed: +147 additions, -63 deletions

python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,12 @@ def run_inference_pruning_test(
6060
def run_training_pruning_test(self, get_mask_gen_func, get_mask_check_func):
6161
with fluid.program_guard(self.main_program, self.startup_program):
6262
loss = paddle.mean(
63-
fluid.layers.cross_entropy(input=self.predict, label=self.label)
63+
paddle.nn.functional.cross_entropy(
64+
input=self.predict,
65+
label=self.label,
66+
reduction='none',
67+
use_softmax=False,
68+
)
6469
)
6570
optimizer = paddle.incubate.asp.decorate(
6671
fluid.optimizer.SGD(learning_rate=0.01)

python/paddle/fluid/tests/unittests/asp/test_asp_customized_pruning.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -269,7 +269,12 @@ def test_inference_pruning(self):
269269
def test_training_pruning(self):
270270
with fluid.program_guard(self.main_program, self.startup_program):
271271
loss = paddle.mean(
272-
fluid.layers.cross_entropy(input=self.predict, label=self.label)
272+
paddle.nn.functional.cross_entropy(
273+
input=self.predict,
274+
label=self.label,
275+
reduction='none',
276+
use_softmax=False,
277+
)
273278
)
274279
optimizer = sparsity.decorate(
275280
fluid.optimizer.SGD(learning_rate=0.01)

python/paddle/fluid/tests/unittests/asp/test_asp_optimize_static.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,12 @@ def build_model():
4545
with fluid.program_guard(self.main_program, self.startup_program):
4646
self.img, self.label, predict = build_model()
4747
self.loss = paddle.mean(
48-
fluid.layers.cross_entropy(input=predict, label=self.label)
48+
paddle.nn.functional.cross_entropy(
49+
input=predict,
50+
label=self.label,
51+
reduction='none',
52+
use_softmax=False,
53+
)
4954
)
5055
self.optimizer = fluid.optimizer.SGD(learning_rate=0.01)
5156

python/paddle/fluid/tests/unittests/asp/test_asp_pruning_static.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,12 @@ def test_inference_pruning(self):
6565
def test_training_pruning(self):
6666
with fluid.program_guard(self.main_program, self.startup_program):
6767
loss = paddle.mean(
68-
fluid.layers.cross_entropy(input=self.predict, label=self.label)
68+
paddle.nn.functional.cross_entropy(
69+
input=self.predict,
70+
label=self.label,
71+
reduction='none',
72+
use_softmax=False,
73+
)
6974
)
7075
optimizer = paddle.incubate.asp.decorate(
7176
fluid.optimizer.SGD(learning_rate=0.01)

python/paddle/fluid/tests/unittests/asp/test_asp_save_load.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -146,7 +146,12 @@ def build_model():
146146
with fluid.program_guard(self.main_program, self.startup_program):
147147
self.img, self.label, predict = build_model()
148148
self.loss = paddle.mean(
149-
fluid.layers.cross_entropy(input=predict, label=self.label)
149+
paddle.nn.functional.cross_entropy(
150+
input=predict,
151+
label=self.label,
152+
reduction='none',
153+
use_softmax=False,
154+
)
150155
)
151156
self.optimizer = fluid.optimizer.SGD(learning_rate=0.01)
152157
self.optimizer = paddle.incubate.asp.decorate(self.optimizer)

python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_sharding.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,12 @@ def net(self, main_prog, startup_prog):
6060
fc_3 = fluid.layers.fc(input=fc_2, size=64, act='tanh')
6161
fc_4 = fluid.layers.fc(input=fc_3, size=64, act='tanh')
6262
prediction = fluid.layers.fc(input=fc_4, size=2, act='softmax')
63-
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
63+
cost = paddle.nn.functional.cross_entropy(
64+
input=prediction,
65+
label=input_y,
66+
reduction='none',
67+
use_softmax=False,
68+
)
6469
avg_cost = paddle.mean(x=cost)
6570

6671
dist_strategy = paddle.distributed.fleet.DistributedStrategy()

python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_static.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,12 @@ def net(self, main_prog, startup_prog):
4949

5050
fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
5151
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
52-
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
52+
cost = paddle.nn.functional.cross_entropy(
53+
input=prediction,
54+
label=input_y,
55+
reduction='none',
56+
use_softmax=False,
57+
)
5358
avg_cost = paddle.mean(x=cost)
5459

5560
strategy = paddle.distributed.fleet.DistributedStrategy()
@@ -122,7 +127,12 @@ def net(self, main_prog, startup_prog):
122127

123128
fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
124129
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
125-
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
130+
cost = paddle.nn.functional.cross_entropy(
131+
input=prediction,
132+
label=input_y,
133+
reduction='none',
134+
use_softmax=False,
135+
)
126136
avg_cost = paddle.mean(x=cost)
127137

128138
strategy = paddle.distributed.fleet.DistributedStrategy()

python/paddle/fluid/tests/unittests/dygraph_to_static/ifelse_simple_func.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,9 @@ def add_fn(x):
2222

2323

2424
def loss_fn(x, lable):
25-
loss = fluid.layers.cross_entropy(x, lable)
25+
loss = paddle.nn.functional.cross_entropy(
26+
x, lable, reduction='none', use_softmax=False
27+
)
2628
return loss
2729

2830

@@ -45,7 +47,9 @@ def dyfunc_with_if_else(x_v, label=None):
4547
x_v = x_v + 1
4648
# plain if in python
4749
if label is not None:
48-
loss = fluid.layers.cross_entropy(x_v, label)
50+
loss = paddle.nn.functional.cross_entropy(
51+
x_v, label, reduction='none', use_softmax=False
52+
)
4953
return loss
5054
return x_v
5155

@@ -302,7 +306,9 @@ def if_with_and_or(x_v, label=None):
302306
x_v = x_v + 1
303307

304308
if label is not None:
305-
loss = fluid.layers.cross_entropy(x_v, label)
309+
loss = paddle.nn.functional.cross_entropy(
310+
x_v, label, reduction='none', use_softmax=False
311+
)
306312
return loss
307313
return x_v
308314

python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,9 @@ def forward(self, inputs, label=None):
107107
x = self.inference(inputs)
108108
if label is not None:
109109
acc = paddle.static.accuracy(input=x, label=label)
110-
loss = fluid.layers.cross_entropy(x, label)
110+
loss = paddle.nn.functional.cross_entropy(
111+
x, label, reduction='none', use_softmax=False
112+
)
111113
avg_loss = paddle.mean(loss)
112114

113115
return x, acc, avg_loss

python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,9 @@ def set_args_1(__args):
109109

110110
def true_fn_1():
111111
nonlocal __return_0, __return_1, __return_value_0, loss
112-
loss = fluid.layers.cross_entropy(x_v, label)
112+
loss = paddle.nn.functional.cross_entropy(
113+
x_v, label, reduction='none', use_softmax=False
114+
)
113115
__return_0 = _jst.create_bool_as_type(label is not None, True)
114116
__return_value_0 = loss
115117
return
@@ -178,7 +180,9 @@ def set_args_3(__args):
178180

179181
def true_fn_3():
180182
nonlocal __return_2, __return_3, __return_value_1, loss
181-
loss = fluid.layers.cross_entropy(x_v, label)
183+
loss = paddle.nn.functional.cross_entropy(
184+
x_v, label, reduction='none', use_softmax=False
185+
)
182186
__return_2 = _jst.create_bool_as_type(label is not None, True)
183187
__return_value_1 = loss
184188
return

0 commit comments

Comments (0)