Skip to content

Commit 7dcefc4

Browse files
authored
[remove fluid.layers.cross_entropy] remove unit tests (part 1) (#48726)
* replace layers.cross_entropy with paddle.nn.functional.cross_entropy * fix args * fix codestyle
1 parent 364b0b0 commit 7dcefc4

18 files changed

+78
-25
lines changed

python/paddle/fluid/contrib/slim/tests/imperative_test_utils.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,9 @@ def train_lenet(lenet, reader, optimizer):
7070
label = paddle.to_tensor(y_data)
7171

7272
out = lenet(img)
73-
loss = fluid.layers.cross_entropy(out, label)
73+
loss = paddle.nn.functional.cross_entropy(
74+
out, label, reduction='none', use_softmax=False
75+
)
7476
avg_loss = paddle.mean(loss)
7577
avg_loss.backward()
7678

python/paddle/fluid/contrib/slim/tests/test_graph.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,9 @@ def conv_block():
4747
act="relu",
4848
)
4949
prediction = fluid.layers.fc(input=conv_pool_2, size=10, act='softmax')
50-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
50+
loss = paddle.nn.functional.cross_entropy(
51+
input=prediction, label=label, reduction='none', use_softmax=False
52+
)
5153
avg_loss = paddle.mean(loss)
5254
return [img, label], avg_loss
5355

python/paddle/fluid/contrib/slim/tests/test_imperative_qat.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,9 @@ def func_qat(self):
130130
label = fluid.dygraph.to_variable(y_data)
131131
out = lenet(img)
132132
acc = paddle.static.accuracy(out, label)
133-
loss = fluid.layers.cross_entropy(out, label)
133+
loss = paddle.nn.functional.cross_entropy(
134+
out, label, reduction='none', use_softmax=False
135+
)
134136
avg_loss = paddle.mean(loss)
135137
avg_loss.backward()
136138
adam.minimize(avg_loss)

python/paddle/fluid/contrib/slim/tests/test_imperative_qat_amp.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,9 @@ def model_train(self, model, batch_num=-1, batch_size=32, use_amp=False):
119119
with paddle.amp.auto_cast():
120120
out = model(img)
121121
acc = paddle.static.accuracy(out, label)
122-
loss = fluid.layers.cross_entropy(out, label)
122+
loss = paddle.nn.functional.cross_entropy(
123+
out, label, reduction='none', use_softmax=False
124+
)
123125
avg_loss = paddle.mean(loss)
124126
scaled_loss = scaler.scale(avg_loss)
125127
scaled_loss.backward()
@@ -129,7 +131,9 @@ def model_train(self, model, batch_num=-1, batch_size=32, use_amp=False):
129131
else:
130132
out = model(img)
131133
acc = paddle.static.accuracy(out, label)
132-
loss = fluid.layers.cross_entropy(out, label)
134+
loss = paddle.nn.functional.cross_entropy(
135+
out, label, reduction='none', use_softmax=False
136+
)
133137
avg_loss = paddle.mean(loss)
134138
avg_loss.backward()
135139

python/paddle/fluid/contrib/slim/tests/test_imperative_qat_lsq.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,9 @@ def func_qat(self):
171171
label = fluid.dygraph.to_variable(y_data)
172172
out = lenet(img)
173173
acc = paddle.static.accuracy(out, label)
174-
loss = fluid.layers.cross_entropy(out, label)
174+
loss = paddle.nn.functional.cross_entropy(
175+
out, label, reduction='none', use_softmax=False
176+
)
175177
avg_loss = paddle.mean(loss)
176178

177179
avg_loss.backward()

python/paddle/fluid/contrib/slim/tests/test_quantization_mkldnn_pass.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,9 @@ def conv_net(img, label):
4747
act="relu",
4848
)
4949
prediction = fluid.layers.fc(input=conv_pool_2, size=10, act='softmax')
50-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
50+
loss = paddle.nn.functional.cross_entropy(
51+
input=prediction, label=label, reduction='none', use_softmax=False
52+
)
5153
avg_loss = paddle.mean(loss)
5254
return avg_loss
5355

python/paddle/fluid/contrib/slim/tests/test_quantization_pass.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,9 @@ def linear_fc(num):
3939
hidden = data
4040
for _ in range(num):
4141
hidden = fluid.layers.fc(hidden, size=128, act='relu')
42-
loss = fluid.layers.cross_entropy(input=hidden, label=label)
42+
loss = paddle.nn.functional.cross_entropy(
43+
input=hidden, label=label, reduction='none', use_softmax=False
44+
)
4345
loss = paddle.mean(loss)
4446
return loss
4547

@@ -87,7 +89,9 @@ def conv_bn_layer(
8789
input=hidden, pool_size=2, pool_type='avg', pool_stride=2
8890
)
8991
fc = fluid.layers.fc(input=pool, size=10)
90-
loss = fluid.layers.cross_entropy(input=fc, label=label)
92+
loss = paddle.nn.functional.cross_entropy(
93+
input=fc, label=label, reduction='none', use_softmax=False
94+
)
9195
loss = paddle.mean(loss)
9296
return loss
9397

@@ -115,7 +119,9 @@ def conv_net(img, label, quant_skip_pattern):
115119
hidden = fluid.layers.fc(input=conv_pool_2, size=100, act='relu')
116120
with fluid.name_scope(quant_skip_pattern):
117121
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
118-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
122+
loss = paddle.nn.functional.cross_entropy(
123+
input=prediction, label=label, reduction='none', use_softmax=False
124+
)
119125
avg_loss = paddle.mean(loss)
120126
return avg_loss
121127

@@ -756,7 +762,9 @@ def conv_bn_layer(
756762
)
757763
pool_add = paddle.nn.functional.relu(paddle.add(x=pool1, y=pool2))
758764
fc = fluid.layers.fc(input=pool_add, size=10)
759-
loss = fluid.layers.cross_entropy(input=fc, label=label)
765+
loss = paddle.nn.functional.cross_entropy(
766+
input=fc, label=label, reduction='none', use_softmax=False
767+
)
760768
loss = paddle.mean(loss)
761769
return loss
762770

python/paddle/fluid/contrib/slim/tests/test_quantization_scale_pass.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,9 @@ def conv_net(img, label):
5555
)
5656
hidden = fluid.layers.fc(input=conv_pool_2, size=100, act='relu')
5757
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
58-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
58+
loss = paddle.nn.functional.cross_entropy(
59+
input=prediction, label=label, reduction='none', use_softmax=False
60+
)
5961
avg_loss = paddle.mean(loss)
6062
return avg_loss
6163

python/paddle/fluid/contrib/slim/tests/test_quantize_transpiler_v2.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,9 @@ def conv_net(img, label):
5252
with fluid.name_scope("skip_quant"):
5353
hidden = fluid.layers.fc(input=conv_pool_1, size=100, act='relu')
5454
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
55-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
55+
loss = paddle.nn.functional.cross_entropy(
56+
input=prediction, label=label, reduction='none', use_softmax=False
57+
)
5658
avg_loss = paddle.mean(loss)
5759
return avg_loss
5860

python/paddle/fluid/contrib/slim/tests/test_user_defined_quantization.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,9 @@ def conv_net(img, label):
5757
)
5858
hidden = fluid.layers.fc(input=conv_pool_2, size=100, act='relu')
5959
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
60-
loss = fluid.layers.cross_entropy(input=prediction, label=label)
60+
loss = paddle.nn.functional.cross_entropy(
61+
input=prediction, label=label, reduction='none', use_softmax=False
62+
)
6163
avg_loss = paddle.mean(loss)
6264
return avg_loss
6365

0 commit comments

Comments (0)