5 changes: 5 additions & 0 deletions paddle/phi/kernels/cpu/cross_entropy_grad_kernel.cc
@@ -34,6 +34,11 @@ void CrossEntropyWithSoftmaxGradCPUKernel(const CPUContext& dev_ctx,
int ignore_index,
int axis,
DenseTensor* logits_grad) {
  if (logits_grad->numel() == 0) {
    // A zero-size gradient only needs its storage allocated.
    dev_ctx.template Alloc<T>(logits_grad);
    return;
  }

const DenseTensor* out_grad = &loss_grad;
DenseTensor* logit_grad = logits_grad;

14 changes: 14 additions & 0 deletions paddle/phi/kernels/cpu/cross_entropy_kernel.cc
@@ -17,6 +17,7 @@ limitations under the License. */
#include "paddle/phi/backends/cpu/cpu_context.h"
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/core/tensor_utils.h"
#include "paddle/phi/kernels/full_kernel.h"
#include "paddle/phi/kernels/funcs/axis_utils.h"
#include "paddle/phi/kernels/funcs/cross_entropy.h"
#include "paddle/phi/kernels/funcs/math_function.h"
@@ -79,6 +80,19 @@ void CrossEntropyWithSoftmaxKernel(const Context& dev_ctx,
int axis,
DenseTensor* softmax,
DenseTensor* loss) {
  if (softmax->numel() == 0) {
    // With hard labels (soft_label == false), the class dimension along
    // `axis` cannot be 0, so the zero-size dimension is shared with loss and
    // both outputs have numel 0; allocating them is enough.
    dev_ctx.template Alloc<T>(softmax);
    dev_ctx.template Alloc<T>(loss);

    // With soft labels, the class dimension itself may be 0. Loss keeps size
    // 1 along `axis`, so it can be non-empty and is zero-filled here.
    if (soft_label) {
      phi::Full<T, Context>(
          dev_ctx, phi::IntArray(common::vectorize(loss->dims())), 0, loss);
    }
    return;
  }
  // not fused with the softmax op: the input is already a softmax
if (!use_softmax) {
CrossEntropy<T>(
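Taken together, the forward and backward guards make a zero-size batch a no-op end to end. A minimal sketch of the intended eager-mode behavior, assuming the public paddle.nn.functional.softmax_with_cross_entropy API (the shapes in the comments are expectations, not captured output):

import paddle
import paddle.nn.functional as F

# Hard labels on a zero-size batch: softmax and loss both have numel 0,
# so the forward kernel just allocates its outputs and returns.
logits = paddle.randn([0, 10])
logits.stop_gradient = False
labels = paddle.zeros([0, 1], dtype='int64')
loss = F.softmax_with_cross_entropy(logits, labels)  # expected shape: [0, 1]

# The grad kernel takes the matching early-return path.
loss.sum().backward()  # logits.grad expected shape: [0, 10]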
4 changes: 4 additions & 0 deletions paddle/phi/kernels/gpu/cross_entropy_grad_kernel.cu
@@ -241,6 +241,10 @@ void CrossEntropyWithSoftmaxGradKernel(const Context& dev_ctx,
int ignore_index,
int axis,
DenseTensor* logits_grad) {
if (logits_grad->numel() == 0) {
dev_ctx.template Alloc<T>(logits_grad);
return;
}
auto dtype = label.dtype();
if (soft_label) {
PADDLE_ENFORCE_EQ(
15 changes: 15 additions & 0 deletions paddle/phi/kernels/gpu/cross_entropy_kernel.cu
@@ -13,6 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/phi/kernels/cross_entropy_kernel.h"
#include "paddle/phi/kernels/full_kernel.h"

#include "glog/logging.h"

@@ -1402,6 +1403,20 @@ void CrossEntropyWithSoftmaxKernel(const Context& dev_ctx,
int axis,
DenseTensor* softmax,
DenseTensor* loss) {
  if (softmax->numel() == 0) {
    // With hard labels (soft_label == false), the class dimension along
    // `axis` cannot be 0, so the zero-size dimension is shared with loss and
    // both outputs have numel 0; allocating them is enough.
    dev_ctx.template Alloc<T>(softmax);
    dev_ctx.template Alloc<T>(loss);

    // With soft labels, the class dimension itself may be 0. Loss keeps size
    // 1 along `axis`, so it can be non-empty and is zero-filled here.
    if (soft_label) {
      phi::Full<T, Context>(
          dev_ctx, phi::IntArray(common::vectorize(loss->dims())), 0, loss);
    }
    return;
  }

auto dtype = label.dtype();
if (soft_label) {
PADDLE_ENFORCE_EQ(
3 changes: 3 additions & 0 deletions paddle/phi/kernels/xpu/cross_entropy_grad_kernel.cc
@@ -33,6 +33,9 @@ void CrossEntropyWithSoftmaxGradKernel(const Context& dev_ctx,
DenseTensor* logit_grad) {
using XPUType = typename XPUTypeTrait<T>::Type;
dev_ctx.template Alloc<T>(logit_grad);
if (logit_grad->numel() == 0) {
return;
}

const int rank = logit_grad->dims().size();
const int axis = phi::funcs::CanonicalAxis(axis_in, rank);
15 changes: 15 additions & 0 deletions paddle/phi/kernels/xpu/cross_entropy_kernel.cc
@@ -16,6 +16,7 @@ limitations under the License. */

#include "paddle/phi/backends/xpu/enforce_xpu.h"
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/kernels/full_kernel.h"
#include "paddle/phi/kernels/funcs/axis_utils.h"

namespace phi {
@@ -31,6 +32,20 @@ void CrossEntropyWithSoftmaxKernel(const Context& dev_ctx,
int axis_in,
DenseTensor* softmax,
DenseTensor* loss) {
  if (softmax->numel() == 0) {
    // With hard labels (soft_label == false), the class dimension along
    // `axis` cannot be 0, so the zero-size dimension is shared with loss and
    // both outputs have numel 0; allocating them is enough.
    dev_ctx.template Alloc<T>(softmax);
    dev_ctx.template Alloc<T>(loss);

    // With soft labels, the class dimension itself may be 0. Loss keeps size
    // 1 along `axis`, so it can be non-empty and is zero-filled here.
    if (soft_label) {
      phi::Full<T, Context>(
          dev_ctx, phi::IntArray(common::vectorize(loss->dims())), 0, loss);
    }
    return;
  }

using XPUType = typename XPUTypeTrait<T>::Type;
const int rank = logits.dims().size();
const int axis = phi::funcs::CanonicalAxis(axis_in, rank);
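The soft_label branch in these guards is the one case where loss is not itself zero-size: when the class axis has size 0, loss keeps size 1 along that axis and is zero-filled via phi::Full. A sketch of that case, under the same API assumption as above:

import paddle
import paddle.nn.functional as F

# Soft labels with a zero-size class axis: softmax has numel 0, but loss
# has shape [5, 1] and is explicitly filled with zeros.
logits = paddle.randn([5, 0])
soft_labels = paddle.randn([5, 0])
loss = F.softmax_with_cross_entropy(logits, soft_labels, soft_label=True)
print(loss.shape)  # expected: [5, 1]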
29 changes: 29 additions & 0 deletions test/legacy_test/test_cross_entropy_op.py
@@ -468,5 +468,34 @@ def test_input_dims():
self.assertRaises(ValueError, test_input_dims)


class TestCrossEntropyOp_ZeroSize(TestCrossEntropyOp):
def setUp(self):
self.op_type = "cross_entropy"
self.python_api = api_wrapper
self.soft_label = False
self.ignore_index = -100
self.dtype = np.float64
# 0-size
self.batch_size = 0
self.class_num = 10

self.init_dtype_type()
self.init_attr_type()
self.init_bs_class_num()
self.init_x()
self.init_label()
self.get_cross_entropy()

self.inputs = {"X": self.x, "Label": self.label}
self.outputs = {"Y": self.cross_entropy}
self.attrs = {
"soft_label": self.soft_label,
"ignore_index": self.ignore_index,
}

def get_cross_entropy(self):
self.cross_entropy = np.random.random([0, 1]).astype(np.float64)


if __name__ == "__main__":
unittest.main()
73 changes: 73 additions & 0 deletions test/legacy_test/test_softmax_with_cross_entropy_op.py
@@ -964,6 +964,79 @@ def test_input_dims2():
self.assertRaises(ValueError, test_input_dims2)


class TestSoftmaxWithCrossEntropyOp_ZeroSize(OpTest):
def initParams(self):
self.op_type = "softmax_with_cross_entropy"
self.__class__.op_type = "softmax_with_cross_entropy"
self.__class__.exist_fp64_check_grad = True
self.python_api = python_api
self.python_out_sig = ["Loss", "Softmax"]
self.numeric_stable_mode = False
self.soft_label = False
self.dtype = np.float32
self.axis = -1
self.ignore_index = -1
self.shape = [0, 10]
self.use_softmax = True

def hard_label_dtype(self):
return "int64"

def setUp(self):
self.initParams()

logits = getattr(
self,
"logits",
np.random.uniform(0.1, 1.0, self.shape).astype(self.dtype),
)
        if logits.size == 0:
            # np.apply_along_axis cannot iterate over a zero-size dimension,
            # and the softmax of an empty tensor is itself empty.
            softmax = logits
        else:
            softmax = np.apply_along_axis(stable_softmax, self.axis, logits)

if self.soft_label:
labels = np.random.uniform(0.1, 1.0, self.shape).astype(self.dtype)
labels /= np.sum(labels, axis=self.axis, keepdims=True)
else:
axis_dim = self.shape[self.axis]
self.shape[self.axis] = 1
labels = np.random.randint(
0, axis_dim, self.shape, dtype=self.hard_label_dtype()
)

loss = cross_entropy(
softmax, labels, self.soft_label, self.axis, self.ignore_index
)

if not self.use_softmax:
self.inputs = {"Logits": softmax, "Label": labels}
else:
self.inputs = {"Logits": logits, "Label": labels}

self.outputs = {
"Softmax": softmax.astype(self.dtype),
"Loss": loss.astype(self.dtype),
}
self.attrs = {
"numeric_stable_mode": self.numeric_stable_mode,
"soft_label": self.soft_label,
"ignore_index": self.ignore_index,
"use_softmax": self.use_softmax,
}

if self.axis != -1:
self.attrs['axis'] = self.axis

def test_check_output(self):
self.check_output(check_pir=True)

def test_check_grad(self):
self.check_grad(
["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True
)


if __name__ == "__main__":
paddle.enable_static()
unittest.main()
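As a usage note, both test files end in unittest.main(), so the new zero-size cases can be run directly against a local Paddle build, e.g. python test/legacy_test/test_softmax_with_cross_entropy_op.py.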