From 6c2228bbe44d7c57ea98b4e5617ba746aa66e2a0 Mon Sep 17 00:00:00 2001
From: juncaipeng <13006307475@163.com>
Date: Tue, 30 Aug 2022 16:02:15 +0800
Subject: [PATCH] Replace _C_ops with _legacy_C_ops

---
 paddleseg/models/losses/lovasz_loss.py | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/paddleseg/models/losses/lovasz_loss.py b/paddleseg/models/losses/lovasz_loss.py
index 4385c979fe..82dfaf9597 100644
--- a/paddleseg/models/losses/lovasz_loss.py
+++ b/paddleseg/models/losses/lovasz_loss.py
@@ -124,8 +124,12 @@ def lovasz_hinge_flat(logits, labels):
     signs = 2. * labels - 1.
     signs.stop_gradient = True
     errors = 1. - logits * signs
-    errors_sorted, perm = paddle._C_ops.argsort(errors, 'axis', 0, 'descending',
-                                                True)
+    if hasattr(paddle, "_legacy_C_ops"):
+        errors_sorted, perm = paddle._legacy_C_ops.argsort(errors, 'axis', 0,
+                                                           'descending', True)
+    else:
+        errors_sorted, perm = paddle._C_ops.argsort(errors, 'axis', 0,
+                                                    'descending', True)
     errors_sorted.stop_gradient = False
     gt_sorted = paddle.gather(labels, perm)
     grad = lovasz_grad(gt_sorted)
@@ -181,8 +185,12 @@ def lovasz_softmax_flat(probas, labels, classes='present'):
         else:
             class_pred = probas[:, c]
         errors = paddle.abs(fg - class_pred)
-        errors_sorted, perm = paddle._C_ops.argsort(errors, 'axis', 0,
-                                                    'descending', True)
+        if hasattr(paddle, "_legacy_C_ops"):
+            errors_sorted, perm = paddle._legacy_C_ops.argsort(
+                errors, 'axis', 0, 'descending', True)
+        else:
+            errors_sorted, perm = paddle._C_ops.argsort(errors, 'axis', 0,
+                                                        'descending', True)
         errors_sorted.stop_gradient = False
         fg_sorted = paddle.gather(fg, perm)
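
Note on the pattern used above: both hunks apply the same feature-detection
fallback, since newer PaddlePaddle releases moved the old-style eager ops from
paddle._C_ops to paddle._legacy_C_ops and repurposed paddle._C_ops for the new
final-state ops. Below is a minimal sketch of that pattern factored into one
helper, so the check is written once instead of at every call site. The helper
name _argsort_compat is hypothetical (not part of PaddleSeg); the internal
argsort calls and their attribute-style arguments are taken verbatim from the
patch and return (sorted_values, permutation).

import paddle


def _argsort_compat(x, axis=0, descending=True):
    """Return (sorted_values, permutation), working across Paddle versions.

    Hypothetical helper mirroring the fallback in the patch above.
    """
    if hasattr(paddle, "_legacy_C_ops"):
        # Newer Paddle: the old-style op lives under _legacy_C_ops.
        return paddle._legacy_C_ops.argsort(x, 'axis', axis, 'descending',
                                            descending)
    # Older Paddle: the old-style op is still exposed as _C_ops.
    return paddle._C_ops.argsort(x, 'axis', axis, 'descending', descending)


With such a helper, both hunks would reduce to a one-line call, e.g.
errors_sorted, perm = _argsort_compat(errors); the patch instead inlines the
check at each call site, which keeps the diff local but duplicates the branch.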