Commit 7a5f15c

Fix (#73176)
1 parent 0f2b873 commit 7a5f15c

4 files changed: +177 -0 lines changed
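
The four files below all add the same kind of coverage: losses evaluated on 0-size tensors (shapes with at least one zero-length dimension), checking the forward result against a NumPy reference and checking that backward still produces a gradient with the input's shape. As a quick orientation, here is a minimal standalone sketch of that pattern, condensed from the mse_loss hunk below (not part of the commit itself; the shapes, rtol, and API calls are taken from the diff):

    import numpy as np
    import paddle

    paddle.disable_static()

    # 0-size input: zero elements, but a well-defined shape.
    input_np = np.random.uniform(0.1, 0.5, [2, 0, 10]).astype("float32")
    target_np = np.random.uniform(0.1, 0.5, [2, 0, 10]).astype("float32")

    x = paddle.to_tensor(input_np)
    x.stop_gradient = False
    out = paddle.nn.functional.mse_loss(x, paddle.to_tensor(target_np), 'mean')

    # Forward: compare against the same NumPy reference the test uses.
    np.testing.assert_allclose(
        out.numpy(), np.mean((input_np - target_np) ** 2), rtol=1e-05
    )

    # Backward: the gradient must come back with the input's 0-size shape.
    paddle.sum(out).backward()
    assert tuple(x.grad.shape) == tuple(x.shape)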


test/legacy_test/test_mse_loss.py

Lines changed: 25 additions & 0 deletions
@@ -353,6 +353,31 @@ def test_NNFunctionalMseLoss_none(self):
             self.assertEqual(dy_result.shape, tuple(dim))
 
 
+class TestNNFunctionalMseLoss_ZeroSize(unittest.TestCase):
+
+    def test_dygraph_and_grad(self):
+        for dim in [[0, 0], [2, 0, 10]]:
+            input_np = np.random.uniform(0.1, 0.5, dim).astype("float32")
+            target_np = np.random.uniform(0.1, 0.5, dim).astype("float32")
+
+            paddle.disable_static()
+            x = paddle.to_tensor(input_np)
+            x.stop_gradient = False
+            dy_ret = paddle.nn.functional.mse_loss(
+                x, paddle.to_tensor(target_np), 'mean'
+            )
+            dy_result = dy_ret.numpy()
+
+            sub = input_np - target_np
+            expected = np.mean(sub * sub)
+            np.testing.assert_allclose(dy_result, expected, rtol=1e-05)
+            self.assertEqual(dy_result.shape, ())
+
+            loss = paddle.sum(dy_ret)
+            loss.backward()
+            np.testing.assert_allclose(x.grad.shape, x.shape)
+
+
 if __name__ == "__main__":
     paddle.enable_static()
     unittest.main()
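
A side note on the 'mean' branch above: with zero elements, the NumPy reference np.mean(sub * sub) evaluates to NaN (with a "Mean of empty slice" warning), so the comparison only passes because np.testing.assert_allclose defaults to equal_nan=True; the test passing therefore suggests the Paddle op returns NaN there as well. The NumPy side of that can be checked in isolation:

    import numpy as np

    sub = np.empty((0, 0), dtype="float32")
    expected = np.mean(sub * sub)  # NaN, plus a "Mean of empty slice" RuntimeWarning
    # Passes because assert_allclose compares NaN == NaN by default (equal_nan=True).
    np.testing.assert_allclose(expected, np.nan, rtol=1e-05)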

test/legacy_test/test_poisson_nll_loss.py

Lines changed: 62 additions & 0 deletions
@@ -243,5 +243,67 @@ def test_api(self):
         self.test_dynamic_case(reduction="sum")
 
 
+class TestPoissonNLLLossCase_ZeroSize(unittest.TestCase):
+    def init_shape(self):
+        self.shape = [0, 2]
+
+    def setUp(self, dtype="float32"):
+        self.init_shape()
+        self.dtype = dtype
+        self.input_np = np.random.random(self.shape).astype(self.dtype)
+        self.label_np = np.random.random(self.shape).astype(self.dtype)
+        self.place = (
+            paddle.CUDAPlace(0)
+            if core.is_compiled_with_cuda()
+            else paddle.CPUPlace()
+        )
+
+    def _test_dynamic_case_and_grad(
+        self,
+        dtype="float32",
+        log_input=True,
+        full=False,
+        epsilon=1e-8,
+        reduction="mean",
+    ):
+        self.setUp(dtype)
+        paddle.disable_static(self.place)
+
+        input_x = paddle.to_tensor(self.input_np)
+        input_x.stop_gradient = False
+        label = paddle.to_tensor(self.label_np)
+        out_ref = ref_poisson_nll_loss(
+            self.input_np,
+            self.label_np,
+            log_input=log_input,
+            full=full,
+            epsilon=epsilon,
+            reduction=reduction,
+        )
+        out1 = F.poisson_nll_loss(
+            input_x,
+            label,
+            log_input=log_input,
+            full=full,
+            epsilon=epsilon,
+            reduction=reduction,
+        )
+
+        np.allclose(out_ref, out1.numpy(), rtol=1e-5)
+
+        loss = paddle.sum(out1)
+        loss.backward()
+        np.testing.assert_allclose(input_x.grad.shape, input_x.shape)
+        paddle.enable_static()
+
+    def test_api(self):
+        self._test_dynamic_case_and_grad(reduction="sum")
+
+
+class TestPoissonNLLLossCase_ZeroSize2(TestPoissonNLLLossCase_ZeroSize):
+    def init_shape(self):
+        self.shape = [0, 0]
+
+
 if __name__ == "__main__":
     unittest.main()
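
For a quick manual reproduction outside the unittest harness, the same zero-size round trip can be run directly against F.poisson_nll_loss. This is a condensed sketch of what _test_dynamic_case_and_grad exercises, dropping the ref_poisson_nll_loss value comparison (that helper lives inside the test file) and keeping only the forward call and the gradient-shape check:

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()

    x = paddle.to_tensor(np.random.random([0, 2]).astype("float32"))
    x.stop_gradient = False
    label = paddle.to_tensor(np.random.random([0, 2]).astype("float32"))

    # Same arguments as the test's defaults, with reduction='sum' as used by test_api.
    out = F.poisson_nll_loss(
        x, label, log_input=True, full=False, epsilon=1e-8, reduction='sum'
    )
    paddle.sum(out).backward()
    assert tuple(x.grad.shape) == tuple(x.shape)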

test/legacy_test/test_soft_margin_loss.py

Lines changed: 46 additions & 0 deletions
@@ -195,5 +195,51 @@ def test_SoftMarginLoss_error(self):
         paddle.enable_static()
 
 
+class TestSoftMarginLoss_ZeroSize(unittest.TestCase):
+    def init_shape(self):
+        self.shape = (0, 5)
+
+    def test_SoftMarginLoss(self):
+        self.init_shape()
+        input_np = np.random.uniform(0.1, 0.8, size=self.shape).astype(
+            np.float64
+        )
+        type = np.float32
+        places = []
+        if (
+            os.environ.get('FLAGS_CI_both_cpu_and_gpu', 'False').lower()
+            in ['1', 'true', 'on']
+            or not paddle.device.is_compiled_with_cuda()
+        ):
+            places.append(paddle.CPUPlace())
+        if paddle.device.is_compiled_with_cuda():
+            places.append(paddle.CUDAPlace(0))
+        reductions = ['sum', 'mean', 'none']
+        for place in places:
+            for reduction in reductions:
+                label_np = np.random.randint(0, 2, size=self.shape).astype(type)
+                label_np[label_np == 0] = -1
+
+                expected = calc_softmarginloss(input_np, label_np, reduction)
+
+                paddle.disable_static(place)
+                input = paddle.to_tensor(input_np)
+                input.stop_gradient = False
+                label = paddle.to_tensor(label_np)
+
+                dy_res = paddle.nn.functional.soft_margin_loss(
+                    input, label, reduction=reduction
+                )
+                np.testing.assert_allclose(dy_res.numpy(), expected, rtol=1e-05)
+                loss = paddle.sum(dy_res)
+                loss.backward()
+                np.testing.assert_allclose(input.grad.shape, input.shape)
+
+
+class TestSoftMarginLoss_ZeroSize2(TestSoftMarginLoss_ZeroSize):
+    def init_shape(self):
+        self.shape = (0, 0)
+
+
 if __name__ == "__main__":
     unittest.main()
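
The only non-obvious detail in the soft margin case is the label construction: labels are sampled from {0, 1} and remapped to {-1, +1}, which is what soft_margin_loss expects even when the batch dimension is zero. A condensed sketch of one (place, reduction) iteration, with both tensors cast to float64 here for simplicity (the test itself keeps a float64 input and a float32 label):

    import numpy as np
    import paddle

    paddle.disable_static()

    shape = (0, 5)
    input_np = np.random.uniform(0.1, 0.8, size=shape).astype(np.float64)
    label_np = np.random.randint(0, 2, size=shape).astype(np.float64)
    label_np[label_np == 0] = -1  # soft margin labels live in {-1, +1}

    x = paddle.to_tensor(input_np)
    x.stop_gradient = False
    out = paddle.nn.functional.soft_margin_loss(
        x, paddle.to_tensor(label_np), reduction='none'
    )

    # reduction='none' keeps the elementwise shape, so the output is also 0-size.
    assert tuple(out.shape) == shape
    paddle.sum(out).backward()
    assert tuple(x.grad.shape) == tuple(x.shape)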

test/legacy_test/test_square_error_cost.py

Lines changed: 44 additions & 0 deletions
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import unittest
 
 import numpy as np
@@ -61,6 +62,8 @@ def test_square_error_cost(self):
 class TestSquareErrorInvalidInput(unittest.TestCase):
 
     def test_error(self):
+        paddle.enable_static()
+
         def test_invalid_input():
             input = [256, 3]
             label = paddle.static.data(
@@ -80,5 +83,46 @@ def test_invalid_label():
         self.assertRaises(TypeError, test_invalid_label)
 
 
+class TestSquareErrorCost_ZeroSize(unittest.TestCase):
+    def init_shape(self):
+        self.shape = [0, 3]
+
+    def test_square_error_cost(self):
+        places = []
+        if (
+            os.environ.get('FLAGS_CI_both_cpu_and_gpu', 'False').lower()
+            in ['1', 'true', 'on']
+            or not paddle.device.is_compiled_with_cuda()
+        ):
+            places.append(paddle.CPUPlace())
+        if paddle.device.is_compiled_with_cuda():
+            places.append(paddle.CUDAPlace(0))
+        self.init_shape()
+        shape = self.shape
+        input_val = np.random.uniform(0.1, 0.5, shape).astype("float32")
+        label_val = np.random.uniform(0.1, 0.5, shape).astype("float32")
+
+        sub = input_val - label_val
+        np_result = sub * sub
+        for place in places:
+            paddle.disable_static(place)
+            input = paddle.to_tensor(input_val)
+            input.stop_gradient = False
+            label = paddle.to_tensor(label_val)
+            output = paddle.nn.functional.square_error_cost(
+                input=input, label=label
+            )
+            np.testing.assert_allclose(np_result, output.numpy(), rtol=1e-05)
+            loss = paddle.sum(output)
+            loss.backward()
+            np.testing.assert_allclose(input.grad.shape, input.shape)
+            paddle.enable_static()
+
+
+class TestSquareErrorCost_ZeroSize2(TestSquareErrorCost_ZeroSize):
+    def init_shape(self):
+        self.shape = [0, 0]
+
+
 if __name__ == "__main__":
     unittest.main()
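
Two remarks on this file. The two-line hunk in test_error adds an explicit paddle.enable_static() at the top of the method, presumably so the static-graph error checks are unaffected if an earlier dygraph test left dynamic mode enabled. And since square_error_cost is elementwise, (input - label) ** 2 with no reduction, a 0-size input simply yields a 0-size output. A condensed sketch of the new test's loop body (not part of the commit):

    import numpy as np
    import paddle

    paddle.disable_static()

    x_np = np.random.uniform(0.1, 0.5, [0, 3]).astype("float32")
    y_np = np.random.uniform(0.1, 0.5, [0, 3]).astype("float32")

    x = paddle.to_tensor(x_np)
    x.stop_gradient = False
    out = paddle.nn.functional.square_error_cost(input=x, label=paddle.to_tensor(y_np))

    # Elementwise squared error: the 0-size shape is preserved end to end.
    np.testing.assert_allclose(out.numpy(), (x_np - y_np) ** 2, rtol=1e-05)
    paddle.sum(out).backward()
    assert tuple(x.grad.shape) == tuple(x.shape)

    paddle.enable_static()  # mirror the test's cleanup for any later static-graph tests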
