@@ -1033,37 +1033,5 @@ def test_static_compatibility(self):
10331033 np .testing .assert_array_equal (self .np_out , fetches [0 ])
10341034
10351035
class TestClamp_AndClip_(unittest.TestCase):
    """Check that the in-place ``Tensor.clamp_`` is an exact alias of ``Tensor.clip_``.

    Runs the same computation twice — once through ``clip_`` and once through
    ``clamp_`` — on identical inputs, then compares both the forward values
    and the gradients flowing through the in-place op.
    """

    def setUp(self) -> None:
        # In-place ops and .backward() require dynamic-graph (imperative) mode.
        paddle.disable_static()
        self.shape = [3, 4, 5]
        self.a = np.random.random(self.shape).astype('float32')
        self.b = np.random.random(self.shape).astype('float32')
        self.min, self.max = -0.5, 0.5

    def test_clip_and_clamp(self):
        # Two independent tensor pairs built from the same numpy data, so the
        # clip_ path and the clamp_ path each get their own autograd graph.
        clip_a = paddle.to_tensor(self.a, stop_gradient=False)
        clip_b = paddle.to_tensor(self.b, stop_gradient=False)

        clamp_a = paddle.to_tensor(self.a, stop_gradient=False)
        clamp_b = paddle.to_tensor(self.b, stop_gradient=False)

        clip_x = clip_a + clip_b
        clip_x.clip_(min=self.min, max=self.max)
        clip_x.retain_grads()
        clip_x.mean().backward()

        clamp_x = clamp_a + clamp_b
        clamp_x.clamp_(min=self.min, max=self.max)
        clamp_x.retain_grads()
        clamp_x.mean().backward()

        # rtol=1e-20 is far below float32 epsilon, so this effectively demands
        # bitwise-identical results — intentional, since clamp_ is expected to
        # dispatch to the very same kernel as clip_.
        np.testing.assert_allclose(clip_x.numpy(), clamp_x.numpy(), rtol=1e-20)
        np.testing.assert_allclose(
            clip_x.grad.numpy(), clamp_x.grad.numpy(), rtol=1e-20
        )
1067-
# Standard unittest entry point: discover and run the tests in this module
# when it is executed as a script.
if __name__ == '__main__':
    unittest.main()
0 commit comments