【Hackathon 5th No.11】add gammaincc and gammainc API #59357

Merged: 29 commits merged on Jan 24, 2024
Changes from 1 commit

Commits (29):
9bc8ff5  【Hackathon 5th No.11】add igamma and igammac API (GreatV, Nov 24, 2023)
e2975de  fix bug (GreatV, Nov 24, 2023)
77fa04c  Merge branch 'develop' into add_igamma_igammac (GreatV, Nov 28, 2023)
93b329c  Merge branch 'develop' into add_igamma_igammac (GreatV, Nov 28, 2023)
26b8fea  Merge branch 'develop' into add_igamma_igammac (GreatV, Nov 28, 2023)
c753bbc  fix codestyle (GreatV, Nov 28, 2023)
95ab00e  Merge branch 'develop' into add_igamma_igammac (GreatV, Dec 18, 2023)
72d9733  fix bug (GreatV, Dec 18, 2023)
7bb8331  update ut (GreatV, Dec 18, 2023)
698b168  Merge branch 'develop' into add_igamma_igammac (GreatV, Dec 18, 2023)
be0902b  fix bug (GreatV, Dec 18, 2023)
a305761  fix bug (GreatV, Dec 18, 2023)
e35b378  add test inplace (GreatV, Dec 19, 2023)
00b9c41  fix bug (GreatV, Dec 19, 2023)
64c8fdb  fix bug (GreatV, Dec 19, 2023)
df1cd20  remove unused comment (GreatV, Dec 19, 2023)
42cc077  remove some c++ impl (GreatV, Dec 20, 2023)
3d2a1d1  update code (GreatV, Dec 24, 2023)
47fcd00  Merge branch 'develop' into add_igamma_igammac (GreatV, Dec 24, 2023)
623f01f  fix bug (GreatV, Dec 24, 2023)
56e5ce8  fix bug (GreatV, Dec 25, 2023)
faf3757  update (GreatV, Dec 27, 2023)
ad0e1cc  remove some paddle.enable_static() (GreatV, Jan 4, 2024)
ac25528  remove eigen impl (GreatV, Jan 8, 2024)
8f96075  Merge branch 'develop' into add_igamma_igammac (GreatV, Jan 8, 2024)
d7c1b59  fix test_inplace (GreatV, Jan 9, 2024)
cb34836  Merge branch 'develop' into add_igamma_igammac (GreatV, Jan 22, 2024)
af72a5c  rename op (GreatV, Jan 23, 2024)
ec4b0de  igamma(a, x) -> gammaincc(x, y) (GreatV, Jan 23, 2024)
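
A note for context, not taken from the commit messages themselves: gammainc and gammaincc conventionally name the regularized lower incomplete gamma function and its upper complement (as in SciPy), which is why gammainc can be obtained from gammaincc by a single subtraction in the diff shown below. In LaTeX, with shape parameter a and argument x, the standard definitions are:

% Standard regularized incomplete gamma functions (textbook convention).
P(a, x) = \frac{1}{\Gamma(a)} \int_0^{x} t^{a-1} e^{-t} \, dt,
\qquad
Q(a, x) = \frac{1}{\Gamma(a)} \int_x^{\infty} t^{a-1} e^{-t} \, dt = 1 - P(a, x)

The mathematical (a, x) argument order above is only the textbook convention; the Paddle API's own argument order is the one used in the signatures in the diff below.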
Commit d7c1b590835482b155b90244a092c7bc63715a3e: fix test_inplace (GreatV committed Jan 9, 2024)
python/paddle/tensor/math.py (4 changes: 2 additions & 2 deletions)

@@ -5150,7 +5150,7 @@ def gammaincc_(x, a, name=None):
     Please refer to :ref:`api_paddle_gammaincc`.
     """
     if in_dynamic_mode():
-        return _C_ops.gammaincc_(x, a)
+        return _C_ops.igamma_(x, a)


 def gammainc(x, a, name=None):
[GreatV marked a review conversation on this line as resolved.]

@@ -5189,7 +5189,7 @@ def gammainc_(x, a, name=None):
     Please refer to :ref:`api_paddle_gammainc`.
     """
     return (
-        paddle.igamma_(x, a)
+        paddle.gammaincc_(x, a)
         .multiply_(paddle.full_like(x, -1.0))
         .add_(paddle.full_like(x, 1.0))
     )
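
The second hunk implements gammainc as one minus gammaincc, using in-place multiply and add so that gammainc_ remains a true in-place operation. Below is a minimal sketch of that identity at the Python level; it is illustration rather than code from the PR, and assumes a Paddle build in which the renamed paddle.gammainc / paddle.gammaincc / paddle.gammaincc_ APIs are available with the argument order used in this commit.

# Illustrative only: checks the complement identity that gammainc_ relies on.
# Assumes paddle.gammainc / paddle.gammaincc / paddle.gammaincc_ exist with the
# (x, a) argument order used in this commit.
import paddle

x = paddle.rand([3, 40], dtype="float32") + 1  # positive inputs, as in the tests
a = paddle.rand([3, 40], dtype="float32") + 1

lower = paddle.gammainc(x, a)   # computed as 1 - gammaincc per the diff above
upper = paddle.gammaincc(x, a)

# The two regularized incomplete gamma ratios are complementary.
assert paddle.allclose(lower + upper, paddle.ones_like(x))

# The in-place variant writes the same values back into its first argument.
y = x.clone()
paddle.gammaincc_(y, a)
assert paddle.allclose(y, upper)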
test/legacy_test/test_inplace.py (12 changes: 6 additions & 6 deletions)

@@ -885,7 +885,7 @@ def non_inplace_api_processing(self, var):
         return paddle.neg(var)


-class TestDygraphInplaceIgamma(TestDygraphInplaceWithContinuous):
+class TestDygraphInplaceGammaincc(TestDygraphInplaceWithContinuous):
     def init_data(self):
         self.shape = (3, 40)
         self.dtype = "float32"

@@ -895,13 +895,13 @@ def init_data(self):
         self.a = paddle.rand(shape=self.shape, dtype=self.dtype) + 1

     def inplace_api_processing(self, var):
-        return paddle.igamma_(var, a=self.a)
+        return paddle.gammaincc_(var, a=self.a)

     def non_inplace_api_processing(self, var):
-        return paddle.igamma(var, a=self.a)
+        return paddle.gammaincc(var, a=self.a)


-class TestDygraphInplaceIgammac(TestDygraphInplaceWithContinuous):
+class TestDygraphInplaceGammainc(TestDygraphInplaceWithContinuous):
     def init_data(self):
         self.shape = (3, 40)
         self.dtype = "float32"

@@ -911,10 +911,10 @@ def init_data(self):
         self.a = paddle.rand(shape=self.shape, dtype=self.dtype) + 1

     def inplace_api_processing(self, var):
-        return paddle.igammac_(var, a=self.a)
+        return paddle.gammainc_(var, a=self.a)

     def non_inplace_api_processing(self, var):
-        return paddle.igammac(var, a=self.a)
+        return paddle.gammainc(var, a=self.a)

     def test_forward_version(self):
         with paddle.base.dygraph.guard():
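
For readers unfamiliar with this harness: inplace_api_processing and non_inplace_api_processing are hooks consumed by the TestDygraphInplaceWithContinuous base class (not shown in this diff), which checks that the trailing-underscore API produces the same values as the ordinary API while writing the result into its input. A standalone sketch of that behaviour for the renamed APIs, under the same assumptions as the earlier snippet:

# Standalone sketch of what the in-place tests assert for the renamed APIs.
# The real checks live in TestDygraphInplaceWithContinuous; this is illustrative.
import paddle

x = paddle.rand(shape=(3, 40), dtype="float32") + 1
a = paddle.rand(shape=(3, 40), dtype="float32") + 1

expected = paddle.gammaincc(x, a)   # out-of-place: leaves x untouched
returned = paddle.gammaincc_(x, a)  # in-place: overwrites x with the result

assert paddle.allclose(returned, expected)  # same values are returned ...
assert paddle.allclose(x, expected)         # ... and written back into x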