python/paddle/fluid/data_feeder.py (1 addition, 1 deletion)

@@ -116,7 +116,7 @@ def check_type(input, input_name, expected_type, op_name, extra_message=''):
         return

     # NOTE: `in_declarative_mode` is used to determined whether this op is called under
-    # @declarative in transformation from dygrah to static layer. We add VarBase in
+    # @to_static in transformation from dygrah to static layer. We add VarBase in
     # expected_type to skip checking because varBase may be created and used in unusual way.
     from .dygraph.base import in_declarative_mode
python/paddle/fluid/dygraph/base.py (2 additions, 2 deletions)

@@ -44,13 +44,13 @@
     'to_variable',
 ]

-# Flag that indicates whether running code under `@declarative`
+# Flag that indicates whether running code under `@to_static`
 _in_declarative_mode_ = False


 def in_declarative_mode():
     """
-    Return a bool value that indicates whether running code under `@declarative`
+    Return a bool value that indicates whether running code under `@to_static`

     """
     return _in_declarative_mode_
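The flag above is what `check_type` in data_feeder.py consults through `in_declarative_mode()`: while code is being transformed under `@to_static`, type checking is relaxed because `VarBase` may be created and used in unusual ways. A minimal sketch of that pattern, assuming only the names shown in the two hunks above; the surrounding validation logic is illustrative, not Paddle's actual implementation:

# Illustrative sketch, not part of this diff.
from paddle.fluid.dygraph.base import in_declarative_mode


def check_type_sketch(input, input_name, expected_type, op_name):
    # Hypothetical helper mirroring the NOTE in data_feeder.py: skip the
    # check while the dygraph-to-static transformation is running.
    if in_declarative_mode():
        return
    if not isinstance(input, expected_type):
        raise TypeError(
            "The type of '{}' in {} must be {}, but received {}.".format(
                input_name, op_name, expected_type, type(input)
            )
        )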
(next changed file; path not captured)

@@ -17,7 +17,7 @@
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph import Layer
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.nn import Linear


@@ -373,7 +373,7 @@ def __init__(
             bias_attr="next_sent_fc.b_0",
         )

-    @declarative
+    @to_static
     def forward(
         self,
         src_ids,
(next changed file; path not captured)

@@ -21,7 +21,7 @@
 from paddle.fluid import ParamAttr, layers
 from paddle.fluid.dygraph import Layer
 from paddle.fluid.dygraph.base import to_variable
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.nn import Embedding

 INF = 1.0 * 1e5

@@ -207,7 +207,7 @@ def _gather(self, x, indices, batch_pos):
         topk_coordinates = paddle.stack([batch_pos, indices], axis=2)
         return paddle.gather_nd(x, topk_coordinates)

-    @declarative
+    @to_static
     def forward(self, inputs):
         src, tar, label, src_sequence_length, tar_sequence_length = inputs
         if src.shape[0] < self.batch_size:

@@ -312,7 +312,7 @@ def forward(self, inputs):

         return loss

-    @declarative
+    @to_static
     def beam_search(self, inputs):
         src, tar, label, src_sequence_length, tar_sequence_length = inputs
         if src.shape[0] < self.batch_size:

@@ -724,7 +724,7 @@ def _change_size_for_array(self, func, array):

         return array

-    @declarative
+    @to_static
     def forward(self, inputs):
         src, tar, label, src_sequence_length, tar_sequence_length = inputs
         if src.shape[0] < self.batch_size:
(next changed file; path not captured)

@@ -18,7 +18,7 @@
 import paddle.fluid as fluid
 import paddle.fluid.param_attr as attr
 from paddle.fluid.dygraph import Layer
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.static import Variable


@@ -495,7 +495,7 @@ def __init__(self, conf_dict):
         self.bow_layer_po = FCLayer(self.bow_dim, None, "fc").ops()
         self.softmax_layer = FCLayer(2, "softmax", "cos_sim").ops()

-    @declarative
+    @to_static
     def forward(self, left, right):
         """
         Forward network
(next changed file; path not captured)

@@ -19,7 +19,7 @@
 import paddle
 import paddle.fluid as fluid
 from paddle.jit import ProgramTranslator
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static


 @paddle.jit.to_static

@@ -28,7 +28,7 @@ def dyfunc_assert_variable(x):
     assert x_v


-@declarative
+@to_static
 def dyfunc_assert_non_variable(x=True):
     assert x
(next changed file; path not captured)

@@ -18,7 +18,7 @@

 import paddle
 import paddle.fluid as fluid
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.jit.dy2static.program_translator import ProgramTranslator
 from paddle.jit.dy2static.utils import Dygraph2StaticException


@@ -36,7 +36,7 @@ def test_error(self):
         if self.dyfunc:
             with self.assertRaisesRegex(Dygraph2StaticException, self.error):
                 ProgramTranslator().enable(True)
-                self.assertTrue(declarative(self.dyfunc)(self.x))
+                self.assertTrue(to_static(self.dyfunc)(self.x))
         paddle.fluid.dygraph.base._in_declarative_mode_ = False
         ProgramTranslator().enable(False)

@@ -223,7 +223,7 @@ def run_dygraph_mode(self):

     def run_static_mode(self):
         with fluid.dygraph.guard():
-            res = declarative(self.dygraph_func)(self.input)
+            res = to_static(self.dygraph_func)(self.input)
         return res.numpy()

     def test_transformed_static_result(self):
(next changed file; path not captured)

@@ -21,7 +21,7 @@
 import paddle
 import paddle.fluid as fluid
 from paddle.jit import ProgramTranslator
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.jit.dy2static import convert_to_static


@@ -138,7 +138,7 @@ def test_cache(self):
         self.assertTrue(id(static_func), id(cached_func))


-@declarative
+@to_static
 def sum_even_until_limit(max_len, limit):
     ret_sum = fluid.dygraph.to_variable(np.zeros((1)).astype('int32'))
     for i in range(max_len):

@@ -166,7 +166,7 @@ def test_output(self):
         ret = sum_even_until_limit(80, 10)
         self.assertEqual(ret.numpy(), 30)

-        ret = declarative(sum_under_while)(100)
+        ret = to_static(sum_under_while)(100)
         self.assertEqual(ret.numpy(), 5050)
(next changed file; path not captured)

@@ -17,40 +17,40 @@
 import numpy as np

 import paddle.fluid as fluid
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static

 SEED = 2020
 np.random.seed(SEED)


-@declarative
+@to_static
 def test_bool_cast(x):
     x = fluid.dygraph.to_variable(x)
     x = bool(x)
     return x


-@declarative
+@to_static
 def test_int_cast(x):
     x = fluid.dygraph.to_variable(x)
     x = int(x)
     return x


-@declarative
+@to_static
 def test_float_cast(x):
     x = fluid.dygraph.to_variable(x)
     x = float(x)
     return x


-@declarative
+@to_static
 def test_not_var_cast(x):
     x = int(x)
     return x


-@declarative
+@to_static
 def test_mix_cast(x):
     x = fluid.dygraph.to_variable(x)
     x = int(x)
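The hunks above use `to_static` in both of its spellings: as a decorator on a function definition and as a plain call that wraps an existing function, e.g. `to_static(sum_under_while)(100)`. A small illustrative example of the two forms, assuming the imports shown above; the function bodies and inputs are made up for the example:

# Illustrative only, not part of this diff.
import numpy as np
import paddle.fluid as fluid
from paddle.jit.api import to_static


@to_static            # decorator form, as in the cast tests above
def double_it(x):
    x = fluid.dygraph.to_variable(x)
    return x * 2


def halve_it(x):
    x = fluid.dygraph.to_variable(x)
    return x / 2


# call form: wrap an undecorated function on demand
halve_it_static = to_static(halve_it)

print(double_it(np.ones([2]).astype('float32')))
print(halve_it_static(np.ones([2]).astype('float32')))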
(next changed file; path not captured)

@@ -39,7 +39,7 @@
 import paddle
 from paddle.fluid.dygraph import to_variable
 from paddle.jit import ProgramTranslator
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.nn import BatchNorm

 # Note: Set True to eliminate randomness.

@@ -82,7 +82,7 @@ def __init__(self, input_channel, istrain=True):
             input_channel
         )

-    @declarative
+    @to_static
     def forward(self, input_A, input_B):
         """
         Generator of GAN model.

@@ -133,7 +133,7 @@ def forward(self, input_A, input_B):
             g_loss,
         )

-    @declarative
+    @to_static
     def discriminatorA(self, input_A, input_B):
         """
         Discriminator A of GAN model.

@@ -143,7 +143,7 @@ def discriminatorA(self, input_A, input_B):

         return rec_B, fake_pool_rec_B

-    @declarative
+    @to_static
     def discriminatorB(self, input_A, input_B):
         """
         Discriminator B of GAN model.
(next changed file; path not captured)

@@ -23,7 +23,7 @@
 import paddle.fluid as fluid
 from paddle.fluid.dygraph import Layer, to_variable
 from paddle.jit import ProgramTranslator
-from paddle.jit.api import declarative
+from paddle.jit.api import to_static
 from paddle.jit.dy2static.program_translator import (
     ConcreteProgram,
     StaticFunction,

@@ -38,13 +38,12 @@ def __init__(self):
         super().__init__()
         self.linear = paddle.nn.Linear(10, 3)

-    @declarative(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
+    @to_static(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
     def forward(self, x, a=1, b=2):
         y = self.inner_function(x)
         return y

-    # `declarative` is not essential, add it to test for robustness.
-    @declarative
+    @to_static
     def inner_function(self, x):
         y = self.linear(x)
         return y

@@ -53,14 +52,14 @@ def add_func(self, x, y):
         z = x + y
         return z

-    @declarative(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])
+    @to_static(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])
     def func_with_list(self, l, int_val=1):
         x, y = l
         z = x + y
         z = z + int_val
         return z

-    @declarative(
+    @to_static(
         input_spec=[{'x': InputSpec([None, 10]), 'y': InputSpec([None, 10])}]
     )
     def func_with_dict(self, d):

@@ -70,7 +69,7 @@ def func_with_dict(self, d):

         return z

-    @declarative(
+    @to_static(
         input_spec=[
             [
                 InputSpec([None]),

@@ -135,8 +134,8 @@ def test_with_input_spec(self):

         # 3. we can decorate any method
         x_2 = to_variable(np.ones([4, 20]).astype('float32'))
-        # uses `declarative(func)` instead of `@declarative`
-        net.add_func = declarative(net.add_func)
+        # uses `to_static(func)` instead of `@to_static`
+        net.add_func = to_static(net.add_func)
         out = net.add_func(x_2, np.ones([20]).astype('float32'))
         self.assertTrue(len(net.add_func.program_cache) == 1)

@@ -164,7 +163,7 @@ def test_with_error(self):

         # 2. requires len(input_spec) <= len(args)
         with self.assertRaises(ValueError):
-            net.add_func = declarative(
+            net.add_func = to_static(
                 net.add_func,
                 input_spec=[
                     InputSpec([-1, 10]),

@@ -182,7 +181,7 @@ def test_concrete_program(self):

         net = SimpleNet()
         # We can get concrete_program by specificing InputSpec information. Faking input is no need.
-        net.add_func = declarative(
+        net.add_func = to_static(
             net.add_func,
             input_spec=[InputSpec([-1, 10]), InputSpec([-1, 10], name='y')],
         )

@@ -191,14 +190,14 @@
         self.assertTrue(cp1.inputs[-1].name == 'y')

         # generate another program
-        net.add_func = declarative(
+        net.add_func = to_static(
             net.add_func,
             input_spec=[InputSpec([10]), InputSpec([10], name='label')],
         )
         cp2 = net.add_func.concrete_program
         self.assertTrue(cp2.inputs[-1].shape == (10,))
         self.assertTrue(cp2.inputs[-1].name == 'label')
-        # Note(Aurelius84): New instance will be returned if we use `declarative(foo)` every time.
+        # Note(Aurelius84): New instance will be returned if we use `to_static(foo)` every time.
         # So number of cache program is 1.
         self.assertTrue(len(net.add_func.program_cache) == 1)
         self.assertTrue(cp1 != cp2)

@@ -219,7 +218,7 @@ def test_with_different_input(self):
         y_data = np.ones([10]).astype('float32') * 2
         z_data = np.ones([10]).astype('float32') * 2.2

-        foo = declarative(foo_func)
+        foo = to_static(foo_func)

         # [16, 10] + [10] (varbase)
         out_1 = foo(to_variable(x_data), to_variable(y_data))

@@ -260,7 +259,7 @@ def test_with_different_input(self):

     def test_get_concrete_program(self):

-        foo = declarative(foo_func)
+        foo = to_static(foo_func)

         # 1. specific InputSpec for `x`/`y`
         concrete_program_1 = foo.get_concrete_program(

@@ -349,7 +348,7 @@ def test_nest_input(self):

 class TestDeclarativeAPI(unittest.TestCase):
     def test_error(self):
-        func = declarative(dyfunc_to_variable)
+        func = to_static(dyfunc_to_variable)

         paddle.enable_static()

@@ -373,20 +372,20 @@ def setUp(self):

     def test_fake_input(self):
         net = SimpleNet()
-        net = declarative(net)
+        net = to_static(net)
         y = net(self.x)
         self.assertTrue(len(net.forward.program_cache) == 1)

     def test_input_spec(self):
         net = SimpleNet()
-        net = declarative(net, input_spec=[InputSpec([None, 8, 10])])
+        net = to_static(net, input_spec=[InputSpec([None, 8, 10])])
         self.assertTrue(len(net.forward.inputs) == 1)
         self.assertTrue(len(net.forward.program_cache) == 1)
         input_shape = net.forward.inputs[0].shape
         self.assertListEqual(list(input_shape), [-1, 8, 10])

         # redecorate
-        net = declarative(net, input_spec=[InputSpec([None, 16, 10])])
+        net = to_static(net, input_spec=[InputSpec([None, 16, 10])])
         input_shape = net.forward.inputs[0].shape
         self.assertListEqual(list(input_shape), [-1, 16, 10])
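The test above also pins down the `input_spec` behaviour of `to_static`: the spec fixes the signature of `forward`, and re-decorating with a new spec replaces the captured program. A hedged sketch of that usage; `TinyNet` and the shapes are made up for illustration, while `to_static`, `InputSpec`, and the `forward.inputs` attribute are taken from the hunks above:

# Illustrative only, not part of this diff.
import numpy as np
import paddle
from paddle.jit.api import to_static
from paddle.static import InputSpec


class TinyNet(paddle.nn.Layer):   # hypothetical stand-in for SimpleNet
    def __init__(self):
        super().__init__()
        self.linear = paddle.nn.Linear(10, 3)

    def forward(self, x):
        return self.linear(x)


net = to_static(TinyNet(), input_spec=[InputSpec([None, 8, 10], dtype='float32')])
# The dynamic first axis becomes -1 in the captured signature, as the test asserts.
print(net.forward.inputs[0].shape)        # (-1, 8, 10)
print(net(paddle.to_tensor(np.ones([4, 8, 10]).astype('float32'))).shape)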