10 changes: 10 additions & 0 deletions ppsci/arch/afno.py
@@ -414,6 +414,11 @@ class AFNONet(base.Arch):
Examples:
>>> import paddle
>>> import ppsci
>>> model = ppsci.arch.AFNONet(("input", ), ("output", ))
>>> input_data = {"input": paddle.randn([1, 20, 720, 1440])}
>>> output_data = model(input_data)
>>> for k, v in output_data.items():
...     print(k, v.shape)
output [1, 20, 720, 1440]
"""

def __init__(
@@ -577,6 +582,11 @@ class PrecipNet(base.Arch):
>>> import paddle
>>> import ppsci
>>> wind_model = ppsci.arch.AFNONet(("input", ), ("output", ))
>>> model = ppsci.arch.PrecipNet(("input", ), ("output", ), wind_model)
>>> data = paddle.randn([1, 20, 720, 1440])
>>> data_dict = {"input": data}
>>> output = model(data_dict)
>>> print(output["output"].shape)
[1, 1, 720, 1440]
"""

def __init__(
12 changes: 11 additions & 1 deletion ppsci/arch/embedding_koopman.py
@@ -285,8 +285,18 @@ class CylinderEmbedding(base.Arch):
drop (float, optional): Probability of dropping out units. Defaults to 0.0.

Examples:
>>> import paddle
>>> import ppsci
>>> model = ppsci.arch.CylinderEmbedding(("x", "y"), ("u", "v"))
>>> model = ppsci.arch.CylinderEmbedding(("states", "visc"), ("pred_states", "recover_states"))
>>> states_shape = [32, 10, 3, 64, 128]
>>> visc_shape = [32, 1]
>>> input_dict = {"states": paddle.rand(states_shape),
...               "visc": paddle.rand(visc_shape)}
>>> out_dict = model(input_dict)
>>> print(out_dict["pred_states"].shape)
[32, 9, 3, 64, 128]
>>> print(out_dict["recover_states"].shape)
[32, 10, 3, 64, 128]
"""

def __init__(
15 changes: 15 additions & 0 deletions ppsci/arch/gan.py
@@ -286,6 +286,21 @@ class Discriminator(base.Arch):
>>> acts = ("leaky_relu", "leaky_relu", "leaky_relu", "leaky_relu", None)
>>> output_keys_disc = ("out_1", "out_2", "out_3", "out_4", "out_5", "out_6", "out_7", "out_8", "out_9", "out_10")
>>> model = ppsci.arch.Discriminator(("in_1","in_2"), output_keys_disc, in_channel, out_channels, fc_channel, kernel_sizes, strides, use_bns, acts)
>>> input_data = [paddle.randn([1, in_channel, 128, 128]), paddle.randn([1, in_channel, 128, 128])]
>>> input_dict = {"in_1": input_data[0], "in_2": input_data[1]}
>>> out_dict = model(input_dict)
>>> for k, v in out_dict.items():
...     print(k, v.shape)
out_1 [1, 32, 64, 64]
out_2 [1, 64, 32, 32]
out_3 [1, 128, 16, 16]
out_4 [1, 256, 16, 16]
out_5 [1, 1]
out_6 [1, 32, 64, 64]
out_7 [1, 64, 32, 32]
out_8 [1, 128, 16, 16]
out_9 [1, 256, 16, 16]
out_10 [1, 1]
"""

def __init__(
9 changes: 9 additions & 0 deletions ppsci/arch/model_list.py
@@ -28,10 +28,19 @@ class ModelList(base.Arch):
model_list (Tuple[base.Arch, ...]): Model(s) nested in tuple.

Examples:
>>> import paddle
>>> import ppsci
>>> model1 = ppsci.arch.MLP(("x", "y"), ("u", "v"), 10, 128)
>>> model2 = ppsci.arch.MLP(("x", "y"), ("w", "p"), 5, 128)
>>> model = ppsci.arch.ModelList((model1, model2))
>>> input_dict = {"x": paddle.rand([64, 64, 1]),"y": paddle.rand([64, 64, 1])}
>>> output_dict = model(input_dict)
>>> for k, v in output_dict.items():
...     print(k, v.shape)
u [64, 64, 1]
v [64, 64, 1]
w [64, 64, 1]
p [64, 64, 1]
"""

def __init__(
5 changes: 5 additions & 0 deletions ppsci/arch/nowcastnet.py
@@ -36,6 +36,11 @@ class NowcastNet(base.Arch):
Examples:
>>> import paddle
>>> import ppsci
>>> model = ppsci.arch.NowcastNet(("input", ), ("output", ))
>>> input_data = paddle.rand([1, 9, 512, 512, 2])
>>> input_dict = {"input": input_data}
>>> output_dict = model(input_dict)
>>> print(output_dict["output"].shape)
[1, 20, 512, 512, 1]
"""

def __init__(
6 changes: 6 additions & 0 deletions ppsci/arch/physx_transformer.py
@@ -256,8 +256,14 @@ class PhysformerGPT2(base.Arch):
output data to the physical space. Defaults to None.

Examples:
>>> import paddle
>>> import ppsci
>>> model = ppsci.arch.PhysformerGPT2(("embeds", ), ("pred_embeds", ), 6, 16, 128, 4)
>>> data = paddle.randn([10, 16, 128])
>>> inputs = {"embeds": data}
>>> outputs = model(inputs)
>>> print(outputs["pred_embeds"].shape)
[10, 16, 128]
"""

def __init__(
22 changes: 21 additions & 1 deletion ppsci/autodiff/ad.py
@@ -283,6 +283,8 @@ def __call__(
>>> x.stop_gradient = False
>>> y = (x * x).sin()
>>> dy_dxx = ppsci.autodiff.hessian(y, x, component=0)
>>> print(dy_dxx.shape)
[4, 1]
"""
key = (ys, xs, component)
if key not in self.Hs:
@@ -299,6 +301,24 @@ def _clear(self):


def clear():
"""Clear cached Jacobians and Hessians."""
"""Clear cached Jacobians and Hessians.

Args:
None.

Returns:
None.

Examples:
>>> import paddle
>>> import ppsci
>>> x = paddle.randn([4, 3])
>>> x.stop_gradient = False
>>> y = (x * x).sin()
>>> dy_dxx = ppsci.autodiff.hessian(y, x, component=0)
>>> ppsci.autodiff.clear()
>>> print(ppsci.autodiff.hessian.Hs)
{}
"""
jacobian._clear()
hessian._clear()
16 changes: 16 additions & 0 deletions ppsci/utils/misc.py
@@ -303,6 +303,22 @@ def all_gather(

Returns:
Union[paddle.Tensor, List[paddle.Tensor]]: Gathered Tensors.

Examples:
>>> import paddle
>>> import ppsci
>>> import paddle.distributed as dist
>>> dist.init_parallel_env()  # doctest: +SKIP
>>> if dist.get_rank() == 0:  # doctest: +SKIP
...     data = paddle.to_tensor([[1, 2, 3], [4, 5, 6]])
... else:
...     data = paddle.to_tensor([[7, 8, 9], [10, 11, 12]])
>>> result = ppsci.utils.misc.all_gather(data)  # doctest: +SKIP
>>> print(result.numpy())  # doctest: +SKIP
[[ 1  2  3]
 [ 4  5  6]
 [ 7  8  9]
 [10 11 12]]
"""
result: List[paddle.Tensor] = []
