12 changes: 12 additions & 0 deletions docs/zh/examples/darcy2d.md
@@ -14,6 +14,18 @@
python darcy2d.py mode=eval EVAL.pretrained_model_path=https://paddle-org.bj.bcebos.com/paddlescience/models/darcy2d/darcy2d_pretrained.pdparams
```

=== "模型导出命令"

``` sh
python darcy2d.py mode=export
```

=== "模型推理命令"

``` sh
python darcy2d.py mode=infer
```

| Pretrained model | Metrics |
|:--| :--|
| [darcy2d_pretrained.pdparams](https://paddle-org.bj.bcebos.com/paddlescience/models/darcy2d/darcy2d_pretrained.pdparams) | loss(Residual): 0.36500<br>MSE.poisson(Residual): 0.00006 |
18 changes: 18 additions & 0 deletions examples/darcy/conf/darcy2d.yaml
@@ -23,6 +23,7 @@ hydra:
mode: train # running mode: train/eval/export/infer
seed: 42
output_dir: ${hydra:run.dir}
log_freq: 20

# set working condition
NPOINT_PDE: 9801 # 99 ** 2
@@ -62,3 +63,20 @@ EVAL:
batch_size:
residual_validator: 8192
pretrained_model_path: null

INFER:
pretrained_model_path: https://paddle-org.bj.bcebos.com/paddlescience/models/darcy2d/darcy2d_pretrained.pdparams
export_path: ./inference/darcy2d
pdmodel_path: ${INFER.export_path}.pdmodel
pdiparams_path: ${INFER.export_path}.pdiparams
onnx_path: ${INFER.export_path}.onnx
device: gpu
engine: native
precision: fp32
ir_optim: true
min_subgraph_size: 5
gpu_mem: 2000
gpu_id: 0
max_batch_size: 8192
num_cpu_threads: 10
batch_size: 8192
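
Not part of this change, but as a quick sanity check of the interpolated paths above, the config can be loaded with OmegaConf directly (a minimal sketch, assuming the file path shown in the diff header):

``` py
from omegaconf import OmegaConf

# load the plain YAML; only the INFER keys are inspected here
cfg = OmegaConf.load("examples/darcy/conf/darcy2d.yaml")

# accessing a key resolves ${INFER.export_path} against the root config
print(cfg.INFER.pdmodel_path)    # ./inference/darcy2d.pdmodel
print(cfg.INFER.pdiparams_path)  # ./inference/darcy2d.pdiparams
```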
54 changes: 53 additions & 1 deletion examples/darcy/darcy2d.py
@@ -296,14 +296,66 @@ def poisson_ref_compute_func(_in):
solver.visualize()


def export(cfg: DictConfig):
# set model
model = ppsci.arch.MLP(**cfg.MODEL)

# initialize solver
solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
)
# export model
from paddle.static import InputSpec

input_spec = [
{key: InputSpec([None, 1], "float32", name=key) for key in model.input_keys},
]
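    # each input key (e.g. spatial coordinates) is exported as a (batch, 1) float32
    # tensor; None keeps the batch dimension dynamic so the exported model accepts
    # an arbitrary number of sample points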

solver.export(input_spec, cfg.INFER.export_path)
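    # NOTE (editorial assumption, not from the PR): the exported static-graph files
    # are expected at cfg.INFER.export_path + ".pdmodel"/".pdiparams", matching
    # INFER.pdmodel_path and INFER.pdiparams_path in conf/darcy2d.yaml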


def inference(cfg: DictConfig):
from deploy.python_infer import pinn_predictor

predictor = pinn_predictor.PINNPredictor(cfg)
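    # runtime settings (device, engine, precision, batch size, ...) are presumably
    # taken from the INFER section of the config passed in above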

# set geometry
geom = {"rect": ppsci.geometry.Rectangle((0.0, 0.0), (1.0, 1.0))}
    # manually collate input data for inference and visualization
input_dict = geom["rect"].sample_interior(
cfg.NPOINT_PDE + cfg.NPOINT_BC, evenly=True
)
output_dict = predictor.predict(
{key: input_dict[key] for key in cfg.MODEL.input_keys}, cfg.INFER.batch_size
)
    # map raw predictor outputs back to cfg.MODEL.output_keys
output_dict = {
store_key: output_dict[infer_key]
for store_key, infer_key in zip(cfg.MODEL.output_keys, output_dict.keys())
}
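    # write the sampled inputs and the predictions to a VTU file (e.g. for ParaView)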
ppsci.visualize.save_vtu_from_dict(
"./visual/darcy2d.vtu",
{**input_dict, **output_dict},
input_dict.keys(),
cfg.MODEL.output_keys,
)


@hydra.main(version_base=None, config_path="./conf", config_name="darcy2d.yaml")
def main(cfg: DictConfig):
if cfg.mode == "train":
train(cfg)
elif cfg.mode == "eval":
evaluate(cfg)
elif cfg.mode == "export":
export(cfg)
elif cfg.mode == "infer":
inference(cfg)
    else:
        raise ValueError(
            f"cfg.mode should be in ['train', 'eval', 'export', 'infer'], but got '{cfg.mode}'"
        )


if __name__ == "__main__":