2 changes: 1 addition & 1 deletion ppsci/arch/__init__.py
@@ -18,7 +18,7 @@
from ppsci.arch.mlp import MLP
from ppsci.utils import logger

__all__ = ["MLP"]
__all__ = ["MLP", "build_model"]


def build_model(cfg):
2 changes: 1 addition & 1 deletion ppsci/arch/base.py
@@ -32,7 +32,7 @@ def __init__(self, *args, **kwargs):
self._output_transform = None

def forward(self, *args, **kwargs):
raise NotImplementedError(f"NetBase.forward is not implemented")
raise NotImplementedError("NetBase.forward is not implemented")

@property
def num_params(self):
4 changes: 2 additions & 2 deletions ppsci/arch/mlp.py
@@ -51,12 +51,12 @@ def __init__(
if isinstance(hidden_size, (tuple, list)):
if num_layers is not None:
raise ValueError(
f"num_layers should be None when hidden_size is specified"
"num_layers should be None when hidden_size is specified"
)
elif isinstance(hidden_size, int):
if not isinstance(num_layers, int):
raise ValueError(
f"num_layers should be an int when hidden_size is an int"
"num_layers should be an int when hidden_size is an int"
)
hidden_size = [hidden_size] * num_layers
else:
5 changes: 4 additions & 1 deletion ppsci/constraint/__init__.py
@@ -15,8 +15,8 @@

import copy

from ppsci.constraint.base import Constraint
from ppsci.constraint.boundary_constraint import BoundaryConstraint
from ppsci.constraint.boundary_constraint import SupervisedConstraint
from ppsci.constraint.initial_constraint import InitialConstraint
from ppsci.constraint.integral_constraint import IntegralConstraint
from ppsci.constraint.interior_constraint import InteriorConstraint
@@ -27,6 +27,7 @@
from ppsci.utils import misc

__all__ = [
"Constraint",
"BoundaryConstraint",
"InitialConstraint",
"IntegralConstraint",
@@ -47,6 +48,8 @@ def build_constraint(cfg, equation_dict, geom_dict):
Returns:
Dict[str, constraint]: Constraint(s) in dict.
"""
if cfg is None:
return None
cfg = copy.deepcopy(cfg)
global_dataloader_cfg = cfg["dataloader"]
constraint_cfg = cfg["content"]
4 changes: 1 addition & 3 deletions ppsci/constraint/base.py
@@ -13,13 +13,11 @@
limitations under the License.
"""

import numpy as np

from ppsci import data


class Constraint:
"""Base class for constraints"""
"""Base class for constraint"""

def __init__(self, dataset, dataloader_cfg, loss, name):
self.data_loader = data.build_dataloader(dataset, dataloader_cfg)
103 changes: 0 additions & 103 deletions ppsci/constraint/boundary_constraint.py
@@ -13,7 +13,6 @@
limitations under the License.
"""

import os.path as osp
import types

import numpy as np
@@ -23,7 +22,6 @@
from ppsci import geometry
from ppsci.constraint import base
from ppsci.data import dataset
from ppsci.utils import misc


class BoundaryConstraint(base.Constraint):
@@ -132,104 +130,3 @@ def __init__(
raise NotImplementedError(f"type of {type(value)} is invalid yet.")
_dataset = getattr(dataset, dataloader_cfg["dataset"])(input, label, weight)
super().__init__(_dataset, dataloader_cfg, loss, name)


class SupervisedConstraint(base.Constraint):
"""Class for supervised constraint.

Args:
label_expr (Dict[str, sympy.Basic]): Expression of how to compute label.
data_file (Dict[str, Union[float, sympy.Basic]]): Path of data file.
input_keys (List[str]): List of input keys.
dataloader_cfg (AttrDict): Config of building a dataloader.
loss (LossBase): Loss object.
weight_dict (Dict[str, Union[float, sympy.Basic]], optional): Weight for label
if specified. Defaults to None.
name (str, optional): Name of constraint object. Defaults to "SupBC".
"""

def __init__(
self,
data_file,
input_keys,
label_keys,
alias_dict,
dataloader_cfg,
loss,
weight_dict=None,
timestamps=None,
name="SupBC",
):
self.input_keys = [
alias_dict[key] if key in alias_dict else key for key in input_keys
]
self.output_keys = [
alias_dict[key] if key in alias_dict else key for key in label_keys
]
if data_file.endswith(".csv"):
# load data
data = misc.load_csv_file(data_file, input_keys + label_keys, alias_dict)
if "t" not in data and timestamps is None:
raise ValueError(f"must given time data from t0 or data itself.")
if timestamps is not None:
if "t" in data:
raw_time_array = data["t"]
mask = np.zeros((len(raw_time_array),), "bool")
for ti in timestamps:
mask |= np.isclose(raw_time_array, ti).flatten()
data = misc.convert_to_array(
data, self.input_keys + self.output_keys
)
data = data[mask]
data = misc.convert_to_dict(
data, self.input_keys + self.output_keys
)
else:
data = misc.convert_to_array(
data, self.input_keys + self.output_keys
)
data = misc.combine_array_with_time(data, timestamps)
self.input_keys = ["t"] + self.input_keys
data = misc.convert_to_dict(
data, self.input_keys + self.output_keys
)
input = {key: data[key] for key in self.input_keys}
label = {key: data[key] for key in self.output_keys}
self.num_timestamp = len(timestamps)
else:
# time already in data and "t" in input_keys
input = {key: data[key] for key in self.input_keys}
label = {key: data[key] for key in self.output_keys}
self.num_timestamp = len(np.unique(data["t"]))

self.label_expr = {key: (lambda d, k=key: d[k]) for key in self.output_keys}
else:
raise NotImplementedError(f"Only suppport .csv file now.")

weight = {key: np.ones_like(next(iter(label.values()))) for key in label}
if weight_dict is not None:
for key, value in weight_dict.items():
if isinstance(value, str):
value = sp_parser.parse_expr(value)

if isinstance(value, (int, float)):
weight[key] = np.full_like(next(iter(label.values())), float(value))
elif isinstance(value, sympy.Basic):
func = sympy.lambdify(
[sympy.Symbol(k) for k in self.input_keys],
value,
[{"amax": lambda xy, _: np.maximum(xy[0], xy[1])}, "numpy"],
)
weight[key] = func(**{k: input[k] for k in self.input_keys})
elif isinstance(value, types.FunctionType):
func = value
weight[key] = func(input)
if isinstance(weight[key], (int, float)):
weight[key] = np.full_like(
next(iter(input.values())), float(weight[key])
)
else:
raise NotImplementedError(f"type of {type(value)} is invalid yet.")
_dataset = getattr(dataset, dataloader_cfg["dataset"])(input, label, weight)

super().__init__(_dataset, dataloader_cfg, loss, name)
9 changes: 4 additions & 5 deletions ppsci/constraint/supervised_constraint.py
@@ -13,7 +13,6 @@
limitations under the License.
"""

import os.path as osp
import types

import numpy as np
@@ -61,7 +60,7 @@ def __init__(
# load data
data = self._load_csv_file(data_file, input_keys + label_keys, alias_dict)
if "t" not in data and timestamps is None:
raise ValueError(f"must given time data from t0 or data itself.")
raise ValueError("Time should be given by arg t0 or data itself.")
if timestamps is not None:
if "t" in data:
raw_time_array = data["t"]
@@ -95,7 +94,7 @@ def __init__(

self.label_expr = {key: (lambda d, k=key: d[k]) for key in self.output_keys}
else:
raise NotImplementedError(f"Only suppport .csv file now.")
raise NotImplementedError("Only suppport .csv file now.")

weight = {key: np.ones_like(next(iter(label.values()))) for key in label}
if weight_dict is not None:
@@ -162,7 +161,7 @@ def __init__(
# load data
data = misc.load_csv_file(data_file, input_keys + label_keys, alias_dict)
if "t" not in data and t0 is None:
raise ValueError(f"must given time data from t0 or data itself.")
raise ValueError("Time should be given by arg t0 or data itself.")
if t0 is not None:
data = misc.convert_to_array(data, self.input_keys + self.output_keys)
data = misc.combine_array_with_time(data, [t0])
@@ -178,7 +177,7 @@
self.label_expr = {key: (lambda d, k=key: d[k]) for key in self.output_keys}
self.num_timestamp = 1
else:
raise NotImplementedError(f"Only suppport .csv file now.")
raise NotImplementedError("Only suppport .csv file now.")

weight = {key: np.ones_like(next(iter(label.values()))) for key in label}
if weight_dict is not None:
4 changes: 3 additions & 1 deletion ppsci/equation/__init__.py
@@ -22,7 +22,7 @@
from ppsci.utils import logger
from ppsci.utils import misc

__all__ = ["PDE", "NavierStokes", "Poisson", "NormalDotVec"]
__all__ = ["build_equation", "PDE", "NavierStokes", "Poisson", "NormalDotVec"]


def build_equation(cfg):
@@ -34,6 +34,8 @@ def build_equation(cfg):
Returns:
Dict[str, Equation]: Equation(s) in dict.
"""
if cfg is None:
return None
cfg = copy.deepcopy(cfg)
eq_dict = misc.PrettyOrderedDict()
for _item in cfg:
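A small usage sketch (assumed, not part of this diff) of the new None guard, which lets a solver config omit the equation section entirely; the same guard is added to build_constraint above and build_geometry below:

```python
from ppsci import equation  # assumed top-level import path

# When the config has no "equation" section, the builder now returns
# None instead of failing on a missing key.
assert equation.build_equation(None) is None
```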
14 changes: 12 additions & 2 deletions ppsci/equation/pde/base.py
@@ -13,10 +13,9 @@
limitations under the License.
"""

import paddle.nn as nn
import sympy

# from sympy import printing


class PDE(object):
"""Base class for Partial Differential Equation"""
@@ -25,6 +24,9 @@ def __init__(self):
super().__init__()
self.equations = {}

# for PDE which has learnable parameter(s)
self.learnable_parameters = nn.ParameterList()

def create_symbols(self, symbol_str):
"""Create symbols

@@ -51,6 +53,14 @@ def create_function(self, name, invars):
def add_equation(self, name, equation):
self.equations.update({name: equation})

def parameters(self):
"""Return parameters contained in PDE.

Returns:
List[Tensor]: A list of parameters.
"""
return self.learnable_parameters.parameters()

def __str__(self):
return ", ".join(
[self.__class__.__name__]
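For context, a minimal sketch (not part of this diff) of how the new `learnable_parameters` list and `parameters()` method could be used by a PDE subclass in an inverse problem; the subclass name and the initial value below are hypothetical illustrations:

```python
import paddle
import paddle.nn as nn

from ppsci.equation.pde.base import PDE  # assumed import path for the base class


class InversePDE(PDE):  # hypothetical subclass, for illustration only
    def __init__(self):
        super().__init__()
        # Register an unknown coefficient so it is optimized jointly with the network.
        nu = paddle.create_parameter(
            shape=[1],
            dtype="float32",
            default_initializer=nn.initializer.Constant(0.01),
        )
        self.learnable_parameters.append(nu)


pde = InversePDE()
# pde.parameters() returns the registered tensors, so they can be handed to an
# optimizer together with model.parameters().
print(len(pde.parameters()))  # 1
```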
4 changes: 2 additions & 2 deletions ppsci/equation/pde/navier_stokes.py
@@ -94,8 +94,8 @@ def momentum_y_compute_func(out):
if self.dim == 3:

def momentum_z_compute_func(out):
x, y = out["x"], out["y"]
u, v, p = out["u"], out["v"], out["p"]
x, y, z = out["x"], out["y"], out["z"]
u, v, w, p = out["u"], out["v"], out["w"], out["p"]
momentum_z = (
u * jacobian(w, x)
+ v * jacobian(w, y)
3 changes: 3 additions & 0 deletions ppsci/geometry/__init__.py
@@ -33,6 +33,7 @@
from ppsci.utils import misc

__all__ = [
"build_geometry",
"Cuboid",
"Disk",
"Geometry",
@@ -59,6 +60,8 @@ def build_geometry(cfg):
Returns:
Dict[str, Geometry]: Geometry(ies) in dict.
"""
if cfg is None:
return None
cfg = copy.deepcopy(cfg)

geom_dict = misc.PrettyOrderedDict()
4 changes: 2 additions & 2 deletions ppsci/geometry/geometry.py
@@ -78,7 +78,7 @@ def sample_interior(self, n, random="pseudo", criteria=None, evenly=False):
_nsuc += 1

if _ntry >= 1000 and _nsuc == 0:
raise RuntimeError(f"sample interior failed")
raise RuntimeError("sample interior failed")
return misc.convert_to_dict(x, self.dim_keys)

def sample_boundary(self, n, random="pseudo", criteria=None, evenly=False):
@@ -117,7 +117,7 @@ def sample_boundary(self, n, random="pseudo", criteria=None, evenly=False):
_nsuc += 1

if _ntry >= 1000 and _nsuc == 0:
raise RuntimeError(f"sample boundary failed")
raise RuntimeError("sample boundary failed")

if not (
misc.typename(self) == "TimeXGeometry"
6 changes: 3 additions & 3 deletions ppsci/geometry/mesh.py
@@ -270,7 +270,7 @@ def sample_boundary(
while _size < n:
if evenly:
raise ValueError(
f"Can't sample evenly on mesh now, please set evenly=False."
"Can't sample evenly on mesh now, please set evenly=False."
)
# points, normal, area = self.uniform_boundary_points(n, False)
else:
@@ -290,7 +290,7 @@
_nsuc += 1

if _ntry >= 1000 and _nsuc == 0:
raise RuntimeError(f"sample boundary failed")
raise RuntimeError("sample boundary failed")

normal_dict = misc.convert_to_dict(
normal, [f"normal_{key}" for key in self.dim_keys if key != "t"]
@@ -348,7 +348,7 @@ def sample_interior(self, n, random="pseudo", criteria=None, evenly=False):
_nsuc += 1

if _ntry >= 1000 and _nsuc == 0:
raise RuntimeError(f"sample interior failed")
raise RuntimeError("sample interior failed")

x_dict = misc.convert_to_dict(x, self.dim_keys)

Expand Down
Loading