From 2d9573773763f0d6a8a626fbebb484b47abe270d Mon Sep 17 00:00:00 2001 From: PuQing Date: Wed, 9 Aug 2023 15:21:28 +0000 Subject: [PATCH 01/48] add sympy2func --- ppsci/equation/sympy2func.py | 252 +++++++++++++++++++++++++++++++++++ 1 file changed, 252 insertions(+) create mode 100644 ppsci/equation/sympy2func.py diff --git a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py new file mode 100644 index 000000000..bfd0fdb74 --- /dev/null +++ b/ppsci/equation/sympy2func.py @@ -0,0 +1,252 @@ +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Dict +from typing import List +from typing import Tuple + +import numpy as np +import paddle +import paddle.nn as nn +import sympy + +from ppsci.autodiff import hessian +from ppsci.autodiff import jacobian +from ppsci.utils import logger + +func_map = { + sympy.sin: paddle.sin, + sympy.cos: paddle.cos, + sympy.exp: paddle.exp, + sympy.Pow: paddle.pow, + sympy.sqrt: paddle.sqrt, + sympy.log: paddle.log, + sympy.tan: paddle.tan, + sympy.Mul: paddle.multiply, +} + +constant_map = { + sympy.pi: np.pi, + sympy.E: np.e, +} + + +class FuncNode(nn.Layer): + """ + A node representing a function in the computational graph + + Args: + fun (paddle.nn.Layer): the function + args (List[paddle.nn.Layer]): the arguments of the function + + Returns: + the result of the function + """ + + def __init__(self, fun, *args): + super().__init__() + self.fun = fun + self.args = args + + def forward(self, x): + return self.fun(*[arg(x) for arg in self.args]) + + +class AddNode(nn.Layer): + """ + A node representing a sum in the computational graph + + Args: + args (List[paddle.nn.Layer]): the arguments of the sum + + Returns: + the result of the sum + """ + + def __init__(self, *args): + super().__init__() + self.args = args + + def forward(self, x): + return paddle.add_n([arg(x) for arg in self.args]) + + +class SymbolNode(nn.Layer): + """ + A node representing a symbol in the computational graph + + Args: + sym (sympy.Symbol): the symbol + + Returns: + the value of the symbol + """ + + def __init__(self, sym: sympy.Symbol): + super().__init__() + self.sym = sym + + def forward(self, x: Dict): + if self.sym in x.keys(): + return x[self.sym] + else: + raise KeyError(f"Symbol {self.sym} not in {x.keys()}") + + +class NumberNode(nn.Layer): + """ + A node representing a number in the computational graph + + Args: + num (sympy.Number): the number + + Returns: + the value of the number + """ + + def __init__(self, num): + super().__init__() + assert isinstance(num, sympy.Number) + if num in constant_map.keys(): + num = constant_map[num] + else: + num = float(num) + self.num = num + + def forward(self, x): + return paddle.to_tensor(self.num, dtype="float32") + + +class DerivativeNode(nn.Layer): + """ + A node representing a derivative in the computational graph + + Args: + expr (sympy.Expr): the expression to be derived + syms (List[Tuple[sympy.Symbol, int]]): the symbols to be derived and their orders 
+ + Returns: + the value of the derivative + """ + + def __init__(self, expr: sympy.Expr, syms: List[Tuple[sympy.Symbol, int]]): + super().__init__() + self.expr = expr + self.syms = syms + + def forward(self, x): + x_value = self.expr(x) + for sym, order in self.syms: + sym_value = sym(x) + if order == 1: + x_value = jacobian(x_value, sym_value) + elif order == 2: + x_value = hessian(x_value, sym_value) + else: + raise NotImplementedError( + f"Higher order derivatives are not implemented yet, got {order}" + ) + return x_value + + +class ExtraFuncNode(nn.Layer): + """ + A node representing a extra function in the computational graph + + Args: + fun (sympy.Function): the function + args (List[paddle.nn.Layer]): the arguments of the function + + Returns: + the result of the function + + Note: + This is used to handle the case where the function is a neural network + + Examples: + >>> x, y = sympy.symbols("x y") + >>> u = sympy.Function("u")(x, y) + >>> fun = sympy.Derivative(u, x, y) + >>> fun = sympy_to_function(fun) + >>> fun({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) + + Other cases: + + >>> x, y = sympy.symbols("x y") + >>> u = sympy.Function("u")(x, y) + >>> v = sympy.Function("v")(x, y) + >>> fun = sympy.Derivative(u, x, y) + sympy.Derivative(v, x, y) + >>> fun = sympy_to_function(fun) + >>> fun({u: (model, 0), v: (model, 1), x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) + """ + + def __init__(self, fun: sympy.Function, *args): + super().__init__() + assert isinstance(fun, sympy.Function) + self.fun = fun + self.args = args + + def forward(self, x: Dict): + model = x[self.fun] + if isinstance(model, tuple): + model, pos = model + return model(*[arg(x) for arg in self.args])[ + pos + ] # TODO(PuQing): lazy computing for model, avoid multiple computing + return model(*[arg(x) for arg in self.args]) + + +def process_sympy_expression(expr: sympy.Expr): + if expr.is_Symbol: + return SymbolNode(expr) + elif expr.is_Function or expr.is_Pow or expr.is_Mul: + args = [process_sympy_expression(arg) for arg in expr.args] + try: + paddle_func = func_map[expr.func] + return FuncNode(paddle_func, *args) + except KeyError: + logger.warning( + f"Note that you appear to be using a non-built-in function {expr}, please pass in that when you call the function" + ) + elif expr.is_Add: + args = [process_sympy_expression(arg) for arg in expr.args] + return AddNode(*args) + elif expr.is_Number: + return NumberNode(expr) + elif expr.is_Derivative: + expr = process_sympy_expression(expr.args[0]) + syms = [(process_sympy_expression(sym), order) for sym, order in expr.args[1:]] + return DerivativeNode(expr, syms) + else: + raise NotImplementedError(f"Unknown type {expr}") + + +def sympy_to_function(expr: sympy.Expr): + """ + Convert a sympy expression to a function that can be used in paddle + + Args: + expr (sympy.Expr): the sympy expression + + Returns: + a function that can be used in paddle + + Examples: + >>> x = sympy.Symbol("x") + >>> expr = sympy.sin(x) + >>> func = sympy_to_function(expr) + >>> func({"x": paddle.to_tensor(0.5)}) + Tensor(shape=[1], dtype=float32, place=CPUPlace, stop_gradient=True, + [0.47942555]) + """ + return process_sympy_expression(sympy.expand(expr)) From f8bb79510778fc1da50850d131455a97e57c798f Mon Sep 17 00:00:00 2001 From: PuQing Date: Thu, 10 Aug 2023 14:33:56 +0000 Subject: [PATCH 02/48] fix node --- ppsci/equation/sympy2func.py | 176 +++++++++++++++++++---------------- 1 file changed, 98 insertions(+), 78 deletions(-) diff --git 
a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py index bfd0fdb74..6a713b05f 100644 --- a/ppsci/equation/sympy2func.py +++ b/ppsci/equation/sympy2func.py @@ -11,11 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from typing import Dict from typing import List from typing import Tuple -import numpy as np import paddle import paddle.nn as nn import sympy @@ -24,7 +24,7 @@ from ppsci.autodiff import jacobian from ppsci.utils import logger -func_map = { +FUNC_MAP = { sympy.sin: paddle.sin, sympy.cos: paddle.cos, sympy.exp: paddle.exp, @@ -33,74 +33,77 @@ sympy.log: paddle.log, sympy.tan: paddle.tan, sympy.Mul: paddle.multiply, -} - -constant_map = { - sympy.pi: np.pi, - sympy.E: np.e, + sympy.Add: paddle.add_n, } class FuncNode(nn.Layer): """ - A node representing a function in the computational graph + A node representing a paddle function in the computational graph. Args: - fun (paddle.nn.Layer): the function - args (List[paddle.nn.Layer]): the arguments of the function + func (nn.Layer): The function to be applied. + *args (nn.Layer): The arguments of the function. Returns: - the result of the function + The result of applying the function to the arguments. + + Examples: + >>> x = sympy.Symbol("x") + >>> node = FuncNode(paddle.sin, SymbolNode(x)) + >>> node({x: paddle.to_tensor(0.5)}) + Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 0.47942555) """ - def __init__(self, fun, *args): + def __init__(self, func, *args): super().__init__() - self.fun = fun + self.func = func self.args = args def forward(self, x): - return self.fun(*[arg(x) for arg in self.args]) + if self.func == paddle.add_n: + return self.func([arg(x) for arg in self.args]) + return self.func(*[arg(x) for arg in self.args]) -class AddNode(nn.Layer): +class SymbolNode(nn.Layer): """ - A node representing a sum in the computational graph + A node retrieves the value of a symbol from the provided dictionary. Args: - args (List[paddle.nn.Layer]): the arguments of the sum + symbol (sympy.Symbol): The symbol to be represent in the graph Returns: - the result of the sum - """ + The value of the symbol - def __init__(self, *args): - super().__init__() - self.args = args - - def forward(self, x): - return paddle.add_n([arg(x) for arg in self.args]) - - -class SymbolNode(nn.Layer): - """ - A node representing a symbol in the computational graph + Examples: + >>> x = sympy.Symbol("x") + >>> node = SymbolNode(x) + >>> node({x: paddle.to_tensor(0.5)}) + Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 0.50000000) - Args: - sym (sympy.Symbol): the symbol + Or you can use the name of the symbol - Returns: - the value of the symbol + >>> x = sympy.Symbol("x") + >>> node = SymbolNode(x) + >>> node({"x": paddle.to_tensor(0.5)}) + Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 0.50000000) """ - def __init__(self, sym: sympy.Symbol): + def __init__(self, symbol: sympy.Symbol): super().__init__() - self.sym = sym + self.symbol = symbol def forward(self, x: Dict): - if self.sym in x.keys(): - return x[self.sym] - else: - raise KeyError(f"Symbol {self.sym} not in {x.keys()}") + value = x.get(self.symbol, x.get(self.symbol.name)) + if value is None: + raise ValueError( + f"Symbol {self.symbol} not in provided dictionary {list(x.keys())}!" 
+ ) + return value class NumberNode(nn.Layer): @@ -108,23 +111,24 @@ class NumberNode(nn.Layer): A node representing a number in the computational graph Args: - num (sympy.Number): the number + number (sympy.Number): the number Returns: the value of the number + + Examples: + >>> node = NumberNode(sympy.pi) + >>> node({}) + Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 3.1415927) """ - def __init__(self, num): + def __init__(self, number: sympy.Number): super().__init__() - assert isinstance(num, sympy.Number) - if num in constant_map.keys(): - num = constant_map[num] - else: - num = float(num) - self.num = num + self.number = float(number) def forward(self, x): - return paddle.to_tensor(self.num, dtype="float32") + return paddle.to_tensor(self.number, dtype=paddle.get_default_dtype()) class DerivativeNode(nn.Layer): @@ -159,68 +163,84 @@ def forward(self, x): return x_value -class ExtraFuncNode(nn.Layer): +class LayerNode(nn.Layer): """ - A node representing a extra function in the computational graph + A node representing a neural network in the computational graph Args: - fun (sympy.Function): the function - args (List[paddle.nn.Layer]): the arguments of the function + func (sympy.Function): the neural network represented by a sympy function + *args (SymbolNode): the arguments of the function Returns: - the result of the function + the output of the neural network Note: - This is used to handle the case where the function is a neural network + For a multi-output model, only one symbol can be provided in the input dictionary, Examples: + Single output case: >>> x, y = sympy.symbols("x y") >>> u = sympy.Function("u")(x, y) - >>> fun = sympy.Derivative(u, x, y) - >>> fun = sympy_to_function(fun) - >>> fun({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) - - Other cases: + >>> func = sympy.Derivative(u, x, y) + >>> func = sympy_to_function(func) + >>> func({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) + Multi-output case: >>> x, y = sympy.symbols("x y") >>> u = sympy.Function("u")(x, y) >>> v = sympy.Function("v")(x, y) - >>> fun = sympy.Derivative(u, x, y) + sympy.Derivative(v, x, y) - >>> fun = sympy_to_function(fun) - >>> fun({u: (model, 0), v: (model, 1), x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) + >>> func = sympy.Derivative(u, x, y) + sympy.Derivative(v, x, y) + >>> func = sympy_to_function(func) + >>> func({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) # The model should have output_keys = ["u", "v"] """ - def __init__(self, fun: sympy.Function, *args): + _MODEL_OUTPUT_CACHE: Dict[str, paddle.Tensor] = {} + + def __init__(self, func: sympy.Function, *args: SymbolNode): super().__init__() - assert isinstance(fun, sympy.Function) - self.fun = fun + assert isinstance(func, sympy.Function) + self.func = func self.args = args def forward(self, x: Dict): - model = x[self.fun] - if isinstance(model, tuple): - model, pos = model - return model(*[arg(x) for arg in self.args])[ - pos - ] # TODO(PuQing): lazy computing for model, avoid multiple computing - return model(*[arg(x) for arg in self.args]) + # check if the model output is in the cache + model_output = self._MODEL_OUTPUT_CACHE.get(self.func.name) + + if model_output is None: + # Find which model provides the symbol value + for model in x.values(): + if hasattr(model, "output_keys"): + output_keys: Dict = model.output_keys + if self.func.name in output_keys: + model_output_dict: Dict = model( + {arg.symbol.name: arg(x) for arg in self.args} + ) + 
for key in output_keys: + self._MODEL_OUTPUT_CACHE[key] = model_output_dict[key] + break + else: # when no model provides the symbol value + raise ValueError( + f"Model {self.func.name} not in provided dictionary {list(x.keys())}!" + ) + + output = self._MODEL_OUTPUT_CACHE[self.func.name] + self._MODEL_OUTPUT_CACHE[self.func.name] = None + return output def process_sympy_expression(expr: sympy.Expr): if expr.is_Symbol: return SymbolNode(expr) - elif expr.is_Function or expr.is_Pow or expr.is_Mul: + elif expr.is_Function or expr.is_Pow or expr.is_Mul or expr.is_Add: args = [process_sympy_expression(arg) for arg in expr.args] try: - paddle_func = func_map[expr.func] + paddle_func = FUNC_MAP[expr.func] return FuncNode(paddle_func, *args) except KeyError: logger.warning( f"Note that you appear to be using a non-built-in function {expr}, please pass in that when you call the function" ) - elif expr.is_Add: - args = [process_sympy_expression(arg) for arg in expr.args] - return AddNode(*args) + return LayerNode(expr, *args) elif expr.is_Number: return NumberNode(expr) elif expr.is_Derivative: From b31a785dbf5a5f469de9393e2e31ce7dd205c3fb Mon Sep 17 00:00:00 2001 From: PuQing Date: Thu, 10 Aug 2023 14:57:10 +0000 Subject: [PATCH 03/48] fix var name --- ppsci/equation/sympy2func.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py index 6a713b05f..c69f12818 100644 --- a/ppsci/equation/sympy2func.py +++ b/ppsci/equation/sympy2func.py @@ -244,9 +244,9 @@ def process_sympy_expression(expr: sympy.Expr): elif expr.is_Number: return NumberNode(expr) elif expr.is_Derivative: - expr = process_sympy_expression(expr.args[0]) + model = process_sympy_expression(expr.args[0]) syms = [(process_sympy_expression(sym), order) for sym, order in expr.args[1:]] - return DerivativeNode(expr, syms) + return DerivativeNode(model, syms) else: raise NotImplementedError(f"Unknown type {expr}") From d9d30d126648b7a08c88bb910e881e1308f6607e Mon Sep 17 00:00:00 2001 From: PuQing Date: Sun, 13 Aug 2023 07:59:40 +0000 Subject: [PATCH 04/48] fix implementation --- ppsci/equation/sympy2func.py | 512 ++++++++++++++++++++++++----------- 1 file changed, 347 insertions(+), 165 deletions(-) diff --git a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py index c69f12818..b4eb0752f 100644 --- a/ppsci/equation/sympy2func.py +++ b/ppsci/equation/sympy2func.py @@ -14,7 +14,6 @@ from typing import Dict from typing import List -from typing import Tuple import paddle import paddle.nn as nn @@ -32,241 +31,424 @@ sympy.sqrt: paddle.sqrt, sympy.log: paddle.log, sympy.tan: paddle.tan, - sympy.Mul: paddle.multiply, - sympy.Add: paddle.add_n, + sympy.Max: paddle.maximum, + sympy.Min: paddle.minimum, + sympy.Abs: paddle.abs, } -class FuncNode(nn.Layer): +class NodeBase(nn.Layer): """ - A node representing a paddle function in the computational graph. + The base class of the node in the computational graph. Args: - func (nn.Layer): The function to be applied. - *args (nn.Layer): The arguments of the function. + expr (sympy.Expr): The expression of the node. Returns: - The result of applying the function to the arguments. - - Examples: - >>> x = sympy.Symbol("x") - >>> node = FuncNode(paddle.sin, SymbolNode(x)) - >>> node({x: paddle.to_tensor(0.5)}) - Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 0.47942555) + The input dictionary with the output of the node added. 
""" - def __init__(self, func, *args): + def __init__(self, expr: sympy.Expr): super().__init__() - self.func = func - self.args = args + self.expr = expr + + def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer] = None): + raise NotImplementedError - def forward(self, x): - if self.func == paddle.add_n: - return self.func([arg(x) for arg in self.args]) - return self.func(*[arg(x) for arg in self.args]) + def __repr__(self): + return ( + self.__class__.__name__ + f"(expr: {self.expr}), type: {type(self.expr)})" + ) -class SymbolNode(nn.Layer): +class OperatorNode(NodeBase): """ - A node retrieves the value of a symbol from the provided dictionary. + A node representing a sympy operator in the computational graph. + + (e.g. sin, cos, etc.) Args: - symbol (sympy.Symbol): The symbol to be represent in the graph + expr (sympy.Expr): The expression of the node. Returns: - The value of the symbol + The input dictionary with the output of the operator added. Examples: >>> x = sympy.Symbol("x") - >>> node = SymbolNode(x) - >>> node({x: paddle.to_tensor(0.5)}) - Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 0.50000000) + >>> node = OperatorNode(sympy.sin(x)) + >>> node({"x": paddle.to_tensor(np.random.randn(1, 1))}) + {'x': Tensor(shape=[1, 1], dtype=float64, place=Place(gpu:0), stop_gradient=True, + [[-0.49221350]]), + 'sin(x)': Tensor(shape=[1, 1], dtype=float64, place=Place(gpu:0), stop_gradient=True, + [[-0.47257778]])} + """ - Or you can use the name of the symbol + def __init__( + self, expr: sympy.Function or sympy.Add or sympy.Mul or sympy.Derivative + ): + super().__init__(expr) - >>> x = sympy.Symbol("x") - >>> node = SymbolNode(x) - >>> node({"x": paddle.to_tensor(0.5)}) - Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 0.50000000) + def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer] = None): + expr_str = str(self.expr) + if self.expr.func == sympy.Add: + inputs_dict[expr_str] = paddle.add_n( + [inputs_dict[str(arg)] for arg in self.expr.args] + ) + elif self.expr.func == sympy.Mul: + inputs_dict[expr_str] = paddle.to_tensor( + 1.0, dtype=paddle.get_default_dtype() + ) + for arg in self.expr.args: + inputs_dict[expr_str] *= inputs_dict[str(arg)] + elif self.expr.func == sympy.Derivative: + inputs_dict[expr_str] = inputs_dict[ + str(self.expr.args[0]) + ] # initialize the derivative + symbols = self.expr.args[1:] + for symbol, order in symbols: + expr_tensor = inputs_dict[expr_str] + symbol_tensor = inputs_dict[str(symbol)] + if order == 1: + inputs_dict[expr_str] = jacobian(expr_tensor, symbol_tensor) + elif order == 2: + inputs_dict[expr_str] = hessian(expr_tensor, symbol_tensor) + else: + logger.warning( + f"The order {order} of the derivative is not supported, the order should be 1 or 2." + ) + else: + try: + inputs_dict[expr_str] = FUNC_MAP[self.expr.func]( + *[inputs_dict[str(arg)] for arg in self.expr.args] + ) + except KeyError: + logger.warning( + f"The operator {self.expr.func} is not supported, please add it to FUNC_MAP." + ) + return inputs_dict + + +class LayerNode(NodeBase): """ + A node representing a neural network in the computational graph - def __init__(self, symbol: sympy.Symbol): - super().__init__() - self.symbol = symbol + Args: + expr (sympy.core.function.UndefinedFunction): Definition symbol of the neural network. 
- def forward(self, x: Dict): - value = x.get(self.symbol, x.get(self.symbol.name)) - if value is None: + Returns: + The input dictionary with the output of the neural network added. + + Note: + For the provided network, the forward should accept a dictionary as input and return a dictionary as output. + And the `output_keys` should be provided in the `__init__` function. + + Examples: + Single output case: + >>> x, y = sympy.symbols("x y") + >>> u = sympy.Function("u")(x, y) + >>> func = u.diff(x) + u.diff(y) + >>> node = LayerNode(func) + >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> class MyLayer(nn.Layer): + >>> def __init__(self): + >>> super(MyLayer, self).__init__() + >>> self.output_keys = ["u"] + >>> def forward(self, x): + >>> x, y = x["x"], x["y"] + >>> u = paddle.cos(y * x) + >>> return {"u": u} + >>> node(inputs_dict={"x": x, "y": y}, model_dict={f"u": MyLayer()}) + {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.20314099]]), + 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.95114714]]), + 'u(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.98139161]]), + 'Derivative(u(x, y), x)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.18263641]])} + + Multi-output case: + >>> x, y = sympy.symbols("x y") + >>> u = sympy.Function("u")(x, y) + >>> v = sympy.Function("v")(x, y) + >>> func = u.diff(x) + u.diff(y) + >>> node = LayerNode(func) + >>> class MyLayer(nn.Layer): + >>> def __init__(self): + >>> super(MyLayer, self).__init__() + >>> self.output_keys = ["u", "v"] + >>> def forward(self, x): + >>> x, y = x["x"], x["y"] + >>> u = paddle.cos(y * x) + >>> v = paddle.tanh(x**2) + >>> return {"u": u, "v": v} + >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> node(inputs_dict={"x": x, "y": y}, model_dict={"u": MyLayer()}) + {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.65654278]]), + 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.07239681]]), + 'u(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.99887061]]), + 'v(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.40619713]]), + 'Derivative(u(x, y), y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.03119478]]), + 'Derivative(u(x, y), x)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.00343984]]), + 'Derivative(u(x, y), x) + Derivative(u(x, y), y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.03463462]])} + """ + + def __init__(self, expr: sympy.core.function.UndefinedFunction): + super().__init__(expr) + + def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer]): + if str(self.expr) in inputs_dict: + return inputs_dict + for model in models_dict.values(): + if model.output_keys is None: + raise ValueError( + "The output_keys of the model should be provided in the __init__ function." 
+ ) + model_output_keys = model.output_keys + if str(self.expr.func) in model_output_keys: # u(x, y) to u + model_output = model( + {str(arg): inputs_dict[str(arg)] for arg in self.expr.args} + ) + for key in model_output_keys: + # u to u(x, y) + expr_key = ( + f"{key}({', '.join([str(arg) for arg in self.expr.args])})" + ) + inputs_dict[expr_key] = model_output[key] + break + else: # no break raise ValueError( - f"Symbol {self.symbol} not in provided dictionary {list(x.keys())}!" + f"The model with output_keys = {str(self.expr.func)} is not found." ) - return value + return inputs_dict -class NumberNode(nn.Layer): +class ConstantNode(NodeBase): """ - A node representing a number in the computational graph + A node representing a constant in the computational graph. Args: - number (sympy.Number): the number + expr (sympy.Number or sympy.NumberSymbol): The constant to be applied. Returns: - the value of the number + The input dictionary with the constant added. Examples: - >>> node = NumberNode(sympy.pi) + >>> node = ConstantNode(sympy.pi) >>> node({}) - Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 3.1415927) + {'pi': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 3.1415927)} """ - def __init__(self, number: sympy.Number): - super().__init__() - self.number = float(number) + def __init__(self, expr: sympy.Number or sympy.NumberSymbol): + super().__init__(expr) - def forward(self, x): - return paddle.to_tensor(self.number, dtype=paddle.get_default_dtype()) + def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer]): + inputs_dict[str(self.expr)] = paddle.to_tensor( + float(self.expr), dtype=paddle.get_default_dtype() + ) + return inputs_dict -class DerivativeNode(nn.Layer): +class ComposedFunc(nn.Layer): """ - A node representing a derivative in the computational graph + Compose multiple functions into one function. Args: - expr (sympy.Expr): the expression to be derived - syms (List[Tuple[sympy.Symbol, int]]): the symbols to be derived and their orders + inputs_dict (Dict): The input tensor dictionary. + model_dict (Dict[str, nn.Layer]): The dictionary of the models. Returns: - the value of the derivative + The dictionary of the outputs of the all calculated nodes. + + Examples: + >>> x = sympy.Symbol("x") + >>> expr = sympy.sin(x) + >>> func = sympy_to_function(expr) + >>> func({x: paddle.to_tensor(0.5)}) + {'sin(x)': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 0.47942555)} """ - def __init__(self, expr: sympy.Expr, syms: List[Tuple[sympy.Symbol, int]]): + def __init__(self, nodes: List[NodeBase]): super().__init__() - self.expr = expr - self.syms = syms - - def forward(self, x): - x_value = self.expr(x) - for sym, order in self.syms: - sym_value = sym(x) - if order == 1: - x_value = jacobian(x_value, sym_value) - elif order == 2: - x_value = hessian(x_value, sym_value) - else: - raise NotImplementedError( - f"Higher order derivatives are not implemented yet, got {order}" - ) - return x_value - - -class LayerNode(nn.Layer): + self.nodes = nodes + + def _derivative_to_str(self, expr: sympy.Expr) -> str: + """ + Convert the derivative expression to string. + + Args: + expr (sympy.Expr): The derivative expression. + + Returns: + The string of the derivative expression. 
+ """ + key = str(expr.args[0].func) + for symbol, order in expr.args[1:]: + key += f"__{symbol}" * order + return key + + def forward(self, inputs_dict: Dict, model_dict: Dict[str, nn.Layer] = None): + for node in self.nodes: + inputs_dict = node(inputs_dict, model_dict) + + last_expr = self.nodes[-1].expr + inputs_dict["output"] = inputs_dict.pop( + str(last_expr) + ) # rename the last node key to output + + layer_key_maps = {} + + for key in list(inputs_dict.keys()): + expr = sympy.sympify(key) + if key.startswith( + "Derivative(" + ): # rename the derivative key Derivative(u(x, y), x) to u__x + inputs_dict[self._derivative_to_str(expr)] = inputs_dict.pop(key) + if key.startswith("-Derivative("): # remove the negative derivative + inputs_dict[ + "-" + self._derivative_to_str(expr.args[1]) + ] = inputs_dict.pop(key) + if isinstance(expr.func, sympy.core.function.UndefinedFunction): + layer_key_maps[key] = str(expr.func) + + for ( + key, + value, + ) in layer_key_maps.items(): # rename the layer key e.g. u(x, y) to u + for inputs_key in list(inputs_dict.keys()): + if key in inputs_key: + inputs_dict[inputs_key.replace(key, value)] = inputs_dict.pop( + inputs_key + ) + + return inputs_dict + + +def get_expression_nodes(expr: sympy.Expr) -> List[sympy.Expr]: """ - A node representing a neural network in the computational graph + Convert a sympy expression to a list of sympy expressions using post-order traversal. Args: - func (sympy.Function): the neural network represented by a sympy function - *args (SymbolNode): the arguments of the function + expr (sympy.Expr): the sympy expression to be converted Returns: - the output of the neural network - - Note: - For a multi-output model, only one symbol can be provided in the input dictionary, + A list of sympy expressions. Examples: - Single output case: - >>> x, y = sympy.symbols("x y") - >>> u = sympy.Function("u")(x, y) - >>> func = sympy.Derivative(u, x, y) - >>> func = sympy_to_function(func) - >>> func({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) - - Multi-output case: - >>> x, y = sympy.symbols("x y") - >>> u = sympy.Function("u")(x, y) - >>> v = sympy.Function("v")(x, y) - >>> func = sympy.Derivative(u, x, y) + sympy.Derivative(v, x, y) - >>> func = sympy_to_function(func) - >>> func({u: model, x: paddle.to_tensor(0.5), y: paddle.to_tensor(0.5)}) # The model should have output_keys = ["u", "v"] + >>> x = sympy.Symbol("x") + >>> expr = sympy.sin(x) + x + >>> nodes = get_expression_nodes(expr) + >>> nodes + [x, sin(x), x, x + sin(x)] + + Notes: + This function performs a post-order traversal of the expression tree rooted at `expr`. + The resulting list contains the sub-expressions of `expr` in the order in which they would be evaluated. 
""" + nodes = [] - _MODEL_OUTPUT_CACHE: Dict[str, paddle.Tensor] = {} + def traverse_expression(expr, nodes): + nodes.insert(0, expr) + if expr.func == sympy.Derivative: + nodes.insert(0, expr.args[0]) + return nodes + for arg in expr.args: + nodes = traverse_expression(arg, nodes) + return nodes - def __init__(self, func: sympy.Function, *args: SymbolNode): - super().__init__() - assert isinstance(func, sympy.Function) - self.func = func - self.args = args - - def forward(self, x: Dict): - # check if the model output is in the cache - model_output = self._MODEL_OUTPUT_CACHE.get(self.func.name) - - if model_output is None: - # Find which model provides the symbol value - for model in x.values(): - if hasattr(model, "output_keys"): - output_keys: Dict = model.output_keys - if self.func.name in output_keys: - model_output_dict: Dict = model( - {arg.symbol.name: arg(x) for arg in self.args} - ) - for key in output_keys: - self._MODEL_OUTPUT_CACHE[key] = model_output_dict[key] - break - else: # when no model provides the symbol value - raise ValueError( - f"Model {self.func.name} not in provided dictionary {list(x.keys())}!" - ) - - output = self._MODEL_OUTPUT_CACHE[self.func.name] - self._MODEL_OUTPUT_CACHE[self.func.name] = None - return output - - -def process_sympy_expression(expr: sympy.Expr): - if expr.is_Symbol: - return SymbolNode(expr) - elif expr.is_Function or expr.is_Pow or expr.is_Mul or expr.is_Add: - args = [process_sympy_expression(arg) for arg in expr.args] - try: - paddle_func = FUNC_MAP[expr.func] - return FuncNode(paddle_func, *args) - except KeyError: - logger.warning( - f"Note that you appear to be using a non-built-in function {expr}, please pass in that when you call the function" - ) - return LayerNode(expr, *args) - elif expr.is_Number: - return NumberNode(expr) - elif expr.is_Derivative: - model = process_sympy_expression(expr.args[0]) - syms = [(process_sympy_expression(sym), order) for sym, order in expr.args[1:]] - return DerivativeNode(model, syms) - else: - raise NotImplementedError(f"Unknown type {expr}") + nodes = traverse_expression(expr, nodes) + return nodes def sympy_to_function(expr: sympy.Expr): """ - Convert a sympy expression to a function that can be used in paddle + Convert a sympy expression to a ComposedFunc. Args: expr (sympy.Expr): the sympy expression Returns: - a function that can be used in paddle + A ComposedFunc that can execute the formula represented by the sympy expression. 
Examples: - >>> x = sympy.Symbol("x") - >>> expr = sympy.sin(x) - >>> func = sympy_to_function(expr) - >>> func({"x": paddle.to_tensor(0.5)}) - Tensor(shape=[1], dtype=float32, place=CPUPlace, stop_gradient=True, - [0.47942555]) + >>> x, y = sympy.symbols("x y") + >>> u = sympy.Function("u")(x, y) + >>> v = sympy.Function("v")(x, y) + >>> expr = u.diff(x) - v.diff(x, 2) + u * v + sympy.sin(u) * sympy.cos(v) + >>> function = sympy_to_function(expr) + + >>> class MyLayer(nn.Layer): + >>> def __init__(self): + >>> super(MyLayer, self).__init__() + >>> self.output_keys = ["u", "v"] + >>> def forward(self, x): + >>> x, y = x["x"], x["y"] + >>> u = paddle.cos(y * x) + >>> v = paddle.tanh(x**2) + >>> return {"u": u, "v": v} + + >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") + >>> function(inputs_dict={"x": x, "y": y}, model_dict={"u": MyLayer()}) + {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.21531263]]), + 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-0.20731021]]), + '-1': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + -1.), + 'output': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-1.08300245]]), + 'u__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.00925053]]), + 'v__x__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[1.97856331]]), + '-v__x__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[-1.97856331]]), + 'u': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.99900395]]), + 'sin(u)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.84093243]]), + 'v': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.04632635]]), + 'cos(v)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.99892712]]), + 'u*v': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.04628020]]), + 'sin(u)*cos(v)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, + [[0.84003019]])} """ - return process_sympy_expression(sympy.expand(expr)) + expression_nodes = get_expression_nodes(expr) + expression_nodes = [ + node for node in expression_nodes if not node.is_Symbol + ] # remove symbol.Symbols + expression_nodes = list(dict.fromkeys(expression_nodes)) # remove duplicates + nodes = [] + for node in expression_nodes: + if isinstance(node.func, sympy.core.function.UndefinedFunction): + nodes.append(LayerNode(node)) + elif ( + node.is_Function + or node.is_Add + or node.is_Mul + or node.is_Pow + or node.is_Derivative + ): + nodes.append(OperatorNode(node)) + elif node.is_Number or node.is_NumberSymbol: + nodes.append(ConstantNode(node)) + else: + raise NotImplementedError( + f"The node {node} is not supported in sympy_to_function." 
+ ) + return ComposedFunc(nodes) From 2ccc9f53e2092ce24fdae812ccd0f23c039e2866 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 24 Aug 2023 08:53:12 +0000 Subject: [PATCH 05/48] add sympy to func module --- ppsci/equation/pde/linear_elasticity.py | 53 +- ppsci/equation/sympy2func.py | 787 +++++++++++++-------- ppsci/utils/expression.py | 291 +++++++- ppsci/utils/logger.py | 45 +- test/utils/test_linear_elasticity_sympy.py | 261 +++++++ test/utils/test_navier_stokes_sympy.py | 197 ++++++ 6 files changed, 1300 insertions(+), 334 deletions(-) create mode 100644 test/utils/test_linear_elasticity_sympy.py create mode 100644 test/utils/test_navier_stokes_sympy.py diff --git a/ppsci/equation/pde/linear_elasticity.py b/ppsci/equation/pde/linear_elasticity.py index 85f395f0f..77b2c7eac 100644 --- a/ppsci/equation/pde/linear_elasticity.py +++ b/ppsci/equation/pde/linear_elasticity.py @@ -64,6 +64,12 @@ def __init__( time: bool = False, ): super().__init__() + if lambda_ is None: + nu = float(nu) + E = float(E) + lambda_ = nu * E / ((1 + nu) * (1 - 2 * nu)) + mu = E / (2 * (1 + nu)) + self.E = E self.nu = nu self.lambda_ = lambda_ @@ -74,13 +80,11 @@ def __init__( # Stress equations def stress_disp_xx_compute_func(out): - x, y, z, u, v, w = ( + x, y, u, v = ( out["x"], out["y"], - out["z"], out["u"], out["v"], - out["w"], ) sigma_xx = out["sigma_xx"] stress_disp_xx = ( @@ -96,13 +100,11 @@ def stress_disp_xx_compute_func(out): self.add_equation("stress_disp_xx", stress_disp_xx_compute_func) def stress_disp_yy_compute_func(out): - x, y, z, u, v, w = ( + x, y, u, v = ( out["x"], out["y"], - out["z"], out["u"], out["v"], - out["w"], ) sigma_yy = out["sigma_yy"] stress_disp_yy = ( @@ -166,7 +168,7 @@ def stress_disp_yz_compute_func(out): # Equations of equilibrium def equilibrium_x_compute_func(out): - x, y, z = out["x"], out["y"], out["z"] + x, y = out["x"], out["y"] sigma_xx, sigma_xy = out["sigma_xx"], out["sigma_xy"] equilibrium_x = -jacobian(sigma_xx, x) - jacobian(sigma_xy, y) if self.dim == 3: @@ -180,11 +182,10 @@ def equilibrium_x_compute_func(out): self.add_equation("equilibrium_x", equilibrium_x_compute_func) def equilibrium_y_compute_func(out): - x, y, z = out["x"], out["y"], out["z"] - sigma_xy, sigma_yy, sigma_yz = ( + x, y = out["x"], out["y"] + sigma_xy, sigma_yy = ( out["sigma_xy"], out["sigma_yy"], - out["sigma_yz"], ) equilibrium_y = -jacobian(sigma_xy, x) - jacobian(sigma_yy, y) if self.dim == 3: @@ -253,21 +254,25 @@ def traction_y_compute_func(out): self.add_equation("traction_y", traction_y_compute_func) - def traction_z_compute_func(out): - normal_x, normal_y, normal_z = ( - out["normal_x"], - out["normal_y"], - out["normal_z"], - ) - sigma_xz, sigma_yz, sigma_zz = ( - out["sigma_xz"], - out["sigma_yz"], - out["sigma_zz"], - ) - traction_z = normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz - return traction_z + if self.dim == 3: + + def traction_z_compute_func(out): + normal_x, normal_y, normal_z = ( + out["normal_x"], + out["normal_y"], + out["normal_z"], + ) + sigma_xz, sigma_yz, sigma_zz = ( + out["sigma_xz"], + out["sigma_yz"], + out["sigma_zz"], + ) + traction_z = ( + normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz + ) + return traction_z - self.add_equation("traction_z", traction_z_compute_func) + self.add_equation("traction_z", traction_z_compute_func) # Navier equations def navier_x_compute_func(out): diff --git a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py index b4eb0752f..e63cfb43b 100644 --- 
a/ppsci/equation/sympy2func.py +++ b/ppsci/equation/sympy2func.py @@ -12,13 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools from typing import Dict from typing import List +from typing import Union import paddle import paddle.nn as nn import sympy +import ppsci from ppsci.autodiff import hessian from ppsci.autodiff import jacobian from ppsci.utils import logger @@ -28,15 +31,42 @@ sympy.cos: paddle.cos, sympy.exp: paddle.exp, sympy.Pow: paddle.pow, - sympy.sqrt: paddle.sqrt, + # sympy.sqrt: paddle.sqrt, sympy.log: paddle.log, sympy.tan: paddle.tan, sympy.Max: paddle.maximum, sympy.Min: paddle.minimum, sympy.Abs: paddle.abs, + sympy.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), } +def single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): + order_left = order + while order_left > 0: + if order_left >= 2: + dvar = hessian(dvar, invar) + order_left -= 2 + else: + dvar = jacobian(dvar, invar) + order_left -= 1 + return dvar + + +def cvt_to_key(sympy_node: sympy.Basic): + if isinstance(sympy_node, sympy.Heaviside): + return str(sympy_node) + if isinstance(sympy_node, (sympy.Symbol, sympy.Function)): + return sympy_node.name + elif isinstance(sympy_node, sympy.Derivative): + expr_str = sympy_node.args[0].name # use 'f' instead of 'f(x,y,z)' + for symbol, order in sympy_node.args[1:]: + expr_str += f"__{symbol}" * order + return expr_str + else: + return str(sympy_node) + + class NodeBase(nn.Layer): """ The base class of the node in the computational graph. @@ -51,8 +81,9 @@ class NodeBase(nn.Layer): def __init__(self, expr: sympy.Expr): super().__init__() self.expr = expr + self.key = cvt_to_key(self.expr) - def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer] = None): + def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): raise NotImplementedError def __repr__(self): @@ -72,60 +103,44 @@ class OperatorNode(NodeBase): Returns: The input dictionary with the output of the operator added. 
- - Examples: - >>> x = sympy.Symbol("x") - >>> node = OperatorNode(sympy.sin(x)) - >>> node({"x": paddle.to_tensor(np.random.randn(1, 1))}) - {'x': Tensor(shape=[1, 1], dtype=float64, place=Place(gpu:0), stop_gradient=True, - [[-0.49221350]]), - 'sin(x)': Tensor(shape=[1, 1], dtype=float64, place=Place(gpu:0), stop_gradient=True, - [[-0.47257778]])} """ - def __init__( - self, expr: sympy.Function or sympy.Add or sympy.Mul or sympy.Derivative - ): + def __init__(self, expr: Union[sympy.Add, sympy.Mul, sympy.Derivative]): super().__init__(expr) - def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer] = None): - expr_str = str(self.expr) + def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): if self.expr.func == sympy.Add: - inputs_dict[expr_str] = paddle.add_n( - [inputs_dict[str(arg)] for arg in self.expr.args] + data_dict[self.key] = paddle.add_n( + [data_dict[cvt_to_key(arg)] for arg in self.expr.args] ) elif self.expr.func == sympy.Mul: - inputs_dict[expr_str] = paddle.to_tensor( - 1.0, dtype=paddle.get_default_dtype() - ) - for arg in self.expr.args: - inputs_dict[expr_str] *= inputs_dict[str(arg)] + data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + for arg in self.expr.args[1:]: + data_dict[self.key] = data_dict[self.key] * data_dict[cvt_to_key(arg)] elif self.expr.func == sympy.Derivative: - inputs_dict[expr_str] = inputs_dict[ - str(self.expr.args[0]) - ] # initialize the derivative - symbols = self.expr.args[1:] - for symbol, order in symbols: - expr_tensor = inputs_dict[expr_str] - symbol_tensor = inputs_dict[str(symbol)] - if order == 1: - inputs_dict[expr_str] = jacobian(expr_tensor, symbol_tensor) - elif order == 2: - inputs_dict[expr_str] = hessian(expr_tensor, symbol_tensor) - else: - logger.warning( - f"The order {order} of the derivative is not supported, the order should be 1 or 2." - ) + if self.key in data_dict: + return data_dict + data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + for symbol, order in self.expr.args[1:]: + data_dict[self.key] = single_derivate_func( + data_dict[self.key], + data_dict[cvt_to_key(symbol)], + order, + ) else: try: - inputs_dict[expr_str] = FUNC_MAP[self.expr.func]( - *[inputs_dict[str(arg)] for arg in self.expr.args] - ) + func = FUNC_MAP[self.expr.func] except KeyError: - logger.warning( - f"The operator {self.expr.func} is not supported, please add it to FUNC_MAP." + raise NotImplementedError( + f"'{self.expr.func}' operator is not supported now." + ) + if self.expr.func == sympy.Heaviside: + data_dict[self.key] = func(data_dict[cvt_to_key(self.expr.args[0])]) + else: + data_dict[self.key] = func( + *[data_dict[cvt_to_key(arg)] for arg in self.expr.args] ) - return inputs_dict + return data_dict class LayerNode(NodeBase): @@ -143,93 +158,22 @@ class LayerNode(NodeBase): And the `output_keys` should be provided in the `__init__` function. 
Examples: - Single output case: - >>> x, y = sympy.symbols("x y") - >>> u = sympy.Function("u")(x, y) - >>> func = u.diff(x) + u.diff(y) - >>> node = LayerNode(func) - >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> class MyLayer(nn.Layer): - >>> def __init__(self): - >>> super(MyLayer, self).__init__() - >>> self.output_keys = ["u"] - >>> def forward(self, x): - >>> x, y = x["x"], x["y"] - >>> u = paddle.cos(y * x) - >>> return {"u": u} - >>> node(inputs_dict={"x": x, "y": y}, model_dict={f"u": MyLayer()}) - {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.20314099]]), - 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.95114714]]), - 'u(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.98139161]]), - 'Derivative(u(x, y), x)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.18263641]])} - - Multi-output case: - >>> x, y = sympy.symbols("x y") - >>> u = sympy.Function("u")(x, y) - >>> v = sympy.Function("v")(x, y) - >>> func = u.diff(x) + u.diff(y) - >>> node = LayerNode(func) - >>> class MyLayer(nn.Layer): - >>> def __init__(self): - >>> super(MyLayer, self).__init__() - >>> self.output_keys = ["u", "v"] - >>> def forward(self, x): - >>> x, y = x["x"], x["y"] - >>> u = paddle.cos(y * x) - >>> v = paddle.tanh(x**2) - >>> return {"u": u, "v": v} - >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> node(inputs_dict={"x": x, "y": y}, model_dict={"u": MyLayer()}) - {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.65654278]]), - 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.07239681]]), - 'u(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.99887061]]), - 'v(x, y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.40619713]]), - 'Derivative(u(x, y), y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.03119478]]), - 'Derivative(u(x, y), x)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.00343984]]), - 'Derivative(u(x, y), x) + Derivative(u(x, y), y)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.03463462]])} + """ - def __init__(self, expr: sympy.core.function.UndefinedFunction): + def __init__(self, expr: sympy.core.function.UndefinedFunction, model: nn.Layer): super().__init__(expr) + self.model = model - def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer]): - if str(self.expr) in inputs_dict: - return inputs_dict - for model in models_dict.values(): - if model.output_keys is None: - raise ValueError( - "The output_keys of the model should be provided in the __init__ function." 
- ) - model_output_keys = model.output_keys - if str(self.expr.func) in model_output_keys: # u(x, y) to u - model_output = model( - {str(arg): inputs_dict[str(arg)] for arg in self.expr.args} - ) - for key in model_output_keys: - # u to u(x, y) - expr_key = ( - f"{key}({', '.join([str(arg) for arg in self.expr.args])})" - ) - inputs_dict[expr_key] = model_output[key] - break - else: # no break - raise ValueError( - f"The model with output_keys = {str(self.expr.func)} is not found." - ) - return inputs_dict + def forward(self, data_dict: Dict): + if self.key in data_dict: + return data_dict + + output_dict = self.model(data_dict) + for key, value in output_dict.items(): + data_dict[key] = value + + return data_dict class ConstantNode(NodeBase): @@ -251,12 +195,23 @@ class ConstantNode(NodeBase): def __init__(self, expr: sympy.Number or sympy.NumberSymbol): super().__init__(expr) + if self.expr.is_Float: + self.expr = float(self.expr) + elif self.expr.is_Integer: + self.expr = float(self.expr) + elif self.expr.is_Boolean: + self.expr = float(self.expr) + elif self.expr.is_Rational: + self.expr = float(self.expr) + else: + raise TypeError( + f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" + ) + self.expr = paddle.to_tensor(self.expr) - def forward(self, inputs_dict: Dict, models_dict: Dict[str, nn.Layer]): - inputs_dict[str(self.expr)] = paddle.to_tensor( - float(self.expr), dtype=paddle.get_default_dtype() - ) - return inputs_dict + def forward(self, data_dict: Dict): + data_dict[self.key] = self.expr + return data_dict class ComposedFunc(nn.Layer): @@ -264,114 +219,47 @@ class ComposedFunc(nn.Layer): Compose multiple functions into one function. Args: - inputs_dict (Dict): The input tensor dictionary. + data_dict (Dict): The input tensor dictionary. model_dict (Dict[str, nn.Layer]): The dictionary of the models. Returns: The dictionary of the outputs of the all calculated nodes. - - Examples: - >>> x = sympy.Symbol("x") - >>> expr = sympy.sin(x) - >>> func = sympy_to_function(expr) - >>> func({x: paddle.to_tensor(0.5)}) - {'sin(x)': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 0.47942555)} """ - def __init__(self, nodes: List[NodeBase]): + def __init__(self, target: str, funcs: List[NodeBase]): super().__init__() - self.nodes = nodes - - def _derivative_to_str(self, expr: sympy.Expr) -> str: - """ - Convert the derivative expression to string. - - Args: - expr (sympy.Expr): The derivative expression. - - Returns: - The string of the derivative expression. 
- """ - key = str(expr.args[0].func) - for symbol, order in expr.args[1:]: - key += f"__{symbol}" * order - return key - - def forward(self, inputs_dict: Dict, model_dict: Dict[str, nn.Layer] = None): - for node in self.nodes: - inputs_dict = node(inputs_dict, model_dict) - - last_expr = self.nodes[-1].expr - inputs_dict["output"] = inputs_dict.pop( - str(last_expr) - ) # rename the last node key to output - - layer_key_maps = {} - - for key in list(inputs_dict.keys()): - expr = sympy.sympify(key) - if key.startswith( - "Derivative(" - ): # rename the derivative key Derivative(u(x, y), x) to u__x - inputs_dict[self._derivative_to_str(expr)] = inputs_dict.pop(key) - if key.startswith("-Derivative("): # remove the negative derivative - inputs_dict[ - "-" + self._derivative_to_str(expr.args[1]) - ] = inputs_dict.pop(key) - if isinstance(expr.func, sympy.core.function.UndefinedFunction): - layer_key_maps[key] = str(expr.func) - - for ( - key, - value, - ) in layer_key_maps.items(): # rename the layer key e.g. u(x, y) to u - for inputs_key in list(inputs_dict.keys()): - if key in inputs_key: - inputs_dict[inputs_key.replace(key, value)] = inputs_dict.pop( - inputs_key - ) - - return inputs_dict - - -def get_expression_nodes(expr: sympy.Expr) -> List[sympy.Expr]: - """ - Convert a sympy expression to a list of sympy expressions using post-order traversal. - - Args: - expr (sympy.Expr): the sympy expression to be converted - - Returns: - A list of sympy expressions. - - Examples: - >>> x = sympy.Symbol("x") - >>> expr = sympy.sin(x) + x - >>> nodes = get_expression_nodes(expr) - >>> nodes - [x, sin(x), x, x + sin(x)] - - Notes: - This function performs a post-order traversal of the expression tree rooted at `expr`. - The resulting list contains the sub-expressions of `expr` in the order in which they would be evaluated. - """ - nodes = [] - - def traverse_expression(expr, nodes): - nodes.insert(0, expr) - if expr.func == sympy.Derivative: - nodes.insert(0, expr.args[0]) - return nodes - for arg in expr.args: - nodes = traverse_expression(arg, nodes) + self.funcs = funcs + self.target = target + + def forward(self, data_dict: Dict): + for func in self.funcs: + data_dict = func(data_dict) + return data_dict[self.funcs[-1].key] + + +def post_traverse(cur_node, nodes): + # traverse into sub-nodes + if isinstance(cur_node, sympy.core.function.UndefinedFunction): + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Function): + for arg in cur_node.args: + nodes = post_traverse(arg, nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Derivative): + nodes = post_traverse(cur_node.args[0], nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Symbol): return nodes - - nodes = traverse_expression(expr, nodes) + elif isinstance(cur_node, sympy.Number): + nodes.append(cur_node) + else: + for arg in cur_node.args: + nodes = post_traverse(arg, nodes) + nodes.append(cur_node) return nodes -def sympy_to_function(expr: sympy.Expr): +def sympy_to_function(target: str, expr: sympy.Expr, models: nn.Layer): """ Convert a sympy expression to a ComposedFunc. @@ -382,73 +270,398 @@ def sympy_to_function(expr: sympy.Expr): A ComposedFunc that can execute the formula represented by the sympy expression. 
Examples: - >>> x, y = sympy.symbols("x y") - >>> u = sympy.Function("u")(x, y) - >>> v = sympy.Function("v")(x, y) - >>> expr = u.diff(x) - v.diff(x, 2) + u * v + sympy.sin(u) * sympy.cos(v) - >>> function = sympy_to_function(expr) - - >>> class MyLayer(nn.Layer): - >>> def __init__(self): - >>> super(MyLayer, self).__init__() - >>> self.output_keys = ["u", "v"] - >>> def forward(self, x): - >>> x, y = x["x"], x["y"] - >>> u = paddle.cos(y * x) - >>> v = paddle.tanh(x**2) - >>> return {"u": u, "v": v} - - >>> x = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> y = paddle.to_tensor(np.random.randn(1, 1), stop_gradient=False, dtype="float32") - >>> function(inputs_dict={"x": x, "y": y}, model_dict={"u": MyLayer()}) - {'x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.21531263]]), - 'y': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-0.20731021]]), - '-1': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - -1.), - 'output': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-1.08300245]]), - 'u__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.00925053]]), - 'v__x__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[1.97856331]]), - '-v__x__x': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[-1.97856331]]), - 'u': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.99900395]]), - 'sin(u)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.84093243]]), - 'v': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.04632635]]), - 'cos(v)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.99892712]]), - 'u*v': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.04628020]]), - 'sin(u)*cos(v)': Tensor(shape=[1, 1], dtype=float32, place=Place(gpu:0), stop_gradient=False, - [[0.84003019]])} """ - expression_nodes = get_expression_nodes(expr) - expression_nodes = [ - node for node in expression_nodes if not node.is_Symbol - ] # remove symbol.Symbols - expression_nodes = list(dict.fromkeys(expression_nodes)) # remove duplicates - nodes = [] - for node in expression_nodes: + sympy_nodes = [] + sympy_nodes = post_traverse(expr, sympy_nodes) + sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] + sympy_nodes = list( + dict.fromkeys(sympy_nodes) + ) # remove duplicates with topo-order kept + + callable_nodes = [] + for i, node in enumerate(sympy_nodes): + logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") if isinstance(node.func, sympy.core.function.UndefinedFunction): - nodes.append(LayerNode(node)) + match = False + for model in models: + if str(node.func.name) in model.output_keys: + callable_nodes.append(LayerNode(node, model)) + if match: + raise ValueError( + f"function {node} can match at least 2 output key of models, which is forbidden." 
+ ) + match = True elif ( - node.is_Function + isinstance(node, tuple(FUNC_MAP.keys())) or node.is_Add or node.is_Mul - or node.is_Pow or node.is_Derivative + or node.is_Pow ): - nodes.append(OperatorNode(node)) + callable_nodes.append(OperatorNode(node)) elif node.is_Number or node.is_NumberSymbol: - nodes.append(ConstantNode(node)) + callable_nodes.append(ConstantNode(node)) else: raise NotImplementedError( f"The node {node} is not supported in sympy_to_function." ) - return ComposedFunc(nodes) + return ComposedFunc(target, callable_nodes) + + +class ZeroEquation: + """ + Zero Equation Turbulence model + + Parameters + ========== + nu : float + The kinematic viscosity of the fluid. + max_distance : float + The maximum wall distance in the flow field. + rho : float, Sympy Symbol/Expr, str + The density. If `rho` is a str then it is + converted to Sympy Function of form 'rho(x,y,z,t)'. + If 'rho' is a Sympy Symbol or Expression then this + is substituted into the equation. Default is 1. + dim : int + Dimension of the Zero Equation Turbulence model (2 or 3). + Default is 3. + time : bool + If time-dependent equations or not. Default is True. + + Example + """ + + def __init__( + self, nu, max_distance, rho=1, dim=3, time=True + ): # TODO add density into model + # set params + self.dim = dim + self.time = time + + # model coefficients + self.max_distance = max_distance + self.karman_constant = 0.419 + self.max_distance_ratio = 0.09 + + # coordinates + x, y, z = sympy.Symbol("x"), sympy.Symbol("y"), sympy.Symbol("z") + + # time + t = sympy.Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = sympy.Function("u")(*input_variables) + v = sympy.Function("v")(*input_variables) + if self.dim == 3: + w = sympy.Function("w")(*input_variables) + else: + w = sympy.Number(0) + + # density + if type(rho) is str: + rho = sympy.Function(rho)(*input_variables) + elif type(rho) in [float, int]: + rho = sympy.Number(rho) + + # wall distance + normal_distance = sympy.Function("sdf")(*input_variables) + + # mixing length + mixing_length = sympy.Min( + self.karman_constant * normal_distance, + self.max_distance_ratio * self.max_distance, + ) + G = ( + 2 * u.diff(x) ** 2 + + 2 * v.diff(y) ** 2 + + 2 * w.diff(z) ** 2 + + (u.diff(y) + v.diff(x)) ** 2 + + (u.diff(z) + w.diff(x)) ** 2 + + (v.diff(z) + w.diff(y)) ** 2 + ) + + # set equations + self.equations = {} + self.equations["nu"] = nu + rho * mixing_length**2 * sympy.sqrt(G) + + +class NavierStokes_sympy: + """ + Compressible Navier Stokes equations + + Parameters + ========== + nu : float, Sympy Symbol/Expr, str + The kinematic viscosity. If `nu` is a str then it is + converted to Sympy Function of form `nu(x,y,z,t)`. + If `nu` is a Sympy Symbol or Expression then this + is substituted into the equation. This allows for + variable viscosity. + rho : float, Sympy Symbol/Expr, str + The density of the fluid. If `rho` is a str then it is + converted to Sympy Function of form 'rho(x,y,z,t)'. + If 'rho' is a Sympy Symbol or Expression then this + is substituted into the equation to allow for + compressible Navier Stokes. Default is 1. + dim : int + Dimension of the Navier Stokes (2 or 3). Default is 3. + time : bool + If time-dependent equations or not. Default is True. + mixed_form: bool + If True, use the mixed formulation of the Navier-Stokes equations. 
+ + Examples + """ + + name = "NavierStokes" + + def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): + # set params + self.dim = dim + self.time = time + self.mixed_form = mixed_form + + # coordinates + x, y, z = sympy.Symbol("x"), sympy.Symbol("y"), sympy.Symbol("z") + + # time + t = sympy.Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = sympy.Function("u")(*input_variables) + v = sympy.Function("v")(*input_variables) + if self.dim == 3: + w = sympy.Function("w")(*input_variables) + else: + w = sympy.Number(0) + + # pressure + p = sympy.Function("p")(*input_variables) + + # kinematic viscosity + if isinstance(nu, str): + nu = sympy.Function(nu)(*input_variables) + elif isinstance(nu, (float, int)): + nu = sympy.Number(nu) + + # density + if isinstance(rho, str): + rho = sympy.Function(rho)(*input_variables) + elif isinstance(rho, (float, int)): + rho = sympy.Number(rho) + + # dynamic viscosity + mu = rho * nu + + # set equations + self.equations = {} + self.equations["continuity"] = ( + rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) + ) + + if not self.mixed_form: + curl = ( + sympy.Number(0) + if rho.diff(x) == 0 + else u.diff(x) + v.diff(y) + w.diff(z) + ) + self.equations["momentum_x"] = ( + (rho * u).diff(t) + + ( + u * ((rho * u).diff(x)) + + v * ((rho * u).diff(y)) + + w * ((rho * u).diff(z)) + + rho * u * (curl) + ) + + p.diff(x) + - (-2 / 3 * mu * (curl)).diff(x) + - (mu * u.diff(x)).diff(x) + - (mu * u.diff(y)).diff(y) + - (mu * u.diff(z)).diff(z) + - (mu * (curl).diff(x)) + ) + self.equations["momentum_y"] = ( + (rho * v).diff(t) + + ( + u * ((rho * v).diff(x)) + + v * ((rho * v).diff(y)) + + w * ((rho * v).diff(z)) + + rho * v * (curl) + ) + + p.diff(y) + - (-2 / 3 * mu * (curl)).diff(y) + - (mu * v.diff(x)).diff(x) + - (mu * v.diff(y)).diff(y) + - (mu * v.diff(z)).diff(z) + - (mu * (curl).diff(y)) + ) + self.equations["momentum_z"] = ( + (rho * w).diff(t) + + ( + u * ((rho * w).diff(x)) + + v * ((rho * w).diff(y)) + + w * ((rho * w).diff(z)) + + rho * w * (curl) + ) + + p.diff(z) + - (-2 / 3 * mu * (curl)).diff(z) + - (mu * w.diff(x)).diff(x) + - (mu * w.diff(y)).diff(y) + - (mu * w.diff(z)).diff(z) + - (mu * (curl).diff(z)) + ) + + if self.dim == 2: + self.equations.pop("momentum_z") + + elif self.mixed_form: + u_x = sympy.Function("u_x")(*input_variables) + u_y = sympy.Function("u_y")(*input_variables) + u_z = sympy.Function("u_z")(*input_variables) + v_x = sympy.Function("v_x")(*input_variables) + v_y = sympy.Function("v_y")(*input_variables) + v_z = sympy.Function("v_z")(*input_variables) + + if self.dim == 3: + w_x = sympy.Function("w_x")(*input_variables) + w_y = sympy.Function("w_y")(*input_variables) + w_z = sympy.Function("w_z")(*input_variables) + else: + w_x = sympy.Number(0) + w_y = sympy.Number(0) + w_z = sympy.Number(0) + u_z = sympy.Number(0) + v_z = sympy.Number(0) + + curl = sympy.Number(0) if rho.diff(x) == 0 else u_x + v_y + w_z + self.equations["momentum_x"] = ( + (rho * u).diff(t) + + ( + u * ((rho * u.diff(x))) + + v * ((rho * u.diff(y))) + + w * ((rho * u.diff(z))) + + rho * u * (curl) + ) + + p.diff(x) + - (-2 / 3 * mu * (curl)).diff(x) + - (mu * u_x).diff(x) + - (mu * u_y).diff(y) + - (mu * u_z).diff(z) + - (mu * (curl).diff(x)) + ) + self.equations["momentum_y"] = ( + (rho * v).diff(t) + + ( + u * ((rho * v.diff(x))) + + v * ((rho * v.diff(y))) + 
+ w * ((rho * v.diff(z))) + + rho * v * (curl) + ) + + p.diff(y) + - (-2 / 3 * mu * (curl)).diff(y) + - (mu * v_x).diff(x) + - (mu * v_y).diff(y) + - (mu * v_z).diff(z) + - (mu * (curl).diff(y)) + ) + self.equations["momentum_z"] = ( + (rho * w).diff(t) + + ( + u * ((rho * w.diff(x))) + + v * ((rho * w.diff(y))) + + w * ((rho * w.diff(z))) + + rho * w * (curl) + ) + + p.diff(z) + - (-2 / 3 * mu * (curl)).diff(z) + - (mu * w_x).diff(x) + - (mu * w_y).diff(y) + - (mu * w_z).diff(z) + - (mu * (curl).diff(z)) + ) + self.equations["compatibility_u_x"] = u.diff(x) - u_x + self.equations["compatibility_u_y"] = u.diff(y) - u_y + self.equations["compatibility_u_z"] = u.diff(z) - u_z + self.equations["compatibility_v_x"] = v.diff(x) - v_x + self.equations["compatibility_v_y"] = v.diff(y) - v_y + self.equations["compatibility_v_z"] = v.diff(z) - v_z + self.equations["compatibility_w_x"] = w.diff(x) - w_x + self.equations["compatibility_w_y"] = w.diff(y) - w_y + self.equations["compatibility_w_z"] = w.diff(z) - w_z + self.equations["compatibility_u_xy"] = u_x.diff(y) - u_y.diff(x) + self.equations["compatibility_u_xz"] = u_x.diff(z) - u_z.diff(x) + self.equations["compatibility_u_yz"] = u_y.diff(z) - u_z.diff(y) + self.equations["compatibility_v_xy"] = v_x.diff(y) - v_y.diff(x) + self.equations["compatibility_v_xz"] = v_x.diff(z) - v_z.diff(x) + self.equations["compatibility_v_yz"] = v_y.diff(z) - v_z.diff(y) + self.equations["compatibility_w_xy"] = w_x.diff(y) - w_y.diff(x) + self.equations["compatibility_w_xz"] = w_x.diff(z) - w_z.diff(x) + self.equations["compatibility_w_yz"] = w_y.diff(z) - w_z.diff(y) + + if self.dim == 2: + self.equations.pop("momentum_z") + self.equations.pop("compatibility_u_z") + self.equations.pop("compatibility_v_z") + self.equations.pop("compatibility_w_x") + self.equations.pop("compatibility_w_y") + self.equations.pop("compatibility_w_z") + self.equations.pop("compatibility_u_xz") + self.equations.pop("compatibility_u_yz") + self.equations.pop("compatibility_v_xz") + self.equations.pop("compatibility_v_yz") + self.equations.pop("compatibility_w_xy") + self.equations.pop("compatibility_w_xz") + self.equations.pop("compatibility_w_yz") + + +if __name__ == "__main__": + logger.init_logger(log_level="debug") + # ze = ZeroEquation(nu=1, rho=1.0, dim=2, max_distance=4, time=False) + ns = NavierStokes_sympy(nu=2.0, rho=1.0, dim=2, time=False) + target = "momentum_x" + test_expr = ns.equations[target] + + x = paddle.randn([4, 1]) + y = paddle.randn([4, 1]) + z = paddle.randn([4, 1]) + sdf = paddle.randn([4, 1]) + sdf__x = paddle.randn([4, 1]) + sdf__y = paddle.randn([4, 1]) + x.stop_gradient = False + y.stop_gradient = False + z.stop_gradient = False + sdf.stop_gradient = False + sdf__x.stop_gradient = False + sdf__y.stop_gradient = False + + input_dict = { + "x": x, + "y": y, + "z": z, + "sdf": sdf, + "sdf__x": sdf__x, + "sdf__y": sdf__y, + } + + model1 = ppsci.arch.MLP(("x", "y", "z"), ("u", "v"), 2, 10) + model2 = ppsci.arch.MLP(("x", "y", "z"), ("w", "p"), 2, 10) + + cvt_expr = sympy_to_function(target, test_expr, [model1, model2]) + + output = cvt_expr(input_dict) + print(output.shape) diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index b331cbb84..3fe8ee916 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -12,17 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
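The intermediate keys seen in the sympy_to_function docstring output above ("u__x", "v__x__x", "sin(u)", ...) follow a simple naming rule implemented by the cvt_to_key helper (reproduced in ppsci/utils/expression.py below): symbols and applied functions keep their bare name, and each differentiation appends "__<symbol>" once per order. A sympy-only sketch of that rule (illustrative, not part of the patch):

    import sympy

    x, y = sympy.symbols("x y")
    u = sympy.Function("u")(x, y)
    d = u.diff(x, 2, y)               # Derivative(u(x, y), (x, 2), (y, 1))

    key = d.args[0].name              # "u", i.e. drop the "(x, y)" arguments
    for symbol, order in d.args[1:]:  # [(x, 2), (y, 1)]
        key += f"__{symbol}" * order
    print(key)                        # prints "u__x__x__y"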
+import functools from typing import TYPE_CHECKING from typing import Callable from typing import Dict +from typing import List from typing import Optional from typing import Tuple +from typing import Union +import paddle +import sympy from paddle import jit from paddle import nn +from ppsci.autodiff import hessian +from ppsci.autodiff import jacobian +from ppsci.utils import logger + if TYPE_CHECKING: - import paddle from ppsci import constraint from ppsci import validate @@ -191,3 +199,284 @@ def visu_forward( clear() return output_dict + + +FUNC_MAP = { + sympy.sin: paddle.sin, + sympy.cos: paddle.cos, + sympy.exp: paddle.exp, + sympy.Pow: paddle.pow, + sympy.log: paddle.log, + sympy.tan: paddle.tan, + sympy.Max: paddle.maximum, + sympy.Min: paddle.minimum, + sympy.Abs: paddle.abs, + sympy.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), +} + + +def single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): + order_left = order + while order_left > 0: + if order_left >= 2: + dvar = hessian(dvar, invar) + order_left -= 2 + else: + dvar = jacobian(dvar, invar) + order_left -= 1 + return dvar + + +def cvt_to_key(sympy_node: sympy.Basic): + if isinstance(sympy_node, sympy.Heaviside): + return str(sympy_node) + if isinstance(sympy_node, (sympy.Symbol, sympy.Function)): + return sympy_node.name + elif isinstance(sympy_node, sympy.Derivative): + expr_str = sympy_node.args[0].name # use 'f' instead of 'f(x,y,z)' + for symbol, order in sympy_node.args[1:]: + expr_str += f"__{symbol}" * order + return expr_str + else: + return str(sympy_node) + + +class NodeBase(nn.Layer): + """ + The base class of the node in the computational graph. + + Args: + expr (sympy.Expr): The expression of the node. + + Returns: + The input dictionary with the output of the node added. + """ + + def __init__(self, expr: sympy.Expr): + super().__init__() + self.expr = expr + self.key = cvt_to_key(self.expr) + + def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): + raise NotImplementedError + + def __repr__(self): + return ( + self.__class__.__name__ + f"(expr: {self.expr}), type: {type(self.expr)})" + ) + + +class OperatorNode(NodeBase): + """ + A node representing a sympy operator in the computational graph. + + (e.g. sin, cos, etc.) + + Args: + expr (sympy.Expr): The expression of the node. + + Returns: + The input dictionary with the output of the operator added. + """ + + def __init__(self, expr: Union[sympy.Add, sympy.Mul, sympy.Derivative]): + super().__init__(expr) + + def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): + if self.expr.func == sympy.Add: + data_dict[self.key] = paddle.add_n( + [data_dict[cvt_to_key(arg)] for arg in self.expr.args] + ) + elif self.expr.func == sympy.Mul: + data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + for arg in self.expr.args[1:]: + data_dict[self.key] = data_dict[self.key] * data_dict[cvt_to_key(arg)] + elif self.expr.func == sympy.Derivative: + if self.key in data_dict: + return data_dict + data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + for symbol, order in self.expr.args[1:]: + data_dict[self.key] = single_derivate_func( + data_dict[self.key], + data_dict[cvt_to_key(symbol)], + order, + ) + else: + try: + func = FUNC_MAP[self.expr.func] + except KeyError: + raise NotImplementedError( + f"'{self.expr.func}' operator is not supported now." 
+ ) + if self.expr.func == sympy.Heaviside: + data_dict[self.key] = func(data_dict[cvt_to_key(self.expr.args[0])]) + else: + data_dict[self.key] = func( + *[data_dict[cvt_to_key(arg)] for arg in self.expr.args] + ) + return data_dict + + +class LayerNode(NodeBase): + """ + A node representing a neural network in the computational graph + + Args: + expr (sympy.core.function.UndefinedFunction): Definition symbol of the neural network. + + Returns: + The input dictionary with the output of the neural network added. + + Note: + For the provided network, the forward should accept a dictionary as input and return a dictionary as output. + And the `output_keys` should be provided in the `__init__` function. + + Examples: + + """ + + def __init__(self, expr: sympy.core.function.UndefinedFunction, model: nn.Layer): + super().__init__(expr) + self.model = model + + def forward(self, data_dict: Dict): + if self.key in data_dict: + return data_dict + + output_dict = self.model(data_dict) + for key, value in output_dict.items(): + data_dict[key] = value + + return data_dict + + +class ConstantNode(NodeBase): + """ + A node representing a constant in the computational graph. + + Args: + expr (sympy.Number or sympy.NumberSymbol): The constant to be applied. + + Returns: + The input dictionary with the constant added. + + Examples: + >>> node = ConstantNode(sympy.pi) + >>> node({}) + {'pi': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, + 3.1415927)} + """ + + def __init__(self, expr: sympy.Number or sympy.NumberSymbol): + super().__init__(expr) + if self.expr.is_Float: + self.expr = float(self.expr) + elif self.expr.is_Integer: + self.expr = float(self.expr) + elif self.expr.is_Boolean: + self.expr = float(self.expr) + elif self.expr.is_Rational: + self.expr = float(self.expr) + else: + raise TypeError( + f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" + ) + self.expr = paddle.to_tensor(self.expr) + + def forward(self, data_dict: Dict): + data_dict[self.key] = self.expr + return data_dict + + +class ComposedFunc(nn.Layer): + """ + Compose multiple functions into one function. + + Args: + data_dict (Dict): The input tensor dictionary. + model_dict (Dict[str, nn.Layer]): The dictionary of the models. + + Returns: + The dictionary of the outputs of the all calculated nodes. + """ + + def __init__(self, target: str, funcs: List[NodeBase]): + super().__init__() + self.funcs = funcs + self.target = target + + def forward(self, data_dict: Dict): + for func in self.funcs: + data_dict = func(data_dict) + return data_dict[self.funcs[-1].key] + + +def post_traverse(cur_node, nodes): + # traverse into sub-nodes + if isinstance(cur_node, sympy.core.function.UndefinedFunction): + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Function): + for arg in cur_node.args: + nodes = post_traverse(arg, nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Derivative): + nodes = post_traverse(cur_node.args[0], nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sympy.Symbol): + return nodes + elif isinstance(cur_node, sympy.Number): + nodes.append(cur_node) + else: + for arg in cur_node.args: + nodes = post_traverse(arg, nodes) + nodes.append(cur_node) + return nodes + + +def sympy_to_function(target: str, expr: sympy.Expr, models: nn.Layer): + """ + Convert a sympy expression to a ComposedFunc. + + Args: + expr (sympy.Expr): the sympy expression + + Returns: + A ComposedFunc that can execute the formula represented by the sympy expression. 
+ + Examples: + """ + sympy_nodes = [] + sympy_nodes = post_traverse(expr, sympy_nodes) + sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] + sympy_nodes = list( + dict.fromkeys(sympy_nodes) + ) # remove duplicates with topo-order kept + + callable_nodes = [] + for i, node in enumerate(sympy_nodes): + logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") + if isinstance(node.func, sympy.core.function.UndefinedFunction): + match = False + for model in models: + if str(node.func.name) in model.output_keys: + callable_nodes.append(LayerNode(node, model)) + if match: + raise ValueError( + f"function {node} can match at least 2 output key of models, which is forbidden." + ) + match = True + elif ( + isinstance(node, tuple(FUNC_MAP.keys())) + or node.is_Add + or node.is_Mul + or node.is_Derivative + or node.is_Pow + ): + callable_nodes.append(OperatorNode(node)) + elif node.is_Number or node.is_NumberSymbol: + callable_nodes.append(ConstantNode(node)) + else: + raise NotImplementedError( + f"The node {node} is not supported in sympy_to_function." + ) + return ComposedFunc(target, callable_nodes) diff --git a/ppsci/utils/logger.py b/ppsci/utils/logger.py index 650a6aa33..977fbee4d 100644 --- a/ppsci/utils/logger.py +++ b/ppsci/utils/logger.py @@ -92,28 +92,6 @@ def init_logger( _logger.propagate = False -def set_log_level(log_level): - """Set log level.""" - if dist.get_rank() == 0: - _logger.setLevel(log_level) - else: - _logger.setLevel(logging.ERROR) - - -def log_at_trainer0(log_func): - """ - Logs will print multi-times when calling Fleet API. - Only display single log and ignore the others. - """ - - @functools.wraps(log_func) - def wrapped_log_func(fmt, *args): - if dist.get_rank() == 0: - log_func(fmt, *args) - - return wrapped_log_func - - def ensure_logger(log_func): """ Automatically initialize `logger` by default arguments @@ -136,6 +114,29 @@ def wrapped_log_func(fmt, *args): return wrapped_log_func +@ensure_logger +def set_log_level(log_level): + """Set log level.""" + if dist.get_rank() == 0: + _logger.setLevel(log_level) + else: + _logger.setLevel(logging.ERROR) + + +def log_at_trainer0(log_func): + """ + Logs will print multi-times when calling Fleet API. + Only display single log and ignore the others. + """ + + @functools.wraps(log_func) + def wrapped_log_func(fmt, *args): + if dist.get_rank() == 0: + log_func(fmt, *args) + + return wrapped_log_func + + @ensure_logger @log_at_trainer0 def info(fmt, *args): diff --git a/test/utils/test_linear_elasticity_sympy.py b/test/utils/test_linear_elasticity_sympy.py new file mode 100644 index 000000000..2686f8aa8 --- /dev/null +++ b/test/utils/test_linear_elasticity_sympy.py @@ -0,0 +1,261 @@ +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
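The two new test modules below exercise the converter end to end against hand-written residuals. For orientation, a smaller self-contained sketch of the same call pattern (the toy expression, layer sizes and the "residual" target name are illustrative and not taken from the tests; it assumes the patch is applied so that ppsci.utils.expression exposes sympy_to_function):

    import paddle
    import sympy as sp

    import ppsci
    from ppsci.utils import expression

    # toy residual u_x + u * v built from a single network's outputs
    x, y = sp.symbols("x y")
    u = sp.Function("u")(x, y)
    v = sp.Function("v")(x, y)
    residual = u.diff(x) + u * v

    model = ppsci.arch.MLP(("x", "y"), ("u", "v"), 2, 10)
    func = expression.sympy_to_function("residual", residual, [model])

    inputs = {key: paddle.randn([8, 1]) for key in ("x", "y")}
    for tensor in inputs.values():
        tensor.stop_gradient = False  # jacobian/hessian must differentiate w.r.t. inputs
    print(func(inputs).shape)  # [8, 1]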
+ +import paddle +import pytest +from sympy import Function +from sympy import Number +from sympy import Symbol + +import ppsci +from ppsci import equation +from ppsci.autodiff import clear +from ppsci.utils import expression + +__all__ = [] + + +class LinearElasticity_sympy: + def __init__( + self, E=None, nu=None, lambda_=None, mu=None, rho=1, dim=3, time=False + ): + + # set params + self.dim = dim + self.time = time + + # coordinates + x, y, z = Symbol("x"), Symbol("y"), Symbol("z") + normal_x, normal_y, normal_z = ( + Symbol("normal_x"), + Symbol("normal_y"), + Symbol("normal_z"), + ) + + # time + t = Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # displacement componets + u = Function("u")(*input_variables) + v = Function("v")(*input_variables) + sigma_xx = Function("sigma_xx")(*input_variables) + sigma_yy = Function("sigma_yy")(*input_variables) + sigma_xy = Function("sigma_xy")(*input_variables) + if self.dim == 3: + w = Function("w")(*input_variables) + sigma_zz = Function("sigma_zz")(*input_variables) + sigma_xz = Function("sigma_xz")(*input_variables) + sigma_yz = Function("sigma_yz")(*input_variables) + else: + w = Number(0) + sigma_zz = Number(0) + sigma_xz = Number(0) + sigma_yz = Number(0) + + # material properties + if lambda_ is None: + if isinstance(nu, str): + nu = Function(nu)(*input_variables) + elif isinstance(nu, (float, int)): + nu = Number(nu) + if isinstance(E, str): + E = Function(E)(*input_variables) + elif isinstance(E, (float, int)): + E = Number(E) + lambda_ = nu * E / ((1 + nu) * (1 - 2 * nu)) + mu = E / (2 * (1 + nu)) + else: + if isinstance(lambda_, str): + lambda_ = Function(lambda_)(*input_variables) + elif isinstance(lambda_, (float, int)): + lambda_ = Number(lambda_) + if isinstance(mu, str): + mu = Function(mu)(*input_variables) + elif isinstance(mu, (float, int)): + mu = Number(mu) + if isinstance(rho, str): + rho = Function(rho)(*input_variables) + elif isinstance(rho, (float, int)): + rho = Number(rho) + + # set equations + self.equations = {} + + # Stress equations + self.equations["stress_disp_xx"] = ( + lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * u.diff(x) + - sigma_xx + ) + self.equations["stress_disp_yy"] = ( + lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * v.diff(y) + - sigma_yy + ) + self.equations["stress_disp_zz"] = ( + lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * w.diff(z) + - sigma_zz + ) + self.equations["stress_disp_xy"] = mu * (u.diff(y) + v.diff(x)) - sigma_xy + self.equations["stress_disp_xz"] = mu * (u.diff(z) + w.diff(x)) - sigma_xz + self.equations["stress_disp_yz"] = mu * (v.diff(z) + w.diff(y)) - sigma_yz + + # Equations of equilibrium + self.equations["equilibrium_x"] = rho * ((u.diff(t)).diff(t)) - ( + sigma_xx.diff(x) + sigma_xy.diff(y) + sigma_xz.diff(z) + ) + self.equations["equilibrium_y"] = rho * ((v.diff(t)).diff(t)) - ( + sigma_xy.diff(x) + sigma_yy.diff(y) + sigma_yz.diff(z) + ) + self.equations["equilibrium_z"] = rho * ((w.diff(t)).diff(t)) - ( + sigma_xz.diff(x) + sigma_yz.diff(y) + sigma_zz.diff(z) + ) + + # Traction equations + self.equations["traction_x"] = ( + normal_x * sigma_xx + normal_y * sigma_xy + normal_z * sigma_xz + ) + self.equations["traction_y"] = ( + normal_x * sigma_xy + normal_y * sigma_yy + normal_z * sigma_yz + ) + self.equations["traction_z"] = ( + normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz + 
) + + # Navier equations + self.equations["navier_x"] = ( + rho * ((u.diff(t)).diff(t)) + - (lambda_ + mu) * (u.diff(x) + v.diff(y) + w.diff(z)).diff(x) + - mu * ((u.diff(x)).diff(x) + (u.diff(y)).diff(y) + (u.diff(z)).diff(z)) + ) + self.equations["navier_y"] = ( + rho * ((v.diff(t)).diff(t)) + - (lambda_ + mu) * (u.diff(x) + v.diff(y) + w.diff(z)).diff(y) + - mu * ((v.diff(x)).diff(x) + (v.diff(y)).diff(y) + (v.diff(z)).diff(z)) + ) + self.equations["navier_z"] = ( + rho * ((w.diff(t)).diff(t)) + - (lambda_ + mu) * (u.diff(x) + v.diff(y) + w.diff(z)).diff(z) + - mu * ((w.diff(x)).diff(x) + (w.diff(y)).diff(y) + (w.diff(z)).diff(z)) + ) + + if self.dim == 2: + self.equations.pop("navier_z") + self.equations.pop("stress_disp_zz") + self.equations.pop("stress_disp_xz") + self.equations.pop("stress_disp_yz") + self.equations.pop("equilibrium_z") + self.equations.pop("traction_z") + + +@pytest.mark.parametrize( + "E,nu,lambda_,mu", + ( + (2.0, 3.0, None, None), + (None, None, 2.0, 3.0), + ), +) +@pytest.mark.parametrize("rho", (1,)) +@pytest.mark.parametrize("dim", (2, 3)) +@pytest.mark.parametrize("time", (False, True)) +def test_linearelasticity(E, nu, lambda_, mu, rho, dim, time): + """Test for linearelasticity equation.""" + # define input/output keys + input_keys = ("x", "y", "z")[:dim] + if time: + input_keys = ("t",) + input_keys + + disp_output_keys = ("u", "v") + if dim == 3: + disp_output_keys += ("w",) + disp_output_keys += ("p",) + + stress_output_keys = ("sigma_xx", "sigma_yy") + if dim == 3: + stress_output_keys += ("sigma_zz",) + stress_output_keys += ("sigma_xy",) + if dim == 3: + stress_output_keys += ("sigma_xz", "sigma_yz") + + # prepare input data in dict + batch_size = 13 + input_dict = {} + for var in input_keys: + input_dict[var] = paddle.randn([batch_size, 1]) + input_dict[var].stop_gradient = False + input_dict[f"normal_{var}"] = paddle.randn([batch_size, 1]) + input_dict[f"normal_{var}"].stop_gradient = False + + # prepare model + disp_net = ppsci.arch.MLP( + input_keys, disp_output_keys, 3, 16, "silu", weight_norm=True + ) + stress_net = ppsci.arch.MLP( + input_keys, + stress_output_keys, + 3, + 16, + "silu", + weight_norm=True, + ) + model_list = ppsci.arch.ModelList((disp_net, stress_net)) + + # prepare python function expressions and sympy-expression in dict + functional_expr_dict = equation.LinearElasticity( + E, nu, lambda_, mu, rho, dim, time + ).equations + sympy_expr_dict = LinearElasticity_sympy( + E, nu, lambda_, mu, rho, dim, time + ).equations + for target, expr in sympy_expr_dict.items(): + sympy_expr_dict[target] = expression.sympy_to_function( + target, expr, [disp_net, stress_net] + ) + + # compute equation with python function + output_dict_functional = model_list(input_dict) + for name, expr in functional_expr_dict.items(): + if callable(expr): + output_dict_functional[name] = expr( + {**output_dict_functional, **input_dict} + ) + else: + raise TypeError(f"expr type({type(expr)}) is invalid") + clear() + + # compute equation with funciton converted from sympy + output_dict_sympy = {k: v for k, v in input_dict.items()} + for name, _ in sympy_expr_dict.items(): + output_dict_sympy[name] = sympy_expr_dict[name]( + {**output_dict_sympy, **input_dict} + ) + clear() + + # test for result + for key in functional_expr_dict: + assert paddle.allclose( + output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + ) + + +if __name__ == "__main__": + pytest.main() diff --git a/test/utils/test_navier_stokes_sympy.py b/test/utils/test_navier_stokes_sympy.py 
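As a quick sanity check of the E/nu branch in LinearElasticity_sympy above, the derived Lamé parameters can be reproduced by hand (illustrative values, not the ones used in the parametrized test):

    E, nu = 2.0, 0.3
    lambda_ = nu * E / ((1 + nu) * (1 - 2 * nu))  # 0.6 / 0.52 ≈ 1.1538
    mu = E / (2 * (1 + nu))                       # 2.0 / 2.6  ≈ 0.7692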
new file mode 100644 index 000000000..40a128b93 --- /dev/null +++ b/test/utils/test_navier_stokes_sympy.py @@ -0,0 +1,197 @@ +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import pytest +from sympy import Function +from sympy import Number +from sympy import Symbol + +import ppsci +from ppsci import equation +from ppsci.autodiff import clear +from ppsci.utils import expression + +__all__ = [] + + +class NavierStokes_sympy: + def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): + # set params + self.dim = dim + self.time = time + self.mixed_form = mixed_form + + # coordinates + x, y, z = Symbol("x"), Symbol("y"), Symbol("z") + + # time + t = Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = Function("u")(*input_variables) + v = Function("v")(*input_variables) + if self.dim == 3: + w = Function("w")(*input_variables) + else: + w = Number(0) + + # pressure + p = Function("p")(*input_variables) + + # kinematic viscosity + if isinstance(nu, str): + nu = Function(nu)(*input_variables) + elif isinstance(nu, (float, int)): + nu = Number(nu) + + # density + if isinstance(rho, str): + rho = Function(rho)(*input_variables) + elif isinstance(rho, (float, int)): + rho = Number(rho) + + # dynamic viscosity + mu = rho * nu + + # set equations + self.equations = {} + self.equations["continuity"] = ( + rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) + ) + + curl = Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) + self.equations["momentum_x"] = ( + (rho * u).diff(t) + + ( + u * ((rho * u).diff(x)) + + v * ((rho * u).diff(y)) + + w * ((rho * u).diff(z)) + + rho * u * (curl) + ) + + p.diff(x) + - (-2 / 3 * mu * (curl)).diff(x) + - (mu * u.diff(x)).diff(x) + - (mu * u.diff(y)).diff(y) + - (mu * u.diff(z)).diff(z) + - (mu * (curl).diff(x)) + ) + self.equations["momentum_y"] = ( + (rho * v).diff(t) + + ( + u * ((rho * v).diff(x)) + + v * ((rho * v).diff(y)) + + w * ((rho * v).diff(z)) + + rho * v * (curl) + ) + + p.diff(y) + - (-2 / 3 * mu * (curl)).diff(y) + - (mu * v.diff(x)).diff(x) + - (mu * v.diff(y)).diff(y) + - (mu * v.diff(z)).diff(z) + - (mu * (curl).diff(y)) + ) + self.equations["momentum_z"] = ( + (rho * w).diff(t) + + ( + u * ((rho * w).diff(x)) + + v * ((rho * w).diff(y)) + + w * ((rho * w).diff(z)) + + rho * w * (curl) + ) + + p.diff(z) + - (-2 / 3 * mu * (curl)).diff(z) + - (mu * w.diff(x)).diff(x) + - (mu * w.diff(y)).diff(y) + - (mu * w.diff(z)).diff(z) + - (mu * (curl).diff(z)) + ) + + if self.dim == 2: + self.equations.pop("momentum_z") + + +@pytest.mark.parametrize("nu", (2.0,)) +@pytest.mark.parametrize("rho", (1.0,)) +@pytest.mark.parametrize("dim", (2, 3)) +@pytest.mark.parametrize("time", (False, True)) +def test_navier_stokes(nu, rho, dim, time): + """Test for navier_stokes equation.""" + # 
define input/output keys + input_keys = ("x", "y", "z")[:dim] + if time: + input_keys = ("t",) + input_keys + + output_keys = ("u", "v") + if dim == 3: + output_keys += ("w",) + output_keys += ("p",) + + # prepare input data in dict + batch_size = 13 + input_dict = {} + for var in input_keys: + input_dict[var] = paddle.randn([batch_size, 1]) + input_dict[var].stop_gradient = False + + # prepare model + model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) + + # prepare python function expressions and sympy-expression in dict + functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations + sympy_expr_dict = NavierStokes_sympy(nu, rho, dim, time).equations + for target, expr in sympy_expr_dict.items(): + sympy_expr_dict[target] = expression.sympy_to_function( + target, + expr, + [ + model, + ], + ) + + # compute equation with python function + output_dict_functional = model(input_dict) + for name, expr in functional_expr_dict.items(): + if callable(expr): + output_dict_functional[name] = expr( + {**output_dict_functional, **input_dict} + ) + else: + raise TypeError(f"expr type({type(expr)}) is invalid") + clear() + + # compute equation with funciton converted from sympy + output_dict_sympy = {k: v for k, v in input_dict.items()} + for name, _ in sympy_expr_dict.items(): + output_dict_sympy[name] = sympy_expr_dict[name]( + {**output_dict_sympy, **input_dict} + ) + clear() + + # test for result + for key in functional_expr_dict: + assert paddle.allclose( + output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + ) + + +if __name__ == "__main__": + pytest.main() From bb9d71c312d0c8a2ff903ac85fd002b513c128f8 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 24 Aug 2023 11:21:42 +0000 Subject: [PATCH 06/48] add type hint and clean code --- ppsci/utils/expression.py | 196 +++++++++++++++----------------------- 1 file changed, 77 insertions(+), 119 deletions(-) diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index 3fe8ee916..cf06f2c2c 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -22,9 +22,10 @@ from typing import Union import paddle -import sympy +import sympy as sp from paddle import jit from paddle import nn +from typing_extensions import TypeAlias from ppsci.autodiff import hessian from ppsci.autodiff import jacobian @@ -202,20 +203,33 @@ def visu_forward( FUNC_MAP = { - sympy.sin: paddle.sin, - sympy.cos: paddle.cos, - sympy.exp: paddle.exp, - sympy.Pow: paddle.pow, - sympy.log: paddle.log, - sympy.tan: paddle.tan, - sympy.Max: paddle.maximum, - sympy.Min: paddle.minimum, - sympy.Abs: paddle.abs, - sympy.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), + sp.sin: paddle.sin, + sp.cos: paddle.cos, + sp.exp: paddle.exp, + sp.Pow: paddle.pow, + sp.log: paddle.log, + sp.tan: paddle.tan, + sp.Max: paddle.maximum, + sp.Min: paddle.minimum, + sp.Abs: paddle.abs, + sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), } - -def single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): +SYMPY_BUILTIN_FUNC: TypeAlias = Union[ + sp.sin, + sp.cos, + sp.exp, + sp.Pow, + sp.log, + sp.tan, + sp.Max, + sp.Min, + sp.Abs, + sp.Heaviside, +] + + +def _single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): order_left = order while order_left > 0: if order_left >= 2: @@ -227,13 +241,14 @@ def single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): return dvar -def cvt_to_key(sympy_node: sympy.Basic): - if 
isinstance(sympy_node, sympy.Heaviside): - return str(sympy_node) - if isinstance(sympy_node, (sympy.Symbol, sympy.Function)): +def _cvt_to_key(sympy_node: sp.Basic): + if isinstance( + sympy_node, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function) + ): return sympy_node.name - elif isinstance(sympy_node, sympy.Derivative): - expr_str = sympy_node.args[0].name # use 'f' instead of 'f(x,y,z)' + elif isinstance(sympy_node, sp.Derivative): + # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" + expr_str = sympy_node.args[0].name for symbol, order in sympy_node.args[1:]: expr_str += f"__{symbol}" * order return expr_str @@ -242,25 +257,17 @@ def cvt_to_key(sympy_node: sympy.Basic): class NodeBase(nn.Layer): - """ - The base class of the node in the computational graph. - - Args: - expr (sympy.Expr): The expression of the node. + """The base class of the node in expression tree.""" - Returns: - The input dictionary with the output of the node added. - """ - - def __init__(self, expr: sympy.Expr): + def __init__(self, expr: sp.Basic): super().__init__() self.expr = expr - self.key = cvt_to_key(self.expr) + self.key = _cvt_to_key(self.expr) - def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): - raise NotImplementedError + def forward(self, **kwargs): + raise NotImplementedError("NodeBase.forward is not implemented") - def __repr__(self): + def __str__(self): return ( self.__class__.__name__ + f"(expr: {self.expr}), type: {type(self.expr)})" ) @@ -268,37 +275,29 @@ def __repr__(self): class OperatorNode(NodeBase): """ - A node representing a sympy operator in the computational graph. - - (e.g. sin, cos, etc.) - - Args: - expr (sympy.Expr): The expression of the node. - - Returns: - The input dictionary with the output of the operator added. + A node representing a sp operator in the computational graph. """ - def __init__(self, expr: Union[sympy.Add, sympy.Mul, sympy.Derivative]): + def __init__(self, expr: SYMPY_BUILTIN_FUNC): super().__init__(expr) - def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): - if self.expr.func == sympy.Add: + def forward(self, data_dict: Dict): + if self.expr.func == sp.Add: data_dict[self.key] = paddle.add_n( - [data_dict[cvt_to_key(arg)] for arg in self.expr.args] + [data_dict[_cvt_to_key(arg)] for arg in self.expr.args] ) - elif self.expr.func == sympy.Mul: - data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + elif self.expr.func == sp.Mul: + data_dict[self.key] = data_dict[_cvt_to_key(self.expr.args[0])] for arg in self.expr.args[1:]: - data_dict[self.key] = data_dict[self.key] * data_dict[cvt_to_key(arg)] - elif self.expr.func == sympy.Derivative: + data_dict[self.key] = data_dict[self.key] * data_dict[_cvt_to_key(arg)] + elif self.expr.func == sp.Derivative: if self.key in data_dict: return data_dict - data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] + data_dict[self.key] = data_dict[_cvt_to_key(self.expr.args[0])] for symbol, order in self.expr.args[1:]: - data_dict[self.key] = single_derivate_func( + data_dict[self.key] = _single_derivate_func( data_dict[self.key], - data_dict[cvt_to_key(symbol)], + data_dict[_cvt_to_key(symbol)], order, ) else: @@ -308,11 +307,11 @@ def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): raise NotImplementedError( f"'{self.expr.func}' operator is not supported now." 
) - if self.expr.func == sympy.Heaviside: - data_dict[self.key] = func(data_dict[cvt_to_key(self.expr.args[0])]) + if self.expr.func == sp.Heaviside: + data_dict[self.key] = func(data_dict[_cvt_to_key(self.expr.args[0])]) else: data_dict[self.key] = func( - *[data_dict[cvt_to_key(arg)] for arg in self.expr.args] + *[data_dict[_cvt_to_key(arg)] for arg in self.expr.args] ) return data_dict @@ -320,22 +319,9 @@ def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): class LayerNode(NodeBase): """ A node representing a neural network in the computational graph - - Args: - expr (sympy.core.function.UndefinedFunction): Definition symbol of the neural network. - - Returns: - The input dictionary with the output of the neural network added. - - Note: - For the provided network, the forward should accept a dictionary as input and return a dictionary as output. - And the `output_keys` should be provided in the `__init__` function. - - Examples: - """ - def __init__(self, expr: sympy.core.function.UndefinedFunction, model: nn.Layer): + def __init__(self, expr: sp.core.function.UndefinedFunction, model: nn.Layer): super().__init__(expr) self.model = model @@ -353,29 +339,16 @@ def forward(self, data_dict: Dict): class ConstantNode(NodeBase): """ A node representing a constant in the computational graph. - - Args: - expr (sympy.Number or sympy.NumberSymbol): The constant to be applied. - - Returns: - The input dictionary with the constant added. - - Examples: - >>> node = ConstantNode(sympy.pi) - >>> node({}) - {'pi': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 3.1415927)} """ - def __init__(self, expr: sympy.Number or sympy.NumberSymbol): + def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): super().__init__(expr) - if self.expr.is_Float: - self.expr = float(self.expr) - elif self.expr.is_Integer: - self.expr = float(self.expr) - elif self.expr.is_Boolean: - self.expr = float(self.expr) - elif self.expr.is_Rational: + if ( + self.expr.is_Float + or self.expr.is_Integer + or self.expr.is_Boolean + or self.expr.is_Rational + ): self.expr = float(self.expr) else: raise TypeError( @@ -391,13 +364,6 @@ def forward(self, data_dict: Dict): class ComposedFunc(nn.Layer): """ Compose multiple functions into one function. - - Args: - data_dict (Dict): The input tensor dictionary. - model_dict (Dict[str, nn.Layer]): The dictionary of the models. - - Returns: - The dictionary of the outputs of the all calculated nodes. 
""" def __init__(self, target: str, funcs: List[NodeBase]): @@ -408,45 +374,37 @@ def __init__(self, target: str, funcs: List[NodeBase]): def forward(self, data_dict: Dict): for func in self.funcs: data_dict = func(data_dict) - return data_dict[self.funcs[-1].key] + return data_dict[self.funcs[-1].key] # return the computed result of root node -def post_traverse(cur_node, nodes): +def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: # traverse into sub-nodes - if isinstance(cur_node, sympy.core.function.UndefinedFunction): + if isinstance(cur_node, sp.core.function.UndefinedFunction): nodes.append(cur_node) - elif isinstance(cur_node, sympy.Function): + elif isinstance(cur_node, sp.Function): for arg in cur_node.args: - nodes = post_traverse(arg, nodes) + nodes = _post_traverse(arg, nodes) nodes.append(cur_node) - elif isinstance(cur_node, sympy.Derivative): - nodes = post_traverse(cur_node.args[0], nodes) + elif isinstance(cur_node, sp.Derivative): + nodes = _post_traverse(cur_node.args[0], nodes) nodes.append(cur_node) - elif isinstance(cur_node, sympy.Symbol): + elif isinstance(cur_node, sp.Symbol): return nodes - elif isinstance(cur_node, sympy.Number): + elif isinstance(cur_node, sp.Number): nodes.append(cur_node) else: for arg in cur_node.args: - nodes = post_traverse(arg, nodes) + nodes = _post_traverse(arg, nodes) nodes.append(cur_node) return nodes -def sympy_to_function(target: str, expr: sympy.Expr, models: nn.Layer): +def sympy_to_function(target: str, expr: sp.Expr, models: nn.Layer) -> ComposedFunc: """ - Convert a sympy expression to a ComposedFunc. - - Args: - expr (sympy.Expr): the sympy expression - - Returns: - A ComposedFunc that can execute the formula represented by the sympy expression. - - Examples: + Convert a sp expression to a ComposedFunc. 
""" sympy_nodes = [] - sympy_nodes = post_traverse(expr, sympy_nodes) + sympy_nodes = _post_traverse(expr, sympy_nodes) sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] sympy_nodes = list( dict.fromkeys(sympy_nodes) @@ -455,7 +413,7 @@ def sympy_to_function(target: str, expr: sympy.Expr, models: nn.Layer): callable_nodes = [] for i, node in enumerate(sympy_nodes): logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") - if isinstance(node.func, sympy.core.function.UndefinedFunction): + if isinstance(node.func, sp.core.function.UndefinedFunction): match = False for model in models: if str(node.func.name) in model.output_keys: From 8783ec9d5e97ae3edc3f500847e474a02f6a2c7a Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Fri, 25 Aug 2023 06:47:08 +0000 Subject: [PATCH 07/48] update unitest for N-S equation with sympy-base 'nu' --- ppsci/utils/expression.py | 21 +- ppsci/utils/logger.py | 2 +- test/utils/test_linear_elasticity_sympy.py | 2 +- test/utils/test_navier_stokes_sympy.py | 521 ++++++++++++++++++--- 4 files changed, 459 insertions(+), 87 deletions(-) diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index cf06f2c2c..28a0e70de 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -245,7 +245,11 @@ def _cvt_to_key(sympy_node: sp.Basic): if isinstance( sympy_node, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function) ): - return sympy_node.name + if hasattr(sympy_node, "name"): + # custom function + return sympy_node.name + else: + str(sympy_node) elif isinstance(sympy_node, sp.Derivative): # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" expr_str = sympy_node.args[0].name @@ -256,7 +260,7 @@ def _cvt_to_key(sympy_node: sp.Basic): return str(sympy_node) -class NodeBase(nn.Layer): +class Node(nn.Layer): """The base class of the node in expression tree.""" def __init__(self, expr: sp.Basic): @@ -265,7 +269,7 @@ def __init__(self, expr: sp.Basic): self.key = _cvt_to_key(self.expr) def forward(self, **kwargs): - raise NotImplementedError("NodeBase.forward is not implemented") + raise NotImplementedError("Node.forward is not implemented") def __str__(self): return ( @@ -273,7 +277,7 @@ def __str__(self): ) -class OperatorNode(NodeBase): +class OperatorNode(Node): """ A node representing a sp operator in the computational graph. """ @@ -283,7 +287,7 @@ def __init__(self, expr: SYMPY_BUILTIN_FUNC): def forward(self, data_dict: Dict): if self.expr.func == sp.Add: - data_dict[self.key] = paddle.add_n( + data_dict[self.key] = sum( [data_dict[_cvt_to_key(arg)] for arg in self.expr.args] ) elif self.expr.func == sp.Mul: @@ -316,7 +320,7 @@ def forward(self, data_dict: Dict): return data_dict -class LayerNode(NodeBase): +class LayerNode(Node): """ A node representing a neural network in the computational graph """ @@ -330,13 +334,14 @@ def forward(self, data_dict: Dict): return data_dict output_dict = self.model(data_dict) + print("call model forward") for key, value in output_dict.items(): data_dict[key] = value return data_dict -class ConstantNode(NodeBase): +class ConstantNode(Node): """ A node representing a constant in the computational graph. """ @@ -366,7 +371,7 @@ class ComposedFunc(nn.Layer): Compose multiple functions into one function. 
""" - def __init__(self, target: str, funcs: List[NodeBase]): + def __init__(self, target: str, funcs: List[Node]): super().__init__() self.funcs = funcs self.target = target diff --git a/ppsci/utils/logger.py b/ppsci/utils/logger.py index 977fbee4d..627561bc3 100644 --- a/ppsci/utils/logger.py +++ b/ppsci/utils/logger.py @@ -102,7 +102,7 @@ def ensure_logger(log_func): def wrapped_log_func(fmt, *args): if _logger is None: init_logger() - _logger.info( + _logger.warning( "Before you call functions within the logger, the logger has already " "been automatically initialized. Since `log_file` is not specified by " "default, information will not be written to any file except being " diff --git a/test/utils/test_linear_elasticity_sympy.py b/test/utils/test_linear_elasticity_sympy.py index 2686f8aa8..87100ec3e 100644 --- a/test/utils/test_linear_elasticity_sympy.py +++ b/test/utils/test_linear_elasticity_sympy.py @@ -253,7 +253,7 @@ def test_linearelasticity(E, nu, lambda_, mu, rho, dim, time): # test for result for key in functional_expr_dict: assert paddle.allclose( - output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + output_dict_functional[key], output_dict_sympy[key], atol=2e-7 ) diff --git a/test/utils/test_navier_stokes_sympy.py b/test/utils/test_navier_stokes_sympy.py index 40a128b93..0639ac108 100644 --- a/test/utils/test_navier_stokes_sympy.py +++ b/test/utils/test_navier_stokes_sympy.py @@ -14,30 +14,27 @@ import paddle import pytest -from sympy import Function -from sympy import Number -from sympy import Symbol +import sympy as sp import ppsci from ppsci import equation from ppsci.autodiff import clear +from ppsci.autodiff import hessian as H +from ppsci.autodiff import jacobian as J from ppsci.utils import expression -__all__ = [] - class NavierStokes_sympy: - def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): + def __init__(self, nu, rho=1, dim=3, time=True): # set params self.dim = dim self.time = time - self.mixed_form = mixed_form # coordinates - x, y, z = Symbol("x"), Symbol("y"), Symbol("z") + x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") # time - t = Symbol("t") + t = sp.Symbol("t") # make input variables input_variables = {"x": x, "y": y, "z": z, "t": t} @@ -47,27 +44,27 @@ def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): input_variables.pop("t") # velocity componets - u = Function("u")(*input_variables) - v = Function("v")(*input_variables) + u = sp.Function("u")(*input_variables) + v = sp.Function("v")(*input_variables) if self.dim == 3: - w = Function("w")(*input_variables) + w = sp.Function("w")(*input_variables) else: - w = Number(0) + w = sp.Number(0) # pressure - p = Function("p")(*input_variables) + p = sp.Function("p")(*input_variables) # kinematic viscosity if isinstance(nu, str): - nu = Function(nu)(*input_variables) + nu = sp.Function(nu)(*input_variables) elif isinstance(nu, (float, int)): - nu = Number(nu) + nu = sp.Number(nu) # density if isinstance(rho, str): - rho = Function(rho)(*input_variables) + rho = sp.Function(rho)(*input_variables) elif isinstance(rho, (float, int)): - rho = Number(rho) + rho = sp.Number(rho) # dynamic viscosity mu = rho * nu @@ -78,7 +75,7 @@ def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) ) - curl = Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) + curl = sp.Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) self.equations["momentum_x"] = ( (rho * 
u).diff(t) + ( @@ -129,68 +126,438 @@ def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): self.equations.pop("momentum_z") -@pytest.mark.parametrize("nu", (2.0,)) -@pytest.mark.parametrize("rho", (1.0,)) -@pytest.mark.parametrize("dim", (2, 3)) -@pytest.mark.parametrize("time", (False, True)) -def test_navier_stokes(nu, rho, dim, time): - """Test for navier_stokes equation.""" - # define input/output keys - input_keys = ("x", "y", "z")[:dim] - if time: - input_keys = ("t",) + input_keys - - output_keys = ("u", "v") - if dim == 3: - output_keys += ("w",) - output_keys += ("p",) - - # prepare input data in dict - batch_size = 13 - input_dict = {} - for var in input_keys: - input_dict[var] = paddle.randn([batch_size, 1]) - input_dict[var].stop_gradient = False - - # prepare model - model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) - - # prepare python function expressions and sympy-expression in dict - functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations - sympy_expr_dict = NavierStokes_sympy(nu, rho, dim, time).equations - for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = expression.sympy_to_function( - target, - expr, - [ - model, - ], - ) +class ZeroEquation_sympy: + """ + Zero Equation Turbulence model - # compute equation with python function - output_dict_functional = model(input_dict) - for name, expr in functional_expr_dict.items(): - if callable(expr): - output_dict_functional[name] = expr( - {**output_dict_functional, **input_dict} - ) + Parameters + ========== + nu : float + The kinematic viscosity of the fluid. + max_distance : float + The maximum wall distance in the flow field. + rho : float, Sympy sp.Symbol/Expr, str + The density. If `rho` is a str then it is + converted to Sympy sp.Function of form 'rho(x,y,z,t)'. + If 'rho' is a Sympy sp.Symbol or Expression then this + is substituted into the equation. Default is 1. + dim : int + Dimension of the Zero Equation Turbulence model (2 or 3). + Default is 3. + time : bool + If time-dependent equations or not. Default is True. 
+ + Example + """ + + def __init__( + self, nu, max_distance, rho=1, dim=3, time=True + ): # TODO add density into model + # set params + self.dim = dim + self.time = time + + # model coefficients + self.max_distance = max_distance + self.karman_constant = 0.419 + self.max_distance_ratio = 0.09 + + # coordinates + x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") + + # time + t = sp.Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = sp.Function("u")(*input_variables) + v = sp.Function("v")(*input_variables) + if self.dim == 3: + w = sp.Function("w")(*input_variables) else: - raise TypeError(f"expr type({type(expr)}) is invalid") - clear() - - # compute equation with funciton converted from sympy - output_dict_sympy = {k: v for k, v in input_dict.items()} - for name, _ in sympy_expr_dict.items(): - output_dict_sympy[name] = sympy_expr_dict[name]( - {**output_dict_sympy, **input_dict} - ) - clear() + w = sp.Number(0) + + # density + if type(rho) is str: + rho = sp.Function(rho)(*input_variables) + elif type(rho) in [float, int]: + rho = sp.Number(rho) + + # wall distance + normal_distance = sp.Function("sdf")(*input_variables) - # test for result - for key in functional_expr_dict: - assert paddle.allclose( - output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + # mixing length + mixing_length = sp.Min( + self.karman_constant * normal_distance, + self.max_distance_ratio * self.max_distance, ) + G = ( + 2 * u.diff(x) ** 2 + + 2 * v.diff(y) ** 2 + + 2 * w.diff(z) ** 2 + + (u.diff(y) + v.diff(x)) ** 2 + + (u.diff(z) + w.diff(x)) ** 2 + + (v.diff(z) + w.diff(y)) ** 2 + ) + + # set equations + self.equations = {} + self.equations["nu"] = nu + rho * mixing_length**2 * sp.sqrt(G) + + +class Test_NavierStokes_sympy: + @pytest.mark.parametrize("nu", (2.0,)) + @pytest.mark.parametrize("rho", (1.0,)) + @pytest.mark.parametrize("dim", (2,)) + @pytest.mark.parametrize("time", (False, True)) + def test_nu_sympy(self, nu, rho, dim, time): + """Test for navier_stokes equation.""" + # define input/output keys + ze = ZeroEquation_sympy(nu=nu, rho=1.0, dim=dim, max_distance=3.4, time=time) + nu_sympy = ze.equations["nu"] + + input_keys = ("x", "y", "z")[:dim] + if time: + input_keys = ("t",) + input_keys + + output_keys = ("u", "v") + if dim == 3: + output_keys += ("w",) + output_keys += ("p",) + + # prepare input data in dict + batch_size = 13 + input_dict = {} + for var in input_keys: + input_dict[var] = paddle.randn([batch_size, 1]) + input_dict[var].stop_gradient = False + if var != "t": + input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) + input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) + + input_dict[f"sdf__{var}"].stop_gradient = False + input_dict[f"normal__{var}"].stop_gradient = False + + input_dict["sdf"] = paddle.randn([batch_size, 1]) + input_dict["sdf"].stop_gradient = False + + # prepare model + model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) + + # prepare python function expressions and sympy-expression in dict + def nu_f(out): + karman_constant = 0.419 + max_distance_ratio = 0.09 + normal_distance = out["sdf"] + max_distance = ze.max_distance + mixing_length = paddle.minimum( + karman_constant * normal_distance, + max_distance_ratio * max_distance, + ) + x, y = out["x"], out["y"] + u, v = out["u"], out["v"] + G = 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + (J(u, y) + J(v, x)) ** 2 + if dim == 
3: + z, w = out["z"], out["w"] + G += ( + +2 * J(w, z) ** 2 + + (J(u, z) + J(w, x)) ** 2 + + (J(v, z) + J(w, y)) ** 2 + ) + return nu + rho * mixing_length**2 * paddle.sqrt(G) + + functional_expr_dict = equation.NavierStokes(nu_f, rho, dim, time).equations + + def continuity_f(out): + x, y = out["x"], out["y"] + u, v = out["u"], out["v"] + return 1.0 * J(u, x) + 1.0 * J(v, y) + + def momentum_x_f(out): + x, y = out["x"], out["y"] + u, v, p = out["u"], out["v"], out["p"] + if time: + t = out["t"] + return ( + -( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(u, x) + - ( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(u, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 + + 2 * J(u, x) * J(J(u, x), y) + + 2 * J(v, y) * H(v, y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__y"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(u, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 + + 2 * J(u, x) * H(u, x) + + 2 * J(v, y) * J(J(v, x), y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__x"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(u, x) + + (1.0 * u * J(u, x) + 1.0 * v * J(u, y) + J(p, x)) + + (J(u, t) if time else 0) + ) + + def momentum_y_f(out): + x, y = out["x"], out["y"] + u, v, p = out["u"], out["v"], out["p"] + if time: + t = out["t"] + return ( + -( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(v, x) + - ( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(v, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 + + 2 * J(u, x) * J(J(u, x), y) + + 2 * J(v, y) * H(v, y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__y"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(v, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 + + 2 * J(u, x) * H(u, x) + + 2 * J(v, y) * J(J(v, x), y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 
0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__x"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(v, x) + + (1.0 * u * J(v, x) + 1.0 * v * J(v, y) + J(p, y)) + + (J(v, t) if time else 0) + ) + + functional_expr_dict["continuity"] = continuity_f + functional_expr_dict["momentum_x"] = momentum_x_f + functional_expr_dict["momentum_y"] = momentum_y_f + + sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations + for target, expr in sympy_expr_dict.items(): + sympy_expr_dict[target] = expression.sympy_to_function( + target, + expr, + [ + model, + ], + ) + + # compute equation with python function + output_dict_functional = model(input_dict) + for name, expr in functional_expr_dict.items(): + if callable(expr): + output_dict_functional[name] = expr( + {**output_dict_functional, **input_dict} + ) + else: + raise TypeError(f"expr type({type(expr)}) is invalid") + clear() + + # compute equation with funciton converted from sympy + output_dict_sympy = {k: v for k, v in input_dict.items()} + for name, expr in sympy_expr_dict.items(): + tmp = expr(output_dict_sympy) + output_dict_sympy[name] = tmp + clear() + + # test for result + for key in functional_expr_dict: + assert paddle.allclose( + output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + ), f"{key} not equal." + + @pytest.mark.parametrize("nu", (2.0,)) + @pytest.mark.parametrize("rho", (1.0,)) + @pytest.mark.parametrize("dim", (2,)) + @pytest.mark.parametrize("time", (False, True)) + def test_nu_constant(self, nu, rho, dim, time): + """Test for navier_stokes equation.""" + # define input/output keys + nu_sympy = nu + + input_keys = ("x", "y", "z")[:dim] + if time: + input_keys = ("t",) + input_keys + + output_keys = ("u", "v") + if dim == 3: + output_keys += ("w",) + output_keys += ("p",) + + # prepare input data in dict + batch_size = 13 + input_dict = {} + for var in input_keys: + input_dict[var] = paddle.randn([batch_size, 1]) + input_dict[var].stop_gradient = False + if var != "t": + input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) + input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) + + input_dict[f"sdf__{var}"].stop_gradient = False + input_dict[f"normal__{var}"].stop_gradient = False + + input_dict["sdf"] = paddle.randn([batch_size, 1]) + input_dict["sdf"].stop_gradient = False + + # prepare model + model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) + + # prepare python function expressions and sympy-expression in dict + functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations + + sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations + for target, expr in sympy_expr_dict.items(): + sympy_expr_dict[target] = expression.sympy_to_function( + target, + expr, + [ + model, + ], + ) + + # compute equation with python function + output_dict_functional = model(input_dict) + for name, expr in functional_expr_dict.items(): + if callable(expr): + output_dict_functional[name] = expr( + {**output_dict_functional, **input_dict} + ) + else: + raise TypeError(f"expr type({type(expr)}) is invalid") + clear() + + # compute equation with funciton converted from sympy + output_dict_sympy = {k: v for k, v in input_dict.items()} + for name, expr in sympy_expr_dict.items(): + tmp = 
expr(output_dict_sympy) + output_dict_sympy[name] = tmp + clear() + + # test for result + for key in functional_expr_dict: + assert paddle.allclose( + output_dict_functional[key], output_dict_sympy[key], atol=1e-7 + ), f"{key} not equal." if __name__ == "__main__": From 6f376b6d0c88fd4f40aeb0a6846f92c99dcf443c Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Fri, 25 Aug 2023 07:19:26 +0000 Subject: [PATCH 08/48] delete redundant sympy2func.py --- ppsci/equation/sympy2func.py | 667 ----------------------------------- 1 file changed, 667 deletions(-) delete mode 100644 ppsci/equation/sympy2func.py diff --git a/ppsci/equation/sympy2func.py b/ppsci/equation/sympy2func.py deleted file mode 100644 index e63cfb43b..000000000 --- a/ppsci/equation/sympy2func.py +++ /dev/null @@ -1,667 +0,0 @@ -# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -from typing import Dict -from typing import List -from typing import Union - -import paddle -import paddle.nn as nn -import sympy - -import ppsci -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian -from ppsci.utils import logger - -FUNC_MAP = { - sympy.sin: paddle.sin, - sympy.cos: paddle.cos, - sympy.exp: paddle.exp, - sympy.Pow: paddle.pow, - # sympy.sqrt: paddle.sqrt, - sympy.log: paddle.log, - sympy.tan: paddle.tan, - sympy.Max: paddle.maximum, - sympy.Min: paddle.minimum, - sympy.Abs: paddle.abs, - sympy.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), -} - - -def single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): - order_left = order - while order_left > 0: - if order_left >= 2: - dvar = hessian(dvar, invar) - order_left -= 2 - else: - dvar = jacobian(dvar, invar) - order_left -= 1 - return dvar - - -def cvt_to_key(sympy_node: sympy.Basic): - if isinstance(sympy_node, sympy.Heaviside): - return str(sympy_node) - if isinstance(sympy_node, (sympy.Symbol, sympy.Function)): - return sympy_node.name - elif isinstance(sympy_node, sympy.Derivative): - expr_str = sympy_node.args[0].name # use 'f' instead of 'f(x,y,z)' - for symbol, order in sympy_node.args[1:]: - expr_str += f"__{symbol}" * order - return expr_str - else: - return str(sympy_node) - - -class NodeBase(nn.Layer): - """ - The base class of the node in the computational graph. - - Args: - expr (sympy.Expr): The expression of the node. - - Returns: - The input dictionary with the output of the node added. - """ - - def __init__(self, expr: sympy.Expr): - super().__init__() - self.expr = expr - self.key = cvt_to_key(self.expr) - - def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): - raise NotImplementedError - - def __repr__(self): - return ( - self.__class__.__name__ + f"(expr: {self.expr}), type: {type(self.expr)})" - ) - - -class OperatorNode(NodeBase): - """ - A node representing a sympy operator in the computational graph. - - (e.g. sin, cos, etc.) - - Args: - expr (sympy.Expr): The expression of the node. 
- - Returns: - The input dictionary with the output of the operator added. - """ - - def __init__(self, expr: Union[sympy.Add, sympy.Mul, sympy.Derivative]): - super().__init__(expr) - - def forward(self, data_dict: Dict, model_dict: Dict[str, nn.Layer] = None): - if self.expr.func == sympy.Add: - data_dict[self.key] = paddle.add_n( - [data_dict[cvt_to_key(arg)] for arg in self.expr.args] - ) - elif self.expr.func == sympy.Mul: - data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] - for arg in self.expr.args[1:]: - data_dict[self.key] = data_dict[self.key] * data_dict[cvt_to_key(arg)] - elif self.expr.func == sympy.Derivative: - if self.key in data_dict: - return data_dict - data_dict[self.key] = data_dict[cvt_to_key(self.expr.args[0])] - for symbol, order in self.expr.args[1:]: - data_dict[self.key] = single_derivate_func( - data_dict[self.key], - data_dict[cvt_to_key(symbol)], - order, - ) - else: - try: - func = FUNC_MAP[self.expr.func] - except KeyError: - raise NotImplementedError( - f"'{self.expr.func}' operator is not supported now." - ) - if self.expr.func == sympy.Heaviside: - data_dict[self.key] = func(data_dict[cvt_to_key(self.expr.args[0])]) - else: - data_dict[self.key] = func( - *[data_dict[cvt_to_key(arg)] for arg in self.expr.args] - ) - return data_dict - - -class LayerNode(NodeBase): - """ - A node representing a neural network in the computational graph - - Args: - expr (sympy.core.function.UndefinedFunction): Definition symbol of the neural network. - - Returns: - The input dictionary with the output of the neural network added. - - Note: - For the provided network, the forward should accept a dictionary as input and return a dictionary as output. - And the `output_keys` should be provided in the `__init__` function. - - Examples: - - """ - - def __init__(self, expr: sympy.core.function.UndefinedFunction, model: nn.Layer): - super().__init__(expr) - self.model = model - - def forward(self, data_dict: Dict): - if self.key in data_dict: - return data_dict - - output_dict = self.model(data_dict) - for key, value in output_dict.items(): - data_dict[key] = value - - return data_dict - - -class ConstantNode(NodeBase): - """ - A node representing a constant in the computational graph. - - Args: - expr (sympy.Number or sympy.NumberSymbol): The constant to be applied. - - Returns: - The input dictionary with the constant added. - - Examples: - >>> node = ConstantNode(sympy.pi) - >>> node({}) - {'pi': Tensor(shape=[], dtype=float32, place=Place(gpu:0), stop_gradient=True, - 3.1415927)} - """ - - def __init__(self, expr: sympy.Number or sympy.NumberSymbol): - super().__init__(expr) - if self.expr.is_Float: - self.expr = float(self.expr) - elif self.expr.is_Integer: - self.expr = float(self.expr) - elif self.expr.is_Boolean: - self.expr = float(self.expr) - elif self.expr.is_Rational: - self.expr = float(self.expr) - else: - raise TypeError( - f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" - ) - self.expr = paddle.to_tensor(self.expr) - - def forward(self, data_dict: Dict): - data_dict[self.key] = self.expr - return data_dict - - -class ComposedFunc(nn.Layer): - """ - Compose multiple functions into one function. - - Args: - data_dict (Dict): The input tensor dictionary. - model_dict (Dict[str, nn.Layer]): The dictionary of the models. - - Returns: - The dictionary of the outputs of the all calculated nodes. 
- """ - - def __init__(self, target: str, funcs: List[NodeBase]): - super().__init__() - self.funcs = funcs - self.target = target - - def forward(self, data_dict: Dict): - for func in self.funcs: - data_dict = func(data_dict) - return data_dict[self.funcs[-1].key] - - -def post_traverse(cur_node, nodes): - # traverse into sub-nodes - if isinstance(cur_node, sympy.core.function.UndefinedFunction): - nodes.append(cur_node) - elif isinstance(cur_node, sympy.Function): - for arg in cur_node.args: - nodes = post_traverse(arg, nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sympy.Derivative): - nodes = post_traverse(cur_node.args[0], nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sympy.Symbol): - return nodes - elif isinstance(cur_node, sympy.Number): - nodes.append(cur_node) - else: - for arg in cur_node.args: - nodes = post_traverse(arg, nodes) - nodes.append(cur_node) - return nodes - - -def sympy_to_function(target: str, expr: sympy.Expr, models: nn.Layer): - """ - Convert a sympy expression to a ComposedFunc. - - Args: - expr (sympy.Expr): the sympy expression - - Returns: - A ComposedFunc that can execute the formula represented by the sympy expression. - - Examples: - """ - sympy_nodes = [] - sympy_nodes = post_traverse(expr, sympy_nodes) - sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] - sympy_nodes = list( - dict.fromkeys(sympy_nodes) - ) # remove duplicates with topo-order kept - - callable_nodes = [] - for i, node in enumerate(sympy_nodes): - logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") - if isinstance(node.func, sympy.core.function.UndefinedFunction): - match = False - for model in models: - if str(node.func.name) in model.output_keys: - callable_nodes.append(LayerNode(node, model)) - if match: - raise ValueError( - f"function {node} can match at least 2 output key of models, which is forbidden." - ) - match = True - elif ( - isinstance(node, tuple(FUNC_MAP.keys())) - or node.is_Add - or node.is_Mul - or node.is_Derivative - or node.is_Pow - ): - callable_nodes.append(OperatorNode(node)) - elif node.is_Number or node.is_NumberSymbol: - callable_nodes.append(ConstantNode(node)) - else: - raise NotImplementedError( - f"The node {node} is not supported in sympy_to_function." - ) - return ComposedFunc(target, callable_nodes) - - -class ZeroEquation: - """ - Zero Equation Turbulence model - - Parameters - ========== - nu : float - The kinematic viscosity of the fluid. - max_distance : float - The maximum wall distance in the flow field. - rho : float, Sympy Symbol/Expr, str - The density. If `rho` is a str then it is - converted to Sympy Function of form 'rho(x,y,z,t)'. - If 'rho' is a Sympy Symbol or Expression then this - is substituted into the equation. Default is 1. - dim : int - Dimension of the Zero Equation Turbulence model (2 or 3). - Default is 3. - time : bool - If time-dependent equations or not. Default is True. 
- - Example - """ - - def __init__( - self, nu, max_distance, rho=1, dim=3, time=True - ): # TODO add density into model - # set params - self.dim = dim - self.time = time - - # model coefficients - self.max_distance = max_distance - self.karman_constant = 0.419 - self.max_distance_ratio = 0.09 - - # coordinates - x, y, z = sympy.Symbol("x"), sympy.Symbol("y"), sympy.Symbol("z") - - # time - t = sympy.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sympy.Function("u")(*input_variables) - v = sympy.Function("v")(*input_variables) - if self.dim == 3: - w = sympy.Function("w")(*input_variables) - else: - w = sympy.Number(0) - - # density - if type(rho) is str: - rho = sympy.Function(rho)(*input_variables) - elif type(rho) in [float, int]: - rho = sympy.Number(rho) - - # wall distance - normal_distance = sympy.Function("sdf")(*input_variables) - - # mixing length - mixing_length = sympy.Min( - self.karman_constant * normal_distance, - self.max_distance_ratio * self.max_distance, - ) - G = ( - 2 * u.diff(x) ** 2 - + 2 * v.diff(y) ** 2 - + 2 * w.diff(z) ** 2 - + (u.diff(y) + v.diff(x)) ** 2 - + (u.diff(z) + w.diff(x)) ** 2 - + (v.diff(z) + w.diff(y)) ** 2 - ) - - # set equations - self.equations = {} - self.equations["nu"] = nu + rho * mixing_length**2 * sympy.sqrt(G) - - -class NavierStokes_sympy: - """ - Compressible Navier Stokes equations - - Parameters - ========== - nu : float, Sympy Symbol/Expr, str - The kinematic viscosity. If `nu` is a str then it is - converted to Sympy Function of form `nu(x,y,z,t)`. - If `nu` is a Sympy Symbol or Expression then this - is substituted into the equation. This allows for - variable viscosity. - rho : float, Sympy Symbol/Expr, str - The density of the fluid. If `rho` is a str then it is - converted to Sympy Function of form 'rho(x,y,z,t)'. - If 'rho' is a Sympy Symbol or Expression then this - is substituted into the equation to allow for - compressible Navier Stokes. Default is 1. - dim : int - Dimension of the Navier Stokes (2 or 3). Default is 3. - time : bool - If time-dependent equations or not. Default is True. - mixed_form: bool - If True, use the mixed formulation of the Navier-Stokes equations. 
- - Examples - """ - - name = "NavierStokes" - - def __init__(self, nu, rho=1, dim=3, time=True, mixed_form=False): - # set params - self.dim = dim - self.time = time - self.mixed_form = mixed_form - - # coordinates - x, y, z = sympy.Symbol("x"), sympy.Symbol("y"), sympy.Symbol("z") - - # time - t = sympy.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sympy.Function("u")(*input_variables) - v = sympy.Function("v")(*input_variables) - if self.dim == 3: - w = sympy.Function("w")(*input_variables) - else: - w = sympy.Number(0) - - # pressure - p = sympy.Function("p")(*input_variables) - - # kinematic viscosity - if isinstance(nu, str): - nu = sympy.Function(nu)(*input_variables) - elif isinstance(nu, (float, int)): - nu = sympy.Number(nu) - - # density - if isinstance(rho, str): - rho = sympy.Function(rho)(*input_variables) - elif isinstance(rho, (float, int)): - rho = sympy.Number(rho) - - # dynamic viscosity - mu = rho * nu - - # set equations - self.equations = {} - self.equations["continuity"] = ( - rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) - ) - - if not self.mixed_form: - curl = ( - sympy.Number(0) - if rho.diff(x) == 0 - else u.diff(x) + v.diff(y) + w.diff(z) - ) - self.equations["momentum_x"] = ( - (rho * u).diff(t) - + ( - u * ((rho * u).diff(x)) - + v * ((rho * u).diff(y)) - + w * ((rho * u).diff(z)) - + rho * u * (curl) - ) - + p.diff(x) - - (-2 / 3 * mu * (curl)).diff(x) - - (mu * u.diff(x)).diff(x) - - (mu * u.diff(y)).diff(y) - - (mu * u.diff(z)).diff(z) - - (mu * (curl).diff(x)) - ) - self.equations["momentum_y"] = ( - (rho * v).diff(t) - + ( - u * ((rho * v).diff(x)) - + v * ((rho * v).diff(y)) - + w * ((rho * v).diff(z)) - + rho * v * (curl) - ) - + p.diff(y) - - (-2 / 3 * mu * (curl)).diff(y) - - (mu * v.diff(x)).diff(x) - - (mu * v.diff(y)).diff(y) - - (mu * v.diff(z)).diff(z) - - (mu * (curl).diff(y)) - ) - self.equations["momentum_z"] = ( - (rho * w).diff(t) - + ( - u * ((rho * w).diff(x)) - + v * ((rho * w).diff(y)) - + w * ((rho * w).diff(z)) - + rho * w * (curl) - ) - + p.diff(z) - - (-2 / 3 * mu * (curl)).diff(z) - - (mu * w.diff(x)).diff(x) - - (mu * w.diff(y)).diff(y) - - (mu * w.diff(z)).diff(z) - - (mu * (curl).diff(z)) - ) - - if self.dim == 2: - self.equations.pop("momentum_z") - - elif self.mixed_form: - u_x = sympy.Function("u_x")(*input_variables) - u_y = sympy.Function("u_y")(*input_variables) - u_z = sympy.Function("u_z")(*input_variables) - v_x = sympy.Function("v_x")(*input_variables) - v_y = sympy.Function("v_y")(*input_variables) - v_z = sympy.Function("v_z")(*input_variables) - - if self.dim == 3: - w_x = sympy.Function("w_x")(*input_variables) - w_y = sympy.Function("w_y")(*input_variables) - w_z = sympy.Function("w_z")(*input_variables) - else: - w_x = sympy.Number(0) - w_y = sympy.Number(0) - w_z = sympy.Number(0) - u_z = sympy.Number(0) - v_z = sympy.Number(0) - - curl = sympy.Number(0) if rho.diff(x) == 0 else u_x + v_y + w_z - self.equations["momentum_x"] = ( - (rho * u).diff(t) - + ( - u * ((rho * u.diff(x))) - + v * ((rho * u.diff(y))) - + w * ((rho * u.diff(z))) - + rho * u * (curl) - ) - + p.diff(x) - - (-2 / 3 * mu * (curl)).diff(x) - - (mu * u_x).diff(x) - - (mu * u_y).diff(y) - - (mu * u_z).diff(z) - - (mu * (curl).diff(x)) - ) - self.equations["momentum_y"] = ( - (rho * v).diff(t) - + ( - u * ((rho * v.diff(x))) - + v * ((rho * v.diff(y))) - 
+ w * ((rho * v.diff(z))) - + rho * v * (curl) - ) - + p.diff(y) - - (-2 / 3 * mu * (curl)).diff(y) - - (mu * v_x).diff(x) - - (mu * v_y).diff(y) - - (mu * v_z).diff(z) - - (mu * (curl).diff(y)) - ) - self.equations["momentum_z"] = ( - (rho * w).diff(t) - + ( - u * ((rho * w.diff(x))) - + v * ((rho * w.diff(y))) - + w * ((rho * w.diff(z))) - + rho * w * (curl) - ) - + p.diff(z) - - (-2 / 3 * mu * (curl)).diff(z) - - (mu * w_x).diff(x) - - (mu * w_y).diff(y) - - (mu * w_z).diff(z) - - (mu * (curl).diff(z)) - ) - self.equations["compatibility_u_x"] = u.diff(x) - u_x - self.equations["compatibility_u_y"] = u.diff(y) - u_y - self.equations["compatibility_u_z"] = u.diff(z) - u_z - self.equations["compatibility_v_x"] = v.diff(x) - v_x - self.equations["compatibility_v_y"] = v.diff(y) - v_y - self.equations["compatibility_v_z"] = v.diff(z) - v_z - self.equations["compatibility_w_x"] = w.diff(x) - w_x - self.equations["compatibility_w_y"] = w.diff(y) - w_y - self.equations["compatibility_w_z"] = w.diff(z) - w_z - self.equations["compatibility_u_xy"] = u_x.diff(y) - u_y.diff(x) - self.equations["compatibility_u_xz"] = u_x.diff(z) - u_z.diff(x) - self.equations["compatibility_u_yz"] = u_y.diff(z) - u_z.diff(y) - self.equations["compatibility_v_xy"] = v_x.diff(y) - v_y.diff(x) - self.equations["compatibility_v_xz"] = v_x.diff(z) - v_z.diff(x) - self.equations["compatibility_v_yz"] = v_y.diff(z) - v_z.diff(y) - self.equations["compatibility_w_xy"] = w_x.diff(y) - w_y.diff(x) - self.equations["compatibility_w_xz"] = w_x.diff(z) - w_z.diff(x) - self.equations["compatibility_w_yz"] = w_y.diff(z) - w_z.diff(y) - - if self.dim == 2: - self.equations.pop("momentum_z") - self.equations.pop("compatibility_u_z") - self.equations.pop("compatibility_v_z") - self.equations.pop("compatibility_w_x") - self.equations.pop("compatibility_w_y") - self.equations.pop("compatibility_w_z") - self.equations.pop("compatibility_u_xz") - self.equations.pop("compatibility_u_yz") - self.equations.pop("compatibility_v_xz") - self.equations.pop("compatibility_v_yz") - self.equations.pop("compatibility_w_xy") - self.equations.pop("compatibility_w_xz") - self.equations.pop("compatibility_w_yz") - - -if __name__ == "__main__": - logger.init_logger(log_level="debug") - # ze = ZeroEquation(nu=1, rho=1.0, dim=2, max_distance=4, time=False) - ns = NavierStokes_sympy(nu=2.0, rho=1.0, dim=2, time=False) - target = "momentum_x" - test_expr = ns.equations[target] - - x = paddle.randn([4, 1]) - y = paddle.randn([4, 1]) - z = paddle.randn([4, 1]) - sdf = paddle.randn([4, 1]) - sdf__x = paddle.randn([4, 1]) - sdf__y = paddle.randn([4, 1]) - x.stop_gradient = False - y.stop_gradient = False - z.stop_gradient = False - sdf.stop_gradient = False - sdf__x.stop_gradient = False - sdf__y.stop_gradient = False - - input_dict = { - "x": x, - "y": y, - "z": z, - "sdf": sdf, - "sdf__x": sdf__x, - "sdf__y": sdf__y, - } - - model1 = ppsci.arch.MLP(("x", "y", "z"), ("u", "v"), 2, 10) - model2 = ppsci.arch.MLP(("x", "y", "z"), ("w", "p"), 2, 10) - - cvt_expr = sympy_to_function(target, test_expr, [model1, model2]) - - output = cvt_expr(input_dict) - print(output.shape) From cfa853f47217b5084845b7483715197739716f96 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Fri, 25 Aug 2023 17:24:13 +0000 Subject: [PATCH 09/48] remove sympy to function code from expression.py to sym_to_func.py --- ppsci/utils/expression.py | 252 ----------------- ppsci/utils/sym_to_func.py | 362 +++++++++++++++++++++++++ test/utils/test_navier_stokes_sympy.py | 13 
+- 3 files changed, 369 insertions(+), 258 deletions(-) create mode 100644 ppsci/utils/sym_to_func.py diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index 28a0e70de..97b1c5ff7 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -12,24 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -import functools from typing import TYPE_CHECKING from typing import Callable from typing import Dict -from typing import List from typing import Optional from typing import Tuple -from typing import Union import paddle -import sympy as sp from paddle import jit from paddle import nn -from typing_extensions import TypeAlias - -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian -from ppsci.utils import logger if TYPE_CHECKING: from ppsci import constraint @@ -200,246 +191,3 @@ def visu_forward( clear() return output_dict - - -FUNC_MAP = { - sp.sin: paddle.sin, - sp.cos: paddle.cos, - sp.exp: paddle.exp, - sp.Pow: paddle.pow, - sp.log: paddle.log, - sp.tan: paddle.tan, - sp.Max: paddle.maximum, - sp.Min: paddle.minimum, - sp.Abs: paddle.abs, - sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), -} - -SYMPY_BUILTIN_FUNC: TypeAlias = Union[ - sp.sin, - sp.cos, - sp.exp, - sp.Pow, - sp.log, - sp.tan, - sp.Max, - sp.Min, - sp.Abs, - sp.Heaviside, -] - - -def _single_derivate_func(dvar: paddle.Tensor, invar: paddle.Tensor, order: int): - order_left = order - while order_left > 0: - if order_left >= 2: - dvar = hessian(dvar, invar) - order_left -= 2 - else: - dvar = jacobian(dvar, invar) - order_left -= 1 - return dvar - - -def _cvt_to_key(sympy_node: sp.Basic): - if isinstance( - sympy_node, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function) - ): - if hasattr(sympy_node, "name"): - # custom function - return sympy_node.name - else: - str(sympy_node) - elif isinstance(sympy_node, sp.Derivative): - # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" - expr_str = sympy_node.args[0].name - for symbol, order in sympy_node.args[1:]: - expr_str += f"__{symbol}" * order - return expr_str - else: - return str(sympy_node) - - -class Node(nn.Layer): - """The base class of the node in expression tree.""" - - def __init__(self, expr: sp.Basic): - super().__init__() - self.expr = expr - self.key = _cvt_to_key(self.expr) - - def forward(self, **kwargs): - raise NotImplementedError("Node.forward is not implemented") - - def __str__(self): - return ( - self.__class__.__name__ + f"(expr: {self.expr}), type: {type(self.expr)})" - ) - - -class OperatorNode(Node): - """ - A node representing a sp operator in the computational graph. 
- """ - - def __init__(self, expr: SYMPY_BUILTIN_FUNC): - super().__init__(expr) - - def forward(self, data_dict: Dict): - if self.expr.func == sp.Add: - data_dict[self.key] = sum( - [data_dict[_cvt_to_key(arg)] for arg in self.expr.args] - ) - elif self.expr.func == sp.Mul: - data_dict[self.key] = data_dict[_cvt_to_key(self.expr.args[0])] - for arg in self.expr.args[1:]: - data_dict[self.key] = data_dict[self.key] * data_dict[_cvt_to_key(arg)] - elif self.expr.func == sp.Derivative: - if self.key in data_dict: - return data_dict - data_dict[self.key] = data_dict[_cvt_to_key(self.expr.args[0])] - for symbol, order in self.expr.args[1:]: - data_dict[self.key] = _single_derivate_func( - data_dict[self.key], - data_dict[_cvt_to_key(symbol)], - order, - ) - else: - try: - func = FUNC_MAP[self.expr.func] - except KeyError: - raise NotImplementedError( - f"'{self.expr.func}' operator is not supported now." - ) - if self.expr.func == sp.Heaviside: - data_dict[self.key] = func(data_dict[_cvt_to_key(self.expr.args[0])]) - else: - data_dict[self.key] = func( - *[data_dict[_cvt_to_key(arg)] for arg in self.expr.args] - ) - return data_dict - - -class LayerNode(Node): - """ - A node representing a neural network in the computational graph - """ - - def __init__(self, expr: sp.core.function.UndefinedFunction, model: nn.Layer): - super().__init__(expr) - self.model = model - - def forward(self, data_dict: Dict): - if self.key in data_dict: - return data_dict - - output_dict = self.model(data_dict) - print("call model forward") - for key, value in output_dict.items(): - data_dict[key] = value - - return data_dict - - -class ConstantNode(Node): - """ - A node representing a constant in the computational graph. - """ - - def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): - super().__init__(expr) - if ( - self.expr.is_Float - or self.expr.is_Integer - or self.expr.is_Boolean - or self.expr.is_Rational - ): - self.expr = float(self.expr) - else: - raise TypeError( - f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" - ) - self.expr = paddle.to_tensor(self.expr) - - def forward(self, data_dict: Dict): - data_dict[self.key] = self.expr - return data_dict - - -class ComposedFunc(nn.Layer): - """ - Compose multiple functions into one function. - """ - - def __init__(self, target: str, funcs: List[Node]): - super().__init__() - self.funcs = funcs - self.target = target - - def forward(self, data_dict: Dict): - for func in self.funcs: - data_dict = func(data_dict) - return data_dict[self.funcs[-1].key] # return the computed result of root node - - -def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: - # traverse into sub-nodes - if isinstance(cur_node, sp.core.function.UndefinedFunction): - nodes.append(cur_node) - elif isinstance(cur_node, sp.Function): - for arg in cur_node.args: - nodes = _post_traverse(arg, nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sp.Derivative): - nodes = _post_traverse(cur_node.args[0], nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sp.Symbol): - return nodes - elif isinstance(cur_node, sp.Number): - nodes.append(cur_node) - else: - for arg in cur_node.args: - nodes = _post_traverse(arg, nodes) - nodes.append(cur_node) - return nodes - - -def sympy_to_function(target: str, expr: sp.Expr, models: nn.Layer) -> ComposedFunc: - """ - Convert a sp expression to a ComposedFunc. 
- """ - sympy_nodes = [] - sympy_nodes = _post_traverse(expr, sympy_nodes) - sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] - sympy_nodes = list( - dict.fromkeys(sympy_nodes) - ) # remove duplicates with topo-order kept - - callable_nodes = [] - for i, node in enumerate(sympy_nodes): - logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") - if isinstance(node.func, sp.core.function.UndefinedFunction): - match = False - for model in models: - if str(node.func.name) in model.output_keys: - callable_nodes.append(LayerNode(node, model)) - if match: - raise ValueError( - f"function {node} can match at least 2 output key of models, which is forbidden." - ) - match = True - elif ( - isinstance(node, tuple(FUNC_MAP.keys())) - or node.is_Add - or node.is_Mul - or node.is_Derivative - or node.is_Pow - ): - callable_nodes.append(OperatorNode(node)) - elif node.is_Number or node.is_NumberSymbol: - callable_nodes.append(ConstantNode(node)) - else: - raise NotImplementedError( - f"The node {node} is not supported in sympy_to_function." - ) - return ComposedFunc(target, callable_nodes) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py new file mode 100644 index 000000000..89561fd99 --- /dev/null +++ b/ppsci/utils/sym_to_func.py @@ -0,0 +1,362 @@ +""" +Sympy to python function conversion module +""" + +import functools +from typing import TYPE_CHECKING +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + +import paddle +import sympy as sp +from paddle import nn +from typing_extensions import TypeAlias + +from ppsci.autodiff import hessian +from ppsci.autodiff import jacobian +from ppsci.utils import logger + +if TYPE_CHECKING: + from ppsci import arch + + +__all__ = [ + "sympy_to_function", +] + + +PADDLE_FUNC_MAP = { + sp.sin: paddle.sin, + sp.cos: paddle.cos, + sp.exp: paddle.exp, + sp.Pow: paddle.pow, + sp.log: paddle.log, + sp.tan: paddle.tan, + sp.Max: paddle.maximum, + sp.Min: paddle.minimum, + sp.Abs: paddle.abs, + sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), +} + +SYMPY_BUILTIN_FUNC: TypeAlias = Union[ + sp.sin, + sp.cos, + sp.exp, + sp.Pow, + sp.log, + sp.tan, + sp.Max, + sp.Min, + sp.Abs, + sp.Heaviside, +] + + +def _cvt_to_key(expr: sp.Basic) -> str: + """Convert sympy expression to a string key, mainly as retrieval key in dict. + + Args: + expr (sp.Basic): Sympy expression. + + Returns: + str: Converted string key. + """ + if isinstance(expr, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function)): + if hasattr(expr, "name"): + # use name of custom function instead of itself. + return expr.name + else: + str(expr) + elif isinstance(expr, sp.Derivative): + # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" + expr_str = expr.args[0].name + for symbol, order in expr.args[1:]: + expr_str += f"__{symbol}" * order + return expr_str + else: + return str(expr) + + +def _compute_single_derivate( + dvar: paddle.Tensor, invar: paddle.Tensor, order: int +) -> paddle.Tensor: + """Compute derivative for a single dependent variable to a single independent variable. + + Args: + dvar (paddle.Tensor): Dependent variable. + invar (paddle.Tensor): Independent variable. + order (int): Order of derivative + + Returns: + paddle.Tensor: Result of derivative d^{order}{dvar} / d{invar}^{order}. 
+ """ + order_left = order + while order_left > 0: + if order_left & 1: + dvar = jacobian(dvar, invar) + order_left -= 1 + if order_left >= 2: + dvar = hessian(dvar, invar) + order_left -= 2 + return dvar + + +class Node(nn.Layer): + """The base class of the node in expression tree. + + Args: + expr (sp.Basic): Sympy expression. + """ + + def __init__(self, expr: sp.Basic): + super().__init__() + self.expr = expr + self.key = _cvt_to_key(self.expr) + + def forward(self, **kwargs): + raise NotImplementedError("Node.forward is not implemented") + + def __str__(self): + return f"{self.__class__.__name__}(expr: {self.expr}, expr_type: {type(self.expr)})" + + def __repr__(self): + return f"{self.__class__.__name__}(expr: {self.expr})" + + +class OperatorNode(Node): + """Class for operator node in converted expression tree. + + Args: + expr (SYMPY_BUILTIN_FUNC): Sympy expression. + """ + + def __init__(self, expr: SYMPY_BUILTIN_FUNC): + super().__init__(expr) + # preprocess childs' key instead of processing at run-time + # which can reduce considerable overhead of time for calling "_cvt_to_key" + if self.expr.func == sp.Derivative: + self.childs = [_cvt_to_key(self.expr.args[0])] + [ + (_cvt_to_key(arg), order) for (arg, order) in self.expr.args[1:] + ] + else: + self.childs = [_cvt_to_key(arg) for arg in self.expr.args] + + if self.expr.func == sp.Add: + self.func = self._add_operator_func + elif self.expr.func == sp.Mul: + self.func = self._mul_operator_func + elif self.expr.func == sp.Derivative: + self.func = self._derivate_operator_func + else: + if self.expr.func == sp.Heaviside: + self.func = self._heaviside_operator_func + else: + self.func = self._vanilla_operator_func + + def forward(self, data_dict: Dict): + # use cache + if self.key in data_dict: + return data_dict + + return self.func(data_dict) + + def _add_operator_func(self, data_dict): + data_dict[self.key] = sum([data_dict[child] for child in self.childs]) + return data_dict + + def _mul_operator_func(self, data_dict): + data_dict[self.key] = data_dict[self.childs[0]] + for child in self.childs[1:]: + data_dict[self.key] *= data_dict[child] + return data_dict + + def _derivate_operator_func(self, data_dict): + data_dict[self.key] = data_dict[self.childs[0]] + for child, order in self.childs[1:]: + data_dict[self.key] = _compute_single_derivate( + data_dict[self.key], + data_dict[child], + order, + ) + return data_dict + + def _heaviside_operator_func(self, data_dict): + data_dict[self.key] = PADDLE_FUNC_MAP[sp.Heaviside](data_dict[self.childs[0]]) + return data_dict + + def _vanilla_operator_func(self, data_dict): + data_dict[self.key] = PADDLE_FUNC_MAP[self.expr.func]( + *[data_dict[child] for child in self.childs] + ) + return data_dict + + +class LayerNode(Node): + """Class for layer node in converted expression tree. + + Args: + expr (sp.core.function.UndefinedFunction): Sympy expression. + model (nn.Layer): NN model for computing forward result in this node. + """ + + def __init__(self, expr: sp.core.function.UndefinedFunction, model: nn.Layer): + super().__init__(expr) + self.model = model + + def forward(self, data_dict: Dict): + # use cache + if self.key in data_dict: + return data_dict + + output_dict = self.model(data_dict) + data_dict.update(output_dict) + + return data_dict + + +class ConstantNode(Node): + """ "Class for constant variable node in converted expression tree. + + Args: + expr (Union[sp.Number, sp.NumberSymbol]): Number expression. 
+ """ + + def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): + super().__init__(expr) + if ( + self.expr.is_Float + or self.expr.is_Integer + or self.expr.is_Boolean + or self.expr.is_Rational + ): + self.expr = float(self.expr) + else: + raise TypeError( + f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" + ) + self.expr = paddle.to_tensor(self.expr) + + def forward(self, data_dict: Dict): + # use cache + if self.key in data_dict: + return data_dict + + data_dict[self.key] = self.expr + return data_dict + + +class ComposedNode(nn.Layer): + """ + Compose list of several callable objects together. + """ + + def __init__(self, target: str, funcs: List[Node]): + super().__init__() + self.funcs = funcs + self.target = target + + def forward(self, data_dict: Dict): + # call all funcs in order + for func in self.funcs: + data_dict = func(data_dict) + + # return result of last node(root node) for target + return data_dict[self.funcs[-1].key] + + +def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: + """Traverse sympy expression tree in postorder. + + Args: + cur_node (sp.Basic): Sympy expression of current node. + nodes (List[sp.Basic]): Node list storing all tree nodes in postorder. + + Returns: + List[sp.Basic]: Node list storing all tree nodes in postorder. + """ + # traverse into sub-nodes + if isinstance(cur_node, sp.core.function.UndefinedFunction): + nodes.append(cur_node) + elif isinstance(cur_node, sp.Function): + for arg in cur_node.args: + nodes = _post_traverse(arg, nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sp.Derivative): + nodes = _post_traverse(cur_node.args[0], nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sp.Symbol): + return nodes + elif isinstance(cur_node, sp.Number): + nodes.append(cur_node) + else: + for arg in cur_node.args: + nodes = _post_traverse(arg, nodes) + nodes.append(cur_node) + return nodes + + +def sympy_to_function( + target: str, + expr: sp.Expr, + models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, +) -> ComposedNode: + """Convert sympy expression to callable function. + + Args: + target (str): Alias of `expr`, such as "z" for expression: "z = a + b * c". + expr (sp.Expr): Sympy expression to be converted. + models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. + + Returns: + ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. + """ + + # simplify expression to reduce nodes in tree + expr = sp.nsimplify(expr) + expr = sp.expand(expr) + expr = sp.simplify(expr) + + # convert sympy expression tree to list of nodes in postorder + sympy_nodes = [] + sympy_nodes = _post_traverse(expr, sympy_nodes) + + # remove unnecessary symbol node for already in input dict + sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] + + # remove duplicates with topo-order kept + sympy_nodes = list(dict.fromkeys(sympy_nodes)) + + # convert sympy node to callable node + callable_nodes = [] + for i, node in enumerate(sympy_nodes): + logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") + if isinstance(node.func, sp.core.function.UndefinedFunction): + match = False + for model in models: + if str(node.func.name) in model.output_keys: + callable_nodes.append(LayerNode(node, model)) + if match: + raise ValueError( + f"Function {node} can match at least 2 output key of models, which is forbidden." 
+ ) + match = True + elif ( + isinstance(node, tuple(PADDLE_FUNC_MAP.keys())) + or node.is_Add + or node.is_Mul + or node.is_Derivative + or node.is_Pow + ): + callable_nodes.append(OperatorNode(node)) + elif node.is_Number or node.is_NumberSymbol: + callable_nodes.append(ConstantNode(node)) + else: + raise NotImplementedError( + f"The node {node} is not supported in sympy_to_function." + ) + + # Compose callable nodes into one callable object + return ComposedNode(target, callable_nodes) diff --git a/test/utils/test_navier_stokes_sympy.py b/test/utils/test_navier_stokes_sympy.py index 0639ac108..b73b6eb35 100644 --- a/test/utils/test_navier_stokes_sympy.py +++ b/test/utils/test_navier_stokes_sympy.py @@ -21,7 +21,7 @@ from ppsci.autodiff import clear from ppsci.autodiff import hessian as H from ppsci.autodiff import jacobian as J -from ppsci.utils import expression +from ppsci.utils import sym_to_func class NavierStokes_sympy: @@ -219,7 +219,7 @@ class Test_NavierStokes_sympy: def test_nu_sympy(self, nu, rho, dim, time): """Test for navier_stokes equation.""" # define input/output keys - ze = ZeroEquation_sympy(nu=nu, rho=1.0, dim=dim, max_distance=3.4, time=time) + ze = ZeroEquation_sympy(nu=nu, rho=rho, dim=dim, max_distance=3.4, time=time) nu_sympy = ze.equations["nu"] input_keys = ("x", "y", "z")[:dim] @@ -453,7 +453,7 @@ def momentum_y_f(out): sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = expression.sympy_to_function( + sympy_expr_dict[target] = sym_to_func.sympy_to_function( target, expr, [ @@ -527,7 +527,7 @@ def test_nu_constant(self, nu, rho, dim, time): sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = expression.sympy_to_function( + sympy_expr_dict[target] = sym_to_func.sympy_to_function( target, expr, [ @@ -548,9 +548,10 @@ def test_nu_constant(self, nu, rho, dim, time): # compute equation with funciton converted from sympy output_dict_sympy = {k: v for k, v in input_dict.items()} + tmp = {k: v for k, v in output_dict_sympy.items()} for name, expr in sympy_expr_dict.items(): - tmp = expr(output_dict_sympy) - output_dict_sympy[name] = tmp + output = expr(tmp) + output_dict_sympy[name] = output clear() # test for result From 62e98559792e249d56c0eb666bedb02a22b9b139 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 05:11:20 +0000 Subject: [PATCH 10/48] update type hint in expression --- ppsci/utils/expression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index 97b1c5ff7..b331cbb84 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -18,11 +18,11 @@ from typing import Optional from typing import Tuple -import paddle from paddle import jit from paddle import nn if TYPE_CHECKING: + import paddle from ppsci import constraint from ppsci import validate From fee4553df42f869a2bd64fa846565c6e7bc56312 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 05:41:29 +0000 Subject: [PATCH 11/48] update code --- ppsci/utils/logger.py | 45 +++++++------- ppsci/utils/sym_to_func.py | 69 +++++++++++++++++++--- test/utils/test_linear_elasticity_sympy.py | 6 +- test/utils/test_navier_stokes_sympy.py | 25 -------- 4 files changed, 86 insertions(+), 59 deletions(-) diff --git a/ppsci/utils/logger.py b/ppsci/utils/logger.py index 
627561bc3..cd15828ff 100644 --- a/ppsci/utils/logger.py +++ b/ppsci/utils/logger.py @@ -92,29 +92,6 @@ def init_logger( _logger.propagate = False -def ensure_logger(log_func): - """ - Automatically initialize `logger` by default arguments - when init_logger() is not called manually. - """ - - @functools.wraps(log_func) - def wrapped_log_func(fmt, *args): - if _logger is None: - init_logger() - _logger.warning( - "Before you call functions within the logger, the logger has already " - "been automatically initialized. Since `log_file` is not specified by " - "default, information will not be written to any file except being " - "output to the terminal." - ) - - log_func(fmt, *args) - - return wrapped_log_func - - -@ensure_logger def set_log_level(log_level): """Set log level.""" if dist.get_rank() == 0: @@ -137,6 +114,28 @@ def wrapped_log_func(fmt, *args): return wrapped_log_func +def ensure_logger(log_func): + """ + Automatically initialize `logger` by default arguments + when init_logger() is not called manually. + """ + + @functools.wraps(log_func) + def wrapped_log_func(fmt, *args): + if _logger is None: + init_logger() + _logger.warning( + "Before you call functions within the logger, the logger has already " + "been automatically initialized. Since `log_file` is not specified by " + "default, information will not be written to any file except being " + "output to the terminal." + ) + + log_func(fmt, *args) + + return wrapped_log_func + + @ensure_logger @log_at_trainer0 def info(fmt, *args): diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 89561fd99..44a933302 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -1,7 +1,23 @@ +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Sympy to python function conversion module """ +from __future__ import annotations + import functools from typing import TYPE_CHECKING from typing import Dict @@ -17,7 +33,6 @@ from ppsci.autodiff import hessian from ppsci.autodiff import jacobian -from ppsci.utils import logger if TYPE_CHECKING: from ppsci import arch @@ -235,7 +250,7 @@ def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): self.expr = float(self.expr) else: raise TypeError( - f"expr({expr}) should be float/int/bool, but got {type(self.expr)}" + f"expr({expr}) should be Float/Integer/Boolean/Rational, but got {type(self.expr)}" ) self.expr = paddle.to_tensor(self.expr) @@ -253,10 +268,9 @@ class ComposedNode(nn.Layer): Compose list of several callable objects together. 
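+    Funcs are evaluated in order on a shared data_dict; the dict is updated in
+    place, so intermediate results stay available to the caller, and the tensor
+    keyed by the last (root) node is returned.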
""" - def __init__(self, target: str, funcs: List[Node]): + def __init__(self, funcs: List[Node]): super().__init__() self.funcs = funcs - self.target = target def forward(self, data_dict: Dict): # call all funcs in order @@ -299,19 +313,57 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: def sympy_to_function( - target: str, expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, ) -> ComposedNode: """Convert sympy expression to callable function. Args: - target (str): Alias of `expr`, such as "z" for expression: "z = a + b * c". expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. Returns: ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. + + Examples: + >>> import paddle + >>> import sympy as sp + >>> from ppsci import arch + >>> from ppsci.utils import sym_to_func + + >>> a, b, c, x, y = sp.symbols("a b c x y") + >>> u = sp.Function("u")(x, y) + >>> v = sp.Function("v")(x, y) + >>> z = -a + b * (c ** 2) + u * v + 2.3 + + >>> model = arch.MLP(("x", "y"), ("u", "v"), 4, 16) + + >>> batch_size = 13 + >>> a_tensor = paddle.randn([batch_size, 1]) + >>> b_tensor = paddle.randn([batch_size, 1]) + >>> c_tensor = paddle.randn([batch_size, 1]) + >>> x_tensor = paddle.randn([batch_size, 1]) + >>> y_tensor = paddle.randn([batch_size, 1]) + + >>> model_output_dict = model({"x": x_tensor, "y": y_tensor}) + >>> u_tensor, v_tensor = model_output_dict["u"], model_output_dict["v"] + + >>> z_tensor_manually = ( + ... -a_tensor + b_tensor * (c_tensor ** 2) + ... + u_tensor * v_tensor + 2.3 + ... ) + >>> z_tensor_sympy = sym_to_func.sympy_to_function(z, model)( + ... { + ... "a": a_tensor, + ... "b": b_tensor, + ... "c": c_tensor, + ... "x": x_tensor, + ... "y": y_tensor, + ... } + ... 
) + + >>> paddle.allclose(z_tensor_manually, z_tensor_sympy).item() + True """ # simplify expression to reduce nodes in tree @@ -330,9 +382,10 @@ def sympy_to_function( sympy_nodes = list(dict.fromkeys(sympy_nodes)) # convert sympy node to callable node + if not isinstance(models, (tuple, list)): + models = (models,) callable_nodes = [] for i, node in enumerate(sympy_nodes): - logger.debug(f"tree node [{i + 1}/{len(sympy_nodes)}]: {node}") if isinstance(node.func, sp.core.function.UndefinedFunction): match = False for model in models: @@ -359,4 +412,4 @@ def sympy_to_function( ) # Compose callable nodes into one callable object - return ComposedNode(target, callable_nodes) + return ComposedNode(callable_nodes) diff --git a/test/utils/test_linear_elasticity_sympy.py b/test/utils/test_linear_elasticity_sympy.py index 87100ec3e..06ef3adbb 100644 --- a/test/utils/test_linear_elasticity_sympy.py +++ b/test/utils/test_linear_elasticity_sympy.py @@ -21,7 +21,7 @@ import ppsci from ppsci import equation from ppsci.autodiff import clear -from ppsci.utils import expression +from ppsci.utils import sym_to_func __all__ = [] @@ -227,8 +227,8 @@ def test_linearelasticity(E, nu, lambda_, mu, rho, dim, time): E, nu, lambda_, mu, rho, dim, time ).equations for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = expression.sympy_to_function( - target, expr, [disp_net, stress_net] + sympy_expr_dict[target] = sym_to_func.sympy_to_function( + expr, [disp_net, stress_net] ) # compute equation with python function diff --git a/test/utils/test_navier_stokes_sympy.py b/test/utils/test_navier_stokes_sympy.py index b73b6eb35..e10592c13 100644 --- a/test/utils/test_navier_stokes_sympy.py +++ b/test/utils/test_navier_stokes_sympy.py @@ -127,29 +127,6 @@ def __init__(self, nu, rho=1, dim=3, time=True): class ZeroEquation_sympy: - """ - Zero Equation Turbulence model - - Parameters - ========== - nu : float - The kinematic viscosity of the fluid. - max_distance : float - The maximum wall distance in the flow field. - rho : float, Sympy sp.Symbol/Expr, str - The density. If `rho` is a str then it is - converted to Sympy sp.Function of form 'rho(x,y,z,t)'. - If 'rho' is a Sympy sp.Symbol or Expression then this - is substituted into the equation. Default is 1. - dim : int - Dimension of the Zero Equation Turbulence model (2 or 3). - Default is 3. - time : bool - If time-dependent equations or not. Default is True. 
- - Example - """ - def __init__( self, nu, max_distance, rho=1, dim=3, time=True ): # TODO add density into model @@ -454,7 +431,6 @@ def momentum_y_f(out): sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations for target, expr in sympy_expr_dict.items(): sympy_expr_dict[target] = sym_to_func.sympy_to_function( - target, expr, [ model, @@ -528,7 +504,6 @@ def test_nu_constant(self, nu, rho, dim, time): sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations for target, expr in sympy_expr_dict.items(): sympy_expr_dict[target] = sym_to_func.sympy_to_function( - target, expr, [ model, From 1b7642e628bafcc2c3a9249721c2e3620e9766ec Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 08:08:12 +0000 Subject: [PATCH 12/48] update code --- ppsci/utils/sym_to_func.py | 37 +++++++------------------------------ 1 file changed, 7 insertions(+), 30 deletions(-) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 44a933302..f5ea7cef8 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -95,30 +95,6 @@ def _cvt_to_key(expr: sp.Basic) -> str: return str(expr) -def _compute_single_derivate( - dvar: paddle.Tensor, invar: paddle.Tensor, order: int -) -> paddle.Tensor: - """Compute derivative for a single dependent variable to a single independent variable. - - Args: - dvar (paddle.Tensor): Dependent variable. - invar (paddle.Tensor): Independent variable. - order (int): Order of derivative - - Returns: - paddle.Tensor: Result of derivative d^{order}{dvar} / d{invar}^{order}. - """ - order_left = order - while order_left > 0: - if order_left & 1: - dvar = jacobian(dvar, invar) - order_left -= 1 - if order_left >= 2: - dvar = hessian(dvar, invar) - order_left -= 2 - return dvar - - class Node(nn.Layer): """The base class of the node in expression tree. 
@@ -191,11 +167,12 @@ def _mul_operator_func(self, data_dict): def _derivate_operator_func(self, data_dict): data_dict[self.key] = data_dict[self.childs[0]] for child, order in self.childs[1:]: - data_dict[self.key] = _compute_single_derivate( - data_dict[self.key], - data_dict[child], - order, - ) + if order & 1: + data_dict[self.key] = jacobian(data_dict[self.key], data_dict[child]) + order -= 1 + while order > 0: + data_dict[self.key] = hessian(data_dict[self.key], data_dict[child]) + order -= 2 return data_dict def _heaviside_operator_func(self, data_dict): @@ -204,7 +181,7 @@ def _heaviside_operator_func(self, data_dict): def _vanilla_operator_func(self, data_dict): data_dict[self.key] = PADDLE_FUNC_MAP[self.expr.func]( - *[data_dict[child] for child in self.childs] + *tuple(data_dict[child] for child in self.childs) ) return data_dict From 1538f0d74fb5e5c2d183315787e48979efdb1313 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 08:09:19 +0000 Subject: [PATCH 13/48] update code --- test/utils/speed_test_navier_stokes.py | 476 +++++++++++++++++++++++++ 1 file changed, 476 insertions(+) create mode 100644 test/utils/speed_test_navier_stokes.py diff --git a/test/utils/speed_test_navier_stokes.py b/test/utils/speed_test_navier_stokes.py new file mode 100644 index 000000000..838a38c74 --- /dev/null +++ b/test/utils/speed_test_navier_stokes.py @@ -0,0 +1,476 @@ +import time as time_module + +import paddle +import sympy as sp + +from ppsci import arch +from ppsci import equation +from ppsci.autodiff import clear +from ppsci.autodiff import hessian as H +from ppsci.autodiff import jacobian as J +from ppsci.utils import sym_to_func + + +class NavierStokes_sympy: + def __init__(self, nu, rho, dim, time): + # set params + self.dim = dim + self.time = time + + # coordinates + x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") + + # time + t = sp.Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = sp.Function("u")(*input_variables) + v = sp.Function("v")(*input_variables) + if self.dim == 3: + w = sp.Function("w")(*input_variables) + else: + w = sp.Number(0) + + # pressure + p = sp.Function("p")(*input_variables) + + # kinematic viscosity + if isinstance(nu, str): + nu = sp.Function(nu)(*input_variables) + elif isinstance(nu, (float, int)): + nu = sp.Number(nu) + + # density + if isinstance(rho, str): + rho = sp.Function(rho)(*input_variables) + elif isinstance(rho, (float, int)): + rho = sp.Number(rho) + + # dynamic viscosity + mu = rho * nu + + # set equations + self.equations = {} + self.equations["continuity"] = ( + rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) + ) + + curl = sp.Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) + self.equations["momentum_x"] = ( + (rho * u).diff(t) + + ( + u * ((rho * u).diff(x)) + + v * ((rho * u).diff(y)) + + w * ((rho * u).diff(z)) + + rho * u * (curl) + ) + + p.diff(x) + - (-2 / 3 * mu * (curl)).diff(x) + - (mu * u.diff(x)).diff(x) + - (mu * u.diff(y)).diff(y) + - (mu * u.diff(z)).diff(z) + - (mu * (curl).diff(x)) + ) + self.equations["momentum_y"] = ( + (rho * v).diff(t) + + ( + u * ((rho * v).diff(x)) + + v * ((rho * v).diff(y)) + + w * ((rho * v).diff(z)) + + rho * v * (curl) + ) + + p.diff(y) + - (-2 / 3 * mu * (curl)).diff(y) + - (mu * v.diff(x)).diff(x) + - (mu * v.diff(y)).diff(y) + - (mu * 
v.diff(z)).diff(z) + - (mu * (curl).diff(y)) + ) + self.equations["momentum_z"] = ( + (rho * w).diff(t) + + ( + u * ((rho * w).diff(x)) + + v * ((rho * w).diff(y)) + + w * ((rho * w).diff(z)) + + rho * w * (curl) + ) + + p.diff(z) + - (-2 / 3 * mu * (curl)).diff(z) + - (mu * w.diff(x)).diff(x) + - (mu * w.diff(y)).diff(y) + - (mu * w.diff(z)).diff(z) + - (mu * (curl).diff(z)) + ) + + if self.dim == 2: + self.equations.pop("momentum_z") + + +class ZeroEquation_sympy: + def __init__( + self, nu, max_distance, rho=1, dim=3, time=True + ): # TODO add density into model + # set params + self.dim = dim + self.time = time + + # model coefficients + self.max_distance = max_distance + self.karman_constant = 0.419 + self.max_distance_ratio = 0.09 + + # coordinates + x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") + + # time + t = sp.Symbol("t") + + # make input variables + input_variables = {"x": x, "y": y, "z": z, "t": t} + if self.dim == 2: + input_variables.pop("z") + if not self.time: + input_variables.pop("t") + + # velocity componets + u = sp.Function("u")(*input_variables) + v = sp.Function("v")(*input_variables) + if self.dim == 3: + w = sp.Function("w")(*input_variables) + else: + w = sp.Number(0) + + # density + if type(rho) is str: + rho = sp.Function(rho)(*input_variables) + elif type(rho) in [float, int]: + rho = sp.Number(rho) + + # wall distance + normal_distance = sp.Function("sdf")(*input_variables) + + # mixing length + mixing_length = sp.Min( + self.karman_constant * normal_distance, + self.max_distance_ratio * self.max_distance, + ) + G = ( + 2 * u.diff(x) ** 2 + + 2 * v.diff(y) ** 2 + + 2 * w.diff(z) ** 2 + + (u.diff(y) + v.diff(x)) ** 2 + + (u.diff(z) + w.diff(x)) ** 2 + + (v.diff(z) + w.diff(y)) ** 2 + ) + + # set equations + self.equations = {} + self.equations["nu"] = nu + rho * mixing_length**2 * sp.sqrt(G) + + +def compute_with_sympy(input_dicts, nu, rho, dim, time, model): + """Test for navier_stokes equation.""" + # define input/output keys + ze = ZeroEquation_sympy(nu=nu, rho=rho, dim=dim, max_distance=3.4, time=time) + nu_sympy = ze.equations["nu"] + + input_keys = ("x", "y", "z")[:dim] + if time: + input_keys = ("t",) + input_keys + + output_keys = ("u", "v") + if dim == 3: + output_keys += ("w",) + output_keys += ("p",) + + # prepare input data in dict + cost_list = [] + # prepare python function expressions and sympy-expression in dict + sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations + for target, expr in sympy_expr_dict.items(): + sympy_expr_dict[target] = sym_to_func.sympy_to_function( + expr, + [ + model, + ], + ) + for i, input_dict in enumerate(input_dicts): + input_dict = input_dicts[i] + + # compute equation with funciton converted from sympy + output_dict_sympy = {k: v for k, v in input_dict.items()} + tmp = {k: v for k, v in output_dict_sympy.items()} + beg = time_module.perf_counter() + for name, expr in sympy_expr_dict.items(): + output = expr(tmp) + output_dict_sympy[name] = output + for key in model.output_keys: + output_dict_sympy[key] = tmp[key] + clear() + end = time_module.perf_counter() + cost_list.append(end - beg) + + # test for result + print( + f"compute_with_sympy overhead: {sum(cost_list[10:]) / len(cost_list[10:]):.5f}" + ) + return output_dict_sympy + + +def compute_with_pyfunc(input_dicts, nu, rho, dim, time, model): + def continuity_f(out): + x, y = out["x"], out["y"] + u, v = out["u"], out["v"] + return 1.0 * J(u, x) + 1.0 * J(v, y) + + def momentum_x_f(out): + x, y = out["x"], out["y"] + u, v, p = 
out["u"], out["v"], out["p"] + if time: + t = out["t"] + return ( + -( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(u, x) + - ( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(u, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 + + 2 * J(u, x) * J(J(u, x), y) + + 2 * J(v, y) * H(v, y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__y"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(u, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 + + 2 * J(u, x) * H(u, x) + + 2 * J(v, y) * J(J(v, x), y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__x"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(u, x) + + (1.0 * u * J(u, x) + 1.0 * v * J(u, y) + J(p, x)) + + (J(u, t) if time else 0) + ) + + def momentum_y_f(out): + x, y = out["x"], out["y"] + u, v, p = out["u"], out["v"], out["p"] + if time: + t = out["t"] + return ( + -( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(v, x) + - ( + 1.0 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + + 2.0 + ) + * H(v, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 + + 2 * J(u, x) * J(J(u, x), y) + + 2 * J(v, y) * H(v, y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__y"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(v, y) + - ( + 1.0 + * ( + (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 + + 2 * J(u, x) * H(u, x) + + 2 * J(v, y) * J(J(v, x), y) + ) + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ** 2 + / paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + + 0.838 + * paddle.sqrt( + (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + ) + * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) + * out["sdf__x"] + * paddle.minimum( + paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] + ) + ) + * J(v, x) + + (1.0 * u * J(v, x) + 1.0 * v * J(v, y) 
+ J(p, y)) + + (J(v, t) if time else 0) + ) + + """Test for navier_stokes equation.""" + # define input/output keys + + # prepare input data in dict + cost_list = [] + for i, input_dict in enumerate(input_dicts): + input_dict = input_dicts[i] + + # prepare python function expressions in dict + functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations + functional_expr_dict["continuity"] = continuity_f + functional_expr_dict["momentum_x"] = momentum_x_f + functional_expr_dict["momentum_y"] = momentum_y_f + + # compute equation with python function + output_dict_functional = model(input_dict) + beg = time_module.perf_counter() + for name, expr in functional_expr_dict.items(): + if callable(expr): + output_dict_functional[name] = expr( + {**output_dict_functional, **input_dict} + ) + else: + raise TypeError(f"expr type({type(expr)}) is invalid") + clear() + end = time_module.perf_counter() + cost_list.append(end - beg) + + # test for result + print( + f"compute_with_pyfunc overhead: {sum(cost_list[10:]) / len(cost_list[10:]):.5f}" + ) + return output_dict_functional + + +if __name__ == "__main__": + input_keys = ("t", "x", "y") + output_keys = ("u", "v", "p") + nu = 2 + rho = 1 + dim = 2 + time = True + model = arch.MLP(input_keys, output_keys, 4, 50) + + batch_size = 2048 + input_dicts = [] + for i in range(50): + input_dict = {} + for var in input_keys: + input_dict[var] = paddle.randn([batch_size, 1]) + input_dict[var].stop_gradient = False + if var != "t": + input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) + input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) + + input_dict[f"sdf__{var}"].stop_gradient = False + input_dict[f"normal__{var}"].stop_gradient = False + + input_dict["sdf"] = paddle.randn([batch_size, 1]) + input_dict["sdf"].stop_gradient = False + input_dicts.append(input_dict) + + output_dict_sympy = compute_with_sympy( + input_dicts, nu=nu, rho=rho, dim=dim, time=time, model=model + ) + output_dict_pyfunc = compute_with_pyfunc( + input_dicts, nu=nu, rho=rho, dim=dim, time=time, model=model + ) + + for key in output_dict_pyfunc: + if not paddle.allclose( + output_dict_sympy[key], output_dict_pyfunc[key], atol=1e-7 + ): + print(f"{key} {output_dict_sympy[key]}\n{output_dict_pyfunc[key]}") + else: + print(f"{key} check pass") From 36e48e5c961b37214a8d2b4b06d87492120b83b4 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 12:06:21 +0000 Subject: [PATCH 14/48] refine sym_to_func.py --- ppsci/utils/sym_to_func.py | 48 +++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index f5ea7cef8..e6a7fdd56 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -194,9 +194,15 @@ class LayerNode(Node): model (nn.Layer): NN model for computing forward result in this node. 
""" - def __init__(self, expr: sp.core.function.UndefinedFunction, model: nn.Layer): + def __init__( + self, + expr: sp.core.function.UndefinedFunction, + model: nn.Layer, + detach_keys: Optional[Tuple[str, ...]] = None, + ): super().__init__(expr) self.model = model + self.detach_keys = detach_keys def forward(self, data_dict: Dict): # use cache @@ -206,6 +212,11 @@ def forward(self, data_dict: Dict): output_dict = self.model(data_dict) data_dict.update(output_dict) + # detach Tensor(s) if specified + if self.detach_keys: + for key in self.detach_keys: + data_dict[key] = data_dict[key].detach() + return data_dict @@ -292,6 +303,7 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: def sympy_to_function( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, + detach_keys: Tuple[str, ...] = None, ) -> ComposedNode: """Convert sympy expression to callable function. @@ -343,10 +355,11 @@ def sympy_to_function( True """ + # NOTE: Those simplify methods seem complicate given expr instead, so not use them here # simplify expression to reduce nodes in tree - expr = sp.nsimplify(expr) - expr = sp.expand(expr) - expr = sp.simplify(expr) + # expr = sp.nsimplify(expr) + # expr = sp.expand(expr) + # expr = sp.simplify(expr) # convert sympy expression tree to list of nodes in postorder sympy_nodes = [] @@ -358,21 +371,34 @@ def sympy_to_function( # remove duplicates with topo-order kept sympy_nodes = list(dict.fromkeys(sympy_nodes)) - # convert sympy node to callable node if not isinstance(models, (tuple, list)): models = (models,) + if detach_keys is None: + detach_keys = () + + # convert sympy node to callable node callable_nodes = [] for i, node in enumerate(sympy_nodes): if isinstance(node.func, sp.core.function.UndefinedFunction): - match = False - for model in models: + match_index = None + for j, model in enumerate(models): if str(node.func.name) in model.output_keys: - callable_nodes.append(LayerNode(node, model)) - if match: + callable_nodes.append( + LayerNode( + node, + model, + tuple( + key for key in detach_keys if key in model.output_keys + ), + ) + ) + if match_index is not None: raise ValueError( - f"Function {node} can match at least 2 output key of models, which is forbidden." + f"Name of function({node}) should be unique along given models," + f" but got same output_key({node.func.name}) in models[{match_index}]" + f" and models[{j}]." 
) - match = True + match_index = j elif ( isinstance(node, tuple(PADDLE_FUNC_MAP.keys())) or node.is_Add From d32fd84672e693468c05d8b5493d59d3b345e97d Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sat, 26 Aug 2023 12:40:19 +0000 Subject: [PATCH 15/48] replace sympy PDE for biharmonic and laplace --- examples/euler_beam/euler_beam.py | 2 +- examples/laplace/laplace2d.py | 4 ++-- ppsci/equation/pde/biharmonic.py | 17 +++++++---------- ppsci/equation/pde/laplace.py | 18 ++++++++---------- ppsci/solver/solver.py | 10 ++++++++++ ppsci/utils/expression.py | 27 +++++++++++---------------- 6 files changed, 39 insertions(+), 39 deletions(-) diff --git a/examples/euler_beam/euler_beam.py b/examples/euler_beam/euler_beam.py index 1dfffeaec..a7bf45b0f 100644 --- a/examples/euler_beam/euler_beam.py +++ b/examples/euler_beam/euler_beam.py @@ -31,7 +31,7 @@ ITERS_PER_EPOCH = 1 EPOCHS = 10000 if not args.epochs else args.epochs # set output directory - OUTPUT_DIR = "./output/euler_beam" if not args.output_dir else args.output_dir + OUTPUT_DIR = "./output_euler_beam" if not args.output_dir else args.output_dir # initialize logger logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info") diff --git a/examples/laplace/laplace2d.py b/examples/laplace/laplace2d.py index 72fe7b7aa..fe8eacd3b 100644 --- a/examples/laplace/laplace2d.py +++ b/examples/laplace/laplace2d.py @@ -29,14 +29,14 @@ EVAL_FREQ = 200 # set output directory - OUTPUT_DIR = "./output/laplace2d" if not args.output_dir else args.output_dir + OUTPUT_DIR = "./output_laplace2d" if not args.output_dir else args.output_dir logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info") # set model model = ppsci.arch.MLP(("x", "y"), ("u",), 5, 20) # set equation - equation = {"laplace": ppsci.equation.pde.Laplace(dim=2)} + equation = {"laplace": ppsci.equation.Laplace(dim=2)} # set geometry geom = {"rect": ppsci.geometry.Rectangle((0.0, 0.0), (1.0, 1.0))} diff --git a/ppsci/equation/pde/biharmonic.py b/ppsci/equation/pde/biharmonic.py index e5c7c63e2..b227e2eb9 100644 --- a/ppsci/equation/pde/biharmonic.py +++ b/ppsci/equation/pde/biharmonic.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ppsci.autodiff import hessian from ppsci.equation.pde import base @@ -39,13 +38,11 @@ def __init__(self, dim: int, q: float, D: float): self.q = q self.D = D - def biharmonic_compute_func(out): - u = out["u"] - biharmonic = -self.q / self.D - invars = ("x", "y", "z")[: self.dim] - for invar_i in invars: - for invar_j in invars: - biharmonic += hessian(hessian(u, out[invar_i]), out[invar_j]) - return biharmonic + invars = self.create_symbols(("x", "y", "z")[: self.dim]) + u = self.create_function("u", invars) + biharmonic = -self.q / self.D + for invar_i in invars: + for invar_j in invars: + biharmonic += u.diff(invar_i).diff(invar_i).diff(invar_j).diff(invar_j) - self.add_equation("biharmonic", biharmonic_compute_func) + self.add_equation("biharmonic", biharmonic) diff --git a/ppsci/equation/pde/laplace.py b/ppsci/equation/pde/laplace.py index 4ab483fda..cada0c094 100644 --- a/ppsci/equation/pde/laplace.py +++ b/ppsci/equation/pde/laplace.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ppsci.autodiff import hessian +from __future__ import annotations + from ppsci.equation.pde import base @@ -34,14 +35,11 @@ class Laplace(base.PDE): def __init__(self, dim: int): super().__init__() self.dim = dim + invars = self.create_symbols(("x", "y", "z")[: self.dim]) + u = self.create_function("u", invars) - def laplace_compute_func(out): - x, y = out["x"], out["y"] - u = out["u"] - laplace = hessian(u, x) + hessian(u, y) - if self.dim == 3: - z = out["z"] - laplace += hessian(u, z) - return laplace + laplace = 0 + for invar in invars: + laplace += u.diff(invar).diff(invar) - self.add_equation("laplace", laplace_compute_func) + self.add_equation("laplace", laplace) diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index 4bce638b0..e735ac196 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -27,6 +27,7 @@ import numpy as np import paddle import paddle.distributed as dist +import sympy as sp import visualdl as vdl from packaging import version from paddle import amp @@ -43,6 +44,7 @@ from ppsci.utils import logger from ppsci.utils import misc from ppsci.utils import save_load +from ppsci.utils import sym_to_func class Solver: @@ -210,6 +212,14 @@ def __init__( # set equations for physics-driven or data-physics hybrid driven task, such as PINN self.equation = equation + # convert sympy expression to python function using sym_to_func module + for equation_obj in self.equation.values(): + for name, expr in equation_obj.equations.items(): + if isinstance(expr, sp.Basic): + equation_obj.equations[name] = sym_to_func.sympy_to_function( + expr, self.model + ) + # set geometry for generating data self.geom = {} if geom is None else geom diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index b331cbb84..07eeadc1c 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -74,17 +74,15 @@ def train_forward( output_dicts = [] for i, expr_dict in enumerate(expr_dicts): # model forward - if callable(next(iter(expr_dict.values()))): - output_dict = model(input_dicts[i]) + output_dict = model(input_dicts[i]) # equation forward + tmp = {k: v for k, v in input_dicts[i].items()} + tmp.update(output_dict) for name, expr in expr_dict.items(): if name not in label_dicts[i]: continue - if callable(expr): - output_dict[name] = expr({**output_dict, **input_dicts[i]}) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") + output_dict[name] = expr(tmp) # put field 'area' into output_dict if "area" in input_dicts[i]: @@ -132,17 +130,15 @@ def eval_forward( given validator. 
""" # model forward - if callable(next(iter(expr_dict.values()))): - output_dict = model(input_dict) + output_dict = model(input_dict) # equation forward + tmp = {k: v for k, v in input_dict.items()} + tmp.update(output_dict) for name, expr in expr_dict.items(): if name not in label_dict: continue - if callable(expr): - output_dict[name] = expr({**output_dict, **input_dict}) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") + output_dict[name] = expr(tmp) # put field 'area' into output_dict if "area" in input_dict: @@ -181,11 +177,10 @@ def visu_forward( if isinstance(expr_dict, dict): # equation forward + tmp = {k: v for k, v in input_dict.items()} + tmp.update(output_dict) for name, expr in expr_dict.items(): - if callable(expr): - output_dict[name] = expr({**output_dict, **input_dict}) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") + output_dict[name] = expr(tmp) # clear differentiation cache clear() From 4e61fa54d9063c2a4ea11e216aad1d86eed59ccf Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 27 Aug 2023 05:46:22 +0000 Subject: [PATCH 16/48] refine sym_to_func.py --- ppsci/utils/sym_to_func.py | 64 ++++++++++++++++++++------------------ 1 file changed, 34 insertions(+), 30 deletions(-) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index e6a7fdd56..85f4abcdf 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -43,18 +43,7 @@ ] -PADDLE_FUNC_MAP = { - sp.sin: paddle.sin, - sp.cos: paddle.cos, - sp.exp: paddle.exp, - sp.Pow: paddle.pow, - sp.log: paddle.log, - sp.tan: paddle.tan, - sp.Max: paddle.maximum, - sp.Min: paddle.minimum, - sp.Abs: paddle.abs, - sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), -} +DATA_DICT: TypeAlias = Dict[str, paddle.Tensor] SYMPY_BUILTIN_FUNC: TypeAlias = Union[ sp.sin, @@ -69,6 +58,19 @@ sp.Heaviside, ] +PADDLE_FUNC_MAP = { + sp.sin: paddle.sin, + sp.cos: paddle.cos, + sp.exp: paddle.exp, + sp.Pow: paddle.pow, + sp.log: paddle.log, + sp.tan: paddle.tan, + sp.Max: paddle.maximum, + sp.Min: paddle.minimum, + sp.Abs: paddle.abs, + sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), +} + def _cvt_to_key(expr: sp.Basic) -> str: """Convert sympy expression to a string key, mainly as retrieval key in dict. 
@@ -136,51 +138,53 @@ def __init__(self, expr: SYMPY_BUILTIN_FUNC): self.childs = [_cvt_to_key(arg) for arg in self.expr.args] if self.expr.func == sp.Add: - self.func = self._add_operator_func + self._operator_func = self._add_operator_func elif self.expr.func == sp.Mul: - self.func = self._mul_operator_func + self._operator_func = self._mul_operator_func elif self.expr.func == sp.Derivative: - self.func = self._derivate_operator_func + self._operator_func = self._derivate_operator_func else: if self.expr.func == sp.Heaviside: - self.func = self._heaviside_operator_func + self._operator_func = self._heaviside_operator_func + self._mapping_func = PADDLE_FUNC_MAP[sp.Heaviside] else: - self.func = self._vanilla_operator_func + self._operator_func = self._vanilla_operator_func + self._mapping_func = PADDLE_FUNC_MAP[self.expr.func] - def forward(self, data_dict: Dict): + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: # use cache if self.key in data_dict: return data_dict - return self.func(data_dict) + return self._operator_func(data_dict) - def _add_operator_func(self, data_dict): + def _add_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = sum([data_dict[child] for child in self.childs]) return data_dict - def _mul_operator_func(self, data_dict): + def _mul_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = data_dict[self.childs[0]] for child in self.childs[1:]: data_dict[self.key] *= data_dict[child] return data_dict - def _derivate_operator_func(self, data_dict): + def _derivate_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = data_dict[self.childs[0]] for child, order in self.childs[1:]: if order & 1: data_dict[self.key] = jacobian(data_dict[self.key], data_dict[child]) order -= 1 - while order > 0: + for _ in range(0, order, 2): data_dict[self.key] = hessian(data_dict[self.key], data_dict[child]) order -= 2 return data_dict - def _heaviside_operator_func(self, data_dict): - data_dict[self.key] = PADDLE_FUNC_MAP[sp.Heaviside](data_dict[self.childs[0]]) + def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self._mapping_func(data_dict[self.childs[0]]) return data_dict - def _vanilla_operator_func(self, data_dict): - data_dict[self.key] = PADDLE_FUNC_MAP[self.expr.func]( + def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self._mapping_func( *tuple(data_dict[child] for child in self.childs) ) return data_dict @@ -204,7 +208,7 @@ def __init__( self.model = model self.detach_keys = detach_keys - def forward(self, data_dict: Dict): + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: # use cache if self.key in data_dict: return data_dict @@ -242,7 +246,7 @@ def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): ) self.expr = paddle.to_tensor(self.expr) - def forward(self, data_dict: Dict): + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: # use cache if self.key in data_dict: return data_dict @@ -260,7 +264,7 @@ def __init__(self, funcs: List[Node]): super().__init__() self.funcs = funcs - def forward(self, data_dict: Dict): + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: # call all funcs in order for func in self.funcs: data_dict = func(data_dict) From 4baf4668b19885a014c5e45dde2ddce29c47ffaf Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 27 Aug 2023 10:01:42 +0000 Subject: [PATCH 17/48] fix bug in _cvt_to_key --- ppsci/utils/sym_to_func.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 85f4abcdf..61e7788a1 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -86,7 +86,7 @@ def _cvt_to_key(expr: sp.Basic) -> str: # use name of custom function instead of itself. return expr.name else: - str(expr) + return str(expr) elif isinstance(expr, sp.Derivative): # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" expr_str = expr.args[0].name From ba2a5c22ca50b67ceed63b0b0ead7c9bd0d9679e Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 27 Aug 2023 17:05:06 +0000 Subject: [PATCH 18/48] refine sym_to_func and expression code --- ppsci/utils/expression.py | 37 ++++++++++++++++++------------------- ppsci/utils/sym_to_func.py | 8 +++----- 2 files changed, 21 insertions(+), 24 deletions(-) diff --git a/ppsci/utils/expression.py b/ppsci/utils/expression.py index 07eeadc1c..bcf866ae5 100644 --- a/ppsci/utils/expression.py +++ b/ppsci/utils/expression.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from typing import TYPE_CHECKING from typing import Callable from typing import Dict @@ -25,6 +27,7 @@ import paddle from ppsci import constraint from ppsci import validate + from ppsci import arch from ppsci.autodiff import clear @@ -52,7 +55,7 @@ def train_forward( self, expr_dicts: Tuple[Dict[str, Callable], ...], input_dicts: Tuple[Dict[str, "paddle.Tensor"], ...], - model: nn.Layer, + model: arch.Arch, constraint: Dict[str, "constraint.Constraint"], label_dicts: Tuple[Dict[str, "paddle.Tensor"], ...], weight_dicts: Tuple[Dict[str, "paddle.Tensor"], ...], @@ -63,7 +66,7 @@ def train_forward( Args: expr_dicts (Tuple[Dict[str, Callable], ...]): Tuple of expression dicts. input_dicts (Tuple[Dict[str, paddle.Tensor], ...]): Tuple of input dicts. - model (nn.Layer): NN model. + model (arch.Arch): NN model. constraint (Dict[str, "constraint.Constraint"]): Constraint dict. label_dicts (Tuple[Dict[str, paddle.Tensor], ...]): Tuple of label dicts. weight_dicts (Tuple[Dict[str, paddle.Tensor], ...]): Tuple of weight dicts. @@ -77,12 +80,10 @@ def train_forward( output_dict = model(input_dicts[i]) # equation forward - tmp = {k: v for k, v in input_dicts[i].items()} - tmp.update(output_dict) + data_dict = {k: v for k, v in input_dicts[i].items()} + data_dict.update(output_dict) for name, expr in expr_dict.items(): - if name not in label_dicts[i]: - continue - output_dict[name] = expr(tmp) + output_dict[name] = expr(data_dict) # put field 'area' into output_dict if "area" in input_dicts[i]: @@ -109,7 +110,7 @@ def eval_forward( self, expr_dict: Dict[str, Callable], input_dict: Dict[str, "paddle.Tensor"], - model: nn.Layer, + model: arch.Arch, validator: "validate.Validator", label_dict: Dict[str, "paddle.Tensor"], weight_dict: Dict[str, "paddle.Tensor"], @@ -120,7 +121,7 @@ def eval_forward( Args: expr_dict (Dict[str, Callable]): Expression dict. input_dict (Dict[str, paddle.Tensor]): Input dict. - model (nn.Layer): NN model. + model (arch.Arch): NN model. validator (validate.Validator): Validator. label_dict (Dict[str, paddle.Tensor]): Label dict. weight_dict (Dict[str, paddle.Tensor]): Weight dict. 
@@ -133,12 +134,10 @@ def eval_forward( output_dict = model(input_dict) # equation forward - tmp = {k: v for k, v in input_dict.items()} - tmp.update(output_dict) + data_dict = {k: v for k, v in input_dict.items()} + data_dict.update(output_dict) for name, expr in expr_dict.items(): - if name not in label_dict: - continue - output_dict[name] = expr(tmp) + output_dict[name] = expr(data_dict) # put field 'area' into output_dict if "area" in input_dict: @@ -159,7 +158,7 @@ def visu_forward( self, expr_dict: Optional[Dict[str, Callable]], input_dict: Dict[str, "paddle.Tensor"], - model: nn.Layer, + model: arch.Arch, ) -> Dict[str, "paddle.Tensor"]: """Forward computation for visualization, including model forward and equation forward. @@ -167,7 +166,7 @@ def visu_forward( Args: expr_dict (Optional[Dict[str, Callable]]): Expression dict. input_dict (Dict[str, paddle.Tensor]): Input dict. - model (nn.Layer): NN model. + model (arch.Arch): NN model. Returns: Dict[str, paddle.Tensor]: Result dict for given expression dict. @@ -177,10 +176,10 @@ def visu_forward( if isinstance(expr_dict, dict): # equation forward - tmp = {k: v for k, v in input_dict.items()} - tmp.update(output_dict) + data_dict = {k: v for k, v in input_dict.items()} + data_dict.update(output_dict) for name, expr in expr_dict.items(): - output_dict[name] = expr(tmp) + output_dict[name] = expr(data_dict) # clear differentiation cache clear() diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 61e7788a1..8baebbd70 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -195,13 +195,13 @@ class LayerNode(Node): Args: expr (sp.core.function.UndefinedFunction): Sympy expression. - model (nn.Layer): NN model for computing forward result in this node. + model (arch.Arch): NN model for computing forward result in this node. """ def __init__( self, expr: sp.core.function.UndefinedFunction, - model: nn.Layer, + model: arch.Arch, detach_keys: Optional[Tuple[str, ...]] = None, ): super().__init__(expr) @@ -284,9 +284,7 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: List[sp.Basic]: Node list storing all tree nodes in postorder. 
""" # traverse into sub-nodes - if isinstance(cur_node, sp.core.function.UndefinedFunction): - nodes.append(cur_node) - elif isinstance(cur_node, sp.Function): + if isinstance(cur_node, sp.Function): for arg in cur_node.args: nodes = _post_traverse(arg, nodes) nodes.append(cur_node) From f057eeca5a743944b7318c7a5bf08a9f6a6110f0 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 27 Aug 2023 17:07:32 +0000 Subject: [PATCH 19/48] add euler_beam static code(WIP, can not running, to be debug) --- examples/euler_beam/euler_beam.py | 6 +- ppsci/equation/pde/linear_elasticity.py | 74 ------------------------- 2 files changed, 5 insertions(+), 75 deletions(-) diff --git a/examples/euler_beam/euler_beam.py b/examples/euler_beam/euler_beam.py index a7bf45b0f..ed4533c7e 100644 --- a/examples/euler_beam/euler_beam.py +++ b/examples/euler_beam/euler_beam.py @@ -31,7 +31,11 @@ ITERS_PER_EPOCH = 1 EPOCHS = 10000 if not args.epochs else args.epochs # set output directory - OUTPUT_DIR = "./output_euler_beam" if not args.output_dir else args.output_dir + OUTPUT_DIR = ( + "./output_euler_beam_sympy_to_static" + if not args.output_dir + else args.output_dir + ) # initialize logger logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info") diff --git a/ppsci/equation/pde/linear_elasticity.py b/ppsci/equation/pde/linear_elasticity.py index 77b2c7eac..aff126919 100644 --- a/ppsci/equation/pde/linear_elasticity.py +++ b/ppsci/equation/pde/linear_elasticity.py @@ -31,9 +31,6 @@ class LinearElasticity(base.PDE): traction_{x} = n_x \sigma_{xx} + n_y \sigma_{xy} + n_z \sigma_{xz} \\ traction_{y} = n_y \sigma_{yx} + n_y \sigma_{yy} + n_z \sigma_{yz} \\ traction_{z} = n_z \sigma_{zx} + n_y \sigma_{zy} + n_z \sigma_{zz} \\ - navier_{x} = \rho(\dfrac{\partial^2 u}{\partial t^2}) - (\lambda + \mu)(\dfrac{\partial^2 u}{\partial x^2}+\dfrac{\partial^2 v}{\partial y \partial x} + \dfrac{\partial^2 w}{\partial z \partial x}) - \mu(\dfrac{\partial^2 u}{\partial x^2} + \dfrac{\partial^2 u}{\partial y^2} + \dfrac{\partial^2 u}{\partial z^2}) \\ - navier_{y} = \rho(\dfrac{\partial^2 v}{\partial t^2}) - (\lambda + \mu)(\dfrac{\partial^2 v}{\partial x \partial y}+\dfrac{\partial^2 v}{\partial y^2} + \dfrac{\partial^2 w}{\partial z \partial y}) - \mu(\dfrac{\partial^2 v}{\partial x^2} + \dfrac{\partial^2 v}{\partial y^2} + \dfrac{\partial^2 v}{\partial z^2}) \\ - navier_{z} = \rho(\dfrac{\partial^2 w}{\partial t^2}) - (\lambda + \mu)(\dfrac{\partial^2 w}{\partial x \partial z}+\dfrac{\partial^2 v}{\partial y \partial z} + \dfrac{\partial^2 w}{\partial z^2}) - \mu(\dfrac{\partial^2 w}{\partial x^2} + \dfrac{\partial^2 w}{\partial y^2} + \dfrac{\partial^2 w}{\partial z^2}) \\ \end{cases} $$ @@ -273,74 +270,3 @@ def traction_z_compute_func(out): return traction_z self.add_equation("traction_z", traction_z_compute_func) - - # Navier equations - def navier_x_compute_func(out): - x, y, u, v = ( - out["x"], - out["y"], - out["u"], - out["v"], - ) - duxvywz = jacobian(u, x) + jacobian(v, y) - duxxuyyuzz = hessian(u, x) + hessian(u, y) - if self.dim == 3: - z, w = out["z"], out["w"] - duxvywz += jacobian(w, z) - duxxuyyuzz += hessian(u, z) - navier_x = ( - -(self.lambda_ + self.mu) * jacobian(duxvywz, x) - self.mu * duxxuyyuzz - ) - if self.time: - t = out["t"] - navier_x += rho * hessian(u, t) - return navier_x - - self.add_equation("navier_x", navier_x_compute_func) - - def navier_y_compute_func(out): - x, y, u, v = ( - out["x"], - out["y"], - out["u"], - out["v"], - ) - duxvywz = jacobian(u, x) + jacobian(v, 
y) - dvxxvyyvzz = hessian(v, x) + hessian(v, y) - if self.dim == 3: - z, w = out["z"], out["w"] - duxvywz += jacobian(w, z) - dvxxvyyvzz += hessian(v, z) - navier_y = ( - -(self.lambda_ + self.mu) * jacobian(duxvywz, y) - self.mu * dvxxvyyvzz - ) - if self.time: - t = out["t"] - navier_y += rho * hessian(v, t) - return navier_y - - self.add_equation("navier_y", navier_y_compute_func) - - if self.dim == 3: - - def navier_z_compute_func(out): - x, y, z, u, v, w = ( - out["x"], - out["y"], - out["z"], - out["u"], - out["v"], - out["w"], - ) - duxvywz = jacobian(u, x) + jacobian(v, y) + jacobian(w, z) - dwxxvyyvzz = hessian(w, x) + hessian(w, y) + hessian(w, z) - navier_z = ( - -(self.lambda_ + self.mu) * jacobian(duxvywz, z) - - self.mu * dwxxvyyvzz - ) - if self.time: - t = out["t"] - navier_z += rho * hessian(w, t) - return navier_z - - self.add_equation("navier_z", navier_z_compute_func) From ffd6b27933eeae694345a8eee0e60586134a0e2b Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Mon, 28 Aug 2023 05:13:54 +0000 Subject: [PATCH 20/48] wip code --- ppsci/equation/pde/base.py | 28 ++++++++++++++++++++++++++-- ppsci/equation/pde/viv.py | 27 +++++++++++++-------------- ppsci/solver/solver.py | 10 ++-------- ppsci/utils/sym_to_func.py | 37 ++++++++++++++++++++++++++++++++----- 4 files changed, 73 insertions(+), 29 deletions(-) diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index 12e685e98..522c1b46a 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -12,15 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import TYPE_CHECKING from typing import Callable from typing import Dict from typing import List +from typing import Optional from typing import Tuple +from typing import Union import paddle import sympy from paddle import nn +from ppsci.utils import sym_to_func + +if TYPE_CHECKING: + from ppsci import arch + class PDE: """Base class for Partial Differential Equation""" @@ -28,11 +36,12 @@ class PDE: def __init__(self): super().__init__() self.equations = {} + self.detach_keys = [] # for PDE which has learnable parameter(s) self.learnable_parameters = nn.ParameterList() - def create_symbols(self, symbol_str) -> Tuple[sympy.Symbol, ...]: + def create_symbols(self, symbol_str: str) -> Tuple[sympy.Symbol, ...]: """Create symbols Args: @@ -43,7 +52,9 @@ def create_symbols(self, symbol_str) -> Tuple[sympy.Symbol, ...]: """ return sympy.symbols(symbol_str) - def create_function(self, name, invars) -> sympy.Function: + def create_function( + self, name: str, invars: Tuple[sympy.Symbol, ...] + ) -> sympy.Function: """Create named function depending on given invars. Args: @@ -80,6 +91,19 @@ def set_state_dict(self, state_dict): """Set state dict from dict.""" self.learnable_parameters.set_state_dict(state_dict) + def cvt_sympy_to_function( + self, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] + ) -> None: + """Convert equation(s) to callable function""" + for name, expr in self.equations.items(): + if isinstance(expr, sympy.Basic): + self.equations[name] = sym_to_func.sympy_to_function( + expr, + models, + self.detach_keys, + self.learnable_parameters, + ) + def __str__(self): return ", ".join( [self.__class__.__name__] diff --git a/ppsci/equation/pde/viv.py b/ppsci/equation/pde/viv.py index 6330dff7c..39de1090b 100644 --- a/ppsci/equation/pde/viv.py +++ b/ppsci/equation/pde/viv.py @@ -13,10 +13,9 @@ # limitations under the License. 
import paddle +import sympy as sp from paddle.nn import initializer -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian from ppsci.equation.pde import base @@ -43,25 +42,25 @@ def __init__(self, rho: float, k1: float, k2: float): self.k1 = paddle.create_parameter( shape=[], dtype=paddle.get_default_dtype(), + name="k1", default_initializer=initializer.Constant(k1), ) self.k2 = paddle.create_parameter( shape=[], dtype=paddle.get_default_dtype(), + name="k2", default_initializer=initializer.Constant(k2), ) self.learnable_parameters.append(self.k1) self.learnable_parameters.append(self.k2) - def f_compute_func(out): - eta, t = out["eta"], out["t_f"] - eta__t = jacobian(eta, t) - eta__t__t = hessian(eta, t) - f = ( - self.rho * eta__t__t - + paddle.exp(self.k1) * eta__t - + paddle.exp(self.k2) * eta - ) - return f - - self.add_equation("f", f_compute_func) + t_f = self.create_symbols("t_f") + eta = self.create_function("eta", (t_f,)) + k1 = self.create_symbols(self.k1.name) + k2 = self.create_symbols(self.k2.name) + f = ( + self.rho * eta.diff(t_f).diff(t_f) + + sp.exp(k1) * eta.diff(t_f) + + sp.exp(k2) * eta + ) + self.add_equation("f", f) diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index e735ac196..334005699 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -27,7 +27,6 @@ import numpy as np import paddle import paddle.distributed as dist -import sympy as sp import visualdl as vdl from packaging import version from paddle import amp @@ -44,7 +43,6 @@ from ppsci.utils import logger from ppsci.utils import misc from ppsci.utils import save_load -from ppsci.utils import sym_to_func class Solver: @@ -213,12 +211,8 @@ def __init__( self.equation = equation # convert sympy expression to python function using sym_to_func module - for equation_obj in self.equation.values(): - for name, expr in equation_obj.equations.items(): - if isinstance(expr, sp.Basic): - equation_obj.equations[name] = sym_to_func.sympy_to_function( - expr, self.model - ) + for equation in self.equation.values(): + equation.cvt_sympy_to_function(model) # set geometry for generating data self.geom = {} if geom is None else geom diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 8baebbd70..4734600d2 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -225,7 +225,7 @@ def forward(self, data_dict: DATA_DICT) -> DATA_DICT: class ConstantNode(Node): - """ "Class for constant variable node in converted expression tree. + """Class for constant variable node in converted expression tree. Args: expr (Union[sp.Number, sp.NumberSymbol]): Number expression. @@ -255,6 +255,23 @@ def forward(self, data_dict: DATA_DICT) -> DATA_DICT: return data_dict +class ParameterNode(Node): + """Class for constant variable node in converted expression tree. + + Args: + expr (sp.Symbol): Parameter expression. + paramter (paddle.framework.io.EagerParamBase): Parameter tensor. + """ + + def __init__(self, expr: sp.Symbol, paramter: paddle.framework.io.EagerParamBase): + super().__init__(expr) + self.parameter = paramter + + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self.parameter + return data_dict + + class ComposedNode(nn.Layer): """ Compose list of several callable objects together. @@ -305,13 +322,16 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: def sympy_to_function( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, - detach_keys: Tuple[str, ...] 
= None, + detach_keys: Optional[Tuple[str, ...]] = None, + parameters: Optional[nn.ParameterList] = None, ) -> ComposedNode: """Convert sympy expression to callable function. Args: expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. + detach_keys (Optional[Tuple[str, ...]], optional): Keys which will be detached in computation. Defaults to None. + parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. Returns: ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. @@ -367,8 +387,13 @@ def sympy_to_function( sympy_nodes = [] sympy_nodes = _post_traverse(expr, sympy_nodes) - # remove unnecessary symbol node for already in input dict - sympy_nodes = [node for node in sympy_nodes if not node.is_Symbol] + # remove unnecessary symbol node for already in input dict(except for paramter symbol) + _parameter_names = tuple(param.name for param in parameters) + sympy_nodes = [ + node + for node in sympy_nodes + if (not node.is_Symbol) or (_cvt_to_key(node) not in _parameter_names) + ] # remove duplicates with topo-order kept sympy_nodes = list(dict.fromkeys(sympy_nodes)) @@ -381,7 +406,7 @@ def sympy_to_function( # convert sympy node to callable node callable_nodes = [] for i, node in enumerate(sympy_nodes): - if isinstance(node.func, sp.core.function.UndefinedFunction): + if isinstance(node, sp.Function): match_index = None for j, model in enumerate(models): if str(node.func.name) in model.output_keys: @@ -411,6 +436,8 @@ def sympy_to_function( callable_nodes.append(OperatorNode(node)) elif node.is_Number or node.is_NumberSymbol: callable_nodes.append(ConstantNode(node)) + elif isinstance(node, sp.Symbol): + callable_nodes.append(ParameterNode(node, parameters[_cvt_to_key(node)])) else: raise NotImplementedError( f"The node {node} is not supported in sympy_to_function." 
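The conversion path assembled by the patches so far can be exercised on its own with one of the sympy-based equations from [PATCH 15/48]. The snippet below is only a minimal sketch of that flow, not part of any commit in this series: it assumes the patched ppsci.equation.Laplace and ppsci.utils.sym_to_func modules, and the MLP size and random sample points are illustrative.

    import paddle
    import ppsci
    from ppsci.utils import sym_to_func

    # model predicting u(x, y), same shape as in examples/laplace/laplace2d.py
    model = ppsci.arch.MLP(("x", "y"), ("u",), 5, 20)

    # Laplace now stores a sympy expression: Derivative(u, (x, 2)) + Derivative(u, (y, 2))
    laplace = ppsci.equation.Laplace(dim=2)
    laplace_func = sym_to_func.sympy_to_function(laplace.equations["laplace"], model)

    # sample points must keep gradients so jacobian/hessian can be evaluated at runtime
    input_dict = {"x": paddle.randn([16, 1]), "y": paddle.randn([16, 1])}
    for value in input_dict.values():
        value.stop_gradient = False

    out = laplace_func(input_dict)  # evaluates the u__x__x + u__y__y residual on the samples
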
From b0d1df18ae1e33f60ad6395944fdf6311c62a43b Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Mon, 28 Aug 2023 11:03:16 +0000 Subject: [PATCH 21/48] temporary code(need to be refined) --- ppsci/equation/pde/base.py | 3 ++- ppsci/solver/solver.py | 44 ++++++++++++++++++++++++++++++++++---- ppsci/utils/sym_to_func.py | 34 +++++++++++++++++------------ 3 files changed, 62 insertions(+), 19 deletions(-) diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index b8064b194..ef9ac07b4 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -38,6 +38,7 @@ class PDE: def __init__(self): super().__init__() self.equations = {} + self.equations_func = {} self.detach_keys = [] # for PDE which has learnable parameter(s) @@ -99,7 +100,7 @@ def cvt_sympy_to_function( """Convert equation(s) to callable function""" for name, expr in self.equations.items(): if isinstance(expr, sympy.Basic): - self.equations[name] = sym_to_func.sympy_to_function( + self.equations_func[name] = sym_to_func.sympy_to_function( expr, models, self.detach_keys, diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index b860c3ba6..3fc1febe0 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -27,6 +27,7 @@ import numpy as np import paddle import paddle.distributed as dist +import sympy as sp import visualdl as vdl from packaging import version from paddle import amp @@ -210,10 +211,6 @@ def __init__( # set equations for physics-driven or data-physics hybrid driven task, such as PINN self.equation = equation - # convert sympy expression to python function using sym_to_func module - for equation in self.equation.values(): - equation.cvt_sympy_to_function(model) - # set geometry for generating data self.geom = {} if geom is None else geom @@ -223,6 +220,45 @@ def __init__( # set visualizer self.visualizer = visualizer + # convert sympy expression to python function using sym_to_func module + for equation in self.equation.values(): + equation.cvt_sympy_to_function(model) + if constraint is not None: + for constraint in self.constraint.values(): + for name, expr in constraint.output_expr.items(): + if isinstance(expr, sp.Basic): + converted_func = [ + equation.equations_func[e_name] + for e_name, e_expr in equation.equations.items() + if expr == e_expr + ] + print(expr) + if len(converted_func) == 1: + constraint.output_expr[name] = converted_func[0] + if validator is not None: + for validator in self.validator.values(): + for name, expr in validator.output_expr.items(): + if isinstance(expr, sp.Basic): + converted_func = [ + equation.equations_func[e_name] + for e_name, e_expr in equation.equations.items() + if expr == e_expr + ] + print(expr) + if len(converted_func) == 1: + validator.output_expr[name] = converted_func[0] + if visualizer is not None: + for visualizer in self.visualizer.values(): + for name, expr in visualizer.output_expr.items(): + if isinstance(expr, sp.Basic): + converted_func = [ + equation.equations_func[e_name] + for e_name, e_expr in equation.equations.items() + if expr == e_expr + ] + print(expr) + if len(converted_func) == 1: + visualizer.output_expr[name] = converted_func[0] # set automatic mixed precision(AMP) configuration self.use_amp = use_amp self.amp_level = amp_level diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 4734600d2..4904b7e7f 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -260,12 +260,12 @@ class ParameterNode(Node): Args: expr (sp.Symbol): Parameter 
expression. - paramter (paddle.framework.io.EagerParamBase): Parameter tensor. + parameter (paddle.framework.io.EagerParamBase): Parameter tensor. """ - def __init__(self, expr: sp.Symbol, paramter: paddle.framework.io.EagerParamBase): + def __init__(self, expr: sp.Symbol, parameter: paddle.framework.io.EagerParamBase): super().__init__(expr) - self.parameter = paramter + self.parameter = parameter def forward(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = self.parameter @@ -309,6 +309,7 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: nodes = _post_traverse(cur_node.args[0], nodes) nodes.append(cur_node) elif isinstance(cur_node, sp.Symbol): + nodes.append(cur_node) return nodes elif isinstance(cur_node, sp.Number): nodes.append(cur_node) @@ -392,7 +393,7 @@ def sympy_to_function( sympy_nodes = [ node for node in sympy_nodes - if (not node.is_Symbol) or (_cvt_to_key(node) not in _parameter_names) + if (not node.is_Symbol) or (_cvt_to_key(node) in _parameter_names) ] # remove duplicates with topo-order kept @@ -406,7 +407,15 @@ def sympy_to_function( # convert sympy node to callable node callable_nodes = [] for i, node in enumerate(sympy_nodes): - if isinstance(node, sp.Function): + if ( + isinstance(node, tuple(PADDLE_FUNC_MAP.keys())) + or node.is_Add + or node.is_Mul + or node.is_Derivative + or node.is_Pow + ): + callable_nodes.append(OperatorNode(node)) + elif isinstance(node, sp.Function): match_index = None for j, model in enumerate(models): if str(node.func.name) in model.output_keys: @@ -426,18 +435,15 @@ def sympy_to_function( f" and models[{j}]." ) match_index = j - elif ( - isinstance(node, tuple(PADDLE_FUNC_MAP.keys())) - or node.is_Add - or node.is_Mul - or node.is_Derivative - or node.is_Pow - ): - callable_nodes.append(OperatorNode(node)) elif node.is_Number or node.is_NumberSymbol: callable_nodes.append(ConstantNode(node)) elif isinstance(node, sp.Symbol): - callable_nodes.append(ParameterNode(node, parameters[_cvt_to_key(node)])) + print("ParameterNode", node) + callable_nodes.append( + ParameterNode( + node, *[param for param in parameters if param.name == node.name] + ) + ) else: raise NotImplementedError( f"The node {node} is not supported in sympy_to_function." 
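With the symbol handling above in place, a PDE can expose trainable coefficients to the converted graph by giving a sympy symbol the same name as the parameter tensor, which is what the reworked viv.py does with k1 and k2. The following is a minimal sketch of that pattern outside the VIV class, for illustration only: the constant rho, the MLP size and the batch shape are assumptions, and the `parameters` keyword shown here is renamed to `extra_parameters` in a later patch of this series.

    import paddle
    import sympy as sp
    from paddle.nn import initializer

    from ppsci import arch
    from ppsci.utils import sym_to_func

    # trainable damping coefficient; its tensor name is what the sympy symbol must match
    k1 = paddle.create_parameter(
        shape=[],
        dtype=paddle.get_default_dtype(),
        name="k1",
        default_initializer=initializer.Constant(0.0),
    )

    t_f = sp.Symbol("t_f")
    eta = sp.Function("eta")(t_f)
    # damped-oscillator style residual with rho fixed to 1.0 and a learnable damping term
    expr = 1.0 * eta.diff(t_f).diff(t_f) + sp.exp(sp.Symbol(k1.name)) * eta.diff(t_f)

    model = arch.MLP(("t_f",), ("eta",), 3, 32)
    f_func = sym_to_func.sympy_to_function(expr, model, parameters=[k1])

    data = {"t_f": paddle.randn([8, 1])}
    data["t_f"].stop_gradient = False  # required for the runtime jacobian/hessian
    out = f_func(data)  # eta comes from the model, k1 enters through a ParameterNode

Because k1 stays a live paddle parameter inside the converted graph, gradients can flow back into it when the residual enters a training loss, which is what the VIV equation relies on for its learnable k1 and k2.
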
From b2a450983b0d0011832f98caafed541904584631 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 6 Sep 2023 12:21:52 +0000 Subject: [PATCH 22/48] update solver code --- ppsci/constraint/base.py | 1 + ppsci/equation/pde/base.py | 36 +++++++----------- ppsci/solver/solver.py | 78 +++++++++++++++++++------------------- ppsci/utils/sym_to_func.py | 15 ++++---- 4 files changed, 62 insertions(+), 68 deletions(-) diff --git a/ppsci/constraint/base.py b/ppsci/constraint/base.py index c3b4c8a12..95bdce058 100644 --- a/ppsci/constraint/base.py +++ b/ppsci/constraint/base.py @@ -47,6 +47,7 @@ def __init__( self.data_iter = iter(self.data_loader) self.loss = loss self.name = name + self.detach_keys = () def __str__(self): return ", ".join( diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index ef9ac07b4..99a342dd9 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -14,23 +14,15 @@ from __future__ import annotations -from typing import TYPE_CHECKING from typing import Callable from typing import Dict from typing import List -from typing import Optional from typing import Tuple -from typing import Union import paddle import sympy from paddle import nn -from ppsci.utils import sym_to_func - -if TYPE_CHECKING: - from ppsci import arch - class PDE: """Base class for Partial Differential Equation""" @@ -38,8 +30,8 @@ class PDE: def __init__(self): super().__init__() self.equations = {} - self.equations_func = {} - self.detach_keys = [] + # self.equations_func = {} + # self.detach_keys = [] # for PDE which has learnable parameter(s) self.learnable_parameters = nn.ParameterList() @@ -94,18 +86,18 @@ def set_state_dict(self, state_dict): """Set state dict from dict.""" self.learnable_parameters.set_state_dict(state_dict) - def cvt_sympy_to_function( - self, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] - ) -> None: - """Convert equation(s) to callable function""" - for name, expr in self.equations.items(): - if isinstance(expr, sympy.Basic): - self.equations_func[name] = sym_to_func.sympy_to_function( - expr, - models, - self.detach_keys, - self.learnable_parameters, - ) + # def cvt_sympy_to_function( + # self, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] + # ) -> None: + # """Convert equation(s) to callable function""" + # for name, expr in self.equations.items(): + # if isinstance(expr, sympy.Basic): + # self.equations_func[name] = sym_to_func.sympy_to_function( + # expr, + # models, + # self.detach_keys, + # self.learnable_parameters, + # ) def __str__(self): return ", ".join( diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index 2f0c0a1f0..f409ba8e8 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -44,6 +44,7 @@ from ppsci.utils import logger from ppsci.utils import misc from ppsci.utils import save_load +from ppsci.utils import sym_to_func class Solver: @@ -220,45 +221,6 @@ def __init__( # set visualizer self.visualizer = visualizer - # convert sympy expression to python function using sym_to_func module - for equation in self.equation.values(): - equation.cvt_sympy_to_function(model) - if constraint is not None: - for constraint in self.constraint.values(): - for name, expr in constraint.output_expr.items(): - if isinstance(expr, sp.Basic): - converted_func = [ - equation.equations_func[e_name] - for e_name, e_expr in equation.equations.items() - if expr == e_expr - ] - print(expr) - if len(converted_func) == 1: - constraint.output_expr[name] = converted_func[0] - if validator 
is not None: - for validator in self.validator.values(): - for name, expr in validator.output_expr.items(): - if isinstance(expr, sp.Basic): - converted_func = [ - equation.equations_func[e_name] - for e_name, e_expr in equation.equations.items() - if expr == e_expr - ] - print(expr) - if len(converted_func) == 1: - validator.output_expr[name] = converted_func[0] - if visualizer is not None: - for visualizer in self.visualizer.values(): - for name, expr in visualizer.output_expr.items(): - if isinstance(expr, sp.Basic): - converted_func = [ - equation.equations_func[e_name] - for e_name, e_expr in equation.equations.items() - if expr == e_expr - ] - print(expr) - if len(converted_func) == 1: - visualizer.output_expr[name] = converted_func[0] # set automatic mixed precision(AMP) configuration self.use_amp = use_amp self.amp_level = amp_level @@ -348,6 +310,44 @@ def __init__( # use loss aggregator, use summation if None self.loss_aggregator = loss_aggregator + # convert sympy to callable object if exist + extra_parameters = [] + for equation in self.equation.values(): + extra_parameters += list(equation.learnable_parameters) + + if self.constraint: + for constraint_ in self.constraint.values(): + for name, expr in constraint_.output_expr.items(): + if isinstance(expr, sp.Basic): + constraint_.output_expr[name] = sym_to_func.sympy_to_function( + expr, + self.model, + constraint_.detach_keys, + extra_parameters, + ) + + if self.validator: + for validator_ in self.validator.values(): + for name, expr in validator_.output_expr.items(): + if isinstance(expr, sp.Basic): + validator_.output_expr[name] = sym_to_func.sympy_to_function( + expr, + self.model, + None, + extra_parameters, + ) + + if self.visualizer: + for visualizer_ in self.visualizer.values(): + for name, expr in visualizer_.output_expr.items(): + if isinstance(expr, sp.Basic): + visualizer_.output_expr[name] = sym_to_func.sympy_to_function( + expr, + self.model, + None, + extra_parameters, + ) + @staticmethod def from_config(cfg: Dict[str, Any]) -> Solver: """Initialize solver from given config. diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 7698344b2..9c74b2a36 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -23,6 +23,7 @@ from typing import Dict from typing import List from typing import Optional +from typing import Sequence from typing import Tuple from typing import Union @@ -192,11 +193,11 @@ def _derivate_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: return data_dict def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._mapping_func(data_dict[self.childs[0]]) + data_dict[self.key] = self._compute_func(data_dict[self.childs[0]]) return data_dict def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._mapping_func( + data_dict[self.key] = self._compute_func( *tuple(data_dict[child] for child in self.childs) ) return data_dict @@ -336,7 +337,7 @@ def sympy_to_function( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, detach_keys: Optional[Tuple[str, ...]] = None, - parameters: Optional[nn.ParameterList] = None, + extra_parameters: Optional[Sequence[paddle.Tensor]] = None, ) -> ComposedNode: """Convert sympy expression to callable function. @@ -344,7 +345,7 @@ def sympy_to_function( expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. 
detach_keys (Optional[Tuple[str, ...]], optional): Keys which will be detached in computation. Defaults to None. - parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. + extra_parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. Returns: ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. @@ -401,7 +402,7 @@ def sympy_to_function( sympy_nodes = _post_traverse(expr, sympy_nodes) # remove unnecessary symbol node for already in input dict(except for paramter symbol) - _parameter_names = tuple(param.name for param in parameters) + _parameter_names = tuple(param.name for param in extra_parameters) sympy_nodes = [ node for node in sympy_nodes @@ -450,10 +451,10 @@ def sympy_to_function( elif node.is_Number or node.is_NumberSymbol: callable_nodes.append(ConstantNode(node)) elif isinstance(node, sp.Symbol): - print("ParameterNode", node) callable_nodes.append( ParameterNode( - node, *[param for param in parameters if param.name == node.name] + node, + *[param for param in extra_parameters if param.name == node.name], ) ) else: From 13d6ff6aa4916c86e5f933c0b8232eb9f55cf2fb Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 6 Sep 2023 13:03:26 +0000 Subject: [PATCH 23/48] replace more pdes with sympy --- ppsci/equation/pde/base.py | 13 -- ppsci/equation/pde/biharmonic.py | 2 +- ppsci/equation/pde/laplace.py | 2 +- ppsci/equation/pde/linear_elasticity.py | 290 ++++++++---------------- ppsci/equation/pde/navier_stokes.py | 116 ++++------ ppsci/equation/pde/normal_dot_vec.py | 15 +- ppsci/equation/pde/poisson.py | 15 +- 7 files changed, 149 insertions(+), 304 deletions(-) diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index 99a342dd9..005462936 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -86,19 +86,6 @@ def set_state_dict(self, state_dict): """Set state dict from dict.""" self.learnable_parameters.set_state_dict(state_dict) - # def cvt_sympy_to_function( - # self, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] - # ) -> None: - # """Convert equation(s) to callable function""" - # for name, expr in self.equations.items(): - # if isinstance(expr, sympy.Basic): - # self.equations_func[name] = sym_to_func.sympy_to_function( - # expr, - # models, - # self.detach_keys, - # self.learnable_parameters, - # ) - def __str__(self): return ", ".join( [self.__class__.__name__] diff --git a/ppsci/equation/pde/biharmonic.py b/ppsci/equation/pde/biharmonic.py index 3f61bd188..ffccab5c2 100644 --- a/ppsci/equation/pde/biharmonic.py +++ b/ppsci/equation/pde/biharmonic.py @@ -40,7 +40,7 @@ def __init__(self, dim: int, q: float, D: float): self.q = q self.D = D - invars = self.create_symbols(("x", "y", "z")[: self.dim]) + invars = self.create_symbols("x y z")[: self.dim] u = self.create_function("u", invars) biharmonic = -self.q / self.D for invar_i in invars: diff --git a/ppsci/equation/pde/laplace.py b/ppsci/equation/pde/laplace.py index cada0c094..6b1b4e0c8 100644 --- a/ppsci/equation/pde/laplace.py +++ b/ppsci/equation/pde/laplace.py @@ -35,7 +35,7 @@ class Laplace(base.PDE): def __init__(self, dim: int): super().__init__() self.dim = dim - invars = self.create_symbols(("x", "y", "z")[: self.dim]) + invars = self.create_symbols("x y z")[: self.dim] u = self.create_function("u", invars) laplace = 0 diff --git a/ppsci/equation/pde/linear_elasticity.py b/ppsci/equation/pde/linear_elasticity.py index 
f69c61b43..3207d9caf 100644 --- a/ppsci/equation/pde/linear_elasticity.py +++ b/ppsci/equation/pde/linear_elasticity.py @@ -16,8 +16,8 @@ from typing import Optional -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian +import sympy as sp + from ppsci.equation.pde import base @@ -63,212 +63,110 @@ def __init__( time: bool = False, ): super().__init__() + self.dim = dim + self.time = time + + t, x, y, z = self.create_symbols("t x y z") + normal_x, normal_y, normal_z = self.create_symbols("normal_x normal_y normal_z") + invars = (x, y) + if time: + invars = (t,) + invars + if self.dim == 3: + invars += (z,) + + u = self.create_function("u", invars) + v = self.create_function("v", invars) + w = self.create_function("w", invars) if dim == 3 else sp.Number(0) + + sigma_xx = self.create_function("sigma_xx", invars) + sigma_yy = self.create_function("sigma_yy", invars) + sigma_xy = self.create_function("sigma_xy", invars) + sigma_zz = ( + self.create_function("sigma_zz", invars) if dim == 3 else sp.Number(0) + ) + sigma_xz = ( + self.create_function("sigma_xz", invars) if dim == 3 else sp.Number(0) + ) + sigma_yz = ( + self.create_function("sigma_yz", invars) if dim == 3 else sp.Number(0) + ) + + # compute lambda and mu if lambda_ is None: - nu = float(nu) - E = float(E) + if isinstance(nu, str): + nu = self.create_function(nu)(invars) + if isinstance(E, str): + E = self.create_function(E)(invars) lambda_ = nu * E / ((1 + nu) * (1 - 2 * nu)) mu = E / (2 * (1 + nu)) + else: + if isinstance(lambda_, str): + lambda_ = self.create_function(lambda_)(invars) + if isinstance(mu, str): + mu = self.create_function(mu)(invars) + + if isinstance(rho, str): + rho = self.create_function(rho)(invars) self.E = E self.nu = nu self.lambda_ = lambda_ self.mu = mu self.rho = rho - self.dim = dim - self.time = time - - # Stress equations - def stress_disp_xx_compute_func(out): - x, y, u, v = ( - out["x"], - out["y"], - out["u"], - out["v"], - ) - sigma_xx = out["sigma_xx"] - stress_disp_xx = ( - self.lambda_ * (jacobian(u, x) + jacobian(v, y)) - + 2 * self.mu * jacobian(u, x) - - sigma_xx - ) - if self.dim == 3: - z, w = out["z"], out["w"] - stress_disp_xx += self.lambda_ * jacobian(w, z) - return stress_disp_xx - - self.add_equation("stress_disp_xx", stress_disp_xx_compute_func) - - def stress_disp_yy_compute_func(out): - x, y, u, v = ( - out["x"], - out["y"], - out["u"], - out["v"], - ) - sigma_yy = out["sigma_yy"] - stress_disp_yy = ( - self.lambda_ * (jacobian(u, x) + jacobian(v, y)) - + 2 * self.mu * jacobian(v, y) - - sigma_yy - ) - if self.dim == 3: - z, w = out["z"], out["w"] - stress_disp_yy += self.lambda_ * jacobian(w, z) - return stress_disp_yy - - self.add_equation("stress_disp_yy", stress_disp_yy_compute_func) - - if self.dim == 3: - - def stress_disp_zz_compute_func(out): - x, y, z, u, v, w = ( - out["x"], - out["y"], - out["z"], - out["u"], - out["v"], - out["w"], - ) - sigma_zz = out["sigma_zz"] - stress_disp_zz = ( - self.lambda_ * (jacobian(u, x) + jacobian(v, y) + jacobian(w, z)) - + 2 * self.mu * jacobian(w, z) - - sigma_zz - ) - return stress_disp_zz - - self.add_equation("stress_disp_zz", stress_disp_zz_compute_func) - - def stress_disp_xy_compute_func(out): - x, y, u, v = out["x"], out["y"], out["u"], out["v"] - sigma_xy = out["sigma_xy"] - stress_disp_xy = self.mu * (jacobian(u, y) + jacobian(v, x)) - sigma_xy - return stress_disp_xy - - self.add_equation("stress_disp_xy", stress_disp_xy_compute_func) + # compute stress equations + stress_disp_xx = ( + lambda_ * 
(u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * u.diff(x) + - sigma_xx + ) + stress_disp_yy = ( + lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * v.diff(y) + - sigma_yy + ) + stress_disp_zz = ( + lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) + + 2 * mu * w.diff(z) + - sigma_zz + ) + stress_disp_xy = mu * (u.diff(y) + v.diff(x)) - sigma_xy + stress_disp_xz = mu * (u.diff(z) + w.diff(x)) - sigma_xz + stress_disp_yz = mu * (v.diff(z) + w.diff(y)) - sigma_yz + + # compute equilibrium equations + equilibrium_x = rho * ((u.diff(t)).diff(t)) - ( + sigma_xx.diff(x) + sigma_xy.diff(y) + sigma_xz.diff(z) + ) + equilibrium_y = rho * ((v.diff(t)).diff(t)) - ( + sigma_xy.diff(x) + sigma_yy.diff(y) + sigma_yz.diff(z) + ) + equilibrium_z = rho * ((w.diff(t)).diff(t)) - ( + sigma_xz.diff(x) + sigma_yz.diff(y) + sigma_zz.diff(z) + ) + + # compute traction equations + traction_x = normal_x * sigma_xx + normal_y * sigma_xy + normal_z * sigma_xz + traction_y = normal_x * sigma_xy + normal_y * sigma_yy + normal_z * sigma_yz + traction_z = normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz + + # add stress equations + self.add_equation("stress_disp_xx", stress_disp_xx) + self.add_equation("stress_disp_yy", stress_disp_yy) + self.add_equation("stress_disp_xy", stress_disp_xy) if self.dim == 3: + self.add_equation("stress_disp_zz", stress_disp_zz) + self.add_equation("stress_disp_xz", stress_disp_xz) + self.add_equation("stress_disp_yz", stress_disp_yz) - def stress_disp_xz_compute_func(out): - x, z, u, w = out["x"], out["z"], out["u"], out["w"] - sigma_xz = out["sigma_xz"] - stress_disp_xz = self.mu * (jacobian(u, z) + jacobian(w, x)) - sigma_xz - return stress_disp_xz - - self.add_equation("stress_disp_xz", stress_disp_xz_compute_func) - - def stress_disp_yz_compute_func(out): - y, z, v, w = out["y"], out["z"], out["v"], out["w"] - sigma_yz = out["sigma_yz"] - stress_disp_yz = self.mu * (jacobian(v, z) + jacobian(w, y)) - sigma_yz - return stress_disp_yz - - self.add_equation("stress_disp_yz", stress_disp_yz_compute_func) - - # Equations of equilibrium - def equilibrium_x_compute_func(out): - x, y = out["x"], out["y"] - sigma_xx, sigma_xy = out["sigma_xx"], out["sigma_xy"] - equilibrium_x = -jacobian(sigma_xx, x) - jacobian(sigma_xy, y) - if self.dim == 3: - z, sigma_xz = out["z"], out["sigma_xz"] - equilibrium_x -= jacobian(sigma_xz, z) - if self.time: - t, u = out["t"], out["u"] - equilibrium_x += self.rho * hessian(u, t) - return equilibrium_x - - self.add_equation("equilibrium_x", equilibrium_x_compute_func) - - def equilibrium_y_compute_func(out): - x, y = out["x"], out["y"] - sigma_xy, sigma_yy = ( - out["sigma_xy"], - out["sigma_yy"], - ) - equilibrium_y = -jacobian(sigma_xy, x) - jacobian(sigma_yy, y) - if self.dim == 3: - z, sigma_yz = out["z"], out["sigma_yz"] - equilibrium_y -= jacobian(sigma_yz, z) - if self.time: - t, v = out["t"], out["v"] - equilibrium_y += self.rho * hessian(v, t) - return equilibrium_y - - self.add_equation("equilibrium_y", equilibrium_y_compute_func) - + # add equilibrium equations + self.add_equation("equilibrium_x", equilibrium_x) + self.add_equation("equilibrium_y", equilibrium_y) if self.dim == 3: + self.add_equation("equilibrium_z", equilibrium_z) - def equilibrium_z_compute_func(out): - x, y, z = out["x"], out["y"], out["z"] - sigma_xz, sigma_yz, sigma_zz = ( - out["sigma_xz"], - out["sigma_yz"], - out["sigma_zz"], - ) - equilibrium_z = ( - -jacobian(sigma_xz, x) - - jacobian(sigma_yz, y) - - jacobian(sigma_zz, z) - ) - if self.time: - t, w = 
out["t"], out["w"] - equilibrium_z += self.rho * hessian(w, t) - return equilibrium_z - - self.add_equation("equilibrium_z", equilibrium_z_compute_func) - - # Traction equations - def traction_x_compute_func(out): - normal_x, normal_y = ( - out["normal_x"], - out["normal_y"], - ) - sigma_xx, sigma_xy = ( - out["sigma_xx"], - out["sigma_xy"], - ) - traction_x = normal_x * sigma_xx + normal_y * sigma_xy - if self.dim == 3: - normal_z, sigma_xz = out["normal_z"], out["sigma_xz"] - traction_x += normal_z * sigma_xz - return traction_x - - self.add_equation("traction_x", traction_x_compute_func) - - def traction_y_compute_func(out): - normal_x, normal_y = ( - out["normal_x"], - out["normal_y"], - ) - sigma_xy, sigma_yy = ( - out["sigma_xy"], - out["sigma_yy"], - ) - traction_y = normal_x * sigma_xy + normal_y * sigma_yy - if self.dim == 3: - normal_z, sigma_yz = out["normal_z"], out["sigma_yz"] - traction_y += normal_z * sigma_yz - return traction_y - - self.add_equation("traction_y", traction_y_compute_func) - + # add traction equations + self.add_equation("traction_x", traction_x) + self.add_equation("traction_y", traction_y) if self.dim == 3: - - def traction_z_compute_func(out): - normal_x, normal_y, normal_z = ( - out["normal_x"], - out["normal_y"], - out["normal_z"], - ) - sigma_xz, sigma_yz, sigma_zz = ( - out["sigma_xz"], - out["sigma_yz"], - out["sigma_zz"], - ) - traction_z = ( - normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz - ) - return traction_z - - self.add_equation("traction_z", traction_z_compute_func) + self.add_equation("traction_z", traction_z) diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index fa5ed3f68..3cd10e3a7 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -17,8 +17,8 @@ from typing import Callable from typing import Union -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian +import sympy as sp + from ppsci.equation.pde import base @@ -70,79 +70,39 @@ def __init__(self, nu: Union[float, Callable], rho: float, dim: int, time: bool) self.dim = dim self.time = time - def continuity_compute_func(out): - x, y = out["x"], out["y"] - u, v = out["u"], out["v"] - continuity = jacobian(u, x) + jacobian(v, y) - if self.dim == 3: - z, w = out["z"], out["w"] - continuity += jacobian(w, z) - return continuity - - self.add_equation("continuity", continuity_compute_func) - - def momentum_x_compute_func(out): - nu = self.nu(out) if callable(self.nu) else self.nu - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - momentum_x = ( - u * jacobian(u, x) - + v * jacobian(u, y) - - nu * hessian(u, x) - - nu * hessian(u, y) - + 1 / rho * jacobian(p, x) - ) - if self.time: - t = out["t"] - momentum_x += jacobian(u, t) - if self.dim == 3: - z, w = out["z"], out["w"] - momentum_x += w * jacobian(u, z) - momentum_x -= nu * hessian(u, z) - return momentum_x - - self.add_equation("momentum_x", momentum_x_compute_func) - - def momentum_y_compute_func(out): - nu = self.nu(out) if callable(self.nu) else self.nu - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - momentum_y = ( - u * jacobian(v, x) - + v * jacobian(v, y) - - nu * hessian(v, x) - - nu * hessian(v, y) - + 1 / rho * jacobian(p, y) - ) - if self.time: - t = out["t"] - momentum_y += jacobian(v, t) - if self.dim == 3: - z, w = out["z"], out["w"] - momentum_y += w * jacobian(v, z) - momentum_y -= nu * hessian(v, z) - return momentum_y - - self.add_equation("momentum_y", 
momentum_y_compute_func) - - if self.dim == 3: - - def momentum_z_compute_func(out): - nu = self.nu(out) if callable(self.nu) else self.nu - x, y, z = out["x"], out["y"], out["z"] - u, v, w, p = out["u"], out["v"], out["w"], out["p"] - momentum_z = ( - u * jacobian(w, x) - + v * jacobian(w, y) - + w * jacobian(w, z) - - nu * hessian(w, x) - - nu * hessian(w, y) - - nu * hessian(w, z) - + 1 / rho * jacobian(p, z) - ) - if self.time: - t = out["t"] - momentum_z += jacobian(w, t) - return momentum_z - - self.add_equation("momentum_z", momentum_z_compute_func) + t, x, y, z = self.create_symbols("t x y z") + u, v, w, p = self.create_symbols("u v w p") + if self.dim == 2: + w = sp.Number(0) + if not time: + t = sp.Number(0) + + continuity = u.diff(x) + v.diff(y) + w.diff(z) + momentum_x = ( + u.diff(t) + + u * u.diff(x) + + v * u.diff(y) + + w * u.diff(z) + - nu * (u.diff(x).diff(x) + u.diff(y).diff(y) + u.diff(z).diff(z)) + + 1 / rho * p.diff(x) + ) + momentum_y = ( + u.diff(t) + + u * v.diff(x) + + v * v.diff(y) + + w * v.diff(z) + - nu * (v.diff(x).diff(x) + v.diff(y).diff(y) + v.diff(z).diff(z)) + + 1 / rho * p.diff(y) + ) + momentum_z = ( + u.diff(t) + + u * w.diff(x) + + v * w.diff(y) + + w * w.diff(z) + - nu * (w.diff(x).diff(x) + w.diff(y).diff(y) + w.diff(z).diff(z)) + + 1 / rho * p.diff(z) + ) + self.add_equation("continuity", continuity) + self.add_equation("momentum_x", momentum_x) + self.add_equation("momentum_y", momentum_y) + self.add_equation("momentum_z", momentum_z) diff --git a/ppsci/equation/pde/normal_dot_vec.py b/ppsci/equation/pde/normal_dot_vec.py index b71555fd0..3e23ac332 100644 --- a/ppsci/equation/pde/normal_dot_vec.py +++ b/ppsci/equation/pde/normal_dot_vec.py @@ -37,14 +37,15 @@ class NormalDotVec(base.PDE): def __init__(self, vec_keys: Tuple[str, ...]): super().__init__() + if not vec_keys: + raise ValueError(f"len(vec_keys)({len(vec_keys)}) should be larger than 0.") self.vec_keys = vec_keys - self.normal_keys = ("normal_x", "normal_y", "normal_z") - def normal_dot_vel_compute_func(out): - normal_dot_vel = 0 - for i, vec_key in enumerate(vec_keys): - normal_dot_vel += out[vec_key] * out[self.normal_keys[i]] + vec_vars = self.create_symbols(" ".join(vec_keys)) + normals = self.create_symbols("normal_x normal_y normal_z") - return normal_dot_vel + normal_dot_vec = 0 + for (vec, normal) in zip(vec_vars, normals): + normal_dot_vec += vec * normal - self.equations["normal_dot_vel"] = normal_dot_vel_compute_func + self.add_equation("normal_dot_vec", normal_dot_vec) diff --git a/ppsci/equation/pde/poisson.py b/ppsci/equation/pde/poisson.py index f2c6b9e02..3a8ccff44 100644 --- a/ppsci/equation/pde/poisson.py +++ b/ppsci/equation/pde/poisson.py @@ -14,7 +14,6 @@ from __future__ import annotations -from ppsci.autodiff import hessian from ppsci.equation.pde import base @@ -37,11 +36,11 @@ def __init__(self, dim: int): super().__init__() self.dim = dim - def poisson_compute_func(out): - invars = ("x", "y", "z")[: self.dim] - poisson = 0 - for invar in invars: - poisson += hessian(out["p"], out[invar]) - return poisson + invars = self.create_symbols("x y z")[: self.dim] + p = self.create_function("p", invars) - self.add_equation("poisson", poisson_compute_func) + poisson = 0 + for invar in invars: + poisson += p.diff(invar).diff(invar) + + self.add_equation("poisson", poisson) From ffd3a9317bba2670a8e637419d965d8236bedc48 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 6 Sep 2023 13:08:48 +0000 Subject: [PATCH 24/48] simplify code in solver --- 
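The sympy-based PDE classes rewritten in the preceding patches (Poisson, NormalDotVec, NavierStokes, LinearElasticity) all follow the same small pattern: create symbols for the independent variables with create_symbols, wrap each dependent variable in an undefined sympy function with create_function, differentiate symbolically, and register the residual with add_equation. The sketch below only illustrates that base.PDE pattern; the class name SimpleAdvection and the coefficient c are hypothetical and are not part of any patch in this series.

from ppsci.equation.pde import base


class SimpleAdvection(base.PDE):
    """Sketch only: first-order residual u_t + c * u_x, mirroring the classes above."""

    def __init__(self, c: float = 1.0):
        super().__init__()
        # independent variables as sympy symbols
        t, x = self.create_symbols("t x")
        # dependent variable as an undefined sympy function of (t, x)
        u = self.create_function("u", (t, x))
        # derivatives are taken symbolically here; actual autodiff happens later,
        # after the expression is converted by ppsci.utils.sym_to_func
        self.add_equation("advection", u.diff(t) + c * u.diff(x))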
ppsci/solver/solver.py | 42 +++++++++++++++++++----------------------- 1 file changed, 19 insertions(+), 23 deletions(-) diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index f409ba8e8..a732f4bac 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -315,38 +315,34 @@ def __init__( for equation in self.equation.values(): extra_parameters += list(equation.learnable_parameters) - if self.constraint: - for constraint_ in self.constraint.values(): - for name, expr in constraint_.output_expr.items(): + def convert_expr( + container_dict: Dict[ + str, + Union[ + ppsci.constraint.Constraint, + ppsci.validate.Validator, + ppsci.visualize.Visualizer, + ], + ] + ) -> None: + for container in container_dict.values(): + for name, expr in container.output_expr.items(): if isinstance(expr, sp.Basic): - constraint_.output_expr[name] = sym_to_func.sympy_to_function( + container.output_expr[name] = sym_to_func.sympy_to_function( expr, self.model, - constraint_.detach_keys, + container.detach_keys, extra_parameters, ) + if self.constraint: + convert_expr(self.constraint) + if self.validator: - for validator_ in self.validator.values(): - for name, expr in validator_.output_expr.items(): - if isinstance(expr, sp.Basic): - validator_.output_expr[name] = sym_to_func.sympy_to_function( - expr, - self.model, - None, - extra_parameters, - ) + convert_expr(self.validator) if self.visualizer: - for visualizer_ in self.visualizer.values(): - for name, expr in visualizer_.output_expr.items(): - if isinstance(expr, sp.Basic): - visualizer_.output_expr[name] = sym_to_func.sympy_to_function( - expr, - self.model, - None, - extra_parameters, - ) + convert_expr(self.visualizer) @staticmethod def from_config(cfg: Dict[str, Any]) -> Solver: From 256f31a677eb2a54ac483ab06a8500ab163cff15 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 04:51:49 +0000 Subject: [PATCH 25/48] update code --- examples/bracket/bracket.py | 24 +++---- examples/pipe/poiseuille_flow.py | 4 +- ppsci/constraint/boundary_constraint.py | 7 +- ppsci/constraint/initial_constraint.py | 7 +- ppsci/constraint/integral_constraint.py | 7 +- ppsci/constraint/interior_constraint.py | 9 ++- ppsci/constraint/periodic_constraint.py | 7 +- ppsci/equation/pde/base.py | 25 +++++-- ppsci/equation/pde/biharmonic.py | 23 ++++++- ppsci/equation/pde/laplace.py | 12 +++- ppsci/equation/pde/linear_elasticity.py | 3 + ppsci/equation/pde/navier_stokes.py | 39 ++++++++--- ppsci/equation/pde/normal_dot_vec.py | 8 ++- ppsci/equation/pde/poisson.py | 11 +-- ppsci/solver/solver.py | 2 +- ppsci/utils/sym_to_func.py | 91 ++++++++++++------------- 16 files changed, 179 insertions(+), 100 deletions(-) diff --git a/examples/bracket/bracket.py b/examples/bracket/bracket.py index cedf5c5e8..1d23841a8 100644 --- a/examples/bracket/bracket.py +++ b/examples/bracket/bracket.py @@ -127,15 +127,15 @@ support_interior_constraint = ppsci.constraint.InteriorConstraint( equation["LinearElasticity"].equations, { - "equilibrium_x": 0, - "equilibrium_y": 0, - "equilibrium_z": 0, "stress_disp_xx": 0, "stress_disp_yy": 0, "stress_disp_zz": 0, "stress_disp_xy": 0, "stress_disp_xz": 0, "stress_disp_yz": 0, + "equilibrium_x": 0, + "equilibrium_y": 0, + "equilibrium_z": 0, }, geom["geo"], {**train_dataloader_cfg, "batch_size": 2048}, @@ -149,30 +149,30 @@ & (z < BOUNDS_SUPPORT_Z[1]) ), weight_dict={ - "equilibrium_x": "sdf", - "equilibrium_y": "sdf", - "equilibrium_z": "sdf", "stress_disp_xx": "sdf", "stress_disp_yy": "sdf", "stress_disp_zz": 
"sdf", "stress_disp_xy": "sdf", "stress_disp_xz": "sdf", "stress_disp_yz": "sdf", + "equilibrium_x": "sdf", + "equilibrium_y": "sdf", + "equilibrium_z": "sdf", }, name="support_interior", ) bracket_interior_constraint = ppsci.constraint.InteriorConstraint( equation["LinearElasticity"].equations, { - "equilibrium_x": 0, - "equilibrium_y": 0, - "equilibrium_z": 0, "stress_disp_xx": 0, "stress_disp_yy": 0, "stress_disp_zz": 0, "stress_disp_xy": 0, "stress_disp_xz": 0, "stress_disp_yz": 0, + "equilibrium_x": 0, + "equilibrium_y": 0, + "equilibrium_z": 0, }, geom["geo"], {**train_dataloader_cfg, "batch_size": 1024}, @@ -186,15 +186,15 @@ & (z < BOUNDS_BRACKET_Z[1]) ), weight_dict={ - "equilibrium_x": "sdf", - "equilibrium_y": "sdf", - "equilibrium_z": "sdf", "stress_disp_xx": "sdf", "stress_disp_yy": "sdf", "stress_disp_zz": "sdf", "stress_disp_xy": "sdf", "stress_disp_xz": "sdf", "stress_disp_yz": "sdf", + "equilibrium_x": "sdf", + "equilibrium_y": "sdf", + "equilibrium_z": "sdf", }, name="bracket_interior", ) diff --git a/examples/pipe/poiseuille_flow.py b/examples/pipe/poiseuille_flow.py index 76b28e170..8b270bf54 100644 --- a/examples/pipe/poiseuille_flow.py +++ b/examples/pipe/poiseuille_flow.py @@ -133,9 +133,7 @@ def output_trans_p(input, out): # set euqation equation = { - "NavierStokes": ppsci.equation.NavierStokes( - nu=lambda out: out["nu"], rho=RHO, dim=2, time=False - ) + "NavierStokes": ppsci.equation.NavierStokes(nu="nu", rho=RHO, dim=2, time=False) } # set constraint diff --git a/ppsci/constraint/boundary_constraint.py b/ppsci/constraint/boundary_constraint.py index 205c31428..9df6e23fd 100644 --- a/ppsci/constraint/boundary_constraint.py +++ b/ppsci/constraint/boundary_constraint.py @@ -87,13 +87,16 @@ def __init__( name: str = "BC", ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): + for output_name, expr in self.output_expr.items(): if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) + self.output_expr[output_name] = sp_parser.parse_expr(expr) self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) + self.output_expr = { + k: v for k, v in output_expr.items() if k in self.output_keys + } # "area" will be kept in "output_dict" for computation. if isinstance(geom, geometry.Mesh): self.output_keys += ["area"] diff --git a/ppsci/constraint/initial_constraint.py b/ppsci/constraint/initial_constraint.py index cfcc89ec1..b7c867123 100644 --- a/ppsci/constraint/initial_constraint.py +++ b/ppsci/constraint/initial_constraint.py @@ -90,13 +90,16 @@ def __init__( name: str = "IC", ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): + for output_name, expr in self.output_expr.items(): if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) + self.output_expr[output_name] = sp_parser.parse_expr(expr) self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) + self.output_expr = { + k: v for k, v in output_expr.items() if k in self.output_keys + } # "area" will be kept in "output_dict" for computation. 
if isinstance(geom.geometry, geometry.Mesh): self.output_keys += ["area"] diff --git a/ppsci/constraint/integral_constraint.py b/ppsci/constraint/integral_constraint.py index fedce730a..4f655c000 100644 --- a/ppsci/constraint/integral_constraint.py +++ b/ppsci/constraint/integral_constraint.py @@ -87,13 +87,16 @@ def __init__( name: str = "IgC", ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): + for output_name, expr in self.output_expr.items(): if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) + self.output_expr[output_name] = sp_parser.parse_expr(expr) self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) + self.output_expr = { + k: v for k, v in output_expr.items() if k in self.output_keys + } # "area" will be kept in "output_dict" for computation. if isinstance(geom, geometry.Mesh): self.output_keys += ["area"] diff --git a/ppsci/constraint/interior_constraint.py b/ppsci/constraint/interior_constraint.py index 1a8474d74..18ee0c6bc 100644 --- a/ppsci/constraint/interior_constraint.py +++ b/ppsci/constraint/interior_constraint.py @@ -87,13 +87,16 @@ def __init__( name: str = "EQ", ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): - if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) + for output_name, expr in self.output_expr.items(): + if isinstance(expr, str) and output_name in label_dict: + self.output_expr[output_name] = sp_parser.parse_expr(expr) self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) + self.output_expr = { + k: v for k, v in output_expr.items() if k in self.output_keys + } # "area" will be kept in "output_dict" for computation. if isinstance(geom, geometry.Mesh): self.output_keys += ["area"] diff --git a/ppsci/constraint/periodic_constraint.py b/ppsci/constraint/periodic_constraint.py index cfbad8796..ba3451ad6 100644 --- a/ppsci/constraint/periodic_constraint.py +++ b/ppsci/constraint/periodic_constraint.py @@ -74,12 +74,15 @@ def __init__( name: str = "PeriodicBC", ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): + for output_name, expr in self.output_expr.items(): if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) + self.output_expr[output_name] = sp_parser.parse_expr(expr) self.input_keys = geom.dim_keys self.output_keys = list(output_expr.keys()) + self.output_expr = { + k: v for k, v in output_expr.items() if k in self.output_keys + } # "area" will be kept in "output_dict" for computation. if isinstance(geom, geometry.Mesh): self.output_keys += ["area"] diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index 005462936..eef4ceb03 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -17,6 +17,7 @@ from typing import Callable from typing import Dict from typing import List +from typing import Optional from typing import Tuple import paddle @@ -30,12 +31,11 @@ class PDE: def __init__(self): super().__init__() self.equations = {} - # self.equations_func = {} - # self.detach_keys = [] - # for PDE which has learnable parameter(s) self.learnable_parameters = nn.ParameterList() + self.detach_keys: Optional[Tuple[str, ...]] = None + def create_symbols(self, symbol_str: str) -> Tuple[sympy.Symbol, ...]: """Create symbols @@ -59,7 +59,24 @@ def create_function( Returns: sympy.Function: Named sympy function. 
""" - return sympy.Function(name)(*invars) + expr = sympy.Function(name)(*invars) + if self.detach_keys and name in self.detach_keys: + expr = sympy.Function("detach")(expr) + return expr + + def create_detach( + self, + expr: sympy.Basic, + ) -> sympy.Function: + """Create detach function for given expression. + + Args: + expr (sympy.Basic): Given expression to be detached. + + Returns: + sympy.Function: Detached expression. + """ + return sympy.Function("detach")(expr) def add_equation(self, name: str, equation: Callable): """Add an equation. diff --git a/ppsci/equation/pde/biharmonic.py b/ppsci/equation/pde/biharmonic.py index ffccab5c2..d1eb269fd 100644 --- a/ppsci/equation/pde/biharmonic.py +++ b/ppsci/equation/pde/biharmonic.py @@ -14,6 +14,9 @@ from __future__ import annotations +from typing import Optional +from typing import Tuple + from ppsci.equation.pde import base @@ -34,14 +37,28 @@ class Biharmonic(base.PDE): >>> pde = ppsci.equation.Biharmonic(2, -1.0, 1.0) """ - def __init__(self, dim: int, q: float, D: float): + def __init__( + self, + dim: int, + q: float, + D: float, + detach_keys: Optional[Tuple[str, ...]] = None, + ): super().__init__() + self.detach_keys = detach_keys + + invars = self.create_symbols("x y z")[:dim] + u = self.create_function("u", invars) + + if isinstance(q, str): + q = self.create_function("q", invars) + if isinstance(D, str): + D = self.create_function("D", invars) + self.dim = dim self.q = q self.D = D - invars = self.create_symbols("x y z")[: self.dim] - u = self.create_function("u", invars) biharmonic = -self.q / self.D for invar_i in invars: for invar_j in invars: diff --git a/ppsci/equation/pde/laplace.py b/ppsci/equation/pde/laplace.py index 6b1b4e0c8..45480986d 100644 --- a/ppsci/equation/pde/laplace.py +++ b/ppsci/equation/pde/laplace.py @@ -14,6 +14,9 @@ from __future__ import annotations +from typing import Optional +from typing import Tuple + from ppsci.equation.pde import base @@ -32,12 +35,15 @@ class Laplace(base.PDE): >>> pde = ppsci.equation.Laplace(2) """ - def __init__(self, dim: int): + def __init__(self, dim: int, detach_keys: Optional[Tuple[str, ...]] = None): super().__init__() - self.dim = dim - invars = self.create_symbols("x y z")[: self.dim] + self.detach_keys = detach_keys + + invars = self.create_symbols("x y z")[:dim] u = self.create_function("u", invars) + self.dim = dim + laplace = 0 for invar in invars: laplace += u.diff(invar).diff(invar) diff --git a/ppsci/equation/pde/linear_elasticity.py b/ppsci/equation/pde/linear_elasticity.py index 3207d9caf..8f2a668c0 100644 --- a/ppsci/equation/pde/linear_elasticity.py +++ b/ppsci/equation/pde/linear_elasticity.py @@ -15,6 +15,7 @@ from __future__ import annotations from typing import Optional +from typing import Tuple import sympy as sp @@ -61,8 +62,10 @@ def __init__( rho: float = 1, dim: int = 3, time: bool = False, + detach_keys: Optional[Tuple[str, ...]] = None, ): super().__init__() + self.detach_keys = detach_keys self.dim = dim self.time = time diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index 3cd10e3a7..aae9a8b84 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -15,10 +15,10 @@ from __future__ import annotations from typing import Callable +from typing import Optional +from typing import Tuple from typing import Union -import sympy as sp - from ppsci.equation.pde import base @@ -63,19 +63,37 @@ class NavierStokes(base.PDE): >>> pde = ppsci.equation.NavierStokes(0.1, 1.0, 3, 
False) """ - def __init__(self, nu: Union[float, Callable], rho: float, dim: int, time: bool): + def __init__( + self, + nu: Union[float, Callable], + rho: float, + dim: int, + time: bool, + detach_keys: Optional[Tuple[str, ...]] = None, + ): super().__init__() + self.detach_keys = detach_keys + t, x, y, z = self.create_symbols("t x y z") + invars = (x, y) + if time: + invars = (t,) + invars + if dim == 3: + invars += (z,) + self.nu = nu self.rho = rho self.dim = dim self.time = time - t, x, y, z = self.create_symbols("t x y z") - u, v, w, p = self.create_symbols("u v w p") - if self.dim == 2: - w = sp.Number(0) - if not time: - t = sp.Number(0) + if isinstance(nu, str): + nu = self.create_function(nu, invars) + if isinstance(rho, str): + rho = self.create_function(rho, invars) + + u = self.create_function("u", invars) + v = self.create_function("v", invars) + w = self.create_function("w", invars) + p = self.create_function("p", invars) continuity = u.diff(x) + v.diff(y) + w.diff(z) momentum_x = ( @@ -105,4 +123,5 @@ def __init__(self, nu: Union[float, Callable], rho: float, dim: int, time: bool) self.add_equation("continuity", continuity) self.add_equation("momentum_x", momentum_x) self.add_equation("momentum_y", momentum_y) - self.add_equation("momentum_z", momentum_z) + if self.dim == 3: + self.add_equation("momentum_z", momentum_z) diff --git a/ppsci/equation/pde/normal_dot_vec.py b/ppsci/equation/pde/normal_dot_vec.py index 3e23ac332..6abf7df5b 100644 --- a/ppsci/equation/pde/normal_dot_vec.py +++ b/ppsci/equation/pde/normal_dot_vec.py @@ -14,6 +14,7 @@ from __future__ import annotations +from typing import Optional from typing import Tuple from ppsci.equation.pde import base @@ -35,12 +36,15 @@ class NormalDotVec(base.PDE): >>> pde = ppsci.equation.NormalDotVec(("u", "v", "w")) """ - def __init__(self, vec_keys: Tuple[str, ...]): + def __init__( + self, vec_keys: Tuple[str, ...], detach_keys: Optional[Tuple[str, ...]] = None + ): super().__init__() + self.detach_keys = detach_keys if not vec_keys: raise ValueError(f"len(vec_keys)({len(vec_keys)}) should be larger than 0.") - self.vec_keys = vec_keys + self.vec_keys = vec_keys vec_vars = self.create_symbols(" ".join(vec_keys)) normals = self.create_symbols("normal_x normal_y normal_z") diff --git a/ppsci/equation/pde/poisson.py b/ppsci/equation/pde/poisson.py index 3a8ccff44..d949f636e 100644 --- a/ppsci/equation/pde/poisson.py +++ b/ppsci/equation/pde/poisson.py @@ -14,6 +14,9 @@ from __future__ import annotations +from typing import Optional +from typing import Tuple + from ppsci.equation.pde import base @@ -32,12 +35,12 @@ class Poisson(base.PDE): >>> pde = ppsci.equation.Poisson(2) """ - def __init__(self, dim: int): + def __init__(self, dim: int, detach_keys: Optional[Tuple[str, ...]] = None): super().__init__() - self.dim = dim - - invars = self.create_symbols("x y z")[: self.dim] + self.detach_keys = detach_keys + invars = self.create_symbols("x y z")[:dim] p = self.create_function("p", invars) + self.dim = dim poisson = 0 for invar in invars: diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index a732f4bac..c6a4a7832 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -331,9 +331,9 @@ def convert_expr( container.output_expr[name] = sym_to_func.sympy_to_function( expr, self.model, - container.detach_keys, extra_parameters, ) + logger.message(f"Convert expression[{name}]: {expr}") if self.constraint: convert_expr(self.constraint) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py 
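Taken together with the solver change above, the intended usage after this refactor looks roughly like the sketch below. It is based on the updated unit tests later in this series rather than on a stable public API; the batch size, network sizes and variable names are arbitrary. Each symbolic equation is converted once into a callable node that reads and updates a dict of tensors, with the network forward pass and the autodiff happening inside that node graph.

import paddle
import sympy as sp

import ppsci
from ppsci.utils import sym_to_func

# a small model producing "p" from (x, y), and a 2D Poisson equation defined symbolically
model = ppsci.arch.MLP(("x", "y"), ("p",), 2, 16)
poisson = ppsci.equation.Poisson(dim=2)

# convert each sympy expression into a callable that works on dicts of tensors
for name, expr in poisson.equations.items():
    if isinstance(expr, sp.Basic):
        poisson.equations[name] = sym_to_func.sympy_to_function(expr, model)

x = paddle.randn([8, 1])
y = paddle.randn([8, 1])
x.stop_gradient = False
y.stop_gradient = False
data_dict = {"x": x, "y": y}
data_dict.update(model(data_dict))  # adds the network output "p"
residual = poisson.equations["poisson"](data_dict)  # p_xx + p_yy evaluated via autodiff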
index 9c74b2a36..6f54926b0 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -19,7 +19,6 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING from typing import Dict from typing import List from typing import Optional @@ -32,31 +31,16 @@ from paddle import nn from typing_extensions import TypeAlias +from ppsci import arch from ppsci.autodiff import hessian from ppsci.autodiff import jacobian -if TYPE_CHECKING: - from ppsci import arch - - __all__ = [ "sympy_to_function", ] DATA_DICT: TypeAlias = Dict[str, paddle.Tensor] -PADDLE_FUNC_MAP = { - sp.sin: paddle.sin, - sp.cos: paddle.cos, - sp.exp: paddle.exp, - sp.Pow: paddle.pow, - sp.log: paddle.log, - sp.tan: paddle.tan, - sp.Max: paddle.maximum, - sp.Min: paddle.minimum, - sp.Abs: paddle.abs, - sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), -} SYMPY_BUILTIN_FUNC: TypeAlias = Union[ sp.sin, @@ -132,6 +116,27 @@ def __repr__(self): return f"{self.__class__.__name__}(expr: {self.expr})" +class DetachNode(nn.Layer): + """Class for detach node in converted expression tree. + + Args: + expr (sp.Basic): Sympy expression. + """ + + def __init__(self, expr: sp.Basic): + super().__init__() + self.expr = expr + self.key = _cvt_to_key(self.expr) + self.key_detach = self.key + "_detach" + + def forward(self, data_dict: DATA_DICT): + if self.key_detach in data_dict: + return data_dict + + data_dict[self.key_detach] = data_dict[self.key].detach() + return data_dict + + class OperatorNode(Node): """Class for operator node in converted expression tree. @@ -164,7 +169,7 @@ def __init__(self, expr: SYMPY_BUILTIN_FUNC): self._operator_func = self._vanilla_operator_func self._compute_func = PADDLE_FUNC_MAP[self.expr.func] - def forward(self, data_dict: Dict): + def forward(self, data_dict: DATA_DICT): # use cache if self.key in data_dict: return data_dict @@ -215,11 +220,9 @@ def __init__( self, expr: sp.core.function.UndefinedFunction, model: arch.Arch, - detach_keys: Optional[Tuple[str, ...]] = None, ): super().__init__(expr) self.model = model - self.detach_keys = detach_keys def forward(self, data_dict: DATA_DICT) -> DATA_DICT: # use cache @@ -229,11 +232,6 @@ def forward(self, data_dict: DATA_DICT) -> DATA_DICT: output_dict = self.model(data_dict) data_dict.update(output_dict) - # detach Tensor(s) if specified - if self.detach_keys: - for key in self.detach_keys: - data_dict[key] = data_dict[key].detach() - return data_dict @@ -336,7 +334,6 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: def sympy_to_function( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, - detach_keys: Optional[Tuple[str, ...]] = None, extra_parameters: Optional[Sequence[paddle.Tensor]] = None, ) -> ComposedNode: """Convert sympy expression to callable function. @@ -344,7 +341,7 @@ def sympy_to_function( Args: expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. - detach_keys (Optional[Tuple[str, ...]], optional): Keys which will be detached in computation. Defaults to None. + # detach_keys (Optional[Tuple[str, ...]], optional): Keys which will be detached in computation. Defaults to None. extra_parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. 
Returns: @@ -412,10 +409,10 @@ def sympy_to_function( # remove duplicates with topo-order kept sympy_nodes = list(dict.fromkeys(sympy_nodes)) + if isinstance(models, arch.ModelList): + models = tuple(models.model_list[i] for i in range(len(models.model_list))) if not isinstance(models, (tuple, list)): models = (models,) - if detach_keys is None: - detach_keys = () # convert sympy node to callable node callable_nodes = [] @@ -429,25 +426,25 @@ def sympy_to_function( ): callable_nodes.append(OperatorNode(node)) elif isinstance(node, sp.Function): - match_index = None - for j, model in enumerate(models): - if str(node.func.name) in model.output_keys: - callable_nodes.append( - LayerNode( - node, - model, - tuple( - key for key in detach_keys if key in model.output_keys - ), - ) - ) - if match_index is not None: - raise ValueError( - f"Name of function({node}) should be unique along given models," - f" but got same output_key({node.func.name}) in models[{match_index}]" - f" and models[{j}]." + if node.name == "detach": + callable_nodes.append(DetachNode(node)) + else: + match_index = None + for j, model in enumerate(models): + if str(node.func.name) in model.output_keys: + callable_nodes.append( + LayerNode( + node, + model, + ) ) - match_index = j + if match_index is not None: + raise ValueError( + f"Name of function({node}) should be unique along given models," + f" but got same output_key({node.func.name}) in models[{match_index}]" + f" and models[{j}]." + ) + match_index = j elif node.is_Number or node.is_NumberSymbol: callable_nodes.append(ConstantNode(node)) elif isinstance(node, sp.Symbol): From 3a870d9fd8d077ce3c19bd9b08a88bb2d8b40a61 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 05:20:20 +0000 Subject: [PATCH 26/48] rename 'normal_dot_vel' to 'normal_dot_vec' --- examples/aneurysm/aneurysm.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/aneurysm/aneurysm.py b/examples/aneurysm/aneurysm.py index 922f7f483..2d34ae0dc 100644 --- a/examples/aneurysm/aneurysm.py +++ b/examples/aneurysm/aneurysm.py @@ -132,7 +132,7 @@ def inlet_w_ref_func(_in): ) igc_outlet = ppsci.constraint.IntegralConstraint( equation["NormalDotVec"].equations, - {"normal_dot_vel": 2.54}, + {"normal_dot_vec": 2.54}, geom["outlet_geo"], { **train_dataloader_cfg, @@ -141,12 +141,12 @@ def inlet_w_ref_func(_in): "integral_batch_size": 310, }, ppsci.loss.IntegralLoss("sum"), - weight_dict={"normal_dot_vel": 0.1}, + weight_dict={"normal_dot_vec": 0.1}, name="igc_outlet", ) igc_integral = ppsci.constraint.IntegralConstraint( equation["NormalDotVec"].equations, - {"normal_dot_vel": -2.54}, + {"normal_dot_vec": -2.54}, geom["integral_geo"], { **train_dataloader_cfg, @@ -155,7 +155,7 @@ def inlet_w_ref_func(_in): "integral_batch_size": 310, }, ppsci.loss.IntegralLoss("sum"), - weight_dict={"normal_dot_vel": 0.1}, + weight_dict={"normal_dot_vec": 0.1}, name="igc_integral", ) # wrap constraints together From 08e92d36deb3e2f6dda3c8b2f40eecc9daaca224 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:19:57 +0000 Subject: [PATCH 27/48] fix bug --- ppsci/equation/pde/navier_stokes.py | 4 ++-- ppsci/utils/sym_to_func.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index aae9a8b84..d33d1b61a 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -105,7 +105,7 @@ def __init__( + 1 / rho 
* p.diff(x) ) momentum_y = ( - u.diff(t) + v.diff(t) + u * v.diff(x) + v * v.diff(y) + w * v.diff(z) @@ -113,7 +113,7 @@ def __init__( + 1 / rho * p.diff(y) ) momentum_z = ( - u.diff(t) + w.diff(t) + u * w.diff(x) + v * w.diff(y) + w * w.diff(z) diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 6f54926b0..6edf03f65 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -399,6 +399,8 @@ def sympy_to_function( sympy_nodes = _post_traverse(expr, sympy_nodes) # remove unnecessary symbol node for already in input dict(except for paramter symbol) + if not extra_parameters: + extra_parameters = () _parameter_names = tuple(param.name for param in extra_parameters) sympy_nodes = [ node From c2373cafffe68d8ed83fe1321c20deeb9f809368 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:20:38 +0000 Subject: [PATCH 28/48] update unitest --- test/equation/test_biharmonic.py | 17 ++++--- test/equation/test_laplace.py | 18 +++++--- test/equation/test_linear_elasticity.py | 59 ++++++++++++++++++++---- test/equation/test_navier_stokes.py | 61 ++++++++++++------------- test/equation/test_normal_dot_vec.py | 34 ++++++++++++-- test/equation/test_poisson.py | 18 +++++--- test/equation/test_viv.py | 30 ++++++++---- 7 files changed, 164 insertions(+), 73 deletions(-) diff --git a/test/equation/test_biharmonic.py b/test/equation/test_biharmonic.py index 314393844..c4a210576 100644 --- a/test/equation/test_biharmonic.py +++ b/test/equation/test_biharmonic.py @@ -1,8 +1,10 @@ import paddle import pytest -from paddle import nn +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func __all__ = [] @@ -29,13 +31,10 @@ def test_biharmonic(dim): input_data = paddle.concat([x, y, z], axis=1) # build NN model - model = nn.Sequential( - nn.Linear(len(input_dims), len(output_dims)), - nn.Tanh(), - ) + model = arch.MLP(input_dims, output_dims, 2, 16) # manually generate output - u = model(input_data) + u = model.forward_tensor(input_data) # use self-defined jacobian and hessian def jacobian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": @@ -57,6 +56,12 @@ def hessian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": # compute result using built-in Biharmonic module biharmonic_equation = equation.Biharmonic(dim=dim, q=q, D=D) + for name, expr in biharmonic_equation.equations.items(): + if isinstance(expr, sp.Basic): + biharmonic_equation.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) data_dict = { "x": x, "y": y, diff --git a/test/equation/test_laplace.py b/test/equation/test_laplace.py index ce41e47ce..257ec6b25 100644 --- a/test/equation/test_laplace.py +++ b/test/equation/test_laplace.py @@ -1,8 +1,10 @@ import paddle import pytest -from paddle import nn +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func __all__ = [] @@ -26,13 +28,10 @@ def test_l1loss_mean(dim): input_data = paddle.concat([x, y, z], axis=1) # build NN model - model = nn.Sequential( - nn.Linear(len(input_dims), len(output_dims)), - nn.Tanh(), - ) + model = arch.MLP(input_dims, output_dims, 2, 16) # manually generate output - u = model(input_data) + u = model.forward_tensor(input_data) # use self-defined jacobian and hessian def jacobian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": @@ -48,6 +47,13 @@ def hessian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": # compute result using built-in Laplace 
module laplace_equation = equation.Laplace(dim=dim) + for name, expr in laplace_equation.equations.items(): + if isinstance(expr, sp.Basic): + laplace_equation.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) + data_dict = { "x": x, "y": y, diff --git a/test/equation/test_linear_elasticity.py b/test/equation/test_linear_elasticity.py index c444effa3..c157f4934 100644 --- a/test/equation/test_linear_elasticity.py +++ b/test/equation/test_linear_elasticity.py @@ -1,8 +1,10 @@ import paddle import pytest -from paddle import nn +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: @@ -124,6 +126,30 @@ def traction_z_expected_result( ) def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): batch_size = 13 + input_dims = ("x", "y", "z")[:dim] + if time: + input_dims += ("t",) + output_dims = ( + ( + "u", + "v", + "sigma_xx", + "sigma_yy", + "sigma_xy", + ) + if dim == 2 + else ( + "u", + "v", + "w", + "sigma_xx", + "sigma_yy", + "sigma_xy", + "sigma_zz", + "sigma_xz", + "sigma_yz", + ) + ) x = paddle.randn([batch_size, 1]) y = paddle.randn([batch_size, 1]) z = paddle.randn([batch_size, 1]) if dim == 3 else None @@ -145,12 +171,14 @@ def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): if dim == 3: input_data = paddle.concat([input_data, z], axis=1) - model = nn.Sequential( - nn.Linear(input_data.shape[1], 9 if dim == 3 else 5), - nn.Tanh(), - ) + model = arch.MLP(input_dims, output_dims, 2, 16) + + # model = nn.Sequential( + # nn.Linear(input_data.shape[1], 9 if dim == 3 else 5), + # nn.Tanh(), + # ) - output = model(input_data) + output = model.forward_tensor(input_data) u, v, *other_outputs = paddle.split(output, num_or_sections=output.shape[1], axis=1) @@ -201,15 +229,20 @@ def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): linear_elasticity = equation.LinearElasticity( E=E, nu=nu, lambda_=lambda_, mu=mu, rho=rho, dim=dim, time=time ) - + for name, expr in linear_elasticity.equations.items(): + if isinstance(expr, sp.Basic): + linear_elasticity.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) data_dict = { + "t": t, "x": x, "y": y, + "z": z, "u": u, "v": v, - "z": z, "w": w, - "t": t, "sigma_xx": sigma_xx, "sigma_xy": sigma_xy, "sigma_xz": sigma_xz, @@ -220,6 +253,14 @@ def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): "normal_y": normal_y, "normal_z": normal_z, } + if not time: + data_dict.pop("t") + if dim == 2: + data_dict.pop("w") + data_dict.pop("sigma_xz") + data_dict.pop("sigma_yz") + data_dict.pop("sigma_zz") + data_dict.pop("normal_z") test_output_names = [ "stress_disp_xx", diff --git a/test/equation/test_navier_stokes.py b/test/equation/test_navier_stokes.py index 9888fb5e1..9f2bfe282 100644 --- a/test/equation/test_navier_stokes.py +++ b/test/equation/test_navier_stokes.py @@ -1,8 +1,10 @@ import paddle import pytest -from paddle import nn +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: @@ -26,8 +28,8 @@ def momentum_x_compute_func( momentum_x = ( u * jacobian(u, x) + v * jacobian(u, y) - - nu / rho * hessian(u, x) - - nu / rho * hessian(u, y) + - nu * hessian(u, x) + - nu * hessian(u, y) + 1 / rho * jacobian(p, x) ) @@ -35,7 +37,7 @@ def momentum_x_compute_func( momentum_x += jacobian(u, t) if dim == 3: momentum_x += w * jacobian(u, z) - momentum_x 
-= nu / rho * hessian(u, z) + momentum_x -= nu * hessian(u, z) return momentum_x @@ -45,8 +47,8 @@ def momentum_y_compute_func( momentum_y = ( u * jacobian(v, x) + v * jacobian(v, y) - - nu / rho * hessian(v, x) - - nu / rho * hessian(v, y) + - nu * hessian(v, x) + - nu * hessian(v, y) + 1 / rho * jacobian(p, y) ) @@ -54,7 +56,7 @@ def momentum_y_compute_func( momentum_y += jacobian(v, t) if dim == 3: momentum_y += w * jacobian(v, z) - momentum_y -= nu / rho * hessian(v, z) + momentum_y -= nu * hessian(v, z) return momentum_y @@ -65,9 +67,9 @@ def momentum_z_compute_func( u * jacobian(w, x) + v * jacobian(w, y) + w * jacobian(w, z) - - nu / rho * hessian(w, x) - - nu / rho * hessian(w, y) - - nu / rho * hessian(w, z) + - nu * hessian(w, x) + - nu * hessian(w, y) + - nu * hessian(w, z) + 1 / rho * jacobian(p, z) ) if time: @@ -91,40 +93,33 @@ def test_navierstokes(nu, rho, dim, time): y = paddle.randn([batch_size, 1]) x.stop_gradient = False y.stop_gradient = False - input_dims = 2 + + input_dims = ("x", "y") + output_dims = ("u", "v", "p") if dim == 2 else ("u", "v", "w", "p") inputs = (x, y) + if time: t = paddle.randn([batch_size, 1]) t.stop_gradient = False inputs = (t,) + inputs - input_dims += 1 + input_dims = ("t",) + input_dims if dim == 3: z = paddle.randn([batch_size, 1]) z.stop_gradient = False inputs = inputs + (z,) - input_dims += 1 + input_dims = input_dims + ("z",) input_data = paddle.concat(inputs, axis=1) - """ - Use the relatively simple Multilayer Perceptron - to represent the mapping function from (t, x, y, z) to (u, v, w, p): - f(x, y) = (u, v, p) or - f(t, x, y) = (u, v, p) or - f(t, x, y, z) = (u, v, w, p) - """ - model = nn.Sequential( - nn.Linear(input_dims, 3 if dim == 2 else 4), - nn.Tanh(), - ) + model = arch.MLP(input_dims, output_dims, 2, 16) # manually generate output - output = model(input_data) + output = model.forward_tensor(input_data) if dim == 2: - u, v, p = paddle.split(output, num_or_sections=output.shape[1], axis=1) + u, v, p = paddle.split(output, num_or_sections=len(output_dims), axis=1) w, z = None, None else: - u, v, w, p = paddle.split(output, num_or_sections=output.shape[1], axis=1) + u, v, w, p = paddle.split(output, num_or_sections=len(output_dims), axis=1) if not time: t = None expected_continuity = continuity_compute_func(x=x, y=y, u=u, v=v, dim=dim, w=w, z=z) @@ -141,6 +136,12 @@ def test_navierstokes(nu, rho, dim, time): # compute result using NavierStokes class navier_stokes_equation = equation.NavierStokes(nu=nu, rho=rho, dim=dim, time=time) + for name, expr in navier_stokes_equation.equations.items(): + if isinstance(expr, sp.Basic): + navier_stokes_equation.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) data_dict = {"x": x, "y": y, "u": u, "v": v, "p": p} if time: @@ -156,9 +157,7 @@ def test_navierstokes(nu, rho, dim, time): ] if dim == 3: - test_output_names.append( - "momentum_z", - ) + test_output_names.append("momentum_z") test_output = {} for name in test_output_names: @@ -174,7 +173,7 @@ def test_navierstokes(nu, rho, dim, time): # check result whether is equal for name in test_output_names: - assert paddle.allclose(expected_output[name], test_output[name]) + assert paddle.allclose(expected_output[name], test_output[name]), f"{name}" if __name__ == "__main__": diff --git a/test/equation/test_normal_dot_vec.py b/test/equation/test_normal_dot_vec.py index 6b930a271..448a9834c 100644 --- a/test/equation/test_normal_dot_vec.py +++ b/test/equation/test_normal_dot_vec.py @@ -1,7 +1,10 @@ import paddle import 
pytest +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func def compute_func(x: tuple, y: tuple): @@ -13,15 +16,34 @@ def compute_func(x: tuple, y: tuple): def test_normal_dot_vel(): batch_size = 13 - u = paddle.randn([batch_size, 1]) - v = paddle.randn([batch_size, 1]) - w = paddle.randn([batch_size, 1]) + x = paddle.randn([batch_size, 1]) + y = paddle.randn([batch_size, 1]) + z = paddle.randn([batch_size, 1]) + input_dims = ("x", "y", "z") + output_dims = ("u", "v", "w") + model = arch.MLP(input_dims, output_dims, 2, 16) + output_dict = model( + { + "x": x, + "y": y, + "z": z, + } + ) + u = output_dict["u"] + v = output_dict["v"] + w = output_dict["w"] normal_x = paddle.randn([batch_size, 1]) normal_y = paddle.randn([batch_size, 1]) normal_z = paddle.randn([batch_size, 1]) - pde = equation.NormalDotVec(("u", "v", "w")) + norm_doc_vec = equation.NormalDotVec(output_dims) + for name, expr in norm_doc_vec.equations.items(): + if isinstance(expr, sp.Basic): + norm_doc_vec.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) out = { "u": u, "v": v, @@ -32,7 +54,9 @@ def test_normal_dot_vel(): } expected_result = compute_func((u, v, w), (normal_x, normal_y, normal_z)) - assert paddle.allclose(pde.equations["normal_dot_vel"](out), expected_result) + assert paddle.allclose( + norm_doc_vec.equations["normal_dot_vec"](out), expected_result + ) if __name__ == "__main__": diff --git a/test/equation/test_poisson.py b/test/equation/test_poisson.py index 502acb310..d3bedf81d 100644 --- a/test/equation/test_poisson.py +++ b/test/equation/test_poisson.py @@ -14,9 +14,11 @@ import paddle import pytest -from paddle import nn +import sympy as sp +from ppsci import arch from ppsci import equation +from ppsci.utils import sym_to_func __all__ = [] @@ -40,13 +42,10 @@ def test_poisson(dim): input_data = paddle.concat([x, y, z], axis=1) # build NN model - model = nn.Sequential( - nn.Linear(len(input_dims), len(output_dims)), - nn.Tanh(), - ) + model = arch.MLP(input_dims, output_dims, 2, 16) # manually generate output - p = model(input_data) + p = model.forward_tensor(input_data) def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: return paddle.grad(y, x, create_graph=True)[0] @@ -61,6 +60,13 @@ def hessian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: # compute result using built-in Laplace module poisson_equation = equation.Poisson(dim=dim) + for name, expr in poisson_equation.equations.items(): + if isinstance(expr, sp.Basic): + poisson_equation.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + ) + data_dict = { "x": x, "y": y, diff --git a/test/equation/test_viv.py b/test/equation/test_viv.py index b5567727d..363835d99 100644 --- a/test/equation/test_viv.py +++ b/test/equation/test_viv.py @@ -1,9 +1,11 @@ import paddle import pytest -from paddle import nn +import sympy as sp from paddle.nn import initializer +from ppsci import arch from ppsci.equation.pde import Vibration +from ppsci.utils import sym_to_func @pytest.mark.parametrize("rho,k1,k2", [(1.0, 4.0, -1.0)]) @@ -11,13 +13,15 @@ def test_vibration(rho, k1, k2): """Test for Vibration equation.""" batch_size = 13 rho = rho - k1 = paddle.create_parameter( + k11 = paddle.create_parameter( shape=[], dtype=paddle.get_default_dtype(), + name="k11", default_initializer=initializer.Constant(k1), ) - k2 = paddle.create_parameter( + k22 = paddle.create_parameter( shape=[], + name="k22", dtype=paddle.get_default_dtype(), 
default_initializer=initializer.Constant(k2), ) @@ -27,13 +31,12 @@ def test_vibration(rho, k1, k2): eta.stop_gradient = False t_f.stop_gradient = False input_data = paddle.concat([eta, t_f], axis=1) - model = nn.Sequential( - nn.Linear(2, 1), - nn.Tanh(), - ) + input_dims = ("eta", "t_f") + output_dims = ("f",) + model = arch.MLP(input_dims, output_dims, 2, 16) # manually generate output - eta = model(input_data) + eta = model.forward_tensor(input_data) def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: return paddle.grad(y, x, create_graph=True)[0] @@ -43,12 +46,19 @@ def hessian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: expected_result = ( rho * hessian(eta, t_f) - + paddle.exp(k1) * jacobian(eta, t_f) - + paddle.exp(k2) * eta + + paddle.exp(k11) * jacobian(eta, t_f) + + paddle.exp(k22) * eta ) # compute result using Vibration class vibration_equation = Vibration(rho=rho, k1=k1, k2=k2) + for name, expr in vibration_equation.equations.items(): + if isinstance(expr, sp.Basic): + vibration_equation.equations[name] = sym_to_func.sympy_to_function( + expr, + model, + vibration_equation.learnable_parameters, + ) data_dict = {"eta": eta, "t_f": t_f} test_result = vibration_equation.equations["f"](data_dict) # check result whether is equal From 051dcd6037957391e4615302ef66f5159e7c6487 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:28:50 +0000 Subject: [PATCH 29/48] remove redundant unitest --- ppsci/utils/sym_to_func.py | 1 - test/utils/speed_test_navier_stokes.py | 476 ------------------ test/utils/test_linear_elasticity_sympy.py | 243 ---------- test/utils/test_navier_stokes_sympy.py | 540 --------------------- 4 files changed, 1260 deletions(-) delete mode 100644 test/utils/speed_test_navier_stokes.py delete mode 100644 test/utils/test_linear_elasticity_sympy.py delete mode 100644 test/utils/test_navier_stokes_sympy.py diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 6edf03f65..1e634698b 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -341,7 +341,6 @@ def sympy_to_function( Args: expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. - # detach_keys (Optional[Tuple[str, ...]], optional): Keys which will be detached in computation. Defaults to None. extra_parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. 
Returns: diff --git a/test/utils/speed_test_navier_stokes.py b/test/utils/speed_test_navier_stokes.py deleted file mode 100644 index 838a38c74..000000000 --- a/test/utils/speed_test_navier_stokes.py +++ /dev/null @@ -1,476 +0,0 @@ -import time as time_module - -import paddle -import sympy as sp - -from ppsci import arch -from ppsci import equation -from ppsci.autodiff import clear -from ppsci.autodiff import hessian as H -from ppsci.autodiff import jacobian as J -from ppsci.utils import sym_to_func - - -class NavierStokes_sympy: - def __init__(self, nu, rho, dim, time): - # set params - self.dim = dim - self.time = time - - # coordinates - x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") - - # time - t = sp.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sp.Function("u")(*input_variables) - v = sp.Function("v")(*input_variables) - if self.dim == 3: - w = sp.Function("w")(*input_variables) - else: - w = sp.Number(0) - - # pressure - p = sp.Function("p")(*input_variables) - - # kinematic viscosity - if isinstance(nu, str): - nu = sp.Function(nu)(*input_variables) - elif isinstance(nu, (float, int)): - nu = sp.Number(nu) - - # density - if isinstance(rho, str): - rho = sp.Function(rho)(*input_variables) - elif isinstance(rho, (float, int)): - rho = sp.Number(rho) - - # dynamic viscosity - mu = rho * nu - - # set equations - self.equations = {} - self.equations["continuity"] = ( - rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) - ) - - curl = sp.Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) - self.equations["momentum_x"] = ( - (rho * u).diff(t) - + ( - u * ((rho * u).diff(x)) - + v * ((rho * u).diff(y)) - + w * ((rho * u).diff(z)) - + rho * u * (curl) - ) - + p.diff(x) - - (-2 / 3 * mu * (curl)).diff(x) - - (mu * u.diff(x)).diff(x) - - (mu * u.diff(y)).diff(y) - - (mu * u.diff(z)).diff(z) - - (mu * (curl).diff(x)) - ) - self.equations["momentum_y"] = ( - (rho * v).diff(t) - + ( - u * ((rho * v).diff(x)) - + v * ((rho * v).diff(y)) - + w * ((rho * v).diff(z)) - + rho * v * (curl) - ) - + p.diff(y) - - (-2 / 3 * mu * (curl)).diff(y) - - (mu * v.diff(x)).diff(x) - - (mu * v.diff(y)).diff(y) - - (mu * v.diff(z)).diff(z) - - (mu * (curl).diff(y)) - ) - self.equations["momentum_z"] = ( - (rho * w).diff(t) - + ( - u * ((rho * w).diff(x)) - + v * ((rho * w).diff(y)) - + w * ((rho * w).diff(z)) - + rho * w * (curl) - ) - + p.diff(z) - - (-2 / 3 * mu * (curl)).diff(z) - - (mu * w.diff(x)).diff(x) - - (mu * w.diff(y)).diff(y) - - (mu * w.diff(z)).diff(z) - - (mu * (curl).diff(z)) - ) - - if self.dim == 2: - self.equations.pop("momentum_z") - - -class ZeroEquation_sympy: - def __init__( - self, nu, max_distance, rho=1, dim=3, time=True - ): # TODO add density into model - # set params - self.dim = dim - self.time = time - - # model coefficients - self.max_distance = max_distance - self.karman_constant = 0.419 - self.max_distance_ratio = 0.09 - - # coordinates - x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") - - # time - t = sp.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sp.Function("u")(*input_variables) - v = sp.Function("v")(*input_variables) - if self.dim == 3: - w = sp.Function("w")(*input_variables) - 
else: - w = sp.Number(0) - - # density - if type(rho) is str: - rho = sp.Function(rho)(*input_variables) - elif type(rho) in [float, int]: - rho = sp.Number(rho) - - # wall distance - normal_distance = sp.Function("sdf")(*input_variables) - - # mixing length - mixing_length = sp.Min( - self.karman_constant * normal_distance, - self.max_distance_ratio * self.max_distance, - ) - G = ( - 2 * u.diff(x) ** 2 - + 2 * v.diff(y) ** 2 - + 2 * w.diff(z) ** 2 - + (u.diff(y) + v.diff(x)) ** 2 - + (u.diff(z) + w.diff(x)) ** 2 - + (v.diff(z) + w.diff(y)) ** 2 - ) - - # set equations - self.equations = {} - self.equations["nu"] = nu + rho * mixing_length**2 * sp.sqrt(G) - - -def compute_with_sympy(input_dicts, nu, rho, dim, time, model): - """Test for navier_stokes equation.""" - # define input/output keys - ze = ZeroEquation_sympy(nu=nu, rho=rho, dim=dim, max_distance=3.4, time=time) - nu_sympy = ze.equations["nu"] - - input_keys = ("x", "y", "z")[:dim] - if time: - input_keys = ("t",) + input_keys - - output_keys = ("u", "v") - if dim == 3: - output_keys += ("w",) - output_keys += ("p",) - - # prepare input data in dict - cost_list = [] - # prepare python function expressions and sympy-expression in dict - sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations - for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = sym_to_func.sympy_to_function( - expr, - [ - model, - ], - ) - for i, input_dict in enumerate(input_dicts): - input_dict = input_dicts[i] - - # compute equation with funciton converted from sympy - output_dict_sympy = {k: v for k, v in input_dict.items()} - tmp = {k: v for k, v in output_dict_sympy.items()} - beg = time_module.perf_counter() - for name, expr in sympy_expr_dict.items(): - output = expr(tmp) - output_dict_sympy[name] = output - for key in model.output_keys: - output_dict_sympy[key] = tmp[key] - clear() - end = time_module.perf_counter() - cost_list.append(end - beg) - - # test for result - print( - f"compute_with_sympy overhead: {sum(cost_list[10:]) / len(cost_list[10:]):.5f}" - ) - return output_dict_sympy - - -def compute_with_pyfunc(input_dicts, nu, rho, dim, time, model): - def continuity_f(out): - x, y = out["x"], out["y"] - u, v = out["u"], out["v"] - return 1.0 * J(u, x) + 1.0 * J(v, y) - - def momentum_x_f(out): - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - if time: - t = out["t"] - return ( - -( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(u, x) - - ( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(u, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 - + 2 * J(u, x) * J(J(u, x), y) - + 2 * J(v, y) * H(v, y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__y"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(u, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 - + 2 * J(u, x) * H(u, x) - + 2 * J(v, y) 
* J(J(v, x), y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__x"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(u, x) - + (1.0 * u * J(u, x) + 1.0 * v * J(u, y) + J(p, x)) - + (J(u, t) if time else 0) - ) - - def momentum_y_f(out): - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - if time: - t = out["t"] - return ( - -( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(v, x) - - ( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(v, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 - + 2 * J(u, x) * J(J(u, x), y) - + 2 * J(v, y) * H(v, y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__y"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(v, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 - + 2 * J(u, x) * H(u, x) - + 2 * J(v, y) * J(J(v, x), y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__x"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(v, x) - + (1.0 * u * J(v, x) + 1.0 * v * J(v, y) + J(p, y)) - + (J(v, t) if time else 0) - ) - - """Test for navier_stokes equation.""" - # define input/output keys - - # prepare input data in dict - cost_list = [] - for i, input_dict in enumerate(input_dicts): - input_dict = input_dicts[i] - - # prepare python function expressions in dict - functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations - functional_expr_dict["continuity"] = continuity_f - functional_expr_dict["momentum_x"] = momentum_x_f - functional_expr_dict["momentum_y"] = momentum_y_f - - # compute equation with python function - output_dict_functional = model(input_dict) - beg = time_module.perf_counter() - for name, expr in functional_expr_dict.items(): - if callable(expr): - output_dict_functional[name] = expr( - {**output_dict_functional, **input_dict} - ) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") - clear() - end = time_module.perf_counter() - cost_list.append(end - beg) - - # test for result - print( - f"compute_with_pyfunc overhead: {sum(cost_list[10:]) / len(cost_list[10:]):.5f}" - ) - return output_dict_functional - - -if __name__ == "__main__": - input_keys = ("t", "x", "y") - output_keys = ("u", "v", "p") - nu = 2 - 
rho = 1 - dim = 2 - time = True - model = arch.MLP(input_keys, output_keys, 4, 50) - - batch_size = 2048 - input_dicts = [] - for i in range(50): - input_dict = {} - for var in input_keys: - input_dict[var] = paddle.randn([batch_size, 1]) - input_dict[var].stop_gradient = False - if var != "t": - input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) - input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) - - input_dict[f"sdf__{var}"].stop_gradient = False - input_dict[f"normal__{var}"].stop_gradient = False - - input_dict["sdf"] = paddle.randn([batch_size, 1]) - input_dict["sdf"].stop_gradient = False - input_dicts.append(input_dict) - - output_dict_sympy = compute_with_sympy( - input_dicts, nu=nu, rho=rho, dim=dim, time=time, model=model - ) - output_dict_pyfunc = compute_with_pyfunc( - input_dicts, nu=nu, rho=rho, dim=dim, time=time, model=model - ) - - for key in output_dict_pyfunc: - if not paddle.allclose( - output_dict_sympy[key], output_dict_pyfunc[key], atol=1e-7 - ): - print(f"{key} {output_dict_sympy[key]}\n{output_dict_pyfunc[key]}") - else: - print(f"{key} check pass") diff --git a/test/utils/test_linear_elasticity_sympy.py b/test/utils/test_linear_elasticity_sympy.py deleted file mode 100644 index aeddf7f53..000000000 --- a/test/utils/test_linear_elasticity_sympy.py +++ /dev/null @@ -1,243 +0,0 @@ -# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import paddle -import pytest -from sympy import Function -from sympy import Number -from sympy import Symbol - -import ppsci -from ppsci import equation -from ppsci.autodiff import clear -from ppsci.utils import sym_to_func - -__all__ = [] - - -class LinearElasticity_sympy: - def __init__( - self, E=None, nu=None, lambda_=None, mu=None, rho=1, dim=3, time=False - ): - - # set params - self.dim = dim - self.time = time - - # coordinates - x, y, z = Symbol("x"), Symbol("y"), Symbol("z") - normal_x, normal_y, normal_z = ( - Symbol("normal_x"), - Symbol("normal_y"), - Symbol("normal_z"), - ) - - # time - t = Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # displacement componets - u = Function("u")(*input_variables) - v = Function("v")(*input_variables) - sigma_xx = Function("sigma_xx")(*input_variables) - sigma_yy = Function("sigma_yy")(*input_variables) - sigma_xy = Function("sigma_xy")(*input_variables) - if self.dim == 3: - w = Function("w")(*input_variables) - sigma_zz = Function("sigma_zz")(*input_variables) - sigma_xz = Function("sigma_xz")(*input_variables) - sigma_yz = Function("sigma_yz")(*input_variables) - else: - w = Number(0) - sigma_zz = Number(0) - sigma_xz = Number(0) - sigma_yz = Number(0) - - # material properties - if lambda_ is None: - if isinstance(nu, str): - nu = Function(nu)(*input_variables) - elif isinstance(nu, (float, int)): - nu = Number(nu) - if isinstance(E, str): - E = Function(E)(*input_variables) - elif isinstance(E, (float, int)): - E = Number(E) - lambda_ = nu * E / ((1 + nu) * (1 - 2 * nu)) - mu = E / (2 * (1 + nu)) - else: - if isinstance(lambda_, str): - lambda_ = Function(lambda_)(*input_variables) - elif isinstance(lambda_, (float, int)): - lambda_ = Number(lambda_) - if isinstance(mu, str): - mu = Function(mu)(*input_variables) - elif isinstance(mu, (float, int)): - mu = Number(mu) - if isinstance(rho, str): - rho = Function(rho)(*input_variables) - elif isinstance(rho, (float, int)): - rho = Number(rho) - - # set equations - self.equations = {} - - # Stress equations - self.equations["stress_disp_xx"] = ( - lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) - + 2 * mu * u.diff(x) - - sigma_xx - ) - self.equations["stress_disp_yy"] = ( - lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) - + 2 * mu * v.diff(y) - - sigma_yy - ) - self.equations["stress_disp_zz"] = ( - lambda_ * (u.diff(x) + v.diff(y) + w.diff(z)) - + 2 * mu * w.diff(z) - - sigma_zz - ) - self.equations["stress_disp_xy"] = mu * (u.diff(y) + v.diff(x)) - sigma_xy - self.equations["stress_disp_xz"] = mu * (u.diff(z) + w.diff(x)) - sigma_xz - self.equations["stress_disp_yz"] = mu * (v.diff(z) + w.diff(y)) - sigma_yz - - # Equations of equilibrium - self.equations["equilibrium_x"] = rho * ((u.diff(t)).diff(t)) - ( - sigma_xx.diff(x) + sigma_xy.diff(y) + sigma_xz.diff(z) - ) - self.equations["equilibrium_y"] = rho * ((v.diff(t)).diff(t)) - ( - sigma_xy.diff(x) + sigma_yy.diff(y) + sigma_yz.diff(z) - ) - self.equations["equilibrium_z"] = rho * ((w.diff(t)).diff(t)) - ( - sigma_xz.diff(x) + sigma_yz.diff(y) + sigma_zz.diff(z) - ) - - # Traction equations - self.equations["traction_x"] = ( - normal_x * sigma_xx + normal_y * sigma_xy + normal_z * sigma_xz - ) - self.equations["traction_y"] = ( - normal_x * sigma_xy + normal_y * sigma_yy + normal_z * sigma_yz - ) - self.equations["traction_z"] = ( - normal_x * sigma_xz + normal_y * sigma_yz + normal_z * sigma_zz 
- ) - - if self.dim == 2: - self.equations.pop("stress_disp_zz") - self.equations.pop("stress_disp_xz") - self.equations.pop("stress_disp_yz") - self.equations.pop("equilibrium_z") - self.equations.pop("traction_z") - - -@pytest.mark.parametrize( - "E,nu,lambda_,mu", - ( - (2.0, 3.0, None, None), - (None, None, 2.0, 3.0), - ), -) -@pytest.mark.parametrize("rho", (1,)) -@pytest.mark.parametrize("dim", (2, 3)) -@pytest.mark.parametrize("time", (False, True)) -def test_linearelasticity(E, nu, lambda_, mu, rho, dim, time): - """Test for linearelasticity equation.""" - # define input/output keys - input_keys = ("x", "y", "z")[:dim] - if time: - input_keys = ("t",) + input_keys - - disp_output_keys = ("u", "v") - if dim == 3: - disp_output_keys += ("w",) - disp_output_keys += ("p",) - - stress_output_keys = ("sigma_xx", "sigma_yy") - if dim == 3: - stress_output_keys += ("sigma_zz",) - stress_output_keys += ("sigma_xy",) - if dim == 3: - stress_output_keys += ("sigma_xz", "sigma_yz") - - # prepare input data in dict - batch_size = 13 - input_dict = {} - for var in input_keys: - input_dict[var] = paddle.randn([batch_size, 1]) - input_dict[var].stop_gradient = False - input_dict[f"normal_{var}"] = paddle.randn([batch_size, 1]) - input_dict[f"normal_{var}"].stop_gradient = False - - # prepare model - disp_net = ppsci.arch.MLP( - input_keys, disp_output_keys, 3, 16, "silu", weight_norm=True - ) - stress_net = ppsci.arch.MLP( - input_keys, - stress_output_keys, - 3, - 16, - "silu", - weight_norm=True, - ) - model_list = ppsci.arch.ModelList((disp_net, stress_net)) - - # prepare python function expressions and sympy-expression in dict - functional_expr_dict = equation.LinearElasticity( - E, nu, lambda_, mu, rho, dim, time - ).equations - sympy_expr_dict = LinearElasticity_sympy( - E, nu, lambda_, mu, rho, dim, time - ).equations - for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = sym_to_func.sympy_to_function( - expr, [disp_net, stress_net] - ) - - # compute equation with python function - output_dict_functional = model_list(input_dict) - for name, expr in functional_expr_dict.items(): - if callable(expr): - output_dict_functional[name] = expr( - {**output_dict_functional, **input_dict} - ) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") - clear() - - # compute equation with funciton converted from sympy - output_dict_sympy = {k: v for k, v in input_dict.items()} - for name, _ in sympy_expr_dict.items(): - output_dict_sympy[name] = sympy_expr_dict[name]( - {**output_dict_sympy, **input_dict} - ) - clear() - - # test for result - for key in functional_expr_dict: - assert paddle.allclose( - output_dict_functional[key], output_dict_sympy[key], atol=2e-7 - ) - - -if __name__ == "__main__": - pytest.main() diff --git a/test/utils/test_navier_stokes_sympy.py b/test/utils/test_navier_stokes_sympy.py deleted file mode 100644 index e10592c13..000000000 --- a/test/utils/test_navier_stokes_sympy.py +++ /dev/null @@ -1,540 +0,0 @@ -# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import paddle -import pytest -import sympy as sp - -import ppsci -from ppsci import equation -from ppsci.autodiff import clear -from ppsci.autodiff import hessian as H -from ppsci.autodiff import jacobian as J -from ppsci.utils import sym_to_func - - -class NavierStokes_sympy: - def __init__(self, nu, rho=1, dim=3, time=True): - # set params - self.dim = dim - self.time = time - - # coordinates - x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") - - # time - t = sp.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sp.Function("u")(*input_variables) - v = sp.Function("v")(*input_variables) - if self.dim == 3: - w = sp.Function("w")(*input_variables) - else: - w = sp.Number(0) - - # pressure - p = sp.Function("p")(*input_variables) - - # kinematic viscosity - if isinstance(nu, str): - nu = sp.Function(nu)(*input_variables) - elif isinstance(nu, (float, int)): - nu = sp.Number(nu) - - # density - if isinstance(rho, str): - rho = sp.Function(rho)(*input_variables) - elif isinstance(rho, (float, int)): - rho = sp.Number(rho) - - # dynamic viscosity - mu = rho * nu - - # set equations - self.equations = {} - self.equations["continuity"] = ( - rho.diff(t) + (rho * u).diff(x) + (rho * v).diff(y) + (rho * w).diff(z) - ) - - curl = sp.Number(0) if rho.diff(x) == 0 else u.diff(x) + v.diff(y) + w.diff(z) - self.equations["momentum_x"] = ( - (rho * u).diff(t) - + ( - u * ((rho * u).diff(x)) - + v * ((rho * u).diff(y)) - + w * ((rho * u).diff(z)) - + rho * u * (curl) - ) - + p.diff(x) - - (-2 / 3 * mu * (curl)).diff(x) - - (mu * u.diff(x)).diff(x) - - (mu * u.diff(y)).diff(y) - - (mu * u.diff(z)).diff(z) - - (mu * (curl).diff(x)) - ) - self.equations["momentum_y"] = ( - (rho * v).diff(t) - + ( - u * ((rho * v).diff(x)) - + v * ((rho * v).diff(y)) - + w * ((rho * v).diff(z)) - + rho * v * (curl) - ) - + p.diff(y) - - (-2 / 3 * mu * (curl)).diff(y) - - (mu * v.diff(x)).diff(x) - - (mu * v.diff(y)).diff(y) - - (mu * v.diff(z)).diff(z) - - (mu * (curl).diff(y)) - ) - self.equations["momentum_z"] = ( - (rho * w).diff(t) - + ( - u * ((rho * w).diff(x)) - + v * ((rho * w).diff(y)) - + w * ((rho * w).diff(z)) - + rho * w * (curl) - ) - + p.diff(z) - - (-2 / 3 * mu * (curl)).diff(z) - - (mu * w.diff(x)).diff(x) - - (mu * w.diff(y)).diff(y) - - (mu * w.diff(z)).diff(z) - - (mu * (curl).diff(z)) - ) - - if self.dim == 2: - self.equations.pop("momentum_z") - - -class ZeroEquation_sympy: - def __init__( - self, nu, max_distance, rho=1, dim=3, time=True - ): # TODO add density into model - # set params - self.dim = dim - self.time = time - - # model coefficients - self.max_distance = max_distance - self.karman_constant = 0.419 - self.max_distance_ratio = 0.09 - - # coordinates - x, y, z = sp.Symbol("x"), sp.Symbol("y"), sp.Symbol("z") - - # time - t = sp.Symbol("t") - - # make input variables - input_variables = {"x": x, "y": y, "z": z, "t": t} - if self.dim == 2: - input_variables.pop("z") - if not self.time: - input_variables.pop("t") - - # velocity componets - u = sp.Function("u")(*input_variables) - v = sp.Function("v")(*input_variables) - if self.dim == 3: - w = sp.Function("w")(*input_variables) - else: - w = sp.Number(0) - - # density - if type(rho) is str: - rho = sp.Function(rho)(*input_variables) - elif type(rho) in [float, int]: 
- rho = sp.Number(rho) - - # wall distance - normal_distance = sp.Function("sdf")(*input_variables) - - # mixing length - mixing_length = sp.Min( - self.karman_constant * normal_distance, - self.max_distance_ratio * self.max_distance, - ) - G = ( - 2 * u.diff(x) ** 2 - + 2 * v.diff(y) ** 2 - + 2 * w.diff(z) ** 2 - + (u.diff(y) + v.diff(x)) ** 2 - + (u.diff(z) + w.diff(x)) ** 2 - + (v.diff(z) + w.diff(y)) ** 2 - ) - - # set equations - self.equations = {} - self.equations["nu"] = nu + rho * mixing_length**2 * sp.sqrt(G) - - -class Test_NavierStokes_sympy: - @pytest.mark.parametrize("nu", (2.0,)) - @pytest.mark.parametrize("rho", (1.0,)) - @pytest.mark.parametrize("dim", (2,)) - @pytest.mark.parametrize("time", (False, True)) - def test_nu_sympy(self, nu, rho, dim, time): - """Test for navier_stokes equation.""" - # define input/output keys - ze = ZeroEquation_sympy(nu=nu, rho=rho, dim=dim, max_distance=3.4, time=time) - nu_sympy = ze.equations["nu"] - - input_keys = ("x", "y", "z")[:dim] - if time: - input_keys = ("t",) + input_keys - - output_keys = ("u", "v") - if dim == 3: - output_keys += ("w",) - output_keys += ("p",) - - # prepare input data in dict - batch_size = 13 - input_dict = {} - for var in input_keys: - input_dict[var] = paddle.randn([batch_size, 1]) - input_dict[var].stop_gradient = False - if var != "t": - input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) - input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) - - input_dict[f"sdf__{var}"].stop_gradient = False - input_dict[f"normal__{var}"].stop_gradient = False - - input_dict["sdf"] = paddle.randn([batch_size, 1]) - input_dict["sdf"].stop_gradient = False - - # prepare model - model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) - - # prepare python function expressions and sympy-expression in dict - def nu_f(out): - karman_constant = 0.419 - max_distance_ratio = 0.09 - normal_distance = out["sdf"] - max_distance = ze.max_distance - mixing_length = paddle.minimum( - karman_constant * normal_distance, - max_distance_ratio * max_distance, - ) - x, y = out["x"], out["y"] - u, v = out["u"], out["v"] - G = 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 + (J(u, y) + J(v, x)) ** 2 - if dim == 3: - z, w = out["z"], out["w"] - G += ( - +2 * J(w, z) ** 2 - + (J(u, z) + J(w, x)) ** 2 - + (J(v, z) + J(w, y)) ** 2 - ) - return nu + rho * mixing_length**2 * paddle.sqrt(G) - - functional_expr_dict = equation.NavierStokes(nu_f, rho, dim, time).equations - - def continuity_f(out): - x, y = out["x"], out["y"] - u, v = out["u"], out["v"] - return 1.0 * J(u, x) + 1.0 * J(v, y) - - def momentum_x_f(out): - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - if time: - t = out["t"] - return ( - -( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(u, x) - - ( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(u, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 - + 2 * J(u, x) * J(J(u, x), y) - + 2 * J(v, y) * H(v, y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * 
paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__y"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(u, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 - + 2 * J(u, x) * H(u, x) - + 2 * J(v, y) * J(J(v, x), y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__x"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(u, x) - + (1.0 * u * J(u, x) + 1.0 * v * J(u, y) + J(p, x)) - + (J(u, t) if time else 0) - ) - - def momentum_y_f(out): - x, y = out["x"], out["y"] - u, v, p = out["u"], out["v"], out["p"] - if time: - t = out["t"] - return ( - -( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(v, x) - - ( - 1.0 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - + 2.0 - ) - * H(v, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(u, y) + 2 * J(J(v, x), y)) / 2 - + 2 * J(u, x) * J(J(u, x), y) - + 2 * J(v, y) * H(v, y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__y"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(v, y) - - ( - 1.0 - * ( - (J(u, y) + J(v, x)) * (2 * H(v, x) + 2 * J(J(u, x), y)) / 2 - + 2 * J(u, x) * H(u, x) - + 2 * J(v, y) * J(J(v, x), y) - ) - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ** 2 - / paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - + 0.838 - * paddle.sqrt( - (J(u, y) + J(v, x)) ** 2 + 2 * J(u, x) ** 2 + 2 * J(v, y) ** 2 - ) - * paddle.heaviside(0.306 - 0.419 * out["sdf"], paddle.zeros([])) - * out["sdf__x"] - * paddle.minimum( - paddle.full_like(out["sdf"], 0.306), 0.419 * out["sdf"] - ) - ) - * J(v, x) - + (1.0 * u * J(v, x) + 1.0 * v * J(v, y) + J(p, y)) - + (J(v, t) if time else 0) - ) - - functional_expr_dict["continuity"] = continuity_f - functional_expr_dict["momentum_x"] = momentum_x_f - functional_expr_dict["momentum_y"] = momentum_y_f - - sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations - for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = sym_to_func.sympy_to_function( - expr, - [ - model, - ], - ) - - # compute equation with python function - output_dict_functional = model(input_dict) - for name, expr in functional_expr_dict.items(): - if callable(expr): - output_dict_functional[name] = expr( - {**output_dict_functional, **input_dict} - ) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") - clear() - - # compute equation with funciton converted from sympy - output_dict_sympy = {k: v for k, v in input_dict.items()} - for name, expr in sympy_expr_dict.items(): - 
tmp = expr(output_dict_sympy) - output_dict_sympy[name] = tmp - clear() - - # test for result - for key in functional_expr_dict: - assert paddle.allclose( - output_dict_functional[key], output_dict_sympy[key], atol=1e-7 - ), f"{key} not equal." - - @pytest.mark.parametrize("nu", (2.0,)) - @pytest.mark.parametrize("rho", (1.0,)) - @pytest.mark.parametrize("dim", (2,)) - @pytest.mark.parametrize("time", (False, True)) - def test_nu_constant(self, nu, rho, dim, time): - """Test for navier_stokes equation.""" - # define input/output keys - nu_sympy = nu - - input_keys = ("x", "y", "z")[:dim] - if time: - input_keys = ("t",) + input_keys - - output_keys = ("u", "v") - if dim == 3: - output_keys += ("w",) - output_keys += ("p",) - - # prepare input data in dict - batch_size = 13 - input_dict = {} - for var in input_keys: - input_dict[var] = paddle.randn([batch_size, 1]) - input_dict[var].stop_gradient = False - if var != "t": - input_dict[f"sdf__{var}"] = paddle.randn([batch_size, 1]) - input_dict[f"normal__{var}"] = paddle.randn([batch_size, 1]) - - input_dict[f"sdf__{var}"].stop_gradient = False - input_dict[f"normal__{var}"].stop_gradient = False - - input_dict["sdf"] = paddle.randn([batch_size, 1]) - input_dict["sdf"].stop_gradient = False - - # prepare model - model = ppsci.arch.MLP(input_keys, output_keys, 2, 10) - - # prepare python function expressions and sympy-expression in dict - functional_expr_dict = equation.NavierStokes(nu, rho, dim, time).equations - - sympy_expr_dict = NavierStokes_sympy(nu_sympy, rho, dim, time).equations - for target, expr in sympy_expr_dict.items(): - sympy_expr_dict[target] = sym_to_func.sympy_to_function( - expr, - [ - model, - ], - ) - - # compute equation with python function - output_dict_functional = model(input_dict) - for name, expr in functional_expr_dict.items(): - if callable(expr): - output_dict_functional[name] = expr( - {**output_dict_functional, **input_dict} - ) - else: - raise TypeError(f"expr type({type(expr)}) is invalid") - clear() - - # compute equation with funciton converted from sympy - output_dict_sympy = {k: v for k, v in input_dict.items()} - tmp = {k: v for k, v in output_dict_sympy.items()} - for name, expr in sympy_expr_dict.items(): - output = expr(tmp) - output_dict_sympy[name] = output - clear() - - # test for result - for key in functional_expr_dict: - assert paddle.allclose( - output_dict_functional[key], output_dict_sympy[key], atol=1e-7 - ), f"{key} not equal." 
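
For reference, the equivalence check that both tests above (and the removed timing script) rely on reduces to the short sketch below. The toy expression, the MLP size, the batch shape and the tolerance are illustrative assumptions rather than part of the patch; only `sym_to_func.sympy_to_function` (renamed to `ppsci.lambdify` later in this series), `ppsci.arch.MLP`, `jacobian` and `clear` come from the code under test.

    import paddle
    import sympy as sp

    import ppsci
    from ppsci.autodiff import clear
    from ppsci.autodiff import jacobian as J
    from ppsci.utils import sym_to_func

    # toy expression du/dx + 2*u, with u predicted by a small MLP
    x, y = sp.Symbol("x"), sp.Symbol("y")
    u = sp.Function("u")(x, y)
    expr = u.diff(x) + 2 * u

    model = ppsci.arch.MLP(("x", "y"), ("u",), 2, 10)
    expr_func = sym_to_func.sympy_to_function(expr, [model])

    input_dict = {"x": paddle.randn([4, 1]), "y": paddle.randn([4, 1])}
    for var in input_dict.values():
        var.stop_gradient = False

    # hand-written paddle version of the same expression
    u_pred = model(input_dict)["u"]
    manual = J(u_pred, input_dict["x"]) + 2 * u_pred
    clear()

    # function converted from the sympy expression; results should match
    converted = expr_func({**input_dict})
    clear()
    assert paddle.allclose(manual, converted, atol=1e-7)
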
- - -if __name__ == "__main__": - pytest.main() From c7eea1b47093c4f0b653f01944df363f2a29b201 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:33:05 +0000 Subject: [PATCH 30/48] remove unnecessary code --- examples/euler_beam/euler_beam.py | 6 +----- ppsci/constraint/base.py | 1 - 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/examples/euler_beam/euler_beam.py b/examples/euler_beam/euler_beam.py index ed4533c7e..1dfffeaec 100644 --- a/examples/euler_beam/euler_beam.py +++ b/examples/euler_beam/euler_beam.py @@ -31,11 +31,7 @@ ITERS_PER_EPOCH = 1 EPOCHS = 10000 if not args.epochs else args.epochs # set output directory - OUTPUT_DIR = ( - "./output_euler_beam_sympy_to_static" - if not args.output_dir - else args.output_dir - ) + OUTPUT_DIR = "./output/euler_beam" if not args.output_dir else args.output_dir # initialize logger logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info") diff --git a/ppsci/constraint/base.py b/ppsci/constraint/base.py index 95bdce058..c3b4c8a12 100644 --- a/ppsci/constraint/base.py +++ b/ppsci/constraint/base.py @@ -47,7 +47,6 @@ def __init__( self.data_iter = iter(self.data_loader) self.loss = loss self.name = name - self.detach_keys = () def __str__(self): return ", ".join( From 615fefbd3dec7b8bde9d2769ab0ebf612171b84e Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:35:09 +0000 Subject: [PATCH 31/48] remove unnecessary more code --- ppsci/constraint/boundary_constraint.py | 8 -------- ppsci/constraint/initial_constraint.py | 9 --------- ppsci/constraint/integral_constraint.py | 8 -------- ppsci/constraint/interior_constraint.py | 7 ------- ppsci/constraint/periodic_constraint.py | 8 -------- ppsci/validate/geo_validator.py | 5 ----- 6 files changed, 45 deletions(-) diff --git a/ppsci/constraint/boundary_constraint.py b/ppsci/constraint/boundary_constraint.py index 9df6e23fd..6420a9685 100644 --- a/ppsci/constraint/boundary_constraint.py +++ b/ppsci/constraint/boundary_constraint.py @@ -23,7 +23,6 @@ import numpy as np import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -87,10 +86,6 @@ def __init__( name: str = "BC", ): self.output_expr = output_expr - for output_name, expr in self.output_expr.items(): - if isinstance(expr, str): - self.output_expr[output_name] = sp_parser.parse_expr(expr) - self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) @@ -140,9 +135,6 @@ def __init__( weight = {key: np.ones_like(next(iter(label.values()))) for key in label} if weight_dict is not None: for key, value in weight_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) - if isinstance(value, (int, float)): weight[key] = np.full_like(next(iter(label.values())), value) elif isinstance(value, sympy.Basic): diff --git a/ppsci/constraint/initial_constraint.py b/ppsci/constraint/initial_constraint.py index b7c867123..d32d8c00c 100644 --- a/ppsci/constraint/initial_constraint.py +++ b/ppsci/constraint/initial_constraint.py @@ -23,7 +23,6 @@ import numpy as np import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -90,10 +89,6 @@ def __init__( name: str = "IC", ): self.output_expr = output_expr - for output_name, expr in self.output_expr.items(): - if isinstance(expr, str): - self.output_expr[output_name] = sp_parser.parse_expr(expr) - 
self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) @@ -120,8 +115,6 @@ def __init__( # prepare label label = {} for key, value in label_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) if isinstance(value, (int, float)): label[key] = np.full_like(next(iter(input.values())), value) elif isinstance(value, sympy.Basic): @@ -145,8 +138,6 @@ def __init__( weight = {key: np.ones_like(next(iter(label.values()))) for key in label} if weight_dict is not None: for key, value in weight_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) if isinstance(value, (int, float)): weight[key] = np.full_like(next(iter(label.values())), value) elif isinstance(value, sympy.Basic): diff --git a/ppsci/constraint/integral_constraint.py b/ppsci/constraint/integral_constraint.py index 4f655c000..511f82373 100644 --- a/ppsci/constraint/integral_constraint.py +++ b/ppsci/constraint/integral_constraint.py @@ -24,7 +24,6 @@ import numpy as np import paddle import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -87,10 +86,6 @@ def __init__( name: str = "IgC", ): self.output_expr = output_expr - for output_name, expr in self.output_expr.items(): - if isinstance(expr, str): - self.output_expr[output_name] = sp_parser.parse_expr(expr) - self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) @@ -152,9 +147,6 @@ def __init__( weight = {key: np.ones_like(next(iter(label.values()))) for key in label} if weight_dict is not None: for key, value in weight_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) - if isinstance(value, (int, float)): weight[key] = np.full_like(next(iter(label.values())), value) elif isinstance(value, sympy.Basic): diff --git a/ppsci/constraint/interior_constraint.py b/ppsci/constraint/interior_constraint.py index 18ee0c6bc..d0c77df10 100644 --- a/ppsci/constraint/interior_constraint.py +++ b/ppsci/constraint/interior_constraint.py @@ -23,7 +23,6 @@ import numpy as np import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -87,10 +86,6 @@ def __init__( name: str = "EQ", ): self.output_expr = output_expr - for output_name, expr in self.output_expr.items(): - if isinstance(expr, str) and output_name in label_dict: - self.output_expr[output_name] = sp_parser.parse_expr(expr) - self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) @@ -117,8 +112,6 @@ def __init__( # prepare label label = {} for key, value in label_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) if isinstance(value, (int, float)): label[key] = np.full_like(next(iter(input.values())), value) elif isinstance(value, sympy.Basic): diff --git a/ppsci/constraint/periodic_constraint.py b/ppsci/constraint/periodic_constraint.py index ba3451ad6..7ad3e2fc1 100644 --- a/ppsci/constraint/periodic_constraint.py +++ b/ppsci/constraint/periodic_constraint.py @@ -24,7 +24,6 @@ import numpy as np import paddle import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -74,10 +73,6 @@ def __init__( name: str = "PeriodicBC", ): self.output_expr = output_expr - for output_name, expr in self.output_expr.items(): - if isinstance(expr, str): - 
self.output_expr[output_name] = sp_parser.parse_expr(expr) - self.input_keys = geom.dim_keys self.output_keys = list(output_expr.keys()) self.output_expr = { @@ -146,9 +141,6 @@ def __init__( weight = {key: np.ones_like(next(iter(label.values()))) for key in label} if weight_dict is not None: for key, value in weight_dict.items(): - if isinstance(value, str): - value = sp_parser.parse_expr(value) - if isinstance(value, (int, float)): weight[key] = np.full_like(next(iter(label.values())), value) elif isinstance(value, sympy.Basic): diff --git a/ppsci/validate/geo_validator.py b/ppsci/validate/geo_validator.py index 9f4605194..b9b781f87 100644 --- a/ppsci/validate/geo_validator.py +++ b/ppsci/validate/geo_validator.py @@ -23,7 +23,6 @@ import numpy as np import paddle import sympy -from sympy.parsing import sympy_parser as sp_parser from typing_extensions import Literal from ppsci import geometry @@ -85,10 +84,6 @@ def __init__( name: Optional[str] = None, ): self.output_expr = output_expr - for label_name, expr in self.output_expr.items(): - if isinstance(expr, str): - self.output_expr[label_name] = sp_parser.parse_expr(expr) - self.label_dict = label_dict self.input_keys = geom.dim_keys self.output_keys = list(label_dict.keys()) From e096ea222b1063efc7e27c1e78dd199ebfe789bc Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 06:57:15 +0000 Subject: [PATCH 32/48] use DETACH_FUNC_NAME instead of 'detach' --- ppsci/equation/__init__.py | 2 ++ ppsci/equation/pde/__init__.py | 2 ++ ppsci/equation/pde/base.py | 21 ++++++--------------- ppsci/utils/sym_to_func.py | 11 ++++++----- 4 files changed, 16 insertions(+), 20 deletions(-) diff --git a/ppsci/equation/__init__.py b/ppsci/equation/__init__.py index d1c95a6e6..47526d4d6 100644 --- a/ppsci/equation/__init__.py +++ b/ppsci/equation/__init__.py @@ -16,6 +16,7 @@ from ppsci.equation.fpde import FractionalPoisson from ppsci.equation.ide import Volterra +from ppsci.equation.pde import DETACH_FUNC_NAME from ppsci.equation.pde import PDE from ppsci.equation.pde import Biharmonic from ppsci.equation.pde import Laplace @@ -29,6 +30,7 @@ __all__ = [ "PDE", + "DETACH_FUNC_NAME", "Biharmonic", "Laplace", "LinearElasticity", diff --git a/ppsci/equation/pde/__init__.py b/ppsci/equation/pde/__init__.py index 65addab79..1ff84a31a 100644 --- a/ppsci/equation/pde/__init__.py +++ b/ppsci/equation/pde/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from ppsci.equation.pde.base import DETACH_FUNC_NAME from ppsci.equation.pde.base import PDE from ppsci.equation.pde.biharmonic import Biharmonic from ppsci.equation.pde.laplace import Laplace @@ -23,6 +24,7 @@ __all__ = [ "PDE", + "DETACH_FUNC_NAME", "Biharmonic", "Laplace", "LinearElasticity", diff --git a/ppsci/equation/pde/base.py b/ppsci/equation/pde/base.py index eef4ceb03..a9cfa599a 100644 --- a/ppsci/equation/pde/base.py +++ b/ppsci/equation/pde/base.py @@ -24,6 +24,8 @@ import sympy from paddle import nn +DETACH_FUNC_NAME = "detach" + class PDE: """Base class for Partial Differential Equation""" @@ -60,24 +62,13 @@ def create_function( sympy.Function: Named sympy function. 
""" expr = sympy.Function(name)(*invars) + + # wrap `expression(...)` to `detach(expression(...))` + # if name of expression is in given detach_keys if self.detach_keys and name in self.detach_keys: - expr = sympy.Function("detach")(expr) + expr = sympy.Function(DETACH_FUNC_NAME)(expr) return expr - def create_detach( - self, - expr: sympy.Basic, - ) -> sympy.Function: - """Create detach function for given expression. - - Args: - expr (sympy.Basic): Given expression to be detached. - - Returns: - sympy.Function: Detached expression. - """ - return sympy.Function("detach")(expr) - def add_equation(self, name: str, equation: Callable): """Add an equation. diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 1e634698b..2e9a4670a 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -32,6 +32,7 @@ from typing_extensions import TypeAlias from ppsci import arch +from ppsci import equation from ppsci.autodiff import hessian from ppsci.autodiff import jacobian @@ -117,7 +118,7 @@ def __repr__(self): class DetachNode(nn.Layer): - """Class for detach node in converted expression tree. + """Class for detach operation in converted expression tree. Args: expr (sp.Basic): Sympy expression. @@ -127,13 +128,13 @@ def __init__(self, expr: sp.Basic): super().__init__() self.expr = expr self.key = _cvt_to_key(self.expr) - self.key_detach = self.key + "_detach" + self.child = _cvt_to_key(self.expr.args[0]) def forward(self, data_dict: DATA_DICT): - if self.key_detach in data_dict: + if self.key in data_dict: return data_dict - data_dict[self.key_detach] = data_dict[self.key].detach() + data_dict[self.key] = data_dict[self.child].detach() return data_dict @@ -427,7 +428,7 @@ def sympy_to_function( ): callable_nodes.append(OperatorNode(node)) elif isinstance(node, sp.Function): - if node.name == "detach": + if node.name == equation.DETACH_FUNC_NAME: callable_nodes.append(DetachNode(node)) else: match_index = None From cb16107787933eefbb0eb1283abfa98dd7891fa7 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 7 Sep 2023 09:05:32 +0000 Subject: [PATCH 33/48] add derivatives for sdf function --- ppsci/constraint/interior_constraint.py | 2 ++ ppsci/equation/pde/navier_stokes.py | 18 +++++++++++--- ppsci/geometry/geometry.py | 31 +++++++++++++++++++++++-- ppsci/geometry/mesh.py | 4 ++-- 4 files changed, 48 insertions(+), 7 deletions(-) diff --git a/ppsci/constraint/interior_constraint.py b/ppsci/constraint/interior_constraint.py index d0c77df10..47f2ea88e 100644 --- a/ppsci/constraint/interior_constraint.py +++ b/ppsci/constraint/interior_constraint.py @@ -83,6 +83,7 @@ def __init__( criteria: Optional[Callable] = None, evenly: bool = False, weight_dict: Optional[Dict[str, Union[Callable, float]]] = None, + compute_sdf_derivatives: bool = False, name: str = "EQ", ): self.output_expr = output_expr @@ -105,6 +106,7 @@ def __init__( random, criteria, evenly, + compute_sdf_derivatives, ) if "area" in input: input["area"] *= dataloader_cfg["iters_per_epoch"] diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index 4c26aefc0..abe0c1a34 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -101,7 +101,11 @@ def __init__( + u * u.diff(x) + v * u.diff(y) + w * u.diff(z) - - nu * (u.diff(x).diff(x) + u.diff(y).diff(y) + u.diff(z).diff(z)) + - ( + (nu * u.diff(x)).diff(x) + + (nu * u.diff(y)).diff(y) + + (nu * u.diff(z)).diff(z) + ) + 1 / rho * p.diff(x) ) momentum_y = ( @@ -109,7 
+113,11 @@ def __init__( + u * v.diff(x) + v * v.diff(y) + w * v.diff(z) - - nu * (v.diff(x).diff(x) + v.diff(y).diff(y) + v.diff(z).diff(z)) + - ( + (nu * v.diff(x)).diff(x) + + (nu * v.diff(y)).diff(y) + + (nu * v.diff(z)).diff(z) + ) + 1 / rho * p.diff(y) ) momentum_z = ( @@ -117,7 +125,11 @@ def __init__( + u * w.diff(x) + v * w.diff(y) + w * w.diff(z) - - nu * (w.diff(x).diff(x) + w.diff(y).diff(y) + w.diff(z).diff(z)) + - ( + (nu * w.diff(x)).diff(x) + + (nu * w.diff(y)).diff(y) + + (nu * w.diff(z)).diff(z) + ) + 1 / rho * p.diff(z) ) self.add_equation("continuity", continuity) diff --git a/ppsci/geometry/geometry.py b/ppsci/geometry/geometry.py index f511dc730..9ee01f12d 100644 --- a/ppsci/geometry/geometry.py +++ b/ppsci/geometry/geometry.py @@ -64,7 +64,14 @@ def uniform_points(self, n: int, boundary=True): ) return self.random_points(n) - def sample_interior(self, n, random="pseudo", criteria=None, evenly=False): + def sample_interior( + self, + n, + random="pseudo", + criteria=None, + evenly=False, + compute_sdf_derivatives=False, + ): """Sample random points in the geometry and return those meet criteria.""" x = np.empty(shape=(n, self.ndim), dtype=paddle.get_default_dtype()) _size, _ntry, _nsuc = 0, 0, 0 @@ -103,7 +110,15 @@ def sample_interior(self, n, random="pseudo", criteria=None, evenly=False): else: sdf_dict = {} x_dict = misc.convert_to_dict(x, self.dim_keys) - return {**x_dict, **sdf_dict} + + if compute_sdf_derivatives: + sdf_derivatives_dict = misc.convert_to_dict( + self.sdf_derivatives(x), (f"sdf__{d}" for d in self.dim_keys) + ) + else: + sdf_derivatives_dict = {} + + return {**x_dict, **sdf_dict, **sdf_derivatives_dict} def sample_boundary(self, n, random="pseudo", criteria=None, evenly=False): """Compute the random points in the geometry and return those meet criteria.""" @@ -189,6 +204,18 @@ def periodic_point(self, x: np.ndarray, component: int): """Compute the periodic image of x.""" raise NotImplementedError(f"{self}.periodic_point to be implemented") + def sdf_derivatives(self, x: np.ndarray, eps=0.0001) -> dict: + # compute sdf by centered difference + sdf_derivative = np.zeros_like(x) + for i in range(len(self.dim_keys)): + delta = np.zeros_like(x) + delta[:, i] += eps / 2 + sdf_plus = self.sdf_func(x + delta) + sdf_minus = self.sdf_func(x - delta) + # store sdf derivative + sdf_derivative[:, i] = (sdf_plus - sdf_minus) / eps + return sdf_derivative + def union(self, other): """CSG Union.""" from ppsci.geometry import csg diff --git a/ppsci/geometry/mesh.py b/ppsci/geometry/mesh.py index f43dab31a..0c10e47ce 100644 --- a/ppsci/geometry/mesh.py +++ b/ppsci/geometry/mesh.py @@ -438,11 +438,11 @@ def sample_interior(self, n, random="pseudo", criteria=None, evenly=False): points, areas = self.random_points(n, random, criteria) x_dict = misc.convert_to_dict(points, self.dim_keys) - area_dict = misc.convert_to_dict(areas, ["area"]) + area_dict = misc.convert_to_dict(areas, ("area",)) # NOTE: add negtive to the sdf values because weight should be positive. 
sdf = -self.sdf_func(points) - sdf_dict = misc.convert_to_dict(sdf, ["sdf"]) + sdf_dict = misc.convert_to_dict(sdf, ("sdf",)) return {**x_dict, **area_dict, **sdf_dict} From 899a2d247476bdb6209798218fd8059f351162a4 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:27:38 +0000 Subject: [PATCH 34/48] replace .diff.diff with .diff(, 2) --- ppsci/equation/pde/biharmonic.py | 2 +- ppsci/equation/pde/laplace.py | 2 +- ppsci/equation/pde/navier_stokes.py | 4 +++- ppsci/equation/pde/normal_dot_vec.py | 2 +- ppsci/equation/pde/poisson.py | 2 +- ppsci/equation/pde/viv.py | 6 +----- 6 files changed, 8 insertions(+), 10 deletions(-) diff --git a/ppsci/equation/pde/biharmonic.py b/ppsci/equation/pde/biharmonic.py index d1eb269fd..436d5b4bb 100644 --- a/ppsci/equation/pde/biharmonic.py +++ b/ppsci/equation/pde/biharmonic.py @@ -62,6 +62,6 @@ def __init__( biharmonic = -self.q / self.D for invar_i in invars: for invar_j in invars: - biharmonic += u.diff(invar_i).diff(invar_i).diff(invar_j).diff(invar_j) + biharmonic += u.diff(invar_i, 2).diff(invar_j, 2) self.add_equation("biharmonic", biharmonic) diff --git a/ppsci/equation/pde/laplace.py b/ppsci/equation/pde/laplace.py index 45480986d..0b0c90b38 100644 --- a/ppsci/equation/pde/laplace.py +++ b/ppsci/equation/pde/laplace.py @@ -46,6 +46,6 @@ def __init__(self, dim: int, detach_keys: Optional[Tuple[str, ...]] = None): laplace = 0 for invar in invars: - laplace += u.diff(invar).diff(invar) + laplace += u.diff(invar, 2) self.add_equation("laplace", laplace) diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index abe0c1a34..9adcc01ed 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -19,6 +19,8 @@ from typing import Tuple from typing import Union +import sympy as sp + from ppsci.equation.pde import base @@ -92,7 +94,7 @@ def __init__( u = self.create_function("u", invars) v = self.create_function("v", invars) - w = self.create_function("w", invars) + w = self.create_function("w", invars) if dim == 3 else sp.Number(0) p = self.create_function("p", invars) continuity = u.diff(x) + v.diff(y) + w.diff(z) diff --git a/ppsci/equation/pde/normal_dot_vec.py b/ppsci/equation/pde/normal_dot_vec.py index 6abf7df5b..156c0ee85 100644 --- a/ppsci/equation/pde/normal_dot_vec.py +++ b/ppsci/equation/pde/normal_dot_vec.py @@ -50,6 +50,6 @@ def __init__( normal_dot_vec = 0 for (vec, normal) in zip(vec_vars, normals): - normal_dot_vec += vec * normal + normal_dot_vec += normal * vec self.add_equation("normal_dot_vec", normal_dot_vec) diff --git a/ppsci/equation/pde/poisson.py b/ppsci/equation/pde/poisson.py index d949f636e..b921f7d5b 100644 --- a/ppsci/equation/pde/poisson.py +++ b/ppsci/equation/pde/poisson.py @@ -44,6 +44,6 @@ def __init__(self, dim: int, detach_keys: Optional[Tuple[str, ...]] = None): poisson = 0 for invar in invars: - poisson += p.diff(invar).diff(invar) + poisson += p.diff(invar, 2) self.add_equation("poisson", poisson) diff --git a/ppsci/equation/pde/viv.py b/ppsci/equation/pde/viv.py index 55db22c79..0e37a721c 100644 --- a/ppsci/equation/pde/viv.py +++ b/ppsci/equation/pde/viv.py @@ -60,9 +60,5 @@ def __init__(self, rho: float, k1: float, k2: float): eta = self.create_function("eta", (t_f,)) k1 = self.create_symbols(self.k1.name) k2 = self.create_symbols(self.k2.name) - f = ( - self.rho * eta.diff(t_f).diff(t_f) - + sp.exp(k1) * eta.diff(t_f) - + sp.exp(k2) * eta - ) + f = self.rho * eta.diff(t_f, 2) + sp.exp(k1) * eta.diff(t_f) + 
sp.exp(k2) * eta self.add_equation("f", f) From 640c0803288f254437a8015601932359becd05d2 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:29:07 +0000 Subject: [PATCH 35/48] support exporting expression to .dot and .png file for visualizing and DEBUG --- ppsci/solver/solver.py | 1 + ppsci/utils/sym_to_func.py | 142 ++++++++++++++++++++++++++++++------- 2 files changed, 117 insertions(+), 26 deletions(-) diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index c6a4a7832..afc5d77ba 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -332,6 +332,7 @@ def convert_expr( expr, self.model, extra_parameters, + # os.path.join(self.output_dir, container.name, expr), # HACK: Activate it for DEBUG. ) logger.message(f"Convert expression[{name}]: {expr}") diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 2e9a4670a..0cc5eba9d 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -54,9 +54,11 @@ sp.Min, sp.Abs, sp.Heaviside, + sp.Add, + sp.Mul, ] -PADDLE_FUNC_MAP = { +SYMPT_TO_PADDLE = { sp.sin: paddle.sin, sp.cos: paddle.cos, sp.exp: paddle.exp, @@ -67,6 +69,8 @@ sp.Min: paddle.minimum, sp.Abs: paddle.abs, sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), + # NOTE: sp.Add and sp.Mul is not included here for unalignment with sympy + # and are implemented manually. } @@ -157,28 +161,29 @@ def __init__(self, expr: SYMPY_BUILTIN_FUNC): self.childs = [_cvt_to_key(arg) for arg in self.expr.args] if self.expr.func == sp.Add: - self._operator_func = self._add_operator_func + self._apply_func = self._add_operator_func elif self.expr.func == sp.Mul: - self._operator_func = self._mul_operator_func + self._apply_func = self._mul_operator_func elif self.expr.func == sp.Derivative: - self._operator_func = self._derivate_operator_func + self._apply_func = self._derivate_operator_func + elif self.expr.func == sp.Heaviside: + self._apply_func = self._heaviside_operator_func + self._auxiliary_func = SYMPT_TO_PADDLE[sp.Heaviside] else: - if self.expr.func == sp.Heaviside: - self._operator_func = self._heaviside_operator_func - self._compute_func = PADDLE_FUNC_MAP[sp.Heaviside] - else: - self._operator_func = self._vanilla_operator_func - self._compute_func = PADDLE_FUNC_MAP[self.expr.func] + self._apply_func = self._vanilla_operator_func + self._auxiliary_func = SYMPT_TO_PADDLE[self.expr.func] def forward(self, data_dict: DATA_DICT): # use cache if self.key in data_dict: return data_dict - return self._operator_func(data_dict) + return self._apply_func(data_dict) def _add_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = sum([data_dict[child] for child in self.childs]) + data_dict[self.key] = data_dict[self.childs[0]] + for p in self.childs[1:]: + data_dict[self.key] += data_dict[p] return data_dict def _mul_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: @@ -199,11 +204,11 @@ def _derivate_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: return data_dict def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._compute_func(data_dict[self.childs[0]]) + data_dict[self.key] = self._auxiliary_func(data_dict[self.childs[0]]) return data_dict def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._compute_func( + data_dict[self.key] = self._auxiliary_func( *tuple(data_dict[child] for child in self.childs) ) return data_dict @@ -289,17 +294,17 @@ class 
ComposedNode(nn.Layer): Compose list of several callable objects together. """ - def __init__(self, funcs: List[Node]): + def __init__(self, callable_nodes: List[Node]): super().__init__() - self.funcs = funcs + self.callable_nodes = callable_nodes def forward(self, data_dict: DATA_DICT) -> DATA_DICT: - # call all funcs in order - for func in self.funcs: + # call all callable_nodes in order + for func in self.callable_nodes: data_dict = func(data_dict) # return result of last node(root node) for target - return data_dict[self.funcs[-1].key] + return data_dict[self.callable_nodes[-1].key] def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: @@ -332,17 +337,99 @@ def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: return nodes +def _visualize_graph(nodes: List[sp.Basic], graph_filename: str): + try: + import pygraphviz + except ModuleNotFoundError: + raise ModuleNotFoundError( + "Please install pygraphviz by steps below:\n" + "1. apt-get install graphviz graphviz-dev\n" + "2. python -m pip install pygraphviz" + ) + + SYMPY_BUILTIN_NAME = { + sp.sin: "sin", + sp.cos: "cos", + sp.exp: "exp", + sp.Pow: "Pow", + sp.log: "log", + sp.tan: "tan", + sp.Max: "Max", + sp.Min: "Min", + sp.Abs: "Abs", + sp.Heaviside: "Heaviside", + sp.Add: "Add", + sp.Mul: "Mul", + } + naming_counter = {k: 0 for k in SYMPY_BUILTIN_NAME} + + def get_operator_name(node): + ret = f"{SYMPY_BUILTIN_NAME[node.func]}_{naming_counter[node.func]}" + naming_counter[node.func] += 1 + return ret + + graph = pygraphviz.AGraph(directed=True, rankdir="TB") + C_FUNC = "#9196f1" # purple color function node + C_DATA = "#feb64d" # oringe color for data node + C_EDGE = "#000000" # black color for edge + + def add_edge(u: str, v: str, u_color: str = C_DATA, v_color: str = C_DATA): + """Add an edge from `u` to `v`. + + Args: + u (str): Name of begin node u. + v (str): Name of end node v. + u_color (str, optional): _description_. Defaults to C_DATA. + v_color (str, optional): _description_. Defaults to C_DATA. 
+ """ + graph.add_node(u, style="filled", shape="ellipse", color=u_color) + graph.add_node(v, style="filled", shape="ellipse", color=v_color) + graph.add_edge(u, v, color=C_EDGE, style="solid", penwidth=0.5, arrowsize=0.5) + + for node in nodes: + if isinstance(node, tuple(SYMPY_BUILTIN_NAME.keys())): + operator_str = get_operator_name(node) + for arg in node.args: + add_edge(_cvt_to_key(arg), operator_str, v_color=C_FUNC) + add_edge(operator_str, _cvt_to_key(node), u_color=C_FUNC) + if isinstance(node, sp.Function): + for arg in node.args: + add_edge(_cvt_to_key(arg), str(node), v_color=C_FUNC) + add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) + elif isinstance(node, sp.Derivative): + add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) + add_edge(_cvt_to_key(node.args[0]), str(node), v_color=C_FUNC) + for arg in node.args[1:]: + add_edge(_cvt_to_key(arg[0]), str(node), v_color=C_FUNC) + + # export graph to image + from ppsci.utils import logger + + graph.layout() + image_path = f"{graph_filename}.png" + dot_path = f"{graph_filename}.dot" + graph.draw(image_path, prog="dot") + graph.write(dot_path) + logger.message( + f"Computational graph has been writen to {image_path} and {dot_path}," + "dot file can be visualized at https://dreampuf.github.io/GraphvizOnline/" + ) + + def sympy_to_function( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, extra_parameters: Optional[Sequence[paddle.Tensor]] = None, + graph_filename: Optional[str] = None, ) -> ComposedNode: """Convert sympy expression to callable function. Args: expr (sp.Expr): Sympy expression to be converted. models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. - extra_parameters (Optional[nn.ParameterList], optional): Extra learnable parameters. Defaults to None. + extra_parameters (Optional[nn.ParameterList]): Extra learnable parameters. Defaults to None. + graph_filename (Optional[str]): Save computational graph to `graph_filename.png` + for given `expr`, if `graph_filename` is not None and a valid string, such as 'momentum_x'. Defaults to None. Returns: ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. @@ -394,6 +481,9 @@ def sympy_to_function( # expr = sp.expand(expr) # expr = sp.simplify(expr) + # remove 1.0 from sympy expression tree + expr = expr.subs(1.0, 1) + # convert sympy expression tree to list of nodes in postorder sympy_nodes = [] sympy_nodes = _post_traverse(expr, sympy_nodes) @@ -419,12 +509,8 @@ def sympy_to_function( # convert sympy node to callable node callable_nodes = [] for i, node in enumerate(sympy_nodes): - if ( - isinstance(node, tuple(PADDLE_FUNC_MAP.keys())) - or node.is_Add - or node.is_Mul - or node.is_Derivative - or node.is_Pow + if isinstance( + node, tuple(SYMPT_TO_PADDLE.keys()) + (sp.Add, sp.Mul, sp.Derivative) ): callable_nodes.append(OperatorNode(node)) elif isinstance(node, sp.Function): @@ -461,5 +547,9 @@ def sympy_to_function( f"The node {node} is not supported in sympy_to_function." 
) + # NOTE: Visualize computational graph using 'pygraphviz' + if isinstance(graph_filename, str): + _visualize_graph(sympy_nodes, graph_filename) + # Compose callable nodes into one callable object return ComposedNode(callable_nodes) From 16aae527deab61f675ad2ba54f76be1f3d8952a5 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:29:46 +0000 Subject: [PATCH 36/48] remove compute_sdf_derivatives for next PR --- ppsci/constraint/interior_constraint.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ppsci/constraint/interior_constraint.py b/ppsci/constraint/interior_constraint.py index 47f2ea88e..d0c77df10 100644 --- a/ppsci/constraint/interior_constraint.py +++ b/ppsci/constraint/interior_constraint.py @@ -83,7 +83,6 @@ def __init__( criteria: Optional[Callable] = None, evenly: bool = False, weight_dict: Optional[Dict[str, Union[Callable, float]]] = None, - compute_sdf_derivatives: bool = False, name: str = "EQ", ): self.output_expr = output_expr @@ -106,7 +105,6 @@ def __init__( random, criteria, evenly, - compute_sdf_derivatives, ) if "area" in input: input["area"] *= dataloader_cfg["iters_per_epoch"] From 9b46410b147bebe86125bd995bf9594965acffad Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:30:12 +0000 Subject: [PATCH 37/48] refine docstring of ppsci/data/dataset/array_dataset.py --- ppsci/data/dataset/array_dataset.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ppsci/data/dataset/array_dataset.py b/ppsci/data/dataset/array_dataset.py index ad31e707b..7f0621702 100644 --- a/ppsci/data/dataset/array_dataset.py +++ b/ppsci/data/dataset/array_dataset.py @@ -29,9 +29,9 @@ class NamedArrayDataset(io.Dataset): Args: input (Dict[str, np.ndarray]): Input dict. label (Dict[str, np.ndarray]): Label dict. - weight (Optional[Dict[str, np.ndarray]], optional): Weight dict. - transforms (Optional[vision.Compose], optional): Compose object contains sample wise - transform(s). + weight (Optional[Dict[str, np.ndarray]]): Weight dict. Defaults to None. + transforms (Optional[vision.Compose]): Compose object contains sample wise + transform(s). Defaults to None. 
Examples: >>> import ppsci From f179ec1473d16e459a4fefeed2cbc4fde5cc7d64 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:32:50 +0000 Subject: [PATCH 38/48] remove sdf_derivatives code in geometry for next PR --- ppsci/geometry/geometry.py | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/ppsci/geometry/geometry.py b/ppsci/geometry/geometry.py index 9ee01f12d..3e087c70c 100644 --- a/ppsci/geometry/geometry.py +++ b/ppsci/geometry/geometry.py @@ -70,7 +70,6 @@ def sample_interior( random="pseudo", criteria=None, evenly=False, - compute_sdf_derivatives=False, ): """Sample random points in the geometry and return those meet criteria.""" x = np.empty(shape=(n, self.ndim), dtype=paddle.get_default_dtype()) @@ -111,14 +110,7 @@ def sample_interior( sdf_dict = {} x_dict = misc.convert_to_dict(x, self.dim_keys) - if compute_sdf_derivatives: - sdf_derivatives_dict = misc.convert_to_dict( - self.sdf_derivatives(x), (f"sdf__{d}" for d in self.dim_keys) - ) - else: - sdf_derivatives_dict = {} - - return {**x_dict, **sdf_dict, **sdf_derivatives_dict} + return {**x_dict, **sdf_dict} def sample_boundary(self, n, random="pseudo", criteria=None, evenly=False): """Compute the random points in the geometry and return those meet criteria.""" @@ -204,18 +196,6 @@ def periodic_point(self, x: np.ndarray, component: int): """Compute the periodic image of x.""" raise NotImplementedError(f"{self}.periodic_point to be implemented") - def sdf_derivatives(self, x: np.ndarray, eps=0.0001) -> dict: - # compute sdf by centered difference - sdf_derivative = np.zeros_like(x) - for i in range(len(self.dim_keys)): - delta = np.zeros_like(x) - delta[:, i] += eps / 2 - sdf_plus = self.sdf_func(x + delta) - sdf_minus = self.sdf_func(x - delta) - # store sdf derivative - sdf_derivative[:, i] = (sdf_plus - sdf_minus) / eps - return sdf_derivative - def union(self, other): """CSG Union.""" from ppsci.geometry import csg From 2e37bcab541dfdaee82be4fdaaf6b99c3b5e1d4b Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 05:34:23 +0000 Subject: [PATCH 39/48] remove print code in solver --- ppsci/solver/solver.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index afc5d77ba..ba11b5412 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -334,7 +334,6 @@ def convert_expr( extra_parameters, # os.path.join(self.output_dir, container.name, expr), # HACK: Activate it for DEBUG. 
) - logger.message(f"Convert expression[{name}]: {expr}") if self.constraint: convert_expr(self.constraint) From 6ecb31bfa62f7070903ef6f5120c7bb34cfcc7bf Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Sun, 10 Sep 2023 10:04:32 +0000 Subject: [PATCH 40/48] rename sympy_to_function to lambdify and add it in ppsci.* --- docs/zh/api/utils.md | 1 + ppsci/__init__.py | 2 + ppsci/solver/solver.py | 3 +- ppsci/utils/__init__.py | 2 + ppsci/utils/sym_to_func.py | 126 +++++++++++++++++++++++++++++-------- 5 files changed, 107 insertions(+), 27 deletions(-) diff --git a/docs/zh/api/utils.md b/docs/zh/api/utils.md index e4bc9ac6f..8ce7878a3 100644 --- a/docs/zh/api/utils.md +++ b/docs/zh/api/utils.md @@ -19,5 +19,6 @@ - load_checkpoint - load_pretrain - save_checkpoint + - lambdify show_root_heading: false heading_level: 3 diff --git a/ppsci/__init__.py b/ppsci/__init__.py index 3877401fa..5ddca0615 100644 --- a/ppsci/__init__.py +++ b/ppsci/__init__.py @@ -29,6 +29,7 @@ from ppsci.utils.checker import run_check # isort:skip from ppsci.utils.checker import run_check_mesh # isort:skip +from ppsci.utils import lambdify # isort:skip __all__ = [ "arch", @@ -47,4 +48,5 @@ "experimental", "run_check", "run_check_mesh", + "lambdify", ] diff --git a/ppsci/solver/solver.py b/ppsci/solver/solver.py index ba11b5412..4de5360eb 100644 --- a/ppsci/solver/solver.py +++ b/ppsci/solver/solver.py @@ -44,7 +44,6 @@ from ppsci.utils import logger from ppsci.utils import misc from ppsci.utils import save_load -from ppsci.utils import sym_to_func class Solver: @@ -328,7 +327,7 @@ def convert_expr( for container in container_dict.values(): for name, expr in container.output_expr.items(): if isinstance(expr, sp.Basic): - container.output_expr[name] = sym_to_func.sympy_to_function( + container.output_expr[name] = ppsci.lambdify( expr, self.model, extra_parameters, diff --git a/ppsci/utils/__init__.py b/ppsci/utils/__init__.py index 1d341c40b..df95c7182 100644 --- a/ppsci/utils/__init__.py +++ b/ppsci/utils/__init__.py @@ -32,6 +32,7 @@ from ppsci.utils.save_load import load_checkpoint from ppsci.utils.save_load import load_pretrain from ppsci.utils.save_load import save_checkpoint +from ppsci.utils.sym_to_func import lambdify __all__ = [ "initializer", @@ -54,4 +55,5 @@ "load_checkpoint", "load_pretrain", "save_checkpoint", + "lambdify", ] diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py index 0cc5eba9d..06ef4c3b8 100644 --- a/ppsci/utils/sym_to_func.py +++ b/ppsci/utils/sym_to_func.py @@ -37,7 +37,7 @@ from ppsci.autodiff import jacobian __all__ = [ - "sympy_to_function", + "lambdify", ] @@ -45,30 +45,60 @@ SYMPY_BUILTIN_FUNC: TypeAlias = Union[ sp.sin, + sp.sinh, + sp.asin, sp.cos, + sp.acos, + sp.cosh, + sp.tan, + sp.atan, + sp.atan2, + sp.acosh, + sp.asinh, + sp.tanh, + sp.atanh, + sp.erf, + sp.loggamma, sp.exp, sp.Pow, sp.log, - sp.tan, sp.Max, sp.Min, sp.Abs, sp.Heaviside, + sp.sign, + sp.ceiling, + sp.floor, sp.Add, sp.Mul, ] SYMPT_TO_PADDLE = { sp.sin: paddle.sin, + sp.sinh: paddle.sinh, + sp.asin: paddle.asin, sp.cos: paddle.cos, + sp.acos: paddle.acos, + sp.cosh: paddle.cosh, + sp.tan: paddle.tan, + sp.atan: paddle.atan, + sp.atan2: paddle.atan2, + sp.acosh: paddle.acosh, + sp.asinh: paddle.asinh, + sp.tanh: paddle.tanh, + sp.atanh: paddle.atanh, + sp.erf: paddle.erf, + sp.loggamma: paddle.lgamma, sp.exp: paddle.exp, sp.Pow: paddle.pow, sp.log: paddle.log, - sp.tan: paddle.tan, sp.Max: paddle.maximum, sp.Min: paddle.minimum, sp.Abs: paddle.abs, sp.Heaviside: 
functools.partial(paddle.heaviside, y=paddle.zeros([])), + sp.sign: paddle.sign, + sp.ceiling: paddle.ceil, + sp.floor: paddle.floor, # NOTE: sp.Add and sp.Mul is not included here for unalignment with sympy # and are implemented manually. } @@ -115,7 +145,10 @@ def forward(self, **kwargs): raise NotImplementedError("Node.forward is not implemented") def __str__(self): - return f"{self.__class__.__name__}(expr: {self.expr}, expr_type: {type(self.expr)})" + return ( + f"{self.__class__.__name__}(expr: {self.expr}, " + f"expr_type: {type(self.expr)})" + ) def __repr__(self): return f"{self.__class__.__name__}(expr: {self.expr})" @@ -151,7 +184,7 @@ class OperatorNode(Node): def __init__(self, expr: SYMPY_BUILTIN_FUNC): super().__init__(expr) - # preprocess childs' key instead of processing at run-time + # preprocess childs' key instead of processing at run-time in forward # which can reduce considerable overhead of time for calling "_cvt_to_key" if self.expr.func == sp.Derivative: self.childs = [_cvt_to_key(self.expr.args[0])] + [ @@ -169,6 +202,10 @@ def __init__(self, expr: SYMPY_BUILTIN_FUNC): elif self.expr.func == sp.Heaviside: self._apply_func = self._heaviside_operator_func self._auxiliary_func = SYMPT_TO_PADDLE[sp.Heaviside] + elif self.expr.func == sp.Min: + self._apply_func = self._minimum_operator_func + elif self.expr.func == sp.Max: + self._apply_func = self._maximum_operator_func else: self._apply_func = self._vanilla_operator_func self._auxiliary_func = SYMPT_TO_PADDLE[self.expr.func] @@ -207,6 +244,28 @@ def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = self._auxiliary_func(data_dict[self.childs[0]]) return data_dict + def _minimum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = paddle.minimum( + data_dict[self.childs[0]], data_dict[self.childs[1]] + ) + for i in range(2, len(self.childs)): + data_dict[self.key] = paddle.minimum( + data_dict[data_dict[self.key]], + data_dict[data_dict[self.childs[i]]], + ) + return data_dict + + def _maximum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = paddle.maximum( + data_dict[self.childs[0]], data_dict[self.childs[1]] + ) + for i in range(2, len(self.childs)): + data_dict[self.key] = paddle.maximum( + data_dict[data_dict[self.key]], + data_dict[data_dict[self.childs[i]]], + ) + return data_dict + def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: data_dict[self.key] = self._auxiliary_func( *tuple(data_dict[child] for child in self.childs) @@ -259,7 +318,8 @@ def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): self.expr = float(self.expr) else: raise TypeError( - f"expr({expr}) should be Float/Integer/Boolean/Rational, but got {type(self.expr)}" + "expr({expr}) should be Float/Integer/Boolean/Rational, " + f"but got {type(self.expr)}" ) self.expr = paddle.to_tensor(self.expr) @@ -349,15 +409,30 @@ def _visualize_graph(nodes: List[sp.Basic], graph_filename: str): SYMPY_BUILTIN_NAME = { sp.sin: "sin", + sp.sinh: "sinh", + sp.asin: "asin", sp.cos: "cos", + sp.acos: "acos", + sp.cosh: "cosh", + sp.tan: "tan", + sp.atan: "atan", + sp.atan2: "atan2", + sp.acosh: "acosh", + sp.asinh: "asinh", + sp.tanh: "tanh", + sp.atanh: "atanh", + sp.erf: "erf", + sp.loggamma: "loggamma", sp.exp: "exp", sp.Pow: "Pow", sp.log: "log", - sp.tan: "tan", sp.Max: "Max", sp.Min: "Min", sp.Abs: "Abs", sp.Heaviside: "Heaviside", + sp.sign: "sign", + sp.ceiling: "ceiling", + sp.floor: "floor", sp.Add: "Add", sp.Mul: "Mul", } @@ -411,12 
+486,12 @@ def add_edge(u: str, v: str, u_color: str = C_DATA, v_color: str = C_DATA): graph.draw(image_path, prog="dot") graph.write(dot_path) logger.message( - f"Computational graph has been writen to {image_path} and {dot_path}," + f"Computational graph has been writen to {image_path} and {dot_path}. " "dot file can be visualized at https://dreampuf.github.io/GraphvizOnline/" ) -def sympy_to_function( +def lambdify( expr: sp.Expr, models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, extra_parameters: Optional[Sequence[paddle.Tensor]] = None, @@ -426,26 +501,29 @@ def sympy_to_function( Args: expr (sp.Expr): Sympy expression to be converted. - models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for computing forward result in `LayerNode`. - extra_parameters (Optional[nn.ParameterList]): Extra learnable parameters. Defaults to None. + models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for + computing forward result in `LayerNode`. + extra_parameters (Optional[nn.ParameterList]): Extra learnable parameters. + Defaults to None. graph_filename (Optional[str]): Save computational graph to `graph_filename.png` - for given `expr`, if `graph_filename` is not None and a valid string, such as 'momentum_x'. Defaults to None. + for given `expr`, if `graph_filename` is not None and a valid string, + such as 'momentum_x'. Defaults to None. Returns: - ComposedNode: Callable object for computing expr with necessary input(s) data in dict given. + ComposedNode: Callable object for computing expr with necessary input(s) data + in dict given. Examples: >>> import paddle + >>> import ppsci >>> import sympy as sp - >>> from ppsci import arch - >>> from ppsci.utils import sym_to_func >>> a, b, c, x, y = sp.symbols("a b c x y") >>> u = sp.Function("u")(x, y) >>> v = sp.Function("v")(x, y) >>> z = -a + b * (c ** 2) + u * v + 2.3 - >>> model = arch.MLP(("x", "y"), ("u", "v"), 4, 16) + >>> model = ppsci.arch.MLP(("x", "y"), ("u", "v"), 4, 16) >>> batch_size = 13 >>> a_tensor = paddle.randn([batch_size, 1]) @@ -461,7 +539,7 @@ def sympy_to_function( ... -a_tensor + b_tensor * (c_tensor ** 2) ... + u_tensor * v_tensor + 2.3 ... ) - >>> z_tensor_sympy = sym_to_func.sympy_to_function(z, model)( + >>> z_tensor_sympy = ppsci.lambdify(z, model)( ... { ... "a": a_tensor, ... "b": b_tensor, @@ -475,7 +553,7 @@ def sympy_to_function( True """ - # NOTE: Those simplify methods seem complicate given expr instead, so not use them here + # NOTE: Those simplify methods may complicate given expr instead, so not use here # simplify expression to reduce nodes in tree # expr = sp.nsimplify(expr) # expr = sp.expand(expr) @@ -488,7 +566,7 @@ def sympy_to_function( sympy_nodes = [] sympy_nodes = _post_traverse(expr, sympy_nodes) - # remove unnecessary symbol node for already in input dict(except for paramter symbol) + # remove unnecessary symbol nodes already in input dict(except for paramter symbol) if not extra_parameters: extra_parameters = () _parameter_names = tuple(param.name for param in extra_parameters) @@ -528,9 +606,9 @@ def sympy_to_function( ) if match_index is not None: raise ValueError( - f"Name of function({node}) should be unique along given models," - f" but got same output_key({node.func.name}) in models[{match_index}]" - f" and models[{j}]." + f"Name of function({node}) should be unique along given" + f" models, but got same output_key({node.func.name}) " + f"in models[{match_index}] and models[{j}]." 
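# Each output key may be produced by at most one of the given models; a duplicate
# key would make it ambiguous which LayerNode should evaluate the function.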
) match_index = j elif node.is_Number or node.is_NumberSymbol: @@ -543,9 +621,7 @@ def sympy_to_function( ) ) else: - raise NotImplementedError( - f"The node {node} is not supported in sympy_to_function." - ) + raise NotImplementedError(f"The node {node} is not supported in lambdify.") # NOTE: Visualize computational graph using 'pygraphviz' if isinstance(graph_filename, str): From 0a88a083352977e7cc0034f81cefadf0145612b8 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Mon, 11 Sep 2023 02:29:48 +0000 Subject: [PATCH 41/48] rename for test files --- test/equation/test_biharmonic.py | 4 ++-- test/equation/test_laplace.py | 4 ++-- test/equation/test_linear_elasticity.py | 4 ++-- test/equation/test_navier_stokes.py | 4 ++-- test/equation/test_normal_dot_vec.py | 4 ++-- test/equation/test_poisson.py | 4 ++-- test/equation/test_viv.py | 4 ++-- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/test/equation/test_biharmonic.py b/test/equation/test_biharmonic.py index c4a210576..8e1d6c2be 100644 --- a/test/equation/test_biharmonic.py +++ b/test/equation/test_biharmonic.py @@ -2,9 +2,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func __all__ = [] @@ -58,7 +58,7 @@ def hessian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": biharmonic_equation = equation.Biharmonic(dim=dim, q=q, D=D) for name, expr in biharmonic_equation.equations.items(): if isinstance(expr, sp.Basic): - biharmonic_equation.equations[name] = sym_to_func.sympy_to_function( + biharmonic_equation.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_laplace.py b/test/equation/test_laplace.py index 257ec6b25..6c438df3e 100644 --- a/test/equation/test_laplace.py +++ b/test/equation/test_laplace.py @@ -2,9 +2,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func __all__ = [] @@ -49,7 +49,7 @@ def hessian(y: "paddle.Tensor", x: "paddle.Tensor") -> "paddle.Tensor": laplace_equation = equation.Laplace(dim=dim) for name, expr in laplace_equation.equations.items(): if isinstance(expr, sp.Basic): - laplace_equation.equations[name] = sym_to_func.sympy_to_function( + laplace_equation.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_linear_elasticity.py b/test/equation/test_linear_elasticity.py index c157f4934..5f54041e7 100644 --- a/test/equation/test_linear_elasticity.py +++ b/test/equation/test_linear_elasticity.py @@ -2,9 +2,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: @@ -231,7 +231,7 @@ def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): ) for name, expr in linear_elasticity.equations.items(): if isinstance(expr, sp.Basic): - linear_elasticity.equations[name] = sym_to_func.sympy_to_function( + linear_elasticity.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_navier_stokes.py b/test/equation/test_navier_stokes.py index 9f2bfe282..0279374ac 100644 --- a/test/equation/test_navier_stokes.py +++ b/test/equation/test_navier_stokes.py @@ -2,9 +2,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func def jacobian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: @@ -138,7 +138,7 @@ 
def test_navierstokes(nu, rho, dim, time): navier_stokes_equation = equation.NavierStokes(nu=nu, rho=rho, dim=dim, time=time) for name, expr in navier_stokes_equation.equations.items(): if isinstance(expr, sp.Basic): - navier_stokes_equation.equations[name] = sym_to_func.sympy_to_function( + navier_stokes_equation.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_normal_dot_vec.py b/test/equation/test_normal_dot_vec.py index 448a9834c..e701d2ea6 100644 --- a/test/equation/test_normal_dot_vec.py +++ b/test/equation/test_normal_dot_vec.py @@ -2,9 +2,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func def compute_func(x: tuple, y: tuple): @@ -40,7 +40,7 @@ def test_normal_dot_vel(): norm_doc_vec = equation.NormalDotVec(output_dims) for name, expr in norm_doc_vec.equations.items(): if isinstance(expr, sp.Basic): - norm_doc_vec.equations[name] = sym_to_func.sympy_to_function( + norm_doc_vec.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_poisson.py b/test/equation/test_poisson.py index d3bedf81d..ca86d98db 100644 --- a/test/equation/test_poisson.py +++ b/test/equation/test_poisson.py @@ -16,9 +16,9 @@ import pytest import sympy as sp +import ppsci from ppsci import arch from ppsci import equation -from ppsci.utils import sym_to_func __all__ = [] @@ -62,7 +62,7 @@ def hessian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: poisson_equation = equation.Poisson(dim=dim) for name, expr in poisson_equation.equations.items(): if isinstance(expr, sp.Basic): - poisson_equation.equations[name] = sym_to_func.sympy_to_function( + poisson_equation.equations[name] = ppsci.lambdify( expr, model, ) diff --git a/test/equation/test_viv.py b/test/equation/test_viv.py index 363835d99..2dfc4f778 100644 --- a/test/equation/test_viv.py +++ b/test/equation/test_viv.py @@ -3,9 +3,9 @@ import sympy as sp from paddle.nn import initializer +import ppsci from ppsci import arch from ppsci.equation.pde import Vibration -from ppsci.utils import sym_to_func @pytest.mark.parametrize("rho,k1,k2", [(1.0, 4.0, -1.0)]) @@ -54,7 +54,7 @@ def hessian(y: paddle.Tensor, x: paddle.Tensor) -> paddle.Tensor: vibration_equation = Vibration(rho=rho, k1=k1, k2=k2) for name, expr in vibration_equation.equations.items(): if isinstance(expr, sp.Basic): - vibration_equation.equations[name] = sym_to_func.sympy_to_function( + vibration_equation.equations[name] = ppsci.lambdify( expr, model, vibration_equation.learnable_parameters, From 18a16388888c4b7431a25329efd050cd919c0b71 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Tue, 12 Sep 2023 05:10:52 +0000 Subject: [PATCH 42/48] rename sym_to_func.py to symbolic.py --- ppsci/utils/__init__.py | 2 +- ppsci/utils/sym_to_func.py | 631 ------------------------------------- 2 files changed, 1 insertion(+), 632 deletions(-) delete mode 100644 ppsci/utils/sym_to_func.py diff --git a/ppsci/utils/__init__.py b/ppsci/utils/__init__.py index df95c7182..e6e327ffe 100644 --- a/ppsci/utils/__init__.py +++ b/ppsci/utils/__init__.py @@ -32,7 +32,7 @@ from ppsci.utils.save_load import load_checkpoint from ppsci.utils.save_load import load_pretrain from ppsci.utils.save_load import save_checkpoint -from ppsci.utils.sym_to_func import lambdify +from ppsci.utils.symbolic import lambdify __all__ = [ "initializer", diff --git a/ppsci/utils/sym_to_func.py b/ppsci/utils/sym_to_func.py deleted file mode 100644 index 06ef4c3b8..000000000 --- 
a/ppsci/utils/sym_to_func.py +++ /dev/null @@ -1,631 +0,0 @@ -# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Sympy to python function conversion module -""" - -from __future__ import annotations - -import functools -from typing import Dict -from typing import List -from typing import Optional -from typing import Sequence -from typing import Tuple -from typing import Union - -import paddle -import sympy as sp -from paddle import nn -from typing_extensions import TypeAlias - -from ppsci import arch -from ppsci import equation -from ppsci.autodiff import hessian -from ppsci.autodiff import jacobian - -__all__ = [ - "lambdify", -] - - -DATA_DICT: TypeAlias = Dict[str, paddle.Tensor] - -SYMPY_BUILTIN_FUNC: TypeAlias = Union[ - sp.sin, - sp.sinh, - sp.asin, - sp.cos, - sp.acos, - sp.cosh, - sp.tan, - sp.atan, - sp.atan2, - sp.acosh, - sp.asinh, - sp.tanh, - sp.atanh, - sp.erf, - sp.loggamma, - sp.exp, - sp.Pow, - sp.log, - sp.Max, - sp.Min, - sp.Abs, - sp.Heaviside, - sp.sign, - sp.ceiling, - sp.floor, - sp.Add, - sp.Mul, -] - -SYMPT_TO_PADDLE = { - sp.sin: paddle.sin, - sp.sinh: paddle.sinh, - sp.asin: paddle.asin, - sp.cos: paddle.cos, - sp.acos: paddle.acos, - sp.cosh: paddle.cosh, - sp.tan: paddle.tan, - sp.atan: paddle.atan, - sp.atan2: paddle.atan2, - sp.acosh: paddle.acosh, - sp.asinh: paddle.asinh, - sp.tanh: paddle.tanh, - sp.atanh: paddle.atanh, - sp.erf: paddle.erf, - sp.loggamma: paddle.lgamma, - sp.exp: paddle.exp, - sp.Pow: paddle.pow, - sp.log: paddle.log, - sp.Max: paddle.maximum, - sp.Min: paddle.minimum, - sp.Abs: paddle.abs, - sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), - sp.sign: paddle.sign, - sp.ceiling: paddle.ceil, - sp.floor: paddle.floor, - # NOTE: sp.Add and sp.Mul is not included here for unalignment with sympy - # and are implemented manually. -} - - -def _cvt_to_key(expr: sp.Basic) -> str: - """Convert sympy expression to a string key, mainly as retrieval key in dict. - - Args: - expr (sp.Basic): Sympy expression. - - Returns: - str: Converted string key. - """ - if isinstance(expr, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function)): - if hasattr(expr, "name"): - # use name of custom function instead of itself. - return expr.name - else: - return str(expr) - elif isinstance(expr, sp.Derivative): - # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" - expr_str = expr.args[0].name - for symbol, order in expr.args[1:]: - expr_str += f"__{symbol}" * order - return expr_str - else: - return str(expr) - - -class Node(nn.Layer): - """The base class of the node in expression tree. - - Args: - expr (sp.Basic): Sympy expression. 
- """ - - def __init__(self, expr: sp.Basic): - super().__init__() - self.expr = expr - self.key = _cvt_to_key(self.expr) - - def forward(self, **kwargs): - raise NotImplementedError("Node.forward is not implemented") - - def __str__(self): - return ( - f"{self.__class__.__name__}(expr: {self.expr}, " - f"expr_type: {type(self.expr)})" - ) - - def __repr__(self): - return f"{self.__class__.__name__}(expr: {self.expr})" - - -class DetachNode(nn.Layer): - """Class for detach operation in converted expression tree. - - Args: - expr (sp.Basic): Sympy expression. - """ - - def __init__(self, expr: sp.Basic): - super().__init__() - self.expr = expr - self.key = _cvt_to_key(self.expr) - self.child = _cvt_to_key(self.expr.args[0]) - - def forward(self, data_dict: DATA_DICT): - if self.key in data_dict: - return data_dict - - data_dict[self.key] = data_dict[self.child].detach() - return data_dict - - -class OperatorNode(Node): - """Class for operator node in converted expression tree. - - Args: - expr (SYMPY_BUILTIN_FUNC): Sympy expression. - """ - - def __init__(self, expr: SYMPY_BUILTIN_FUNC): - super().__init__(expr) - # preprocess childs' key instead of processing at run-time in forward - # which can reduce considerable overhead of time for calling "_cvt_to_key" - if self.expr.func == sp.Derivative: - self.childs = [_cvt_to_key(self.expr.args[0])] + [ - (_cvt_to_key(arg), order) for (arg, order) in self.expr.args[1:] - ] - else: - self.childs = [_cvt_to_key(arg) for arg in self.expr.args] - - if self.expr.func == sp.Add: - self._apply_func = self._add_operator_func - elif self.expr.func == sp.Mul: - self._apply_func = self._mul_operator_func - elif self.expr.func == sp.Derivative: - self._apply_func = self._derivate_operator_func - elif self.expr.func == sp.Heaviside: - self._apply_func = self._heaviside_operator_func - self._auxiliary_func = SYMPT_TO_PADDLE[sp.Heaviside] - elif self.expr.func == sp.Min: - self._apply_func = self._minimum_operator_func - elif self.expr.func == sp.Max: - self._apply_func = self._maximum_operator_func - else: - self._apply_func = self._vanilla_operator_func - self._auxiliary_func = SYMPT_TO_PADDLE[self.expr.func] - - def forward(self, data_dict: DATA_DICT): - # use cache - if self.key in data_dict: - return data_dict - - return self._apply_func(data_dict) - - def _add_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = data_dict[self.childs[0]] - for p in self.childs[1:]: - data_dict[self.key] += data_dict[p] - return data_dict - - def _mul_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = data_dict[self.childs[0]] - for child in self.childs[1:]: - data_dict[self.key] *= data_dict[child] - return data_dict - - def _derivate_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = data_dict[self.childs[0]] - for child, order in self.childs[1:]: - if order & 1: - data_dict[self.key] = jacobian(data_dict[self.key], data_dict[child]) - order -= 1 - for _ in range(0, order, 2): - data_dict[self.key] = hessian(data_dict[self.key], data_dict[child]) - order -= 2 - return data_dict - - def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._auxiliary_func(data_dict[self.childs[0]]) - return data_dict - - def _minimum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = paddle.minimum( - data_dict[self.childs[0]], data_dict[self.childs[1]] - ) - for i in range(2, len(self.childs)): - data_dict[self.key] = paddle.minimum( 
- data_dict[data_dict[self.key]], - data_dict[data_dict[self.childs[i]]], - ) - return data_dict - - def _maximum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = paddle.maximum( - data_dict[self.childs[0]], data_dict[self.childs[1]] - ) - for i in range(2, len(self.childs)): - data_dict[self.key] = paddle.maximum( - data_dict[data_dict[self.key]], - data_dict[data_dict[self.childs[i]]], - ) - return data_dict - - def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self._auxiliary_func( - *tuple(data_dict[child] for child in self.childs) - ) - return data_dict - - -class LayerNode(Node): - """Class for layer node in converted expression tree. - - Args: - expr (sp.core.function.UndefinedFunction): Sympy expression. - model (arch.Arch): NN model for computing forward result in this node. - """ - - def __init__( - self, - expr: sp.core.function.UndefinedFunction, - model: arch.Arch, - ): - super().__init__(expr) - self.model = model - - def forward(self, data_dict: DATA_DICT) -> DATA_DICT: - # use cache - if self.key in data_dict: - return data_dict - - output_dict = self.model(data_dict) - data_dict.update(output_dict) - - return data_dict - - -class ConstantNode(Node): - """Class for constant variable node in converted expression tree. - - Args: - expr (Union[sp.Number, sp.NumberSymbol]): Number expression. - """ - - def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): - super().__init__(expr) - if ( - self.expr.is_Float - or self.expr.is_Integer - or self.expr.is_Boolean - or self.expr.is_Rational - ): - self.expr = float(self.expr) - else: - raise TypeError( - "expr({expr}) should be Float/Integer/Boolean/Rational, " - f"but got {type(self.expr)}" - ) - self.expr = paddle.to_tensor(self.expr) - - def forward(self, data_dict: DATA_DICT) -> DATA_DICT: - # use cache - if self.key in data_dict: - return data_dict - - data_dict[self.key] = self.expr - return data_dict - - -class ParameterNode(Node): - """Class for constant variable node in converted expression tree. - - Args: - expr (sp.Symbol): Parameter expression. - parameter (paddle.framework.io.EagerParamBase): Parameter tensor. - """ - - def __init__(self, expr: sp.Symbol, parameter: paddle.framework.io.EagerParamBase): - super().__init__(expr) - self.parameter = parameter - - def forward(self, data_dict: DATA_DICT) -> DATA_DICT: - data_dict[self.key] = self.parameter - return data_dict - - -class ComposedNode(nn.Layer): - """ - Compose list of several callable objects together. - """ - - def __init__(self, callable_nodes: List[Node]): - super().__init__() - self.callable_nodes = callable_nodes - - def forward(self, data_dict: DATA_DICT) -> DATA_DICT: - # call all callable_nodes in order - for func in self.callable_nodes: - data_dict = func(data_dict) - - # return result of last node(root node) for target - return data_dict[self.callable_nodes[-1].key] - - -def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: - """Traverse sympy expression tree in postorder. - - Args: - cur_node (sp.Basic): Sympy expression of current node. - nodes (List[sp.Basic]): Node list storing all tree nodes in postorder. - - Returns: - List[sp.Basic]: Node list storing all tree nodes in postorder. 
- """ - # traverse into sub-nodes - if isinstance(cur_node, sp.Function): - for arg in cur_node.args: - nodes = _post_traverse(arg, nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sp.Derivative): - nodes = _post_traverse(cur_node.args[0], nodes) - nodes.append(cur_node) - elif isinstance(cur_node, sp.Symbol): - nodes.append(cur_node) - return nodes - elif isinstance(cur_node, sp.Number): - nodes.append(cur_node) - else: - for arg in cur_node.args: - nodes = _post_traverse(arg, nodes) - nodes.append(cur_node) - return nodes - - -def _visualize_graph(nodes: List[sp.Basic], graph_filename: str): - try: - import pygraphviz - except ModuleNotFoundError: - raise ModuleNotFoundError( - "Please install pygraphviz by steps below:\n" - "1. apt-get install graphviz graphviz-dev\n" - "2. python -m pip install pygraphviz" - ) - - SYMPY_BUILTIN_NAME = { - sp.sin: "sin", - sp.sinh: "sinh", - sp.asin: "asin", - sp.cos: "cos", - sp.acos: "acos", - sp.cosh: "cosh", - sp.tan: "tan", - sp.atan: "atan", - sp.atan2: "atan2", - sp.acosh: "acosh", - sp.asinh: "asinh", - sp.tanh: "tanh", - sp.atanh: "atanh", - sp.erf: "erf", - sp.loggamma: "loggamma", - sp.exp: "exp", - sp.Pow: "Pow", - sp.log: "log", - sp.Max: "Max", - sp.Min: "Min", - sp.Abs: "Abs", - sp.Heaviside: "Heaviside", - sp.sign: "sign", - sp.ceiling: "ceiling", - sp.floor: "floor", - sp.Add: "Add", - sp.Mul: "Mul", - } - naming_counter = {k: 0 for k in SYMPY_BUILTIN_NAME} - - def get_operator_name(node): - ret = f"{SYMPY_BUILTIN_NAME[node.func]}_{naming_counter[node.func]}" - naming_counter[node.func] += 1 - return ret - - graph = pygraphviz.AGraph(directed=True, rankdir="TB") - C_FUNC = "#9196f1" # purple color function node - C_DATA = "#feb64d" # oringe color for data node - C_EDGE = "#000000" # black color for edge - - def add_edge(u: str, v: str, u_color: str = C_DATA, v_color: str = C_DATA): - """Add an edge from `u` to `v`. - - Args: - u (str): Name of begin node u. - v (str): Name of end node v. - u_color (str, optional): _description_. Defaults to C_DATA. - v_color (str, optional): _description_. Defaults to C_DATA. - """ - graph.add_node(u, style="filled", shape="ellipse", color=u_color) - graph.add_node(v, style="filled", shape="ellipse", color=v_color) - graph.add_edge(u, v, color=C_EDGE, style="solid", penwidth=0.5, arrowsize=0.5) - - for node in nodes: - if isinstance(node, tuple(SYMPY_BUILTIN_NAME.keys())): - operator_str = get_operator_name(node) - for arg in node.args: - add_edge(_cvt_to_key(arg), operator_str, v_color=C_FUNC) - add_edge(operator_str, _cvt_to_key(node), u_color=C_FUNC) - if isinstance(node, sp.Function): - for arg in node.args: - add_edge(_cvt_to_key(arg), str(node), v_color=C_FUNC) - add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) - elif isinstance(node, sp.Derivative): - add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) - add_edge(_cvt_to_key(node.args[0]), str(node), v_color=C_FUNC) - for arg in node.args[1:]: - add_edge(_cvt_to_key(arg[0]), str(node), v_color=C_FUNC) - - # export graph to image - from ppsci.utils import logger - - graph.layout() - image_path = f"{graph_filename}.png" - dot_path = f"{graph_filename}.dot" - graph.draw(image_path, prog="dot") - graph.write(dot_path) - logger.message( - f"Computational graph has been writen to {image_path} and {dot_path}. 
" - "dot file can be visualized at https://dreampuf.github.io/GraphvizOnline/" - ) - - -def lambdify( - expr: sp.Expr, - models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, - extra_parameters: Optional[Sequence[paddle.Tensor]] = None, - graph_filename: Optional[str] = None, -) -> ComposedNode: - """Convert sympy expression to callable function. - - Args: - expr (sp.Expr): Sympy expression to be converted. - models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for - computing forward result in `LayerNode`. - extra_parameters (Optional[nn.ParameterList]): Extra learnable parameters. - Defaults to None. - graph_filename (Optional[str]): Save computational graph to `graph_filename.png` - for given `expr`, if `graph_filename` is not None and a valid string, - such as 'momentum_x'. Defaults to None. - - Returns: - ComposedNode: Callable object for computing expr with necessary input(s) data - in dict given. - - Examples: - >>> import paddle - >>> import ppsci - >>> import sympy as sp - - >>> a, b, c, x, y = sp.symbols("a b c x y") - >>> u = sp.Function("u")(x, y) - >>> v = sp.Function("v")(x, y) - >>> z = -a + b * (c ** 2) + u * v + 2.3 - - >>> model = ppsci.arch.MLP(("x", "y"), ("u", "v"), 4, 16) - - >>> batch_size = 13 - >>> a_tensor = paddle.randn([batch_size, 1]) - >>> b_tensor = paddle.randn([batch_size, 1]) - >>> c_tensor = paddle.randn([batch_size, 1]) - >>> x_tensor = paddle.randn([batch_size, 1]) - >>> y_tensor = paddle.randn([batch_size, 1]) - - >>> model_output_dict = model({"x": x_tensor, "y": y_tensor}) - >>> u_tensor, v_tensor = model_output_dict["u"], model_output_dict["v"] - - >>> z_tensor_manually = ( - ... -a_tensor + b_tensor * (c_tensor ** 2) - ... + u_tensor * v_tensor + 2.3 - ... ) - >>> z_tensor_sympy = ppsci.lambdify(z, model)( - ... { - ... "a": a_tensor, - ... "b": b_tensor, - ... "c": c_tensor, - ... "x": x_tensor, - ... "y": y_tensor, - ... } - ... 
) - - >>> paddle.allclose(z_tensor_manually, z_tensor_sympy).item() - True - """ - - # NOTE: Those simplify methods may complicate given expr instead, so not use here - # simplify expression to reduce nodes in tree - # expr = sp.nsimplify(expr) - # expr = sp.expand(expr) - # expr = sp.simplify(expr) - - # remove 1.0 from sympy expression tree - expr = expr.subs(1.0, 1) - - # convert sympy expression tree to list of nodes in postorder - sympy_nodes = [] - sympy_nodes = _post_traverse(expr, sympy_nodes) - - # remove unnecessary symbol nodes already in input dict(except for paramter symbol) - if not extra_parameters: - extra_parameters = () - _parameter_names = tuple(param.name for param in extra_parameters) - sympy_nodes = [ - node - for node in sympy_nodes - if (not node.is_Symbol) or (_cvt_to_key(node) in _parameter_names) - ] - - # remove duplicates with topo-order kept - sympy_nodes = list(dict.fromkeys(sympy_nodes)) - - if isinstance(models, arch.ModelList): - models = tuple(models.model_list[i] for i in range(len(models.model_list))) - if not isinstance(models, (tuple, list)): - models = (models,) - - # convert sympy node to callable node - callable_nodes = [] - for i, node in enumerate(sympy_nodes): - if isinstance( - node, tuple(SYMPT_TO_PADDLE.keys()) + (sp.Add, sp.Mul, sp.Derivative) - ): - callable_nodes.append(OperatorNode(node)) - elif isinstance(node, sp.Function): - if node.name == equation.DETACH_FUNC_NAME: - callable_nodes.append(DetachNode(node)) - else: - match_index = None - for j, model in enumerate(models): - if str(node.func.name) in model.output_keys: - callable_nodes.append( - LayerNode( - node, - model, - ) - ) - if match_index is not None: - raise ValueError( - f"Name of function({node}) should be unique along given" - f" models, but got same output_key({node.func.name}) " - f"in models[{match_index}] and models[{j}]." 
- ) - match_index = j - elif node.is_Number or node.is_NumberSymbol: - callable_nodes.append(ConstantNode(node)) - elif isinstance(node, sp.Symbol): - callable_nodes.append( - ParameterNode( - node, - *[param for param in extra_parameters if param.name == node.name], - ) - ) - else: - raise NotImplementedError(f"The node {node} is not supported in lambdify.") - - # NOTE: Visualize computational graph using 'pygraphviz' - if isinstance(graph_filename, str): - _visualize_graph(sympy_nodes, graph_filename) - - # Compose callable nodes into one callable object - return ComposedNode(callable_nodes) From 8ccf858acd273c6a238b4a255de4b67a8e6d4285 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Tue, 12 Sep 2023 05:13:23 +0000 Subject: [PATCH 43/48] update linear_init_ and conv_init_ to kaiming style --- ppsci/utils/initializer.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/ppsci/utils/initializer.py b/ppsci/utils/initializer.py index 8c32980d3..de8a992f2 100644 --- a/ppsci/utils/initializer.py +++ b/ppsci/utils/initializer.py @@ -91,8 +91,10 @@ def norm_cdf(x): _tensor.erfinv_() # Transform to proper mean, std - _tensor = paddle.multiply(_tensor, paddle.to_tensor(std * math.sqrt(2.0))) - _tensor = paddle.add(_tensor, paddle.to_tensor(mean)) + _tensor = paddle.multiply( + _tensor, paddle.to_tensor(std * math.sqrt(2.0), tensor.dtype) + ) + _tensor = paddle.add(_tensor, paddle.to_tensor(mean, tensor.dtype)) # Clamp to ensure it"s in the proper range _tensor = paddle.clip(_tensor, min=a, max=b) @@ -436,9 +438,10 @@ def linear_init_(module: nn.Layer) -> None: Args: module (nn.Layer): Linear Layer to be initialized. """ - bound = 1 / math.sqrt(module.weight.shape[0]) - uniform_(module.weight, -bound, bound) + kaiming_uniform_(module.weight, a=math.sqrt(5)) if module.bias is not None: + fan_in, _ = _calculate_fan_in_and_fan_out(module.weight, reverse=True) + bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0 uniform_(module.bias, -bound, bound) @@ -448,7 +451,9 @@ def conv_init_(module: nn.Layer) -> None: Args: module (nn.Layer): Convolution Layer to be initialized. 
""" - bound = 1 / np.sqrt(np.prod(module.weight.shape[1:])) - uniform_(module.weight, -bound, bound) + kaiming_uniform_(module.weight, a=math.sqrt(5)) if module.bias is not None: - uniform_(module.bias, -bound, bound) + fan_in, _ = _calculate_fan_in_and_fan_out(module.weight, reverse=False) + if fan_in != 0: + bound = 1 / math.sqrt(fan_in) + uniform_(module.bias, -bound, bound) From a319e99f0d723c15ad3466f86cb63754ecba3cfe Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Tue, 12 Sep 2023 05:20:14 +0000 Subject: [PATCH 44/48] refine probability document --- docs/zh/api/probability.md | 2 ++ mkdocs.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/docs/zh/api/probability.md b/docs/zh/api/probability.md index e4915291f..4e1df7acc 100644 --- a/docs/zh/api/probability.md +++ b/docs/zh/api/probability.md @@ -1,3 +1,5 @@ +# Probability(概率编程) 模块 + ::: ppsci.probability handler: python options: diff --git a/mkdocs.yml b/mkdocs.yml index 5ee2ea9ce..4f16b6361 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -87,6 +87,7 @@ nav: - ppsci.validate: zh/api/validate.md - ppsci.visualize: zh/api/visualize.md - ppsci.experimental: zh/api/experimental.md + - ppsci.probability: zh/api/probability.md - 使用指南: zh/user_guide.md - 开发与复现指南: - 开发指南: zh/development.md From be67fb449db622d88f0026c122247a510ff2bee1 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Tue, 12 Sep 2023 05:34:58 +0000 Subject: [PATCH 45/48] change list to tuple --- ppsci/constraint/boundary_constraint.py | 3 +- ppsci/constraint/initial_constraint.py | 3 +- ppsci/constraint/integral_constraint.py | 3 +- ppsci/constraint/interior_constraint.py | 3 +- ppsci/constraint/periodic_constraint.py | 3 +- ppsci/constraint/supervised_constraint.py | 9 +- ppsci/utils/symbolic.py | 631 ++++++++++++++++++++++ ppsci/validate/geo_validator.py | 2 +- ppsci/validate/sup_validator.py | 4 +- 9 files changed, 645 insertions(+), 16 deletions(-) create mode 100644 ppsci/utils/symbolic.py diff --git a/ppsci/constraint/boundary_constraint.py b/ppsci/constraint/boundary_constraint.py index 6420a9685..afbad13c1 100644 --- a/ppsci/constraint/boundary_constraint.py +++ b/ppsci/constraint/boundary_constraint.py @@ -85,10 +85,9 @@ def __init__( weight_dict: Optional[Dict[str, Union[float, Callable]]] = None, name: str = "BC", ): - self.output_expr = output_expr self.label_dict = label_dict self.input_keys = geom.dim_keys - self.output_keys = list(label_dict.keys()) + self.output_keys = tuple(label_dict.keys()) self.output_expr = { k: v for k, v in output_expr.items() if k in self.output_keys } diff --git a/ppsci/constraint/initial_constraint.py b/ppsci/constraint/initial_constraint.py index d32d8c00c..351af60c7 100644 --- a/ppsci/constraint/initial_constraint.py +++ b/ppsci/constraint/initial_constraint.py @@ -88,10 +88,9 @@ def __init__( weight_dict: Optional[Dict[str, Callable]] = None, name: str = "IC", ): - self.output_expr = output_expr self.label_dict = label_dict self.input_keys = geom.dim_keys - self.output_keys = list(label_dict.keys()) + self.output_keys = tuple(label_dict.keys()) self.output_expr = { k: v for k, v in output_expr.items() if k in self.output_keys } diff --git a/ppsci/constraint/integral_constraint.py b/ppsci/constraint/integral_constraint.py index 511f82373..63d8314fa 100644 --- a/ppsci/constraint/integral_constraint.py +++ b/ppsci/constraint/integral_constraint.py @@ -85,10 +85,9 @@ def __init__( weight_dict: Optional[Dict[str, Callable]] = None, name: str = "IgC", ): - self.output_expr = output_expr 
self.label_dict = label_dict self.input_keys = geom.dim_keys - self.output_keys = list(label_dict.keys()) + self.output_keys = tuple(label_dict.keys()) self.output_expr = { k: v for k, v in output_expr.items() if k in self.output_keys } diff --git a/ppsci/constraint/interior_constraint.py b/ppsci/constraint/interior_constraint.py index d0c77df10..a333c82db 100644 --- a/ppsci/constraint/interior_constraint.py +++ b/ppsci/constraint/interior_constraint.py @@ -85,10 +85,9 @@ def __init__( weight_dict: Optional[Dict[str, Union[Callable, float]]] = None, name: str = "EQ", ): - self.output_expr = output_expr self.label_dict = label_dict self.input_keys = geom.dim_keys - self.output_keys = list(label_dict.keys()) + self.output_keys = tuple(label_dict.keys()) self.output_expr = { k: v for k, v in output_expr.items() if k in self.output_keys } diff --git a/ppsci/constraint/periodic_constraint.py b/ppsci/constraint/periodic_constraint.py index 7ad3e2fc1..6aace571e 100644 --- a/ppsci/constraint/periodic_constraint.py +++ b/ppsci/constraint/periodic_constraint.py @@ -72,9 +72,8 @@ def __init__( weight_dict: Optional[Dict[str, Callable]] = None, name: str = "PeriodicBC", ): - self.output_expr = output_expr self.input_keys = geom.dim_keys - self.output_keys = list(output_expr.keys()) + self.output_keys = tuple(output_expr.keys()) self.output_expr = { k: v for k, v in output_expr.items() if k in self.output_keys } diff --git a/ppsci/constraint/supervised_constraint.py b/ppsci/constraint/supervised_constraint.py index a0c34d8be..84b881622 100644 --- a/ppsci/constraint/supervised_constraint.py +++ b/ppsci/constraint/supervised_constraint.py @@ -60,19 +60,20 @@ def __init__( output_expr: Optional[Dict[str, Callable]] = None, name: str = "Sup", ): - self.output_expr = output_expr - # build dataset _dataset = dataset.build_dataset(dataloader_cfg["dataset"]) self.input_keys = _dataset.input_keys self.output_keys = ( - list(output_expr.keys()) if output_expr is not None else _dataset.label_keys + tuple(output_expr.keys()) + if output_expr is not None + else _dataset.label_keys ) + self.output_expr = output_expr if self.output_expr is None: self.output_expr = { - key: lambda out, k=key: out[k] for key in self.output_keys + key: (lambda out, k=key: out[k]) for key in self.output_keys } # construct dataloader with dataset and dataloader_cfg diff --git a/ppsci/utils/symbolic.py b/ppsci/utils/symbolic.py new file mode 100644 index 000000000..06ef4c3b8 --- /dev/null +++ b/ppsci/utils/symbolic.py @@ -0,0 +1,631 @@ +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
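# How the conversion below works: `lambdify` walks the sympy expression tree in
# post-order via `_post_traverse`, wraps each sympy node in a callable layer
# (OperatorNode for built-in operators and derivatives, LayerNode for undefined
# functions evaluated by a model, ConstantNode/ParameterNode for numbers and
# learnable symbols), and chains them into a ComposedNode that runs on a
# Dict[str, paddle.Tensor], caching intermediates under keys from `_cvt_to_key`
# (e.g. Derivative(u(x, y), (x, 2)) becomes "u__x__x").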
+ +""" +Sympy to python function conversion module +""" + +from __future__ import annotations + +import functools +from typing import Dict +from typing import List +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +import paddle +import sympy as sp +from paddle import nn +from typing_extensions import TypeAlias + +from ppsci import arch +from ppsci import equation +from ppsci.autodiff import hessian +from ppsci.autodiff import jacobian + +__all__ = [ + "lambdify", +] + + +DATA_DICT: TypeAlias = Dict[str, paddle.Tensor] + +SYMPY_BUILTIN_FUNC: TypeAlias = Union[ + sp.sin, + sp.sinh, + sp.asin, + sp.cos, + sp.acos, + sp.cosh, + sp.tan, + sp.atan, + sp.atan2, + sp.acosh, + sp.asinh, + sp.tanh, + sp.atanh, + sp.erf, + sp.loggamma, + sp.exp, + sp.Pow, + sp.log, + sp.Max, + sp.Min, + sp.Abs, + sp.Heaviside, + sp.sign, + sp.ceiling, + sp.floor, + sp.Add, + sp.Mul, +] + +SYMPT_TO_PADDLE = { + sp.sin: paddle.sin, + sp.sinh: paddle.sinh, + sp.asin: paddle.asin, + sp.cos: paddle.cos, + sp.acos: paddle.acos, + sp.cosh: paddle.cosh, + sp.tan: paddle.tan, + sp.atan: paddle.atan, + sp.atan2: paddle.atan2, + sp.acosh: paddle.acosh, + sp.asinh: paddle.asinh, + sp.tanh: paddle.tanh, + sp.atanh: paddle.atanh, + sp.erf: paddle.erf, + sp.loggamma: paddle.lgamma, + sp.exp: paddle.exp, + sp.Pow: paddle.pow, + sp.log: paddle.log, + sp.Max: paddle.maximum, + sp.Min: paddle.minimum, + sp.Abs: paddle.abs, + sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])), + sp.sign: paddle.sign, + sp.ceiling: paddle.ceil, + sp.floor: paddle.floor, + # NOTE: sp.Add and sp.Mul is not included here for unalignment with sympy + # and are implemented manually. +} + + +def _cvt_to_key(expr: sp.Basic) -> str: + """Convert sympy expression to a string key, mainly as retrieval key in dict. + + Args: + expr (sp.Basic): Sympy expression. + + Returns: + str: Converted string key. + """ + if isinstance(expr, (sp.Symbol, sp.core.function.UndefinedFunction, sp.Function)): + if hasattr(expr, "name"): + # use name of custom function instead of itself. + return expr.name + else: + return str(expr) + elif isinstance(expr, sp.Derivative): + # convert Derivative(u(x,y),(x,2),(y,2)) to "u__x__x__y__y" + expr_str = expr.args[0].name + for symbol, order in expr.args[1:]: + expr_str += f"__{symbol}" * order + return expr_str + else: + return str(expr) + + +class Node(nn.Layer): + """The base class of the node in expression tree. + + Args: + expr (sp.Basic): Sympy expression. + """ + + def __init__(self, expr: sp.Basic): + super().__init__() + self.expr = expr + self.key = _cvt_to_key(self.expr) + + def forward(self, **kwargs): + raise NotImplementedError("Node.forward is not implemented") + + def __str__(self): + return ( + f"{self.__class__.__name__}(expr: {self.expr}, " + f"expr_type: {type(self.expr)})" + ) + + def __repr__(self): + return f"{self.__class__.__name__}(expr: {self.expr})" + + +class DetachNode(nn.Layer): + """Class for detach operation in converted expression tree. + + Args: + expr (sp.Basic): Sympy expression. + """ + + def __init__(self, expr: sp.Basic): + super().__init__() + self.expr = expr + self.key = _cvt_to_key(self.expr) + self.child = _cvt_to_key(self.expr.args[0]) + + def forward(self, data_dict: DATA_DICT): + if self.key in data_dict: + return data_dict + + data_dict[self.key] = data_dict[self.child].detach() + return data_dict + + +class OperatorNode(Node): + """Class for operator node in converted expression tree. 
+ + Args: + expr (SYMPY_BUILTIN_FUNC): Sympy expression. + """ + + def __init__(self, expr: SYMPY_BUILTIN_FUNC): + super().__init__(expr) + # preprocess childs' key instead of processing at run-time in forward + # which can reduce considerable overhead of time for calling "_cvt_to_key" + if self.expr.func == sp.Derivative: + self.childs = [_cvt_to_key(self.expr.args[0])] + [ + (_cvt_to_key(arg), order) for (arg, order) in self.expr.args[1:] + ] + else: + self.childs = [_cvt_to_key(arg) for arg in self.expr.args] + + if self.expr.func == sp.Add: + self._apply_func = self._add_operator_func + elif self.expr.func == sp.Mul: + self._apply_func = self._mul_operator_func + elif self.expr.func == sp.Derivative: + self._apply_func = self._derivate_operator_func + elif self.expr.func == sp.Heaviside: + self._apply_func = self._heaviside_operator_func + self._auxiliary_func = SYMPT_TO_PADDLE[sp.Heaviside] + elif self.expr.func == sp.Min: + self._apply_func = self._minimum_operator_func + elif self.expr.func == sp.Max: + self._apply_func = self._maximum_operator_func + else: + self._apply_func = self._vanilla_operator_func + self._auxiliary_func = SYMPT_TO_PADDLE[self.expr.func] + + def forward(self, data_dict: DATA_DICT): + # use cache + if self.key in data_dict: + return data_dict + + return self._apply_func(data_dict) + + def _add_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = data_dict[self.childs[0]] + for p in self.childs[1:]: + data_dict[self.key] += data_dict[p] + return data_dict + + def _mul_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = data_dict[self.childs[0]] + for child in self.childs[1:]: + data_dict[self.key] *= data_dict[child] + return data_dict + + def _derivate_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = data_dict[self.childs[0]] + for child, order in self.childs[1:]: + if order & 1: + data_dict[self.key] = jacobian(data_dict[self.key], data_dict[child]) + order -= 1 + for _ in range(0, order, 2): + data_dict[self.key] = hessian(data_dict[self.key], data_dict[child]) + order -= 2 + return data_dict + + def _heaviside_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self._auxiliary_func(data_dict[self.childs[0]]) + return data_dict + + def _minimum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = paddle.minimum( + data_dict[self.childs[0]], data_dict[self.childs[1]] + ) + for i in range(2, len(self.childs)): + data_dict[self.key] = paddle.minimum( + data_dict[data_dict[self.key]], + data_dict[data_dict[self.childs[i]]], + ) + return data_dict + + def _maximum_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = paddle.maximum( + data_dict[self.childs[0]], data_dict[self.childs[1]] + ) + for i in range(2, len(self.childs)): + data_dict[self.key] = paddle.maximum( + data_dict[data_dict[self.key]], + data_dict[data_dict[self.childs[i]]], + ) + return data_dict + + def _vanilla_operator_func(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self._auxiliary_func( + *tuple(data_dict[child] for child in self.childs) + ) + return data_dict + + +class LayerNode(Node): + """Class for layer node in converted expression tree. + + Args: + expr (sp.core.function.UndefinedFunction): Sympy expression. + model (arch.Arch): NN model for computing forward result in this node. 
+ """ + + def __init__( + self, + expr: sp.core.function.UndefinedFunction, + model: arch.Arch, + ): + super().__init__(expr) + self.model = model + + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: + # use cache + if self.key in data_dict: + return data_dict + + output_dict = self.model(data_dict) + data_dict.update(output_dict) + + return data_dict + + +class ConstantNode(Node): + """Class for constant variable node in converted expression tree. + + Args: + expr (Union[sp.Number, sp.NumberSymbol]): Number expression. + """ + + def __init__(self, expr: Union[sp.Number, sp.NumberSymbol]): + super().__init__(expr) + if ( + self.expr.is_Float + or self.expr.is_Integer + or self.expr.is_Boolean + or self.expr.is_Rational + ): + self.expr = float(self.expr) + else: + raise TypeError( + "expr({expr}) should be Float/Integer/Boolean/Rational, " + f"but got {type(self.expr)}" + ) + self.expr = paddle.to_tensor(self.expr) + + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: + # use cache + if self.key in data_dict: + return data_dict + + data_dict[self.key] = self.expr + return data_dict + + +class ParameterNode(Node): + """Class for constant variable node in converted expression tree. + + Args: + expr (sp.Symbol): Parameter expression. + parameter (paddle.framework.io.EagerParamBase): Parameter tensor. + """ + + def __init__(self, expr: sp.Symbol, parameter: paddle.framework.io.EagerParamBase): + super().__init__(expr) + self.parameter = parameter + + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: + data_dict[self.key] = self.parameter + return data_dict + + +class ComposedNode(nn.Layer): + """ + Compose list of several callable objects together. + """ + + def __init__(self, callable_nodes: List[Node]): + super().__init__() + self.callable_nodes = callable_nodes + + def forward(self, data_dict: DATA_DICT) -> DATA_DICT: + # call all callable_nodes in order + for func in self.callable_nodes: + data_dict = func(data_dict) + + # return result of last node(root node) for target + return data_dict[self.callable_nodes[-1].key] + + +def _post_traverse(cur_node: sp.Basic, nodes: List[sp.Basic]) -> List[sp.Basic]: + """Traverse sympy expression tree in postorder. + + Args: + cur_node (sp.Basic): Sympy expression of current node. + nodes (List[sp.Basic]): Node list storing all tree nodes in postorder. + + Returns: + List[sp.Basic]: Node list storing all tree nodes in postorder. + """ + # traverse into sub-nodes + if isinstance(cur_node, sp.Function): + for arg in cur_node.args: + nodes = _post_traverse(arg, nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sp.Derivative): + nodes = _post_traverse(cur_node.args[0], nodes) + nodes.append(cur_node) + elif isinstance(cur_node, sp.Symbol): + nodes.append(cur_node) + return nodes + elif isinstance(cur_node, sp.Number): + nodes.append(cur_node) + else: + for arg in cur_node.args: + nodes = _post_traverse(arg, nodes) + nodes.append(cur_node) + return nodes + + +def _visualize_graph(nodes: List[sp.Basic], graph_filename: str): + try: + import pygraphviz + except ModuleNotFoundError: + raise ModuleNotFoundError( + "Please install pygraphviz by steps below:\n" + "1. apt-get install graphviz graphviz-dev\n" + "2. 
python -m pip install pygraphviz" + ) + + SYMPY_BUILTIN_NAME = { + sp.sin: "sin", + sp.sinh: "sinh", + sp.asin: "asin", + sp.cos: "cos", + sp.acos: "acos", + sp.cosh: "cosh", + sp.tan: "tan", + sp.atan: "atan", + sp.atan2: "atan2", + sp.acosh: "acosh", + sp.asinh: "asinh", + sp.tanh: "tanh", + sp.atanh: "atanh", + sp.erf: "erf", + sp.loggamma: "loggamma", + sp.exp: "exp", + sp.Pow: "Pow", + sp.log: "log", + sp.Max: "Max", + sp.Min: "Min", + sp.Abs: "Abs", + sp.Heaviside: "Heaviside", + sp.sign: "sign", + sp.ceiling: "ceiling", + sp.floor: "floor", + sp.Add: "Add", + sp.Mul: "Mul", + } + naming_counter = {k: 0 for k in SYMPY_BUILTIN_NAME} + + def get_operator_name(node): + ret = f"{SYMPY_BUILTIN_NAME[node.func]}_{naming_counter[node.func]}" + naming_counter[node.func] += 1 + return ret + + graph = pygraphviz.AGraph(directed=True, rankdir="TB") + C_FUNC = "#9196f1" # purple color function node + C_DATA = "#feb64d" # oringe color for data node + C_EDGE = "#000000" # black color for edge + + def add_edge(u: str, v: str, u_color: str = C_DATA, v_color: str = C_DATA): + """Add an edge from `u` to `v`. + + Args: + u (str): Name of begin node u. + v (str): Name of end node v. + u_color (str, optional): _description_. Defaults to C_DATA. + v_color (str, optional): _description_. Defaults to C_DATA. + """ + graph.add_node(u, style="filled", shape="ellipse", color=u_color) + graph.add_node(v, style="filled", shape="ellipse", color=v_color) + graph.add_edge(u, v, color=C_EDGE, style="solid", penwidth=0.5, arrowsize=0.5) + + for node in nodes: + if isinstance(node, tuple(SYMPY_BUILTIN_NAME.keys())): + operator_str = get_operator_name(node) + for arg in node.args: + add_edge(_cvt_to_key(arg), operator_str, v_color=C_FUNC) + add_edge(operator_str, _cvt_to_key(node), u_color=C_FUNC) + if isinstance(node, sp.Function): + for arg in node.args: + add_edge(_cvt_to_key(arg), str(node), v_color=C_FUNC) + add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) + elif isinstance(node, sp.Derivative): + add_edge(str(node), _cvt_to_key(node), u_color=C_FUNC) + add_edge(_cvt_to_key(node.args[0]), str(node), v_color=C_FUNC) + for arg in node.args[1:]: + add_edge(_cvt_to_key(arg[0]), str(node), v_color=C_FUNC) + + # export graph to image + from ppsci.utils import logger + + graph.layout() + image_path = f"{graph_filename}.png" + dot_path = f"{graph_filename}.dot" + graph.draw(image_path, prog="dot") + graph.write(dot_path) + logger.message( + f"Computational graph has been writen to {image_path} and {dot_path}. " + "dot file can be visualized at https://dreampuf.github.io/GraphvizOnline/" + ) + + +def lambdify( + expr: sp.Expr, + models: Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]] = None, + extra_parameters: Optional[Sequence[paddle.Tensor]] = None, + graph_filename: Optional[str] = None, +) -> ComposedNode: + """Convert sympy expression to callable function. + + Args: + expr (sp.Expr): Sympy expression to be converted. + models (Optional[Union[arch.Arch, Tuple[arch.Arch, ...]]]): Model(s) for + computing forward result in `LayerNode`. + extra_parameters (Optional[nn.ParameterList]): Extra learnable parameters. + Defaults to None. + graph_filename (Optional[str]): Save computational graph to `graph_filename.png` + for given `expr`, if `graph_filename` is not None and a valid string, + such as 'momentum_x'. Defaults to None. + + Returns: + ComposedNode: Callable object for computing expr with necessary input(s) data + in dict given. 
+ + Examples: + >>> import paddle + >>> import ppsci + >>> import sympy as sp + + >>> a, b, c, x, y = sp.symbols("a b c x y") + >>> u = sp.Function("u")(x, y) + >>> v = sp.Function("v")(x, y) + >>> z = -a + b * (c ** 2) + u * v + 2.3 + + >>> model = ppsci.arch.MLP(("x", "y"), ("u", "v"), 4, 16) + + >>> batch_size = 13 + >>> a_tensor = paddle.randn([batch_size, 1]) + >>> b_tensor = paddle.randn([batch_size, 1]) + >>> c_tensor = paddle.randn([batch_size, 1]) + >>> x_tensor = paddle.randn([batch_size, 1]) + >>> y_tensor = paddle.randn([batch_size, 1]) + + >>> model_output_dict = model({"x": x_tensor, "y": y_tensor}) + >>> u_tensor, v_tensor = model_output_dict["u"], model_output_dict["v"] + + >>> z_tensor_manually = ( + ... -a_tensor + b_tensor * (c_tensor ** 2) + ... + u_tensor * v_tensor + 2.3 + ... ) + >>> z_tensor_sympy = ppsci.lambdify(z, model)( + ... { + ... "a": a_tensor, + ... "b": b_tensor, + ... "c": c_tensor, + ... "x": x_tensor, + ... "y": y_tensor, + ... } + ... ) + + >>> paddle.allclose(z_tensor_manually, z_tensor_sympy).item() + True + """ + + # NOTE: Those simplify methods may complicate given expr instead, so not use here + # simplify expression to reduce nodes in tree + # expr = sp.nsimplify(expr) + # expr = sp.expand(expr) + # expr = sp.simplify(expr) + + # remove 1.0 from sympy expression tree + expr = expr.subs(1.0, 1) + + # convert sympy expression tree to list of nodes in postorder + sympy_nodes = [] + sympy_nodes = _post_traverse(expr, sympy_nodes) + + # remove unnecessary symbol nodes already in input dict(except for paramter symbol) + if not extra_parameters: + extra_parameters = () + _parameter_names = tuple(param.name for param in extra_parameters) + sympy_nodes = [ + node + for node in sympy_nodes + if (not node.is_Symbol) or (_cvt_to_key(node) in _parameter_names) + ] + + # remove duplicates with topo-order kept + sympy_nodes = list(dict.fromkeys(sympy_nodes)) + + if isinstance(models, arch.ModelList): + models = tuple(models.model_list[i] for i in range(len(models.model_list))) + if not isinstance(models, (tuple, list)): + models = (models,) + + # convert sympy node to callable node + callable_nodes = [] + for i, node in enumerate(sympy_nodes): + if isinstance( + node, tuple(SYMPT_TO_PADDLE.keys()) + (sp.Add, sp.Mul, sp.Derivative) + ): + callable_nodes.append(OperatorNode(node)) + elif isinstance(node, sp.Function): + if node.name == equation.DETACH_FUNC_NAME: + callable_nodes.append(DetachNode(node)) + else: + match_index = None + for j, model in enumerate(models): + if str(node.func.name) in model.output_keys: + callable_nodes.append( + LayerNode( + node, + model, + ) + ) + if match_index is not None: + raise ValueError( + f"Name of function({node}) should be unique along given" + f" models, but got same output_key({node.func.name}) " + f"in models[{match_index}] and models[{j}]." 
+ ) + match_index = j + elif node.is_Number or node.is_NumberSymbol: + callable_nodes.append(ConstantNode(node)) + elif isinstance(node, sp.Symbol): + callable_nodes.append( + ParameterNode( + node, + *[param for param in extra_parameters if param.name == node.name], + ) + ) + else: + raise NotImplementedError(f"The node {node} is not supported in lambdify.") + + # NOTE: Visualize computational graph using 'pygraphviz' + if isinstance(graph_filename, str): + _visualize_graph(sympy_nodes, graph_filename) + + # Compose callable nodes into one callable object + return ComposedNode(callable_nodes) diff --git a/ppsci/validate/geo_validator.py b/ppsci/validate/geo_validator.py index b9b781f87..6741baddc 100644 --- a/ppsci/validate/geo_validator.py +++ b/ppsci/validate/geo_validator.py @@ -86,7 +86,7 @@ def __init__( self.output_expr = output_expr self.label_dict = label_dict self.input_keys = geom.dim_keys - self.output_keys = list(label_dict.keys()) + self.output_keys = tuple(label_dict.keys()) nx = dataloader_cfg["total_size"] self.num_timestamps = 1 diff --git a/ppsci/validate/sup_validator.py b/ppsci/validate/sup_validator.py index a45a28f98..56f2b9a50 100644 --- a/ppsci/validate/sup_validator.py +++ b/ppsci/validate/sup_validator.py @@ -75,7 +75,9 @@ def __init__( self.input_keys = _dataset.input_keys self.output_keys = ( - list(output_expr.keys()) if output_expr is not None else _dataset.label_keys + tuple(output_expr.keys()) + if output_expr is not None + else _dataset.label_keys ) if self.output_expr is None: From eecc5e9d3f1a984c8ca939fae14f4ae2cb4ea319 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 13 Sep 2023 08:12:41 +0000 Subject: [PATCH 46/48] update docstrings of equations --- ppsci/equation/pde/biharmonic.py | 11 +++++++---- ppsci/equation/pde/laplace.py | 2 ++ ppsci/equation/pde/linear_elasticity.py | 23 +++++++++++++---------- ppsci/equation/pde/navier_stokes.py | 22 ++++++++++++---------- ppsci/equation/pde/normal_dot_vec.py | 4 +++- ppsci/equation/pde/poisson.py | 2 ++ 6 files changed, 39 insertions(+), 25 deletions(-) diff --git a/ppsci/equation/pde/biharmonic.py b/ppsci/equation/pde/biharmonic.py index 436d5b4bb..8c79651a1 100644 --- a/ppsci/equation/pde/biharmonic.py +++ b/ppsci/equation/pde/biharmonic.py @@ -16,6 +16,7 @@ from typing import Optional from typing import Tuple +from typing import Union from ppsci.equation.pde import base @@ -29,8 +30,10 @@ class Biharmonic(base.PDE): Args: dim (int): Dimension of equation. - q (float): Load. - D (float): Rigidity. + q (Union[float, str]): Load. + D (Union[float, str]): Rigidity. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. Examples: >>> import ppsci @@ -40,8 +43,8 @@ class Biharmonic(base.PDE): def __init__( self, dim: int, - q: float, - D: float, + q: Union[float, str], + D: Union[float, str], detach_keys: Optional[Tuple[str, ...]] = None, ): super().__init__() diff --git a/ppsci/equation/pde/laplace.py b/ppsci/equation/pde/laplace.py index 0b0c90b38..ad63bdafb 100644 --- a/ppsci/equation/pde/laplace.py +++ b/ppsci/equation/pde/laplace.py @@ -29,6 +29,8 @@ class Laplace(base.PDE): Args: dim (int): Dimension of equation. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. 
Examples: >>> import ppsci diff --git a/ppsci/equation/pde/linear_elasticity.py b/ppsci/equation/pde/linear_elasticity.py index 8f2a668c0..3b906207f 100644 --- a/ppsci/equation/pde/linear_elasticity.py +++ b/ppsci/equation/pde/linear_elasticity.py @@ -16,6 +16,7 @@ from typing import Optional from typing import Tuple +from typing import Union import sympy as sp @@ -38,13 +39,15 @@ class LinearElasticity(base.PDE): $$ Args: - E (Optional[float]): The Young's modulus. Defaults to None. - nu (Optional[float]): The Poisson's ratio. Defaults to None. - lambda_ (Optional[float]): Lamé's first parameter. Defaults to None. - mu (Optional[float]): Lamé's second parameter (shear modulus). Defaults to None. - rho (float, optional): Mass density. Defaults to 1. + E (Optional[Union[float, str]]): The Young's modulus. Defaults to None. + nu (Optional[Union[float, str]]): The Poisson's ratio. Defaults to None. + lambda_ (Optional[Union[float, str]]): Lamé's first parameter. Defaults to None. + mu (Optional[Union[float, str]]): Lamé's second parameter (shear modulus). Defaults to None. + rho (Union[float, str], optional): Mass density. Defaults to 1. dim (int, optional): Dimension of the linear elasticity (2 or 3). Defaults to 3. time (bool, optional): Whether contains time data. Defaults to False. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. Examples: >>> import ppsci @@ -55,11 +58,11 @@ class LinearElasticity(base.PDE): def __init__( self, - E: Optional[float] = None, - nu: Optional[float] = None, - lambda_: Optional[float] = None, - mu: Optional[float] = None, - rho: float = 1, + E: Optional[Union[float, str]] = None, + nu: Optional[Union[float, str]] = None, + lambda_: Optional[Union[float, str]] = None, + mu: Optional[Union[float, str]] = None, + rho: Union[float, str] = 1, dim: int = 3, time: bool = False, detach_keys: Optional[Tuple[str, ...]] = None, diff --git a/ppsci/equation/pde/navier_stokes.py b/ppsci/equation/pde/navier_stokes.py index 9adcc01ed..6446e9d13 100644 --- a/ppsci/equation/pde/navier_stokes.py +++ b/ppsci/equation/pde/navier_stokes.py @@ -14,7 +14,6 @@ from __future__ import annotations -from typing import Callable from typing import Optional from typing import Tuple from typing import Union @@ -55,10 +54,12 @@ class NavierStokes(base.PDE): $$ Args: - nu (Union[float, Callable]): Dynamic viscosity. - rho (float): Density. + nu (Union[float, str]): Dynamic viscosity. + rho (Union[float, str]): Density. dim (int): Dimension of equation. time (bool): Whether the euqation is time-dependent. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. 
Examples: >>> import ppsci @@ -67,14 +68,17 @@ class NavierStokes(base.PDE): def __init__( self, - nu: Union[float, Callable], - rho: float, + nu: Union[float, str], + rho: Union[float, str], dim: int, time: bool, detach_keys: Optional[Tuple[str, ...]] = None, ): super().__init__() self.detach_keys = detach_keys + self.dim = dim + self.time = time + t, x, y, z = self.create_symbols("t x y z") invars = (x, y) if time: @@ -82,16 +86,14 @@ def __init__( if dim == 3: invars += (z,) - self.nu = nu - self.rho = rho - self.dim = dim - self.time = time - if isinstance(nu, str): nu = self.create_function(nu, invars) if isinstance(rho, str): rho = self.create_function(rho, invars) + self.nu = nu + self.rho = rho + u = self.create_function("u", invars) v = self.create_function("v", invars) w = self.create_function("w", invars) if dim == 3 else sp.Number(0) diff --git a/ppsci/equation/pde/normal_dot_vec.py b/ppsci/equation/pde/normal_dot_vec.py index 156c0ee85..c20efd2ff 100644 --- a/ppsci/equation/pde/normal_dot_vec.py +++ b/ppsci/equation/pde/normal_dot_vec.py @@ -30,6 +30,8 @@ class NormalDotVec(base.PDE): Args: vec_keys (Tuple[str, ...]): Keys for vectors, such as ("u", "v", "w") for velocity vector. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. Examples: >>> import ppsci @@ -49,7 +51,7 @@ def __init__( normals = self.create_symbols("normal_x normal_y normal_z") normal_dot_vec = 0 - for (vec, normal) in zip(vec_vars, normals): + for (normal, vec) in zip(normals, vec_vars): normal_dot_vec += normal * vec self.add_equation("normal_dot_vec", normal_dot_vec) diff --git a/ppsci/equation/pde/poisson.py b/ppsci/equation/pde/poisson.py index b921f7d5b..8cb7c62e7 100644 --- a/ppsci/equation/pde/poisson.py +++ b/ppsci/equation/pde/poisson.py @@ -29,6 +29,8 @@ class Poisson(base.PDE): Args: dim (int): Dimension of equation. + detach_keys(Optional[Tuple[str, ...]]): Keys used for detach during computing. + Defaults to None. 
Examples: >>> import ppsci From c0228f06bef696385967a37c5e8db96a3e209985 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 13 Sep 2023 08:45:56 +0000 Subject: [PATCH 47/48] larger atol to 1e-7 for test_linear_elasticity --- test/equation/test_linear_elasticity.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/equation/test_linear_elasticity.py b/test/equation/test_linear_elasticity.py index 5f54041e7..a42f05cc1 100644 --- a/test/equation/test_linear_elasticity.py +++ b/test/equation/test_linear_elasticity.py @@ -308,7 +308,7 @@ def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): ) for name in test_output_names: - assert paddle.allclose(expected_output[name], test_output[name]) + assert paddle.allclose(expected_output[name], test_output[name], atol=1e-7) if __name__ == "__main__": From 408e378ae0d634e3417d4c39bec399685e0adaa4 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 13 Sep 2023 10:56:55 +0000 Subject: [PATCH 48/48] fix seed to 42 for test_linear_elasticity --- test/equation/test_linear_elasticity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/equation/test_linear_elasticity.py b/test/equation/test_linear_elasticity.py index a42f05cc1..973e3df10 100644 --- a/test/equation/test_linear_elasticity.py +++ b/test/equation/test_linear_elasticity.py @@ -125,6 +125,7 @@ def traction_z_expected_result( ], ) def test_linear_elasticity(E, nu, lambda_, mu, rho, dim, time): + paddle.seed(42) batch_size = 13 input_dims = ("x", "y", "z")[:dim] if time:
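For the docstring updates in PATCH 46/48, the widened Union[float, str] annotations (nu, rho, q, D, E, lambda_, mu, ...) signal that a coefficient may be given either as a plain number or as the name of a symbolic function of the input variables; the navier_stokes.py hunk above shows the mechanism explicitly via the isinstance(nu, str) / create_function branch. A minimal sketch of the two call forms, assuming NavierStokes is exported as ppsci.equation.NavierStokes and that the assembled sympy expressions live in its equations attribute; the numeric values are illustrative only:

    import ppsci

    # Constant coefficients: nu and rho enter the momentum terms as plain numbers.
    ns_const = ppsci.equation.NavierStokes(nu=0.01, rho=1.0, dim=2, time=False)

    # String coefficient: "nu" is registered via create_function as a function of
    # the invariants (x, y here), so a model output named "nu" can supply a
    # spatially varying viscosity when the expressions are evaluated.
    ns_field = ppsci.equation.NavierStokes(nu="nu", rho=1.0, dim=2, time=False)

    print(ns_const.equations.keys())  # continuity and momentum_* expressions
    print(ns_field.equations.keys())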
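The normal_dot_vec.py hunk only swaps the order of the zipped tuples; zip stops at the shorter argument either way, so each velocity key is still paired with its matching normal component and normal_z is simply ignored for 2D vectors. A short sketch under the same assumption that the class is exported from ppsci.equation:

    import ppsci

    # Builds n . v = normal_x*u + normal_y*v for a 2D velocity vector;
    # normal_z is never paired because only two vec_keys are given.
    eq = ppsci.equation.NormalDotVec(vec_keys=("u", "v"))
    print(eq.equations["normal_dot_vec"])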
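PATCH 47/48 and PATCH 48/48 together make the linear elasticity test deterministic and tolerant of float32 round-off: paddle.seed(42) fixes the randomly generated inputs and model initialization, and paddle.allclose checks |a - b| <= atol + rtol * |b|, so the absolute term is what matters when the expected value is near zero. A small illustration of why the default atol=1e-8 can be too strict there (the numbers are made up for the example):

    import paddle

    # Near-zero references make the rtol * |b| term vanish, leaving only atol.
    a = paddle.zeros([3])
    b = paddle.full([3], 5e-8)

    print(paddle.allclose(a, b).item())             # False with the default atol=1e-8
    print(paddle.allclose(a, b, atol=1e-7).item())  # True with the relaxed tolerance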