Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion flow360/component/simulation/framework/base_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def __pydantic_init_subclass__(cls, **kwargs) -> None:
# pylint: disable=fixme
# TODO: Remove alias_generator since it is only for translator
alias_generator=pd.AliasGenerator(
serialization_alias=snake_to_camel,
alias=snake_to_camel,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What is this change for?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using a serialization_alias instead of alias here meant that the aliases were being used during serialization only (so python -> frontend), but during validation we weren't accepting aliased fields (so frontend -> python). For example, in SerializedValueOrExpression we would only accept type_name instead of typeName in the expression validation service. I believe for consistency it's better to provide full interchangeability.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

But I believe the front end has already adopted snake case throughout their forms. Why are we now allowing them to write camel case again?

),
)

Expand Down
28 changes: 17 additions & 11 deletions flow360/component/simulation/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,13 @@

# pylint: disable=duplicate-code
import json
import re
from enum import Enum
from numbers import Number
from typing import Any, Collection, Dict, Literal, Optional, Tuple, Union

import numpy as np
import pydantic as pd
from unyt import unyt_array, unyt_quantity
from unyt.exceptions import UnitParseError
from unyt import unyt_array

# Required for correct global scope initialization
from flow360.component.simulation.exposed_units import supported_units_by_front_end
Expand Down Expand Up @@ -410,7 +408,9 @@ def clean_unrelated_setting_from_params_dict(params: dict, root_item_type: str)
return params


def handle_generic_exception(err: Exception, validation_errors: Optional[list]) -> list:
def handle_generic_exception(
err: Exception, validation_errors: Optional[list], loc_prefix: Optional[list[str]] = None
) -> list:
"""
Handles generic exceptions during validation, adding to validation errors.

Expand All @@ -420,6 +420,8 @@ def handle_generic_exception(err: Exception, validation_errors: Optional[list])
The exception caught during validation.
validation_errors : list or None
Current list of validation errors, may be None.
loc_prefix : list or None
Prefix of the location of the generic error to help locate the issue

Returns
-------
Expand All @@ -432,7 +434,7 @@ def handle_generic_exception(err: Exception, validation_errors: Optional[list])
validation_errors.append(
{
"type": err.__class__.__name__.lower().replace("error", "_error"),
"loc": ["unknown"],
"loc": ["unknown"] if loc_prefix is None else loc_prefix,
"msg": str(err),
"ctx": {},
}
Expand Down Expand Up @@ -791,17 +793,19 @@ def validate_expression(variables: list[dict], expressions: list[str]):
# Populate variable scope
for i in range(len(variables)):
variable = variables[i]
loc = f"variables/{i}"
loc_hint = ["variables", str(i)]
try:
variable = UserVariable(name=variable["name"], value=variable["value"])
if variable and isinstance(variable.value, Expression):
_ = variable.value.evaluate(strict=False)
except (ValueError, KeyError, NameError, UnitParseError) as e:
errors.append({"loc": loc, "msg": str(e)})
except pd.ValidationError as err:
errors.extend(err.errors())
except Exception as err: # pylint: disable=broad-exception-caught
handle_generic_exception(err, errors, loc_hint)

for i in range(len(expressions)):
expression = expressions[i]
loc = f"expressions/{i}"
loc_hint = ["expressions", str(i)]
value = None
unit = None
try:
Expand All @@ -822,8 +826,10 @@ def validate_expression(variables: list[dict], expressions: list[str]):
value = float(result[0])
else:
value = tuple(result.tolist())
except (ValueError, KeyError, NameError, UnitParseError) as e:
errors.append({"loc": loc, "msg": str(e)})
except pd.ValidationError as err:
errors.extend(err.errors())
except Exception as err: # pylint: disable=broad-exception-caught
handle_generic_exception(err, errors, loc_hint)
values.append(value)
units.append(unit)

Expand Down
54 changes: 50 additions & 4 deletions flow360/component/simulation/user_code.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,15 +58,18 @@ def _convert_argument(value):
elif isinstance(value, unyt_array):
unit = str(value.units)
tokens = _split_keep_delimiters(unit, unit_delimiters)
arg = f"{str(value.value)} * "
arg = f"{_convert_argument(value.value)[0]} * "
for token in tokens:
if token not in unit_delimiters and not _is_number_string(token):
token = f"u.{token}"
arg += token
else:
arg += token
elif isinstance(value, np.ndarray):
arg = f"np.array([{','.join([_convert_argument(item)[0] for item in value])}])"
if value.ndim == 0:
arg = str(value)
else:
arg = f"np.array([{','.join([_convert_argument(item)[0] for item in value])}])"
else:
raise ValueError(f"Incompatible argument of type {type(value)}")
return arg, parenthesize
Expand All @@ -81,9 +84,44 @@ class SerializedValueOrExpression(Flow360BaseModel):
evaluated_units: Optional[str] = pd.Field(None)


# Wrapper type that lets plain numpy ndarrays be used as pydantic model fields
class NdArray(np.ndarray):
    """Pydantic-aware marker subclass of ``np.ndarray``.

    Validation accepts any ``np.ndarray`` unchanged (no copy, no view cast);
    everything else is rejected with a ``ValueError``.
    """

    def __repr__(self):
        return f"NdArray(shape={self.shape}, dtype={self.dtype})"

    @classmethod
    def __get_pydantic_core_schema__(cls, source_type, handler):
        # Delegate all validation to `validate`; no schema metadata is needed.
        return core_schema.no_info_plain_validator_function(cls.validate)

    @classmethod
    def validate(cls, value: Any):
        # Guard clause: reject non-ndarray input, pass ndarrays through as-is.
        if not isinstance(value, np.ndarray):
            raise ValueError(f"Cannot convert {type(value)} to NdArray")
        return value


# Wrapper type that lets unyt arrays be used as pydantic model fields
class UnytArray(unyt_array):
    """Pydantic-aware marker subclass of ``unyt_array``.

    Validation accepts any ``unyt_array`` unchanged (units preserved);
    everything else is rejected with a ``ValueError``.
    """

    def __repr__(self):
        return f"UnytArray({str(self)})"

    @classmethod
    def __get_pydantic_core_schema__(cls, source_type, handler):
        # Delegate all validation to `validate`; no schema metadata is needed.
        return core_schema.no_info_plain_validator_function(cls.validate)

    @classmethod
    def validate(cls, value: Any):
        # Guard clause: reject non-unyt input, pass unyt arrays through as-is.
        if not isinstance(value, unyt_array):
            raise ValueError(f"Cannot convert {type(value)} to UnytArray")
        return value


# Union of all numeric value kinds a user variable may hold: a plain python
# float, a dimensioned unyt array, or a raw numpy array (both via the
# pydantic-compatible wrapper types defined above in this file).
AnyNumericType = Union[float, UnytArray, NdArray]


class Variable(Flow360BaseModel):
name: str = pd.Field()
value: ValueOrExpression[Any] = pd.Field()
value: ValueOrExpression[AnyNumericType] = pd.Field()

model_config = pd.ConfigDict(validate_assignment=True, extra="allow")

Expand Down Expand Up @@ -182,6 +220,9 @@ def __str__(self):
def __repr__(self):
return f"Variable({self.name} = {self.value})"

def __hash__(self):
    # Hash by variable name only, so variables can be used as dict keys and
    # set members; the (possibly mutable) value is deliberately excluded.
    # NOTE(review): confirm that equality (default or custom __eq__) is
    # consistent with name-based hashing — equal objects must hash equal.
    return hash(self.name)

def sqrt(self):
return Expression(expression=f"np.sqrt({self.expression})")

Expand Down Expand Up @@ -288,7 +329,12 @@ def _validate_expression(cls, value) -> Self:
elif isinstance(value, Variable):
expression = str(value)
elif isinstance(value, np.ndarray) and not isinstance(value, unyt_array):
expression = f"np.array([{','.join([_convert_argument(item)[0] for item in value])}])"
if value.ndim == 0:
expression = str(value)
else:
expression = (
f"np.array([{','.join([_convert_argument(item)[0] for item in value])}])"
)
else:
details = InitErrorDetails(
type="value_error", ctx={"error": f"Invalid type {type(value)}"}
Expand Down
39 changes: 31 additions & 8 deletions tests/simulation/test_expressions.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,10 +303,10 @@ class TestModel(Flow360BaseModel):
direction: ValueOrExpression[LengthType.Direction] = pd.Field()
moment: ValueOrExpression[LengthType.Moment] = pd.Field()

x = UserVariable(name="x", value=[1, 0, 0])
y = UserVariable(name="y", value=[0, 0, 0])
z = UserVariable(name="z", value=[1, 0, 0, 0])
w = UserVariable(name="w", value=[1, 1, 1])
x = UserVariable(name="x", value=np.array([1, 0, 0]))
y = UserVariable(name="y", value=np.array([0, 0, 0]))
z = UserVariable(name="z", value=np.array([1, 0, 0, 0]))
w = UserVariable(name="w", value=np.array([1, 1, 1]))

model = TestModel(
vector=y * u.m, axis=x * u.m, array=z * u.m, direction=x * u.m, moment=w * u.m
Expand Down Expand Up @@ -409,8 +409,6 @@ class TestModel(Flow360BaseModel):


def test_numpy_interop_scalars():
# Disclaimer - doesn't fully work yet with dimensioned types...

class ScalarModel(Flow360BaseModel):
scalar: ValueOrExpression[float] = pd.Field()

Expand Down Expand Up @@ -481,8 +479,6 @@ class ScalarModel(Flow360BaseModel):


def test_numpy_interop_vectors():
# Disclaimer - doesn't fully work yet with dimensioned types...

Vec3 = tuple[float, float, float]

class VectorModel(Flow360BaseModel):
Expand Down Expand Up @@ -613,3 +609,30 @@ def test_cyclic_dependencies():

with pytest.raises(pd.ValidationError):
x.value = x


def test_auto_alias():
    """Serialized expressions validate with both snake_case and camelCase keys."""

    class TestModel(Flow360BaseModel):
        field: ValueOrExpression[VelocityType] = pd.Field()

    # Registers "x" in the variable scope used during expression evaluation.
    x = UserVariable(name="x", value=4)

    expected = "(x * u.m) / u.s + (((4 * (x ** 2)) * u.m) / u.s)"

    payloads = [
        {  # snake_case field names
            "type_name": "expression",
            "expression": expected,
            "evaluated_value": 68.0,
            "evaluated_units": "m/s",
        },
        {  # camelCase aliases
            "typeName": "expression",
            "expression": expected,
            "evaluatedValue": 68.0,
            "evaluatedUnits": "m/s",
        },
    ]

    for payload in payloads:
        model = TestModel(field=payload)
        assert str(model.field) == expected
Loading