2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -20,7 +20,7 @@ jobs:
shell: bash
strategy:
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
python-version: ['3.9', '3.10', '3.11', '3.12']
platform: [ubuntu-latest, macos-latest, windows-latest]

steps:
22 changes: 8 additions & 14 deletions flow360/cloud/s3_utils.py
@@ -9,7 +9,12 @@
from datetime import datetime
from enum import Enum

import boto3
from boto3.s3.transfer import TransferConfig
from botocore.config import Config as BotocoreConfig

# pylint: disable=unused-import
from botocore.exceptions import ClientError as CloudFileNotFoundError
from pydantic.v1 import BaseModel, Field

from ..environment import Env
@@ -38,14 +43,9 @@ def __call__(self, bytes_chunk_transferred):
pass


def _get_dynamic_upload_config(file_size):
def _get_dynamic_upload_config(file_size) -> TransferConfig:
# pylint: disable=invalid-name
# Constant definition: https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html

from boto3.s3.transfer import ( # pylint: disable=import-outside-toplevel
TransferConfig,
)

MIN_CHUNK_SIZE = 5 * 1024 * 1024
MAX_PART_COUNT = 100000

@@ -143,10 +143,6 @@ def get_client(self):
Get s3 client.
:return:
"""
# pylint: disable=import-outside-toplevel
from boto3 import client
from botocore.config import Config as BotocoreConfig

# pylint: disable=no-member
kwargs = {
"region_name": self.user_credential.region,
@@ -159,7 +155,7 @@
if Env.current.s3_endpoint_url is not None:
kwargs["endpoint_url"] = Env.current.s3_endpoint_url

return client("s3", **kwargs)
return boto3.client("s3", **kwargs)

def is_expired(self):
"""
@@ -339,7 +335,7 @@ def _call_back(bytes_in_chunk):
Config=_get_dynamic_upload_config(os.path.getsize(file_name)),
)

# pylint: disable=too-many-arguments, too-many-locals
# pylint: disable=too-many-arguments
def download_file(
self,
resource_id: str,
@@ -361,8 +357,6 @@ def download_file(
:param progress_callback: provide custom callback for progress
:return:
"""
# pylint: disable=import-outside-toplevel
from botocore.exceptions import ClientError as CloudFileNotFoundError

to_file = get_local_filename_and_create_folders(remote_file_name, to_file, to_folder)
if os.path.exists(to_file) and not overwrite:
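The remaining hunks in this file simply hoist the boto3/botocore imports to module level, so get_client and download_file no longer import inside the function body. The body of _get_dynamic_upload_config itself is collapsed in this diff; the snippet below is only a hedged sketch of what a size-dependent TransferConfig helper could look like, built from the two constants visible above. The scaling rule and keyword choice are assumptions, not the project's actual implementation.

import math

from boto3.s3.transfer import TransferConfig

MIN_CHUNK_SIZE = 5 * 1024 * 1024  # S3 minimum multipart part size (5 MiB)
MAX_PART_COUNT = 100000           # part-count cap used by this module

def sketch_dynamic_upload_config(file_size: int) -> TransferConfig:
    # Grow the chunk size for very large files so the part count stays under
    # the cap, while never dropping below the minimum part size.
    chunk_size = max(MIN_CHUNK_SIZE, math.ceil(file_size / MAX_PART_COUNT))
    return TransferConfig(multipart_chunksize=chunk_size)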
10 changes: 3 additions & 7 deletions flow360/component/case.py
@@ -8,7 +8,7 @@
import json
import os
import tempfile
from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Union
from typing import Any, Iterator, List, Optional, Union

import pydantic as pd
import pydantic.v1 as pd_v1
@@ -20,6 +20,7 @@
RenameAssetRequestV2,
)
from ..cloud.rest_api import RestApi
from ..cloud.s3_utils import CloudFileNotFoundError
from ..exceptions import Flow360RuntimeError, Flow360ValidationError, Flow360ValueError
from ..log import log
from .folder import Folder
@@ -75,9 +76,6 @@
from .v1.flow360_params import Flow360Params, UnvalidatedFlow360Params
from .validator import Validator

if TYPE_CHECKING:
from flow360.component.volume_mesh import VolumeMeshV2


class CaseBase:
"""
@@ -461,8 +459,6 @@ def get_simulation_params(self):
"""
returns simulation params
"""
# pylint: disable=import-outside-toplevel
from botocore.exceptions import ClientError as CloudFileNotFoundError

try:
params_as_dict = self._parse_json_from_cloud("simulation.json")
@@ -584,7 +580,7 @@ def tags(self) -> List[str]:
return self._web_api_v2.info.tags

@property
def volume_mesh(self) -> VolumeMeshV2:
def volume_mesh(self) -> "VolumeMeshV2":
"""
returns volume mesh
"""
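Note on the case.py hunks: the TYPE_CHECKING import of VolumeMeshV2 is dropped and the volume_mesh property now uses the quoted annotation "VolumeMeshV2" instead. A minimal, self-contained sketch (class names reused purely for illustration) of why a string annotation needs no import at definition time:

class Case:
    # "VolumeMeshV2" is just a string when this method is defined, so the
    # referenced class does not have to be imported (or even exist) yet.
    def volume_mesh(self) -> "VolumeMeshV2":
        return VolumeMeshV2()

class VolumeMeshV2:  # defined afterwards; type checkers still resolve the annotation
    pass

assert isinstance(Case().volume_mesh(), VolumeMeshV2)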
4 changes: 2 additions & 2 deletions flow360/component/project.py
@@ -1466,9 +1466,9 @@ def _run(
use_geometry_AI=use_geometry_AI,
start_from=start_from,
)
except RuntimeError:
except RuntimeError as exception:
if raise_on_error:
raise ValueError("Submission terminated due to error.") from None
raise ValueError("Submission terminated due to validation error.") from exception
return None

self._project_webapi.patch(
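The project.py change re-raises with "from exception" instead of "from None", so the triggering RuntimeError is preserved as __cause__ of the new ValueError. A small standalone illustration of that chaining behaviour (names below are made up):

def submit():
    raise RuntimeError("validation failed upstream")

try:
    try:
        submit()
    except RuntimeError as exception:
        # "from exception" keeps the original error attached, so the traceback
        # shows both failures instead of hiding the root cause.
        raise ValueError("Submission terminated due to validation error.") from exception
except ValueError as err:
    assert isinstance(err.__cause__, RuntimeError)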
22 changes: 5 additions & 17 deletions flow360/component/results/base_results.py
@@ -17,12 +17,15 @@
import pandas
import pydantic as pd

from flow360.cloud.s3_utils import get_local_filename_and_create_folders
from flow360.cloud.s3_utils import (
CloudFileNotFoundError,
get_local_filename_and_create_folders,
)
from flow360.component.simulation.entity_info import GeometryEntityInfo
from flow360.component.simulation.models.surface_models import BoundaryBase
from flow360.component.simulation.simulation_params import SimulationParams
from flow360.component.v1.flow360_params import Flow360Params
from flow360.exceptions import Flow360TypeError, Flow360ValueError
from flow360.exceptions import Flow360ValueError
from flow360.log import log

# pylint: disable=consider-using-with
@@ -417,8 +420,6 @@ def wait(self, timeout_minutes=60):
"""
Wait until the Case finishes processing, refresh periodically. Useful for postprocessing, eg sectional data
"""
# pylint: disable=import-outside-toplevel
from botocore.exceptions import ClientError as CloudFileNotFoundError

start_time = time.time()
while time.time() - start_time < timeout_minutes * 60:
@@ -752,7 +753,3 @@ def reload_data(self, filter_physical_steps_only: bool = False, include_time: bo
filter_physical_steps_only=filter_physical_steps_only, include_time=include_time
)
self._filtered_sum()


class LocalResultCSVModel(ResultCSVModel):
"""
CSV Model with no remote file that cannot be downloaded used for locally working with csv data
"""

remote_file_name: Optional[str] = None

def download(
self, to_file: str = None, to_folder: str = ".", overwrite: bool = False, **kwargs
):
raise Flow360TypeError("Cannot download csv from LocalResultCSVModel")
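
Across base_results.py, case.py and case_results.py the function-local "from botocore.exceptions import ClientError as CloudFileNotFoundError" is replaced by importing the alias defined once in flow360/cloud/s3_utils.py. A hedged sketch of that pattern in isolation; the helper function below is hypothetical and not part of the package:

# Defined once in an s3_utils-style module:
from botocore.exceptions import ClientError as CloudFileNotFoundError

# Callers then import the project-level name instead of reaching into botocore:
# from flow360.cloud.s3_utils import CloudFileNotFoundError

def read_remote_object(client, bucket: str, key: str):
    """Fetch an S3 object, returning None when the shared not-found error is raised."""
    try:
        return client.get_object(Bucket=bucket, Key=key)["Body"].read()
    except CloudFileNotFoundError:
        return None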
67 changes: 1 addition & 66 deletions flow360/component/results/case_results.py
@@ -1,7 +1,5 @@
"""Case results module"""

# pylint: disable=too-many-lines

from __future__ import annotations

import re
@@ -12,11 +10,11 @@
import numpy as np
import pydantic as pd

from flow360.cloud.s3_utils import CloudFileNotFoundError
from flow360.component.results.base_results import (
_PHYSICAL_STEP,
_PSEUDO_STEP,
_TIME,
LocalResultCSVModel,
PerEntityResultCSVModel,
ResultBaseModel,
ResultCSVModel,
@@ -34,7 +32,6 @@
_HEAT_FLUX,
_X,
_Y,
BETDiskCSVHeaderOperation,
DiskCoefficientsComputation,
PorousMediumCoefficientsComputation,
_CFx,
@@ -583,8 +580,6 @@ def download(
CloudFileNotFoundError
If the cloud file for the results is not found.
"""
# pylint: disable=import-outside-toplevel
from botocore.exceptions import ClientError as CloudFileNotFoundError

try:
super().download(
@@ -820,26 +815,6 @@ def to_base(self, base: str, params: Flow360Params = None):
self.values["ForceUnits"] = bet.force_x.units
self.values["MomentUnits"] = bet.moment_x.units

def format_headers(
self, params: SimulationParams, pattern: str = "$BETName_$CylinderName"
) -> LocalResultCSVModel:
"""
Renames the header entries from Disk{i}_ to based on an input user pattern
such as $BETName_$CylinderName
Parameters
----------
params : SimulationParams
Simulation parameters
pattern : str
Pattern string to rename header entries. Available patterns
[$BETName, $CylinderName, $DiskLocalIndex, $DiskGlobalIndex]
Returns
-------
LocalResultCSVModel
Model containing csv with updated header
"""
return BETDiskCSVHeaderOperation.format_headers(self, params, pattern)

def compute_coefficients(self, params: SimulationParams) -> BETDiskCoefficientsCSVModel:
"""
Compute disk coefficients from BET disk forces and moments.
@@ -902,26 +877,6 @@ class BETDiskCoefficientsCSVModel(ResultCSVModel):

remote_file_name: str = pd.Field("bet_disk_coefficients_v2.csv", frozen=True)

def format_headers(
self, params: SimulationParams, pattern: str = "$BETName_$CylinderName"
) -> LocalResultCSVModel:
"""
Renames the header entries from Disk{i}_ to based on an input user pattern
such as $BETName_$CylinderName
Parameters
----------
params : SimulationParams
Simulation parameters
pattern : str
Pattern string to rename header entries. Available patterns
[$BETName, $CylinderName, $DiskLocalIndex, $DiskGlobalIndex]
Returns
-------
LocalResultCSVModel
Model containing csv with updated header
"""
return BETDiskCSVHeaderOperation.format_headers(self, params, pattern)


class PorousMediumResultCSVModel(OptionallyDownloadableResultCSVModel):
"""Model for handling porous medium CSV results."""
@@ -998,23 +953,3 @@ class BETForcesRadialDistributionResultCSVModel(OptionallyDownloadableResultCSVM
CaseDownloadable.BET_FORCES_RADIAL_DISTRIBUTION.value, frozen=True
)
_err_msg = "Case does not have any BET disks."

def format_headers(
self, params: SimulationParams, pattern: str = "$BETName_$CylinderName"
) -> LocalResultCSVModel:
"""
Renames the header entries from Disk{i}_ to based on an input user pattern
such as $BETName_$CylinderName
Parameters
----------
params : SimulationParams
Simulation parameters
pattern : str
Pattern string to rename header entries. Available patterns
[$BETName, $CylinderName, $DiskLocalIndex, $DiskGlobalIndex]
Returns
-------
LocalResultCSVModel
Model containing csv with updated header
"""
return BETDiskCSVHeaderOperation.format_headers(self, params, pattern)
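
For reference, the format_headers hooks removed above mapped the default Disk{i}_ column prefixes onto a user pattern such as $BETName_$CylinderName. A condensed, standalone rendition of that substitution, using made-up disk and cylinder names:

pattern = "$BETName_$CylinderName"
# One (BET disk, cylinder) pair per global disk index, mirroring the removed loop.
disks = [("MainRotor", "InnerCylinder"), ("MainRotor", "OuterCylinder")]

disk_rename_map = {}
for global_index, (bet_name, cylinder_name) in enumerate(disks):
    new_prefix = (
        pattern.replace("$BETName", bet_name)
        .replace("$CylinderName", cylinder_name)
        .replace("$DiskGlobalIndex", str(global_index))
    )
    disk_rename_map[f"Disk{global_index}"] = new_prefix

renamed = []
for header in ["Disk0_Force_x", "Disk1_Moment_x", "physical_step"]:
    for default_prefix, new_prefix in disk_rename_map.items():
        if header.startswith(default_prefix):
            header = new_prefix + header[len(default_prefix):]
            break
    renamed.append(header)

assert renamed == ["MainRotor_InnerCylinder_Force_x", "MainRotor_OuterCylinder_Moment_x", "physical_step"]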
78 changes: 1 addition & 77 deletions flow360/component/results/results_utils.py
@@ -7,13 +7,7 @@

import numpy as np

from flow360.component.results.base_results import (
_PHYSICAL_STEP,
_PSEUDO_STEP,
LocalResultCSVModel,
ResultCSVModel,
)
from flow360.component.simulation.models.volume_models import BETDisk
from flow360.component.results.base_results import _PHYSICAL_STEP, _PSEUDO_STEP
from flow360.component.simulation.simulation_params import SimulationParams
from flow360.exceptions import Flow360ValueError
from flow360.log import log
@@ -416,73 +410,3 @@ def compute_coefficients_static(
out[f"{zone_name}_{_CL}"].append(CL_val)

return coefficients_model_class().from_dict(out)


class BETDiskCSVHeaderOperation:
# pylint:disable=too-few-public-methods
"""
Static utilities for renaming BET disk csv output headers to include the name of the BET disk.

This class provides only static methods and should not be instantiated or subclassed.
All methods are self-contained and require explicit parameters.
"""

@staticmethod
def format_headers(
BETCSVModel: ResultCSVModel,
params: SimulationParams,
pattern: str = "$BETName_$CylinderName",
) -> LocalResultCSVModel:
"""
renames the header entries in a BET csv file from Disk{x}_ based on input pattern
$Default option is $BETName_$CylinderName

pattern can take [$BETName, $CylinderName, $DiskLocalIndex, $DiskGlobalIndex]
Parameters
----------
BETCSVModel : ResultCSVModle
Model containing csv entries
params : SimulationParams
Simulation parameters
pattern : str
Pattern string to rename header entries. Available patterns
[$BETName, $CylinderName, $DiskLocalIndex, $DiskGlobalIndex]
Returns
-------
LocalResultCSVModel
Model containing csv with updated header
"""
# pylint:disable=too-many-locals
bet_disks = []
for model in params.models:
if isinstance(model, BETDisk):
bet_disks.append(model)
if not bet_disks:
raise ValueError("No BET Disks in params to rename header.")

csv_data = BETCSVModel.values
new_csv = {}

disk_rename_map = {}

diskCount = 0
for disk in bet_disks:
for disk_local_index, cylinder in enumerate(disk.entities.stored_entities):
new_name = pattern.replace("$BETName", disk.name)
new_name = new_name.replace("$CylinderName", cylinder.name)
new_name = new_name.replace("$DiskLocalIndex", str(disk_local_index))
new_name = new_name.replace("$DiskGlobalIndex", str(diskCount))
disk_rename_map[f"Disk{diskCount}"] = new_name
diskCount = diskCount + 1

for header, values in csv_data.items():
matched = False
for default_prefix, new_prefix in disk_rename_map.items():
if header.startswith(default_prefix):
new_csv[new_prefix + header[len(default_prefix) :]] = values
matched = True
break
if not matched:
new_csv[header] = values
newModel = LocalResultCSVModel().from_dict(new_csv)
return newModel
2 changes: 1 addition & 1 deletion flow360/component/simulation/framework/updater.py
@@ -386,7 +386,7 @@ def _to_25_7_6(params_as_dict):
(Flow360Version("25.6.5"), _to_25_6_5),
(Flow360Version("25.6.6"), _to_25_6_6),
(Flow360Version("25.7.2"), _to_25_7_2),
(Flow360Version("25.7.6b0"), _to_25_7_6),
(Flow360Version("25.7.6"), _to_25_7_6),
] # A list of the Python API version tuples with their corresponding updaters.


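The updater.py fix drops the b0 pre-release suffix so the entry matches the plain release versions registered around it. How updater.py walks this list is not shown in the diff; the following is only an assumed sketch of the common pattern of applying, in order, every registered migration newer than the incoming version (all names and data below are illustrative):

def parse_version(version: str) -> tuple:
    # Simplified: assumes plain "major.minor.patch" release strings.
    return tuple(int(part) for part in version.split("."))

VERSION_UPDATE_MAP = [
    ("25.7.2", lambda params: {**params, "migrated_to": "25.7.2"}),
    ("25.7.6", lambda params: {**params, "migrated_to": "25.7.6"}),
]

def update_params(params: dict, from_version: str) -> dict:
    # Apply every updater registered for a version newer than the input's.
    for version, updater in VERSION_UPDATE_MAP:
        if parse_version(version) > parse_version(from_version):
            params = updater(params)
    return params

print(update_params({"version": "25.7.1"}, "25.7.1"))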