Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 25 additions & 11 deletions engibench/problems/airfoil/dataset_slurm_airfoil.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,17 +30,21 @@ def calculate_runtime(group_size, minutes_per_sim=6):
be generalized to other problems as well. It includes functions for simulation of designs.

Command Line Arguments:
-n_designs, --num_designs: How many airfoil designs should we use?
-n_flows, --num_flow_conditions: How many flow conditions should we use per design?
-n_aoas, --num_angles_of_attack: How many angles of attack should we use per design & flow condition pairing?
-group_size, --group_size: How many simulations should we group together on a single cpu?
-n_slurm_array, --num_slurm_array: How many slurm jobs to spawn and submit via slurm arrays? Note this may be limited by the HPC system.
-min_ma, --min_mach_number: Lower bound for mach number
-max_ma, --max_mach_number: Upper bound for mach number
-min_re, --min_reynolds_number: Lower bound for reynolds number
-max_re, --max_reynolds_number: Upper bound for reynolds number
-min_aoa, --min_angle_of_attack: Lower bound for angle of attack
-max_aoa, --max_angle_of_attack: Upper bound for angle of attack
-account, --hpc_account: HPC account allocation to charge for job submission (required)
-type, --job_type: Engibench job type to submit: 'simulate' or 'optimize' (required)
-n_designs, --num_designs: Number of airfoil designs to use (default: 10)
-n_flows, --num_flow_conditions: Number of flow conditions (Mach, Reynolds, AoA) sampled per design via LHS (default: 1)
-group_size, --group_size: Number of simulations batched within each individual SLURM job (default: 2)
-minutes_per_sim, --minutes_per_simulation: Estimated runtime per simulation in minutes, used to set SLURM job walltime (default: 15)
-n_slurm_array, --num_slurm_array: Maximum SLURM array size; varies by HPC system (default: 1000)
-min_ma, --min_mach_number: Lower bound for Mach number sampling (default: 0.5)
-max_ma, --max_mach_number: Upper bound for Mach number sampling (default: 0.9)
-min_re, --min_reynolds_number: Lower bound for Reynolds number sampling (default: 1.0e6)
-max_re, --max_reynolds_number: Upper bound for Reynolds number sampling (default: 2.0e7)
-min_aoa, --min_angle_of_attack: Lower bound for angle of attack sampling in degrees (default: 0.0)
-max_aoa, --max_angle_of_attack: Upper bound for angle of attack sampling in degrees (default: 20.0)
--field_output: Flag to include surface field variables (VelocityX, VelocityY, VelocityZ,
CoefPressure) in simulation output under the 'surface_fields' key (default: False)
"""
# Fetch command line arguments for render and simulate to know whether to run those functions
parser = ArgumentParser()
Expand Down Expand Up @@ -135,6 +139,12 @@ def calculate_runtime(group_size, minutes_per_sim=6):
default=20.0,
help="Minimum sampling bound for angle of attack.",
)
parser.add_argument(
"--field_output",
action="store_true",
default=False,
help="Include surface field variables (VelocityX, VelocityY, VelocityZ, CoefPressure) in simulation output.",
)
args = parser.parse_args()

# HPC account for job submission
Expand All @@ -154,6 +164,9 @@ def calculate_runtime(group_size, minutes_per_sim=6):
n_slurm_array = args.num_slurm_array
minutes_per_sim = args.minutes_per_simulation

# Field output flag
field_output = args.field_output

# Flow parameter and angle of attack ranges
min_ma = args.min_mach_number
max_ma = args.max_mach_number
Expand Down Expand Up @@ -200,6 +213,7 @@ def calculate_runtime(group_size, minutes_per_sim=6):
problem_configuration = {"mach": ma, "reynolds": re, "alpha": alpha}
config = {"problem_configuration": problem_configuration, "configuration_id": config_id}
config["design"] = design["coords"]
config["field_output"] = field_output
simulate_configs_designs.append(config)
config_id += 1

Expand Down
23 changes: 19 additions & 4 deletions engibench/problems/airfoil/simulation_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
from engibench.problems.airfoil.v0 import Airfoil


def simulate_slurm(problem_configuration: dict, configuration_id: int, design: list) -> dict:
def simulate_slurm(
problem_configuration: dict, configuration_id: int, design: list, *, field_output: bool = False
) -> dict:
"""Takes in the given configuration and designs, then runs the simulation analysis.

Any arguments should be things that you want to change across the different jobs, and anything
Expand All @@ -18,13 +20,19 @@ def simulate_slurm(problem_configuration: dict, configuration_id: int, design: l
For the airfoil problem this includes Mach number, Reynolds number, and angle of attack.
configuration_id (int): A unique identifier for the job for later debugging or tracking.
design (list): list of lists defining x and y coordinates of airfoil geometry.
field_output (bool): If True, surface field variables (velocity components and pressure
coefficient) are extracted from the simulation and included in the returned dict under
the key ``"surface_fields"``.

Returns:
"performance_dict": Dictionary of aerodynamic performance (lift & drag).
"simulate_time": The time taken to run this simulation job. Useful for aggregating
the time taken for dataset generation.
"problem_configuration": Problem configuration parameters
"configuration_id": Identifier for specific simulation configurations
"surface_fields": Array of shape ``(6, N)`` with rows
``[x, y, VelocityX, VelocityY, VelocityZ, CoefPressure]`` (only present when
``field_output=True``).
"""
# Instantiate problem
problem = Airfoil()
Expand All @@ -41,19 +49,26 @@ def simulate_slurm(problem_configuration: dict, configuration_id: int, design: l
print("Starting `simulate` via SLURM...")
start_time = time.time()

performance = problem.simulate(my_design, mpicores=1, config=problem_configuration)
performance_dict = {"drag": performance[0], "lift": performance[1]}
performance = problem.simulate(my_design, mpicores=1, config=problem_configuration, field_output=field_output)
if field_output:
aerodynamics, surface_fields = performance
performance_dict = {"drag": aerodynamics[0], "lift": aerodynamics[1]}
else:
performance_dict = {"drag": performance[0], "lift": performance[1]}
print("Finished `simulate` via SLURM.")
end_time = time.time()
elapsed_time = end_time - start_time
print(f"Elapsed time for `simulate`: {elapsed_time:.2f} seconds")

return {
result = {
"performance_dict": performance_dict,
"simulate_time": elapsed_time,
"problem_configuration": problem_configuration,
"configuration_id": configuration_id,
}
if field_output:
result["surface_fields"] = surface_fields
return result


def optimize_slurm(problem_configuration: dict, configuration_id: int, design: list):
Expand Down
56 changes: 56 additions & 0 deletions engibench/problems/airfoil/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,62 @@ def _clean_coordinates(
return coords_x_reordered, coords_y_reordered, indices_reordered


def reorder_coords_fields(df_slice: pd.DataFrame) -> npt.NDArray[np.float32]:
    """Reorder the coordinates and surface field variables of a slice of a dataframe.

    Behaves like :func:`reorder_coords` but additionally pulls the velocity
    components and the pressure coefficient, aligned to the reordered point
    sequence.

    Args:
        df_slice (pd.DataFrame): A slice of a dataframe containing coordinates,
            velocity components (VelocityX/Y/Z), and CoefPressure.

    Returns:
        npt.NDArray[np.float32]: Array of shape ``(6, N)`` with rows
        ``[CoordinateX, CoordinateY, VelocityX, VelocityY, VelocityZ, CoefPressure]``.
    """
    # Build the (N, 2) connectivity table from the slice's node connections.
    start_nodes, end_nodes = _extract_connectivities(df_slice)
    conn = np.concatenate((start_nodes.reshape(-1, 1), end_nodes.reshape(-1, 1)), axis=1)

    # Raw coordinates plus the original row position of every point.
    xs = df_slice["CoordinateX"].to_numpy()
    ys = df_slice["CoordinateY"].to_numpy()
    row_ids = np.arange(len(df_slice), dtype=np.int32)

    # Split the connectivity graph into segments, then decide segment order.
    seg_starts, seg_ends, seg_labels = _identify_segments(conn)
    seg_order = _order_segments(
        xs, ys, seg_starts, seg_ends, np.arange(len(seg_starts), dtype=np.int32)
    )

    # Reorder -> align -> clean, carrying the original indices through each step.
    xs_r, ys_r, ids_r = _reorder_coordinates(xs, ys, row_ids, conn, seg_labels, seg_order)
    xs_r, ys_r, ids_r = _align_coordinates(xs_r, ys_r, ids_r)
    xs_r, ys_r, ids_r = _clean_coordinates(xs_r, ys_r, ids_r)

    # Gather field values with the reordered indices. ids_r already has the
    # closing point appended by _clean_coordinates, so the field rows close
    # the loop exactly like the coordinate rows do.
    gather = ids_r.astype(int)
    field_rows = [
        df_slice[column].to_numpy()[gather]
        for column in ("VelocityX", "VelocityY", "VelocityZ", "CoefPressure")
    ]

    return np.array([xs_r, ys_r, *field_rows])


def reorder_coords(df_slice: pd.DataFrame) -> npt.NDArray[np.float32]:
"""Reorder the coordinates of a slice of a dataframe.

Expand Down
28 changes: 24 additions & 4 deletions engibench/problems/airfoil/v0.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
from engibench.problems.airfoil.utils import calc_area
from engibench.problems.airfoil.utils import calc_off_wall_distance
from engibench.problems.airfoil.utils import reorder_coords
from engibench.problems.airfoil.utils import reorder_coords_fields
from engibench.problems.airfoil.utils import scale_coords
from engibench.utils import container
from engibench.utils.files import clone_dir
Expand Down Expand Up @@ -259,14 +260,20 @@

return filename

def simulator_output_to_design(self, simulator_output: str | None = None) -> npt.NDArray[np.float32]:
def simulator_output_to_design(
self, simulator_output: str | None = None, *, field_output: bool = False
) -> npt.NDArray[np.float32]:
"""Converts a simulator output to a design.

Args:
simulator_output (str): The simulator output to convert. If None, the latest slice file is used.
field_output (bool): If True, returns ordered coordinates together with their corresponding
surface field variables (VelocityX, VelocityY, VelocityZ, CoefPressure) as an array of
shape ``(6, N)``. If False (default), returns only the ordered coordinates as shape ``(2, N)``.

Returns:
np.ndarray: The corresponding design.
np.ndarray: The reordered airfoil coordinates, shape ``(2, N)``, or coordinates with field
variables, shape ``(6, N)``, when ``field_output=True``.
"""
if simulator_output is None:
# Take latest slice file
Expand Down Expand Up @@ -304,18 +311,28 @@
# Concatenate node connections to the main data
slice_df = pd.concat([slice_df, nodes_arr], axis=1)

if field_output:
return reorder_coords_fields(slice_df)
return reorder_coords(slice_df)

def simulate(self, design: DesignType, config: dict[str, Any] | None = None, mpicores: int = 4) -> npt.NDArray:
def simulate(
self, design: DesignType, config: dict[str, Any] | None = None, mpicores: int = 4, field_output: bool = False

Check failure on line 319 in engibench/problems/airfoil/v0.py

View workflow job for this annotation

GitHub Actions / ruff

ruff (FBT002)

engibench/problems/airfoil/v0.py:319:92: FBT002 Boolean default positional argument in function definition help: Consider adding `@typing.override` if changing the function signature would violate the Liskov Substitution Principle

Check failure on line 319 in engibench/problems/airfoil/v0.py

View workflow job for this annotation

GitHub Actions / ruff

ruff (FBT001)

engibench/problems/airfoil/v0.py:319:92: FBT001 Boolean-typed positional argument in function definition help: Consider adding `@typing.override` if changing the function signature would violate the Liskov Substitution Principle
) -> npt.NDArray | tuple[npt.NDArray, npt.NDArray]:
"""Simulates the performance of an airfoil design.

Args:
design (dict): The design to simulate.
config (dict): A dictionary with configuration (e.g., boundary conditions, filenames) for the simulation.
mpicores (int): The number of MPI cores to use in the simulation.
field_output (bool): If True, also returns surface field variables (velocity components and
pressure coefficient) alongside the aerodynamic performance. Returns a tuple
``(np.array([drag, lift]), surface_fields)`` where ``surface_fields`` has shape ``(6, N)``
with rows ``[x, y, VelocityX, VelocityY, VelocityZ, CoefPressure]``.

Returns:
dict: The performance of the design - each entry of the dict corresponds to a named objective value.
npt.NDArray | tuple[npt.NDArray, npt.NDArray]: ``np.array([drag, lift])`` when
``field_output=False``, or ``(np.array([drag, lift]), surface_fields)`` when
``field_output=True``.
"""
if isinstance(design["angle_of_attack"], np.ndarray):
design["angle_of_attack"] = design["angle_of_attack"][0]
Expand Down Expand Up @@ -353,6 +370,9 @@
outputs = np.load(self.__local_study_dir + "/output/outputs.npy")
lift = float(outputs[3])
drag = float(outputs[4])
if field_output:
surface_fields = self.simulator_output_to_design(field_output=True)
return np.array([drag, lift]), surface_fields
return np.array([drag, lift])

def optimize(
Expand Down
Loading