diff --git a/app/models/data/deeponet_default_settings.json b/app/models/data/deeponet_default_settings.json new file mode 100644 index 0000000..d097e6d --- /dev/null +++ b/app/models/data/deeponet_default_settings.json @@ -0,0 +1,120 @@ +{ + "absorption_coefficients": { + "floor": "0.6, 0.69, 0.71, 0.7, 0.63", + "wall1": "0.6, 0.69, 0.71, 0.7, 0.63", + "ceiling": "0.6, 0.69, 0.71, 0.7, 0.63", + "wall2": "0.6, 0.69, 0.71, 0.7, 0.63", + "wall3": "0.6, 0.69, 0.71, 0.7, 0.63", + "wall4": "0.6, 0.69, 0.71, 0.7, 0.63" + }, + "msh_path": "MeasurementRoom.msh", + "geo_path": "MeasurementRoom.geo", + "results": [ + { + "percentage": 100, + "sourceX": 2, + "sourceY": 2, + "sourceZ": 1.5, + "resultType": "DON", + "responses": [ + { + "x": 1.0, + "y": 1.0, + "z": 1.5, + "receiverResults": [], + "receiverResultsUncorrected": [] + }, + { + "x": 0.5, + "y": 1.0, + "z": 1.5, + "receiverResults": [], + "receiverResultsUncorrected": [] + }, + { + "x": 1, + "y": 2, + "z": 0.5, + "receiverResults": [], + "receiverResultsUncorrected": [] + }, + { + "x": 0.5, + "y": 0.5, + "z": 0.5, + "receiverResults": [], + "receiverResultsUncorrected": [] + } + ] + } + ], + "dg_setup": { + "simulationSettings": { + "dg_freq_upper_limit": 400, + "dg_c0": 343, + "dg_rho0": 1.213, + "dg_ir_length": 0.1, + "dg_poly_order": 4, + "dg_ppw": 2, + "dg_cfl": 1 + }, + "output_path": "tmp/deeponet/", + "output_filename": "dg_sim_results", + "file_format": "npz" + }, + "deeponet_train_setup": { + "id": "cube_dummy_1src_fixed_recvs", + + "input_dir": "tmp/deeponet/", + "output_dir": "tmp/deeponet/results/", + + "train_data_dir": "train_data", + "val_data_dir": "val_data", + + "f0_feat": [500.0, 250.0, 167.0], + "normalize_data": true, + + "iterations": 1000, + "use_adaptive_weights": true, + "decay_steps": 2000, + "decay_rate": 0.90, + "learning_rate": 1e-3, + "optimizer": "adam", + + "__comment1__": "total batch_size is a multiple of branch and coordinate sizes", + "batch_size_branch": 64, + "batch_size_coord": 
1000, + + "branch_net": { + "architecture": "mod-mlp", + "activation": "sin", + "num_hidden_layers": 3, + "num_hidden_neurons": 512 + }, + "trunk_net": { + "architecture": "mod-mlp", + "activation": "sin", + "num_hidden_layers": 3, + "num_hidden_neurons": 512 + }, + "num_output_neurons": 100 + }, + "deeponet_inference_setup": { + "write_full_wave_field": false, + "snap_to_grid": true, + "write_ir_plots": true, + "write_ir_animations": false, + "write_ir_wav": true, + + "recv_positions": [[1.0, 1.0, 1.1], + [0.5, 1.0, 1.5], + [1.0, 0.9, 0.5], + [0.5, 0.5, 0.5]], + + + "receiver_position_groups": [ + "recv_positions" + ] + }, + "settingsPreset": "Advanced" +} diff --git a/app/models/data/simulation_settings.json b/app/models/data/simulation_settings.json index e5ef7ad..f0d3908 100644 --- a/app/models/data/simulation_settings.json +++ b/app/models/data/simulation_settings.json @@ -14,5 +14,13 @@ "simulationType": "DG", "repositoryURL": "https://github.com/Building-acoustics-TU-Eindhoven/edg-acoustics/", "documentationURL": "https://dg-roomacoustics.readthedocs.io/en/latest/" + }, + { + "description": "DeepONet for Acoustic Wave Propagation is a deep neural network that trains on other simulation methods (currently uses the Discontinuous Galerkin (DG)), and allows to create impulse responses at arbitrary receiver locations in the space. 
The latter is currently not yet supported by CHORAS.", + "label": "DeepONet", + "name": "don_setting.json", + "simulationType": "DON", + "repositoryURL": "https://github.com/dtu-act/deeponet-acoustic-wave-prop/", + "documentationURL": "https://github.com/dtu-act/deeponet-acoustic-wave-prop/" } ] \ No newline at end of file diff --git a/app/schemas/auralization_schema.py b/app/schemas/auralization_schema.py index adeb528..7e8d24f 100644 --- a/app/schemas/auralization_schema.py +++ b/app/schemas/auralization_schema.py @@ -19,7 +19,7 @@ class AuralizationSchema(Schema): id = fields.Integer() simulationId = fields.Integer() audioFileId = fields.Integer() - status = fields.Enum(Status, default=Status.Uncreated) + status = fields.Enum(Status, dump_default=Status.Uncreated) createdAt = fields.String() updatedAt = fields.String() diff --git a/app/schemas/file_schema.py b/app/schemas/file_schema.py index b39b10a..4e98a65 100644 --- a/app/schemas/file_schema.py +++ b/app/schemas/file_schema.py @@ -2,10 +2,10 @@ class FileSchema(Schema): - id = fields.Number() + id = fields.Integer() fileName = fields.Str() slot = fields.Str() - size = fields.Number() + size = fields.Integer() class GetSlotSchema(Schema): diff --git a/app/schemas/geometry_schema.py b/app/schemas/geometry_schema.py index b4c2829..1957de7 100644 --- a/app/schemas/geometry_schema.py +++ b/app/schemas/geometry_schema.py @@ -16,7 +16,7 @@ class GeometrySchema(Schema): class GeometryStartQuerySchema(Schema): - fileUploadId = fields.Number(required=True) + fileUploadId = fields.Integer(required=True) class GeometryGetQuerySchema(Schema): diff --git a/app/schemas/material_schema.py b/app/schemas/material_schema.py index 92cd80d..2b0665c 100644 --- a/app/schemas/material_schema.py +++ b/app/schemas/material_schema.py @@ -14,7 +14,7 @@ class MaterialUpdateSchema(Schema): absorptionCoefficients = fields.List(fields.Float()) class MaterialSchema(MaterialCreateSchema): - id = fields.Number() + id = fields.Integer() origin 
= fields.String() createdAt = fields.String() updatedAt = fields.String() diff --git a/app/schemas/mesh_schema.py b/app/schemas/mesh_schema.py index 631e58f..4f52404 100644 --- a/app/schemas/mesh_schema.py +++ b/app/schemas/mesh_schema.py @@ -10,7 +10,7 @@ class MeshSchema(Schema): - id = fields.Number() + id = fields.Integer() taskId = fields.Integer() createdAt = fields.Str() diff --git a/app/schemas/model_schema.py b/app/schemas/model_schema.py index 3b557ff..0f23ed9 100644 --- a/app/schemas/model_schema.py +++ b/app/schemas/model_schema.py @@ -4,7 +4,7 @@ class ModelSchema(Schema): - id = fields.Number() + id = fields.Integer() name = fields.Str(required=True) sourceFileId = fields.Integer() outputFileId = fields.Integer() diff --git a/app/schemas/project_schema.py b/app/schemas/project_schema.py index 26e272f..6adaff9 100644 --- a/app/schemas/project_schema.py +++ b/app/schemas/project_schema.py @@ -6,7 +6,7 @@ class ProjectSchema(Schema): - id = fields.Number() + id = fields.Integer() name = fields.Str(required=True) description = fields.Str(required=True) group = fields.Str(required=True) diff --git a/app/services/simulation_service.py b/app/services/simulation_service.py index 693719d..d662d02 100644 --- a/app/services/simulation_service.py +++ b/app/services/simulation_service.py @@ -225,6 +225,13 @@ def start_solver_task(simulation_id): source, simulation.receivers, TaskType.MyNewMethod.value ) ) + if simulation.taskType.value in (TaskType.DON.value,): + task_statuses.append(create_source_task(TaskType.DON.value, source["id"])) + results_container.append( + create_result_source_object( + source, simulation.receivers, TaskType.DON.value + ) + ) sources_tasks.append( { 
@@ -322,6 +329,7 @@ def start_solver_task(simulation_id): def run_solver(simulation_run_id: int, json_path: str): from simulation_backend.DGinterface import dg_method from simulation_backend.DEinterface import de_method + from simulation_backend.DeepONetInterface import deeponet_method from simulation_backend.MyNewMethodInterface import mynewmethod_method from app.db import db @@ -396,6 +404,11 @@ def run_solver(simulation_run_id: int, json_path: str): dg_method(json_file_path=json_path) logger.info("DG method") + case TaskType.DON: + # DeepONet METHOD + deeponet_method(json_file_path=json_path) + logger.info("DeepONet method") + case TaskType.MyNewMethod: # MyNewMethod METHOD mynewmethod_method(json_file_path=json_path) @@ -416,12 +429,13 @@ def run_solver(simulation_run_id: int, json_path: str): logger.info("Saving to xlsx...") # save the simulation result json to xlsx - if not ExportHelper.parse_json_file_to_xlsx_file( - json_path, json_path.replace(".json", ".xlsx") - ): - logger.error("Error saving the result to xlsx") - raise "Error saving the result to xlsx" - + try: + ExportHelper.parse_json_file_to_xlsx_file( + json_path, json_path.replace(".json", ".xlsx") + ) + except Exception as ex: + logger.error(f"Error saving the result to xlsx: {ex}") + # db - save the xlsx file path export = Export( name=Path(json_path).name.replace(".json", ".xlsx"), @@ -443,19 +457,22 @@ def run_solver(simulation_run_id: int, json_path: str): json_path.replace(".json", "_pressure.csv"), json_path.replace(".json", ".wav"), ) - + case TaskType.DON: + imp_tot, fs = auralization_calculation_DG( + None, + json_path.replace(".json", "_pressure.csv"), + json_path.replace(".json", ".wav"), + ) # auralization: save the impulse response to xlsx - if not ExportHelper.write_data_to_xlsx_file( - json_path.replace(".json", ".xlsx"), - CustomExportParametersConfig.impulse_response, - {f"{fs}Hz": imp_tot}, - ): - logger.error( - "Error saving the impulse response to xlsx" + try: + 
ExportHelper.write_data_to_xlsx_file( + json_path.replace(".json", ".xlsx"), + CustomExportParametersConfig.impulse_response, + {f"{fs}Hz": imp_tot}, ) - raise "Error saving the impulse response to xlsx" - + except Exception as ex: + logger.error(f"Error saving the impulse response to xlsx: {ex}") result_container = {} if json_path is not None: diff --git a/app/types/Task.py b/app/types/Task.py index ec93aae..972c4f4 100644 --- a/app/types/Task.py +++ b/app/types/Task.py @@ -6,5 +6,6 @@ class TaskType(Enum): Mesh = "Mesh" DE = "DE" DG = "DG" + DON = "DON" MyNewMethod = "MyNewMethod" BOTH = "BOTH" diff --git a/example_settings/de_setting.json b/example_settings/de_setting.json index a384c65..a250051 100644 --- a/example_settings/de_setting.json +++ b/example_settings/de_setting.json @@ -3,6 +3,7 @@ "options": [ { "name": "Simulation length", + "description": "Selects what parameter determines the length of the simulation. If EDT is selected, the Impulse response length variable will have no effect, and vice versa.", "id": "sim_len_type", "type": "string", "display": "radio", @@ -15,6 +16,7 @@ }, { "name": "Energy decay threshold", + "description": "The threshold (in dB) until when the DE method simulates. Only used if the Simulation length parameter is set to EDT.", "id": "edt", "type": "integer", "display": "text", @@ -26,6 +28,7 @@ }, { "name": "Impulse response length", + "description": "The length of the impulse response the DE method will simulate. Only used if the Simulation length parameter is set to IR length.", "id": "de_ir_length", "type": "float", "display": "text", @@ -37,6 +40,7 @@ }, { "name": "Speed of sound", + "description": "The speed of sound in the medium (air).", "id": "de_c0", "type": "float", "display": "text", @@ -48,6 +52,7 @@ }, { "name": "Characteristic length (mesh)", + "description": "Distance between nodes that the meshing tool aims for (in meters). 
Higher values yield shorter simulation times, but might cause errors due to a too coarse mesh.", "id": "de_lc", "type": "float", "display": "text", diff --git a/example_settings/dg_setting.json b/example_settings/dg_setting.json index dd1c852..b3b7603 100644 --- a/example_settings/dg_setting.json +++ b/example_settings/dg_setting.json @@ -3,6 +3,7 @@ "options": [ { "name": "Freq. upper limit", + "description": "Until what frequency you want to simulate. Higher frequencies yield longer simulation times.", "id": "dg_freq_upper_limit", "type": "integer", "display": "text", @@ -14,6 +15,7 @@ }, { "name": "Speed of sound", + "description": "The speed of sound in the medium (air).", "id": "dg_c0", "type": "float", "display": "text", @@ -25,7 +27,8 @@ }, { "name": "Air density", - "id": "dg_rho0", + "description": "The density of the medium (air).", + "id": "dg_rho0", "type": "float", "display": "text", "min": 0.001, @@ -36,6 +39,7 @@ }, { "name": "Impulse Response length", + "description": "The length of the impulse response you want to simulate.", "id": "dg_ir_length", "type": "float", "display": "text", @@ -47,6 +51,7 @@ }, { "name": "Poly order", + "description": "The polynomial order of the underlying DG method (only change if you know what you're doing).", "id": "dg_poly_order", "type": "integer", "display": "text", @@ -57,6 +62,7 @@ }, { "name": "Points per wavelength", + "description": "The points per wavelength of the underlying DG method (only change if you know what you're doing).", "id": "dg_ppw", "type": "integer", "display": "text", @@ -67,6 +73,7 @@ }, { "name": "CFL", + "description": "The Courant-Friedrichs-Lewy condition of the underlying DG method (only change if you know what you're doing).", "id": "dg_cfl", "type": "float", "display": "text", diff --git a/example_settings/don_setting.json b/example_settings/don_setting.json new file mode 100644 index 0000000..4d44d4d --- /dev/null +++ b/example_settings/don_setting.json @@ -0,0 +1,86 @@ +{ + "type": 
"simulationSettings", + "options": [ + { + "name": "Freq. upper limit", + "description": "Until what frequency you want to simulate. Higher frequencies yield longer simulation times.", + "id": "don_freq_upper_limit", + "type": "integer", + "display": "text", + "min": 20, + "max": 4000, + "default": 200, + "step": 1, + "endAdornment": "Hz" + }, + { + "name": "Speed of sound", + "description": "The speed of sound in the medium (air).", + "id": "don_c0", + "type": "float", + "display": "text", + "min": 100, + "max": 500, + "default": 343, + "step": 1, + "endAdornment": "m/s" + }, + { + "name": "Air density", + "description": "The density of the medium (air).", + "id": "don_rho0", + "type": "float", + "display": "text", + "min": 0.001, + "max": 3, + "default": 1.213, + "step": 0.001, + "endAdornment": "kg/m^3" + }, + { + "name": "Impulse Response length", + "description": "The length of the impulse response you want to simulate.", + "id": "don_ir_length", + "type": "float", + "display": "text", + "min": 0.01, + "max": 5, + "default": 0.1, + "step": 0.01, + "endAdornment": "s" + }, + { + "name": "Poly order", + "description": "The polynomial order of the underlying DG method (only change if you know what you're doing).", + "id": "don_poly_order", + "type": "integer", + "display": "text", + "min": 2, + "max": 10, + "default": 4, + "step": 1 + }, + { + "name": "Points per wavelength", + "description": "The points per wavelength of the underlying DG method (only change if you know what you're doing).", + "id": "don_ppw", + "type": "integer", + "display": "text", + "min": 2, + "max": 10, + "default": 2, + "step": 1 + }, + { + "name": "CFL", + "description": "The Courant-Friedrichs-Lewy condition of the underlying DG method (only change if you know what you're doing).", + "id": "don_cfl", + "type": "float", + "display": "text", + "min": 0.001, + "max": 1, + "default": 1, + "step": 0.001 + } + ] +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt 
index a36eaa2..bd36942 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,144 +1,144 @@ -alabaster==0.7.16 -alembic==1.10.2 -amqp==5.2.0 -apispec==6.3.0 -asgiref==3.7.2 -asttokens==2.2.1 -attrs==23.2.0 -Babel==2.15.0 -backcall==0.2.0 -billiard==4.2.0 -black==25.1.0 -blinker==1.5 -celery==5.4.0 -certifi==2023.5.7 -cfgv==3.4.0 -charset-normalizer==3.3.2 -click==8.1.3 -click-didyoumean==0.3.1 -click-plugins==1.1.1 -click-repl==0.3.0 -colorama==0.4.6 -comm==0.1.2 -contourpy==1.2.1 -coverage==7.2.7 -cycler==0.12.1 -debugpy==1.6.4 -decorator==5.1.1 -distlib==0.3.6 -Django==4.2.6 -dnspython==2.6.1 -docutils==0.21.2 -entrypoints==0.4 -eventlet==0.36.1 -executing==1.2.0 -filelock==3.12.2 -flake8==6.1.0 -Flask==2.2.5 -Flask-Cors==3.0.10 -Flask-JWT-Extended==4.4.4 -Flask-Migrate==4.0.4 -Flask-Script==2.0.5 -flask-smorest==0.40.0 -Flask-SQLAlchemy==3.0.3 -fonttools==4.53.1 -gmsh==4.13.1 -greenlet==2.0.2 -gunicorn==20.1.0 -identify==2.6.0 -idna==3.7 -imagesize==1.4.1 -importlib-metadata==6.7.0 -ipykernel==6.19.2 -ipython==8.10.0 -isort==5.12.0 -itsdangerous==2.1.2 -jedi==0.18.2 -Jinja2==3.1.2 -jsonschema==4.22.0 -jsonschema-specifications==2023.12.1 -jupyter_client==7.4.8 -jupyter_core==5.1.0 -kiwisolver==1.4.5 -kombu==5.3.7 -Mako==1.2.4 -markdown-it-py==3.0.0 -MarkupSafe==2.1.2 -marshmallow==3.19.0 -mccabe==0.7.0 -mdurl==0.1.2 -meshio==5.3.5 -mypy-extensions==1.0.0 -nest-asyncio==1.5.6 -networkx==3.3 -nodeenv==1.9.1 +alabaster +alembic +amqp +apispec +asgiref +asttokens +attrs +Babel +backcall +billiard +black +blinker +celery +certifi +cfgv +charset-normalizer +click +click-didyoumean +click-plugins +click-repl +colorama +comm +contourpy +coverage +cycler +debugpy +decorator +distlib +Django +dnspython +docutils +entrypoints +eventlet +executing +filelock +flake8 +Flask +Flask-Cors +Flask-JWT-Extended +Flask-Migrate +Flask-Script +flask-smorest +Flask-SQLAlchemy +fonttools +gmsh +greenlet +gunicorn +identify +idna +imagesize +importlib-metadata +ipykernel +ipython 
+isort +itsdangerous +jedi +Jinja2 +jsonschema +jsonschema-specifications +jupyter_client +jupyter_core +kiwisolver +kombu +Mako +markdown-it-py +MarkupSafe +marshmallow +mccabe +mdurl +meshio +mypy-extensions +nest-asyncio +networkx +nodeenv numpy -numpy-stl==3.1.1 -openpyxl==3.1.5 -packaging==22.0 -pandas==2.2.3 -parso==0.8.3 -passlib==1.7.4 -pathspec==0.12.1 -pexpect==4.8.0 -pickleshare==0.7.5 -pillow==10.3.0 -pipenv==2023.7.3 -platformdirs==3.8.0 -pre-commit==3.8.0 -prompt-toolkit==3.0.36 -psutil==5.9.4 -psycopg2-binary==2.9.6 -ptyprocess==0.7.0 -pure-eval==0.2.2 -pycodestyle==2.11.1 -pyflakes==3.1.0 -pyglet==1.5.29 -Pygments==2.18.0 -PyJWT==2.6.0 -pyparsing==3.1.2 -python-dateutil==2.8.2 -python-dotenv==1.0.0 -python-utils==3.8.2 -pytz==2023.3 -# pywin32==306 -PyYAML==6.0.2 -pyzmq==24.0.1 -referencing==0.35.1 -requests==2.32.3 -rhino3dm==8.6.1 -rich==13.7.1 -rpds-py==0.18.1 -scipy==1.14.0 -six==1.16.0 -snowballstemmer==2.2.0 -soundfile==0.13.1 -Sphinx==7.3.7 # Docs -sphinxcontrib-applehelp==1.0.8 # docs, potentially secondary requirement -sphinxcontrib-devhelp==1.0.6 # docs, potentially secondary requirement -sphinxcontrib-htmlhelp==2.0.5 # docs, potentially secondary requirement -sphinxcontrib-jsmath==1.0.1 # docs, potentially secondary requirement -sphinxcontrib-qthelp==1.0.7 # docs, potentially secondary requirement -sphinxcontrib-serializinghtml==1.1.10 # docs, potentially secondary requirement -SQLAlchemy==2.0.7 -sqlparse==0.4.4 -stack-data==0.6.2 -tomli==2.0.1 -tornado==6.3.2 -traitlets==5.7.1 -trimesh==4.4.1 +numpy-stl +openpyxl +packaging +pandas +parso +passlib +pathspec +pexpect +pickleshare +pillow +pipenv +platformdirs +pre-commit +prompt-toolkit +psutil +psycopg2-binary +ptyprocess +pure-eval +pycodestyle +pyflakes +pyglet +Pygments +PyJWT +pyparsing +python-dateutil +python-dotenv +python-utils +pytz +# pywin32 +PyYAML +pyzmq +referencing +requests +rhino3dm +rich +rpds-py +scipy +six +snowballstemmer +soundfile +Sphinx # Docs 
+sphinxcontrib-applehelp # docs, potentially secondary requirement +sphinxcontrib-devhelp # docs, potentially secondary requirement +sphinxcontrib-htmlhelp # docs, potentially secondary requirement +sphinxcontrib-jsmath # docs, potentially secondary requirement +sphinxcontrib-qthelp # docs, potentially secondary requirement +sphinxcontrib-serializinghtml # docs, potentially secondary requirement +SQLAlchemy +sqlparse +stack-data +tomli +tornado +traitlets +trimesh typing_extensions -tzdata==2023.3 -urllib3==2.2.2 -vine==5.1.0 -virtualenv==20.23.1 -virtualenv-clone==0.5.7 -watchdog==4.0.1 -wcwidth==0.2.5 -webargs==8.2.0 -Werkzeug==2.2.3 -wincertstore==0.2 -zipp==3.15.0 +tzdata +urllib3 +vine +virtualenv +virtualenv-clone +watchdog +wcwidth +webargs +Werkzeug +wincertstore +zipp pytest locust ezdxf @@ -149,10 +149,10 @@ sphinx-design # Docs, required for tabs and other design elements sphinx-copybutton # Docs, required for copy buttons in code blocks sphinx-gallery # Docs, required for example gallery matplotlib # Docs, required for plot directive -pyroomacoustics +# pyroomacoustics -e ./simulation-backend +git+https://github.com/dtu-act/deeponet-acoustic-wave-prop.git@3da1ae7bce73d558b005ef20b19a92cdce675e10 git+https://github.com/Building-acoustics-TU-Eindhoven/acousticDE.git@d32afb2498e27bd996fc7356d57dc4f1ed76aa44#egg=acousticDE -# git+https://github.com/dtu-act/deeponet-acoustic-wave-prop.git@3d3fc5ee952756eedcd4fec3c3674ad829825c7e#egg=deeponet-acoustics git+https://github.com/Building-acoustics-TU-Eindhoven/edg-acoustics.git@08cac98da98ed14ba1366741b1c0644001503b82#egg=edg-acoustics --e ./MyNewMethod +-e ./MyNewMethod \ No newline at end of file diff --git a/simulation-backend/simulation_backend/DGinterface.py b/simulation-backend/simulation_backend/DGinterface.py index bdb6e09..1fa74a2 100644 --- a/simulation-backend/simulation_backend/DGinterface.py +++ b/simulation-backend/simulation_backend/DGinterface.py @@ -133,22 +133,6 @@ def dg_method(json_file_path: 
str | Path, save_results_to_json: bool = True): # Block 1: User input # -------------------- simulation_settings = result_container["simulationSettings"] - - # TODO: should make a better solution for this that calls the dg_method function as if there was no deeponet - called_from_deeponet = False - if result_container["results"][0]["resultType"] == "DON": - called_from_deeponet = True - - if called_from_deeponet: - output_path = result_container["output_path"] - output_results = result_container["output_filename"] - file_format = result_container["file_format"] - - # clean up - os.makedirs(output_path, exist_ok=True) - Path(os.path.join(output_path, f"{output_results}.{file_format}")).unlink(missing_ok=True) - Path(os.path.join(output_path, "results.json")).unlink(missing_ok=True) - freq_upper_limit = simulation_settings["dg_freq_upper_limit"] mesh_filename = result_container["msh_path"] @@ -276,18 +260,6 @@ def dg_method(json_file_path: str | Path, save_results_to_json: bool = True): rec, "brute_force" ) # brute_force or scipy(default) approach to locate the receiver points in the mesh - if called_from_deeponet: - # write initial conditition - if file_format == "npz": - ic_mesh = np.array([sim.xyz[0].flatten(), sim.xyz[0].flatten(), sim.xyz[0].flatten()]) - numpy.savez( - os.path.join(output_path, output_results), - IC_pressure=sim.P.flatten(), - IC_mesh=ic_mesh, - ) - else: - raise NotImplementedError("file_format") - tsi_time_integrator = edg_acoustics.TSI_TI(sim.RHS_operator, sim.dtscale, CFL, Nt=3) sim.init_TimeIntegrator(tsi_time_integrator) sim.time_integration( @@ -316,7 +288,7 @@ def dg_method(json_file_path: str | Path, save_results_to_json: bool = True): except Exception: print("Error saving the simulation solver settings") raise Exception("Error saving the simulation solver settings") - + df = pd.DataFrame() df["t"] = impulse_length * np.arange(0, 
len(data["results"][0]["responses"][0]["receiverResults"]))/len(data["results"][0]["responses"][0]["receiverResults"]) df["pressure"] = data["results"][0]["responses"][0]["receiverResults"] @@ -325,9 +297,28 @@ def dg_method(json_file_path: str | Path, save_results_to_json: bool = True): json_file_path.replace(".json", "_pressure.csv"), "w", newline="" ) as pressure_result_csv: df.to_csv(pressure_result_csv, index=False) + else: + output_path = result_container["output_path"] + output_results = result_container["output_filename"] + file_format = result_container["file_format"] + + # clean up + os.makedirs(output_path, exist_ok=True) + Path(os.path.join(output_path, f"{output_results}.{file_format}")).unlink(missing_ok=True) + + # write initial conditition + if file_format == "npz": + ic_mesh = np.array([sim.xyz[0].flatten(), sim.xyz[0].flatten(), sim.xyz[0].flatten()]) + numpy.savez( + os.path.join(output_path, output_results), + IC_pressure=sim.P.flatten(), + IC_mesh=ic_mesh, + ) + else: + raise NotImplementedError("file_format") - if called_from_deeponet: results.write_results(os.path.join(output_path, output_results), file_format, append=True) + print("Finished!") diff --git a/simulation-backend/simulation_backend/DeepONetInterface.py b/simulation-backend/simulation_backend/DeepONetInterface.py index 60a62ea..c65b127 100644 --- a/simulation-backend/simulation_backend/DeepONetInterface.py +++ b/simulation-backend/simulation_backend/DeepONetInterface.py @@ -60,9 +60,9 @@ def _convert_relative_to_absolute_paths( ) # Construct derived paths - data["deeponet_inference_setup"]["validation_data_dir"] = os.path.join( + data["deeponet_inference_setup"]["test_data_dir"] = os.path.join( data["deeponet_train_setup"]["input_dir"], - data["deeponet_train_setup"]["testing_data_dir"], + data["deeponet_train_setup"]["val_data_dir"], ) data["deeponet_inference_setup"]["model_dir"] = os.path.join( data["deeponet_train_setup"]["output_dir"], data["deeponet_train_setup"]["id"] @@ 
-75,7 +75,7 @@ def _convert_relative_to_absolute_paths( return data -def _prepare_dg_json(json_file_path: str) -> str: +def _prepare_dg_json(json_file_path: str, dirname: str) -> str: """ Create a new JSON file from the configuration file to be used by the DG method. @@ -128,9 +128,16 @@ def _run_dg_simulation(json_file_path: str | Path) -> None: Args: json_file_path: Path to the JSON configuration file """ - gmsh.initialize() + should_finalise = False + if gmsh.isInitialized(): + should_finalise = True + else: + gmsh.initialize() + dg_method(json_file_path, save_results_to_json=False) - gmsh.finalize() + + if should_finalise: + gmsh.finalize() def _load_and_process_dg_results( @@ -278,7 +285,7 @@ def _prepare_validation_data( # Copy HDF5 validation data file_path_val_h5 = os.path.join( output_path, - settings["deeponet_train_setup"]["testing_data_dir"], + settings["deeponet_train_setup"]["val_data_dir"], f"src{source_index}", os.path.basename(train_h5_path), ) @@ -447,6 +454,35 @@ def _write_results_json( print(f"Results written to: {output_json_path}") +def _convert_from_CHORAS_json(json_file_path: str | Path, dirname: str): + + default_data_path = _resolve_path("app/models/data/deeponet_default_settings.json", Path(dirname).parent.parent) + with open(default_data_path, "r", encoding="utf-8") as default_file: + default_data = json.load(default_file) + + with open(json_file_path, "r", encoding="utf-8") as file: + data = json.load(file) + + default_data["absorption_coefficients"] = data["absorption_coefficients"] + default_data["msh_path"] = data["msh_path"] + default_data["geo_path"] = data["geo_path"] + + default_data["results"][0]["sourceX"] = data["results"][0]["sourceX"] + default_data["results"][0]["sourceY"] = data["results"][0]["sourceY"] + default_data["results"][0]["sourceZ"] = data["results"][0]["sourceZ"] + + data["simulationSettings"] = { + k.replace("don_", "dg_", 1): v + for k, v in data["simulationSettings"].items() + } + + 
default_data["dg_setup"]["simulationSettings"] = data["simulationSettings"] + default_data["should_cancel"] = False + + with open(json_file_path, "w") as file: + json.dump(default_data, file, indent=4) + + def deeponet_method(json_file_path: str | Path, output_json_path: str | Path = None): """ Execute the complete DeepONet pipeline: DG simulation, data preparation, training, and inference. @@ -467,6 +503,14 @@ def deeponet_method(json_file_path: str | Path, output_json_path: str | Path = N """ # Step 1: Convert relative paths to absolute paths dirname = os.path.dirname(__file__) + + with open(json_file_path, "r", encoding="utf-8") as file: + data = json.load(file) + + # This means that the data came from CHORAS + if "dg_setup" not in data: + _convert_from_CHORAS_json(json_file_path, dirname) + settings = _convert_relative_to_absolute_paths(json_file_path, dirname) # Set default output path if not provided @@ -476,7 +520,7 @@ def deeponet_method(json_file_path: str | Path, output_json_path: str | Path = N ) # Step 2: Run DG simulation - dg_json = _prepare_dg_json(json_file_path) + dg_json = _prepare_dg_json(json_file_path, dirname) _run_dg_simulation(dg_json) # Step 3: Load and process DG results @@ -502,7 +546,7 @@ def deeponet_method(json_file_path: str | Path, output_json_path: str | Path = N # Save training data to HDF5 file_path_train_h5 = os.path.join( output_path, - settings["deeponet_train_setup"]["training_data_dir"], + settings["deeponet_train_setup"]["train_data_dir"], f"src{i}", f"{output_filename}.h5", ) diff --git a/simulation-backend/simulation_backend/headless_backend/input/exampleInput_deeponet_acoustics.json b/simulation-backend/simulation_backend/headless_backend/input/exampleInput_deeponet_acoustics.json index 5fced38..8705925 100644 --- a/simulation-backend/simulation_backend/headless_backend/input/exampleInput_deeponet_acoustics.json +++ b/simulation-backend/simulation_backend/headless_backend/input/exampleInput_deeponet_acoustics.json @@ 
-12,15 +12,15 @@ "results": [ { "percentage": 100, - "sourceX": 0.4, - "sourceY": 1.0, - "sourceZ": 1.2, + "sourceX": 2, + "sourceY": 2, + "sourceZ": 1.5, "resultType": "DON", "responses": [ { "x": 1.0, "y": 1.0, - "z": 1.1, + "z": 1.5, "receiverResults": [], "receiverResultsUncorrected": [] }, @@ -32,8 +32,8 @@ "receiverResultsUncorrected": [] }, { - "x": 1.0, - "y": 0.9, + "x": 1, + "y": 2, "z": 0.5, "receiverResults": [], "receiverResultsUncorrected": [] @@ -53,7 +53,7 @@ "dg_freq_upper_limit": 200, "dg_c0": 343, "dg_rho0": 1.213, - "dg_ir_length": 0.05, + "dg_ir_length": 0.1, "dg_poly_order": 4, "dg_ppw": 2, "dg_cfl": 1 @@ -68,12 +68,10 @@ "input_dir": "tmp/deeponet/", "output_dir": "tmp/deeponet/results/", - "training_data_dir": "train_data", - "testing_data_dir": "val_data", - - "tmax": 1000, + "train_data_dir": "train_data", + "val_data_dir": "val_data", - "f0_feat": [1.458,0.729,0.486], + "f0_feat": [500.0, 250.0, 167.0], "normalize_data": true, "iterations": 1000, @@ -101,9 +99,7 @@ }, "num_output_neurons": 100 }, - "deeponet_inference_setup": { - "tmax": 1000, - + "deeponet_inference_setup": { "write_full_wave_field": false, "snap_to_grid": true, "write_ir_plots": true,