diff --git a/doc/source/conf.py b/doc/source/conf.py index fff2f32d0ac..d3d6fd294ee 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -124,6 +124,7 @@ "ansys-dpf-core": ("https://dpf.docs.pyansys.com/version/stable/", None), "ansys-math-core": ("https://math.docs.pyansys.com/version/stable/", None), "ansys-tools-path": ("https://path.tools.docs.pyansys.com/version/stable/", None), + "pytwin": ("https://twin.docs.pyansys.com/version/stable/", None), } suppress_warnings = ["label.*", "design.fa-build", "config.cache"] diff --git a/doc/source/examples/extended_examples/index.rst b/doc/source/examples/extended_examples/index.rst index 34cdf88a19d..cd7e985eea3 100644 --- a/doc/source/examples/extended_examples/index.rst +++ b/doc/source/examples/extended_examples/index.rst @@ -19,13 +19,16 @@ with other programs, libraries, and features in development. +------------------------------------------------------+--------------------------------------------------------------------------------------------+ | :ref:`dash_example` | Demonstrates the use of PyMAPDL with Dash. | +------------------------------------------------------+--------------------------------------------------------------------------------------------+ -| :ref:`executable_example` | Demonstrates how to create your own Python app with a command-line interface. | +| :ref:`executable_example` | Demonstrates how to create your own Python app with a command-line interface. | +------------------------------------------------------+--------------------------------------------------------------------------------------------+ -| :ref:`gui_example` | Demonstrates how to create your own GUI app with Python and PySide6. | +| :ref:`gui_example` | Demonstrates how to create your own GUI app with Python and PySide6. | +------------------------------------------------------+--------------------------------------------------------------------------------------------+ -| :ref:`hpc_ml_ga_example` | Demonstrates how to use PyMAPDL in a high-performance computing system managed by SLURM. | +| :ref:`hpc_ml_ga_example` | Demonstrates how to use PyMAPDL in a high-performance computing system managed by SLURM. | +------------------------------------------------------+--------------------------------------------------------------------------------------------+ -| :ref:`stochastic_fem_example` | Demonstrates using PyMAPDL for stochastic FEA using Monte Carlo simulation. | +| :ref:`stochastic_fem_example` | Demonstrates using PyMAPDL for stochastic FEA using Monte Carlo simulation. | ++------------------------------------------------------+--------------------------------------------------------------------------------------------+ +| :ref:`static_rom_data_generation` | Demonstrates using PyMAPDL and PyDPF to generate training data for an Ansys Twin Builder | +| | static reduced order model (ROM) . | +------------------------------------------------------+--------------------------------------------------------------------------------------------+ @@ -42,4 +45,5 @@ with other programs, libraries, and features in development. 
gui/executable.rst hpc/hpc_ml_ga.rst sfem/stochastic_fem.rst + static_rom/static_rom_data_generation.rst diff --git a/doc/source/examples/extended_examples/static_rom/images/static_ROM_file_structure.png b/doc/source/examples/extended_examples/static_rom/images/static_ROM_file_structure.png new file mode 100644 index 00000000000..70915031608 Binary files /dev/null and b/doc/source/examples/extended_examples/static_rom/images/static_ROM_file_structure.png differ diff --git a/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.py b/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.py new file mode 100644 index 00000000000..05b2d4709b2 --- /dev/null +++ b/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.py @@ -0,0 +1,302 @@ +# Copyright (C) 2016 - 2025 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +""" +Creating training data for a 3D Static reduced order model (ROM) +---------------------------------------------------------------- + +This example shows how a parametric sweep may be run on a MAPDL model and the output displacement +and stress data exported into the format required to build a Static ROM with Ansys Twin Builder. +""" + +import csv +import json +from pathlib import Path +import tempfile + +from ansys.dpf import core as dpf +import numpy as np +from pytwin import write_binary + +from ansys.mapdl.core import launch_mapdl +from ansys.mapdl.core.examples.downloads import download_example_data + + +def compress_id_list(id_list: np.ndarray): + """ + Compress array of consecutive IDs. + + Compress array by replacing runs of three or more consecutive integers with ``start, -1, end``. + + Example + ------- + >>> input = np.array([0, 1, 2, 3, 4, 5, 6, 28, 29, 30, 31, 13, 15, 17, 18, 19, 20]) + >>> compress_id_list(input) + [0, -1, 6, 28, -1, 31, 13, 15, 17, -1, 20] + """ + if id_list.size == 0: + return [] + + # Find breaks in consecutive sequences. 
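+    # np.diff is 1 between consecutive IDs, so any other difference marks the end of a run.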
+ breaks = np.where(np.diff(id_list) != 1)[0] + + # Add endpoints to form run boundaries + run_starts = np.insert(breaks + 1, 0, 0) + run_ends = np.append(breaks, len(id_list) - 1) + + result = [] + for start, end in zip(run_starts, run_ends): + length = end - start + 1 + if length >= 3: + result.extend([int(id_list[start]), -1, int(id_list[end])]) + else: + result.extend(id_list[start : end + 1].tolist()) + return result + + +def write_settings( + path: str | Path, field: dpf.Field, name: str, is_deformation: bool = False +): + """Write the settings.json file.""" + + if field.component_count in [1, 3]: + dimensionality = [field.component_count] + symmetricalDim = False + elif field.component_count == 6: + dimensionality = [3, 3] + symmetricalDim = True + else: + raise ValueError(f"Unsupported field dimensionality {field.component_count}") + + settings = { + "pointsCoordinates": False, + "ids": compress_id_list(field.scoping.ids), + "location": "Nodal", + "unit": field.unit, + "unitDimension": {}, + "unitFactor": 1.0, + "name": name, + "deformation": is_deformation, + "dimensionality": dimensionality, + "symmetricalDim": symmetricalDim, + "namedSelections": {}, + } + + with open(Path(path).joinpath("settings.json"), "w") as fw: + # Set default to convert Numpy int to int + json.dump(settings, fw, default=int, indent=4) + + +def get_scoping(model: dpf.Model): + """Return scoping of unique node IDs connected to elements in model.""" + op = dpf.operators.scoping.connectivity_ids( + mesh_scoping=model.metadata.meshed_region.elements.scoping, + mesh=model.metadata.meshed_region, + take_mid_nodes=True, + ) + # Get output data + connected_nodes_scoping = op.outputs.mesh_scoping() + # Compress the list to only keep unique IDs + connected_nodes_scoping.ids = sorted(list(set(op.outputs.mesh_scoping().ids))) + return connected_nodes_scoping + + +def write_points(model: dpf.Model, scoping: dpf.Scoping, output_folder: str | Path): + """Write points.bin file.""" + nodes = model.metadata.meshed_region.nodes + scoped_node_indices, _ = nodes.map_scoping(scoping) + points_coordinates = nodes.coordinates_field.data[scoped_node_indices] + write_binary(Path(output_folder).joinpath("points.bin"), points_coordinates) + + +def write_doe_headers(output_folder: str | Path, name: str, parameters: dict): + """Write blank doe.csv file with headers.""" + with open(Path(output_folder).joinpath("doe.csv"), "w", newline="") as fw: + writer = csv.writer(fw) + writer.writerow([name] + list(parameters.keys())) + + +def write_doe_entry(output_folder: str | Path, snapshot_name: str, parameters: dict): + """Write entry to doe.csv file.""" + with open(Path(output_folder).joinpath("doe.csv"), "a", newline="") as fw: + writer = csv.writer(fw) + writer.writerow([snapshot_name] + list(parameters.values())) + + +def export_static_ROM_variation( + model: dpf.Model, + scoping: dpf.Scoping, + name: str, + output_folder: str | Path, + parameters: dict, + snap_idx: int = 0, + new_metadata: bool = False, +): + """ + Export static ROM data for one parameter variation. + + Parameters + ---------- + model : dpf.Model + DPF model with results data loaded. + scoping : dpf.Scoping + DPF nodal scoping for result export. + name : str + result quantity to export. Valid options are `displacement` and `stress`. + output_folder : str|Path + exported data will be stored in this folder. Use separate folders for each physics type. + parameters : dict + dictionary of name-value pairs for the input parameters used to generate the current + results. 
+    snap_idx : int, default = 0
+        Unique ID for the current results.
+    new_metadata : bool, default = False
+        Used the first time the function is called for a given data generation run. Triggers the
+        creation of ``points.bin``, ``settings.json``, and a new ``doe.csv`` file. Existing files
+        are overwritten.
+    """
+    # Create the output folder
+    output_folder = Path(output_folder)
+    output_folder.mkdir(parents=True, exist_ok=True)
+
+    # Modify this section to export additional result types
+    is_deformation = False
+    if name == "displacement":
+        result = model.results.displacement
+        is_deformation = True
+    elif name == "stress":
+        result = model.results.stress
+    else:
+        raise ValueError(f"Unsupported result type: {name}")
+
+    # Retrieve the requested result at the last result set.
+    scoped_result = result.on_last_time_freq.on_mesh_scoping(scoping)
+
+    # Result must be sorted by scoping to ensure consistency across outputs.
+    sorted_result = dpf.operators.logic.ascending_sort_fc(
+        scoped_result, sort_by_scoping=True
+    )
+    result_field = sorted_result.outputs.fields_container()[0]
+
+    if new_metadata:
+        write_points(model, scoping, output_folder)
+        write_doe_headers(output_folder, name, parameters)
+        write_settings(output_folder, result_field, name, is_deformation=is_deformation)
+
+    # Write snapshots
+    snapshot_folder = output_folder.joinpath("snapshots")
+    snapshot_folder.mkdir(parents=True, exist_ok=True)
+    snap_name = f"file{snap_idx}.bin"
+    write_doe_entry(output_folder, snap_name, parameters)
+    write_binary(snapshot_folder.joinpath(snap_name), result_field.data)
+
+
+def export_static_ROM_data(
+    mapdl_results: list[tuple[str, dict]], output_folder: str | Path
+):
+    """
+    Export static ROM data to the output folder.
+
+    Parameters
+    ----------
+    mapdl_results : list[tuple[str, dict]]
+        List of tuples of the MAPDL result file path and the parameter values for each variation
+        solved.
+    output_folder : str | Path
+        Location where the ROM output data is stored.
+    """
+    for idx, (rst_path, parameters) in enumerate(mapdl_results):
+        # Load the results to DPF and create scoping.
+        model = dpf.Model(rst_path)
+        scoping = get_scoping(model)
+
+        # Only create points.bin and settings.json on first design point.
+        new_metadata = idx == 0
+
+        # Export displacement and stress data.
+        for name in ["displacement", "stress"]:
+            data_folder = Path(output_folder).joinpath(name)
+            export_static_ROM_variation(
+                model,
+                scoping,
+                name,
+                data_folder,
+                parameters=parameters,
+                snap_idx=idx,
+                new_metadata=new_metadata,
+            )
+
+
+def run_mapdl_variations():
+    """
+    Run the MAPDL model parametric variations.
+
+    Returns
+    -------
+    list[tuple[str, dict]]
+        List of tuples of the MAPDL result file path (on the platform where MAPDL was executed)
+        and the parameter values for each variation solved.
+    """
+    # Specify the force load variations
+    forces = [250, 500, 750, 1000]
+
+    # First, start MAPDL and disable all but error messages.
+    mapdl = launch_mapdl(loglevel="ERROR")
+
+    # Download the example database: ``notch_file`` is the path to the downloaded file.
+    notch_file = download_example_data(
+        filename="3d_notch.db", directory="pymapdl/static_ROM_data_generation"
+    )
+
+    mapdl.resume(notch_file, mute=True)
+
+    # Initialise the outputs
+    outputs = []
+
+    # Solve the parameter variations.
+    for idx, force_load in enumerate(forces):
+        # Rename the job to change the log, error, and other file names.
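+        # Keeping a distinct jobname per variation also keeps a separate result file
+        # for each design point instead of overwriting a single file.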
+        mapdl.filname(f"variation_{idx}")
+        mapdl.run("/SOLU")
+        mapdl.cmsel("S", "load_node", "NODE")
+        mapdl.fdele("ALL", "FX")
+        mapdl.f("ALL", "FX", force_load)
+        mapdl.allsel()
+        mapdl.antype("STATIC")
+        mapdl.solve()
+        mapdl.finish(mute=True)
+        rst_path = mapdl.result_file
+        outputs.append((rst_path, {"force[N]": force_load}))
+    print(f"MAPDL run in: {mapdl.directory}")
+    mapdl.exit()
+    return outputs
+
+
+def run():
+    # Define a folder for output.
+    rom_folder = Path(tempfile.gettempdir()).joinpath("ansys_pymapdl_Static_ROM")
+    mapdl_results = run_mapdl_variations()
+    export_static_ROM_data(mapdl_results, rom_folder)
+    print(f"ROM data exported to: {rom_folder}")
+
+
+if __name__ == "__main__":
+    run()
diff --git a/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.rst b/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.rst
new file mode 100644
index 00000000000..493060c9da1
--- /dev/null
+++ b/doc/source/examples/extended_examples/static_rom/static_rom_data_generation.rst
@@ -0,0 +1,242 @@
+.. _static_rom_data_generation:
+
+================================================================
+Creating training data for a 3D Static reduced order model (ROM)
+================================================================
+
+This example shows how a parametric sweep may be run on an MAPDL model and the output displacement
+and stress data exported into the format required to build a Static ROM with Ansys Twin Builder.
+
+The general data structure for ROM building is shown in Figure 1.
+
+.. figure:: images/static_ROM_file_structure.png
+
+   **Figure 1: Organization of files and directories for static ROM creation.**
+
+The specific files output by the example are shown below::
+
+    ansys_pymapdl_Static_ROM
+    │
+    ├───displacement
+    │   │   doe.csv
+    │   │   points.bin
+    │   │   settings.json
+    │   │
+    │   └───snapshots
+    │           file0.bin
+    │           file1.bin
+    │           file2.bin
+    │           file3.bin
+    │
+    └───stress
+        │   doe.csv
+        │   points.bin
+        │   settings.json
+        │
+        └───snapshots
+                file0.bin
+                file1.bin
+                file2.bin
+                file3.bin
+
+MAPDL notch model
+=================
+This example uses the model created in :ref:`ref_3d_plane_stress_concentration` as a base. That
+example was modified to add a nodal component, ``load_node``, to which the force is applied. This
+makes it simple to retrieve and modify the force scoping for parametric runs.
+
+The model uses the force load in Newtons as the input that is parametrically varied.
+
+
+Additional packages used
+========================
+
+In addition to PyMAPDL, the example requires the following packages, which should be installed
+before running:
+
+* `NumPy <https://numpy.org/>`_ is used for array operations.
+* `PyDPF <https://dpf.docs.pyansys.com/>`_ is used to efficiently manipulate result data.
+* `PyTwin <pytwin_docs_>`_ is used to convert result data to binary snapshots.
+
+
+Core functions
+==============
+
+Solving the MAPDL parametric variations
+---------------------------------------
+The :func:`run_mapdl_variations` function solves the MAPDL model for a set of parameter variations.
+It loads a saved MAPDL database and loops through a list of force values. For each value, it
+applies that force to the nodal component, solves the model, and records the location of the
+result file, along with the parameter names and values.
+
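+The returned list has one entry per design point. A minimal sketch of the expected structure
+(the result file paths shown are only illustrative) is:
+
+.. code-block:: python
+
+   mapdl_results = run_mapdl_variations()
+   # For example:
+   # [("/path/to/variation_0.rst", {"force[N]": 250}),
+   #  ("/path/to/variation_1.rst", {"force[N]": 500}),
+   #  ...]
+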
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: run_mapdl_variations
+
+Exporting ROM data for the solved variations
+--------------------------------------------
+The :func:`export_static_ROM_data` function exports the ROM data from a set of solved parametric
+variations. The function loads each result file into DPF and gets the scoping. It then calls the
+:func:`export_static_ROM_variation` function to export the ROM data for that result file. The
+``new_metadata`` Boolean is set to ``True`` on the first loop to trigger the creation of the
+``points.bin`` and ``settings.json`` files and a new ``doe.csv`` file.
+
+The :func:`pytwin.write_binary` function is used to write the result field data to a ROM binary
+file.
+
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: export_static_ROM_data
+
+Exporting ROM data for a specific variation
+-------------------------------------------
+The :func:`export_static_ROM_variation` function exports ROM snapshot data for a specific
+parametric variation. The function exports a snapshot for ``displacement`` and for ``stress`` to
+correspondingly named folders and records the snapshot name and parameter data to the ``doe.csv``
+file in that folder.
+
+The results are sorted by the scoping IDs to ensure consistent ordering of results and points.
+
+The ``new_metadata`` Boolean controls the creation of the ``points.bin`` and ``settings.json``
+files and a new ``doe.csv`` file. If it is ``False``, the first two files are not written and
+parameter data is appended to the existing ``doe.csv`` file.
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: export_static_ROM_variation
+
+
+Additional functions
+====================
+
+Setting the export scope
+------------------------
+The ROM data is exported on all nodes connected to elements. The :func:`get_scoping` function gets
+the nodes that are connected to all the elements in the mesh. This avoids including any
+unconnected nodes, with null values, in the scoping.
+
+The returned scoping initially contains duplicate node IDs, since a node can be connected to
+multiple elements and is included once for each one. Creating a Python :class:`set` of node IDs
+removes the duplicates.
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: get_scoping
+
+Write points coordinates file
+-----------------------------
+The :func:`write_points` function writes the x, y, z coordinates of the scoped nodes to the ROM
+``points.bin`` file using :func:`pytwin.write_binary`.
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: write_points
+
+Write ROM settings
+------------------
+The :func:`write_settings` function writes the ROM ``settings.json`` file. This records
+information about the field dimensionality (scalar, vector, tensor), the result name, the unit,
+whether the field represents a deformation, and so on. The full file specification is available in
+the Twin Builder Static ROM Builder documentation.
+
+The original node numbers from the export scoping are compressed and stored in the ``ids`` field.
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: write_settings
+
+Compress ID list
+----------------
+The ``settings.json`` specification allows the storage of lists of consecutive integers in a
+compressed fashion. :func:`compress_id_list` implements this compression.
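+
+For example, the function turns a short array of node IDs into the following compressed list:
+
+.. code-block:: python
+
+   >>> compress_id_list(np.array([1, 2, 3, 4, 10, 12, 13, 14]))
+   [1, -1, 4, 10, 12, -1, 14]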
+
+Runs of three or more incrementally increasing values are replaced by a sequence of
+``start, -1, end``.
+
+.. literalinclude:: static_rom_data_generation.py
+   :language: python
+   :pyobject: compress_id_list
+
+
+Running the script
+==================
+
+To run the script, install PyMAPDL and the additional dependencies listed in
+`Additional packages used`_, then download and run
+:download:`static_rom_data_generation.py <static_rom_data_generation.py>`. The ROM data is stored
+in a folder called ``ansys_pymapdl_Static_ROM`` in the system temporary folder.
+
+
+Script assumptions and modification ideas
+=========================================
+
+Local execution
+---------------
+The script assumes that both the MAPDL solve and the DPF export are performed locally, on the same
+machine as the Python script execution.
+
+See :ref:`ref_dpf_basic_example` for potential modification options.
+
+Additional result types
+-----------------------
+The script exports ``stress`` and ``displacement`` results. Additional result types may be
+included or excluded by modifying :func:`export_static_ROM_variation`.
+
+For example, elastic strain could be added by making these changes to
+:func:`export_static_ROM_variation` (near line 180):
+
+.. code-block:: python
+
+    # Modify this section to export additional result types
+    is_deformation = False
+    if name == "displacement":
+        result = model.results.displacement
+        is_deformation = True
+    elif name == "stress":
+        result = model.results.stress
+    # Add additional quantities here
+    elif name == "elastic_strain":
+        result = model.results.elastic_strain
+    else:
+        raise ValueError(f"Unsupported result type: {name}")
+
+Modifying result scoping
+------------------------
+The script exports results on all nodes that are connected to elements. This does not account for
+nodes that are connected to elements but do not have results associated with them. For example,
+MPC184 pilot nodes do not usually have a stress result. The script also does not allow scoping to
+a component.
+
+The :func:`get_scoping` function could be modified to allow broader scoping options.
+
+Modifying settings
+------------------
+The ``settings.json`` files generated by :func:`write_settings` implicitly assume SI units
+(``unitFactor`` is a scaling factor from SI units) and do not include any information about unit
+dimensions. For example, for displacement use:
+
+.. code-block:: python
+
+    "unitDimension": {"length": 1.0}
+
+or for stress use:
+
+.. code-block:: python
+
+    "unitDimension": {"mass": 1.0, "length": -1.0, "time": -2.0}
+
+Logic could be added to check the model units and change the ``unitDimension`` and ``unitFactor``
+fields accordingly. This information is not used to build the ROM, but it may be used when
+consuming the ROM in downstream applications.
+
+If results are included on nodal components, these can be referenced in the ``namedSelections``
+field. Each entry consists of a name and the indices of the scoped nodes in the overall scoping
+(not node IDs). :func:`compress_id_list` can be used to compress long lists of nodes.
+
+For example, to add a named selection called ``first_nodes`` on the first one hundred scoped nodes
+and another called ``second_nodes`` on the fiftieth to one hundred and fiftieth scoped nodes, you
+would add this:
+
+.. code-block:: python
+
+    "namedSelections": {"first_nodes": [0, -1, 99], "second_nodes": [49, -1, 149]}
diff --git a/doc/source/links.rst b/doc/source/links.rst
index ee717569a82..c452aef7088 100644
--- a/doc/source/links.rst
+++ b/doc/source/links.rst
@@ -17,6 +17,7 @@
 .. _ansys_tools_path: http://path.tools.docs.pyansys.com/
 .. _pyansys_math: https://math.docs.pyansys.com/version/stable/
 .. _pyansys_math_api: https://math.docs.pyansys.com/version/stable/api/index.html
+.. _pytwin_docs: https://twin.docs.pyansys.com
 
 .. #PyAnsys Developer Guide
 .. _dev_guide_pyansys: https://dev.docs.pyansys.com