Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/cluster_solidification/input_rve.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ steps:
batch: True
np: 24
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
data:
build:
datatype: Peregrine
Expand Down
4 changes: 2 additions & 2 deletions examples/microstructure_region/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@ steps:
exaca-mesh: 10.0e-6
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
execute:
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
- exaca:
class: microstructure_region
application: exaca
Expand Down
2 changes: 1 addition & 1 deletion examples/microstructure_region_slice/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ steps:
batch: True
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
- exaca:
class: microstructure_region_slice
application: exaca
Expand Down
2 changes: 1 addition & 1 deletion examples/rve_part_center/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ steps:
overwrite: True
np: 16
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
data:
build:
datatype: Peregrine
Expand Down
2 changes: 1 addition & 1 deletion examples/solidification_region_reduced/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ steps:
execute:
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
data:
build:
datatype: Peregrine
Expand Down
4 changes: 2 additions & 2 deletions examples/solidification_region_reduced_stl/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@ steps:
exaca-mesh: 10.0e-6
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
execute:
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
data:
build:
datatype: Peregrine
Expand Down
2 changes: 1 addition & 1 deletion examples/vtk_to_exodus_region/input.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ steps:
batch: True
np: 2
mpiexec: mpirun
mpiargs: --bind-to none
mpiflags: --bind-to none
- exaca:
class: microstructure_region
application: exaca
Expand Down
30 changes: 20 additions & 10 deletions src/myna/application/bnpy/bnpy.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
# License: 3-clause BSD, see https://opensource.org/licenses/BSD-3-Clause.
#
from myna.core.app.base import MynaApp
from myna.core.workflow.load_input import load_input
import glob
import os

Expand All @@ -18,17 +17,12 @@ def __init__(self):
self.app_type = "bnpy"
self.sF = 0.5
self.gamma = 8
self.settings = load_input(os.environ["MYNA_INPUT"])
self.input_dir = os.path.dirname(os.environ["MYNA_INPUT"])
self.input_dir = os.path.dirname(self.input_file)
self.resource_dir = os.path.join(self.input_dir, "myna_resources")
self.resource_template_dir = os.path.join(
self.resource_dir, *self.name.split("/")
)
self.training_dir = os.path.join(
self.resource_template_dir, "training_supervoxels"
)
self.make_directory_structure()
self.resource_template_dir = None
self.training_dir = None

def parse_shared_arguments(self):
self.parser.add_argument(
"--model",
default=None,
Expand All @@ -43,6 +37,11 @@ def __init__(self):
help="flag to use pre-trained model",
)

def parse_execute_arguments(self):
    """Prepare resource paths, then register shared options and parse argv.

    Resource directories are resolved first so that argument handling can
    rely on them being populated.
    """
    for step in (
        self.update_resource_paths,
        self.parse_shared_arguments,
        self.parse_known_args,
    ):
        step()

def get_latest_model_path(self):
latest_model = sorted(
glob.glob(os.path.join(self.get_model_dir_path(), "*")), reverse=True
Expand All @@ -62,3 +61,14 @@ def get_model_dir_path(self):
def make_directory_structure(self):
    """Create the resource-template and training directories (idempotent).

    Raises:
        RuntimeError: if either path is still None, i.e.
            ``update_resource_paths()`` has not been called yet
            (``__init__`` initializes both attributes to None).
    """
    # Fail with a clear message instead of the opaque TypeError that
    # os.makedirs(None) would otherwise raise.
    if self.resource_template_dir is None or self.training_dir is None:
        raise RuntimeError(
            "update_resource_paths() must be called before "
            "make_directory_structure()"
        )
    os.makedirs(self.resource_template_dir, exist_ok=True)
    os.makedirs(self.training_dir, exist_ok=True)

def update_resource_paths(self):
    """Derive and create the app's resource-template and training directories.

    Does nothing when ``self.class_name`` is None (base app with no
    concrete step class selected yet).
    """
    if self.class_name is None:
        return
    # NOTE(review): the guard checks self.class_name but the path is built
    # from self.name — confirm that asymmetry is intentional.
    parts = self.name.split("/")
    self.resource_template_dir = os.path.join(self.resource_dir, *parts)
    self.training_dir = os.path.join(
        self.resource_template_dir, "training_supervoxels"
    )
    self.make_directory_structure()
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,7 @@
#
"""Myna application for applying a clustering algorithm to solidification data
(G and V magnitudes and orientations)"""

from .app import BnpyClusterSolidification

__all__ = ["BnpyClusterSolidification"]
51 changes: 51 additions & 0 deletions src/myna/application/bnpy/cluster_solidification/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,61 @@
#
# License: 3-clause BSD, see https://opensource.org/licenses/BSD-3-Clause.
#
import os
import pandas as pd
from myna.application.bnpy import Bnpy


class BnpyClusterSolidification(Bnpy):
    """Myna bnpy app that clusters solidification (G and V) data per case."""

    def __init__(self):
        super().__init__()
        self.class_name = "cluster_solidification"

    def parse_execute_arguments(self):
        """Register the --thermal option, then defer to shared parsing."""
        self.parser.add_argument(
            "--thermal",
            default=None,
            type=str,
            help='thermal step name, for example: "--thermal 3dthesis"',
        )
        super().parse_execute_arguments()

    def execute(self):
        """Optionally train the voxel model, run clustering per case, and
        convert each clustering result into the expected Myna output file."""
        from .execute import run_clustering, train_voxel_model

        self.parse_execute_arguments()

        myna_files = self.get_step_output_paths()

        # Fall back to the previous workflow step when --thermal is omitted.
        thermal_step = self.args.thermal
        if thermal_step is None:
            thermal_step = self.last_step_name
        thermal_files = self.get_step_output_paths(thermal_step)

        # Hoisted: the same input directory is passed to both helpers.
        input_dir = os.path.dirname(self.input_file)

        if self.args.train_model:
            train_voxel_model(
                myna_files, thermal_files, self.sF, self.gamma, input_dir
            )

        output_files = []
        case_dirs = self.get_case_dirs(output_paths=myna_files)
        for case_dir, thermal_file in zip(case_dirs, thermal_files):
            print("Running clustering for:")
            print(f"- {case_dir=}")
            print(f"- {thermal_file=}")
            clustered = run_clustering(
                case_dir,
                thermal_file,
                self.sF,
                self.gamma,
                self.args.overwrite,
                input_dir,
            )
            output_files.append(clustered)

        # Re-write each clustering CSV as the corresponding Myna output file.
        for cluster_csv, myna_file in zip(output_files, myna_files):
            pd.read_csv(cluster_csv).to_csv(myna_file, index=False)
59 changes: 2 additions & 57 deletions src/myna/application/bnpy/cluster_solidification/execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,11 @@
import os
import pandas as pd
import numpy as np
from myna.core.workflow.load_input import load_input
import myna.application.bnpy as myna_bnpy
import glob
import matplotlib.pyplot as plt
from myna.application.bnpy import get_representative_distribution
from .app import BnpyClusterSolidification
from myna.application.bnpy.cluster_solidification import BnpyClusterSolidification


def reduce_thermal_file_to_df(thermal_file):
Expand Down Expand Up @@ -330,61 +329,7 @@ def run_clustering(

def main():
app = BnpyClusterSolidification()

# Set up argparse
parser = app.parser
parser.add_argument(
"--thermal",
default=None,
type=str,
help='thermal step name, for example: "--thermal 3dthesis"',
)

# Parse command line arguments
args = parser.parse_args()
settings = load_input(os.environ["MYNA_INPUT"])
train_model = args.train_model
overwrite = args.overwrite

# Get expected Myna output files
step_name = os.environ["MYNA_STEP_NAME"]
myna_files = settings["data"]["output_paths"][step_name]
thermal_step_name = args.thermal
if thermal_step_name is None:
thermal_step_name = os.environ["MYNA_LAST_STEP_NAME"]
thermal_files = settings["data"]["output_paths"][thermal_step_name]

# Assemble training data and train model
gamma = 8
sF = 0.5
if train_model:
train_voxel_model(
myna_files, thermal_files, sF, gamma, os.path.dirname(app.input_file)
)

# Run clustering using trained model
output_files = []
for case_dir, thermal_file in zip(
[os.path.dirname(x) for x in myna_files], thermal_files
):
print("Running clustering for:")
print(f"- {case_dir=}")
print(f"- {thermal_file=}")
output_files.append(
run_clustering(
case_dir,
thermal_file,
sF,
gamma,
overwrite,
os.path.dirname(app.input_file),
)
)

# Post-process results to convert to Myna format
for filepath, mynafile in zip(output_files, myna_files):
df = pd.read_csv(filepath)
df.to_csv(mynafile, index=False)
app.execute()


if __name__ == "__main__":
Expand Down
4 changes: 4 additions & 0 deletions src/myna/application/bnpy/cluster_supervoxel/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,7 @@
"""Myna application for applying a clustering algorithm to supervoxelized data
containing composition data, intended to be run after the bnpy/cluster_solidification
app"""

from .app import BnpyClusterSupervoxel

__all__ = ["BnpyClusterSupervoxel"]
79 changes: 79 additions & 0 deletions src/myna/application/bnpy/cluster_supervoxel/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,89 @@
#
# License: 3-clause BSD, see https://opensource.org/licenses/BSD-3-Clause.
#
import glob
import os
from myna.application.bnpy import Bnpy


class BnpyClusterSupervoxel(Bnpy):
    """Myna bnpy app that clusters supervoxel composition data.

    Intended to run after the bnpy/cluster_solidification app: it loads the
    newest trained voxel model, then trains and/or applies a supervoxel-level
    clustering model for each Myna case.
    """

    def __init__(self):
        super().__init__()
        self.class_name = "cluster_supervoxel"

    def parse_execute_arguments(self):
        """Register supervoxel-specific options, then defer to shared parsing."""
        self.parser.add_argument(
            "--cluster",
            default="",
            type=str,
            help="input cluster step name, for example: --cluster cluster",
        )
        self.parser.add_argument(
            "--voxel-model",
            dest="voxel_model",
            default="myna_resources/cluster_solidification/voxel_model-sF=0.5-gamma=8",
            type=str,
            help="path to model for voxel clustering",
        )
        self.parser.add_argument(
            "--res",
            default=250.0e-6,
            type=float,
            help="resolution to use for super-voxel size, in meters, for example: --res 250.0e-6",
        )
        super().parse_execute_arguments()

    def execute(self):
        """Cluster supervoxel composition data for each Myna case.

        Raises:
            ImportError: if the optional ``bnpy`` dependency is not installed.
            FileNotFoundError: if no saved voxel model is found under the
                --voxel-model path.
        """
        from .execute import run, train_supervoxel_model

        self.parse_execute_arguments()

        try:
            import bnpy
        except ImportError as err:
            # Chain the original error so the missing-package cause stays visible.
            raise ImportError(
                'Myna bnpy app requires "pip install .[bnpy]" optional dependencies!'
            ) from err

        # Descend two directory levels (model family -> latest run) to the
        # newest saved voxel model.  The original duplicated the
        # sorted(glob(...))[0] descent verbatim and raised a bare IndexError
        # when nothing matched; collapse to a loop with a clear error.
        voxel_model_path = self.args.voxel_model.replace("/", os.sep)
        for _ in range(2):
            candidates = sorted(
                glob.glob(os.path.join(voxel_model_path, "*")), reverse=True
            )
            if not candidates:
                raise FileNotFoundError(
                    f"No voxel model found under {voxel_model_path!r}"
                )
            voxel_model_path = candidates[0]
        voxel_model, _ = bnpy.load_model_at_lap(voxel_model_path, None)
        # Downstream processing requires at least two clusters.
        self.n_voxel_clusters = max(voxel_model.allocModel.K, 2)

        myna_files = self.get_step_output_paths()
        cluster_step_name = self.args.cluster
        if cluster_step_name == "":
            # Fall back to the previous workflow step when --cluster is omitted.
            cluster_step_name = self.last_step_name
        voxel_cluster_files = self.get_step_output_paths(cluster_step_name)

        supervoxel_composition_filename = "supervoxel_composition.csv"
        self.sF = 0.5
        self.gamma = 8.0
        if self.args.train_model:
            trained_model_path, composition_files = train_supervoxel_model(
                myna_files,
                voxel_cluster_files,
                self,
                comp_file_name=supervoxel_composition_filename,
            )
        else:
            # Reuse the most recent trained model and the per-case composition
            # files written alongside each Myna output.
            trained_model_path = self.get_latest_model_path()
            composition_files = [
                os.path.join(
                    os.path.dirname(myna_file), supervoxel_composition_filename
                )
                for myna_file in myna_files
            ]

        print("- Clustering supervoxel data:")
        for myna_file, composition_file in zip(myna_files, composition_files):
            print(f"  - {composition_file=}")
            run(
                myna_file,
                composition_file,
                trained_model_path,
                self,
            )
Loading
Loading