Aplowman/develop #50

Open: wants to merge 7 commits into develop
Changes from all commits
27 changes: 20 additions & 7 deletions cipher_parse/cipher_input.py
@@ -333,6 +333,7 @@ def from_voronoi(
num_phases=None,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
geometry = CIPHERGeometry.from_voronoi(
num_phases=num_phases,
@@ -343,6 +344,7 @@ def from_voronoi(
size=size,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

inp = cls(
@@ -366,6 +368,7 @@ def from_seed_voronoi(
solution_parameters,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
return cls.from_voronoi(
seeds=seeds,
@@ -378,6 +381,7 @@ def from_seed_voronoi(
solution_parameters=solution_parameters,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

@classmethod
@@ -393,6 +397,7 @@ def from_random_voronoi(
solution_parameters,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
return cls.from_voronoi(
num_phases=num_phases,
@@ -405,6 +410,7 @@ def from_random_voronoi(
solution_parameters=solution_parameters,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

@classmethod
@@ -418,13 +424,15 @@ def from_voxel_phase_map(
outputs,
solution_parameters,
random_seed=None,
combine_phases=None,
):
geometry = CIPHERGeometry(
voxel_phase=voxel_phase,
materials=materials,
interfaces=interfaces,
size=size,
random_seed=random_seed,
combine_phases=combine_phases,
)
inp = cls(
geometry=geometry,
@@ -445,6 +453,7 @@ def from_dream3D(
solution_parameters,
container_labels=None,
phase_type_map=None,
combine_phases=None,
):
default_container_labels = {
"SyntheticVolumeDataContainer": "SyntheticVolumeDataContainer",
@@ -559,6 +568,7 @@ def from_dream3D(
components=components,
outputs=outputs,
solution_parameters=solution_parameters,
combine_phases=combine_phases,
)

@property
@@ -604,14 +614,17 @@ def write_yaml(self, path, separate_mappings=False):

self.geometry._validate_interface_map()

phase_mat_str = compress_1D_array_string(self.geometry.phase_material + 1) + "\n"
vox_phase_str = (
compress_1D_array_string(self.geometry.voxel_phase.flatten(order="F") + 1)
+ "\n"
)
int_str = (
compress_1D_array_string(self.geometry.interface_map_int.flatten() + 1) + "\n"
phase_mat_str = compress_1D_array_string(self.geometry.phase_material + 1)
vox_phase_str = compress_1D_array_string(
self.geometry.voxel_phase.flatten(order="F") + 1
)
int_str = compress_1D_array_string(self.geometry.interface_map_int.flatten() + 1)

if not separate_mappings:
# CIPHER does not like trailing new lines in the mapping text files:
phase_mat_str += "\n"
vox_phase_str += "\n"
int_str += "\n"

if separate_mappings:
phase_mat_map = "phase_material_mapping.txt"
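All of the `CIPHERInput` constructors above now accept a `combine_phases` mapping and forward it to `CIPHERGeometry`. A minimal sketch of how a caller might pass it; the import path, phase-type name and limit are assumptions for illustration, and keyword arguments not visible in this diff are elided:

from cipher_parse.cipher_input import CIPHERInput  # import path assumed

# Hypothetical: cap the "grain" phase type at 200 distinct phase IDs so that
# mutually non-neighbouring grains share an ID and the CIPHER run is cheaper.
inp = CIPHERInput.from_random_voronoi(
    num_phases=1000,
    solution_parameters=solution_parameters,  # defined elsewhere
    random_seed=42,
    is_periodic=True,
    combine_phases={"grain": 200},  # hypothetical phase-type name and limit
    # ...materials, interfaces, size, components, outputs as usual...
)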
75 changes: 60 additions & 15 deletions cipher_parse/cipher_output.py
@@ -246,15 +246,28 @@ def get_incremental_data(self):
incremental_data = []
for file_i_idx, file_i in enumerate(vtu_file_list):
print(f"Reading VTU file {file_i.name}...", flush=True)
mesh = pv.get_reader(file_i).read()
try:
mesh = pv.get_reader(file_i).read()
except Exception:
print(f"Failed to read VTU file {file_i.name}.", flush=True)
continue
vtu_file_name = file_i.name

img_data = pv.ImageData(dimensions=grid_size)

print(
f"Resampling VTU file {file_i.name} onto an image-data mesh...",
flush=True,
)
img_mesh = img_data.sample(mesh)
try:
img_mesh = img_data.sample(mesh)
except Exception:
print(
f"Failed to re-sample VTU file {file_i.name} onto an image "
f"data grid.",
flush=True,
)
continue

inc_data_i = {
"increment": int(re.search(r"\d+", vtu_file_name).group()),
@@ -267,7 +280,15 @@ def get_incremental_data(self):

standard_outputs = {}
for name in output_lookup:
arr_flat = img_mesh.get_array(output_lookup[name])
try:
arr_flat = img_mesh.get_array(output_lookup[name])
except KeyError:
print(
f"Failed to get array {output_lookup[name]} from file "
f"{file_i.name}",
flush=True,
)
continue
arr = arr_flat.reshape(img_mesh.dimensions, order="F")
if name in STANDARD_OUTPUTS_TYPES:
arr = arr.astype(STANDARD_OUTPUTS_TYPES[name])
@@ -278,19 +299,33 @@ def get_incremental_data(self):
name_i = derive_out_i["name"]
func = DERIVED_OUTPUTS_FUNCS[name_i]
func_args = {"input_data": inp_dat}
func_args.update(
{i: standard_outputs[i] for i in DERIVED_OUTPUTS_REQUIREMENTS[name_i]}
)
try:
func_args.update(
{
i: standard_outputs[i]
for i in DERIVED_OUTPUTS_REQUIREMENTS[name_i]
}
)
except KeyError:
print(
f"Failed to prepare arguments for derived output function "
f"{func.__name__!r}.",
flush=True,
)
continue
derived_outputs[name_i] = func(**func_args)

for out_name, keep_idx in outputs_keep_idx.items():
if file_i_idx in keep_idx:
if out_name in DERIVED_OUTPUTS_REQUIREMENTS:
# a derived output:
inc_data_i[out_name] = derived_outputs[out_name]
else:
# a standard output:
inc_data_i[out_name] = standard_outputs[out_name]
try:
if out_name in DERIVED_OUTPUTS_REQUIREMENTS:
# a derived output:
inc_data_i[out_name] = derived_outputs[out_name]
else:
# a standard output:
inc_data_i[out_name] = standard_outputs[out_name]
except KeyError:
continue

incremental_data.append(inc_data_i)
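For reference, a standalone sketch of the read-and-resample pattern that the new try/except blocks guard; the file name, grid size and array name are hypothetical, and `pv.ImageData` assumes pyvista >= 0.39:

import pyvista as pv

vtu_path = "solution-000100.vtu"  # hypothetical CIPHER output file
grid_size = (64, 64, 64)          # hypothetical voxel grid dimensions

mesh = pv.get_reader(vtu_path).read()          # may fail on a missing or truncated file
img_data = pv.ImageData(dimensions=grid_size)
img_mesh = img_data.sample(mesh)               # resample onto the regular grid
arr = img_mesh.get_array("phaseid").reshape(grid_size, order="F")  # array name hypothetical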

@@ -434,7 +469,7 @@ def from_JSON_file(cls, path):
data = json.load(fp)
return cls.from_JSON(data)

def to_zarr(self, path):
def to_zarr(self, path, overwrite=False, close_store=None):
"""Save to a persistent zarr store.

This does not yet save `geometries`.
@@ -455,18 +490,28 @@ def to_zarr(self, path):
out_group.create_dataset(
name="stdout_file_str",
data=self.stdout_file_str.splitlines(),
overwrite=overwrite,
)
out_group.create_dataset(
name="input_YAML_file_str",
data=self.input_YAML_file_str.splitlines(),
overwrite=overwrite,
)
inc_dat_group = out_group.create_group("incremental_data", overwrite=True)
for idx, inc_dat_i in enumerate(self.incremental_data):
inc_dat_i_group = inc_dat_group.create_group(f"{idx}")
inc_dat_i_group.attrs.put({k: inc_dat_i[k] for k in INC_DATA_NON_ARRAYS})
for k in inc_dat_i:
if k not in INC_DATA_NON_ARRAYS:
inc_dat_i_group.create_dataset(name=k, data=inc_dat_i[k])
inc_dat_i_group.create_dataset(
name=k, data=inc_dat_i[k], overwrite=overwrite
)

if path.endswith(".zip") and close_store is None:
close_store = True

if close_store:
out_group.store.close()

return out_group

@@ -477,7 +522,7 @@ def from_zarr(cls, path, cipher_input=None, quiet=True):
This does not yet load `geometries`.

"""
group = zarr.open_group(store=path)
group = zarr.open_group(store=path, mode="r")
attrs = group.attrs.asdict()
kwargs = {
"directory": attrs["directory"],
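Taken together, the new `overwrite` and `close_store` arguments and the read-only open in `from_zarr` allow a round trip through a zipped store; a rough sketch, in which the class name `CIPHEROutput` and the file names are assumptions:

out = CIPHEROutput(...)  # an existing output object, constructed elsewhere

# Zip stores are closed automatically (close_store defaults to True for ".zip"):
out.to_zarr("cipher_out.zarr.zip", overwrite=True)

# Reopen read-only; zarr infers a ZipStore from the ".zip" suffix:
loaded = CIPHEROutput.from_zarr("cipher_out.zarr.zip")

# For a directory store, the returned group stays open unless close_store=True:
grp = out.to_zarr("cipher_out.zarr", overwrite=True)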
110 changes: 109 additions & 1 deletion cipher_parse/geometry.py
@@ -1,3 +1,5 @@
import random

from damask import Orientation
import pyvista as pv
import numpy as np
@@ -39,6 +41,7 @@ def __init__(
time=None,
increment=None,
incremental_data_idx=None,
combine_phases=None,
):
"""
Parameters
@@ -107,6 +110,22 @@ def __init__(

self._ensure_phase_assignment(random_seed)

if combine_phases:
# combine multiple phases into the same phase to reduce the computational cost
self.voxel_phase = self.combine_phases_per_phase_type(
voxel_map,
materials,
combine_phases,
)
self.voxel_map = VoxelMap(
region_ID=self.voxel_phase,  # rebuild the map from the combined phase array
size=size,
is_periodic=is_periodic,
quiet=quiet,
)
all_phases = self.present_phases
self._num_phases = all_phases.size

self._phase_material = self._get_phase_material()
self._validate_interfaces()
self._check_interface_phase_pairs()
@@ -131,6 +150,93 @@ def __init__(
self._misorientation_matrix = None
self._misorientation_matrix_is_degrees = None

@staticmethod
def combine_phases_per_phase_type(voxel_map, materials, combine_phases):
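"""Reduce the phase count by merging phases of each phase type named in
`combine_phases`, then reindexing phase IDs to a consecutive range.

For each named phase type, groups of mutually non-neighbouring phase IDs are
reassigned to a single "root" ID (falling back to allowing neighbours within a
group if no other candidates remain). Illustrative example: six phases (IDs 0-5)
with a limit of three gives a group size of two, so roots 0, 1 and 2 each absorb
one partner from {3, 4, 5} and the surviving IDs are reindexed to 0-2.
"""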

print(f"combining phases according to {combine_phases}")

voxel_phase_new = voxel_map.region_ID
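# `neighbour_list` appears to be a 2 x N array of adjacent region-ID pairs
# (row 0: a region, row 1: one of its neighbours), given the indexing below.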
neighbours = voxel_map.neighbour_list

for mat in materials:
for pt_i in mat.phase_types:
print(f"{pt_i.name=}")
max_phases_i = combine_phases.get(pt_i.name)
if max_phases_i:

n_phases = pt_i.phases.size
group_size = int(np.ceil(n_phases / max_phases_i))

print(f" n_phases: {n_phases}")
print(f" max_phases_i: {max_phases_i}")
print(f" group_size: {group_size}")

voxel_phase_new = np.copy(voxel_phase_new)

reassignment = {}
reassignment_inv = {}
possible_IDs_idx = set(range(max_phases_i, n_phases))
possible_IDs = set(pt_i.phases[list(possible_IDs_idx)])
count = 0
kept_IDs = []
for root_ID_idx in range(int(np.ceil(n_phases / group_size))):

root_ID = pt_i.phases[root_ID_idx]
kept_IDs.append(root_ID)

neighbours_i = set(neighbours[:, neighbours[0] == root_ID][1])
possible_IDs -= {root_ID}
possible_IDs_i = possible_IDs - neighbours_i
shared_IDs = [root_ID]
count += 1

for _ in range(1, group_size):
if count >= n_phases:
break
try:
sampled_ID = random.sample(sorted(possible_IDs_i), 1)[0]  # sort first: sets are not valid populations on Python 3.11+
except ValueError:
print(
f"No non-neighbouring samples left for root_ID: {root_ID}."
)
# allow touching phase IDs within this group:
possible_IDs_i = possible_IDs - set(shared_IDs)

if possible_IDs_i:
sampled_ID = random.sample(sorted(possible_IDs_i), 1)[0]
else:
raise ValueError(
f"Cannot find non-neighbouring root IDs"
) from None

neighbours_sampled = set(
neighbours[:, neighbours[0] == sampled_ID][1]
)
possible_IDs_i -= neighbours_sampled
possible_IDs_i -= {sampled_ID}
possible_IDs -= {sampled_ID}
shared_IDs.append(sampled_ID)
count += 1

for i in shared_IDs:
reassignment[i] = root_ID
voxel_phase_new[voxel_phase_new == i] = root_ID

reassignment_inv[root_ID] = shared_IDs

pt_i.phases = kept_IDs # modify phase type phases

# reindex phases across all materials to maintain consecutive phase IDs:
voxel_phase_new_flat = voxel_phase_new.reshape(-1)
uniq, inv = np.unique(voxel_phase_new_flat, return_inverse=True)
reindex = dict(zip(uniq, range(len(uniq))))
voxel_phase_new = inv.reshape(voxel_phase_new.shape)
for mat_j in materials:
for pt_j in mat_j.phase_types:
pt_j.phases = np.array([reindex[i] for i in pt_j.phases])

return voxel_phase_new

def __eq__(self, other):
# Note we don't check seeds (not stored in YAML file)
if not isinstance(other, self.__class__):
@@ -456,7 +562,7 @@ def _get_phase_material(self):
"Not all phases are accounted for in the phase type definitions."
) # TODO: test raise

# check all phase indices form a consequtive range:
# check all phase indices form a consecutive range:
num_phases_range = set(np.arange(self.num_phases))
known_phases = set(np.hstack(all_phase_idx))
miss_phase_idx = num_phases_range - known_phases
@@ -740,6 +846,7 @@ def from_voronoi(
num_phases=None,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
if sum(i is not None for i in (seeds, num_phases)) != 1:
raise ValueError(f"Specify exactly one of `seeds` and `num_phases`")
@@ -769,6 +876,7 @@ def from_voronoi(
size=size,
seeds=seeds,
random_seed=random_seed,
combine_phases=combine_phases,
)

@classmethod