Merge pull request #323 from jadball/master
eps support in TensorMap
jadball authored Sep 19, 2024
2 parents 2d435e0 + a2b983b commit 8c895dd
Showing 2 changed files with 136 additions and 62 deletions.
130 changes: 68 additions & 62 deletions ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb
@@ -53,9 +53,9 @@
"import matplotlib.pyplot as plt\n",
"\n",
"import ImageD11.columnfile\n",
"# from ImageD11.sinograms.dataset import DataSet\n",
"from ImageD11.sinograms import properties, roi_iradon, dataset\n",
"# from ImageD11.blobcorrector import eiger_spatial\n",
"from ImageD11.sinograms.sinogram import GrainSinogram, build_slice_arrays, write_slice_recon, read_slice_recon, write_h5, read_h5, write_pbp_strain\n",
"from ImageD11.sinograms.tensor_map import TensorMap\n",
"from ImageD11.grain import grain\n",
"\n",
"import ImageD11.nbGui.nb_utils as utils\n"
@@ -121,76 +121,82 @@
{
"cell_type": "code",
"execution_count": null,
"id": "968d1fcd-46e7-43ad-8f09-5c5b3a639ae1",
"metadata": {
"tags": []
},
"id": "e659e4b6-2962-45c0-a980-d8207a3479be",
"metadata": {},
"outputs": [],
"source": [
"def read_grains(ds):\n",
" with h5py.File(ds.grainsfile, 'r') as hin:\n",
" grp = hin['slice_recon']\n",
" \n",
" raw_intensity_array = grp['intensity'][:]\n",
" grain_labels_array = grp['labels'][:]\n",
" \n",
" grains_group = 'grains'\n",
" \n",
" grains = []\n",
" for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n",
" gg = hin[grains_group][gid_string]\n",
" ubi = gg.attrs['ubi'][:]\n",
" g = ImageD11.grain.grain(ubi)\n",
" # general grain properties\n",
" g.gid = int(gid_string)\n",
" g.translation = gg['translation'][:]\n",
" g.cen = gg.attrs['cen']\n",
" g.y0 = gg.attrs['y0']\n",
" # sinogram stuff\n",
" g.ssino = gg['ssino'][:]\n",
" g.sinoangles = gg['sinoangles'][:]\n",
" # reconstructions\n",
" g.og_recon = gg['og_recon'][:]\n",
" g.recon = gg['recon'][:]\n",
" grains.append(g)\n",
"# pick a specific sample to continue\n",
"\n",
"sample = 'FeAu_0p5_tR_nscope'\n",
"\n",
"# pick the name of the H5 group to import the TensorMap from\n",
"\n",
"tmap_h5group = 'TensorMap_YSZ_refined'\n",
"\n",
"from collections import OrderedDict\n",
"\n",
"ds_dict = OrderedDict()\n",
"\n",
"for dataset in samples_dict[sample]:\n",
" # read the ds\n",
" dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n",
" ds = ImageD11.sinograms.dataset.load(dset_path)\n",
" \n",
" return grains, raw_intensity_array, grain_labels_array"
" # read the tensormaps\n",
" ds.tensor_map = TensorMap.from_h5(ds.grainsfile, h5group=tmap_h5group)\n",
" ds_dict[dataset] = ds"
]
},
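Note: if you are unsure which group name to pass as h5group, you can list the candidates in the grains file directly. A minimal sketch, assuming the TensorMaps were written as top-level groups of ds.grainsfile (as from_h5 above suggests):

import h5py

# list candidate TensorMap groups in the grains file
with h5py.File(ds.grainsfile, 'r') as hin:
    print([name for name in hin if name.startswith('TensorMap')])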
{
"cell_type": "code",
"execution_count": null,
"id": "48cc2f6e-8cf1-4ec5-b8a0-2cab2a4f44d9",
"metadata": {
"tags": []
},
"id": "5a67e6da-0581-4b35-ab55-27a53982d774",
"metadata": {},
"outputs": [],
"source": [
"# in this case, first scan has the Z motor at its lowest value\n",
"# you should double-check this for your data!\n",
"# this means we're scanning the highest region on the sample first\n",
"# then moving down in the sample as we increase the Z number\n",
"# therefore we want the combined tensormaps in reverse order\n",
"\n",
"# choose the z step for your data\n",
"# in our case we assume it's the same as the y step\n",
"# this may not be true for you!!!\n",
"\n",
"zstep = ds.ystep\n",
"\n",
"combined_tensormap = TensorMap.from_stack([ds.tensor_map for ds in list(ds_dict.values())][::-1], zstep=zstep)"
]
},
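Note: for intuition, the stacking amounts to concatenating the per-slice maps along a leading Z axis, and reversing the list makes the combined Z index run from the bottom of the sample upwards. A hedged numpy sketch of the idea (illustrative shapes, not the TensorMap internals):

import numpy as np

# three per-slice maps, each with a singleton Z axis: (1, NY, NX)
slice_maps = [np.full((1, 4, 4), z) for z in range(3)]  # scan order: top of sample first

# reverse so the combined Z index runs from the bottom of the sample upwards
combined = np.concatenate(slice_maps[::-1], axis=0)     # (NZ, NY, NX)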
{
"cell_type": "code",
"execution_count": null,
"id": "798acf0d-c5c7-47d8-b0b5-ce8cc89a87e9",
"metadata": {},
"outputs": [],
"source": [
"z_translation_motor = \"pz\"\n",
"\n",
"ds_dict = []\n",
"\n",
"for sample, datasets in samples_dict.items():\n",
" for dataset in datasets:\n",
" print(f\"Processing dataset {dataset} in sample {sample}\")\n",
" dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n",
" if not os.path.exists(dset_path):\n",
" print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" print(\"Importing DataSet object\")\n",
" \n",
" ds = ImageD11.sinograms.dataset.load(dset_path)\n",
" \n",
" print(\"Importing grains\")\n",
" \n",
" ds.grains, ds.raw_intensity_array, ds.grain_labels_array = read_grains(ds)\n",
" \n",
" print(\"Importing z positions\")\n",
" \n",
" with h5py.File(ds.masterfile, 'r' ) as hin:\n",
" this_z_trans_value = hin[\"1.1/instrument/positioners\"][z_translation_motor][()]\n",
" ds.zpos = this_z_trans_value # this is in microns for samtz"
"# compute and plot strains for Z slice 0\n",
"\n",
"fig, axs = plt.subplots(3,3, sharex=True, sharey=True, layout='constrained')\n",
"for i in range(3):\n",
" for j in range(3):\n",
" axs[i,j].imshow(combined_tensormap.eps_sample[0, ..., i, j])\n",
" axs[i,j].set_title(f'eps_{i+1}{j+1}')\n",
"plt.show()"
]
},
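Note: the indexing above suggests the combined eps_sample has shape (NZ, NY, NX, 3, 3), so [0, ..., i, j] is component ij on the first Z slice; derived scalar maps follow the same pattern. A hedged sketch of a hydrostatic-strain map for the same slice, assuming that axis layout:

import numpy as np

eps = combined_tensormap.eps_sample              # (NZ, NY, NX, 3, 3)
hydro = np.trace(eps, axis1=-2, axis2=-1) / 3.0  # mean normal strain per voxel

plt.imshow(hydro[0])
plt.colorbar(label='hydrostatic strain')
plt.show()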
{
"cell_type": "code",
"execution_count": null,
"id": "28707736-3db4-476c-9204-0ae15ead9365",
"metadata": {},
"outputs": [],
"source": [
"# export to file\n",
"\n",
"combined_tensormap.to_h5('combined.h5')\n",
"combined_tensormap.to_paraview('combined.h5')"
]
},
{
68 changes: 68 additions & 0 deletions ImageD11/sinograms/tensor_map.py
@@ -117,6 +117,50 @@ def ubi_and_b_to_u(ubi, b, res):
        res[...] = np.dot(b, ubi).T


@numba.guvectorize([(numba.float64[:, :], numba.float64[:], numba.float64[:, :])], '(n,n),(p)->(n,n)', nopython=True)
def ubi_and_unitcell_to_eps_sample(ubi, dzero_cell, res):
    if np.isnan(ubi[0, 0]):
        res[...] = np.nan
    elif np.isnan(dzero_cell[0]):
        res[...] = np.nan
    else:
        a, b, c = dzero_cell[:3]
        ralpha, rbeta, rgamma = np.radians(dzero_cell[3:])  # radians
        ca = np.cos(ralpha)
        cb = np.cos(rbeta)
        cg = np.cos(rgamma)
        # metric tensor of the reference (dzero) cell
        g = np.full((3, 3), np.nan, float)
        g[0, 0] = a * a
        g[0, 1] = a * b * cg
        g[0, 2] = a * c * cb
        g[1, 0] = a * b * cg
        g[1, 1] = b * b
        g[1, 2] = b * c * ca
        g[2, 0] = a * c * cb
        g[2, 1] = b * c * ca
        g[2, 2] = c * c
        # reciprocal cell lengths and angles from the inverse metric tensor
        gi = np.linalg.inv(g)
        astar, bstar, cstar = np.sqrt(np.diag(gi))
        betas = np.degrees(np.arccos(gi[0, 2] / astar / cstar))
        gammas = np.degrees(np.arccos(gi[0, 1] / astar / bstar))

        # Busing-Levy B matrix of the reference cell
        B = np.full((3, 3), np.nan, float)
        B[0, 0] = astar
        B[0, 1] = bstar * np.cos(np.radians(gammas))
        B[0, 2] = cstar * np.cos(np.radians(betas))
        B[1, 0] = 0.0
        B[1, 1] = bstar * np.sin(np.radians(gammas))
        B[1, 2] = -cstar * np.sin(np.radians(betas)) * ca
        B[2, 0] = 0.0
        B[2, 1] = 0.0
        B[2, 2] = 1.0 / c

        # deformation gradient taking the reference cell to the measured one
        F = np.dot(ubi.T, B.T)
        # left stretch tensor V from the polar decomposition of F, via SVD
        w, sing, vh = np.linalg.svd(F)
        V = np.dot(w, np.dot(np.diag(sing), w.T))
        # strain (left stretch minus identity) in the sample frame
        em = V - np.eye(3)
        res[...] = em

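A quick sanity check of the convention implemented above: an unrotated cubic crystal whose measured cell is uniformly 0.1% larger than the reference cell should give a purely hydrostatic strain of 1e-3. A hedged sketch, assuming the function imports from this module as shown:

import numpy as np
from ImageD11.sinograms.tensor_map import ubi_and_unitcell_to_eps_sample

a0 = 4.0                           # reference (dzero) lattice parameter
ubi = np.eye(3) * a0 * (1 + 1e-3)  # unrotated cubic cell, stretched by 0.1%
dzero_cell = np.array([a0, a0, a0, 90.0, 90.0, 90.0])

eps = ubi_and_unitcell_to_eps_sample(ubi, dzero_cell)
print(np.diag(eps))                # ~[1e-3, 1e-3, 1e-3]; off-diagonals ~0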
# @numba.njit
# def UB_map_to_U_B_map(UB_map):
# """QR decomp from xfab.tools"""
@@ -472,6 +516,30 @@ def euler(self):
        self.add_map('euler', euler_map)
        return euler_map

    @property
    def dzero_unitcell(self):
        """The dzero (reference) unit cell for each voxel, taken from that voxel's reference phase"""
        if 'dzero_unitcell' in self.keys():
            return self.maps['dzero_unitcell']
        else:
            # calculate it
            dzero_unitcell_map = np.full(self.shape + (6,), np.nan, dtype=float)
            for phase_id, ucell in self.phases.items():
                dzero_unitcell_map[self.phase_ids == phase_id] = ucell.lattice_parameters
            self.add_map('dzero_unitcell', dzero_unitcell_map)
            return dzero_unitcell_map

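The per-phase fill above relies on numpy boolean-mask broadcasting: indexing the (..., 6) map with a mask over the spatial axes selects rows of length 6, and a single 6-vector is broadcast into every selected voxel. A standalone illustration with hypothetical arrays:

import numpy as np

shape = (2, 4, 4)                  # (NZ, NY, NX)
phase_ids = np.zeros(shape, dtype=int)
phase_ids[..., 2:] = 1             # two phases, split along X

cells = {0: [4.0, 4.0, 4.0, 90.0, 90.0, 90.0],
         1: [3.6, 3.6, 3.6, 90.0, 90.0, 90.0]}

dzero = np.full(shape + (6,), np.nan)
for pid, pars in cells.items():
    dzero[phase_ids == pid] = pars  # 6-vector broadcast into each voxel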
    @property
    def eps_sample(self):
        """The per-voxel strain tensor in the sample frame, relative to the dzero unit cell of that voxel's reference phase."""
        if 'eps_sample' in self.keys():
            return self.maps['eps_sample']
        else:
            # calculate it
            eps_sample_map = ubi_and_unitcell_to_eps_sample(self.UBI, self.dzero_unitcell)
            self.add_map('eps_sample', eps_sample_map)
            return eps_sample_map

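Like the other derived maps, eps_sample is computed on first access and cached via add_map, so later reads return the stored array. A hedged usage sketch, where tmap stands for any TensorMap with its UBI and phases set:

eps = tmap.eps_sample    # computed once; shape is the map shape + (3, 3)
eps11 = eps[..., 0, 0]   # a single tensor component as a scalar map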
    def get_meta_orix_orien(self, phase_id=0):
        """Get a meta orix orientation for all voxels of a given phase ID"""
        if phase_id in self._meta_orix_oriens.keys():
