Skip to content

Commit

Permalink
Full tomographic route end-to-end now validated on real data
Browse files · Browse the repository at this point in the history
  • Loading branch information
jadball committed Jan 23, 2025
1 parent 9942666 commit 07885c5
Show file tree
Hide file tree
Showing 15 changed files with 529 additions and 173 deletions.
4 changes: 4 additions & 0 deletions ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,10 @@
"# skips_dict = {\n",
"# \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
"# }\n",
"# otherwise by default skip nothing:\n",
"skips_dict = {\n",
" ds.sample: []\n",
"}\n",
"\n",
"sample_list = [ds.sample, ]\n",
" \n",
Expand Down
76 changes: 59 additions & 17 deletions ImageD11/nbGui/S3DXRD/4_visualise.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,13 @@
"# which phase to index\n",
"phase_str = 'Si'\n",
"\n",
"# is this phase a minor phase with a different refined path?\n",
"is_minor_phase = False\n",
"\n",
"# the minimum number of peaks you want a pixel to have to be counted\n",
"min_unique = 400"
"min_unique = 400\n",
"\n",
"dset_prefix = \"top_\" # some common string in the names of the datasets if processing multiple scans"
]
},
{
Expand Down Expand Up @@ -150,7 +155,12 @@
"source": [
"# import refinement manager\n",
"\n",
"refine = PBPRefine.from_h5(ds.refmanfile)"
"if is_minor_phase:\n",
" refmanpath = os.path.splitext(ds.refmanfile)[0] + f'_{phase_str}.h5'\n",
"else:\n",
" refmanpath = ds.refmanfile\n",
"\n",
"refine = PBPRefine.from_h5(refmanpath)"
]
},
{
Expand Down Expand Up @@ -213,7 +223,7 @@
"\n",
"for i in range(3):\n",
" for j in range(3):\n",
" axs[i,j].imshow(refine.refinedmap.best_eps[:, :, i, j], origin=\"lower\", cmap=cmap, norm=normalizer)\n",
" axs[i,j].imshow(refine.refinedmap.best_eps[:, :, i, j], origin=\"lower\", cmap=cmap, norm=normalizer, interpolation='nearest')\n",
" axs[i,j].set_title(f'eps_{i+1}{j+1}')\n",
"fig.supxlabel('< Lab Y axis')\n",
"fig.supylabel('Lab X axis')\n",
Expand Down Expand Up @@ -300,7 +310,7 @@
"\n",
"for i in range(3):\n",
" for j in range(3):\n",
" axs[i,j].imshow(tmap.eps_sample[0, ..., i, j], origin=\"lower\", cmap=cmap, norm=normalizer)\n",
" axs[i,j].imshow(tmap.eps_sample[0, ..., i, j], origin=\"lower\", cmap=cmap, norm=normalizer, interpolation='nearest')\n",
" axs[i,j].set_title(f'eps_{i+1}{j+1}')\n",
"fig.supxlabel('Lab X axis --->')\n",
"fig.supylabel('Lab Y axis --->')\n",
Expand Down Expand Up @@ -350,6 +360,21 @@
"eul = tmap.euler"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# if we have a previous tomographic TensorMap, we can try to get the labels map too:\n",
"try:\n",
" tmap_tomo = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str)\n",
" tmap.add_map('labels', tmap_tomo.labels)\n",
"except (FileNotFoundError, OSError, KeyError):\n",
" # couldn't find one, continue anyway\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -358,7 +383,7 @@
"source": [
"# save the refined TensorMap to disk\n",
"\n",
"refined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap.h5')\n",
"refined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')\n",
"\n",
"tmap.to_h5(refined_tmap_path)\n",
"tmap.to_paraview(refined_tmap_path)"
Expand All @@ -374,7 +399,7 @@
"source": [
"# you can also do an MTEX export if you like:\n",
"\n",
"refined_ctf_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap.ctf')\n",
"refined_ctf_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.ctf')\n",
"\n",
"tmap.to_ctf_mtex(refined_ctf_path, z_index=0)"
]
Expand All @@ -400,15 +425,18 @@
"# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
"# you can add samples and datasets to skip in skips_dict\n",
"\n",
"# you can optionally skip samples\n",
"# skips_dict = {\n",
"# \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
"# }\n",
"# otherwise by default skip nothing:\n",
"skips_dict = {\n",
" \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
" ds.sample: []\n",
"}\n",
"\n",
"dset_prefix = \"top\"\n",
"sample_list = [ds.sample, ]\n",
"\n",
"sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
" \n",
"samples_dict = utils.find_datasets_to_process(ds.dataroot, skips_dict, dset_prefix, sample_list)\n",
"samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
" \n",
"# manual override:\n",
"# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_150um\"]}\n",
Expand All @@ -432,11 +460,17 @@
" print(f\"Couldn't find PBP refinement output file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" if os.path.exists(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5')):\n",
" refined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')\n",
" if os.path.exists(refined_tmap_path):\n",
" print(f\"Already have refined TensorMap output file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" refine = PBPRefine.from_h5(ds.refmanfile)\n",
" if is_minor_phase:\n",
" refmanpath = os.path.splitext(ds.refmanfile)[0] + f'_{phase_str}.h5'\n",
" else:\n",
" refmanpath = ds.refmanfile\n",
" \n",
" refine = PBPRefine.from_h5(refmanpath)\n",
" refine.refinedmap.choose_best(min_unique)\n",
" \n",
" # first let's work out what phase we have\n",
Expand All @@ -455,10 +489,18 @@
" tmap.get_ipf_maps()\n",
" eul = tmap.euler\n",
" \n",
" tmap.to_h5(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))\n",
" tmap.to_paraview(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))\n",
" ctf_path = os.path.join(ds.analysispath, 'pbp_tensormap_refined.ctf')\n",
" tmap.to_ctf_mtex(ctf_path, z_index=0)\n",
" # if we have a previous tomographic TensorMap, we can try to get the labels map too:\n",
" try:\n",
" tmap_tomo = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str)\n",
" tmap.add_map('labels', tmap_tomo.labels)\n",
" except (FileNotFoundError, OSError, KeyError):\n",
" # couldn't find one, continue anyway\n",
" pass\n",
" \n",
" tmap.to_h5(refined_tmap_path)\n",
" tmap.to_paraview(refined_tmap_path)\n",
" refined_ctf_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.ctf')\n",
" tmap.to_ctf_mtex(refined_ctf_path, z_index=0)\n",
"\n",
" ds.save()\n",
"\n",
Expand Down
147 changes: 117 additions & 30 deletions ImageD11/nbGui/S3DXRD/5_combine_phases.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,39 @@
{
"cell_type": "code",
"execution_count": null,
"id": "94b89030-fdb2-47d2-bc26-3e5cfb0d6509",
"id": "636849a2-54fd-44ce-aca3-cb8e7e945e59",
"metadata": {},
"outputs": [],
"source": [
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )"
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "94b89030-fdb2-47d2-bc26-3e5cfb0d6509",
"metadata": {
"tags": [
"parameters"
]
},
"outputs": [],
"source": [
"# this cell is tagged with 'parameters'\n",
"# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n",
"\n",
"# python environment stuff\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n",
"\n",
"# dataset file to import\n",
"dset_file = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n",
"\n",
"phase_strs = ['Fe', 'Au']\n",
"\n",
"# whether or not we are combining refined tensormaps (changes where we look for them)\n",
"combine_refined = True\n",
"\n",
"dset_prefix = \"top_\" # some common string in the names of the datasets if processing multiple scans"
]
},
{
Expand Down Expand Up @@ -63,12 +90,11 @@
"source": [
"# USER: Pass path to dataset file\n",
"\n",
"dset_file = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n",
"\n",
"ds = ImageD11.sinograms.dataset.load(dset_file)\n",
" \n",
"sample = ds.sample\n",
"dataset = ds.dsname\n",
"rawdata_path = ds.dataroot\n",
"processed_data_root_dir = ds.analysisroot\n",
"\n",
"print(ds)\n",
Expand All @@ -93,21 +119,13 @@
{
"cell_type": "code",
"execution_count": null,
"id": "6e51945b-3baa-4a2b-99bc-b97972a99081",
"metadata": {
"tags": []
},
"id": "58bababb-461c-41a1-898d-378418fdc4f4",
"metadata": {},
"outputs": [],
"source": [
"# now let's select a phase to index from our parameters json\n",
"major_phase_str = 'Fe'\n",
"minor_phase_str = 'Au'\n",
"# what phases are we merging?\n",
"\n",
"major_phase_unitcell = ds.phases.unitcells[major_phase_str]\n",
"minor_phase_unitcell = ds.phases.unitcells[minor_phase_str]\n",
"\n",
"print(major_phase_str, major_phase_unitcell.lattice_parameters, major_phase_unitcell.spacegroup)\n",
"print(minor_phase_str, minor_phase_unitcell.lattice_parameters, minor_phase_unitcell.spacegroup)"
"print(*[ds.phases.unitcells[phase_str].lattice_parameters for phase_str in phase_strs], sep='\\n')"
]
},
{
Expand All @@ -130,21 +148,25 @@
},
"outputs": [],
"source": [
"tensor_map_major = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + major_phase_str)\n",
"tensor_map_minor = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + minor_phase_str)"
"if combine_refined:\n",
" tensor_maps = [TensorMap.from_h5(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')) for phase_str in phase_strs]\n",
"else:\n",
" tensor_maps = [TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str) for phase_str in phase_strs]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3e547336-885e-490f-96ab-8ab6626c04e3",
"metadata": {
"tags": []
},
"id": "1cf5e081-feda-4dee-a260-36f4f286ec1a",
"metadata": {},
"outputs": [],
"source": [
"tensor_map_major.plot('labels')\n",
"tensor_map_minor.plot('labels')"
"try:\n",
" for tmap in tensor_maps:\n",
" tmap.plot('labels')\n",
"except KeyError:\n",
" # no labels field\n",
" pass"
]
},
{
Expand All @@ -156,7 +178,7 @@
},
"outputs": [],
"source": [
"tensor_map_combined = TensorMap.from_combine_phases([tensor_map_major, tensor_map_minor])"
"tensor_map_combined = TensorMap.from_combine_phases(tensor_maps)"
]
},
{
Expand All @@ -169,7 +191,11 @@
"outputs": [],
"source": [
"tensor_map_combined.plot('phase_ids')\n",
"tensor_map_combined.plot('labels')\n",
"try:\n",
" tensor_map_combined.plot('labels')\n",
"except KeyError:\n",
" # no labels field\n",
" pass\n",
"tensor_map_combined.plot('ipf_z')"
]
},
Expand All @@ -182,8 +208,12 @@
},
"outputs": [],
"source": [
"tensor_map_combined.to_h5(os.path.join(ds.analysispath, 'combined_map.h5'))\n",
"tensor_map_combined.to_paraview(os.path.join(ds.analysispath, 'combined_map.h5'))"
"if combine_refined:\n",
" tensor_map_combined.to_h5(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_combined.h5'))\n",
" tensor_map_combined.to_paraview(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_combined.h5'))\n",
"else:\n",
" tensor_map_combined.to_h5(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_tmap_combined.h5'))\n",
" tensor_map_combined.to_paraview(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_tmap_combined.h5'))"
]
},
{
Expand All @@ -192,7 +222,64 @@
"id": "319bf9ac-16ca-4492-9bd4-e5eb1979fd86",
"metadata": {},
"outputs": [],
"source": []
"source": [
"# We can run the below cell to do this in bulk for many samples/datasets\n",
"# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
"# you can add samples and datasets to skip in skips_dict\n",
"\n",
"# you can optionally skip samples\n",
"# skips_dict = {\n",
"# \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
"# }\n",
"# otherwise by default skip nothing:\n",
"skips_dict = {\n",
" ds.sample: []\n",
"}\n",
"\n",
"sample_list = [ds.sample, ]\n",
"\n",
"samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
" \n",
"# manual override:\n",
"# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_150um\"]}\n",
" \n",
"# now we have our samples_dict, we can process our data:\n",
"\n",
"for sample, datasets in samples_dict.items():\n",
" for dataset in datasets:\n",
" print(f\"Processing dataset {dataset} in sample {sample}\")\n",
" dset_path = os.path.join(ds.analysisroot, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n",
" if not os.path.exists(dset_path):\n",
" print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" print(\"Importing DataSet object\")\n",
" \n",
" ds = ImageD11.sinograms.dataset.load(dset_path)\n",
" print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n",
" \n",
" if combine_refined:\n",
" combined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_combined.h5')\n",
" else:\n",
" combined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_tmap_combined.h5')\n",
" \n",
" if os.path.exists(combined_tmap_path):\n",
" print(f\"Already have combined TensorMap output file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" if combine_refined:\n",
" tensor_maps = [TensorMap.from_h5(os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')) for phase_str in phase_strs]\n",
" else:\n",
" tensor_maps = [TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str) for phase_str in phase_strs]\n",
" tensor_map_combined = TensorMap.from_combine_phases(tensor_maps)\n",
" \n",
" tensor_map_combined.to_h5(combined_tmap_path)\n",
" tensor_map_combined.to_paraview(combined_tmap_path)\n",
" \n",
" ds.save()\n",
"\n",
"print(\"Done!\")"
]
}
],
"metadata": {
Expand Down
Loading

0 comments on commit 07885c5

Please sign in to comment.