From 5459918dc4572ad0a1d50f500bb9fe6b55991f6a Mon Sep 17 00:00:00 2001 From: James Ball Date: Mon, 26 Feb 2024 16:32:48 +0100 Subject: [PATCH 1/8] Rename and add batch scripting --- ...ipynb => 0_S3DXRD_segment_and_label.ipynb} | 141 ++- ...dex_z_slice.ipynb => 1_S3DXRD_index.ipynb} | 181 +++- ...ipynb => 1_S3DXRD_index_minor_phase.ipynb} | 236 ++++- .../nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb | 831 +++++++++++------- .../2_S3DXRD_sinograms_map_minor_phase.ipynb | 733 +++++++++------ .../S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb | 331 +++++-- .../S3DXRD/4_S3DXRD_plot_both_phases.ipynb | 376 +++++--- .../5_S3DXRD_plot_multiple_slices.ipynb | 340 +++++++ 8 files changed, 2269 insertions(+), 900 deletions(-) rename ImageD11/nbGui/S3DXRD/{0_S3DXRD_segment_and_label_single_dset.ipynb => 0_S3DXRD_segment_and_label.ipynb} (64%) rename ImageD11/nbGui/S3DXRD/{1_S3DXRD_index_z_slice.ipynb => 1_S3DXRD_index.ipynb} (75%) rename ImageD11/nbGui/S3DXRD/{1_S3DXRD_index_z_slice_minor_phase.ipynb => 1_S3DXRD_index_minor_phase.ipynb} (64%) create mode 100644 ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb diff --git a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label_single_dset.ipynb b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb similarity index 64% rename from ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label_single_dset.ipynb rename to ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb index b4c0a541..f90d754a 100755 --- a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label_single_dset.ipynb +++ b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb @@ -7,7 +7,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 21/02/2024" + "## Date: 26/02/2024" ] }, { @@ -63,9 +63,9 @@ "import matplotlib.pyplot as plt\n", "from matplotlib.colors import LogNorm\n", "from skimage import filters, measure, morphology\n", - "import ipywidgets as widgets\n", + "from ipywidgets import interact, interactive, widgets, fixed, Layout\n", "import h5py\n", - "from IPython.display import display\n", + "\n", "%matplotlib ipympl" ] }, @@ -116,7 +116,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -196,27 +196,40 @@ " labeled_image = measure.label(cut_image)\n", " blob_properties = measure.regionprops(labeled_image)\n", " blob_mask = np.zeros_like(image, dtype=np.uint8)\n", + " spot_count = 0\n", " for prop in blob_properties:\n", " if prop.area >= pixels_in_spot:\n", " blob_mask[labeled_image == prop.label] = 1\n", + " spot_count += 1\n", " filtered_image = (image-bgimage) * blob_mask\n", - " return filtered_image\n", + " return filtered_image, spot_count\n", + "\n", + "\n", + "cut_slider = widgets.IntSlider(value=start_pars[\"cut\"], min=1, max=20, step=1, description='Cut:')\n", + "pixels_in_spot_slider = widgets.IntSlider(value=start_pars[\"pixels_in_spot\"], min=1, max=20, step=1, description='Pixels in Spot:')\n", + "\n", + "# Display the image initially\n", + "plt.figure()\n", "\n", + "filtered_image, nspots = segment_image(image, cut=cut_slider.value, pixels_in_spot=pixels_in_spot_slider.value, bgimage=bgimage)\n", + "filtered_image[filtered_image == 65535] = 0\n", + "im = plt.imshow(filtered_image, cmap=\"viridis\", norm=LogNorm(vmin=1, 
vmax=1000), interpolation=\"nearest\")\n", + "plt.title(f\"cut={cut_slider.value}, pixels_in_spot={pixels_in_spot_slider.value}, nspots={nspots}\")\n", + "plt.show()\n", "\n", "def update_image(cut, pixels_in_spot):\n", - " filtered_image = segment_image(image, cut, pixels_in_spot, bgimage)\n", + " filtered_image, nspots = segment_image(image, cut, pixels_in_spot, bgimage)\n", " \n", " filtered_image[filtered_image == 65535] = 0\n", " \n", - " plt.imshow(filtered_image, cmap=\"viridis\", norm=LogNorm(vmin=1, vmax=1000), interpolation=\"nearest\") \n", - " plt.title(f\"cut={cut}, pixels_in_spot={pixels_in_spot}\")\n", - " plt.show()\n", - " \n", + " im.set_data(filtered_image)\n", + " plt.title(f\"cut={cut}, pixels_in_spot={pixels_in_spot}, nspots={nspots}\")\n", + " plt.draw()\n", + "\n", + "\n", "\n", - "cut_slider = widgets.IntSlider(value=start_pars[\"cut\"], min=1, max=2000, step=1, description='Cut:')\n", - "pixels_in_spot_slider = widgets.IntSlider(value=start_pars[\"pixels_in_spot\"], min=1, max=20, step=1, description='Pixels in Spot:')\n", - "plt.figure(figsize=(10, 10))\n", "interactive_plot = widgets.interactive(update_image, cut=cut_slider, pixels_in_spot=pixels_in_spot_slider)\n", + "\n", "display(interactive_plot)" ] }, @@ -319,6 +332,108 @@ "source": [ "# TODO: incorporate DATA/visitor/ma5839/id11/20240118/SCRIPTS/0_S3DXRD_segment_and_label_single_dset.ipynb" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "703d22d0-ef82-4e08-8087-c57e76e16de1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d4f70bb5-035b-48b2-9acd-39c6e3ea8666", + "metadata": {}, + "outputs": [], + "source": [ + "# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "mask_path = '/data/id11/nanoscope/Eiger/eiger_mask_E-08-0173_20231127.edf'\n", + "\n", + "# you can change these if needed, but they will default to those you selected with the widget\n", + "seg_pars = {\"maskfile\": mask_path,\n", + " \"cut\": cut_slider.value,\n", + " \"pixels_in_spot\": pixels_in_spot_slider.value}\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.DataSet(dataroot=rawdata_path,\n", + " 
analysisroot=processed_data_root_dir,\n", + " sample=sample,\n", + " dset=dataset)\n", + " if os.path.exists(ds.sparsefile):\n", + " print(f\"Found existing Sparse file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " ds.import_all()\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " ds.save()\n", + " \n", + " print(\"Segmenting\")\n", + " sbat = ImageD11.sinograms.lima_segmenter.setup(ds.dsfile, **seg_pars)\n", + " utils.slurm_submit_and_wait(sbat, 60)\n", + " \n", + " print(\"Labelling sparse peaks\")\n", + " ImageD11.sinograms.assemble_label.main(ds.dsfile, ds.sparsefile)\n", + " \n", + " print(\"Generating peaks table\")\n", + " ImageD11.sinograms.properties.main(ds.dsfile, ds.sparsefile, ds.pksfile, options={'algorithm': 'lmlabel', 'wtmax': 70000, 'save_overlaps': False})\n", + " \n", + " print(\"Cleaning up sparse files\")\n", + " sparse_folder_path = os.path.join(ds.analysispath, \"sparse\")\n", + "\n", + " if not os.path.exists(sparse_folder_path):\n", + " os.mkdir(sparse_folder_path)\n", + "\n", + " scan_sparse_files = glob.glob(os.path.join(ds.analysispath, \"scan*_sparse.h5\"))\n", + "\n", + " for scan_sparse_file in scan_sparse_files:\n", + " shutil.move(scan_sparse_file, sparse_folder_path)\n", + "print(\"Done!\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "abee80e4-d426-46a9-b635-a28ded5039e1", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb similarity index 75% rename from ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice.ipynb rename to ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb index 68fd1bd1..d51f2e31 100755 --- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice.ipynb +++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb @@ -6,7 +6,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 21/02/2024" + "## Date: 26/02/2024" ] }, { @@ -63,6 +63,35 @@ "from ImageD11.blobcorrector import eiger_spatial" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# define our functions\n", + "\n", + "cmp = {'compression':'gzip',\n", + " 'compression_opts': 2,\n", + " 'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return hds\n", + "\n", + "def save_grains(grains, ds):\n", + " with h5py.File(ds.grainsfile, 'w') as hout:\n", + " grn = hout.create_group('grains')\n", + " for g in tqdm(grains):\n", + " gg = grn.create_group(str(g.gid))\n", + " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain during indexing\"\n", + " gg.attrs.update({'ubi':g.ubi})" + ] + }, { "cell_type": "code", "execution_count": null, @@ -83,7 +112,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -97,7 +126,7 @@ "# USER: pick a sample and a dataset you want to segment\n", "\n", "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = 
\"top_100um\"" + "dataset = \"top_200um\"" ] }, { @@ -564,41 +593,139 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "cmp = {'compression':'gzip',\n", - " 'compression_opts': 2,\n", - " 'shuffle' : True }\n", - "\n", - "def save_array(grp, name, ary):\n", - " hds = grp.require_dataset(name, \n", - " shape=ary.shape,\n", - " dtype=ary.dtype,\n", - " **cmp)\n", - " hds[:] = ary\n", - " return hds\n", + "# save grain data\n", "\n", - "def save_grains(grains, ds):\n", - " with h5py.File(ds.grainsfile, 'w') as hout:\n", - " grn = hout.create_group('grains')\n", - " for g in tqdm(grains):\n", - " gg = grn.create_group(str(g.gid))\n", - " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain during indexing\"\n", - " gg.attrs.update({'ubi':g.ubi})" + "save_grains(grains, ds)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "# save grain data\n", + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", "\n", - "save_grains(grains, ds)" + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_-100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# manual override:\n", + "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_150um\", \"top_200um\", \"top_250um\"]}\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "cf_strong_frac = 0.994\n", + "cf_strong_dsmax = 1.4\n", + "cf_strong_dstol = 0.01\n", + "\n", + "indexer_ds_tol = 0.01\n", + "max_multiplicity = 11\n", + "min_counts_on_ring = 0\n", + "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.1]\n", + "fracs = [0.9, 0.8, 0.7, 0.6, 0.5]\n", + "cosine_tol = np.cos(np.radians(90.25))\n", + "max_grains = 1000\n", + "\n", + "peak_assign_tol = 0.05\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample 
{sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " if os.path.exists(ds.grainsfile):\n", + " print(f\"Already have grains for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " peaks_table = ImageD11.sinograms.properties.pks_table.load(ds.pksfile)\n", + " peaks_2d = peaks_table.pk2d(ds.omega, ds.dty)\n", + " cf_2d = utils.tocolf(peaks_2d, par_path, e2dx_path, e2dy_path)\n", + " if os.path.exists(ds.col2dfile):\n", + " os.remove(ds.col2dfile)\n", + " ImageD11.columnfile.colfile_to_hdf(cf_2d, ds.col2dfile)\n", + "\n", + " peaks_4d = peaks_table.pk2dmerge(ds.omega, ds.dty)\n", + " cf_4d = utils.tocolf(peaks_4d, par_path, e2dx_path, e2dy_path) # spatial correction\n", + " index_column = np.arange(cf_4d.nrows)\n", + " cf_4d.addcolumn(index_column, 'index')\n", + " if os.path.exists(ds.col4dfile):\n", + " os.remove(ds.col4dfile)\n", + " ImageD11.columnfile.colfile_to_hdf(cf_4d, ds.col4dfile)\n", + " \n", + " cf_strong = utils.selectpeaks(cf_4d, frac=cf_strong_frac, dsmax=cf_strong_dsmax, dstol=cf_strong_dstol)\n", + " \n", + "\n", + "\n", + " grains, indexer = utils.do_index(cf=cf_strong,\n", + " dstol=indexer_ds_tol,\n", + " max_mult=max_multiplicity,\n", + " min_ring_count=min_counts_on_ring,\n", + " hkl_tols=hkl_tols_seq,\n", + " fracs=fracs,\n", + " cosine_tol=cosine_tol,\n", + " max_grains=max_grains\n", + " )\n", + " \n", + " for i, g in enumerate(grains):\n", + " g.gid = i\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", + "\n", + " print(\"Storing peak data in grains\")\n", + " for g in tqdm(grains):\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " \n", + " save_grains(grains, ds)\n", + "\n", + "print(\"Done!\")" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb similarity index 64% rename from ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice_minor_phase.ipynb rename to ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb index 395fe609..3a3c7f18 100755 --- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_z_slice_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb @@ -6,7 +6,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 21/02/2024" + "## Date: 26/02/2024" ] }, { @@ -68,6 +68,34 @@ "from ImageD11.blobcorrector import eiger_spatial" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cmp = {'compression':'gzip',\n", + " 'compression_opts': 2,\n", + " 'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return hds\n", + "\n", + "def save_grains_minor_phase(grains, ds, phase_name='minor'):\n", + " ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + " with h5py.File(ds.grainsfile_minor_phase, 'w') as hout:\n", + " grn = hout.create_group('grains')\n", + " for g in tqdm(grains):\n", + " gg = grn.create_group(str(g.gid))\n", + " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D 
peaks that were assigned to this grain during indexing\"\n",
+    "            gg.attrs.update({'ubi':g.ubi})"
+   ]
+  },
  {
   "cell_type": "code",
   "execution_count": null,
@@ -186,19 +214,48 @@
   },
   "outputs": [],
   "source": [
-    "# isolate Nickel peaks, and remove them from the dataset\n",
-    "ni_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=0.0075, dsmax=cf_4d.ds.max())\n",
+    "# isolate main phase peaks, and remove them from the dataset\n",
+    "main_phase_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=0.0075, dsmax=cf_4d.ds.max())\n",
     "\n",
-    "carbides = cf_4d.copy()\n",
-    "carbides.filter(~ni_peaks_mask)\n",
+    "minor_phase_peaks = cf_4d.copy()\n",
+    "minor_phase_peaks.filter(~main_phase_peaks_mask)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# plot the remaining peaks as a cake (d-star vs eta)\n",
+    "\n",
+    "fig, ax = plt.subplots()\n",
+    "\n",
+    "ax.scatter(minor_phase_peaks.ds, minor_phase_peaks.eta, s=1)\n",
+    "\n",
+    "ax.set_xlabel(\"dstar\")\n",
+    "ax.set_ylabel(\"eta\")\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Update geometry for minor phase peaks\n",
     "\n",
-    "# Update geometry for carbides peaks\n",
+    "par_path = os.path.join(processed_data_root_dir, 'Au.par')\n",
+    "minor_phase_peaks.parameters.loadparameters(par_path)\n",
+    "minor_phase_peaks.updateGeometry()\n",
     "\n",
-    "par_path = 'carbide.par'\n",
-    "carbides.parameters.loadparameters(par_path)\n",
-    "carbides.updateGeometry()\n",
-    "\n",
-    "cf_strong = utils.selectpeaks(carbides, dstol=0.0075, dsmax=0.7, frac=0.8, doplot=0.01)\n",
+    "cf_strong = utils.selectpeaks(minor_phase_peaks, dstol=0.005, dsmax=0.90, frac=0.875, doplot=0.01)\n",
     "print(cf_strong.nrows)"
    ]
  },
@@ -231,7 +288,7 @@
    "ax.plot(cf_strong.ds, cf_strong.sum_intensity,',')\n",
    "ax.semilogy()\n",
    "\n",
-    "ax.set_xlabel(\"Dstar\")\n",
+    "ax.set_xlabel(\"dstar\")\n",
    "ax.set_ylabel(\"Intensity\")\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# now we can define a unit cell from our parameters\n",
    "\n",
-    "Fe = ImageD11.unitcell.unitcell_from_parameters(cf_strong.parameters)\n",
-    "Fe.makerings(cf_strong.ds.max())"
+    "Au = ImageD11.unitcell.unitcell_from_parameters(cf_strong.parameters)\n",
+    "Au.makerings(cf_strong.ds.max())"
   ]
  },
@@ -265,7 +322,7 @@
    "\n",
    "skip=1\n",
    "ax.plot( cf_strong.ds[::skip], cf_strong.eta[::skip],',',alpha=0.5)\n",
-    "ax.plot( Fe.ringds, [0,]*len(Fe.ringds), '|', ms=90 )\n",
+    "ax.plot( Au.ringds, [0,]*len(Au.ringds), '|', ms=90 )\n",
    "ax.set_xlabel('1 / d ($\\AA$)')\n",
    "ax.set_ylabel('$\\\\eta$ (deg)')\n",
    "\n",
@@ -297,7 +354,7 @@
   "source": [
    "# USER: set a tolerance in d-space (for assigning peaks to powder rings)\n",
    "\n",
-    "indexer.ds_tol = 0.01\n",
+    "indexer.ds_tol = 0.005\n",
    "\n",
    "# change the log level so we can see what the ring assignments look like\n",
    "\n",
@@ -341,13 +398,13 @@
   "source": [
    "# now we are indexing!\n",
    "# indexing will select all rings with a multiplicity below max_multiplicity to search\n",
-    "max_multiplicity = 11\n",
+    "max_multiplicity = 23\n",
    "# the minimum number of peaks on a ring for a ring to be indexed on\n",
-    "min_counts_on_ring = 0\n",
+    "min_counts_on_ring = 100\n",
    "# the sequence of hkl tolerances the indexer will iterate through\n",
-    "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.1]\n",
+    "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06]\n",
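+    "# (an hkl tolerance here is the distance allowed between the computed and the nearest integer hkl when assigning a peak)\n",
    "# the sequence of minpks fractions the indexer 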
will iterate through\n", - "fracs = [0.9, 0.8, 0.7, 0.6, 0.5]\n", + "fracs = [0.9, 0.8]\n", "# the tolerance in g-vector angle\n", "cosine_tol = np.cos(np.radians(90.25))\n", "# the max number of UBIs we can find per pair of rings\n", @@ -443,42 +500,24 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "cmp = {'compression':'gzip',\n", - " 'compression_opts': 2,\n", - " 'shuffle' : True }\n", - "\n", - "def save_array(grp, name, ary):\n", - " hds = grp.require_dataset(name, \n", - " shape=ary.shape,\n", - " dtype=ary.dtype,\n", - " **cmp)\n", - " hds[:] = ary\n", - " return hds\n", + "# save grain data\n", "\n", - "def save_grains(grains, ds):\n", - " ds.grainsfile_carbides = os.path.join(ds.analysispath, ds.dsname + '_grains_carbides.h5')\n", - " with h5py.File(ds.grainsfile_carbides, 'w') as hout:\n", - " grn = hout.create_group('grains')\n", - " for g in tqdm(grains):\n", - " gg = grn.create_group(str(g.gid))\n", - " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain during indexing\"\n", - " gg.attrs.update({'ubi':g.ubi})" + "save_grains_minor_phase(grains, ds)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "# save grain data\n", - "\n", - "save_grains(grains, ds)" + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" ] }, { @@ -486,7 +525,112 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "minor_phase_par_path = os.path.join(processed_data_root_dir, 'Au.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "main_phase_cf_dstol = 0.0075\n", + "\n", + "cf_strong_frac = 0.875\n", + "cf_strong_dsmax = 0.9\n", + "cf_strong_dstol = 0.005\n", + "\n", + "indexer_ds_tol = 0.005\n", + "max_multiplicity = 23\n", + "min_counts_on_ring = 100\n", + "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06]\n", + "fracs = [0.9, 0.8]\n", + "cosine_tol = np.cos(np.radians(90.25))\n", + "max_grains = 1000\n", + "\n", + "peak_assign_tol = 0.05\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " 
print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " if os.path.exists(ds.grainsfile):\n", + " print(f\"Already have grains for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " cf_4d = ImageD11.columnfile.colfile_from_hdf(ds.col4dfile)\n", + "\n", + " cf_4d.parameters.loadparameters(par_path)\n", + " cf_4d.updateGeometry()\n", + "\n", + " main_phase_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=main_phase_cf_dstol, dsmax=cf_4d.ds.max())\n", + "\n", + " minor_phase_peaks = cf_4d.copy()\n", + " minor_phase_peaks.filter(~main_phase_peaks_mask)\n", + " \n", + " par_path = minor_phase_par_path\n", + " minor_phase_peaks.parameters.loadparameters(par_path)\n", + " minor_phase_peaks.updateGeometry()\n", + "\n", + " cf_strong = utils.selectpeaks(minor_phase_peaks, dstol=cf_strong_dstol, dsmax=cf_strong_dsmax, frac=cf_strong_frac)\n", + "\n", + " grains, indexer = utils.do_index(cf=cf_strong,\n", + " dstol=indexer_ds_tol,\n", + " max_mult=max_multiplicity,\n", + " min_ring_count=min_counts_on_ring,\n", + " hkl_tols=hkl_tols_seq,\n", + " fracs=fracs,\n", + " cosine_tol=cosine_tol,\n", + " max_grains=max_grains\n", + " )\n", + " \n", + " for i, g in enumerate(grains):\n", + " g.gid = i\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", + "\n", + " print(\"Storing peak data in grains\")\n", + " for g in tqdm(grains):\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " \n", + " save_grains_minor_phase(grains, ds)\n", + "\n", + "print(\"Done!\")" + ] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb index bff06193..c5d73a29 100644 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb @@ -6,7 +6,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 21/02/2024" + "## Date: 26/02/2024" ] }, { @@ -50,6 +50,7 @@ "import pprint\n", "from shutil import rmtree\n", "import time\n", + "from functools import partial\n", "\n", "import matplotlib\n", "%matplotlib ipympl\n", @@ -73,6 +74,297 @@ "from ipywidgets import interact" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# define our functions\n", + "\n", + "# save recons and 2d properties to existing grain file\n", + "\n", + "def read_grains(ds):\n", + " with h5py.File(ds.grainsfile, 'r') as hin: \n", + " grains_group = 'grains'\n", + " \n", + " grains = []\n", + " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", + " gg = hin[grains_group][gid_string]\n", + " ubi = gg.attrs['ubi'][:]\n", + " g = ImageD11.grain.grain(ubi)\n", + " g.gid = int(gid_string)\n", + " grains.append(g)\n", + " \n", + " return grains\n", + "\n", + "def map_grain_from_peaks(g, flt, ds):\n", + " \"\"\"\n", + " Computes 
sinogram\n", + " flt is already the peaks for this grain\n", + " Returns angles, sino\n", + " \"\"\" \n", + " NY = len(ds.ybincens) # number of y translations\n", + " iy = np.round((flt.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", + "\n", + " # The problem is to assign each spot to a place in the sinogram\n", + " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", + " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", + " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", + " # 4D array of h,k,l,+/-\n", + " # pkmsk is whether a peak has been observed with this HKL or not\n", + " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", + " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", + " # sinogram row to hit\n", + " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", + " # counting where we hit an HKL position with a found peak\n", + " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", + "\n", + " npks = pkmsk.sum( )\n", + " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", + " sino = np.zeros( ( npks, NY ), 'f' )\n", + " hits = np.zeros( ( npks, NY ), 'f' )\n", + " angs = np.zeros( ( npks, NY ), 'f' )\n", + " adr = destRow * NY + iy \n", + " # Just accumulate \n", + " sig = flt.sum_intensity\n", + " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", + " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", + " ImageD11.cImageD11.put_incr64( angs, adr, flt.omega)\n", + " \n", + " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", + " # Normalise:\n", + " sino = (sino.T/sino.max( axis=1 )).T\n", + " # Sort (cosmetic):\n", + " order = np.lexsort((np.arange(npks), sinoangles))\n", + " sinoangles = sinoangles[order]\n", + " ssino = sino[order].T\n", + " return sinoangles, ssino, hits[order].T\n", + "\n", + "def do_sinos(g, hkltol=0.25):\n", + " flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", + " \n", + " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # calculate hkl of all assigned peaks\n", + " hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", + " dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", + "\n", + " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", + " # g.npksall = flt.nrows # total number of assigned peaks\n", + " flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", + " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", + " hkl_int = np.round(hkl_real).astype(int)\n", + " dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", + " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", + " # g.npks = flt.nrows # total number of assigned peaks after hkltol filtering\n", + " g.etasigns_2d_strong = np.sign(flt.eta)\n", + " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", + " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, flt, ds)\n", + " return i,g\n", + "\n", + "\n", + "def run_iradon_id11(grain, pad=20, y0=0, workers=1, sample_mask=None, apply_halfmask=False, mask_central_zingers=False):\n", + " outsize 
= grain.ssino.shape[0] + pad\n", + " \n", + " if apply_halfmask:\n", + " halfmask = np.zeros_like(grain.ssino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + " \n", + " ssino_to_recon = grain.ssino * halfmask\n", + " else:\n", + " ssino_to_recon = grain.ssino\n", + " \n", + " # # pad the sample mask\n", + " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", + "\n", + " \n", + " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", + " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=grain.sinoangles, \n", + " mask=sample_mask,\n", + " output_size=outsize,\n", + " projection_shifts=np.full(grain.ssino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=workers)\n", + " \n", + " if mask_central_zingers:\n", + " grs = grain.recon.shape[0]\n", + " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", + " inner_mask_radius = 25\n", + " outer_mask_radius = inner_mask_radius + 2\n", + "\n", + " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", + " outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", + "\n", + " mask_ring = inner_circle_mask & outer_circle_mask\n", + " # we now have a mask to apply\n", + " fill_value = np.median(grain.recon[mask_ring])\n", + " grain.recon[inner_circle_mask] = fill_value\n", + " \n", + " return grain\n", + "\n", + "\n", + "# write og_recon and ssino and circle_mask to disk\n", + "\n", + "cmp = {'compression':'gzip',\n", + " 'compression_opts': 2,\n", + " 'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return hds\n", + "\n", + "def save_grains_for_mlem(grains, ds, y0):\n", + " with h5py.File(ds.grainsfile, 'r+') as hout:\n", + " try:\n", + " grp = hout.create_group('peak_assignments')\n", + " except ValueError:\n", + " grp = hout['peak_assignments']\n", + "\n", + " ds_gord = save_array( grp, 'gord', gord )\n", + " ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " ds_inds = save_array( grp, 'inds', inds )\n", + " ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " \n", + " grains_group = 'grains'\n", + " for g in tqdm(grains):\n", + " gg = hout[grains_group][str(g.gid)]\n", + " # save stuff for sinograms\n", + " \n", + " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", + " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", + " save_array(gg, 'og_recon', g.og_recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", + " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to use for MLEM'\n", + " \n", + " # might as well save peaks stuff while we're here\n", + " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", + " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", + " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", + "\n", + " gg.attrs['cen'] = g.cen\n", + " gg.attrs['y0'] = y0\n", + " \n", + " \n", + "def 
prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50):\n",
+    "    \n",
+    "    slurm_mlem_path = os.path.join(ds.analysispath, \"slurm_mlem\")\n",
+    "\n",
+    "    if os.path.exists(slurm_mlem_path):\n",
+    "        print(f\"Removing {slurm_mlem_path}\")\n",
+    "        rmtree(slurm_mlem_path)\n",
+    "\n",
+    "    os.mkdir(slurm_mlem_path)\n",
+    "    \n",
+    "    recons_path = os.path.join(ds.analysispath, \"mlem_recons\")\n",
+    "\n",
+    "    if os.path.exists(recons_path):\n",
+    "        print(f\"Removing {recons_path}\")\n",
+    "        rmtree(recons_path)\n",
+    "\n",
+    "    os.mkdir(recons_path)\n",
+    "    \n",
+    "    if is_half_scan:\n",
+    "        dohm = \"Yes\"\n",
+    "        mask_cen = \"Yes\"\n",
+    "    else:\n",
+    "        dohm = \"No\"\n",
+    "        mask_cen = \"No\"\n",
+    "    \n",
+    "    bash_script_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm.sh')\n",
+    "    python_script_path = os.path.join(id11_code_path, \"ImageD11/nbGui/S3DXRD/run_mlem_recon.py\") \n",
+    "    outfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.out')\n",
+    "    errfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.err')\n",
+    "    # braces around SLURM_ARRAY_JOB_ID stop bash reading the variable name as SLURM_ARRAY_JOB_ID_\n",
+    "    log_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_${SLURM_ARRAY_JOB_ID}_$SLURM_ARRAY_TASK_ID.log')\n",
+    "\n",
+    "    reconfile = os.path.join(recons_path, ds.dsname + \"_mlem_recon_$SLURM_ARRAY_TASK_ID.txt\")\n",
+    "\n",
+    "    bash_script_string = f\"\"\"#!/bin/bash\n",
+    "#SBATCH --job-name=mlem-recon\n",
+    "#SBATCH --output={outfile_path}\n",
+    "#SBATCH --error={errfile_path}\n",
+    "#SBATCH --array=0-{len(grains)-1}%{n_simultaneous_jobs}\n",
+    "#SBATCH --time=02:00:00\n",
+    "# define memory needs and number of tasks for each array job\n",
+    "#SBATCH --ntasks=1\n",
+    "#SBATCH --cpus-per-task={cores_per_task}\n",
+    "#\n",
+    "date\n",
+    "echo python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n",
+    "python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} >> {log_path} 2>&1\n",
+    "date\n",
+    "    \"\"\"\n",
+    "    \n",
+    "    # print(f\"python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\")\n",
+    "\n",
+    "    with open(bash_script_path, \"w\") as bashscriptfile:\n",
+    "        bashscriptfile.writelines(bash_script_string)\n",
+    "    \n",
+    "    return bash_script_path, recons_path\n",
+    "\n",
+    "\n",
+    "def save_grains(grains, ds):\n",
+    "    with h5py.File(ds.grainsfile, 'r+') as hout:\n",
+    "        try:\n",
+    "            grp = hout.create_group('slice_recon')\n",
+    "        except ValueError:\n",
+    "            grp = hout['slice_recon']\n",
+    "        save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n",
+    "        save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n",
+    "        \n",
+    "        grains_group = 'grains'\n",
+    "\n",
+    "        for g in tqdm(grains):\n",
+    "            gg = hout[grains_group][str(g.gid)]\n",
+    "\n",
+    "            save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n",
+    "    \n",
+    "    \n",
+    "# without a mask, MLEM can introduce artifacts in the corners\n",
+    "# so we can manually mask those out\n",
+    "\n",
+    "# we can incorporate our own mask too\n",
+    "# by modifying the below function\n",
+    "\n",
+    "def apply_manual_mask(mask_in):\n",
+    "    mask_out = mask_in.copy()\n",
+    "    \n",
+    "# mask_out[:8, :] = 0\n",
+    "# mask_out[:, 87:] = 0\n",
+    "\n",
+    "# mask_out[:, :8] = 0\n",
+    "# mask_out[82:, :] = 0\n",
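+    "\n",
+    "# one programmatic alternative (a sketch, assuming a square recon): keep only an inscribed circle,\n",
+    "# reusing the xpr/ypr grid idiom from the zinger masking in run_iradon_id11 above\n",
+    "# grs = mask_out.shape[0]\n",
+    "# xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n",
+    "# mask_out[(xpr ** 2 + ypr ** 2) > (grs//2) ** 2] = 0\n",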
+ " \n", + "# mask_out[74:, :10] = 0\n", + "\n", + "# mask_out[:5, :] = 0\n", + "# # mask_out[131:, :] = 0\n", + "# # mask_out[:, 131:] = 0\n", + "\n", + "# mask_out[:20, 90:] = 0\n", + "# mask_out[119:, :45] = 0\n", + "# mask_out[:30, 100:] = 0\n", + "# # mask_out[112:, 81:] = 0\n", + "\n", + "# # mask_out[100:, 100:] = 0\n", + "# mask_out[90:, 118:] = 0\n", + "# mask_out[118:, 90:] = 0\n", + "\n", + "# mask_out[:40, 112:] = 0\n", + "# mask_out[:52, 120:] = 0\n", + "\n", + "# mask_out[:48, 81:] = 0\n", + " \n", + " return mask_out" + ] + }, { "cell_type": "code", "execution_count": null, @@ -91,7 +383,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -105,7 +397,7 @@ "# USER: pick a sample and a dataset you want to segment\n", "\n", "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = \"top_100um\"" + "dataset = \"top_200um\"" ] }, { @@ -156,29 +448,6 @@ "print(f\"Read {cf_4d.nrows} 4D peaks\")" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def read_grains(ds):\n", - " with h5py.File(ds.grainsfile, 'r') as hin: \n", - " grains_group = 'grains'\n", - " \n", - " grains = []\n", - " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", - " gg = hin[grains_group][gid_string]\n", - " ubi = gg.attrs['ubi'][:]\n", - " g = ImageD11.grain.grain(ubi)\n", - " g.gid = int(gid_string)\n", - " grains.append(g)\n", - " \n", - " return grains" - ] - }, { "cell_type": "code", "execution_count": null, @@ -321,11 +590,11 @@ "source": [ "# generate sinogram for whole sample\n", "\n", - "whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges[::2]])\n", + "whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", "\n", "fig, ax = plt.subplots()\n", "ax.imshow(whole_sample_sino, interpolation=\"nearest\", vmin=0)\n", - "ax.set_aspect(1)\n", + "ax.set_aspect(4)\n", "plt.show()" ] }, @@ -356,10 +625,10 @@ " ssino_to_recon = whole_sample_sino\n", "\n", "recon = ImageD11.sinograms.roi_iradon.mlem(ssino_to_recon, \n", - " theta=ds.obincens[::2],\n", + " theta=ds.obincens,\n", " workers=nthreads - 1,\n", " output_size=outsize,\n", - " projection_shifts=np.full(ssino_to_recon.shape, -c0/2),\n", + " # projection_shifts=np.full(ssino_to_recon.shape, -c0/2),\n", " niter=30)" ] }, @@ -372,20 +641,13 @@ "outputs": [], "source": [ "# we should be able to easily segment this using scikit-image\n", - "recon_man_mask = recon.copy()\n", - "\n", - "# we can incoporate our own mask too\n", - "# by uncommenting and modifying the below lines\n", - "# without a mask, MLEM can introduce artifacts in the corners\n", - "# so we can manually mask those out\n", - "\n", - "recon_man_mask[280:, 280:] = 0\n", + "recon_man_mask = apply_manual_mask(recon)\n", "\n", "thresh = threshold_otsu(recon_man_mask)\n", "\n", "# we can also override the threshold if we don't like it:\n", "\n", - "# thresh = 0.025\n", + "# thresh = 0.05\n", "\n", "binary = recon_man_mask > thresh\n", "\n", @@ -552,84 +814,12 @@ }, "outputs": [], "source": [ - "# now our 2D peak assignments are known, let's populate our grain objects with our 2D peaks\n", - "\n", - "for grain in tqdm(grains):\n", 
- " i = grain.gid\n", - " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n", - " # grain.mask_2d = np.isin(cf_2d.index, grain.peaks_2d)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def map_grain_from_peaks(g, flt, ds):\n", - " \"\"\"\n", - " Computes sinogram\n", - " flt is already the peaks for this grain\n", - " Returns angles, sino\n", - " \"\"\" \n", - " NY = len(ds.ybincens) # number of y translations\n", - " iy = np.round((flt.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", - "\n", - " # The problem is to assign each spot to a place in the sinogram\n", - " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", - " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", - " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", - " # 4D array of h,k,l,+/-\n", - " # pkmsk is whether a peak has been observed with this HKL or not\n", - " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", - " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", - " # sinogram row to hit\n", - " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", - " # counting where we hit an HKL position with a found peak\n", - " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", - "\n", - " npks = pkmsk.sum( )\n", - " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", - " sino = np.zeros( ( npks, NY ), 'f' )\n", - " hits = np.zeros( ( npks, NY ), 'f' )\n", - " angs = np.zeros( ( npks, NY ), 'f' )\n", - " adr = destRow * NY + iy \n", - " # Just accumulate \n", - " sig = flt.sum_intensity\n", - " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", - " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", - " ImageD11.cImageD11.put_incr64( angs, adr, flt.omega)\n", - " \n", - " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", - " # Normalise:\n", - " sino = (sino.T/sino.max( axis=1 )).T\n", - " # Sort (cosmetic):\n", - " order = np.lexsort((np.arange(npks), sinoangles))\n", - " sinoangles = sinoangles[order]\n", - " ssino = sino[order].T\n", - " return sinoangles, ssino, hits[order].T\n", - "\n", - "def do_sinos(g, hkltol=0.25):\n", - " flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", - " \n", - " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # calculate hkl of all assigned peaks\n", - " hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", - " dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", + "# now our 2D peak assignments are known, let's populate our grain objects with our 2D peaks\n", "\n", - " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", - " # g.npksall = flt.nrows # total number of assigned peaks\n", - " flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", - " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", - " hkl_int = np.round(hkl_real).astype(int)\n", - " dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", - " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", - " # g.npks = flt.nrows # total 
number of assigned peaks after hkltol filtering\n", - " g.etasigns_2d_strong = np.sign(flt.eta)\n", - " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", - " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, flt, ds)\n", - " return i,g" + "for grain in tqdm(grains):\n", + " i = grain.gid\n", + " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n", + " # grain.mask_2d = np.isin(cf_2d.index, grain.peaks_2d)" ] }, { @@ -668,56 +858,6 @@ "plt.show()" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def run_iradon_id11(grain, pad=20, y0=c0/2, workers=1, sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan):\n", - " outsize = grain.ssino.shape[0] + pad\n", - " \n", - " if apply_halfmask:\n", - " halfmask = np.zeros_like(grain.ssino)\n", - "\n", - " halfmask[:len(halfmask)//2-1, :] = 1\n", - " halfmask[len(halfmask)//2-1, :] = 0.5\n", - " \n", - " ssino_to_recon = grain.ssino * halfmask\n", - " else:\n", - " ssino_to_recon = grain.ssino\n", - " \n", - " # # pad the sample mask\n", - " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", - "\n", - " \n", - " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", - " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", - " theta=grain.sinoangles, \n", - " mask=sample_mask,\n", - " output_size=outsize,\n", - " projection_shifts=np.full(grain.ssino.shape, -y0),\n", - " filter_name='hamming',\n", - " interpolation='linear',\n", - " workers=workers)\n", - " \n", - " if mask_central_zingers:\n", - " grs = grain.recon.shape[0]\n", - " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", - " inner_mask_radius = 25\n", - " outer_mask_radius = inner_mask_radius + 2\n", - "\n", - " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", - " outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", - "\n", - " mask_ring = inner_circle_mask & outer_circle_mask\n", - " # we now have a mask to apply\n", - " fill_value = np.median(grain.recon[mask_ring])\n", - " grain.recon[inner_circle_mask] = fill_value\n", - " \n", - " return grain" - ] - }, { "cell_type": "code", "execution_count": null, @@ -730,7 +870,7 @@ "\n", "# y0 = 1.5 # for example!\n", "\n", - "y0 = c0/2" + "y0 = c0" ] }, { @@ -743,7 +883,7 @@ "source": [ "g = grains[0]\n", "\n", - "run_iradon_id11(g, pad=pad, y0=y0, workers=20)" + "run_iradon_id11(g, pad=pad, y0=y0, workers=20,sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)" ] }, { @@ -775,8 +915,10 @@ "source": [ "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", + "run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", + "\n", "with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", - " for i in tqdm(pool.map(run_iradon_id11, grains, [pad]*len(grains), [y0]*len(grains)), total=len(grains)):\n", + " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", " pass" ] }, @@ -856,7 +998,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0)" + "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.4)" ] }, { @@ -920,91 +1062,7 @@ }, "outputs": [], "source": [ - "# write og_recon and 
ssino and circle_mask to disk\n", - "\n", - "cmp = {'compression':'gzip',\n", - " 'compression_opts': 2,\n", - " 'shuffle' : True }\n", - "\n", - "def save_array(grp, name, ary):\n", - " hds = grp.require_dataset(name, \n", - " shape=ary.shape,\n", - " dtype=ary.dtype,\n", - " **cmp)\n", - " hds[:] = ary\n", - " return hds\n", - "\n", - "def save_grains(grains, ds):\n", - " with h5py.File(ds.grainsfile, 'r+') as hout:\n", - " try:\n", - " grp = hout.create_group('peak_assignments')\n", - " except ValueError:\n", - " grp = hout['peak_assignments']\n", - "\n", - " ds_gord = save_array( grp, 'gord', gord )\n", - " ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " ds_inds = save_array( grp, 'inds', inds )\n", - " ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " \n", - " grains_group = 'grains'\n", - " for g in tqdm(grains):\n", - " gg = hout[grains_group][str(g.gid)]\n", - " # save stuff for sinograms\n", - " \n", - " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", - " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", - " save_array(gg, 'og_recon', g.og_recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", - " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to use for MLEM'\n", - " \n", - " # might as well save peaks stuff while we're here\n", - " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", - " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", - " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", - "\n", - " gg.attrs['cen'] = g.cen\n", - " gg.attrs['y0'] = y0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "save_grains(grains, ds)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if is_half_scan:\n", - " dohm = \"Yes\"\n", - " mask_cen = \"Yes\"\n", - "else:\n", - " dohm = \"No\"\n", - " mask_cen = \"No\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "slurm_mlem_path = os.path.join(ds.analysispath, \"slurm_mlem\")\n", - "\n", - "if os.path.exists(slurm_mlem_path):\n", - " print(f\"Removing {slurm_mlem_path}\")\n", - " rmtree(slurm_mlem_path)\n", - "\n", - "os.mkdir(slurm_mlem_path)" + "save_grains_for_mlem(grains, ds, y0=y0)" ] }, { @@ -1013,53 +1071,7 @@ "metadata": {}, "outputs": [], "source": [ - "recons_path = os.path.join(ds.analysispath, \"mlem_recons\")\n", - "\n", - "if os.path.exists(recons_path):\n", - " print(f\"Removing {recons_path}\")\n", - " rmtree(recons_path)\n", - "\n", - "os.mkdir(recons_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "bash_script_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm.sh')\n", - "# python_script_path = os.path.join(ds.analysisroot, \"run_mlem_recon.py\")\n", - "python_script_path = os.path.join(id11_code_path, \"ImageD11/nbGui/S3DXRD/run_mlem_recon.py\") \n", - "outfile_path = 
os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.out')\n", - "errfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.err')\n", - "log_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_$SLURM_ARRAY_JOB_ID_$SLURM_ARRAY_TASK_ID.log')\n", - "\n", - "reconfile = os.path.join(recons_path, ds.dsname + \"_mlem_recon_$SLURM_ARRAY_TASK_ID.txt\")\n", - "\n", - "n_simultaneous_jobs = 50\n", - "cores_per_task = 8\n", - "niter = 50\n", - "\n", - "bash_script_string = f\"\"\"#!/bin/bash\n", - "#SBATCH --job-name=mlem-recon\n", - "#SBATCH --output={outfile_path}\n", - "#SBATCH --error={errfile_path}\n", - "#SBATCH --array=0-{len(grains)-1}%{n_simultaneous_jobs}\n", - "#SBATCH --time=02:00:00\n", - "# define memory needs and number of tasks for each array job\n", - "#SBATCH --ntasks=1\n", - "#SBATCH --cpus-per-task={cores_per_task}\n", - "#\n", - "date\n", - "python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n", - "date\n", - "\"\"\"\n", - "\n", - "with open(bash_script_path, \"w\") as bashscriptfile:\n", - " bashscriptfile.writelines(bash_script_string)" + "bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50)" ] }, { @@ -1177,34 +1189,203 @@ }, "outputs": [], "source": [ - "# save recons and 2d properties to existing grain file\n", - "\n", - "def save_grains(grains, ds):\n", - " with h5py.File(ds.grainsfile, 'r+') as hout:\n", - " try:\n", - " grp = hout.create_group('slice_recon')\n", - " except ValueError:\n", - " grp = hout['slice_recon']\n", - " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", - " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", - " \n", - " grains_group = 'grains'\n", - "\n", - " for g in tqdm(grains):\n", - " gg = hout[grains_group][str(g.gid)]\n", - "\n", - " save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'" + "save_grains(grains, ds)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "save_grains(grains, ds)" + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_-100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# manual override:\n", + "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_150um\", 
\"top_200um\", \"top_250um\"]}\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "cf_strong_frac = 0.995\n", + "cf_strong_dstol = 0.01\n", + "\n", + "is_half_scan = False\n", + "\n", + "peak_assign_tol = 0.25\n", + "\n", + "manual_threshold = None\n", + "# manual_threshold = 0.025\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "pad = 50\n", + "\n", + "# y0 = -1.4\n", + "\n", + "mlem_wholesample_niter = 25\n", + "mlem_n_simultaneous_jobs = 50\n", + "mlem_cores_per_task = 8\n", + "mlem_niter = 50\n", + "\n", + "cutoff_level = 0.2\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " \n", + " if not os.path.exists(ds.grainsfile):\n", + " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", + " cf_4d.parameters.loadparameters(par_path)\n", + " cf_4d.updateGeometry()\n", + " \n", + " grains = read_grains(ds)\n", + " \n", + " cf_strong = utils.selectpeaks(cf_4d, frac=cf_strong_frac, dsmax=cf_4d.ds.max(), dstol=cf_strong_dstol)\n", + " \n", + " if is_half_scan:\n", + " utils.correct_half_scan(ds)\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", + " \n", + " for g in tqdm(grains):\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " \n", + " for grain in tqdm(grains):\n", + " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " \n", + " c0 = np.median([g.cen for g in grains])\n", + " \n", + " y0 = c0\n", + " \n", + " whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", + " \n", + " print(\"MLEM whole sample mask\")\n", + " outsize = whole_sample_sino.shape[0] + pad\n", + "\n", + " if is_half_scan:\n", + " halfmask = np.zeros_like(whole_sample_sino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + "\n", + " ssino_to_recon = whole_sample_sino * halfmask\n", + " else:\n", + " ssino_to_recon = whole_sample_sino\n", + " recon = ImageD11.sinograms.roi_iradon.mlem(ssino_to_recon, \n", + " theta=ds.obincens,\n", + " workers=nthreads - 1,\n", + " output_size=outsize,\n", + " niter=mlem_wholesample_niter)\n", + " \n", + " recon_man_mask = 
apply_manual_mask(recon)\n", + " if manual_threshold is None:\n", + " thresh = threshold_otsu(recon_man_mask)\n", + " else:\n", + " thresh = manual_threshold\n", + " \n", + " binary = recon_man_mask > thresh\n", + " whole_sample_mask = convex_hull_image(binary)\n", + " \n", + " for g in grains:\n", + " g.translation = np.array([g.dx, g.dy, 0])\n", + " \n", + " print(\"Peak 2D organise\")\n", + " pks = ImageD11.sinograms.properties.pks_table.load(ds.pksfile)\n", + " p2d = pks.pk2d(ds.omega, ds.dty)\n", + " numba_order, numba_histo = utils.counting_sort(p2d['spot3d_id'])\n", + " grain_2d_id = utils.palloc(p2d['spot3d_id'].shape, np.dtype(int))\n", + " cleanid = cf_strong.grain_id.copy()\n", + " utils.find_grain_id(cf_strong.spot3d_id, cleanid, p2d['spot3d_id'], grain_2d_id, numba_order)\n", + " gord, counts = utils.counting_sort(grain_2d_id)\n", + " inds = np.concatenate(((0,), np.cumsum(counts)))\n", + " \n", + " for grain in tqdm(grains):\n", + " i = grain.gid\n", + " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n", + " \n", + " print(\"Making sinograms\")\n", + " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n", + " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n", + " pass\n", + " \n", + " print(\"Running iradon\")\n", + " \n", + " run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", + "\n", + " with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", + " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", + " pass\n", + " \n", + " for grain in grains:\n", + " grain.og_recon = grain.recon\n", + " \n", + " save_grains_for_mlem(grains, ds, y0)\n", + " \n", + " bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, mlem_n_simultaneous_jobs, mlem_cores_per_task, mlem_niter)\n", + " \n", + " utils.slurm_submit_and_wait(bash_script_path, 60)\n", + " \n", + " for i, grain in enumerate(tqdm(grains)):\n", + " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))\n", + " \n", + " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " \n", + " save_grains(grains, ds)\n", + "\n", + "print(\"Done!\")" ] }, { @@ -1212,7 +1393,9 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "rgb_array.shape" + ] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb index d9d341b3..38411df8 100755 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb @@ -6,7 +6,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 21/02/2024" + "## Date: 26/02/2024" ] }, { @@ -51,6 +51,8 @@ "import matplotlib\n", "%matplotlib widget\n", "\n", + "from skimage.feature import blob_log\n", + "\n", "import h5py\n", "from tqdm.notebook import tqdm\n", "import numba\n", @@ -70,6 +72,232 @@ "from ImageD11.grain import grain" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# define our functions\n", + "\n", + "def read_grains_minor_phase(ds, phase_name='minor'):\n", + " ds.grainsfile_minor_phase = 
os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + " with h5py.File(ds.grainsfile_minor_phase, 'r') as hin: \n", + " grains_group = 'grains'\n", + " \n", + " grains = []\n", + " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", + " gg = hin[grains_group][gid_string]\n", + " ubi = gg.attrs['ubi'][:]\n", + " g = ImageD11.grain.grain(ubi)\n", + " g.gid = int(gid_string)\n", + " g.peaks_4d = gg['peaks_4d_indexing'][:]\n", + " grains.append(g)\n", + " \n", + " return grains\n", + "\n", + "\n", + "\n", + "def read_grains_main_phase(ds):\n", + " with h5py.File(ds.grainsfile, 'r') as hin: \n", + " grains_group = 'grains'\n", + " \n", + " grains = []\n", + " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", + " gg = hin[grains_group][gid_string]\n", + " ubi = gg.attrs['ubi'][:]\n", + "\n", + " g = ImageD11.grain.grain(ubi)\n", + " g.gid = int(gid_string)\n", + " g.y0 = gg.attrs['y0'][()]\n", + " g.sample_mask = gg['circle_mask'][:]\n", + " grains.append(g)\n", + " \n", + " return grains\n", + "\n", + "\n", + "def map_grain_from_peaks(g, flt, ds):\n", + " \"\"\"\n", + " Computes sinogram\n", + " flt is already the peaks for this grain\n", + " Returns angles, sino\n", + " \"\"\" \n", + " NY = len(ds.ybincens) # number of y translations\n", + " iy = np.round((flt.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", + "\n", + " # The problem is to assign each spot to a place in the sinogram\n", + " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", + " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", + " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", + " # 4D array of h,k,l,+/-\n", + " # pkmsk is whether a peak has been observed with this HKL or not\n", + " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", + " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", + " # sinogram row to hit\n", + " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", + " # counting where we hit an HKL position with a found peak\n", + " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", + "\n", + " npks = pkmsk.sum( )\n", + " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", + " sino = np.zeros( ( npks, NY ), 'f' )\n", + " hits = np.zeros( ( npks, NY ), 'f' )\n", + " angs = np.zeros( ( npks, NY ), 'f' )\n", + " adr = destRow * NY + iy \n", + " # Just accumulate \n", + " sig = flt.sum_intensity\n", + " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", + " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", + " ImageD11.cImageD11.put_incr64( angs, adr, flt.omega)\n", + " \n", + " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", + " # Normalise:\n", + " sino = (sino.T/sino.max( axis=1 )).T\n", + " # Sort (cosmetic):\n", + " order = np.lexsort((np.arange(npks), sinoangles))\n", + " sinoangles = sinoangles[order]\n", + " ssino = sino[order].T\n", + " return sinoangles, ssino, hits[order].T\n", + "\n", + "def do_sinos(g, hkltol=0.25):\n", + " flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", + " \n", + " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # calculate hkl of all assigned peaks\n", 
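+ "    # a short worked example of the tolerance test below: g.ubi maps g-vectors to\n",
+ "    # fractional Miller indices, so a peak that really belongs to this grain lands\n",
+ "    # near integers, e.g. (1.98, -1.02, 0.01) rounds to (2, -1, 0) with\n",
+ "    # dh = 0.0004 + 0.0004 + 0.0001 = 0.0009, passing the cut of hkltol**2 = 0.25**2 = 0.0625\n",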
+ " hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", + " dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", + "\n", + " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", + " # g.npksall = flt.nrows # total number of assigned peaks\n", + " flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", + " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", + " hkl_int = np.round(hkl_real).astype(int)\n", + " dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", + " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", + " # g.npks = flt.nrows # total number of assigned peaks after hkltol filtering\n", + " g.etasigns_2d_strong = np.sign(flt.eta)\n", + " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", + " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, flt, ds)\n", + " return i,g\n", + "\n", + "\n", + "def run_iradon_id11(grain, pad=20, y0=0, workers=1, sample_mask=None, apply_halfmask=False, mask_central_zingers=False):\n", + " outsize = grain.ssino.shape[0] + pad\n", + " \n", + " if apply_halfmask:\n", + " halfmask = np.zeros_like(grain.ssino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + " \n", + " ssino_to_recon = grain.ssino * halfmask\n", + " else:\n", + " ssino_to_recon = grain.ssino\n", + " \n", + " # # pad the sample mask\n", + " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", + "\n", + " \n", + " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", + " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=grain.sinoangles, \n", + " mask=sample_mask,\n", + " output_size=outsize,\n", + " projection_shifts=np.full(grain.ssino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=workers)\n", + " \n", + " if mask_central_zingers:\n", + " grs = grain.recon.shape[0]\n", + " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", + " inner_mask_radius = 25\n", + " outer_mask_radius = inner_mask_radius + 2\n", + "\n", + " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", + " outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", + "\n", + " mask_ring = inner_circle_mask & outer_circle_mask\n", + " # we now have a mask to apply\n", + " fill_value = np.median(grain.recon[mask_ring])\n", + " grain.recon[inner_circle_mask] = fill_value\n", + " \n", + " return grain\n", + "\n", + "\n", + "def find_cens_from_recon(grain):\n", + " grain.bad_recon = False\n", + " blobs = blob_log(grain.recon, min_sigma=1, max_sigma=10, num_sigma=10, threshold=.01)\n", + " blobs_sorted = sorted(blobs, key=lambda x: x[2], reverse=True)\n", + " try:\n", + " largest_blob = blobs_sorted[0]\n", + " grain.x_blob = largest_blob[1]\n", + " grain.y_blob = largest_blob[0]\n", + " except IndexError:\n", + " # didn't find any blobs\n", + " # for small grains like these, if we didn't find a blob, normally indicates recon is bad\n", + " # we will exclude it from maps and export\n", + " grain.bad_recon = True\n", + "\n", + " \n", + "cmp = {'compression':'gzip',\n", + "'compression_opts': 2,\n", + "'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return 
hds\n", + "\n", + "def save_grains_minor_phase(grains, ds):\n", + "\n", + " # delete existing file, because our grain numbers have changed\n", + " if os.path.exists(ds.grainsfile_minor_phase):\n", + " os.remove(ds.grainsfile_minor_phase)\n", + " \n", + " with h5py.File(ds.grainsfile_minor_phase, 'w-') as hout: # fail if exists\n", + " try:\n", + " grp = hout.create_group('peak_assignments')\n", + " except ValueError:\n", + " grp = hout['peak_assignments']\n", + "\n", + " ds_gord = save_array( grp, 'gord', gord )\n", + " ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " ds_inds = save_array( grp, 'inds', inds )\n", + " ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " \n", + " try:\n", + " grp = hout.create_group('slice_recon')\n", + " except ValueError:\n", + " grp = hout['slice_recon']\n", + " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", + " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", + " \n", + " grains_group = hout.create_group('grains')\n", + " for g in tqdm(grains):\n", + " gg = grains_group.create_group(str(g.gid))\n", + " # save stuff for sinograms\n", + " \n", + " gg.attrs.update({'ubi':g.ubi})\n", + " \n", + " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain during indexing\"\n", + " \n", + " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", + " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", + " save_array(gg, 'og_recon', g.recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", + " save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n", + " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to use for MLEM'\n", + " \n", + " # might as well save peaks stuff while we're here\n", + " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", + " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", + " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", + "\n", + " gg.attrs['cen'] = g.cen" + ] + }, { "cell_type": "code", "execution_count": null, @@ -163,32 +391,7 @@ }, "outputs": [], "source": [ - "def read_grains(ds):\n", - " ds.grainsfile_carbides = os.path.join(ds.analysispath, ds.dsname + '_grains_carbides.h5')\n", - " with h5py.File(ds.grainsfile_carbides, 'r') as hin: \n", - " grains_group = 'grains'\n", - " \n", - " grains = []\n", - " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", - " gg = hin[grains_group][gid_string]\n", - " ubi = gg.attrs['ubi'][:]\n", - " g = ImageD11.grain.grain(ubi)\n", - " g.gid = int(gid_string)\n", - " g.peaks_4d = gg['peaks_4d_indexing'][:]\n", - " grains.append(g)\n", - " \n", - " return grains" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "grains = read_grains(ds)\n", + "grains = read_grains_minor_phase(ds)\n", "\n", "for grain in grains:\n", " # print(grain.gid)\n", @@ -205,19 +408,19 @@ }, "outputs": 
[],
 "source": [
- "# isolate Nickel peaks, and remove them from the dataset\n",
- "ni_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=0.0075, dsmax=cf_4d.ds.max())\n",
+ "# isolate main phase peaks, and remove them from the dataset\n",
+ "main_phase_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=0.0075, dsmax=cf_4d.ds.max())\n",
 "\n",
- "carbides = cf_4d.copy()\n",
- "carbides.filter(~ni_peaks_mask)\n",
+ "minor_phase_peaks = cf_4d.copy()\n",
+ "minor_phase_peaks.filter(~main_phase_peaks_mask)\n",
 "\n",
- "# Update geometry for carbides peaks\n",
+ "# Update geometry for minor phase peaks\n",
 "\n",
- "par_path = 'carbide.par'\n",
- "carbides.parameters.loadparameters(par_path)\n",
- "carbides.updateGeometry()\n",
+ "par_path = os.path.join(processed_data_root_dir, 'Au.par')\n",
+ "minor_phase_peaks.parameters.loadparameters(par_path)\n",
+ "minor_phase_peaks.updateGeometry()\n",
 "\n",
- "cf_strong = utils.selectpeaks(carbides, dstol=0.0075, dsmax=carbides.ds.max(), frac=0.9, doplot=0.01)\n",
+ "cf_strong = utils.selectpeaks(minor_phase_peaks, dstol=0.005, dsmax=minor_phase_peaks.ds.max(), frac=0.9, doplot=0.01)\n",
 "print(cf_strong.nrows)"
 ]
},
@@ -253,32 +456,14 @@
 },
 "outputs": [],
 "source": [
- "# load mask for whole sample from Ni grain reconstruction\n",
+ "# load major phase grain reconstruction\n",
+ "# for pad and y0\n",
 "\n",
- "def read_ni_grains(ds):\n",
- " with h5py.File(ds.grainsfile, 'r') as hin: \n",
- " grains_group = 'grains'\n",
- " \n",
- " grains = []\n",
- " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n",
- " gg = hin[grains_group][gid_string]\n",
- " ubi = gg.attrs['ubi'][:]\n",
- " g = ImageD11.grain.grain(ubi)\n",
- " g.gid = int(gid_string)\n",
- " g.sample_mask = gg['circle_mask'][:]\n",
- " # g.etasigns_4d = gg['etasigns_4d'][:]\n",
- " # g.hkl_4d = gg['hkl_4d'][:]\n",
- " # g.mask_4d = gg['mask_4d'][:]\n",
- " # g.mask_4d_greedy = gg['mask_4d_greedy'][:]\n",
- " # g.peak_indices_4d = gg['peak_indices_4d'][:]\n",
- " # g.npks_4d = gg.attrs['npks_4d']\n",
- " # g.npks_4d_greedy = gg.attrs['npks_4d_greedy']\n",
- " grains.append(g)\n",
- " \n",
- " return grains\n",
+ "major_phase_grains = read_grains_main_phase(ds)\n",
+ "whole_sample_mask = major_phase_grains[0].sample_mask\n",
+ "y0 = major_phase_grains[0].y0\n",
 "\n",
- "ni_grains = read_ni_grains(ds)\n",
- "whole_sample_mask = ni_grains[0].sample_mask"
+ "# assumes the main-phase grains file also stored per-grain recon and ssino;\n",
+ "# if it did not, set pad by hand (the batch cell below uses pad = 50)\n",
+ "pad = (major_phase_grains[0].recon.shape[0] - major_phase_grains[0].ssino.shape[0])//2"
 ]
},
{
@@ -387,13 +572,7 @@
 "source": [
 "c0 = np.median([g.cen for g in grains])\n",
 "\n",
- "print('Center of rotation in dty', c0)\n",
- "\n",
- "# c0 is being correctly determined\n",
- "# we know this because of the earlier single-grain dty vs omega plot\n",
- "# if g.cen was off, the fit would be shifted\n",
- "# this means we have another parameter we need to introduce\n",
- "# to account for uneven spacing either side of the center of rotation?"
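+ "# why the median works (a sketch, using the same sign convention as the dty_calc\n",
+ "# expression in the point-by-point refinement notebook): a grain at sample position\n",
+ "# (a, b) traces dty = a*cos(omega) - b*sin(omega) + y0, so a grain on the rotation\n",
+ "# axis (a = b = 0) has a flat trace at y0, and the median of the fitted centres\n",
+ "# g.cen is a robust estimate of the rotation-centre offset:\n",
+ "# om = np.radians(ds.obincens)\n",
+ "# dty_model = a * np.cos(om) - b * np.sin(om) + c0\n",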
+ "print('Center of rotation in dty', c0)" ] }, { @@ -504,79 +683,7 @@ }, "outputs": [], "source": [ - "def map_grain_from_peaks(g, flt, ds):\n", - " \"\"\"\n", - " Computes sinogram\n", - " flt is already the peaks for this grain\n", - " Returns angles, sino\n", - " \"\"\" \n", - " NY = len(ds.ybincens) # number of y translations\n", - " iy = np.round((flt.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", - "\n", - " # The problem is to assign each spot to a place in the sinogram\n", - " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", - " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", - " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", - " # 4D array of h,k,l,+/-\n", - " # pkmsk is whether a peak has been observed with this HKL or not\n", - " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", - " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", - " # sinogram row to hit\n", - " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", - " # counting where we hit an HKL position with a found peak\n", - " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", - "\n", - " npks = pkmsk.sum( )\n", - " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", - " sino = np.zeros( ( npks, NY ), 'f' )\n", - " hits = np.zeros( ( npks, NY ), 'f' )\n", - " angs = np.zeros( ( npks, NY ), 'f' )\n", - " adr = destRow * NY + iy \n", - " # Just accumulate \n", - " sig = flt.sum_intensity\n", - " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", - " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", - " ImageD11.cImageD11.put_incr64( angs, adr, flt.omega)\n", - " \n", - " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", - " # Normalise:\n", - " sino = (sino.T/sino.max( axis=1 )).T\n", - " # Sort (cosmetic):\n", - " order = np.lexsort((np.arange(npks), sinoangles))\n", - " sinoangles = sinoangles[order]\n", - " ssino = sino[order].T\n", - " return sinoangles, ssino, hits[order].T\n", - "\n", - "def do_sinos(g, hkltol=0.25):\n", - " flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", - " \n", - " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # calculate hkl of all assigned peaks\n", - " hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", - " dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", - "\n", - " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", - " # g.npksall = flt.nrows # total number of assigned peaks\n", - " flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", - " hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", - " hkl_int = np.round(hkl_real).astype(int)\n", - " dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", - " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", - " # g.npks = flt.nrows # total number of assigned peaks after hkltol filtering\n", - " g.etasigns_2d_strong = np.sign(flt.eta)\n", - " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", - " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, flt, ds)\n", - 
" return i,g" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Determine sinograms of all grains\n", + "# Determine sinograms of all grains\n", "\n", "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", @@ -604,58 +711,6 @@ "plt.show()" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def run_iradon_id11(grain, pad=20, y0=c0/2, workers=1, sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=False):\n", - " outsize = grain.ssino.shape[0] + pad\n", - " \n", - " if apply_halfmask:\n", - " halfmask = np.zeros_like(grain.ssino)\n", - "\n", - " halfmask[:len(halfmask)//2-1, :] = 1\n", - " halfmask[len(halfmask)//2-1, :] = 0.5\n", - " \n", - " ssino_to_recon = grain.ssino * halfmask\n", - " else:\n", - " ssino_to_recon = grain.ssino\n", - " \n", - " # pad the sample mask\n", - " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", - "\n", - " \n", - " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", - " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", - " theta=grain.sinoangles, \n", - " mask=sample_mask,\n", - " output_size=outsize,\n", - " projection_shifts=np.full(grain.ssino.shape, -y0),\n", - " filter_name='hamming',\n", - " interpolation='linear',\n", - " workers=workers)\n", - " \n", - " if mask_central_zingers:\n", - " grs = grain.recon.shape[0]\n", - " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", - " inner_mask_radius = 25\n", - " outer_mask_radius = inner_mask_radius + 2\n", - "\n", - " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", - " outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", - "\n", - " mask_ring = inner_circle_mask & outer_circle_mask\n", - " # we now have a mask to apply\n", - " fill_value = np.median(grain.recon[mask_ring])\n", - " grain.recon[inner_circle_mask] = fill_value\n", - " \n", - " return grain" - ] - }, { "cell_type": "code", "execution_count": null, @@ -667,21 +722,21 @@ "# you can pick a grain and investigate the effects of changing y0 that gets passed to iradon\n", "# it' best to pick the grain AFTER reconstructing all grains, so you can pick a grain of interest\n", "\n", - "g = grains[1066]\n", + "g = grains[5]\n", " \n", - "vals = np.linspace(-2.0, -1.0, 9)\n", + "vals = np.linspace(-8.5, -7.5, 9)\n", "\n", "grid_size = np.ceil(np.sqrt(len(vals))).astype(int)\n", "nrows = (len(vals)+grid_size-1)//grid_size\n", "\n", "fig, axs = plt.subplots(grid_size, nrows, sharex=True, sharey=True)\n", "\n", - "for inc, val in enumerate(tqdm.tqdm(vals)):\n", + "for inc, val in enumerate(tqdm(vals)):\n", " run_iradon_id11(g, y0=val)\n", " # crop = g.recon[200:240, 230:290]\n", " crop = g.recon\n", " \n", - " axs.ravel()[inc].imshow(crop, origin=\"lower\")\n", + " axs.ravel()[inc].imshow(crop, origin=\"lower\", vmin=0)\n", " axs.ravel()[inc].set_title(val)\n", " \n", "plt.show()" @@ -695,10 +750,10 @@ }, "outputs": [], "source": [ - "# you can overwrite y0 and pad here\n", + "# you can overwrite y0 here\n", "\n", - "y0 = -1.25\n", - "pad = 20" + "y0 = -7.875\n", + "# pad = 50" ] }, { @@ -736,7 +791,7 @@ "def update_frame(i):\n", " rec.set_array(grains[i].recon)\n", " sin.set_array(grains[i].ssino)\n", - " a[0].set(title=str(i))\n", + " a[0].set(title=str(grains[i].gid))\n", " fig.canvas.draw()\n", "\n", "# Create a slider widget to select the frame 
number\n", @@ -761,27 +816,22 @@ }, "outputs": [], "source": [ - "from skimage.feature import blob_log\n", - "\n", - "def find_cens_from_recon(grain):\n", - " grain.bad_recon = False\n", - " blobs = blob_log(grain.recon, min_sigma=1, max_sigma=10, num_sigma=10, threshold=.01)\n", - " blobs_sorted = sorted(blobs, key=lambda x: x[2], reverse=True)\n", - " try:\n", - " largest_blob = blobs_sorted[0]\n", - " grain.x_blob = largest_blob[1]\n", - " grain.y_blob = largest_blob[0]\n", - " except IndexError:\n", - " # didn't find any blobs\n", - " # for small grains like these, if we didn't find a blob, normally indicates recon is bad\n", - " # we will exclude it from maps and export\n", - " grain.bad_recon = True\n", - "\n", "with concurrent.futures.ThreadPoolExecutor(max_workers= max(1, nthreads-1)) as pool:\n", " for i in tqdm(pool.map(find_cens_from_recon, grains), total=len(grains)):\n", " pass" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# bad_gids = [46, 95, 102, 130, 137, 160, 123, 125, 136, 165, 174, 175, 176, 177]" + ] + }, { "cell_type": "code", "execution_count": null, @@ -793,6 +843,7 @@ "# remove bad recon grains from future analysis\n", "print(f\"{len(grains)} grains before filtration\")\n", "grains = [grain for grain in grains if not grain.bad_recon]\n", + "# grains = [grain for grain in grains if grain.gid not in bad_gids]\n", "print(f\"{len(grains)} grains after filtration\")" ] }, @@ -868,7 +919,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.5)" + "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.7)" ] }, { @@ -900,6 +951,33 @@ "plt.show()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "labels, counts = np.unique(grain_labels_array, return_counts=True)\n", + "\n", + "fig, ax = plt.subplots()\n", + "ax.plot(labels[labels > 0], counts[labels > 0])\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "bad_gids = [int(label) for (label, count) in zip(labels, counts) if count > 25 and label > 0]\n", + "bad_gids" + ] + }, { "cell_type": "code", "execution_count": null, @@ -920,65 +998,7 @@ "metadata": {}, "outputs": [], "source": [ - "# write grains to disk\n", - "\n", - "cmp = {'compression':'gzip',\n", - " 'compression_opts': 2,\n", - " 'shuffle' : True }\n", - "\n", - "def save_array(grp, name, ary):\n", - " hds = grp.require_dataset(name, \n", - " shape=ary.shape,\n", - " dtype=ary.dtype,\n", - " **cmp)\n", - " hds[:] = ary\n", - " return hds\n", - "\n", - "def save_grains(grains, ds):\n", - " \n", - " # delete existing file, because our grain numbers have changed\n", - " if os.path.exists(ds.grainsfile_carbides):\n", - " os.remove(ds.grainsfile_carbides)\n", - " \n", - " with h5py.File(ds.grainsfile_carbides, 'w-') as hout: # fail if exists\n", - " try:\n", - " grp = hout.create_group('peak_assignments')\n", - " except ValueError:\n", - " grp = hout['peak_assignments']\n", - "\n", - " ds_gord = save_array( grp, 'gord', gord )\n", - " ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " ds_inds = save_array( grp, 'inds', inds )\n", - " ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " \n", - " 
try:\n", - " grp = hout.create_group('slice_recon')\n", - " except ValueError:\n", - " grp = hout['slice_recon']\n", - " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", - " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", - " \n", - " grains_group = hout.create_group('grains')\n", - " for g in tqdm(grains):\n", - " gg = grains_group.create_group(str(g.gid))\n", - " # save stuff for sinograms\n", - " \n", - " gg.attrs.update({'ubi':g.ubi})\n", - " \n", - " save_array(gg, 'peaks_4d_indexing', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain during indexing\"\n", - " \n", - " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", - " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", - " save_array(gg, 'og_recon', g.recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", - " save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n", - " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to use for MLEM'\n", - " \n", - " # might as well save peaks stuff while we're here\n", - " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", - " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", - " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", - "\n", - " gg.attrs['cen'] = g.cen" + "# write grains to disk" ] }, { @@ -989,7 +1009,17 @@ }, "outputs": [], "source": [ - "save_grains(grains, ds)" + "save_grains_minor_phase(grains, ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" ] }, { @@ -997,7 +1027,154 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "minor_phase_par_path = os.path.join(processed_data_root_dir, 'Au.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = 
os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "main_phase_cf_dstol = 0.0075\n", + "\n", + "cf_strong_frac = 0.9\n", + "cf_strong_dstol = 0.005\n", + "\n", + "is_half_scan = False\n", + "\n", + "peak_assign_tol = 0.25\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "pad = 50\n", + "\n", + "cutoff_level = 0.7\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " \n", + " if not os.path.exists(ds.grainsfile):\n", + " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", + " cf_4d.parameters.loadparameters(par_path)\n", + " cf_4d.updateGeometry()\n", + " \n", + " grains = read_grains_minor_phase(ds)\n", + " \n", + " main_phase_peaks_mask = utils.unitcell_peaks_mask(cf_4d, dstol=main_phase_cf_dstol, dsmax=cf_4d.ds.max())\n", + "\n", + " minor_phase_peaks = cf_4d.copy()\n", + " minor_phase_peaks.filter(~main_phase_peaks_mask)\n", + "\n", + " # Update geometry for minor phase peaks\n", + "\n", + " minor_phase_peaks.parameters.loadparameters(minor_phase_par_path)\n", + " minor_phase_peaks.updateGeometry()\n", + " \n", + " cf_strong = utils.selectpeaks(minor_phase_peaks, frac=cf_strong_frac, dsmax=cf_4d.ds.max(), dstol=cf_strong_dstol)\n", + " \n", + " if is_half_scan:\n", + " utils.correct_half_scan(ds)\n", + " \n", + " main_phase_grains = read_grains_main_phase(ds)\n", + " whole_sample_mask = main_phase_grains[0].sample_mask\n", + " y0 = main_phase_grains[0].y0\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", + " \n", + " for g in tqdm(grains):\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " \n", + " for grain in tqdm(grains):\n", + " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " \n", + " c0 = np.median([g.cen for g in grains])\n", + " \n", + " for g in grains:\n", + " g.translation = np.array([g.dx, g.dy, 0])\n", + " \n", + " print(\"Peak 2D organise\")\n", + " pks = ImageD11.sinograms.properties.pks_table.load(ds.pksfile)\n", + " p2d = pks.pk2d(ds.omega, ds.dty)\n", + " numba_order, numba_histo = utils.counting_sort(p2d['spot3d_id'])\n", + " grain_2d_id = utils.palloc(p2d['spot3d_id'].shape, np.dtype(int))\n", + " cleanid = cf_strong.grain_id.copy()\n", + " utils.find_grain_id(cf_strong.spot3d_id, cleanid, p2d['spot3d_id'], grain_2d_id, numba_order)\n", + " gord, counts = utils.counting_sort(grain_2d_id)\n", + " inds = np.concatenate(((0,), np.cumsum(counts)))\n", + " \n", + " for grain in 
tqdm(grains):\n",
+ " i = grain.gid\n",
+ " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n",
+ " \n",
+ " print(\"Making sinograms\")\n",
+ " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n",
+ " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n",
+ " pass\n",
+ " \n",
+ " print(\"Running iradon\")\n",
+ " \n",
+ " run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n",
+ "\n",
+ " with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n",
+ " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n",
+ " pass\n",
+ " \n",
+ " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1, nthreads-1)) as pool:\n",
+ " for i in tqdm(pool.map(find_cens_from_recon, grains), total=len(grains)):\n",
+ " pass\n",
+ " \n",
+ " # drop grains whose recon produced no blob (find_cens_from_recon marked them bad);\n",
+ " # they have no x_blob/y_blob and would break the translation step (mirrors the interactive cell)\n",
+ " grains = [grain for grain in grains if not grain.bad_recon]\n",
+ " \n",
+ " for g in grains:\n",
+ " g.translation = np.array([g.x_blob, g.y_blob, 0])\n",
+ " \n",
+ " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n",
+ " \n",
+ " save_grains_minor_phase(grains, ds)\n",
+ "\n",
+ "print(\"Done!\")"
+ ]
 }
],
"metadata": {
diff --git a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
index 4fa11376..0f4e5e67 100755
--- a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
+++ b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
@@ -7,7 +7,7 @@
 "source": [
 "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n",
 "# Written by Haixing Fang, Jon Wright and James Ball\n",
- "## Date: 21/02/2024"
+ "## Date: 26/02/2024"
 ]
},
{
@@ -67,6 +67,67 @@
 "import ImageD11.nbGui.nb_utils as utils"
 ]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "01541645-a0ee-47c2-ab99-7eecd35392c8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def read_grains(ds):\n",
+ " with h5py.File(ds.grainsfile, 'r') as hin:\n",
+ " grp = hin['slice_recon']\n",
+ " \n",
+ " raw_intensity_array = grp['intensity'][:]\n",
+ " grain_labels_array = grp['labels'][:]\n",
+ " \n",
+ " grains_group = 'grains'\n",
+ " \n",
+ " grains = []\n",
+ " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n",
+ " gg = hin[grains_group][gid_string]\n",
+ " ubi = gg.attrs['ubi'][:]\n",
+ " g = ImageD11.grain.grain(ubi)\n",
+ " # general grain properties\n",
+ " g.gid = int(gid_string)\n",
+ " g.translation = gg['translation'][:]\n",
+ " g.cen = gg.attrs['cen']\n",
+ " g.y0 = gg.attrs['y0']\n",
+ " # sinogram stuff\n",
+ " g.ssino = gg['ssino'][:]\n",
+ " g.sinoangles = gg['sinoangles'][:]\n",
+ " # reconstructions\n",
+ " g.og_recon = gg['og_recon'][:]\n",
+ " g.recon = gg['recon'][:]\n",
+ " grains.append(g)\n",
+ " \n",
+ " return grains, raw_intensity_array, grain_labels_array\n",
+ "\n",
+ "\n",
+ "# save recons and 2d properties to existing grain file\n",
+ "\n",
+ "cmp = {'compression':'gzip',\n",
+ " 'compression_opts': 2,\n",
+ " 'shuffle' : True }\n",
+ "\n",
+ "def save_array(grp, name, ary):\n",
+ " hds = grp.require_dataset(name, \n",
+ " shape=ary.shape,\n",
+ " dtype=ary.dtype,\n",
+ " **cmp)\n",
+ " hds[:] = ary\n",
+ " return hds\n",
+ "\n",
+ "\n",
+ "def save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map):\n",
+ " with h5py.File(ds.pbpubifile, 'w') as hout:\n",
+ " grp = hout.create_group('arrays')\n",
+ " save_array(grp, 'ubi_map', ubi_map).attrs['description'] = 'Refined UBI values at each pixel'\n",
+ " save_array(grp, 'eps_map', eps_map).attrs['description'] = 'Strain matrices (sample ref) at each pixel'\n",
+ " save_array(grp, 'misorientation_map', misorientation_map).attrs['description'] = 'Misorientation to grain avg at each pixel'\n",
+ " save_array(grp, 'ipf_z_col_map', ipf_z_col_map).attrs['description'] = 'IPF Z color at each pixel'"
+ ]
+ },
 {
 "cell_type": "code",
 "execution_count": null,
@@ -154,45 +215,6 @@
 "cf_2d.addcolumn(index_column, 'index')"
 ]
 },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "21a2c05d-db13-45fc-9ff5-d419479b3457",
- "metadata": {
- "tags": []
- },
- "outputs": [],
- "source": [
- "def read_grains(ds):\n",
- " with h5py.File(ds.grainsfile, 'r') as hin:\n",
- " grp = hin['slice_recon']\n",
- " \n",
- " raw_intensity_array = grp['intensity'][:]\n",
- " grain_labels_array = grp['labels'][:]\n",
- " \n",
- " grains_group = 'grains'\n",
- " \n",
- " grains = []\n",
- " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n",
- " gg = hin[grains_group][gid_string]\n",
- " ubi = gg.attrs['ubi'][:]\n",
- " g = ImageD11.grain.grain(ubi)\n",
- " # general grain properties\n",
- " g.gid = int(gid_string)\n",
- " g.translation = gg['translation'][:]\n",
- " g.cen = gg.attrs['cen']\n",
- " g.y0 = gg.attrs['y0']\n",
- " # sinogram stuff\n",
- " g.ssino = gg['ssino'][:]\n",
- " g.sinoangles = gg['sinoangles'][:]\n",
- " # reconstructions\n",
- " g.og_recon = gg['og_recon'][:]\n",
- " g.recon = gg['recon'][:]\n",
- " grains.append(g)\n",
- " \n",
- " return grains, raw_intensity_array, grain_labels_array"
- ]
- },
 {
 "cell_type": "code",
 "execution_count": null,
@@ -560,54 +582,225 @@
 {
 "cell_type": "code",
 "execution_count": null,
- "id": "dfb44b23-076a-4747-945c-0a85fcdc8097",
+ "id": "9d5a6c02-657e-4dc9-ba78-740ed242672b",
 "metadata": {
 "tags": []
 },
 "outputs": [],
 "source": [
- "# save recons and 2d properties to existing grain file\n",
- "\n",
- "cmp = {'compression':'gzip',\n",
- " 'compression_opts': 2,\n",
- " 'shuffle' : True }\n",
- "\n",
- "def save_array(grp, name, ary):\n",
- " hds = grp.require_dataset(name, \n",
- " shape=ary.shape,\n",
- " dtype=ary.dtype,\n",
- " **cmp)\n",
- " hds[:] = ary\n",
- " return hds\n",
- "\n",
- "def save_ubi_map(ds):\n",
- " with h5py.File(ds.pbpubifile, 'w') as hout:\n",
- " grp = hout.create_group('arrays')\n",
- " save_array(grp, 'ubi_map', ubi_map).attrs['description'] = 'Refined UBI values at each pixel'\n",
- " save_array(grp, 'eps_map', eps_map).attrs['description'] = 'Strain matrices (sample ref) at each pixel'\n",
- " save_array(grp, 'misorientation_map', misorientation_map).attrs['description'] = 'Misorientation to grain avg at each pixel'\n",
- " save_array(grp, 'ipf_z_col_map', ipf_z_col_map).attrs['description'] = 'IPF Z color at each pixel'"
+ "save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map)"
 ]
 },
 {
 "cell_type": "code",
 "execution_count": null,
- "id": "9d5a6c02-657e-4dc9-ba78-740ed242672b",
- "metadata": {
- "tags": []
- },
+ "id": "0c559091-97bd-4b2d-9dc6-d99eb1b6e038",
+ "metadata": {},
 "outputs": [],
 "source": [
- "save_ubi_map(ds)"
+ "if 1:\n",
+ " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")"
 ]
 },
 {
 "cell_type": "code",
 "execution_count": null,
- "id": "0c559091-97bd-4b2d-9dc6-d99eb1b6e038",
+ "id": "51473acc-dfa2-4a1e-8380-dd5c5c953e54",
 "metadata": {},
 "outputs": [],
- "source": []
+ "source": [
+ "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
+ "# by 
default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "cf_2d_strong_frac = 0.95\n", + "peak_assign_tol = 0.025\n", + "n_ysteps_tol = 10\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " \n", + " if not os.path.exists(ds.grainsfile):\n", + " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " ds.pbpubifile = os.path.join(ds.analysispath, ds.dsname + '_pbp_map.h5')\n", + " \n", + " if os.path.exists(ds.pbpubifile):\n", + " print(f\"PBP file already exists for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " \n", + " # Import 2D peaks\n", + "\n", + " cf_2d = ImageD11.columnfile.columnfile(ds.col2dfile)\n", + " cf_2d.parameters.loadparameters(par_path)\n", + " cf_2d.updateGeometry()\n", + " print(f\"Read {cf_2d.nrows} 2D peaks\")\n", + " index_column = np.arange(cf_2d.nrows)\n", + " cf_2d.addcolumn(index_column, 'index')\n", + " \n", + " grains, raw_intensity_array, grain_labels_array = read_grains(ds)\n", + " \n", + " cf_2d_strong = utils.selectpeaks(cf_2d, frac=cf_2d_strong_frac, dsmax=cf_2d.ds.max())\n", + " cf_2d_strong.addcolumn(np.cos(np.radians(cf_2d_strong.omega)), 'cosomega')\n", + " cf_2d_strong.addcolumn(np.sin(np.radians(cf_2d_strong.omega)), 'sinomega')\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_2d_strong, tol=peak_assign_tol)\n", + "\n", + " print(\"Storing peak data in grains\")\n", + " # iterate through all the grains\n", + "\n", + " gvecs_2d_strong = np.transpose((cf_2d_strong.gx, cf_2d_strong.gy, cf_2d_strong.gz)).astype(float)\n", + "\n", + " # make lookup table for grain IDs so we can quickly get the grain given a GID (might not be contiguous or start at 0)\n", + " grain_lut = {}\n", + " for g in tqdm(grains):\n", + " 
grain_lut[g.gid] = g\n",
+ " g.mask_2d_strong = cf_2d_strong.grain_id == g.gid\n",
+ "\n",
+ " g.gve_2d_strong = gvecs_2d_strong[g.mask_2d_strong].T\n",
+ "\n",
+ " g.cosomega = cf_2d_strong.cosomega[g.mask_2d_strong]\n",
+ " g.sinomega = cf_2d_strong.sinomega[g.mask_2d_strong]\n",
+ "\n",
+ " g.dty = cf_2d_strong.dty[g.mask_2d_strong]\n",
+ " \n",
+ " g.label_mask = grain_labels_array == g.gid\n",
+ " \n",
+ " # refine this grain's average UBI against its own strong 2D peaks\n",
+ " # (peak_assign_tol is reused as the hkl tolerance here)\n",
+ " ubifit = g.ubi.copy()\n",
+ " _ = cImageD11.score_and_refine(ubifit, np.transpose(g.gve_2d_strong), peak_assign_tol)\n",
+ " g.set_ubi(ubifit)\n",
+ " \n",
+ " per_pixel_ubis = {}\n",
+ " \n",
+ " for ginc, grain in enumerate(tqdm(grains[:])):\n",
+ " def refine_ubis(pixel_position):\n",
+ " i, j = pixel_position\n",
+ "\n",
+ " # convert pixel position to real space micron position\n",
+ "\n",
+ " a = (j - grains[0].recon.shape[0]//2) * ds.ystep\n",
+ " b = (i - grains[0].recon.shape[0]//2) * ds.ystep\n",
+ "\n",
+ " dty_calc = a*grain.cosomega - b*grain.sinomega + grain.y0 # microns\n",
+ "\n",
+ " mask = np.abs(grain.dty - dty_calc) < n_ysteps_tol*ds.ystep\n",
+ "\n",
+ " gve = np.transpose(grain.gve_2d_strong[:, mask])\n",
+ "\n",
+ " ubifit = grain.ubi.copy()\n",
+ " _ = cImageD11.score_and_refine(ubifit, gve, peak_assign_tol)\n",
+ "\n",
+ " return ubifit\n",
+ "\n",
+ " pixel_positions = np.argwhere(grain.label_mask == True)\n",
+ "\n",
+ " with concurrent.futures.ThreadPoolExecutor(max_workers = max(1, nthreads-1)) as pool:\n",
+ " pixel_ubis = pool.map(refine_ubis, pixel_positions)\n",
+ "\n",
+ " for pixel_position, ubi in zip(pixel_positions, pixel_ubis):\n",
+ " per_pixel_ubis[tuple(pixel_position)] = (ginc, ubi)\n",
+ " \n",
+ " ubi_map = np.empty((grains[0].recon.shape + (3,3)))\n",
+ " ubi_map.fill(np.nan)\n",
+ " for pxi in tqdm(range(grains[0].recon.shape[0])):\n",
+ " for pxj in range(grains[0].recon.shape[1]):\n",
+ " try:\n",
+ " graininc, this_ubi = per_pixel_ubis[pxi, pxj]\n",
+ " ubi_map[pxi, pxj, :, :] = this_ubi\n",
+ " except KeyError:\n",
+ " continue\n",
+ " \n",
+ " pixel_grain_lut = {}\n",
+ " for i in tqdm(range(grains[0].recon.shape[0])):\n",
+ " for j in range(grains[0].recon.shape[1]):\n",
+ " this_ubi = ubi_map[i, j]\n",
+ " if not np.isnan(this_ubi[0,0]):\n",
+ " this_grain = ImageD11.grain.grain(this_ubi)\n",
+ " pixel_grain_lut[i, j] = this_grain\n",
+ " \n",
+ " eps_map = np.empty((grains[0].recon.shape + (3,3)))\n",
+ " eps_map.fill(np.nan)\n",
+ " for i in tqdm(range(grains[0].recon.shape[0])):\n",
+ " for j in range(grains[0].recon.shape[1]):\n",
+ " try:\n",
+ " this_grain = pixel_grain_lut[i, j]\n",
+ " this_ref_gid = grain_labels_array[i, j]\n",
+ " this_ref_grain = grain_lut[this_ref_gid]\n",
+ " this_eps = this_grain.eps_sample_matrix(dzero_cell=this_ref_grain.unitcell)\n",
+ " eps_map[i, j] = this_eps\n",
+ " except KeyError:\n",
+ " continue\n",
+ " \n",
+ " misorientation_map = np.empty((grains[0].recon.shape))\n",
+ " misorientation_map.fill(np.nan)\n",
+ " for i in tqdm(range(grains[0].recon.shape[0])):\n",
+ " for j in range(grains[0].recon.shape[1]):\n",
+ " try:\n",
+ " this_grain = pixel_grain_lut[i, j]\n",
+ " this_ref_gid = grain_labels_array[i, j]\n",
+ " this_ref_grain = grain_lut[this_ref_gid]\n",
+ " this_misorien = np.min(Umis(this_ref_grain.U, this_grain.U, 7), axis=0)[1]\n",
+ " misorientation_map[i, j] = this_misorien\n",
+ " except KeyError:\n",
+ " continue\n",
+ " \n",
+ " ipf_z_col_map = np.empty((grains[0].recon.shape + (3,)))\n",
+ " ipf_z_col_map.fill(np.nan)\n",
+ " for i in tqdm(range(grains[0].recon.shape[0])):\n",
+ " for j in 
range(grains[0].recon.shape[1]):\n", + " try:\n", + " this_grain = pixel_grain_lut[i, j]\n", + " this_ipf_z_col = utils.hkl_to_color_cubic(utils.crystal_direction_cubic(this_grain.ubi, (0, 0, 1)))\n", + " ipf_z_col_map[i, j] = this_ipf_z_col\n", + " except KeyError:\n", + " continue\n", + " \n", + " save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map)\n", + "\n", + "print(\"Done!\")" + ] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/4_S3DXRD_plot_both_phases.ipynb b/ImageD11/nbGui/S3DXRD/4_S3DXRD_plot_both_phases.ipynb index d740e48b..d10c50fd 100755 --- a/ImageD11/nbGui/S3DXRD/4_S3DXRD_plot_both_phases.ipynb +++ b/ImageD11/nbGui/S3DXRD/4_S3DXRD_plot_both_phases.ipynb @@ -7,7 +7,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 20/02/2024" + "## Date: 26/02/2024" ] }, { @@ -62,7 +62,7 @@ "from ImageD11.grain import grain\n", "from ImageD11 import cImageD11\n", "\n", - "import utils" + "from ImageD11.nbGui import nb_utils as utils" ] }, { @@ -168,9 +168,9 @@ " return grains, raw_intensity_array, grain_labels_array\n", "\n", "\n", - "def read_carbide_grains(ds):\n", - " ds.grainsfile_carbides = os.path.join(ds.analysispath, ds.dsname + '_grains_carbides.h5')\n", - " with h5py.File(ds.grainsfile_carbides, 'r') as hin:\n", + "def read_gold_grains(ds):\n", + " ds.grainsfile_gold = os.path.join(ds.analysispath, ds.dsname + '_grains_Au.h5')\n", + " with h5py.File(ds.grainsfile_gold, 'r') as hin:\n", " grp = hin['slice_recon']\n", " \n", " raw_intensity_array = grp['intensity'][:]\n", @@ -207,8 +207,8 @@ }, "outputs": [], "source": [ - "grains_Ni, raw_intensity_array_Ni, grain_labels_array_Ni = read_grains(ds)\n", - "grains_C, raw_intensity_array_C, grain_labels_array_C = read_carbide_grains(ds)" + "grains_Fe, raw_intensity_array_Fe, grain_labels_array_Fe = read_grains(ds)\n", + "grains_Au, raw_intensity_array_Au, grain_labels_array_Au = read_gold_grains(ds)" ] }, { @@ -220,12 +220,12 @@ }, "outputs": [], "source": [ - "for grain in tqdm(grains_Ni):\n", + "for grain in tqdm(grains_Fe):\n", " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", " \n", - "for grain in tqdm(grains_C):\n", + "for grain in tqdm(grains_Au):\n", " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)" @@ -240,27 +240,27 @@ }, "outputs": [], "source": [ - "red_Ni = np.zeros_like(grains_Ni[0].recon)\n", - "grn_Ni = np.zeros_like(grains_Ni[0].recon)\n", - "blu_Ni = np.zeros_like(grains_Ni[0].recon)\n", - "\n", - "for g in tqdm(grains_Ni):\n", - " red_Ni[grain_labels_array_Ni == g.gid] = g.rgb_z[0]\n", - " grn_Ni[grain_labels_array_Ni == g.gid] = g.rgb_z[1]\n", - " blu_Ni[grain_labels_array_Ni == g.gid] = g.rgb_z[2]\n", + "red_Fe = np.zeros_like(grains_Fe[0].recon)\n", + "grn_Fe = np.zeros_like(grains_Fe[0].recon)\n", + "blu_Fe = np.zeros_like(grains_Fe[0].recon)\n", + "\n", + "for g in tqdm(grains_Fe):\n", + " red_Fe[grain_labels_array_Fe == g.gid] = g.rgb_z[0]\n", + " grn_Fe[grain_labels_array_Fe == g.gid] = g.rgb_z[1]\n", + " blu_Fe[grain_labels_array_Fe == g.gid] = g.rgb_z[2]\n", " \n", - 
"rgb_Ni = np.transpose((red_Ni, grn_Ni, blu_Ni), axes=(1, 2, 0))\n", + "rgb_Fe = np.transpose((red_Fe, grn_Fe, blu_Fe), axes=(1, 2, 0))\n", "\n", - "red_C = np.zeros_like(grains_C[0].recon)\n", - "grn_C = np.zeros_like(grains_C[0].recon)\n", - "blu_C = np.zeros_like(grains_C[0].recon)\n", + "red_Au = np.zeros_like(grains_Au[0].recon)\n", + "grn_Au = np.zeros_like(grains_Au[0].recon)\n", + "blu_Au = np.zeros_like(grains_Au[0].recon)\n", "\n", - "for g in tqdm(grains_C):\n", - " red_C[grain_labels_array_C == g.gid] = g.rgb_z[0]\n", - " grn_C[grain_labels_array_C == g.gid] = g.rgb_z[1]\n", - " blu_C[grain_labels_array_C == g.gid] = g.rgb_z[2]\n", + "for g in tqdm(grains_Au):\n", + " red_Au[grain_labels_array_Au == g.gid] = g.rgb_z[0]\n", + " grn_Au[grain_labels_array_Au == g.gid] = g.rgb_z[1]\n", + " blu_Au[grain_labels_array_Au == g.gid] = g.rgb_z[2]\n", " \n", - "rgb_C = np.transpose((red_C, grn_C, blu_C), axes=(1, 2, 0))" + "rgb_Au = np.transpose((red_Au, grn_Au, blu_Au), axes=(1, 2, 0))" ] }, { @@ -271,8 +271,8 @@ "outputs": [], "source": [ "fig, axs = plt.subplots(1, 2, constrained_layout=True, sharex=True, sharey=True)\n", - "axs[0].imshow(rgb_Ni)\n", - "axs[1].imshow(rgb_C)\n", + "axs[0].imshow(rgb_Fe)\n", + "axs[1].imshow(rgb_Au)\n", "plt.show()" ] }, @@ -285,7 +285,7 @@ }, "outputs": [], "source": [ - "rgb_C_alpha = np.append(rgb_C, np.zeros((rgb_C.shape[0], rgb_C.shape[1], 1)), axis=2)" + "rgb_Au_alpha = np.append(rgb_Au, np.zeros((rgb_Au.shape[0], rgb_Au.shape[1], 1)), axis=2)" ] }, { @@ -297,7 +297,7 @@ }, "outputs": [], "source": [ - "rgb_C_alpha[rgb_C_alpha[:, :, 0] != 0, 3] = 1" + "rgb_Au_alpha[rgb_Au_alpha[:, :, 0] != 0, 3] = 1" ] }, { @@ -309,7 +309,7 @@ }, "outputs": [], "source": [ - "(raw_intensity_array_C != 0).astype(float)" + "(raw_intensity_array_Au != 0).astype(float)" ] }, { @@ -322,10 +322,10 @@ "outputs": [], "source": [ "fig, axs = plt.subplots(1, 2, constrained_layout=True, sharex=True, sharey=True)\n", - "axs[0].imshow(raw_intensity_array_Ni, cmap=\"viridis\")\n", - "axs[0].imshow(rgb_C_alpha)\n", - "axs[1].imshow(raw_intensity_array_Ni, cmap=\"viridis\")\n", - "axs[0].set_title(\"Ni intensity map with carbides overlaid\")\n", + "axs[0].imshow(raw_intensity_array_Fe, cmap=\"viridis\")\n", + "axs[0].imshow(rgb_Au_alpha)\n", + "axs[1].imshow(raw_intensity_array_Fe, cmap=\"viridis\")\n", + "axs[0].set_title(\"Ni intensity map with Aus overlaid\")\n", "axs[1].set_title(\"Ni intensity map\")\n", "plt.show()" ] @@ -340,9 +340,9 @@ "outputs": [], "source": [ "fig, ax = plt.subplots(constrained_layout=True, sharex=True, sharey=True)\n", - "ax.imshow(rgb_Ni)\n", - "ax.imshow(raw_intensity_array_C, alpha=(raw_intensity_array_C != 0).astype(float), cmap=\"grey\")\n", - "ax.set_title(\"Ni IPF Z colours with white carbides\")\n", + "ax.imshow(rgb_Fe)\n", + "ax.imshow(raw_intensity_array_Au, alpha=(raw_intensity_array_Au != 0).astype(float), cmap=\"grey\")\n", + "ax.set_title(\"Fe IPF Z colours with white Au\")\n", "plt.show()" ] }, @@ -357,21 +357,21 @@ "source": [ "# orientation relationship stuff\n", "\n", - "# pick a carbide grain\n", + "# pick a Au grain\n", "\n", - "carbide_grain = grains_C[500]\n", + "Au_grain = grains_Au[25]\n", "\n", "# find out where it is in the sample\n", "\n", "fig, ax = plt.subplots(constrained_layout=True, sharex=True, sharey=True)\n", - "ax.imshow(rgb_Ni)\n", - "ax.imshow(carbide_grain.recon, alpha=(carbide_grain.recon > 0.05).astype(float))\n", - "ax.set_title(\"Ni IPF Z colours with single carbide\")\n", + "ax.imshow(rgb_Fe)\n", + 
"ax.imshow(Au_grain.recon, alpha=(Au_grain.recon > 0.05).astype(float))\n", + "ax.set_title(\"Ni IPF Z colours with single Au\")\n", "\n", "vr = 50\n", "\n", - "ax.set_xlim(carbide_grain.translation[0]-vr, carbide_grain.translation[0]+vr)\n", - "ax.set_ylim(carbide_grain.translation[1]+vr, carbide_grain.translation[1]-vr)\n", + "ax.set_xlim(Au_grain.translation[0]-vr, Au_grain.translation[0]+vr)\n", + "ax.set_ylim(Au_grain.translation[1]+vr, Au_grain.translation[1]-vr)\n", "plt.show()" ] }, @@ -384,16 +384,16 @@ }, "outputs": [], "source": [ - "# find Ni grain surrounding carbide grain\n", + "# find Ni grain surrounding Au grain\n", "\n", - "carbide_grain_pos_nearest_px = carbide_grain.translation.copy()\n", - "carbide_grain_pos_nearest_px = np.round(carbide_grain_pos_nearest_px).astype(int)\n", + "Au_grain_pos_nearest_px = Au_grain.translation.copy()\n", + "Au_grain_pos_nearest_px = np.round(Au_grain_pos_nearest_px).astype(int)\n", "\n", - "print(carbide_grain_pos_nearest_px)\n", + "print(Au_grain_pos_nearest_px)\n", "\n", - "Ni_grain_gid = grain_labels_array_Ni[carbide_grain_pos_nearest_px[1], carbide_grain_pos_nearest_px[0]].astype(int)\n", + "Fe_grain_gid = grain_labels_array_Fe[Au_grain_pos_nearest_px[1], Au_grain_pos_nearest_px[0]].astype(int)\n", "\n", - "print(Ni_grain_gid)" + "print(Fe_grain_gid)" ] }, { @@ -407,17 +407,17 @@ "source": [ "# confirm Ni grain selection\n", "\n", - "# rgb_Ni_masked = rgb_\n", + "# rgb_Fe_masked = rgb_\n", "\n", "fig, ax = plt.subplots(constrained_layout=True, sharex=True, sharey=True)\n", - "ax.imshow(grain_labels_array_Ni == Ni_grain_gid, cmap=\"grey\")\n", - "ax.imshow(carbide_grain.recon, alpha=(carbide_grain.recon > 0.05).astype(float))\n", - "ax.set_title(\"Ni IPF Z colours with single carbide\")\n", + "ax.imshow(grain_labels_array_Fe == Fe_grain_gid, cmap=\"grey\")\n", + "ax.imshow(Au_grain.recon, alpha=(Au_grain.recon > 0.05).astype(float))\n", + "ax.set_title(\"Ni IPF Z colours with single Au\")\n", "\n", "vr = 50\n", "\n", - "ax.set_xlim(carbide_grain.translation[0]-vr, carbide_grain.translation[0]+vr)\n", - "ax.set_ylim(carbide_grain.translation[1]+vr, carbide_grain.translation[1]-vr)\n", + "ax.set_xlim(Au_grain.translation[0]-vr, Au_grain.translation[0]+vr)\n", + "ax.set_ylim(Au_grain.translation[1]+vr, Au_grain.translation[1]-vr)\n", "plt.show()" ] }, @@ -430,14 +430,14 @@ }, "outputs": [], "source": [ - "# we now have a carbide grain and the nickel grain that it's inside\n", + "# we now have a Au grain and the Fe grain that it's inside\n", "\n", "# look for orientation relationships\n", "\n", - "nickel_grain = [grain for grain in grains_Ni if grain.gid == Ni_grain_gid][0]\n", + "Fe_grain = [grain for grain in grains_Fe if grain.gid == Fe_grain_gid][0]\n", "\n", - "print(nickel_grain.U)\n", - "print(carbide_grain.U)" + "print(Fe_grain.U)\n", + "print(Au_grain.U)" ] }, { @@ -448,12 +448,12 @@ "outputs": [], "source": [ "# https://doi.org/10.1016/S1005-0302(12)60169-8\n", - "# we are looking for MC carbides\n", + "# we are looking for MC Aus\n", "# 4 different types of OR present\n", - "# A {001} carbide // {001} matrix , <100> carbide // <100> matrix\n", - "# B [001] carbide // [310] matrix , (020) carbide // (1-31) matrix\n", - "# C [110] carbide // [310] matrix , (-11-1) carbide // (002) matrix\n", - "# D [001] carbide // [001] matrix , (-260) carbide // (020) matrix\n", + "# A {001} Au // {001} matrix , <100> Au // <100> matrix\n", + "# B [001] Au // [310] matrix , (020) Au // (1-31) matrix\n", + "# C [110] Au // [310] matrix , (-11-1) 
Au // (002) matrix\n", + "# D [001] Au // [001] matrix , (-260) Au // (020) matrix\n", "\n", "# let's check A" ] @@ -469,11 +469,11 @@ "source": [ "from xfab.parameters import read_par_file\n", "\n", - "par_file_ni = './nickel.par'\n", - "par_file_c = './carbide.par'\n", + "par_file_Fe = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "par_file_Au = os.path.join(processed_data_root_dir, 'Au.par')\n", "\n", - "pars_ni = read_par_file(par_file_ni)\n", - "pars_c = read_par_file(par_file_c)" + "pars_Fe = read_par_file(par_file_Fe)\n", + "pars_Au = read_par_file(par_file_Au)" ] }, { @@ -485,8 +485,8 @@ }, "outputs": [], "source": [ - "ucell_ni = [pars_ni.get(\"cell__a\"), pars_ni.get(\"cell__b\"), pars_ni.get(\"cell__c\"), pars_ni.get(\"cell_alpha\"), pars_ni.get(\"cell_beta\"), pars_ni.get(\"cell_gamma\")]\n", - "ucell_c = [pars_c.get(\"cell__a\"), pars_c.get(\"cell__b\"), pars_c.get(\"cell__c\"), pars_c.get(\"cell_alpha\"), pars_c.get(\"cell_beta\"), pars_c.get(\"cell_gamma\")]" + "ucell_Fe = [pars_Fe.get(\"cell__a\"), pars_Fe.get(\"cell__b\"), pars_Fe.get(\"cell__c\"), pars_Fe.get(\"cell_alpha\"), pars_Fe.get(\"cell_beta\"), pars_Fe.get(\"cell_gamma\")]\n", + "ucell_Au = [pars_Au.get(\"cell__a\"), pars_Au.get(\"cell__b\"), pars_Au.get(\"cell__c\"), pars_Au.get(\"cell_alpha\"), pars_Au.get(\"cell_beta\"), pars_Au.get(\"cell_gamma\")]" ] }, { @@ -498,7 +498,7 @@ }, "outputs": [], "source": [ - "ucell_c" + "ucell_Au" ] }, { @@ -516,29 +516,29 @@ "from orix.quaternion import Orientation, Rotation, symmetry\n", "from orix.vector import Miller, Vector3d\n", "\n", - "struc_Ni = Structure(lattice=Lattice(*ucell_ni))\n", - "struc_C = Structure(lattice=Lattice(*ucell_c))\n", + "struc_Fe = Structure(lattice=Lattice(*ucell_Fe))\n", + "struc_Au = Structure(lattice=Lattice(*ucell_Au))\n", "\n", - "cubic_Ni = Phase(point_group=\"m-3m\", structure=struc_Ni)\n", - "cubic_C = Phase(point_group=\"m-3m\", structure=struc_C)\n", + "cubic_Fe = Phase(point_group=\"m-3m\", structure=struc_Fe)\n", + "cubic_Au = Phase(point_group=\"m-3m\", structure=struc_Au)\n", "\n", "# crystal frame:\n", "\n", - "matrix_plane = Miller(hkl=[0, 0, 1], phase=cubic_Ni)\n", - "carbide_plane = Miller(hkl=[0, 0, 1], phase=cubic_C)\n", + "matrix_plane = Miller(hkl=[2, 2, 0], phase=cubic_Fe)\n", + "Au_plane = Miller(hkl=[0, 0, 2], phase=cubic_Au)\n", "\n", - "matrix_dir = Miller(uvw=[1, 0, 0], phase=cubic_Ni)\n", - "carbide_dir = Miller(hkl=[1, 0, 0], phase=cubic_C)\n", + "matrix_dir = Miller(uvw=[1, 0, 0], phase=cubic_Fe)\n", + "Au_dir = Miller(hkl=[1, 0, 0], phase=cubic_Au)\n", "\n", "# orientations in Orix are sample-to-crystal, so we have to invert\n", - "o_Ni = Orientation.from_matrix(nickel_grain.U.T)\n", - "o_C = Orientation.from_matrix(carbide_grain.U.T)\n", + "o_Fe = Orientation.from_matrix(Fe_grain.U.T)\n", + "o_Au = Orientation.from_matrix(Au_grain.U.T)\n", "\n", - "matrix_plane_lab = ~o_Ni * matrix_plane.symmetrise(unique=True)\n", - "carbide_plane_lab = ~o_C * carbide_plane.symmetrise(unique=True)\n", + "matrix_plane_lab = ~o_Fe * matrix_plane.symmetrise(unique=True)\n", + "Au_plane_lab = ~o_Au * Au_plane.symmetrise(unique=True)\n", "\n", - "matrix_dir_lab = ~o_Ni * matrix_dir.symmetrise(unique=True)\n", - "carbide_dir_lab = ~o_C * carbide_dir.symmetrise(unique=True)" + "matrix_dir_lab = ~o_Fe * matrix_dir.symmetrise(unique=True)\n", + "Au_dir_lab = ~o_Au * Au_dir.symmetrise(unique=True)" ] }, { @@ -552,32 +552,32 @@ "source": [ "fig, axs = plt.subplots(1, 2, constrained_layout=True, sharex=True, sharey=True)\n", 
"\n", - "axs[0].imshow(grain_labels_array_Ni == Ni_grain_gid, cmap=\"grey\")\n", - "axs[0].imshow(carbide_grain.recon, alpha=(carbide_grain.recon > 0.05).astype(float))\n", + "axs[0].imshow(grain_labels_array_Fe == Fe_grain_gid, cmap=\"grey\")\n", + "axs[0].imshow(Au_grain.recon, alpha=(Au_grain.recon > 0.05).astype(float))\n", "axs[0].set_title(\"Directions\")\n", "\n", - "axs[1].imshow(grain_labels_array_Ni == Ni_grain_gid, cmap=\"grey\")\n", - "axs[1].imshow(carbide_grain.recon, alpha=(carbide_grain.recon > 0.05).astype(float))\n", + "axs[1].imshow(grain_labels_array_Fe == Fe_grain_gid, cmap=\"grey\")\n", + "axs[1].imshow(Au_grain.recon, alpha=(Au_grain.recon > 0.05).astype(float))\n", "axs[1].set_title(\"Planes\")\n", "\n", "vr = 100\n", "\n", - "axs[0].set_xlim(carbide_grain.translation[0]-vr, carbide_grain.translation[0]+vr)\n", - "axs[0].set_ylim(carbide_grain.translation[1]+vr, carbide_grain.translation[1]-vr)\n", + "axs[0].set_xlim(Au_grain.translation[0]-vr, Au_grain.translation[0]+vr)\n", + "axs[0].set_ylim(Au_grain.translation[1]+vr, Au_grain.translation[1]-vr)\n", "\n", "ars = 5\n", "txs = (50*5)*1/ars\n", "\n", - "for inc in range(len(carbide_dir.symmetrise(unique=True).coordinates)):\n", - " arrow_centre = carbide_grain.translation[0:2]\n", - " axs[0].quiver(arrow_centre[0], arrow_centre[1], carbide_dir_lab.coordinates[inc, 0], carbide_dir_lab.coordinates[inc, 1], color=\"r\", scale=ars)\n", - " axs[0].annotate(np.array_str(carbide_dir.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + carbide_dir_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"red\")\n", + "for inc in range(len(Au_dir.symmetrise(unique=True).coordinates)):\n", + " arrow_centre = Au_grain.translation[0:2]\n", + " axs[0].quiver(arrow_centre[0], arrow_centre[1], Au_dir_lab.coordinates[inc, 0], Au_dir_lab.coordinates[inc, 1], color=\"r\", scale=ars)\n", + " axs[0].annotate(np.array_str(Au_dir.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + Au_dir_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"red\")\n", "\n", "ars = 5\n", "txs = (50*5)*1/ars\n", "\n", "for inc in range(len(matrix_dir.symmetrise(unique=True).coordinates)):\n", - " arrow_centre = carbide_grain.translation[0:2] + [-20, -20]\n", + " arrow_centre = Au_grain.translation[0:2] + [-20, -20]\n", " axs[0].quiver(arrow_centre[0], arrow_centre[1], matrix_dir_lab.coordinates[inc, 0], matrix_dir_lab.coordinates[inc, 1], color=\"b\", scale=ars)\n", " axs[0].annotate(np.array_str(matrix_dir.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + matrix_dir_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"blue\")\n", " \n", @@ -586,23 +586,41 @@ "ars = 5\n", "txs = (50*5)*1/ars\n", "\n", - "for inc in range(len(carbide_dir.symmetrise(unique=True).coordinates)):\n", - " arrow_centre = carbide_grain.translation[0:2]\n", - " axs[1].quiver(arrow_centre[0], arrow_centre[1], carbide_plane_lab.coordinates[inc, 0], carbide_plane_lab.coordinates[inc, 1], color=\"r\", scale=ars)\n", - " axs[1].annotate(np.array_str(carbide_dir.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + carbide_plane_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"red\")\n", + "for inc in range(len(Au_dir.symmetrise(unique=True).coordinates)):\n", + " arrow_centre = Au_grain.translation[0:2]\n", + " axs[1].quiver(arrow_centre[0], 
arrow_centre[1], Au_plane_lab.coordinates[inc, 0], Au_plane_lab.coordinates[inc, 1], color=\"r\", scale=ars)\n", + " axs[1].annotate(np.array_str(Au_plane.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + Au_plane_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"red\")\n", "\n", "ars = 5\n", "txs = (50*5)*1/ars\n", "\n", "for inc in range(len(matrix_dir.symmetrise(unique=True).coordinates)):\n", - " arrow_centre = carbide_grain.translation[0:2] + [-20, -20]\n", + " arrow_centre = Au_grain.translation[0:2] + [-20, -20]\n", " axs[1].quiver(arrow_centre[0], arrow_centre[1], matrix_plane_lab.coordinates[inc, 0], matrix_plane_lab.coordinates[inc, 1], color=\"b\", scale=ars)\n", - " axs[1].annotate(np.array_str(matrix_dir.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + matrix_plane_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"blue\")\n", + " axs[1].annotate(np.array_str(matrix_plane.symmetrise(unique=True).coordinates[inc], precision=None, suppress_small=True), arrow_centre + matrix_plane_lab.coordinates[inc, 0:2]*[1, -1]*txs, ha=\"center\", va=\"center\", c=\"blue\")\n", "\n", "\n", "plt.show()" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "36049644-d04e-4477-9bb3-1efd1519b350", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "from scipy.spatial.transform import Rotation as R\n", + "import ImageD11.sym_u\n", + "cubic = ImageD11.sym_u.cubic()\n", + "for op in cubic.group:\n", + " v = R.from_matrix((Fe_grain.U.T @ op.T) @ Au_grain.U).as_rotvec( degrees=True)\n", + " ang = np.linalg.norm(v)\n", + " print(ang, v/ang)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -613,38 +631,17 @@ "outputs": [], "source": [ "# matrix is 1\n", - "# carbide is 2\n", + "# Au is 2\n", "\n", "# define an orientation matrix for each of the ORs\n", "from orix.quaternion import Misorientation, Orientation\n", "\n", - "# A {001} carbide // {001} matrix , <100> carbide // <100> matrix\n", - "\n", - "matrix_dirs = Miller(hkl=[[0, 0, 1], [0, 0, 1]], phase=cubic_Ni)\n", - "carbide_dirs = Miller(hkl=[[1, 0, 0], [1, 0, 0]], phase=cubic_C)\n", + "# A {001} Au // {001} matrix , <100> Au // <100> matrix\n", "\n", - "misorien_A = Misorientation.from_align_vectors(carbide_dirs, matrix_dirs)\n", + "matrix_dirs = Miller(hkl=[0, 0, 2], phase=cubic_Fe)\n", + "Au_dirs = Miller(hkl=[2, 2, 0], phase=cubic_Au)\n", "\n", - "# B [001] carbide // [310] matrix , (020) carbide // (1-31) matrix\n", - "\n", - "matrix_dirs = Miller(hkl=[[3, 1, 0], [1, -3, 1]], phase=cubic_Ni)\n", - "carbide_dirs = Miller(hkl=[[0, 0, 1], [0, 2, 0]], phase=cubic_C)\n", - "\n", - "misorien_B = Misorientation.from_align_vectors(carbide_dirs, matrix_dirs)\n", - "\n", - "# C [110] carbide // [310] matrix , (-11-1) carbide // (002) matrix\n", - "\n", - "matrix_dirs = Miller(hkl=[[3, 1, 0], [0, 0, 2]], phase=cubic_Ni)\n", - "carbide_dirs = Miller(hkl=[[1, 1, 0], [-1, 1, -1]], phase=cubic_C)\n", - "\n", - "misorien_C = Misorientation.from_align_vectors(carbide_dirs, matrix_dirs)\n", - "\n", - "# D [001] carbide // [001] matrix , (-260) carbide // (020) matrix\n", - "\n", - "matrix_dirs = Miller(hkl=[[0, 0, 1], [0, 2, 0]], phase=cubic_Ni)\n", - "carbide_dirs = Miller(hkl=[[0, 0, 1], [-2, 6, 0]], phase=cubic_C)\n", - "\n", - "misorien_D = Misorientation.from_align_vectors(carbide_dirs, matrix_dirs)" + "misorien_A = Misorientation.from_align_vectors(Au_dirs, matrix_dirs)" ] }, { @@ 
-656,30 +653,36 @@ }, "outputs": [], "source": [ + "\n", + "\n", "# print(misorien_B)\n", "\n", - "for carbide_grain in grains_C[500:501]:\n", - " carbide_grain_pos_nearest_px = carbide_grain.translation.copy()\n", - " carbide_grain_pos_nearest_px = np.round(carbide_grain_pos_nearest_px).astype(int)\n", - " Ni_grain_gid = grain_labels_array_Ni[carbide_grain_pos_nearest_px[1], carbide_grain_pos_nearest_px[0]].astype(int)\n", + "min_misoriens = []\n", + "\n", + "for Au_grain in grains_Au[:]:\n", + " Au_grain_pos_nearest_px = Au_grain.translation.copy()\n", + " Au_grain_pos_nearest_px = np.round(Au_grain_pos_nearest_px).astype(int)\n", + " Fe_grain_gid = grain_labels_array_Fe[Au_grain_pos_nearest_px[1], Au_grain_pos_nearest_px[0]].astype(int)\n", " try:\n", - " nickel_grain = [grain for grain in grains_Ni if grain.gid == Ni_grain_gid][0]\n", - " o_Ni = Orientation.from_matrix(nickel_grain.U.T, symmetry=cubic_Ni.point_group)\n", - " o_C = Orientation.from_matrix(carbide_grain.U.T, symmetry=cubic_C.point_group)\n", + " Fe_grain = [grain for grain in grains_Fe if grain.gid == Fe_grain_gid][0]\n", + " o_Fe = Orientation.from_matrix(Fe_grain.U.T, symmetry=cubic_Fe.point_group)\n", + " o_Au = Orientation.from_matrix(Au_grain.U.T, symmetry=cubic_Au.point_group)\n", " \n", - " misorien_actual = Misorientation(o_C * (~o_Ni), symmetry=(o_C.symmetry, o_Ni.symmetry))\n", + " misorien_actual = Misorientation(o_Fe * (~o_Au), symmetry=(o_Au.symmetry, o_Fe.symmetry))\n", " misorien_actual = misorien_actual.map_into_symmetry_reduced_zone()\n", " \n", - " misorien_ref = misorien_C\n", + " misorien_ref = misorien_A\n", " misorien_ref = misorien_ref.map_into_symmetry_reduced_zone()\n", " \n", - " misorien_diff = Misorientation(misorien_actual * (~misorien_ref), symmetry=misorien_actual.symmetry)\n", + " misorien_diff = Misorientation(misorien_ref * (~misorien_actual), symmetry=misorien_actual.symmetry)\n", " \n", " misorien_diff = misorien_diff.map_into_symmetry_reduced_zone()\n", " \n", " min_misorien = np.rad2deg(misorien_diff.angle)\n", " \n", - " print(nickel_grain.gid, carbide_grain.gid, min_misorien)\n", + " print(Fe_grain.gid, Au_grain.gid, min_misorien)\n", + " \n", + " min_misoriens.append(min_misorien)\n", " except IndexError:\n", " continue" ] @@ -687,13 +690,73 @@ { "cell_type": "code", "execution_count": null, - "id": "e82f0c6a-a688-4e7b-9a41-638e172dbab3", + "id": "95a14487-809f-4714-953d-4d218d62fbee", "metadata": { "tags": [] }, "outputs": [], "source": [ - "misorien_C.to_matrix()" + "np.rad2deg(misorien_actual.angle)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6eb1e67b-cc93-4b01-a3eb-7dbe18e42208", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "misorien_actual.axis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e43b838a-1977-4196-9c4b-485e46bbcc1f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "o_Au.to_euler(\"mtex\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "93102d3f-6f1e-40c1-b7fe-4ab48a514b62", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "np.savetxt(os.path.join(processed_data_root_dir, 'Fe_grain_matrix.txt'), o_Fe.to_matrix()[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "81eba9dd-4807-4335-afe7-fb4a5e1f1b9d", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "np.savetxt(os.path.join(processed_data_root_dir, 'Au_grain_matrix.txt'), o_Au.to_matrix()[0])" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "fe6c906b-ad2c-4711-bd6b-a9f9dbe2c3fb", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "np.savetxt(os.path.join(processed_data_root_dir, 'OR_matrix.txt'), misorien_A.to_matrix()[0])" ] }, { @@ -702,6 +765,33 @@ "id": "3c747456-eaac-4c91-b46c-50b8207fd13a", "metadata": {}, "outputs": [], + "source": [ + "fig, ax = plt.subplots()\n", + "\n", + "ax.hist(np.array(min_misoriens), bins=50)\n", + "# ax.set_xlim(0,10)\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a6c55fd6-f828-4deb-a17c-3c4d89bc3ee8", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "min_misoriens" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c57ba658-173f-4b99-ae72-111f6f72dbdf", + "metadata": {}, + "outputs": [], "source": [] } ], diff --git a/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb b/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb new file mode 100644 index 00000000..ea1a944c --- /dev/null +++ b/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb @@ -0,0 +1,340 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "a545dd7f-4140-4abc-9c00-6edd96a2e501", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", + "# Written by Haixing Fang, Jon Wright and James Ball\n", + "## Date: 26/02/2024" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "92baa2cd-4e85-4998-979b-88ef0a674c48", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# There is a bug with the current version of ImageD11 in the site-wide Jupyter env.\n", + "# This has been fixed here: https://github.com/FABLE-3DXRD/ImageD11/commit/4af88b886b1775585e868f2339a0eb975401468f\n", + "# Until a new release has been made and added to the env, we need to get the latest version of ImageD11 from GitHub\n", + "# Put it in your home directory\n", + "# USER: Change the path below to point to your local copy of ImageD11:\n", + "\n", + "import os\n", + "\n", + "username = os.environ.get(\"USER\")\n", + "\n", + "id11_code_path = f\"/home/esrf/{username}/Code/ImageD11\"\n", + "\n", + "import sys\n", + "\n", + "sys.path.insert(0, id11_code_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "74e066b0-ae13-46e9-81b0-a11ea524a469", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# import functions we need\n", + "\n", + "import concurrent.futures\n", + "import timeit\n", + "import glob\n", + "import pprint\n", + "from shutil import rmtree\n", + "import time\n", + "from functools import partial\n", + "\n", + "import matplotlib\n", + "%matplotlib ipympl\n", + "\n", + "import h5py\n", + "from tqdm.notebook import tqdm\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import ImageD11.columnfile\n", + "# from ImageD11.sinograms.dataset import DataSet\n", + "from ImageD11.sinograms import properties, roi_iradon, dataset\n", + "# from ImageD11.blobcorrector import eiger_spatial\n", + "from ImageD11.grain import grain\n", + "\n", + "import ImageD11.nbGui.nb_utils as utils\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ac85f820-7358-4ab6-87bf-fa386793cedb", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "total 1180\n", + "drwxr-x--- 19 opid11 id11 4096 Dec 11 13:59 FeAu_0p5_tR\n", + "drwxr-x--- 15 opid11 id11 
4096 Dec 11 16:43 FeAu_tR\n", + "drwxr-x--- 4 opid11 id11 4096 Dec 11 16:57 CeO2_ff_after_tR\n", + "drwxr-x--- 20 opid11 id11 4096 Dec 12 09:42 FeAu_0p5_tR_nscope\n", + "drwxr-x--- 19 opid11 id11 4096 Dec 12 15:23 FSH_steel\n", + "drwxr-x--- 7 opid11 id11 4096 Dec 12 17:44 test_furnace\n", + "drwxr-x--- 143 opid11 id11 16384 Dec 13 13:59 FSH_steel_creep\n", + "drwxr-x--- 4 opid11 id11 4096 Jan 5 15:53 FeAu_creep_no_beam\n", + "-rwxr-x--- 1 opid11 id11 1183582 Jan 15 10:11 ihma439_id11.h5\n", + "drwxr-x--- 2 opid11 id11 4096 Jan 17 16:33 __icat__\n" + ] + } + ], + "source": [ + "# NOTE: For old datasets before the new directory layout structure, we don't distinguish between RAW_DATA and PROCESSED_DATA\n", + "# In this case, use this cell to specify where your experimental folder is, and do not run the cell below\n", + "# e.g /data/visitor/ma4752/id11/20210513\n", + "\n", + "### USER: specify your experimental directory\n", + "\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", + "\n", + "!ls -lrt {rawdata_path}\n", + "\n", + "### USER: specify where you want your processed data to go\n", + "\n", + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "a1ff363a-8106-45c4-b4de-f557f33c852a", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_-100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# manually override:\n", + "\n", + "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_150um\", \"top_200um\", \"top_250um\"]}" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "968d1fcd-46e7-43ad-8f09-5c5b3a639ae1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "def read_grains(ds):\n", + " with h5py.File(ds.grainsfile, 'r') as hin:\n", + " grp = hin['slice_recon']\n", + " \n", + " raw_intensity_array = grp['intensity'][:]\n", + " grain_labels_array = grp['labels'][:]\n", + " \n", + " grains_group = 'grains'\n", + " \n", + " grains = []\n", + " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", + " gg = hin[grains_group][gid_string]\n", + " ubi = gg.attrs['ubi'][:]\n", + " g = ImageD11.grain.grain(ubi)\n", + " # general grain properties\n", + " g.gid = int(gid_string)\n", + " g.translation = gg['translation'][:]\n", + " g.cen = gg.attrs['cen']\n", + " g.y0 = gg.attrs['y0']\n", + " # sinogram stuff\n", + " g.ssino = gg['ssino'][:]\n", + " g.sinoangles = gg['sinoangles'][:]\n", + " # reconstructions\n", + " g.og_recon = gg['og_recon'][:]\n", + " g.recon = gg['recon'][:]\n", + " grains.append(g)\n", + " \n", + " return grains, raw_intensity_array, grain_labels_array" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "48cc2f6e-8cf1-4ec5-b8a0-2cab2a4f44d9", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + 
"text": [ + "Processing dataset top_150um in sample FeAu_0p5_tR_nscope\n", + "Importing DataSet object\n", + "Importing grains\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8342f023d1ef4d7281376676ff63336a", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/44 [00:00 Date: Tue, 27 Feb 2024 16:28:51 +0100 Subject: [PATCH 2/8] Add batch cells to scanning notebooks --- .../S3DXRD/0_S3DXRD_segment_and_label.ipynb | 10 +- ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb | 8 +- .../S3DXRD/1_S3DXRD_index_minor_phase.ipynb | 38 +- .../nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb | 134 +- .../S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb | 1400 +++++++++++++++++ .../2_S3DXRD_sinograms_map_minor_phase.ipynb | 87 +- .../S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb | 23 +- .../5_S3DXRD_plot_multiple_slices.ipynb | 115 +- ImageD11/nbGui/nb_utils.py | 41 +- 9 files changed, 1669 insertions(+), 187 deletions(-) create mode 100644 ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb diff --git a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb index f90d754a..d9f139e1 100755 --- a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb +++ b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb @@ -350,7 +350,9 @@ "cell_type": "code", "execution_count": null, "id": "d4f70bb5-035b-48b2-9acd-39c6e3ea8666", - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many samples/datasets\n", @@ -360,7 +362,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -378,6 +380,10 @@ "\n", " samples_dict[sample] = dsets_list\n", " \n", + "# manual override:\n", + "\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n", + " \n", "# now we have our samples_dict, we can process our data:\n", "mask_path = '/data/id11/nanoscope/Eiger/eiger_mask_E-08-0173_20231127.edf'\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb index d51f2e31..8bf462f1 100755 --- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb +++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb @@ -616,7 +616,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n", @@ -626,7 +628,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_-100um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -645,7 +647,7 @@ " samples_dict[sample] = dsets_list\n", " \n", "# manual override:\n", - "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_150um\", \"top_200um\", \"top_250um\"]}\n", + "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_250um\"]}\n", " \n", "# now we have our samples_dict, we can process our data:\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb index 3a3c7f18..3e005ccb 100755 --- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb +++ 
b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb @@ -86,8 +86,7 @@ " hds[:] = ary\n", " return hds\n", "\n", - "def save_grains_minor_phase(grains, ds, phase_name='minor'):\n", - " ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + "def save_grains_minor_phase(grains, ds):\n", " with h5py.File(ds.grainsfile_minor_phase, 'w') as hout:\n", " grn = hout.create_group('grains')\n", " for g in tqdm(grains):\n", @@ -116,7 +115,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -259,6 +258,16 @@ "print(cf_strong.nrows)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "phase_name = \"Au\"\n", + "ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')" + ] + }, { "cell_type": "code", "execution_count": null, @@ -533,7 +542,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -551,10 +560,14 @@ "\n", " samples_dict[sample] = dsets_list\n", " \n", + "# manual override:\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_250um\"]}\n", + " \n", "# now we have our samples_dict, we can process our data:\n", "\n", - "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "major_phase_par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", "minor_phase_par_path = os.path.join(processed_data_root_dir, 'Au.par')\n", + "phase_name = \"Au\"\n", "\n", "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", @@ -587,10 +600,14 @@ " \n", " ds = ImageD11.sinograms.dataset.load(dset_path)\n", " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", - " if os.path.exists(ds.grainsfile):\n", + " \n", + " ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + " \n", + " if os.path.exists(ds.grainsfile_minor_phase):\n", " print(f\"Already have grains for {dataset} in sample {sample}, skipping\")\n", " continue\n", " \n", + " par_path = major_phase_par_path\n", " cf_4d = ImageD11.columnfile.colfile_from_hdf(ds.col4dfile)\n", "\n", " cf_4d.parameters.loadparameters(par_path)\n", @@ -625,12 +642,19 @@ " print(\"Storing peak data in grains\")\n", " for g in tqdm(grains):\n", " g.mask_4d = cf_strong.grain_id == g.gid\n", - " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " g.peaks_4d = cf_strong.index[g.mask_4d]\n", " \n", " save_grains_minor_phase(grains, ds)\n", "\n", "print(\"Done!\")" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb index c5d73a29..c9c6f50b 100644 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb @@ -55,6 +55,8 @@ "import matplotlib\n", "%matplotlib ipympl\n", "\n", + "from scipy.optimize import curve_fit\n", + "\n", "import 
h5py\n", "from tqdm.notebook import tqdm\n", "import numpy as np\n", @@ -100,6 +102,32 @@ " \n", " return grains\n", "\n", + "# def fit_sine_wave(x_data, y_data, frequency):\n", + "# initial_guess = (ds.ymax - ds.ymin, np.mean(y_data), 0) # Initial guess for amplitude, offset, and phase\n", + " \n", + "# def sine_function(x, amplitude, offset, phase):\n", + "# return amplitude * np.sin(2 * np.pi * frequency * x + phase) + offset\n", + "\n", + "# # Fit the sine function to the data\n", + "# popt, _ = curve_fit(sine_function, x_data, y_data, p0=initial_guess, method='trf', loss='soft_l1', max_nfev=10000)\n", + "\n", + "# # Extract fitted parameters\n", + "# amplitude_fit, offset_fit, phase_fit = popt\n", + "\n", + "# return amplitude_fit, offset_fit, phase_fit\n", + "\n", + "# def get_cen_robust(grain, ds):\n", + " \n", + "# frequency = 0.5/(ds.omax - ds.omin)\n", + "# amplitude, offset, phase = fit_sine_wave(cf_strong.omega[grain.mask_4d], cf_strong.dty[grain.mask_4d], frequency)\n", + " \n", + "# x_translation = amplitude/2 * np.sin(phase)\n", + "# y_translation = amplitude/2 * np.cos(phase)\n", + " \n", + "# grain.cen_me = offset\n", + "# grain.dx_me = x_translation\n", + "# grain.dy_me = y_translation\n", + "\n", "def map_grain_from_peaks(g, flt, ds):\n", " \"\"\"\n", " Computes sinogram\n", @@ -336,6 +364,8 @@ "def apply_manual_mask(mask_in):\n", " mask_out = mask_in.copy()\n", " \n", + " mask_out[200:, 250:] = 0\n", + " \n", "# mask_out[:8, :] = 0\n", "# mask_out[:, 87:] = 0\n", "\n", @@ -397,7 +427,7 @@ "# USER: pick a sample and a dataset you want to segment\n", "\n", "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = \"top_200um\"" + "dataset = \"top_250um\"" ] }, { @@ -545,11 +575,29 @@ }, "outputs": [], "source": [ - "for grain in tqdm(grains):\n", - " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + "for grain in grains:\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=True, nsigma=1)\n", " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)" + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain, cf_strong)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# g = grains[20]\n", + "\n", + "# fig, ax = plt.subplots()\n", + "# ax.scatter(cf_strong.omega[g.mask_4d], cf_strong.dty[g.mask_4d], c=\"b\")\n", + "# ax.scatter(cf_strong.omega[g.mask_4d], sine_function(cf_strong.omega[g.mask_4d], g.cen, g.dx, g.dy), c=\"r\")\n", + "\n", + "# plt.show()" ] }, { @@ -577,7 +625,9 @@ "source": [ "c0 = np.median([g.cen for g in grains])\n", "\n", - "print('Center of rotation in dty', c0)" + "print('Center of rotation in dty', c0)\n", + "\n", + "y0 = c0/2" ] }, { @@ -624,12 +674,13 @@ "else:\n", " ssino_to_recon = whole_sample_sino\n", "\n", - "recon = ImageD11.sinograms.roi_iradon.mlem(ssino_to_recon, \n", - " theta=ds.obincens,\n", - " workers=nthreads - 1,\n", + "recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=ds.obincens, \n", " output_size=outsize,\n", - " # projection_shifts=np.full(ssino_to_recon.shape, -c0/2),\n", - " niter=30)" + " 
projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=nthreads)" ] }, @@ -658,7 +709,7 @@ "axs[1].imshow(binary)\n", "axs[2].imshow(chull)\n", "\n", - "axs[0].set_title(\"MLEM reconstruction\")\n", + "axs[0].set_title(\"Reconstruction\")\n", "axs[1].set_title(\"Binarised threshold\")\n", "axs[2].set_title(\"Convex hull\")\n", "\n", @@ -735,12 +786,13 @@ "a = ax.ravel()\n", "x = [g.dx for g in grains]\n", "y = [g.dy for g in grains]\n", - "s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", - "a[0].scatter(x, y, s=s, c=[g.rgb_z for g in grains])\n", + "# s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", + "s = [10 for g in grains]\n", + "a[0].scatter(x, y, c=[g.rgb_z for g in grains])\n", "a[0].set(title='IPF color Z', aspect='equal')\n", - "a[1].scatter(x, y, s=s, c=[g.rgb_y for g in grains])\n", + "a[1].scatter(x, y, c=[g.rgb_y for g in grains])\n", "a[1].set(title='IPF color Y', aspect='equal')\n", - "a[2].scatter(x, y, s=s, c=[g.rgb_x for g in grains])\n", + "a[2].scatter(x, y, c=[g.rgb_x for g in grains])\n", "a[2].set(title='IPF color X', aspect='equal')\n", "a[3].scatter(x, y, c=s)\n", "a[3].set(title='Number of 4D peaks', aspect='equal')\n", @@ -870,7 +922,7 @@ "\n", "# y0 = 1.5 # for example!\n", "\n", - "y0 = c0" + "y0 = c0/2" ] }, @@ -881,9 +933,9 @@ }, "outputs": [], "source": [ - "g = grains[0]\n", + "g = grains[1]\n", "\n", - "run_iradon_id11(g, pad=pad, y0=y0, workers=20,sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)" + "run_iradon_id11(g, pad=pad, y0=y0, workers=max(nthreads, 20), sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)" ] }, @@ -894,7 +946,7 @@ }, "outputs": [], "source": [ - "g = grains[0]\n", + "g = grains[1]\n", "\n", "fig, axs = plt.subplots(1,2, figsize=(10,5))\n", "axs[0].imshow(g.recon, vmin=0)\n", @@ -1082,7 +1134,7 @@ }, "outputs": [], "source": [ - "utils.slurm_submit_and_wait(bash_script_path, 60)" + "utils.slurm_submit_and_wait(bash_script_path, 30)" ] }, @@ -1137,7 +1189,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.2)" + "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.3)" ] }, @@ -1205,7 +1257,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n", @@ -1215,7 +1269,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_-100um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -1234,7 +1288,7 @@ " samples_dict[sample] = dsets_list\n", " \n", "# manual override:\n", - "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_150um\", \"top_200um\", \"top_250um\"]}\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n", " \n", "# now we have our samples_dict, we can process our data:\n", "\n", @@ -1283,6 +1337,12 @@ " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", " continue\n", " \n", + " # check grains file for existence of slice_recon, skip if it's there\n", + " with h5py.File(ds.grainsfile, \"r\") as hin:\n", + " if \"slice_recon\" in hin.keys():\n", + " 
print(f\"Already reconstructed {dataset} in {sample}, skipping\")\n", + " continue\n", + " \n", " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", " cf_4d.parameters.loadparameters(par_path)\n", " cf_4d.updateGeometry()\n", @@ -1301,18 +1361,19 @@ " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", " \n", " for grain in tqdm(grains):\n", - " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain, cf_strong)\n", " \n", " c0 = np.median([g.cen for g in grains])\n", " \n", - " y0 = c0\n", + " y0 = c0/2\n", " \n", " whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", " \n", - " print(\"MLEM whole sample mask\")\n", + " print(\"Whole sample mask\")\n", " outsize = whole_sample_sino.shape[0] + pad\n", "\n", " if is_half_scan:\n", @@ -1324,11 +1385,14 @@ " ssino_to_recon = whole_sample_sino * halfmask\n", " else:\n", " ssino_to_recon = whole_sample_sino\n", - " recon = ImageD11.sinograms.roi_iradon.mlem(ssino_to_recon, \n", - " theta=ds.obincens,\n", - " workers=nthreads - 1,\n", - " output_size=outsize,\n", - " niter=mlem_wholesample_niter)\n", + " \n", + " recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=ds.obincens, \n", + " output_size=outsize,\n", + " projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=nthreads)\n", " \n", " recon_man_mask = apply_manual_mask(recon)\n", " if manual_threshold is None:\n", @@ -1376,7 +1440,7 @@ " \n", " bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, mlem_n_simultaneous_jobs, mlem_cores_per_task, mlem_niter)\n", " \n", - " utils.slurm_submit_and_wait(bash_script_path, 60)\n", + " utils.slurm_submit_and_wait(bash_script_path, 30)\n", " \n", " for i, grain in enumerate(tqdm(grains)):\n", " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))\n", @@ -1393,9 +1457,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "rgb_array.shape" - ] + "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb new file mode 100644 index 00000000..88a15604 --- /dev/null +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb @@ -0,0 +1,1400 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", + "# Written by Haixing Fang, Jon Wright and James Ball\n", + "## Date: 26/02/2024" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# There is a bug with the current version of ImageD11 in the site-wide Jupyter env.\n", + "# This has been fixed here: https://github.com/FABLE-3DXRD/ImageD11/commit/4af88b886b1775585e868f2339a0eb975401468f\n", + "# Until a new release has been made and added to the env, we need to get the latest version of 
ImageD11 from GitHub\n", + "# Put it in your home directory\n", + "# USER: Change the path below to point to your local copy of ImageD11:\n", + "\n", + "import os\n", + "\n", + "username = os.environ.get(\"USER\")\n", + "\n", + "id11_code_path = f\"/home/esrf/{username}/Code/ImageD11\"\n", + "\n", + "import sys\n", + "\n", + "sys.path.insert(0, id11_code_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# import functions we need\n", + "\n", + "import concurrent.futures\n", + "import timeit\n", + "import glob\n", + "import pprint\n", + "from shutil import rmtree\n", + "import time\n", + "from functools import partial\n", + "\n", + "import matplotlib\n", + "%matplotlib ipympl\n", + "\n", + "from scipy.optimize import curve_fit\n", + "\n", + "import h5py\n", + "from tqdm.notebook import tqdm\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import ImageD11.columnfile\n", + "from ImageD11.sinograms import properties, roi_iradon\n", + "from ImageD11.blobcorrector import eiger_spatial\n", + "from ImageD11.grain import grain\n", + "\n", + "from skimage.filters import threshold_otsu\n", + "from skimage.morphology import convex_hull_image\n", + "\n", + "import ImageD11.nbGui.nb_utils as utils\n", + "\n", + "import ipywidgets as widgets\n", + "from ipywidgets import interact" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# define our functions\n", + "\n", + "# save recons and 2d properties to existing grain file\n", + "\n", + "def read_grains(ds):\n", + " with h5py.File(ds.grainsfile, 'r') as hin: \n", + " grains_group = 'grains'\n", + " \n", + " grains = []\n", + " for gid_string in tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", + " gg = hin[grains_group][gid_string]\n", + " ubi = gg.attrs['ubi'][:]\n", + " g = ImageD11.grain.grain(ubi)\n", + " g.gid = int(gid_string)\n", + " grains.append(g)\n", + " \n", + " return grains\n", + "\n", + "\n", + "def map_grain_from_peaks(g, ds):\n", + " \"\"\"\n", + " Computes sinogram\n", + " flt is already the peaks for this grain\n", + " Returns angles, sino\n", + " \"\"\" \n", + " NY = len(ds.ybincens) # number of y translations\n", + " iy = np.round((g.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", + "\n", + " # The problem is to assign each spot to a place in the sinogram\n", + " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", + " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", + " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", + " # 4D array of h,k,l,+/-\n", + " # pkmsk is whether a peak has been observed with this HKL or not\n", + " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", + " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", + " # sinogram row to hit\n", + " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", + " # counting where we hit an HKL position with a found peak\n", + " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", + "\n", + " npks = pkmsk.sum( )\n", + " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", + " sino = np.zeros( ( npks, NY ), 'f' )\n", + " hits = np.zeros( ( npks, NY ), 'f' )\n", + " angs 
= np.zeros( ( npks, NY ), 'f' )\n", + " adr = destRow * NY + iy \n", + " # Just accumulate \n", + " sig = g.sum_intensity\n", + " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", + " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", + " ImageD11.cImageD11.put_incr64( angs, adr, g.omega)\n", + " \n", + " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", + " # Normalise:\n", + " sino = (sino.T/sino.max( axis=1 )).T\n", + " # Sort (cosmetic):\n", + " order = np.lexsort((np.arange(npks), sinoangles))\n", + " sinoangles = sinoangles[order]\n", + " ssino = sino[order].T\n", + " return sinoangles, ssino, hits[order].T\n", + "\n", + "def do_sinos(g, hkltol=0.25):\n", + "# # flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", + " \n", + " \n", + "# flt = cf_2d.copy()\n", + "# flt.filter(g.mask_2d)\n", + " \n", + " # hkl_real = np.dot(g.ubi, (g.gx, g.gy, g.gz)) # calculate hkl of all assigned peaks\n", + " # hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", + " # dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", + "\n", + " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", + " # g.npksall = flt.nrows # total number of assigned peaks\n", + " # flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", + " # hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", + " # hkl_int = np.round(hkl_real).astype(int)\n", + " # dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", + " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", + " # g.npks = flt.nrows # total number of assigned peaks after hkltol filtering\n", + " \n", + " hkl_real = np.dot(g.ubi, (g.gx, g.gy, g.gz)) # recalculate error after filtration\n", + " hkl_int = np.round(hkl_real).astype(int)\n", + " \n", + " g.etasigns_2d_strong = np.sign(g.eta)\n", + " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", + " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, ds)\n", + " # return i,g\n", + "\n", + "\n", + "def run_iradon_id11(grain, pad=20, y0=0, workers=1, sample_mask=None, apply_halfmask=False, mask_central_zingers=False):\n", + " outsize = grain.ssino.shape[0] + pad\n", + " \n", + " if apply_halfmask:\n", + " halfmask = np.zeros_like(grain.ssino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + " \n", + " ssino_to_recon = grain.ssino * halfmask\n", + " else:\n", + " ssino_to_recon = grain.ssino\n", + " \n", + " # # pad the sample mask\n", + " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", + "\n", + " \n", + " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", + " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=grain.sinoangles, \n", + " mask=sample_mask,\n", + " output_size=outsize,\n", + " projection_shifts=np.full(grain.ssino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=workers)\n", + " \n", + " if mask_central_zingers:\n", + " grs = grain.recon.shape[0]\n", + " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", + " inner_mask_radius = 25\n", + " outer_mask_radius = inner_mask_radius + 2\n", + "\n", + " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", + " 
outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", + "\n", + " mask_ring = inner_circle_mask & outer_circle_mask\n", + " # we now have a mask to apply\n", + " fill_value = np.median(grain.recon[mask_ring])\n", + " grain.recon[inner_circle_mask] = fill_value\n", + " \n", + " return grain\n", + "\n", + "\n", + "# write og_recon and ssino and circle_mask to disk\n", + "\n", + "cmp = {'compression':'gzip',\n", + " 'compression_opts': 2,\n", + " 'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return hds\n", + "\n", + "def save_grains_for_mlem(grains, ds, y0):\n", + " with h5py.File(ds.grainsfile, 'r+') as hout:\n", + " try:\n", + " grp = hout.create_group('peak_assignments')\n", + " except ValueError:\n", + " grp = hout['peak_assignments']\n", + "\n", + " # ds_gord = save_array( grp, 'gord', gord )\n", + " # ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " # ds_inds = save_array( grp, 'inds', inds )\n", + " # ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", + " \n", + " grains_group = 'grains'\n", + " for g in tqdm(grains):\n", + " gg = hout[grains_group][str(g.gid)]\n", + " # save stuff for sinograms\n", + " \n", + " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", + " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", + " save_array(gg, 'og_recon', g.og_recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", + " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to use for MLEM'\n", + " \n", + " # might as well save peaks stuff while we're here\n", + " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", + " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", + " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", + "\n", + " gg.attrs['cen'] = g.cen\n", + " gg.attrs['y0'] = y0\n", + " \n", + " \n", + "def prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50):\n", + " \n", + " slurm_mlem_path = os.path.join(ds.analysispath, \"slurm_mlem\")\n", + "\n", + " if os.path.exists(slurm_mlem_path):\n", + " print(f\"Removing {slurm_mlem_path}\")\n", + " rmtree(slurm_mlem_path)\n", + "\n", + " os.mkdir(slurm_mlem_path)\n", + " \n", + " recons_path = os.path.join(ds.analysispath, \"mlem_recons\")\n", + "\n", + " if os.path.exists(recons_path):\n", + " print(f\"Removing {recons_path}\")\n", + " rmtree(recons_path)\n", + "\n", + " os.mkdir(recons_path)\n", + " \n", + " if is_half_scan:\n", + " dohm = \"Yes\"\n", + " mask_cen = \"Yes\"\n", + " else:\n", + " dohm = \"No\"\n", + " mask_cen = \"No\"\n", + " \n", + " bash_script_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm.sh')\n", + " python_script_path = os.path.join(id11_code_path, \"ImageD11/nbGui/S3DXRD/run_mlem_recon.py\") \n", + " outfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.out')\n", + " errfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.err')\n", + " 
log_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_$SLURM_ARRAY_JOB_ID_$SLURM_ARRAY_TASK_ID.log')\n", + "\n", + " reconfile = os.path.join(recons_path, ds.dsname + \"_mlem_recon_$SLURM_ARRAY_TASK_ID.txt\")\n", + "\n", + " bash_script_string = f\"\"\"#!/bin/bash\n", + "#SBATCH --job-name=mlem-recon\n", + "#SBATCH --output={outfile_path}\n", + "#SBATCH --error={errfile_path}\n", + "#SBATCH --array=0-{len(grains)-1}%{n_simultaneous_jobs}\n", + "#SBATCH --time=02:00:00\n", + "# define memory needs and number of tasks for each array job\n", + "#SBATCH --ntasks=1\n", + "#SBATCH --cpus-per-task={cores_per_task}\n", + "#\n", + "date\n", + "echo python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n", + "python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n", + "date\n", + " \"\"\"\n", + " \n", + " # print(f\"python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\")\n", + "\n", + " with open(bash_script_path, \"w\") as bashscriptfile:\n", + " bashscriptfile.writelines(bash_script_string)\n", + " \n", + " return bash_script_path, recons_path\n", + "\n", + "\n", + "def save_grains(grains, ds):\n", + " with h5py.File(ds.grainsfile, 'r+') as hout:\n", + " try:\n", + " grp = hout.create_group('slice_recon')\n", + " except ValueError:\n", + " grp = hout['slice_recon']\n", + " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", + " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", + " \n", + " grains_group = 'grains'\n", + "\n", + " for g in tqdm(grains):\n", + " gg = hout[grains_group][str(g.gid)]\n", + "\n", + " save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n", + " \n", + " \n", + "# without a mask, MLEM can introduce artifacts in the corners\n", + "# so we can manually mask those out\n", + "\n", + "# we can incorporate our own mask too\n", + "# by modifying the below function\n", + "\n", + "def apply_manual_mask(mask_in):\n", + " mask_out = mask_in.copy()\n", + " \n", + " mask_out[200:, 250:] = 0\n", + " \n", + "# mask_out[:8, :] = 0\n", + "# mask_out[:, 87:] = 0\n", + "\n", + "# mask_out[:, :8] = 0\n", + "# mask_out[82:, :] = 0\n", + " \n", + "# mask_out[74:, :10] = 0\n", + "\n", + "# mask_out[:5, :] = 0\n", + "# # mask_out[131:, :] = 0\n", + "# # mask_out[:, 131:] = 0\n", + "\n", + "# mask_out[:20, 90:] = 0\n", + "# mask_out[119:, :45] = 0\n", + "# mask_out[:30, 100:] = 0\n", + "# # mask_out[112:, 81:] = 0\n", + "\n", + "# # mask_out[100:, 100:] = 0\n", + "# mask_out[90:, 118:] = 0\n", + "# mask_out[118:, 90:] = 0\n", + "\n", + "# mask_out[:40, 112:] = 0\n", + "# mask_out[:52, 120:] = 0\n", + "\n", + "# mask_out[:48, 81:] = 0\n", + " \n", + " return mask_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: For old datasets before the new directory layout structure, we don't distinguish between RAW_DATA and PROCESSED_DATA\n", + "# In this case, use this cell to specify where your experimental folder is, and do not run the cell below\n", + "# e.g /data/visitor/ma4752/id11/20210513\n", + "\n", + "### USER: specify your experimental directory\n", + "\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", + "\n", + "!ls -lrt {rawdata_path}\n", + "\n", + "### USER: specify where you want your processed data to go\n", + "\n", + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# USER: pick a sample and a dataset you want to process\n", + "\n", + "sample = \"FeAu_0p5_tR_nscope\"\n", + "dataset = \"top_250um\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# destination of H5 files\n", + "\n", + "dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Load the dataset (for motor positions, not sure why these are not in peaks)\n", + "ds = ImageD11.sinograms.dataset.load(dset_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Import 4D peaks\n", + "\n", + "cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", + "\n", + "cf_4d.parameters.loadparameters(par_path)\n", + "cf_4d.updateGeometry()\n", + "\n", + "print(f\"Read {cf_4d.nrows} 4D peaks\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "grains = read_grains(ds)\n", + "\n", + "for grain in grains:\n", + " # print(grain.gid)\n", + " grain.a = np.cbrt(np.linalg.det(grain.ubi))\n", + " \n", + "print(f\"{len(grains)} grains imported\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# here we are filtering our peaks (cf_4d) to select only the strongest ones\n", + "# this time as opposed to indexing, our frac is slightly weaker but we are NOT filtering in dstar!!!!!\n", + "# this means many more peaks per grain = stronger sinograms\n", + "\n", + "# USER: modify the \"frac\" parameter below and re-run the cell until the orange dot sits nicely on the \"elbow\" of the blue line\n", + "# this indicates the fractional intensity cutoff we will select\n", + "# if the blue line does not look elbow-shaped in the logscale plot, try changing the \"doplot\" parameter (the y scale of the logscale plot) until it does\n", + "\n", + "cf_strong = utils.selectpeaks(cf_4d, frac=0.995, dsmax=cf_4d.ds.max(), doplot=0.9)\n", + "print(cf_4d.nrows)\n", + "cf_strong.nrows" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# now let's do a whole-sample tomographic reconstruction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# If the sinograms are only half-sinograms (we scanned dty across half the sample rather than the full sample), set the below to True:\n", + "is_half_scan = False" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if 
is_half_scan:\n", + " utils.correct_half_scan(ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "utils.assign_peaks_to_grains(grains, cf_strong, tol=0.25)\n", + "\n", + "print(\"Storing peak data in grains\")\n", + "# iterate through all the grains\n", + "for g in tqdm(grains):\n", + " # store this grain's peak indices so we know which 4D peaks we used for indexing\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[g.mask_4d]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "for grain in grains:\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=True, nsigma=1)\n", + " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain, cf_strong)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "c0 = np.median([g.cen for g in grains])\n", + "\n", + "print('Center of rotation in dty', c0)\n", + "\n", + "y0 = c0/2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# generate sinogram for whole sample\n", + "\n", + "whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", + "\n", + "fig, ax = plt.subplots()\n", + "ax.imshow(whole_sample_sino, interpolation=\"nearest\", vmin=0)\n", + "ax.set_aspect(4)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# \"quick\" MLEM reconstruction\n", + "\n", + "pad = 50\n", + "\n", + "outsize = whole_sample_sino.shape[0] + pad\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "if is_half_scan:\n", + " halfmask = np.zeros_like(whole_sample_sino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + "\n", + " ssino_to_recon = whole_sample_sino * halfmask\n", + "else:\n", + " ssino_to_recon = whole_sample_sino\n", + "\n", + "recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=ds.obincens, \n", + " output_size=outsize,\n", + " projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=nthreads)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# we should be able to easily segment this using scikit-image\n", + "recon_man_mask = apply_manual_mask(recon)\n", + "\n", + "thresh = threshold_otsu(recon_man_mask)\n", + "\n", + "# we can also override the threshold if we don't like it:\n", + "\n", + "# thresh = 0.05\n", + "\n", + "binary = recon_man_mask > thresh\n", + "\n", + "chull = convex_hull_image(binary)\n", + "\n", + "fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, constrained_layout=True)\n", + "axs[0].imshow(recon_man_mask, vmin=0)\n", + "axs[1].imshow(binary)\n", + "axs[2].imshow(chull)\n", + "\n", + "axs[0].set_title(\"Reconstruction\")\n", + "axs[1].set_title(\"Binarised 
threshold\")\n", + "axs[2].set_title(\"Convex hull\")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "whole_sample_mask = chull" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# populate translations of grains\n", + "for g in grains:\n", + " g.translation = np.array([g.dx, g.dy, 0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Import 2D peaks\n", + "\n", + "cf_2d = ImageD11.columnfile.columnfile(ds.col2dfile)\n", + "\n", + "cf_2d.parameters.loadparameters(par_path)\n", + "cf_2d.updateGeometry()\n", + "\n", + "print(f\"Read {cf_2d.nrows} 2D peaks\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# here we are filtering our peaks (cf_4d) to select only the strongest ones\n", + "# this time as opposed to indexing, our frac is slightly weaker but we are NOT filtering in dstar!!!!!\n", + "# this means many more peaks per grain = stronger sinograms\n", + "\n", + "# USER: modify the \"frac\" parameter below and re-run the cell until the orange dot sits nicely on the \"elbow\" of the blue line\n", + "# this indicates the fractional intensity cutoff we will select\n", + "# if the blue line does not look elbow-shaped in the logscale plot, try changing the \"doplot\" parameter (the y scale of the logscale plot) until it does\n", + "\n", + "cf_2d_strong = utils.selectpeaks(cf_2d, frac=0.995, dsmax=cf_2d.ds.max(), doplot=0.9)\n", + "print(cf_2d.nrows)\n", + "cf_2d_strong.nrows" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# add index column to cf_2d\n", + "\n", + "cf_2d_strong.addcolumn(np.arange(cf_2d_strong.nrows), \"index\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# take your grains\n", + "# greedy assign 2d peaks\n", + "# build sinos with those\n", + "\n", + "# get all g-vectors from columnfile\n", + "gv = np.transpose((cf_2d_strong.gx, cf_2d_strong.gy, cf_2d_strong.gz)).astype(float)\n", + "\n", + "tol = 0.06\n", + "\n", + "print(\"Scoring and assigning {} grains\".format(len(grains)))\n", + "\n", + "for grain in tqdm(grains):\n", + " # column to store the grain labels\n", + " labels = np.zeros(cf_2d_strong.nrows, 'i')\n", + "\n", + " # column to store drlv2 (error in hkl)\n", + " drlv2 = np.ones(cf_2d_strong.nrows, 'd')\n", + " # iterate over all grains\n", + " # \n", + " n = ImageD11.cImageD11.score_and_assign(grain.ubi, gv, tol, drlv2, labels, grain.gid)\n", + " \n", + " grain.mask_2d = labels == grain.gid\n", + " grain.peaks_2d = cf_2d_strong.index[grain.mask_2d]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# populate the data for each grain that we need\n", + "\n", + "for grain in tqdm(grains):\n", + " grain.dty = cf_2d_strong.dty[grain.mask_2d]\n", + " grain.omega = cf_2d_strong.omega[grain.mask_2d]\n", + " grain.gx = cf_2d_strong.gx[grain.mask_2d]\n", + " grain.gy = cf_2d_strong.gy[grain.mask_2d]\n", + " grain.gz = cf_2d_strong.gz[grain.mask_2d]\n", + " grain.eta = cf_2d_strong.eta[grain.mask_2d]\n", + " grain.sum_intensity = cf_2d_strong.sum_intensity[grain.mask_2d]" + 
] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Determine sinograms of all grains\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n", + " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Show sinogram of single grain\n", + "\n", + "g = grains[0]\n", + "\n", + "fig, ax = plt.subplots()\n", + "\n", + "ax.imshow((g.ssino/g.ssino.mean(axis=0)), norm=matplotlib.colors.LogNorm(), interpolation='nearest', origin=\"lower\")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# if you want, you can override the y0 value here\n", + "\n", + "# y0 = 1.5 # for example!\n", + "\n", + "y0 = c0/2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "g = grains[1]\n", + "\n", + "run_iradon_id11(g, pad=pad, y0=y0, workers=max(nthreads, 20), sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "g = grains[1]\n", + "\n", + "fig, axs = plt.subplots(1,2, figsize=(10,5))\n", + "axs[0].imshow(g.recon, vmin=0)\n", + "axs[0].set_title(\"ID11 iradon\")\n", + "axs[1].imshow(g.ssino, aspect='auto')\n", + "axs[1].set_title(\"ssino\")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", + "\n", + "with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", + " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "for grain in grains:\n", + " grain.og_recon = grain.recon" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "fig, a = plt.subplots(1,2,figsize=(10,5))\n", + "rec = a[0].imshow(grains[8].og_recon, vmin=0)\n", + "sin = a[1].imshow(grains[8].ssino, aspect='auto')\n", + "\n", + "# Function to update the displayed image based on the selected frame\n", + "def update_frame(i):\n", + " rec.set_array(grains[i].og_recon)\n", + " sin.set_array(grains[i].ssino)\n", + " a[0].set(title=str(i))\n", + " fig.canvas.draw()\n", + "\n", + "# Create a slider widget to select the frame number\n", + "frame_slider = widgets.IntSlider(\n", + " value=0,\n", + " min=0,\n", + " max=len(grains) - 1,\n", + " step=1,\n", + " description='Grain:'\n", + ")\n", + "\n", + "interact(update_frame, i=frame_slider)\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "f,a = plt.subplots( 1,3, figsize=(15,5) )\n", + "ty, tx = utils.triangle().T\n", 
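+ "# for each lab axis (x, y, z): find the crystal direction parallel to that axis in every grain,\n", + "# then project it into the cubic standard triangle and colour the grain by the matching IPF colour\n",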
+ "for i,title in enumerate( 'xyz' ):\n", + " ax = np.zeros(3)\n", + " ax[i] = 1.\n", + " hkl = [utils.crystal_direction_cubic( g.ubi, ax ) for g in grains]\n", + " xy = np.array([utils.hkl_to_pf_cubic(h) for h in hkl ])\n", + " rgb = np.array([utils.hkl_to_color_cubic(h) for h in hkl ])\n", + " for j in range(len(grains)):\n", + " grains[j].rgb = rgb[j]\n", + " a[i].scatter( xy[:,1], xy[:,0], c = rgb ) # Note the \"x\" axis of the plot is the 'k' direction and 'y' is h (smaller)\n", + " a[i].set(title=title, aspect='equal', facecolor='silver', xticks=[], yticks=[])\n", + " a[i].plot( tx, ty, 'k-', lw = 1 )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# plot initial output\n", + "\n", + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(rgb_array)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(grain_labels_array) # originally 1,2,0\n", + "ax.set_title(\"Grain label map\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(raw_intensity_array)\n", + "ax.set_title(\"Raw intensity array\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# we can clean up these reconstructions using an MLEM iterative recon\n", + "# we can use the whole sample shape mask for this" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "save_grains_for_mlem(grains, ds, y0=y0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "utils.slurm_submit_and_wait(bash_script_path, 30)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# collect results into grain attributes\n", + "# the filenames are element position not gid\n", + "\n", + "for i, grain in enumerate(tqdm(grains)):\n", + " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# look at all our grains\n", + "\n", + "n_grains_to_plot = 25\n", + "\n", + "grains_step = len(grains)//n_grains_to_plot\n", + "\n", + "grid_size = np.ceil(np.sqrt(len(grains[::grains_step]))).astype(int)\n", + "nrows = (len(grains[::grains_step])+grid_size-1)//grid_size\n", + "\n", + "fig, axs = plt.subplots(grid_size, nrows, figsize=(10,10), layout=\"constrained\", sharex=True, sharey=True)\n", + "for i, ax in enumerate(axs.ravel()):\n", + " if i < len(grains[::grains_step]):\n", + " # get 
corresponding grain for this axis\n", + " g = grains[::grains_step][i]\n", + " ax.imshow(g.recon, vmin=0)\n", + " # ax.invert_yaxis()\n", + " ax.set_title(i)\n", + " \n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(rgb_array)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(raw_intensity_array)\n", + "ax.set_title(\"Sinogram raw intensity map\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(constrained_layout=True)\n", + "ax.imshow(grain_labels_array)\n", + "ax.set_title(\"Grain label map\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "save_grains(grains, ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if 1:\n", + " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "samples_dict = {}\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", + "}\n", + "\n", + "dset_prefix = \"top\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", + "\n", + "for sample in sample_list:\n", + " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", + " dsets_list = []\n", + " for folder in all_dset_folders_for_sample:\n", + " if dset_prefix in folder:\n", + " dset_name = folder.split(f\"{sample}_\")[1]\n", + " if dset_name not in skips_dict[sample]:\n", + " dsets_list.append(dset_name)\n", + "\n", + " samples_dict[sample] = dsets_list\n", + " \n", + "# manual override:\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n", + " \n", + "# now we have our samples_dict, we can process our data:\n", + "\n", + "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", + "\n", + "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", + "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", + "\n", + "cf_strong_frac = 0.995\n", + "cf_strong_dstol = 0.01\n", + "\n", + "is_half_scan = False\n", + "\n", + "peak_assign_tol = 0.25\n", + "\n", + "manual_threshold = None\n", + "# manual_threshold = 0.025\n", + "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "pad = 50\n", + "\n", + "# y0 = -1.4\n", + "\n", + "mlem_wholesample_niter = 25\n", + "mlem_n_simultaneous_jobs = 50\n", + "mlem_cores_per_task = 8\n", + 
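"# number of MLEM iterations for each per-grain reconstruction (passed to prepare_mlem_bash below):\n", + 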
"mlem_niter = 50\n", + "\n", + "cutoff_level = 0.2\n", + "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " if not os.path.exists(dset_path):\n", + " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " print(\"Importing DataSet object\")\n", + " \n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " \n", + " if not os.path.exists(ds.grainsfile):\n", + " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", + " continue\n", + " \n", + " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", + " cf_4d.parameters.loadparameters(par_path)\n", + " cf_4d.updateGeometry()\n", + " \n", + " grains = read_grains(ds)\n", + " \n", + " cf_strong = utils.selectpeaks(cf_4d, frac=cf_strong_frac, dsmax=cf_4d.ds.max(), dstol=cf_strong_dstol)\n", + " \n", + " if is_half_scan:\n", + " utils.correct_half_scan(ds)\n", + " \n", + " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", + " \n", + " for g in tqdm(grains):\n", + " g.mask_4d = cf_strong.grain_id == g.gid\n", + " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " \n", + " for grain in tqdm(grains):\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain)\n", + " \n", + " c0 = np.median([g.cen for g in grains])\n", + " \n", + " y0 = c0/2\n", + " \n", + " whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", + " \n", + " print(\"Whole sample mask\")\n", + " outsize = whole_sample_sino.shape[0] + pad\n", + "\n", + " if is_half_scan:\n", + " halfmask = np.zeros_like(whole_sample_sino)\n", + "\n", + " halfmask[:len(halfmask)//2-1, :] = 1\n", + " halfmask[len(halfmask)//2-1, :] = 0.5\n", + "\n", + " ssino_to_recon = whole_sample_sino * halfmask\n", + " else:\n", + " ssino_to_recon = whole_sample_sino\n", + " \n", + " recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", + " theta=ds.obincens, \n", + " output_size=outsize,\n", + " projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", + " filter_name='hamming',\n", + " interpolation='linear',\n", + " workers=nthreads)\n", + " \n", + " recon_man_mask = apply_manual_mask(recon)\n", + " if manual_threshold is None:\n", + " thresh = threshold_otsu(recon_man_mask)\n", + " else:\n", + " thresh = manual_threshold\n", + " \n", + " binary = recon_man_mask > thresh\n", + " whole_sample_mask = convex_hull_image(binary)\n", + " \n", + " for g in grains:\n", + " g.translation = np.array([g.dx, g.dy, 0])\n", + " \n", + " print(\"Peak 2D organise\")\n", + " pks = ImageD11.sinograms.properties.pks_table.load(ds.pksfile)\n", + " p2d = pks.pk2d(ds.omega, ds.dty)\n", + " numba_order, numba_histo = utils.counting_sort(p2d['spot3d_id'])\n", + " grain_2d_id = utils.palloc(p2d['spot3d_id'].shape, np.dtype(int))\n", + " cleanid = 
cf_strong.grain_id.copy()\n", + " utils.find_grain_id(cf_strong.spot3d_id, cleanid, p2d['spot3d_id'], grain_2d_id, numba_order)\n", + " gord, counts = utils.counting_sort(grain_2d_id)\n", + " inds = np.concatenate(((0,), np.cumsum(counts)))\n", + " \n", + " for grain in tqdm(grains):\n", + " i = grain.gid\n", + " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n", + " \n", + " print(\"Making sinograms\")\n", + " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n", + " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n", + " pass\n", + " \n", + " print(\"Running iradon\")\n", + " \n", + " run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", + "\n", + " with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", + " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", + " pass\n", + " \n", + " for grain in grains:\n", + " grain.og_recon = grain.recon\n", + " \n", + " save_grains_for_mlem(grains, ds, y0)\n", + " \n", + " bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, mlem_n_simultaneous_jobs, mlem_cores_per_task, mlem_niter)\n", + " \n", + " utils.slurm_submit_and_wait(bash_script_path, 30)\n", + " \n", + " for i, grain in enumerate(tqdm(grains)):\n", + " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))\n", + " \n", + " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " \n", + " save_grains(grains, ds)\n", + "\n", + "print(\"Done!\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (main)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb index 38411df8..8dc0ab4b 100755 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb @@ -62,6 +62,8 @@ "import ipywidgets as ipyw\n", "import matplotlib.pyplot as plt\n", "\n", + "from functools import partial\n", + "\n", "import ImageD11.nbGui.nb_utils as utils\n", "\n", "import ImageD11.refinegrains\n", @@ -80,8 +82,7 @@ "source": [ "# define our functions\n", "\n", - "def read_grains_minor_phase(ds, phase_name='minor'):\n", - " ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + "def read_grains_minor_phase(ds):\n", " with h5py.File(ds.grainsfile_minor_phase, 'r') as hin: \n", " grains_group = 'grains'\n", " \n", @@ -111,6 +112,8 @@ " g.gid = int(gid_string)\n", " g.y0 = gg.attrs['y0'][()]\n", " g.sample_mask = gg['circle_mask'][:]\n", + " g.recon = gg['recon'][:]\n", + " g.ssino = gg['ssino'][:]\n", " grains.append(g)\n", " \n", " return grains\n", @@ -318,7 +321,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = 
\"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -391,6 +394,10 @@ }, "outputs": [], "source": [ + "phase_name = \"Au\"\n", + "\n", + "ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + "\n", "grains = read_grains_minor_phase(ds)\n", "\n", "for grain in grains:\n", @@ -420,7 +427,7 @@ "minor_phase_peaks.parameters.loadparameters(par_path)\n", "minor_phase_peaks.updateGeometry()\n", "\n", - "cf_strong = utils.selectpeaks(minor_phase_peaks, dstol=0.005, dsmax=carbides.ds.max(), frac=0.9, doplot=0.01)\n", + "cf_strong = utils.selectpeaks(minor_phase_peaks, dstol=0.005, dsmax=minor_phase_peaks.ds.max(), frac=0.9, doplot=0.01)\n", "print(cf_strong.nrows)" ] }, @@ -463,7 +470,8 @@ "whole_sample_mask = major_phase_grains[0].sample_mask\n", "y0 = major_phase_grains[0].y0\n", "\n", - "pad = ((major_phase_grains.recon.shape[0] - major_phase_grains.ssino.shape[0])//2).astype(int)" + "pad = ((major_phase_grains[0].recon.shape[0] - major_phase_grains[0].ssino.shape[0]))\n", + "pad" ] }, { @@ -541,10 +549,11 @@ "outputs": [], "source": [ "for grain in tqdm(grains):\n", - " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)" + " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain, cf_strong)" ] }, { @@ -588,7 +597,8 @@ "a = ax.ravel()\n", "x = [g.dx for g in grains]\n", "y = [g.dy for g in grains]\n", - "s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", + "# s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", + "s = [10 for g in grains]\n", "a[0].scatter(x, y, s=s, c=[g.rgb_z for g in grains])\n", "a[0].set(title='IPF color Z', aspect='equal')\n", "a[1].scatter(x, y, s=s, c=[g.rgb_y for g in grains])\n", @@ -752,7 +762,7 @@ "source": [ "# you can overwrite y0 here\n", "\n", - "y0 = -7.875\n", + "# y0 = -7.875\n", "# pad = 50" ] }, @@ -766,8 +776,10 @@ "source": [ "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", + "run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", + "\n", "with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", - " for i in tqdm(pool.map(run_iradon_id11, grains, [pad]*len(grains), [y0]*len(grains)), total=len(grains)):\n", + " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", " pass" ] }, @@ -843,7 +855,7 @@ "# remove bad recon grains from future analysis\n", "print(f\"{len(grains)} grains before filtration\")\n", "grains = [grain for grain in grains if not grain.bad_recon]\n", - "# grains = [grain for grain in grains if grain.gid not in bad_gids]\n", + "grains = [grain for grain in grains if grain.gid not in bad_gids]\n", "print(f\"{len(grains)} grains after filtration\")" ] }, @@ -872,7 +884,8 @@ "a = ax.ravel()\n", "x = [g.x_blob for g in grains]\n", "y = [g.y_blob for g in 
grains]\n", - "s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", + "# s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", + "s = [10 for g in grains]\n", "a[0].scatter(x, y, s=s, c=[g.rgb_z for g in grains])\n", "a[0].set(title='IPF color Z', aspect='equal')\n", "a[1].scatter(x, y, s=s, c=[g.rgb_y for g in grains])\n", @@ -974,6 +987,10 @@ }, "outputs": [], "source": [ + "# filter out grains with more than 25 pixels in the label map\n", + "# this normally indicates a dodgy reconstruction for this grain\n", + "# only really applies if the grains are very small!\n", + "\n", "bad_gids = [int(label) for (label, count) in zip(labels, counts) if count > 25 and label > 0]\n", "bad_gids" ] @@ -1035,7 +1052,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -1053,6 +1070,9 @@ "\n", " samples_dict[sample] = dsets_list\n", " \n", + "# manual override:\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n", + " \n", "# now we have our samples_dict, we can process our data:\n", "\n", "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", @@ -1062,6 +1082,7 @@ "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", "\n", "main_phase_cf_dstol = 0.0075\n", + "phase_name = \"Au\"\n", "\n", "cf_strong_frac = 0.9\n", "cf_strong_dstol = 0.005\n", @@ -1072,10 +1093,12 @@ "\n", "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", - "pad = 50\n", + "# pad = 50\n", "\n", "cutoff_level = 0.7\n", "\n", + "grain_too_many_px = 25\n", + "\n", "for sample, datasets in samples_dict.items():\n", " for dataset in datasets:\n", " print(f\"Processing dataset {dataset} in sample {sample}\")\n", @@ -1089,10 +1112,18 @@ " ds = ImageD11.sinograms.dataset.load(dset_path)\n", " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", " \n", - " if not os.path.exists(ds.grainsfile):\n", + " ds.grainsfile_minor_phase = os.path.join(ds.analysispath, ds.dsname + f'_grains_{phase_name}.h5')\n", + " \n", + " if not os.path.exists(ds.grainsfile_minor_phase):\n", " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", " continue\n", " \n", + " # check grains file for existance of slice_recon, skip if it's there\n", + " with h5py.File(ds.grainsfile_minor_phase, \"r\") as hin:\n", + " if \"slice_recon\" in hin.keys():\n", + " print(f\"Already reconstructed {dataset} in {sample}, skipping\")\n", + " continue\n", + " \n", " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", " cf_4d.parameters.loadparameters(par_path)\n", " cf_4d.updateGeometry()\n", @@ -1117,18 +1148,20 @@ " main_phase_grains = read_grains_main_phase(ds)\n", " whole_sample_mask = main_phase_grains[0].sample_mask\n", " y0 = main_phase_grains[0].y0\n", + " pad = ((major_phase_grains[0].recon.shape[0] - major_phase_grains[0].ssino.shape[0]))\n", " \n", " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", " \n", " for g in tqdm(grains):\n", " g.mask_4d = cf_strong.grain_id == g.gid\n", - " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", + " g.peaks_4d = cf_strong.index[g.mask_4d]\n", " \n", " for grain in tqdm(grains):\n", - " grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", + " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", " 
grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", + " utils.fit_grain_position_from_sino(grain, cf_strong)\n", " \n", " c0 = np.median([g.cen for g in grains])\n", " \n", @@ -1165,16 +1198,32 @@ " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1, nthreads-1)) as pool:\n", " for i in tqdm(pool.map(find_cens_from_recon, grains), total=len(grains)):\n", " pass\n", - " \n", + " \n", + " grains = [grain for grain in grains if not grain.bad_recon]\n", + " \n", " for g in grains:\n", " g.translation = np.array([g.x_blob, g.y_blob, 0])\n", " \n", " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", " \n", - " save_grains(grains, ds)\n", + " labels, counts = np.unique(grain_labels_array, return_counts=True)\n", + " bad_gids = [int(label) for (label, count) in zip(labels, counts) if count > grain_too_many_px and label > 0]\n", + " \n", + " grains = [grain for grain in grains if grain.gid not in bad_gids]\n", + " \n", + " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " \n", + " save_grains_minor_phase(grains, ds)\n", "\n", "print(\"Done!\")" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb index 0f4e5e67..13d979ac 100755 --- a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb +++ b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb @@ -147,7 +147,7 @@ "\n", "### USER: specify where you want your processed data to go\n", "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240221\"" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" ] }, { @@ -616,7 +616,7 @@ "samples_dict = {}\n", "\n", "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_100um\"]\n", + " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", "\n", "dset_prefix = \"top\"\n", @@ -711,9 +711,11 @@ " \n", " \n", " ubifit = g.ubi.copy()\n", - " _ = cImageD11.score_and_refine(ubifit, np.transpose(grain.gve_2d_strong), tol)\n", - " grain.set_ubi(ubifit)\n", - " \n", + " _ = cImageD11.score_and_refine(ubifit, np.transpose(g.gve_2d_strong), peak_assign_tol)\n", + " g.set_ubi(ubifit)\n", + " \n", + " per_pixel_ubis = {}\n", + " \n", " for ginc, grain in enumerate(tqdm(grains[:])):\n", " def refine_ubis(pixel_position):\n", " i, j = pixel_position\n", @@ -730,7 +732,7 @@ " gve = np.transpose(grain.gve_2d_strong[:, mask])\n", "\n", " ubifit = grain.ubi.copy()\n", - " _ = cImageD11.score_and_refine(ubifit, gve, tol)\n", + " _ = cImageD11.score_and_refine(ubifit, gve, peak_assign_tol)\n", "\n", " return ubifit\n", "\n", @@ -797,10 +799,19 @@ " except KeyError:\n", " continue\n", " \n", + " ds.pbpubifile = os.path.join(ds.analysispath, ds.dsname + '_pbp_map.h5')\n", " save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map)\n", "\n", "print(\"Done!\")" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4c73580e-108c-4d95-8437-a6ce7f890326", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git 
a/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb b/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb index ea1a944c..08bf26e7 100644 --- a/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb +++ b/ImageD11/nbGui/S3DXRD/5_S3DXRD_plot_multiple_slices.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a545dd7f-4140-4abc-9c00-6edd96a2e501", "metadata": { "tags": [] @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "92baa2cd-4e85-4998-979b-88ef0a674c48", "metadata": { "tags": [] @@ -42,7 +42,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "74e066b0-ae13-46e9-81b0-a11ea524a469", "metadata": { "tags": [] @@ -78,30 +78,12 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "ac85f820-7358-4ab6-87bf-fa386793cedb", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "total 1180\n", - "drwxr-x--- 19 opid11 id11 4096 Dec 11 13:59 FeAu_0p5_tR\n", - "drwxr-x--- 15 opid11 id11 4096 Dec 11 16:43 FeAu_tR\n", - "drwxr-x--- 4 opid11 id11 4096 Dec 11 16:57 CeO2_ff_after_tR\n", - "drwxr-x--- 20 opid11 id11 4096 Dec 12 09:42 FeAu_0p5_tR_nscope\n", - "drwxr-x--- 19 opid11 id11 4096 Dec 12 15:23 FSH_steel\n", - "drwxr-x--- 7 opid11 id11 4096 Dec 12 17:44 test_furnace\n", - "drwxr-x--- 143 opid11 id11 16384 Dec 13 13:59 FSH_steel_creep\n", - "drwxr-x--- 4 opid11 id11 4096 Jan 5 15:53 FeAu_creep_no_beam\n", - "-rwxr-x--- 1 opid11 id11 1183582 Jan 15 10:11 ihma439_id11.h5\n", - "drwxr-x--- 2 opid11 id11 4096 Jan 17 16:33 __icat__\n" - ] - } - ], + "outputs": [], "source": [ "# NOTE: For old datasets before the new directory layout structure, we don't distinguish between RAW_DATA and PROCESSED_DATA\n", "# In this case, use this cell to specify where your experimental folder is, and do not run the cell below\n", @@ -120,7 +102,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "a1ff363a-8106-45c4-b4de-f557f33c852a", "metadata": { "tags": [] @@ -155,7 +137,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "968d1fcd-46e7-43ad-8f09-5c5b3a639ae1", "metadata": { "tags": [] @@ -194,91 +176,12 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "48cc2f6e-8cf1-4ec5-b8a0-2cab2a4f44d9", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Processing dataset top_150um in sample FeAu_0p5_tR_nscope\n", - "Importing DataSet object\n", - "Importing grains\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8342f023d1ef4d7281376676ff63336a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - " 0%| | 0/44 [00:00 Date: Tue, 27 Feb 2024 17:49:48 +0100 Subject: [PATCH 3/8] All notebooks now use nb_utils.py --- .../nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb | 110 +++-- .../3DXRD/1_3DXRD_refine_parameters.ipynb | 131 ++--- .../nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb | 456 +++++++----------- .../nbGui/3DXRD/3_3DXRD_look_at_peaks.ipynb | 8 +- .../nbGui/3DXRD/4_3DXRD_merge_slices.ipynb | 33 +- ImageD11/nbGui/3DXRD/CeO2.par | 30 -- .../nbGui/3DXRD/CeO2_20240210_1348_175mm.poni | 12 - .../SiO2_mp-7000_conventional_standard.cif | 35 -- ImageD11/nbGui/3DXRD/frelon_peaksearch.py | 23 +- ImageD11/nbGui/3DXRD/output.par | 30 -- ImageD11/nbGui/3DXRD/utils.py | 285 
----------- .../S3DXRD/0_S3DXRD_segment_and_label.ipynb | 16 +- ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb | 19 +- .../S3DXRD/1_S3DXRD_index_minor_phase.ipynb | 19 +- .../nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb | 19 +- .../S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb | 19 +- .../2_S3DXRD_sinograms_map_minor_phase.ipynb | 17 +- .../S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb | 20 +- ImageD11/nbGui/nb_utils.py | 17 + 19 files changed, 354 insertions(+), 945 deletions(-) delete mode 100644 ImageD11/nbGui/3DXRD/CeO2.par delete mode 100644 ImageD11/nbGui/3DXRD/CeO2_20240210_1348_175mm.poni delete mode 100755 ImageD11/nbGui/3DXRD/SiO2_mp-7000_conventional_standard.cif delete mode 100644 ImageD11/nbGui/3DXRD/output.par delete mode 100755 ImageD11/nbGui/3DXRD/utils.py diff --git a/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb b/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb index f9b5fa6a..c48715e0 100755 --- a/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb +++ b/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb @@ -7,7 +7,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 16/02/2024" + "## Date: 27/02/2024" ] }, { @@ -78,10 +78,11 @@ "from IPython.display import display\n", "%matplotlib widget\n", "\n", + "from ImageD11.nbGui import nb_utils as utils\n", "\n", "from frelon_peaksearch import worker, process\n", "\n", - "from utils import apply_spatial" + "# from utils import apply_spatial" ] }, { @@ -107,17 +108,13 @@ }, "outputs": [], "source": [ - "# NEW DATASETS\n", - "\n", "### USER: specify your experimental directory\n", "\n", - "base_dir = \"/data/visitor/ma5837/id11/20240208\"\n", - "\n", - "rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", "\n", "!ls -lrt {rawdata_path}\n", "\n", - "processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired" ] }, { @@ -131,8 +128,8 @@ "source": [ "# USER: pick a sample and a dataset you want to segment\n", "\n", - "sample = \"S12\"\n", - "dataset = \"FF_zeries_0\"\n", + "sample = \"FeAu_0p5_tR\"\n", + "dataset = \"ff1\"\n", "\n", "# USER: specify path to detector spline file\n", "\n", @@ -172,7 +169,7 @@ "source": [ "# USER: specify path to background file\n", "\n", - "bg_file = os.path.join(ds.analysisroot, \"CeO2/CeO2_bkg_3dxrd_beam_shutter_open/CeO2_bkg_3dxrd_beam_shutter_open.edf\")" + "bg_file = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/ff_bkg.edf\"" ] }, { @@ -197,7 +194,11 @@ }, "outputs": [], "source": [ - "test_image_worker = worker(bgfile=bg_file)\n", + "worker_args = {\n", + "    \n", + "}\n", + "\n", + "test_image_worker = worker(bg_file, **worker_args)\n", "goodpeaks = test_image_worker.peaksearch(img=test_image, omega=0)\n", "fc, sc = goodpeaks[:, 23:25].T" ] }, { @@ -232,7 +233,9 @@ "source": [ "# now we run the segmenter on all our data\n", "\n", - "cf_2d, cf_3d = process(ds, bg_file, 64)" + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", + "cf_2d, cf_3d = process(ds, bg_file, nthreads-1, worker_args)" ] }, { @@ -284,7 +287,7 @@ }, "outputs": [], "source": [ - "cf_2d = apply_spatial(cf_2d, spline_file)" + "cf_2d = utils.apply_spatial(cf_2d, spline_file)" ] 
}, { @@ -296,7 +299,7 @@ }, "outputs": [], "source": [ - "cf_3d = apply_spatial(cf_3d, spline_file)" + "cf_3d = utils.apply_spatial(cf_3d, spline_file)" ] }, { @@ -308,7 +311,7 @@ }, "outputs": [], "source": [ - "parfile = 'Fe_refined.par'" + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/fitted.par'" ] }, { @@ -340,6 +343,18 @@ "ImageD11.columnfile.colfile_to_hdf(cf_3d, ds.col3dfile)" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "148fb053-420b-4534-ba48-9e6a67e7a746", + "metadata": {}, + "outputs": [], + "source": [ + "# change to 0 to allow all cells to be run automatically\n", + "if 1:\n", + " raise ValueError(\"Hello!\")" + ] + }, { "cell_type": "code", "execution_count": null, @@ -348,30 +363,34 @@ "outputs": [], "source": [ "# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many samples/datasets\n", - "# just modify samples_dict accordingly!\n", + "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", + "# you can add samples and datasets to skip\n", + "\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR\": []\n", + "}\n", + "\n", + "dset_prefix = \"ff\"\n", + "\n", + "sample_list = [\"FeAu_0p5_tR\"]\n", + " \n", + "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n", + " \n", + "# manual override:\n", + "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n", "\n", "mask_path = '/data/id11/inhouse1/ewoks/detectors/files/Frelon2k_C36/mask.edf'\n", "\n", "spline_file = '/data/id11/inhouse1/ewoks/detectors/files/Frelon2k_C36/frelon36.spline'\n", - "parfile = 'Fe_refined.par'\n", - "\n", - "samples_dict = {\n", - " \"S13\" : [\n", - " \"FF_zeries_0\",\n", - " \"FF_zeries_1\",\n", - " \"FF_zeries_2\",\n", - " \"FF_zeries_3\",\n", - " \"FF_zeries_4\",\n", - " ],\n", - " \"S14\" : [\n", - " \"FF_zeries_0\",\n", - " \"FF_zeries_1\",\n", - " \"FF_zeries_2\",\n", - " \"FF_zeries_3\",\n", - " \"FF_zeries_4\",\n", - " ],\n", + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/fitted.par'\n", + "bg_file = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/ff_bkg.edf\"\n", + "\n", + "worker_args = {\n", + " \n", "}\n", "\n", + "nthreads = len(os.sched_getaffinity(os.getpid()))\n", + "\n", "for sample, datasets in samples_dict.items():\n", " for dataset in datasets:\n", " print(f\"Processing dataset {dataset} in sample {sample}\")\n", @@ -383,18 +402,21 @@ " detector=\"frelon3\",\n", " omegamotor=\"diffrz\",\n", " dtymotor=\"diffty\")\n", + " \n", + " if os.path.exists(ds.col2dfile):\n", + " print(f\"Found existing cf_2d for {dataset} in {sample}, skipping\")\n", + " continue\n", + " \n", " ds.import_all(scans=[\"1.1\"])\n", " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", " ds.save()\n", - " \n", - " bg_path = os.path.join(ds.analysisroot, \"CeO2/CeO2_bkg_3dxrd_beam_shutter_open/CeO2_bkg_3dxrd_beam_shutter_open.edf\")\n", - " \n", + "\n", " print(\"Peaksearching\")\n", - " cf_2d, cf_3d = process(ds, bg_path, 64)\n", + " cf_2d, cf_3d = process(ds, bg_file, nthreads-1, worker_args)\n", " \n", " print(\"Spatially correcting peaks\")\n", - " cf_2d = apply_spatial(cf_2d, spline_file)\n", - " cf_3d = apply_spatial(cf_3d, spline_file)\n", + " cf_2d = utils.apply_spatial(cf_2d, spline_file)\n", + " cf_3d = utils.apply_spatial(cf_3d, spline_file)\n", " \n", " 
print(\"Saving peaks to file\")\n", " cf_2d.parameters.loadparameters(parfile)\n", @@ -406,6 +428,14 @@ " cf_3d.updateGeometry()\n", " ImageD11.columnfile.colfile_to_hdf(cf_3d, ds.col3dfile)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab892957-5ce7-4f04-a01c-c04cc9a2715c", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/3DXRD/1_3DXRD_refine_parameters.ipynb b/ImageD11/nbGui/3DXRD/1_3DXRD_refine_parameters.ipynb index a12fef52..2f9588b8 100755 --- a/ImageD11/nbGui/3DXRD/1_3DXRD_refine_parameters.ipynb +++ b/ImageD11/nbGui/3DXRD/1_3DXRD_refine_parameters.ipynb @@ -6,7 +6,7 @@ "source": [ "# Jupyter notebook based on ImageD11 to process 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 13/02/2024" + "## Date: 27/02/2024" ] }, { @@ -68,7 +68,8 @@ "%matplotlib widget\n", "from matplotlib import pyplot as plt\n", "\n", - "import utils\n", + "# import utils\n", + "from ImageD11.nbGui import nb_utils as utils\n", "\n", "import ImageD11.grain\n", "import ImageD11.indexing\n", @@ -86,17 +87,13 @@ }, "outputs": [], "source": [ - "# NEW DATASETS\n", - "\n", "### USER: specify your experimental directory\n", "\n", - "base_dir = \"/data/visitor/ma5837/id11/20240208\"\n", - "\n", - "rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", "\n", "!ls -lrt {rawdata_path}\n", "\n", - "processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired" ] }, { @@ -109,8 +106,8 @@ "source": [ "# USER: pick a sample and a dataset you want to segment\n", "\n", - "sample = \"S18\"\n", - "dataset = \"FF_zeries_1\"\n", + "sample = \"FeAu_0p5_tR\"\n", + "dataset = \"ff1\"\n", "\n", "# USER: specify path to detector mask\n", "\n", @@ -131,7 +128,7 @@ "\n", "# USER: specify the path to the parameter file\n", "\n", - "parfile = 'Fe.par'\n", + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_refined_tdxrd.par'\n", "spline_file = '/data/id11/inhouse1/ewoks/detectors/files/Frelon2k_C36/frelon36.spline'" ] }, @@ -207,7 +204,7 @@ "# USER: modify the \"frac\" parameter below and re-run the cell until we have around 10,000 peaks for indexing\n", "# we may choose more for the full index in the next notebook, but for now we want to quickly index only our strong grains\n", "\n", - "cf_strong = utils.selectpeaks(cf_3d, frac=0.35, dsmax=0.92, doplot=0.05, dstol=0.01)\n", + "cf_strong = utils.selectpeaks(cf_3d, frac=0.95, dsmax=0.92, doplot=0.05, dstol=0.01)\n", "print(f\"Got {cf_strong.nrows} strong peaks for indexing\")\n", "cf_strong.writefile(f'{sample}_{dataset}_3d_peaks_strong.flt')" ] @@ -223,7 +220,7 @@ "# we will also export some additional strong peaks across all rings\n", "# this will be useful for grain refinement later (using makemap)\n", "\n", - "cf_strong_allrings = utils.selectpeaks(cf_3d, frac=0.50, dsmax=cf_3d.ds.max(), doplot=0.05, dstol=0.01)\n", + "cf_strong_allrings = utils.selectpeaks(cf_3d, frac=0.95, dsmax=cf_3d.ds.max(), doplot=0.05, dstol=0.01)\n", "print(f\"Got {cf_strong_allrings.nrows} strong peaks for makemap\")\n", "cf_strong_allrings_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt'\n", 
"cf_strong_allrings.writefile(cf_strong_allrings_path)" @@ -324,60 +321,29 @@ }, "outputs": [], "source": [ - "# we want to index low multiplicity rings\n", - "# choose max_multiplicity such that we get 2-3 low-multiplicity rings\n", - "# in this case, we will find orientations on 2 rings (faster for many peaks) and include the third ring for minpeaks calculations\n", - "\n", + "# now we are indexing!\n", + "# indexing will select all rings with a multiplicity below max_multiplity to search\n", "max_multiplicity = 13\n", - "\n", + "# the minimum number of peaks on a ring for a ring to be indexed on\n", "min_counts_on_ring = 0\n", - "\n", - "n_peaks_expected = 0\n", - "rings = []\n", - "for i, dstar in enumerate(indexer.unitcell.ringds):\n", - " multiplicity = len(indexer.unitcell.ringhkls[indexer.unitcell.ringds[i]])\n", - " counts_on_this_ring = (indexer.ra == i).sum()\n", - " if counts_on_this_ring > min_counts_on_ring:\n", - " n_peaks_expected += multiplicity\n", - " if multiplicity < max_multiplicity:\n", - " rings.append((counts_on_this_ring, multiplicity, i))\n", - " \n", - "rings.sort()\n", - "\n", - "print(f\"{n_peaks_expected} peaks expected\")\n", - "print(f\"Trying these rings (counts, multiplicity, ring number): {rings}\")\n", - "\n", - "# USER: specify the HKL tolerances you want to use for indexing\n", - "# hkl_tols_seq = [0.02, 0.03, 0.04, 0.05, 0.1] # BEST\n", + "# the sequence of hkl tolerances the indexer will iterate through\n", "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04]\n", - "\n", - "# USER: specify the fraction of the total expected peaks\n", - "# fracs = [0.9, 0.75] # BEST\n", + "# the sequence of minpks fractions the indexer will iterate through\n", "fracs = [0.9, 0.75]\n", + "# the tolerance in g-vector angle\n", + "cosine_tol = np.cos(np.radians(90.25))\n", + "# the max number of UBIs we can find per pair of rings\n", + "max_grains = 1000\n", "\n", - "# ImageD11.cImageD11.cimaged11_omp_set_num_threads(1)\n", - "ImageD11.indexing.loglevel=3\n", - "\n", - "# indexer.uniqueness = 0.3\n", - "indexer.cosine_tol = np.cos(np.radians(90.25))\n", - "\n", - "# iterate over HKL tolerances\n", - "for frac in fracs:\n", - " for tol in hkl_tols_seq:\n", - " indexer.minpks = n_peaks_expected*frac\n", - " indexer.hkl_tol = tol\n", - " \n", - " # iterate over rings\n", - " \n", - " for i in range(len(rings)):\n", - " for j in range(i, len(rings)):\n", - " indexer.ring_1 = rings[i][2]\n", - " indexer.ring_2 = rings[j][2]\n", - " \n", - " indexer.find()\n", - " indexer.scorethem() \n", - "\n", - " print(frac, tol, len(indexer.ubis))" + "grains, indexer = utils.do_index(cf=cf_strong,\n", + " dstol=indexer.ds_tol,\n", + " max_mult=max_multiplicity,\n", + " min_ring_count=min_counts_on_ring,\n", + " hkl_tols=hkl_tols_seq,\n", + " fracs=fracs,\n", + " cosine_tol=cosine_tol,\n", + " max_grains=max_grains\n", + ")" ] }, { @@ -556,47 +522,6 @@ "source": [ "# refined parameter file has now been created!" 
] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(12, 12))\n", - "ax = fig.add_subplot(projection='3d')\n", - "xx = [grain.translation[0] for grain in grains2]\n", - "yy = [grain.translation[1] for grain in grains2]\n", - "zz = [grain.translation[2] for grain in grains2]\n", - "# col = [utils.grain_to_rgb(grain) for grain in grains2]\n", - "col = [float(grain.npks) for grain in grains2]\n", - "sizes = [0.01*(float(grain.intensity_info.split(\"mean = \")[1].split(\" , \")[0].replace(\"'\", \"\"))) for grain in grains2]\n", - "scatterplot = ax.scatter(xx-np.mean(xx), yy-np.mean(yy), zz, c=col, s=sizes)\n", - "ax.set_xlim(-200,200)\n", - "ax.set_ylim(-200,200)\n", - "ax.set_zlim(-200,200)\n", - "plt.colorbar(scatterplot)\n", - "ax.set_title(\"Grains coloured by n peaks\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# refine_em" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb b/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb index e5d8b2a1..251b4fab 100755 --- a/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb +++ b/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb @@ -6,16 +6,14 @@ "source": [ "# Jupyter notebook based on ImageD11 to process 3DXRD data\n", "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 13/02/2024" + "## Date: 27/02/2024" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Now we have good experimental parameters, we can index more grains!\n", - "\n", - "For this example I have chosen a deformed dataset." + "Now we have good experimental parameters, we can index more grains!" 
] }, { @@ -62,7 +60,8 @@ "%matplotlib widget\n", "from matplotlib import pyplot as plt\n", "\n", - "import utils\n", + "# import utils\n", + "from ImageD11.nbGui import nb_utils as utils\n", "\n", "import ImageD11.grain\n", "import ImageD11.indexing\n", @@ -72,6 +71,34 @@ "from ImageD11.blobcorrector import eiger_spatial" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cmp = {'compression':'gzip',\n", + " 'compression_opts': 2,\n", + " 'shuffle' : True }\n", + "\n", + "def save_array(grp, name, ary):\n", + " hds = grp.require_dataset(name, \n", + " shape=ary.shape,\n", + " dtype=ary.dtype,\n", + " **cmp)\n", + " hds[:] = ary\n", + " return hds\n", + "\n", + "def save_grains(grains, ds):\n", + " with h5py.File(ds.grainsfile, 'w') as hout:\n", + " grn = hout.create_group('grains')\n", + " for g in tqdm(grains):\n", + " gg = grn.create_group(str(g.gid))\n", + " save_array(gg, 'peaks_3d_indexing', g.peaks_3d).attrs['description'] = \"Strong 3D peaks that were assigned to this grain during indexing\"\n", + " gg.attrs.update({'ubi':g.ubi,\n", + " 'translation':g.translation})" + ] + }, { "cell_type": "code", "execution_count": null, @@ -80,17 +107,13 @@ }, "outputs": [], "source": [ - "# NEW DATASETS\n", - "\n", "### USER: specify your experimental directory\n", "\n", - "base_dir = \"/data/visitor/ma5837/id11/20240208\"\n", - "\n", - "rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", "\n", "!ls -lrt {rawdata_path}\n", "\n", - "processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired" ] }, { @@ -103,8 +126,8 @@ "source": [ "# USER: pick a sample and a dataset you want to segment\n", "\n", - "sample = \"S12\"\n", - "dataset = \"FF_zeries_0\"" + "sample = \"FeAu_0p5_tR\"\n", + "dataset = \"ff1\"" ] }, { @@ -121,7 +144,7 @@ "\n", "# USER: specify the path to the parameter file\n", "\n", - "parfile = 'Fe_refined.par'" + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_tdxrd_refined.par'" ] }, { @@ -194,7 +217,7 @@ "\n", "# USER: modify the \"frac\" parameter below and re-run the cell until we have around 25,000 peaks for indexing\n", "\n", - "cf_strong = utils.selectpeaks(cf_3d, frac=0.6, dsmax=0.92, doplot=0.05, dstol=0.01)\n", + "cf_strong = utils.selectpeaks(cf_3d, frac=0.95, dsmax=0.92, doplot=0.05, dstol=0.01)\n", "print(f\"Got {cf_strong.nrows} strong peaks for indexing\")\n", "cf_strong.writefile(f'{sample}_{dataset}_3d_peaks_strong.flt')" ] @@ -210,7 +233,7 @@ "# we will also export some additional strong peaks across all rings\n", "# this will be useful for grain refinement later (using makemap)\n", "\n", - "cf_strong_allrings = utils.selectpeaks(cf_3d, frac=0.85, dsmax=cf_3d.ds.max(), doplot=0.8, dstol=0.01)\n", + "cf_strong_allrings = utils.selectpeaks(cf_3d, frac=0.95, dsmax=cf_3d.ds.max(), doplot=0.8, dstol=0.01)\n", "print(f\"Got {cf_strong_allrings.nrows} strong peaks for makemap\")\n", "cf_strong_allrings_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt'\n", "cf_strong_allrings.writefile(cf_strong_allrings_path)" @@ -311,60 +334,29 @@ }, "outputs": [], "source": [ - "%%time\n", - "\n", - "# we want to index low multiplicity rings\n", - 
"# choose max_multiplicity such that we get 2-3 low-multiplicity rings\n", - "# in this case, we will find orientations on 2 rings (faster for many peaks) and include the third ring for minpeaks calculations\n", - "\n", + "# now we are indexing!\n", + "# indexing will select all rings with a multiplicity below max_multiplity to search\n", "max_multiplicity = 13\n", - "\n", + "# the minimum number of peaks on a ring for a ring to be indexed on\n", "min_counts_on_ring = 0\n", - "\n", - "n_peaks_expected = 0\n", - "rings = []\n", - "for i, dstar in enumerate(indexer.unitcell.ringds):\n", - " multiplicity = len(indexer.unitcell.ringhkls[indexer.unitcell.ringds[i]])\n", - " counts_on_this_ring = (indexer.ra == i).sum()\n", - " if counts_on_this_ring > min_counts_on_ring:\n", - " n_peaks_expected += multiplicity\n", - " if multiplicity < max_multiplicity:\n", - " rings.append((counts_on_this_ring, multiplicity, i))\n", - " \n", - "rings.sort()\n", - "\n", - "print(f\"{n_peaks_expected} peaks expected\")\n", - "print(f\"Trying these rings (counts, multiplicity, ring number): {rings}\")\n", - "\n", - "# USER: specify the HKL tolerances you want to use for indexing\n", + "# the sequence of hkl tolerances the indexer will iterate through\n", "hkl_tols_seq = [0.01, 0.02, 0.03, 0.04, 0.05, 0.1]\n", - "\n", - "# USER: specify the fraction of the total expected peaks\n", + "# the sequence of minpks fractions the indexer will iterate through\n", "fracs = [0.9, 0.75]\n", + "# the tolerance in g-vector angle\n", + "cosine_tol = np.cos(np.radians(90.25))\n", + "# the max number of UBIs we can find per pair of rings\n", + "max_grains = 1000\n", "\n", - "# ImageD11.cImageD11.cimaged11_omp_set_num_threads(1)\n", - "ImageD11.indexing.loglevel=3\n", - "\n", - "# indexer.uniqueness = 0.3\n", - "indexer.cosine_tol = np.cos(np.radians(90.25))\n", - "\n", - "# iterate over HKL tolerances\n", - "for frac in fracs:\n", - " for tol in hkl_tols_seq:\n", - " indexer.minpks = n_peaks_expected*frac\n", - " indexer.hkl_tol = tol\n", - " \n", - " # iterate over rings\n", - " \n", - " for i in range(len(rings)):\n", - " for j in range(i, len(rings)):\n", - " indexer.ring_1 = rings[i][2]\n", - " indexer.ring_2 = rings[j][2]\n", - " \n", - " indexer.find()\n", - " indexer.scorethem() \n", - "\n", - " print(frac, tol, len(indexer.ubis))" + "grains, indexer = utils.do_index(cf=cf_strong,\n", + " dstol=indexer.ds_tol,\n", + " max_mult=max_multiplicity,\n", + " min_ring_count=min_counts_on_ring,\n", + " hkl_tols=hkl_tols_seq,\n", + " fracs=fracs,\n", + " cosine_tol=cosine_tol,\n", + " max_grains=max_grains\n", + ")" ] }, { @@ -596,19 +588,7 @@ "source": [ "tol = 0.05\n", "\n", - "# column to store the grain labels\n", - "labels = np.zeros(cf_strong_allrings.nrows, 'i')\n", - "# get all g-vectors from columnfile\n", - "gv = np.transpose((cf_strong_allrings.gx, cf_strong_allrings.gy, cf_strong_allrings.gz)).astype(float)\n", - "# column to store drlv2 (error in hkl)\n", - "drlv2 = np.ones(cf_strong_allrings.nrows, 'd')\n", - "# iterate over all grains\n", - "print(f\"Scoring and assigning {len(grains_filtered)} grains\")\n", - "for g in tqdm(grains_filtered):\n", - " n = ImageD11.cImageD11.score_and_assign(g.ubi, gv, tol, drlv2, labels, g.gid)\n", - "\n", - "# add the labels column to the columnfile\n", - "cf_strong_allrings.addcolumn(labels, 'grain_id')\n", + "utils.assign_peaks_to_grains(grains_filtered, cf_strong_allrings, tol)\n", "\n", "print(\"Storing peak data in grains\")\n", "# iterate through all the grains\n", @@ -632,36 
@@ -632,36 +612,6 @@
     "print(np.mean([np.mean(grain.unitcell[0:3]) for grain in grains_filtered]))"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "cmp = {'compression':'gzip',\n",
-    "       'compression_opts': 2,\n",
-    "       'shuffle' : True }\n",
-    "\n",
-    "def save_array(grp, name, ary):\n",
-    "    hds = grp.require_dataset(name, \n",
-    "                              shape=ary.shape,\n",
-    "                              dtype=ary.dtype,\n",
-    "                              **cmp)\n",
-    "    hds[:] = ary\n",
-    "    return hds\n",
-    "\n",
-    "def save_grains(grains, ds):\n",
-    "    with h5py.File(ds.grainsfile, 'w') as hout:\n",
-    "        grn = hout.create_group('grains')\n",
-    "        for g in tqdm(grains):\n",
-    "            gg = grn.create_group(str(g.gid))\n",
-    "            save_array(gg, 'peaks_3d_indexing', g.peaks_3d).attrs['description'] = \"Strong 3D peaks that were assigned to this grain during indexing\"\n",
-    "            gg.attrs.update({'ubi':g.ubi,\n",
-    "                             'translation':g.translation})"
-   ]
-  },
  {
   "cell_type": "code",
   "execution_count": null,
@@ -689,203 +639,149 @@
  {
   "cell_type": "code",
   "execution_count": null,
-   "metadata": {
-    "tags": []
-   },
+   "metadata": {},
   "outputs": [],
   "source": [
-    "def index_dataset(ds):\n",
-    "    cf_strong_frac = 0.6\n",
-    "    cf_strong_dsmax = 0.92\n",
-    "    cf_strong_dstol = 0.01\n",
-    "    \n",
-    "    cf_strong_allrings_frac = 0.85\n",
-    "    cf_strong_allrings_dstol = 0.01\n",
-    "    \n",
-    "    indexer_dstol = 0.05\n",
-    "    indexer_max_mult = 13\n",
-    "    indexer_hkl_tols = [0.01, 0.02, 0.03, 0.04, 0.05, 0.1]\n",
-    "    indexer_fracs = [0.9, 0.75]\n",
-    "    indexer_cosine_tol = np.cos(np.radians(90.25))\n",
-    "    indexer_max_grains = 1000\n",
-    "    \n",
-    "    makemap_min_ring_counts = 0\n",
-    "    makemap_hkl_tol_seq = [0.05, 0.025, 0.01]\n",
-    "    makemap_import_minpks = 30\n",
-    "    \n",
-    "    peak_assignment_hkl_tol = 0.05\n",
-    "    \n",
-    "    \n",
-    "    print(\"Loading 3D peaks\")\n",
-    "    cf_3d = ImageD11.columnfile.colfile_from_hdf(ds.col3dfile)\n",
-    "    cf_3d.parameters.loadparameters(parfile)\n",
-    "    cf_3d.updateGeometry()\n",
-    "    if \"index\" not in cf_3d.titles:\n",
-    "        cf_3d.addcolumn(np.arange(cf_3d.nrows), \"index\")\n",
-    "\n",
-    "    print(\"Filtering 3D peaks\")\n",
-    "    cf_strong = utils.selectpeaks(cf_3d, frac=cf_strong_frac, dsmax=cf_strong_dsmax, doplot=None, dstol=cf_strong_dstol)\n",
-    "    print(f\"Got {cf_strong.nrows} strong peaks for indexing\")\n",
-    "    cf_strong_path = f'{sample}_{dataset}_3d_peaks_strong.flt'\n",
-    "    cf_strong.writefile(cf_strong_path)\n",
-    "\n",
-    "    cf_strong_allrings = utils.selectpeaks(cf_3d, frac=cf_strong_allrings_frac, dsmax=cf_3d.ds.max(), doplot=None, dstol=cf_strong_allrings_dstol)\n",
-    "    print(f\"Got {cf_strong_allrings.nrows} strong peaks for makemap\")\n",
-    "    cf_strong_allrings_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt'\n",
-    "    cf_strong_allrings.writefile(cf_strong_allrings_path)\n",
-    "\n",
-    "    print(f\"Indexing {cf_strong.nrows} peaks\")\n",
-    "    Fe = ImageD11.unitcell.unitcell_from_parameters(cf_strong.parameters)\n",
-    "    Fe.makerings(cf_strong.ds.max())\n",
-    "    indexer = ImageD11.indexing.indexer_from_colfile(cf_strong)\n",
-    "\n",
-    "    ImageD11.indexing.loglevel = 3\n",
+    "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
+    "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
+    "# you can add samples and datasets to skip\n",
+    "\n",
+    "skips_dict = {\n",
+    "    \"FeAu_0p5_tR\": []\n",
+    "}\n",
+    "\n",
+    "dset_prefix = \"ff\"\n",
+    "\n",
+    "sample_list = [\"FeAu_0p5_tR\"]\n",
     " 
\n", - " indexer.ds_tol = indexer_dstol\n", - " indexer.assigntorings()\n", - " indexer.max_grains = indexer_max_grains\n", + "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n", "\n", - " max_multiplicity = indexer_max_mult\n", - " min_counts_on_ring = makemap_min_ring_counts\n", + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_tdxrd_refined.par'\n", "\n", - " n_peaks_expected = 0\n", - " rings = []\n", - " for i, dstar in enumerate(indexer.unitcell.ringds):\n", - " multiplicity = len(indexer.unitcell.ringhkls[indexer.unitcell.ringds[i]])\n", - " counts_on_this_ring = (indexer.ra == i).sum()\n", - " if counts_on_this_ring > min_counts_on_ring:\n", - " n_peaks_expected += multiplicity\n", - " if multiplicity < max_multiplicity:\n", - " rings.append((counts_on_this_ring, multiplicity, i))\n", + " \n", + "cf_strong_frac = 0.95\n", + "cf_strong_dsmax = 0.92\n", + "cf_strong_dstol = 0.01\n", "\n", - " rings.sort()\n", + "cf_strong_allrings_frac = 0.95\n", + "cf_strong_allrings_dstol = 0.01\n", "\n", - " print(f\"{n_peaks_expected} peaks expected\")\n", - " print(f\"Trying these rings (counts, multiplicity, ring number): {rings}\")\n", - " hkl_tols_seq = indexer_hkl_tols\n", - " fracs = indexer_fracs\n", - " indexer.cosine_tol = indexer_cosine_tol\n", + "indexer_dstol = 0.05\n", + "indexer_max_mult = 13\n", + "indexer_hkl_tols = [0.01, 0.02, 0.03, 0.04, 0.05, 0.1]\n", + "indexer_fracs = [0.9, 0.75]\n", + "indexer_cosine_tol = np.cos(np.radians(90.25))\n", + "indexer_max_grains = 1000\n", + "indexer_min_ring_count = 0\n", "\n", - " for frac in fracs:\n", - " for tol in hkl_tols_seq:\n", - " indexer.minpks = n_peaks_expected*frac\n", - " indexer.hkl_tol = tol\n", - " for i in range(len(rings)):\n", - " for j in range(i, len(rings)):\n", - " indexer.ring_1 = rings[i][2]\n", - " indexer.ring_2 = rings[j][2]\n", + "makemap_hkl_tol_seq = [0.05, 0.025, 0.01]\n", + "makemap_import_minpks = 30\n", "\n", - " indexer.find()\n", - " indexer.scorethem()\n", + "peak_assignment_hkl_tol = 0.05\n", "\n", + "for sample, datasets in samples_dict.items():\n", + " for dataset in datasets:\n", + " print(f\"Processing dataset {dataset} in sample {sample}\")\n", + " print(\"Importing DataSet object\")\n", + " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", + " ds = ImageD11.sinograms.dataset.load(dset_path)\n", + " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", + " \n", + " if os.path.exists(ds.grainsfile):\n", + " print(f\"Found existing grains file for {dataset} in {sample}, skipping\")\n", + " continue\n", + "\n", + "\n", + " print(\"Loading 3D peaks\")\n", + " cf_3d = ImageD11.columnfile.colfile_from_hdf(ds.col3dfile)\n", + " cf_3d.parameters.loadparameters(parfile)\n", + " cf_3d.updateGeometry()\n", + " if \"index\" not in cf_3d.titles:\n", + " cf_3d.addcolumn(np.arange(cf_3d.nrows), \"index\")\n", + "\n", + " print(\"Filtering 3D peaks\")\n", + " cf_strong = utils.selectpeaks(cf_3d, frac=cf_strong_frac, dsmax=cf_strong_dsmax, doplot=None, dstol=cf_strong_dstol)\n", + " print(f\"Got {cf_strong.nrows} strong peaks for indexing\")\n", + " cf_strong_path = f'{sample}_{dataset}_3d_peaks_strong.flt'\n", + " cf_strong.writefile(cf_strong_path)\n", + "\n", + " cf_strong_allrings = utils.selectpeaks(cf_3d, frac=cf_strong_allrings_frac, dsmax=cf_3d.ds.max(), doplot=None, dstol=cf_strong_allrings_dstol)\n", + " print(f\"Got 
{cf_strong_allrings.nrows} strong peaks for makemap\")\n", + " cf_strong_allrings_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt'\n", + " cf_strong_allrings.writefile(cf_strong_allrings_path)\n", + " \n", + " grains, indexer = utils.do_index(cf=cf_strong,\n", + " dstol=indexer_dstol,\n", + " max_mult=indexer_max_mult,\n", + " min_ring_count=indexer_min_ring_count,\n", + " hkl_tols=indexer_hkl_tols,\n", + " fracs=indexer_fracs,\n", + " cosine_tol=indexer_cosine_tol,\n", + " max_grains=indexer_max_grains\n", + " )\n", "\n", - " grains = [ImageD11.grain.grain(ubi, translation=np.array([0., 0., 0.])) for ubi in indexer.ubis]\n", - " print(f\"Found {len(grains)} grains\")\n", + " grains = [ImageD11.grain.grain(ubi, translation=np.array([0., 0., 0.])) for ubi in indexer.ubis]\n", "\n", - " for i, g in enumerate(grains):\n", - " g.gid = i\n", + " for i, g in enumerate(grains):\n", + " g.gid = i\n", "\n", - " tmp_ubi_path = f'{sample}_{dataset}_grains.ubi'\n", - " tmp_map_path = f'{sample}_{dataset}_grains.map'\n", + " tmp_ubi_path = f'{sample}_{dataset}_grains.ubi'\n", + " tmp_map_path = f'{sample}_{dataset}_grains.map'\n", "\n", - " new_flt_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt.new' # flt file containing assignments from makemap\n", - " unindexed_flt_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt.unindexed' # remaining unassigned peaks from makemap\n", + " new_flt_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt.new' # flt file containing assignments from makemap\n", + " unindexed_flt_path = f'{sample}_{dataset}_3d_peaks_strong_all_rings.flt.unindexed' # remaining unassigned peaks from makemap\n", "\n", - " ImageD11.grain.write_grain_file(tmp_ubi_path, grains)\n", + " ImageD11.grain.write_grain_file(tmp_ubi_path, grains)\n", "\n", - " omegas_sorted = np.sort(ds.omega)[0]\n", - " omega_slop = np.round(np.diff(omegas_sorted).mean(), 3)\n", + " omegas_sorted = np.sort(ds.omega)[0]\n", + " omega_slop = np.round(np.diff(omegas_sorted).mean(), 3)\n", "\n", - " makemap_hkl_tol_seq = makemap_hkl_tol_seq\n", + " makemap_hkl_tol_seq = makemap_hkl_tol_seq\n", "\n", - " for inc, makemap_tol in enumerate(makemap_hkl_tol_seq):\n", - " print(f\"Running makemap {inc+1}/{len(makemap_hkl_tol_seq)}\")\n", - " if inc == 0: # ubi into map\n", - " makemap_output = !makemap.py -p {parfile} -u {tmp_ubi_path} -U {tmp_map_path} -f {cf_strong_allrings_path} -F {unindexed_flt_path} -s cubic -t {makemap_hkl_tol_seq[inc]} --omega_slop={omega_slop} --no_sort\n", - " else: # map into map\n", - " makemap_output = !makemap.py -p {parfile} -u {tmp_map_path} -U {tmp_map_path} -f {cf_strong_allrings_path} -F {unindexed_flt_path} -s cubic -t {makemap_hkl_tol_seq[inc]} --omega_slop={omega_slop} --no_sort\n", + " for inc, makemap_tol in enumerate(makemap_hkl_tol_seq):\n", + " print(f\"Running makemap {inc+1}/{len(makemap_hkl_tol_seq)}\")\n", + " if inc == 0: # ubi into map\n", + " makemap_output = !makemap.py -p {parfile} -u {tmp_ubi_path} -U {tmp_map_path} -f {cf_strong_allrings_path} -F {unindexed_flt_path} -s cubic -t {makemap_hkl_tol_seq[inc]} --omega_slop={omega_slop} --no_sort\n", + " else: # map into map\n", + " makemap_output = !makemap.py -p {parfile} -u {tmp_map_path} -U {tmp_map_path} -f {cf_strong_allrings_path} -F {unindexed_flt_path} -s cubic -t {makemap_hkl_tol_seq[inc]} --omega_slop={omega_slop} --no_sort\n", "\n", - " grains2 = ImageD11.grain.read_grain_file(tmp_map_path)\n", - " absolute_minpks = makemap_import_minpks\n", - " grains_filtered = [grain for grain in 
grains2 if float(grain.npks) > absolute_minpks]\n", + " grains2 = ImageD11.grain.read_grain_file(tmp_map_path)\n", + " absolute_minpks = makemap_import_minpks\n", + " grains_filtered = [grain for grain in grains2 if float(grain.npks) > absolute_minpks]\n", "\n", - " for grain in grains_filtered:\n", - " grain.gid = int(grain.name.split(\":\")[0])\n", + " for grain in grains_filtered:\n", + " grain.gid = int(grain.name.split(\":\")[0])\n", "\n", - " tol = peak_assignment_hkl_tol\n", + " tol = peak_assignment_hkl_tol\n", "\n", - " labels = np.zeros(cf_strong_allrings.nrows, 'i')\n", - " gv = np.transpose((cf_strong_allrings.gx, cf_strong_allrings.gy, cf_strong_allrings.gz)).astype(float)\n", - " drlv2 = np.ones(cf_strong_allrings.nrows, 'd')\n", - " print(f\"Scoring and assigning {len(grains_filtered)} grains\")\n", - " for g in tqdm(grains_filtered):\n", - " n = ImageD11.cImageD11.score_and_assign(g.ubi, gv, tol, drlv2, labels, g.gid)\n", + " utils.assign_peaks_to_grains(grains_filtered, cf_strong_allrings, tol)\n", "\n", - " cf_strong_allrings.addcolumn(labels, 'grain_id')\n", + " print(\"Storing peak data in grains\")\n", + " # iterate through all the grains\n", + " for g in tqdm(grains_filtered):\n", + " # store this grain's peak indices so we know which 4D peaks we used for indexing\n", + " g.peaks_3d = cf_strong_allrings.index[cf_strong_allrings.grain_id == g.gid]\n", "\n", - " print(\"Storing peak data in grains\")\n", - " for g in tqdm(grains_filtered):\n", - " g.peaks_3d = cf_strong_allrings.index[cf_strong_allrings.grain_id == g.gid]\n", - " \n", - " print(\"Saving grains\")\n", - " save_grains(grains_filtered, ds)\n", - " \n", - " if os.path.exists(cf_strong_path):\n", - " os.remove(cf_strong_path)\n", - " \n", - " if os.path.exists(cf_strong_allrings_path):\n", - " os.remove(cf_strong_allrings_path)\n", - " \n", - " if os.path.exists(tmp_ubi_path):\n", - " os.remove(tmp_ubi_path)\n", - " \n", - " if os.path.exists(tmp_map_path):\n", - " os.remove(tmp_map_path)\n", - " \n", - " if os.path.exists(new_flt_path):\n", - " os.remove(new_flt_path)\n", - " \n", - " if os.path.exists(unindexed_flt_path):\n", - " os.remove(unindexed_flt_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n", - "# just modify samples_dict accordingly!\n", - "\n", - "parfile = 'Fe_refined.par'\n", - "\n", - "samples_dict = {\n", - " \"S13\" : [\n", - " \"FF_zeries_0\",\n", - " \"FF_zeries_1\",\n", - " \"FF_zeries_2\",\n", - " \"FF_zeries_3\",\n", - " \"FF_zeries_4\",\n", - " ],\n", - " \"S14\" : [\n", - " \"FF_zeries_0\",\n", - " \"FF_zeries_1\",\n", - " \"FF_zeries_2\",\n", - " \"FF_zeries_3\",\n", - " \"FF_zeries_4\",\n", - " ],\n", - "}\n", + " print(\"Saving grains\")\n", + " save_grains(grains_filtered, ds)\n", "\n", - "for sample, datasets in samples_dict.items():\n", - " for dataset in datasets:\n", - " print(f\"Processing dataset {dataset} in sample {sample}\")\n", - " print(\"Importing DataSet object\")\n", - " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", - " ds = ImageD11.sinograms.dataset.load(dset_path)\n", - " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", - " \n", - " index_dataset(ds)" + " if os.path.exists(cf_strong_path):\n", + " os.remove(cf_strong_path)\n", + "\n", + " if 
os.path.exists(cf_strong_allrings_path):\n", + " os.remove(cf_strong_allrings_path)\n", + "\n", + " if os.path.exists(tmp_ubi_path):\n", + " os.remove(tmp_ubi_path)\n", + "\n", + " if os.path.exists(tmp_map_path):\n", + " os.remove(tmp_map_path)\n", + "\n", + " if os.path.exists(new_flt_path):\n", + " os.remove(new_flt_path)\n", + "\n", + " if os.path.exists(unindexed_flt_path):\n", + " os.remove(unindexed_flt_path)" ] }, { diff --git a/ImageD11/nbGui/3DXRD/3_3DXRD_look_at_peaks.ipynb b/ImageD11/nbGui/3DXRD/3_3DXRD_look_at_peaks.ipynb index 20cb93ef..373e0649 100755 --- a/ImageD11/nbGui/3DXRD/3_3DXRD_look_at_peaks.ipynb +++ b/ImageD11/nbGui/3DXRD/3_3DXRD_look_at_peaks.ipynb @@ -71,17 +71,13 @@ }, "outputs": [], "source": [ - "# NEW DATASETS\n", - "\n", "### USER: specify your experimental directory\n", "\n", - "base_dir = \"/data/visitor/ma5837/id11/20240208\"\n", - "\n", - "rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", "\n", "!ls -lrt {rawdata_path}\n", "\n", - "processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired" ] }, { diff --git a/ImageD11/nbGui/3DXRD/4_3DXRD_merge_slices.ipynb b/ImageD11/nbGui/3DXRD/4_3DXRD_merge_slices.ipynb index 7817c25e..f278b3fe 100755 --- a/ImageD11/nbGui/3DXRD/4_3DXRD_merge_slices.ipynb +++ b/ImageD11/nbGui/3DXRD/4_3DXRD_merge_slices.ipynb @@ -53,7 +53,8 @@ "%matplotlib widget\n", "from matplotlib import pyplot as plt\n", "\n", - "import utils\n", + "# import utils\n", + "from ImageD11.nbGui import nb_utils as utils\n", "\n", "import ImageD11.grain\n", "import ImageD11.indexing\n", @@ -71,17 +72,13 @@ }, "outputs": [], "source": [ - "# NEW DATASETS\n", - "\n", "### USER: specify your experimental directory\n", "\n", - "base_dir = \"/data/visitor/ma5837/id11/20240208\"\n", - "\n", - "rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n", + "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", "\n", "!ls -lrt {rawdata_path}\n", "\n", - "processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired" + "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired" ] }, { @@ -94,9 +91,15 @@ "source": [ "# USER: pick a sample you want to import\n", "\n", - "sample = \"S14\"\n", + "skips_dict = {\n", + " \"FeAu_0p5_tR\": []\n", + "}\n", + "\n", + "dset_prefix = \"ff\"\n", "\n", - "datasets = [\"FF_zeries_0\", \"FF_zeries_1\", \"FF_zeries_2\", \"FF_zeries_3\", \"FF_zeries_4\"]" + "sample_list = [\"FeAu_0p5_tR\"]\n", + "\n", + "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)" ] }, { @@ -111,8 +114,18 @@ "\n", "from collections import OrderedDict\n", "\n", + "\n", + "# just take first sample for now\n", + "\n", + "sample = sample_list[0]\n", + "datasets = samples_dict[sample]\n", "ds_dict = OrderedDict()\n", - "for dataset in datasets:\n", + "\n", + "# try to sort datasets alphabetically\n", + "\n", + "datasets_sorted = sorted(datasets)\n", + "\n", + "for dataset in datasets_sorted:\n", " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", 
f\"{sample}_{dataset}_dataset.h5\")\n", " ds = ImageD11.sinograms.dataset.load(dset_path)\n", " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", diff --git a/ImageD11/nbGui/3DXRD/CeO2.par b/ImageD11/nbGui/3DXRD/CeO2.par deleted file mode 100644 index 6ab0c045..00000000 --- a/ImageD11/nbGui/3DXRD/CeO2.par +++ /dev/null @@ -1,30 +0,0 @@ -cell__a 5.411 -cell__b 5.411 -cell__c 5.411 -cell_alpha 90.0 -cell_beta 90.0 -cell_gamma 90.0 -cell_lattice_[P,A,B,C,I,F,R] F -chi 0.0 -distance 190674.58355643408 -fit_tolerance 0.1 -min_bin_prob 1e-05 -no_bins 10000 -o11 1 -o12 0 -o21 0 -o22 -1 -omegasign 1.0 -t_x 0 -t_y 0 -t_z 0 -tilt_x 0.0 -tilt_y 0.002393893064996147 -tilt_z 0.0033759592569128647 -wavelength 0.18972 -wedge 0.0 -weight_hist_intensities 0 -y_center 1060.0551606107529 -y_size 47 -z_center 990.2991735064492 -z_size 47 diff --git a/ImageD11/nbGui/3DXRD/CeO2_20240210_1348_175mm.poni b/ImageD11/nbGui/3DXRD/CeO2_20240210_1348_175mm.poni deleted file mode 100644 index d2e80e99..00000000 --- a/ImageD11/nbGui/3DXRD/CeO2_20240210_1348_175mm.poni +++ /dev/null @@ -1,12 +0,0 @@ -# Nota: C-Order, 1 refers to the Y axis, 2 to the X axis -# Calibration done at Sat Feb 10 14:51:08 2024 -poni_version: 2 -Detector: FReLoN -Detector_config: {"splineFile": "/gpfs/jazzy/data/visitor/ma5837/id11/20240208/PROCESSED_DATA/CeO2/CeO2_20240210_1348_175mm/frelon36.spline"} -Distance: 0.1906845262656003 -Poni1: 0.04554793403539409 -Poni2: 0.049322695596799054 -Rot1: -0.002797223871659976 -Rot2: 0.005288319414788627 -Rot3: -1.3372706764104528e-07 -Wavelength: 1.8972100937770793e-11 diff --git a/ImageD11/nbGui/3DXRD/SiO2_mp-7000_conventional_standard.cif b/ImageD11/nbGui/3DXRD/SiO2_mp-7000_conventional_standard.cif deleted file mode 100755 index f3564d43..00000000 --- a/ImageD11/nbGui/3DXRD/SiO2_mp-7000_conventional_standard.cif +++ /dev/null @@ -1,35 +0,0 @@ -# generated using pymatgen -data_SiO2 -_symmetry_space_group_name_H-M 'P 1' -_cell_length_a 5.02150261 -_cell_length_b 5.02150261 -_cell_length_c 5.51057000 -_cell_angle_alpha 90.00000000 -_cell_angle_beta 90.00000000 -_cell_angle_gamma 120.00000000 -_symmetry_Int_Tables_number 1 -_chemical_formula_structural SiO2 -_chemical_formula_sum 'Si3 O6' -_cell_volume 120.33571438 -_cell_formula_units_Z 3 -loop_ - _symmetry_equiv_pos_site_id - _symmetry_equiv_pos_as_xyz - 1 'x, y, z' -loop_ - _atom_site_type_symbol - _atom_site_label - _atom_site_symmetry_multiplicity - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - _atom_site_occupancy - Si Si0 1 0.52369500 0.52369500 0.00000000 1 - Si Si1 1 0.00000000 0.47630500 0.66666667 1 - Si Si2 1 0.47630500 0.00000000 0.33333333 1 - O O3 1 0.25609400 0.41485400 0.79454300 1 - O O4 1 0.58514600 0.84124000 0.12787633 1 - O O5 1 0.15876000 0.74390600 0.46120967 1 - O O6 1 0.41485400 0.25609400 0.20545700 1 - O O7 1 0.74390600 0.15876000 0.53879033 1 - O O8 1 0.84124000 0.58514600 0.87212367 1 diff --git a/ImageD11/nbGui/3DXRD/frelon_peaksearch.py b/ImageD11/nbGui/3DXRD/frelon_peaksearch.py index 1a9593ba..afd7a275 100755 --- a/ImageD11/nbGui/3DXRD/frelon_peaksearch.py +++ b/ImageD11/nbGui/3DXRD/frelon_peaksearch.py @@ -92,19 +92,18 @@ def do3dmerge(cf_2d_dict, n, omega): class worker: """ subtracts background, custom for ma4750 """ - def __init__(self, bgfile): + def __init__(self, bgfile, threshold=50, smoothsigma=1., bgc=0.9, minpx=3, m_offset_thresh=80, m_ratio_thresh=135): self.bg = fabio.open(bgfile).data - self.threshold = 50 # was 50 # ADU to zero out image - self.smoothsigma = 1. 
# sigma for Gaussian before labelleing - self.bgc = 0.9 # fractional part of bg per peak to remove - self.minpx = 3 + self.threshold = threshold # was 50 # ADU to zero out image + self.smoothsigma = smoothsigma # sigma for Gaussian before labelleing + self.bgc = bgc # fractional part of bg per peak to remove + self.minpx = minpx - self.m_offset = self.bg < 80 + self.m_offset = self.bg < m_offset_thresh self.mbg = np.mean(self.bg[self.m_offset]) - # self.m_ratio = self.bg > 135 # best for undeformed data - self.m_ratio = self.bg > 200 + self.m_ratio = self.bg > m_ratio_thresh self.bg -= self.mbg # remove dark self.invbg = 1 / self.bg[self.m_ratio] @@ -171,9 +170,9 @@ def get_dset(h5name, dsetname): def pps(arg): - hname, dsetname, num, omega, bgfile = arg + hname, dsetname, num, omega, bgfile, worker_args = arg if pps.worker is None: - pps.worker = worker(bgfile) + pps.worker = worker(bgfile, *worker_args) frm = get_dset(hname, dsetname)[num] pks = pps.worker.peaksearch(frm, omega=omega) return num, pks @@ -185,7 +184,7 @@ def pps(arg): PKSAVE = ["s_1", "s_I", "s_I2", "s_fI", "s_ffI", "s_sI", "s_ssI", "s_sfI", "s_oI", "s_ooI", "s_soI", "s_foI", "mx_I", "mx_I_f", "mx_I_s", "mx_I_o", "bb_mx_f", "bb_mx_s", "bb_mx_o", "bb_mn_f", "bb_mn_s", "bb_mn_o", "avg_i", "f_raw", "s_raw", "o_raw", "m_ss", "m_ff", "m_oo", "m_sf", "m_so", "m_fo"] PKCOL = [getattr(ImageD11.cImageD11, p) for p in PKSAVE] -def process(ds, bgfile, ncpu): +def process(ds, bgfile, ncpu, worker_args): hname = ds.masterfile scan_name = ds.scans[0] frames_dset = scan_name + "/measurement/" + ds.detector @@ -193,7 +192,7 @@ def process(ds, bgfile, ncpu): n_frames = omega.shape[0] - args = [(hname, frames_dset, i, omega[i], bgfile) for i in range(n_frames)] + args = [(hname, frames_dset, i, omega[i], bgfile, worker_args) for i in range(n_frames)] all_peaks = process_map(pps, args, chunksize=1) diff --git a/ImageD11/nbGui/3DXRD/output.par b/ImageD11/nbGui/3DXRD/output.par deleted file mode 100644 index a3a6b024..00000000 --- a/ImageD11/nbGui/3DXRD/output.par +++ /dev/null @@ -1,30 +0,0 @@ -cell__a 5.02150261 -cell__b 5.02150261 -cell__c 5.51057 -cell_alpha 90.0 -cell_beta 90.0 -cell_gamma 120.0 -cell_lattice_[P,A,B,C,I,F,R] P -chi 0.0 -distance 190687.93868384438 -fit_tolerance 0.1 -min_bin_prob 1e-05 -no_bins 10000 -o11 1 -o12 0 -o21 0 -o22 -1 -omegasign 1.0 -t_x 0 -t_y 0 -t_z 0 -tilt_x -1.3372706764104528e-07 -tilt_y 0.005288319414788627 -tilt_z 0.002797223871659976 -wavelength 0.18972100937770794 -wedge 0.0 -weight_hist_intensities 0 -y_center 1060.7677509954437 -y_size 47.0 -z_center 990.560597093928 -z_size 47.0 \ No newline at end of file diff --git a/ImageD11/nbGui/3DXRD/utils.py b/ImageD11/nbGui/3DXRD/utils.py deleted file mode 100755 index 59a42dd8..00000000 --- a/ImageD11/nbGui/3DXRD/utils.py +++ /dev/null @@ -1,285 +0,0 @@ -import numpy as np -import numba - -from tqdm.contrib.concurrent import process_map -import ImageD11.unitcell -import ImageD11.refinegrains -import ImageD11.blobcorrector -from ImageD11.blobcorrector import eiger_spatial - - -from matplotlib import pyplot as plt - - -def correct_pixel(pixel, spline_file): - sr, fr = pixel - sc, fc = ImageD11.blobcorrector.correctorclass(spline_file).correct(sr, fr) - return (sc, fc) - - -def apply_spatial(cf, spline_file): - # sc = np.zeros(cf.nrows) - # fc = np.zeros(cf.nrows) - - print("Spatial correction...") - - raw_pixels = np.vstack((cf['s_raw'], cf['f_raw'])).T - - corrected_pixels = process_map(correct_pixel, raw_pixels, [spline_file] * len(raw_pixels), 
max_workers=63, chunksize=len(raw_pixels)//63) - - sc, fc = [list(t) for t in zip(*corrected_pixels)] - - cf.addcolumn(sc, "sc") - cf.addcolumn(fc, "fc") - - return cf - -def grain_to_rgb(g, ax=(0,0,1)): - return hkl_to_color_cubic(crystal_direction_cubic(g.ubi, ax)) - -def crystal_direction_cubic(ubi, axis): - hkl = np.dot(ubi, axis) - # cubic symmetry implies: - # 24 permutations of h,k,l - # one has abs(h) <= abs(k) <= abs(l) - hkl= abs(hkl) - hkl.sort() - return hkl - -def hkl_to_color_cubic(hkl): - """ - https://mathematica.stackexchange.com/questions/47492/how-to-create-an-inverse-pole-figure-color-map - [x,y,z]=u⋅[0,0,1]+v⋅[0,1,1]+w⋅[1,1,1]. - These are: - u=z−y, v=y−x, w=x - This triple is used to assign each direction inside the standard triangle - - makeColor[{x_, y_, z_}] := - RGBColor @@ ({z - y, y - x, x}/Max@{z - y, y - x, x}) - """ - x,y,z = hkl - assert x<=y<=z - assert z>=0 - u,v,w = z-y, y-x, x - m = max( u, v, w ) - r,g,b = u/m, v/m, w/m - return (r,g,b) - -def hkl_to_pf_cubic(hkl): - x,y,z = hkl - assert x<=y<=z - assert z>=0 - m = np.sqrt((hkl**2).sum()) - return x/(z+m), y/(z+m) - -def triangle(): - """ compute a series of point on the edge of the triangle """ - xy = [ np.array(v) for v in ( (0,1,1), (0,0,1), (1,1,1)) ] - xy += [ xy[2]*(1-t) + xy[0]*t for t in np.linspace(0.1,1,5)] - return np.array( [hkl_to_pf_cubic( np.array(p) ) for p in xy] ) - - -def calcy(cos_omega, sin_omega, sol): - return sol[0] + cos_omega*sol[1] + sin_omega*sol[2] - -def fity(y, cos_omega, sin_omega, wt=1): - """ - Fit a sinogram to get a grain centroid - # calc = d0 + x*co + y*so - # dc/dpar : d0 = 1 - # : x = co - # : y = so - # gradients - # What method is being used here??????????? - """ - g = [wt*np.ones(y.shape, float), wt*cos_omega, wt*sin_omega] - nv = len(g) - m = np.zeros((nv,nv),float) - r = np.zeros( nv, float ) - for i in range(nv): - r[i] = np.dot( g[i], wt * y ) - for j in range(i,nv): - m[i,j] = np.dot( g[i], g[j] ) - m[j,i] = m[i,j] - sol = np.dot(np.linalg.inv(m), r) - return sol - -def fity_robust(dty, co, so, nsigma=5, doplot=False): - # NEEDS COMMENTING - cen, dx, dy = fity(dty, co, so) - calc2 = calc1 = calcy(co, so, (cen, dx, dy)) - # mask for columnfile, we're selecting specific 4D peaks - # that come from the right place in y, I think? 
- selected = np.ones(co.shape, bool) - for i in range(3): - err = dty - calc2 - estd = max( err[selected].std(), 1.0 ) # 1 micron - #print(i,estd) - es = estd*nsigma - selected = abs(err) < es - cen, dx, dy = fity( dty, co, so, selected.astype(float) ) - calc2 = calcy(co, so, (cen, dx, dy)) - # bad peaks are > 5 sigma - if doplot: - f, a = plt.subplots(1,2) - theta = np.arctan2( so, co ) - a[0].plot(theta, calc1, ',') - a[0].plot(theta, calc2, ',') - a[0].plot(theta[selected], dty[selected], "o") - a[0].plot(theta[~selected], dty[~selected], 'x') - a[1].plot(theta[selected], (calc2 - dty)[selected], 'o') - a[1].plot(theta[~selected], (calc2 - dty)[~selected], 'x') - a[1].set(ylim = (-es, es)) - plt.show() - return selected, cen, dx, dy - -def graincen(gid, colf, doplot=True): - # Get peaks beloging to this grain ID - m = colf.grain_id == gid - # Get omega values of peaks in radians - romega = np.radians(colf.omega[m]) - # Calculate cos and sin of omega - co = np.cos(romega) - so = np.sin(romega) - # Get dty values of peaks - dty = colf.dty[m] - selected, cen, dx, dy = fity_robust(dty, co, so, doplot=doplot) - return selected, cen, dx, dy - - - - -@numba.njit(parallel=True) -def pmax(ary): - """ Find the min/max of an array in parallel """ - mx = ary.flat[0] - mn = ary.flat[0] - for i in numba.prange(1,ary.size): - mx = max( ary.flat[i], mx ) - mn = min( ary.flat[i], mn ) - return mn, mx - -@numba.njit(parallel=True) -def palloc(shape, dtype): - """ Allocate and fill an array with zeros in parallel """ - ary = np.empty(shape, dtype=dtype) - for i in numba.prange( ary.size ): - ary.flat[i] = 0 - return ary - -# counting sort by grain_id -@numba.njit -def counting_sort(ary, maxval=None, minval=None): - """ Radix sort for integer array. Single threaded. O(n) - Numpy should be doing this... - """ - if maxval is None: - assert minval is None - minval, maxval = pmax( ary ) # find with a first pass - maxval = int(maxval) - minval = int(minval) - histogram = palloc( (maxval - minval + 1,), np.int64 ) - indices = palloc( (maxval - minval + 2,), np.int64 ) - result = palloc( ary.shape, np.int64 ) - for gid in ary: - histogram[gid - minval] += 1 - indices[0] = 0 - for i in range(len(histogram)): - indices[ i + 1 ] = indices[i] + histogram[i] - i = 0 - for gid in ary: - j = gid - minval - result[indices[j]] = i - indices[j] += 1 - i += 1 - return result, histogram - - -@numba.njit(parallel=True) -def find_grain_id(spot3d_id, grain_id, spot2d_label, grain_label, order, nthreads=20): - """ - Assignment grain labels into the peaks 2d array - spot3d_id = the 3d spot labels that are merged and indexed - grain_id = the grains assigned to the 3D merged peaks - spot2d_label = the 3d label for each 2d peak - grain_label => output, which grain is this peak - order = the order to traverse spot2d_label sorted - """ - assert spot3d_id.shape == grain_id.shape - assert spot2d_label.shape == grain_label.shape - assert spot2d_label.shape == order.shape - T = nthreads - print("Using",T,"threads") - for tid in numba.prange( T ): - pcf = 0 # thread local I hope? 
- for i in order[tid::T]: - grain_label[i] = -1 - pkid = spot2d_label[i] - while spot3d_id[pcf] < pkid: - pcf += 1 - if spot3d_id[pcf] == pkid: - grain_label[i] = grain_id[pcf] - - -def tocolf(pkd, parfile, dxfile, dyfile): - """ Converts a dictionary of peaks into an ImageD11 columnfile - adds on the geometric computations (tth, eta, gvector, etc) """ - spat = eiger_spatial(dxfile=dxfile, dyfile=dyfile) - cf = ImageD11.columnfile.colfile_from_dict(spat(pkd)) - cf.parameters.loadparameters(parfile) - cf.updateGeometry() - return cf - - - - - - - - - -def unitcell_peaks_mask(cf, dstol, dsmax): - cell = ImageD11.unitcell.unitcell_from_parameters(cf.parameters) - cell.makerings(dsmax) - m = np.zeros(cf.nrows, bool) - for v in cell.ringds: - if v < dsmax: - m |= (abs(cf.ds - v) < dstol) - - return m - -def strongest_peaks(colf, uself=True, frac=0.995, B=0.2, doplot=None): - # correct intensities for structure factor (decreases with 2theta) - cor_intensity = colf.sum_intensity * (np.exp(colf.ds*colf.ds*B)) - if uself: - lf = ImageD11.refinegrains.lf(colf.tth, colf.eta) - cor_intensity *= lf - order = np.argsort( cor_intensity )[::-1] # sort the peaks by intensity - sortedpks = cor_intensity[order] - cums = np.cumsum(sortedpks) - cums /= cums[-1] - enough = np.searchsorted(cums, frac) - # Aim is to select the strongest peaks for indexing. - cutoff = sortedpks[enough] - mask = cor_intensity > cutoff - if doplot is not None: - fig, axs = plt.subplots(1,2,figsize=(10,5)) - axs[0].plot(cums/cums[-1], ',') - axs[0].set(xlabel='npks',ylabel='fractional intensity') - axs[0].plot([mask.sum(),], [frac,], "o" ) - axs[1].plot(cums/cums[-1], ',') - axs[1].set(xlabel='npks logscale',ylabel='fractional intensity', xscale='log', ylim=(doplot,1.), - xlim=(np.searchsorted(cums, doplot), len(cums))) - axs[1].plot( [mask.sum(),], [frac,], "o" ) - plt.show() - return mask - -def selectpeaks(cf, dstol=0.005, dsmax=10, frac=0.99, doplot=None): - m = unitcell_peaks_mask(cf, dstol=dstol, dsmax=dsmax) - cfc = cf.copy() - cfc.filter(m) - ms = strongest_peaks(cfc, frac=frac, doplot=doplot) - cfc.filter(ms) - return cfc - - diff --git a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb index d9f139e1..a2ee4602 100755 --- a/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb +++ b/ImageD11/nbGui/S3DXRD/0_S3DXRD_segment_and_label.ipynb @@ -359,8 +359,6 @@ "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", "# you can add samples and datasets to skip\n", "\n", - "samples_dict = {}\n", - "\n", "skips_dict = {\n", " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", "}\n", @@ -368,20 +366,10 @@ "dset_prefix = \"top\"\n", "\n", "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", - "\n", - "for sample in sample_list:\n", - " all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n", - " dsets_list = []\n", - " for folder in all_dset_folders_for_sample:\n", - " if dset_prefix in folder:\n", - " dset_name = folder.split(f\"{sample}_\")[1]\n", - " if dset_name not in skips_dict[sample]:\n", - " dsets_list.append(dset_name)\n", - "\n", - " samples_dict[sample] = dsets_list\n", + " \n", + "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n", " \n", "# manual override:\n", - "\n", "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n", " \n", "# now we have our samples_dict, we can process our data:\n", diff --git 
a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb
index 8bf462f1..8d0ed4ce 100755
--- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb
+++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb
@@ -621,12 +619,10 @@
   },
   "outputs": [],
   "source": [
     "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -634,20 +632,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
     " \n",
     "# manual override:\n",
-    "samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_250um\"]}\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb
index 3e005ccb..1e58e15a 100755
--- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb
+++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index_minor_phase.ipynb
@@ -535,12 +535,10 @@
   "metadata": {},
   "outputs": [],
   "source": [
     "# Now that we're happy with our indexing parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -548,20 +546,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
     " \n",
     "# manual override:\n",
-    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_250um\"]}\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
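
All of the bulk cells patched in this series now delegate dataset discovery to the same helper, find_datasets_to_process, added to nb_utils.py at the end of this patch. A quick usage sketch, assuming the raw-data layout {rawdata_path}/{sample}/{sample}_{dataset} that the helper's split on f"{sample}_" implies (the dataset names in the result comment are illustrative):

    skips_dict = {"FeAu_0p5_tR_nscope": ["top_-50um", "top_-100um"]}  # per-sample datasets to leave out
    dset_prefix = "top"
    sample_list = ["FeAu_0p5_tR_nscope"]
    samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)
    # e.g. {"FeAu_0p5_tR_nscope": ["top_100um", "top_200um"]}, depending on the folders on disk
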
diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb
index c9c6f50b..13261b2b 100644
--- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb
+++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb
@@ -1262,12 +1262,10 @@
   },
   "outputs": [],
   "source": [
     "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -1275,20 +1273,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
     " \n",
     "# manual override:\n",
-    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb
index 88a15604..9618eb70 100644
--- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb
+++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb
@@ -1184,12 +1184,10 @@
   },
   "outputs": [],
   "source": [
     "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -1197,20 +1195,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
     " \n",
     "# manual override:\n",
-    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
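
One caveat shared by every bulk cell in this run of hunks: the helper looks up skips_dict[sample] without a fallback, so each sample in sample_list needs its own key, even if the value is just an empty list. A tolerant variant is sketched below; find_datasets_to_process_safe is not part of this patch, only an illustration of the same logic with a default.

    import os

    def find_datasets_to_process_safe(rawdata_path, skips_dict, dset_prefix, sample_list):
        # same folder scan as the nb_utils helper, but samples missing from skips_dict skip nothing
        samples_dict = {}
        for sample in sample_list:
            skips = skips_dict.get(sample, [])  # no KeyError when a sample has no skips entry
            dsets_list = []
            for folder in os.listdir(os.path.join(rawdata_path, sample)):
                if dset_prefix in folder:
                    dset_name = folder.split(f"{sample}_")[1]
                    if dset_name not in skips:
                        dsets_list.append(dset_name)
            samples_dict[sample] = dsets_list
        return samples_dict
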
diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb
index 8dc0ab4b..e8065c9b 100755
--- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb
+++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb
@@ -1049,8 +1049,6 @@
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -1058,20 +1056,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
     " \n",
     "# manual override:\n",
-    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_400um\"]}\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
diff --git a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
index 13d979ac..67a18408 100755
--- a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
+++ b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb
@@ -609,12 +607,10 @@
   "metadata": {},
   "outputs": [],
   "source": [
     "# Now that we're happy with our sinogram parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
     "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
     "# you can add samples and datasets to skip\n",
     "\n",
-    "samples_dict = {}\n",
-    "\n",
     "skips_dict = {\n",
     "    \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
     "}\n",
@@ -622,17 +620,11 @@
     "dset_prefix = \"top\"\n",
     "\n",
     "sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
-    "\n",
-    "for sample in sample_list:\n",
-    "    all_dset_folders_for_sample = os.listdir(os.path.join(rawdata_path, sample))\n",
-    "    dsets_list = []\n",
-    "    for folder in all_dset_folders_for_sample:\n",
-    "        if dset_prefix in folder:\n",
-    "            dset_name = folder.split(f\"{sample}_\")[1]\n",
-    "            if dset_name not in skips_dict[sample]:\n",
-    "                dsets_list.append(dset_name)\n",
-    "\n",
-    "    samples_dict[sample] = dsets_list\n",
+    " \n",
+    "samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
+    " \n",
+    "# manual override:\n",
+    "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
     " \n",
     "# now we have our samples_dict, we can process our data:\n",
     "\n",
diff --git a/ImageD11/nbGui/nb_utils.py b/ImageD11/nbGui/nb_utils.py
index bdc6bac2..288eb07b 100644
--- a/ImageD11/nbGui/nb_utils.py
+++ b/ImageD11/nbGui/nb_utils.py
@@ -19,6 +19,23 @@ from scipy.optimize import curve_fit
 
 
+def find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list):
+    samples_dict = {}
+
+    for sample in sample_list:
+        all_dset_folders_for_sample = 
os.listdir(os.path.join(rawdata_path, sample)) + dsets_list = [] + for folder in all_dset_folders_for_sample: + if dset_prefix in folder: + dset_name = folder.split(f"{sample}_")[1] + if dset_name not in skips_dict[sample]: + dsets_list.append(dset_name) + + samples_dict[sample] = dsets_list + + return samples_dict + + def sine_function(x, offset, a, b): return b * np.sin(np.radians(x)) + a * np.cos(np.radians(x)) + offset From 24a7407c8d5ef796ca3b610a1a58d48228f36398 Mon Sep 17 00:00:00 2001 From: James Ball Date: Tue, 27 Feb 2024 17:52:29 +0100 Subject: [PATCH 4/8] Remove WIP notebook from commit --- .../S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb | 1389 ----------------- 1 file changed, 1389 deletions(-) delete mode 100644 ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb deleted file mode 100644 index 9618eb70..00000000 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_all2d.ipynb +++ /dev/null @@ -1,1389 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Jupyter notebook based on ImageD11 to process scanning 3DXRD data\n", - "# Written by Haixing Fang, Jon Wright and James Ball\n", - "## Date: 26/02/2024" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# There is a bug with the current version of ImageD11 in the site-wide Jupyter env.\n", - "# This has been fixed here: https://github.com/FABLE-3DXRD/ImageD11/commit/4af88b886b1775585e868f2339a0eb975401468f\n", - "# Until a new release has been made and added to the env, we need to get the latest version of ImageD11 from GitHub\n", - "# Put it in your home directory\n", - "# USER: Change the path below to point to your local copy of ImageD11:\n", - "\n", - "import os\n", - "\n", - "username = os.environ.get(\"USER\")\n", - "\n", - "id11_code_path = f\"/home/esrf/{username}/Code/ImageD11\"\n", - "\n", - "import sys\n", - "\n", - "sys.path.insert(0, id11_code_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# import functions we need\n", - "\n", - "import concurrent.futures\n", - "import timeit\n", - "import glob\n", - "import pprint\n", - "from shutil import rmtree\n", - "import time\n", - "from functools import partial\n", - "\n", - "import matplotlib\n", - "%matplotlib ipympl\n", - "\n", - "from scipy.optimize import curve_fit\n", - "\n", - "import h5py\n", - "from tqdm.notebook import tqdm\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import ImageD11.columnfile\n", - "from ImageD11.sinograms import properties, roi_iradon\n", - "from ImageD11.blobcorrector import eiger_spatial\n", - "from ImageD11.grain import grain\n", - "\n", - "from skimage.filters import threshold_otsu\n", - "from skimage.morphology import convex_hull_image\n", - "\n", - "import ImageD11.nbGui.nb_utils as utils\n", - "\n", - "import ipywidgets as widgets\n", - "from ipywidgets import interact" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# define our functions\n", - "\n", - "# save recons and 2d properties to existing grain file\n", - "\n", - "def read_grains(ds):\n", - " with h5py.File(ds.grainsfile, 'r') as hin: \n", - " grains_group = 'grains'\n", - " \n", - " grains = []\n", - " for gid_string in 
tqdm(sorted(hin[grains_group].keys(), key=lambda x: int(x))):\n", - " gg = hin[grains_group][gid_string]\n", - " ubi = gg.attrs['ubi'][:]\n", - " g = ImageD11.grain.grain(ubi)\n", - " g.gid = int(gid_string)\n", - " grains.append(g)\n", - " \n", - " return grains\n", - "\n", - "\n", - "def map_grain_from_peaks(g, ds):\n", - " \"\"\"\n", - " Computes sinogram\n", - " flt is already the peaks for this grain\n", - " Returns angles, sino\n", - " \"\"\" \n", - " NY = len(ds.ybincens) # number of y translations\n", - " iy = np.round((g.dty - ds.ybincens[0]) / (ds.ybincens[1]-ds.ybincens[0])).astype(int) # flt column for y translation index\n", - "\n", - " # The problem is to assign each spot to a place in the sinogram\n", - " hklmin = g.hkl_2d_strong.min(axis=1) # Get minimum integer hkl (e.g -10, -9, -10)\n", - " dh = g.hkl_2d_strong - hklmin[:,np.newaxis] # subtract minimum hkl from all integer hkls\n", - " de = (g.etasigns_2d_strong.astype(int) + 1)//2 # something signs related\n", - " # 4D array of h,k,l,+/-\n", - " # pkmsk is whether a peak has been observed with this HKL or not\n", - " pkmsk = np.zeros(list(dh.max(axis=1) + 1 )+[2,], int) # make zeros-array the size of (max dh +1) and add another axis of length 2\n", - " pkmsk[ dh[0], dh[1], dh[2], de ] = 1 # we found these HKLs for this grain\n", - " # sinogram row to hit\n", - " pkrow = np.cumsum(pkmsk.ravel()).reshape(pkmsk.shape) - 1 #\n", - " # counting where we hit an HKL position with a found peak\n", - " # e.g (-10, -9, -10) didn't get hit, but the next one did, so increment\n", - "\n", - " npks = pkmsk.sum( )\n", - " destRow = pkrow[ dh[0], dh[1], dh[2], de ] \n", - " sino = np.zeros( ( npks, NY ), 'f' )\n", - " hits = np.zeros( ( npks, NY ), 'f' )\n", - " angs = np.zeros( ( npks, NY ), 'f' )\n", - " adr = destRow * NY + iy \n", - " # Just accumulate \n", - " sig = g.sum_intensity\n", - " ImageD11.cImageD11.put_incr64( sino, adr, sig )\n", - " ImageD11.cImageD11.put_incr64( hits, adr, np.ones(len(de),dtype='f'))\n", - " ImageD11.cImageD11.put_incr64( angs, adr, g.omega)\n", - " \n", - " sinoangles = angs.sum( axis = 1) / hits.sum( axis = 1 )\n", - " # Normalise:\n", - " sino = (sino.T/sino.max( axis=1 )).T\n", - " # Sort (cosmetic):\n", - " order = np.lexsort((np.arange(npks), sinoangles))\n", - " sinoangles = sinoangles[order]\n", - " ssino = sino[order].T\n", - " return sinoangles, ssino, hits[order].T\n", - "\n", - "def do_sinos(g, hkltol=0.25):\n", - "# # flt = utils.tocolf({p:p2d[p][g.peaks_2d] for p in p2d}, par_path, dxfile=e2dx_path, dyfile=e2dy_path) # convert it to a columnfile and spatially correct\n", - " \n", - " \n", - "# flt = cf_2d.copy()\n", - "# flt.filter(g.mask_2d)\n", - " \n", - " # hkl_real = np.dot(g.ubi, (g.gx, g.gy, g.gz)) # calculate hkl of all assigned peaks\n", - " # hkl_int = np.round(hkl_real).astype(int) # round to nearest integer\n", - " # dh = ((hkl_real - hkl_int)**2).sum(axis = 0) # calculate square of difference\n", - "\n", - " # g.dherrall = dh.mean() # mean hkl error across all assigned peaks\n", - " # g.npksall = flt.nrows # total number of assigned peaks\n", - " # flt.filter(dh < hkltol*hkltol) # filter all assigned peaks to be less than hkltol squared\n", - " # hkl_real = np.dot(g.ubi, (flt.gx, flt.gy, flt.gz)) # recalculate error after filtration\n", - " # hkl_int = np.round(hkl_real).astype(int)\n", - " # dh = ((hkl_real - hkl_int)**2).sum(axis = 0)\n", - " # g.dherr = dh.mean() # dherr is mean hkl error across assigned peaks after hkltol filtering\n", - " # g.npks = flt.nrows # total 
number of assigned peaks after hkltol filtering\n", - " \n", - " hkl_real = np.dot(g.ubi, (g.gx, g.gy, g.gz)) # recalculate error after filtration\n", - " hkl_int = np.round(hkl_real).astype(int)\n", - " \n", - " g.etasigns_2d_strong = np.sign(g.eta)\n", - " g.hkl_2d_strong = hkl_int # integer hkl of assigned peaks after hkltol filtering\n", - " g.sinoangles, g.ssino, g.hits = map_grain_from_peaks(g, ds)\n", - " # return i,g\n", - "\n", - "\n", - "def run_iradon_id11(grain, pad=20, y0=0, workers=1, sample_mask=None, apply_halfmask=False, mask_central_zingers=False):\n", - " outsize = grain.ssino.shape[0] + pad\n", - " \n", - " if apply_halfmask:\n", - " halfmask = np.zeros_like(grain.ssino)\n", - "\n", - " halfmask[:len(halfmask)//2-1, :] = 1\n", - " halfmask[len(halfmask)//2-1, :] = 0.5\n", - " \n", - " ssino_to_recon = grain.ssino * halfmask\n", - " else:\n", - " ssino_to_recon = grain.ssino\n", - " \n", - " # # pad the sample mask\n", - " # sample_mask_padded = np.pad(sample_mask, pad//2)\n", - "\n", - " \n", - " # Perform iradon transform of grain sinogram, store result (reconstructed grain shape) in g.recon\n", - " grain.recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", - " theta=grain.sinoangles, \n", - " mask=sample_mask,\n", - " output_size=outsize,\n", - " projection_shifts=np.full(grain.ssino.shape, -y0),\n", - " filter_name='hamming',\n", - " interpolation='linear',\n", - " workers=workers)\n", - " \n", - " if mask_central_zingers:\n", - " grs = grain.recon.shape[0]\n", - " xpr, ypr = -grs//2 + np.mgrid[:grs, :grs]\n", - " inner_mask_radius = 25\n", - " outer_mask_radius = inner_mask_radius + 2\n", - "\n", - " inner_circle_mask = (xpr ** 2 + ypr ** 2) < inner_mask_radius ** 2\n", - " outer_circle_mask = (xpr ** 2 + ypr ** 2) < outer_mask_radius ** 2\n", - "\n", - " mask_ring = inner_circle_mask & outer_circle_mask\n", - " # we now have a mask to apply\n", - " fill_value = np.median(grain.recon[mask_ring])\n", - " grain.recon[inner_circle_mask] = fill_value\n", - " \n", - " return grain\n", - "\n", - "\n", - "# write og_recon and ssino and circle_mask to disk\n", - "\n", - "cmp = {'compression':'gzip',\n", - " 'compression_opts': 2,\n", - " 'shuffle' : True }\n", - "\n", - "def save_array(grp, name, ary):\n", - " hds = grp.require_dataset(name, \n", - " shape=ary.shape,\n", - " dtype=ary.dtype,\n", - " **cmp)\n", - " hds[:] = ary\n", - " return hds\n", - "\n", - "def save_grains_for_mlem(grains, ds, y0):\n", - " with h5py.File(ds.grainsfile, 'r+') as hout:\n", - " try:\n", - " grp = hout.create_group('peak_assignments')\n", - " except ValueError:\n", - " grp = hout['peak_assignments']\n", - "\n", - " # ds_gord = save_array( grp, 'gord', gord )\n", - " # ds_gord.attrs['description'] = 'Grain ordering: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " # ds_inds = save_array( grp, 'inds', inds )\n", - " # ds_inds.attrs['description'] = 'Grain indices: g[i].pks = gord[ inds[i] : inds[i+1] ]'\n", - " \n", - " grains_group = 'grains'\n", - " for g in tqdm(grains):\n", - " gg = hout[grains_group][str(g.gid)]\n", - " # save stuff for sinograms\n", - " \n", - " save_array(gg, 'ssino', g.ssino).attrs['description'] = 'Sinogram of peak intensities sorted by omega'\n", - " save_array(gg, 'sinoangles', g.sinoangles).attrs['description'] = 'Projection angles for sinogram'\n", - " save_array(gg, 'og_recon', g.og_recon).attrs['description'] = 'Original ID11 iRadon reconstruction'\n", - " save_array(gg, 'circle_mask', whole_sample_mask).attrs['description'] = 'Reconstruction mask to 
use for MLEM'\n", - " \n", - " # might as well save peaks stuff while we're here\n", - " save_array(gg, 'translation', g.translation).attrs['description'] = 'Grain translation in lab frame'\n", - " save_array(gg, 'peaks_2d_sinograms', g.peaks_2d).attrs['description'] = \"2D peaks from strong 4D peaks that were assigned to this grain for sinograms\"\n", - " save_array(gg, 'peaks_4d_sinograms', g.peaks_4d).attrs['description'] = \"Strong 4D peaks that were assigned to this grain for sinograms\"\n", - "\n", - " gg.attrs['cen'] = g.cen\n", - " gg.attrs['y0'] = y0\n", - " \n", - " \n", - "def prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50):\n", - " \n", - " slurm_mlem_path = os.path.join(ds.analysispath, \"slurm_mlem\")\n", - "\n", - " if os.path.exists(slurm_mlem_path):\n", - " print(f\"Removing {slurm_mlem_path}\")\n", - " rmtree(slurm_mlem_path)\n", - "\n", - " os.mkdir(slurm_mlem_path)\n", - " \n", - " recons_path = os.path.join(ds.analysispath, \"mlem_recons\")\n", - "\n", - " if os.path.exists(recons_path):\n", - " print(f\"Removing {recons_path}\")\n", - " rmtree(recons_path)\n", - "\n", - " os.mkdir(recons_path)\n", - " \n", - " if is_half_scan:\n", - " dohm = \"Yes\"\n", - " mask_cen = \"Yes\"\n", - " else:\n", - " dohm = \"No\"\n", - " mask_cen = \"No\"\n", - " \n", - " bash_script_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm.sh')\n", - " python_script_path = os.path.join(id11_code_path, \"ImageD11/nbGui/S3DXRD/run_mlem_recon.py\") \n", - " outfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.out')\n", - " errfile_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_%A_%a.err')\n", - " log_path = os.path.join(slurm_mlem_path, ds.dsname + '_mlem_recon_slurm_$SLURM_ARRAY_JOB_ID_$SLURM_ARRAY_TASK_ID.log')\n", - "\n", - " reconfile = os.path.join(recons_path, ds.dsname + \"_mlem_recon_$SLURM_ARRAY_TASK_ID.txt\")\n", - "\n", - " bash_script_string = f\"\"\"#!/bin/bash\n", - "#SBATCH --job-name=mlem-recon\n", - "#SBATCH --output={outfile_path}\n", - "#SBATCH --error={errfile_path}\n", - "#SBATCH --array=0-{len(grains)-1}%{n_simultaneous_jobs}\n", - "#SBATCH --time=02:00:00\n", - "# define memory needs and number of tasks for each array job\n", - "#SBATCH --ntasks=1\n", - "#SBATCH --cpus-per-task={cores_per_task}\n", - "#\n", - "date\n", - "echo python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n", - "python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\n", - "date\n", - " \"\"\"\n", - " \n", - " # print(f\"python3 {python_script_path} {ds.grainsfile} $SLURM_ARRAY_TASK_ID {reconfile} {pad} {niter} {dohm} {mask_cen} > {log_path} 2>&1\")\n", - "\n", - " with open(bash_script_path, \"w\") as bashscriptfile:\n", - " bashscriptfile.writelines(bash_script_string)\n", - " \n", - " return bash_script_path, recons_path\n", - "\n", - "\n", - "def save_grains(grains, ds):\n", - " with h5py.File(ds.grainsfile, 'r+') as hout:\n", - " try:\n", - " grp = hout.create_group('slice_recon')\n", - " except ValueError:\n", - " grp = hout['slice_recon']\n", - " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", - " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", - " \n", - " grains_group = 'grains'\n", - "\n", - " for g in 
tqdm(grains):\n", - "        gg = hout[grains_group][str(g.gid)]\n", - "\n", - "        save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n", - "    \n", - "    \n", - "# without a mask, MLEM can introduce artifacts in the corners\n", - "# so we can manually mask those out\n", - "\n", - "# we can incorporate our own mask too\n", - "# by modifying the below function\n", - "\n", - "def apply_manual_mask(mask_in):\n", - "    mask_out = mask_in.copy()\n", - "    \n", - "    mask_out[200:, 250:] = 0\n", - "    \n", - "#     mask_out[:8, :] = 0\n", - "#     mask_out[:, 87:] = 0\n", - "\n", - "#     mask_out[:, :8] = 0\n", - "#     mask_out[82:, :] = 0\n", - "    \n", - "#     mask_out[74:, :10] = 0\n", - "\n", - "#     mask_out[:5, :] = 0\n", - "# #     mask_out[131:, :] = 0\n", - "# #     mask_out[:, 131:] = 0\n", - "\n", - "#     mask_out[:20, 90:] = 0\n", - "#     mask_out[119:, :45] = 0\n", - "#     mask_out[:30, 100:] = 0\n", - "# #     mask_out[112:, 81:] = 0\n", - "\n", - "# #     mask_out[100:, 100:] = 0\n", - "#     mask_out[90:, 118:] = 0\n", - "#     mask_out[118:, 90:] = 0\n", - "\n", - "#     mask_out[:40, 112:] = 0\n", - "#     mask_out[:52, 120:] = 0\n", - "\n", - "#     mask_out[:48, 81:] = 0\n", - "    \n", - "    return mask_out" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# NOTE: For old datasets before the new directory layout structure, we don't distinguish between RAW_DATA and PROCESSED_DATA\n", - "# In this case, use this cell to specify where your experimental folder is, and do not run the cell below\n", - "# e.g /data/visitor/ma4752/id11/20210513\n", - "\n", - "### USER: specify your experimental directory\n", - "\n", - "rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n", - "\n", - "!ls -lrt {rawdata_path}\n", - "\n", - "### USER: specify where you want your processed data to go\n", - "\n", - "processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# USER: pick a sample and a dataset you want to segment\n", - "\n", - "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = \"top_250um\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# destination of H5 files\n", - "\n", - "dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", - "\n", - "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", - "\n", - "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", - "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Load the dataset (for motor positions, not sure why these are not in peaks)\n", - "ds = ImageD11.sinograms.dataset.load(dset_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Import 4D peaks\n", - "\n", - "cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", - "\n", - "cf_4d.parameters.loadparameters(par_path)\n", - "cf_4d.updateGeometry()\n", - "\n", - "print(f\"Read {cf_4d.nrows} 4D peaks\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - 
"source": [ - "grains = read_grains(ds)\n", - "\n", - "for grain in grains:\n", - " # print(grain.gid)\n", - " grain.a = np.cbrt(np.linalg.det(grain.ubi))\n", - " \n", - "print(f\"{len(grains)} grains imported\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# here we are filtering our peaks (cf_4d) to select only the strongest ones\n", - "# this time as opposed to indexing, our frac is slightly weaker but we are NOT filtering in dstar!!!!!\n", - "# this means many more peaks per grain = stronger sinograms\n", - "\n", - "# USER: modify the \"frac\" parameter below and re-run the cell until the orange dot sits nicely on the \"elbow\" of the blue line\n", - "# this indicates the fractional intensity cutoff we will select\n", - "# if the blue line does not look elbow-shaped in the logscale plot, try changing the \"doplot\" parameter (the y scale of the logscale plot) until it does\n", - "\n", - "cf_strong = utils.selectpeaks(cf_4d, frac=0.995, dsmax=cf_4d.ds.max(), doplot=0.9)\n", - "print(cf_4d.nrows)\n", - "cf_strong.nrows" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# # now let's do a whole-sample tomographic reconstruction" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# If the sinograms are only half-sinograms (we scanned dty across half the sample rather than the full sample), set the below to true:\n", - "is_half_scan = False" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if is_half_scan:\n", - " utils.correct_half_scan(ds)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "utils.assign_peaks_to_grains(grains, cf_strong, tol=0.25)\n", - "\n", - "print(\"Storing peak data in grains\")\n", - "# iterate through all the grains\n", - "for g in tqdm(grains):\n", - " # store this grain's peak indices so we know which 4D peaks we used for indexing\n", - " g.mask_4d = cf_strong.grain_id == g.gid\n", - " g.peaks_4d = cf_strong.index[g.mask_4d]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "for grain in grains:\n", - " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=True, nsigma=1)\n", - " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", - " utils.fit_grain_position_from_sino(grain, cf_strong)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "c0 = np.median([g.cen for g in grains])\n", - "\n", - "print('Center of rotation in dty', c0)\n", - "\n", - "y0 = c0/2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# generate sinogram for whole sample\n", - "\n", - "whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", - "\n", - "fig, ax = plt.subplots()\n", - "ax.imshow(whole_sample_sino, interpolation=\"nearest\", vmin=0)\n", - 
"ax.set_aspect(4)\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# \"quick\" MLEM reconstruction\n", - "\n", - "pad = 50\n", - "\n", - "outsize = whole_sample_sino.shape[0] + pad\n", - "\n", - "nthreads = len(os.sched_getaffinity(os.getpid()))\n", - "\n", - "if is_half_scan:\n", - " halfmask = np.zeros_like(whole_sample_sino)\n", - "\n", - " halfmask[:len(halfmask)//2-1, :] = 1\n", - " halfmask[len(halfmask)//2-1, :] = 0.5\n", - "\n", - " ssino_to_recon = whole_sample_sino * halfmask\n", - "else:\n", - " ssino_to_recon = whole_sample_sino\n", - "\n", - "recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", - " theta=ds.obincens, \n", - " output_size=outsize,\n", - " projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", - " filter_name='hamming',\n", - " interpolation='linear',\n", - " workers=nthreads)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# we should be able to easily segment this using scikit-image\n", - "recon_man_mask = apply_manual_mask(recon)\n", - "\n", - "thresh = threshold_otsu(recon_man_mask)\n", - "\n", - "# we can also override the threshold if we don't like it:\n", - "\n", - "# thresh = 0.05\n", - "\n", - "binary = recon_man_mask > thresh\n", - "\n", - "chull = convex_hull_image(binary)\n", - "\n", - "fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, constrained_layout=True)\n", - "axs[0].imshow(recon_man_mask, vmin=0)\n", - "axs[1].imshow(binary)\n", - "axs[2].imshow(chull)\n", - "\n", - "axs[0].set_title(\"Reconstruction\")\n", - "axs[1].set_title(\"Binarised threshold\")\n", - "axs[2].set_title(\"Convex hull\")\n", - "\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "whole_sample_mask = chull" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# populate translations of grains\n", - "for g in grains:\n", - " g.translation = np.array([g.dx, g.dy, 0])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Import 2D peaks\n", - "\n", - "cf_2d = ImageD11.columnfile.columnfile(ds.col2dfile)\n", - "\n", - "cf_2d.parameters.loadparameters(par_path)\n", - "cf_2d.updateGeometry()\n", - "\n", - "print(f\"Read {cf_2d.nrows} 2D peaks\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# here we are filtering our peaks (cf_4d) to select only the strongest ones\n", - "# this time as opposed to indexing, our frac is slightly weaker but we are NOT filtering in dstar!!!!!\n", - "# this means many more peaks per grain = stronger sinograms\n", - "\n", - "# USER: modify the \"frac\" parameter below and re-run the cell until the orange dot sits nicely on the \"elbow\" of the blue line\n", - "# this indicates the fractional intensity cutoff we will select\n", - "# if the blue line does not look elbow-shaped in the logscale plot, try changing the \"doplot\" parameter (the y scale of the logscale plot) until it does\n", - "\n", - "cf_2d_strong = utils.selectpeaks(cf_2d, frac=0.995, dsmax=cf_2d.ds.max(), doplot=0.9)\n", - "print(cf_2d.nrows)\n", - "cf_2d_strong.nrows" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": 
{ - "tags": [] - }, - "outputs": [], - "source": [ - "# add index column to cf_2d\n", - "\n", - "cf_2d_strong.addcolumn(np.arange(cf_2d_strong.nrows), \"index\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# take your grains\n", - "# greedy assign 2d peaks\n", - "# build sinos with those\n", - "\n", - "# get all g-vectors from columnfile\n", - "gv = np.transpose((cf_2d_strong.gx, cf_2d_strong.gy, cf_2d_strong.gz)).astype(float)\n", - "\n", - "tol = 0.06\n", - "\n", - "print(\"Scoring and assigning {} grains\".format(len(grains)))\n", - "\n", - "for grain in tqdm(grains):\n", - " # column to store the grain labels\n", - " labels = np.zeros(cf_2d_strong.nrows, 'i')\n", - "\n", - " # column to store drlv2 (error in hkl)\n", - " drlv2 = np.ones(cf_2d_strong.nrows, 'd')\n", - " # iterate over all grains\n", - " # \n", - " n = ImageD11.cImageD11.score_and_assign(grain.ubi, gv, tol, drlv2, labels, grain.gid)\n", - " \n", - " grain.mask_2d = labels == grain.gid\n", - " grain.peaks_2d = cf_2d_strong.index[grain.mask_2d]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# populate the data for each grain that we need\n", - "\n", - "for grain in tqdm(grains):\n", - " grain.dty = cf_2d_strong.dty[grain.mask_2d]\n", - " grain.omega = cf_2d_strong.omega[grain.mask_2d]\n", - " grain.gx = cf_2d_strong.gx[grain.mask_2d]\n", - " grain.gy = cf_2d_strong.gy[grain.mask_2d]\n", - " grain.gz = cf_2d_strong.gz[grain.mask_2d]\n", - " grain.eta = cf_2d_strong.eta[grain.mask_2d]\n", - " grain.sum_intensity = cf_2d_strong.sum_intensity[grain.mask_2d]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Determine sinograms of all grains\n", - "\n", - "nthreads = len(os.sched_getaffinity(os.getpid()))\n", - "\n", - "with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n", - " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n", - " pass" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Show sinogram of single grain\n", - "\n", - "g = grains[0]\n", - "\n", - "fig, ax = plt.subplots()\n", - "\n", - "ax.imshow((g.ssino/g.ssino.mean(axis=0)), norm=matplotlib.colors.LogNorm(), interpolation='nearest', origin=\"lower\")\n", - "\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# if you want, you can override the y0 value here\n", - "\n", - "# y0 = 1.5 # for example!\n", - "\n", - "y0 = c0/2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "g = grains[1]\n", - "\n", - "run_iradon_id11(g, pad=pad, y0=y0, workers=max(nthreads, 20), sample_mask=whole_sample_mask, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "g = grains[1]\n", - "\n", - "fig, axs = plt.subplots(1,2, figsize=(10,5))\n", - "axs[0].imshow(g.recon, vmin=0)\n", - "axs[0].set_title(\"ID11 iradon\")\n", - "axs[1].imshow(g.ssino, aspect='auto')\n", - "axs[1].set_title(\"ssino\")\n", - "\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - 
"metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "nthreads = len(os.sched_getaffinity(os.getpid()))\n", - "\n", - "run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", - "\n", - "with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", - " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", - " pass" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "for grain in grains:\n", - " grain.og_recon = grain.recon" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig, a = plt.subplots(1,2,figsize=(10,5))\n", - "rec = a[0].imshow(grains[8].og_recon, vmin=0)\n", - "sin = a[1].imshow(grains[8].ssino, aspect='auto')\n", - "\n", - "# Function to update the displayed image based on the selected frame\n", - "def update_frame(i):\n", - " rec.set_array(grains[i].og_recon)\n", - " sin.set_array(grains[i].ssino)\n", - " a[0].set(title=str(i))\n", - " fig.canvas.draw()\n", - "\n", - "# Create a slider widget to select the frame number\n", - "frame_slider = widgets.IntSlider(\n", - " value=0,\n", - " min=0,\n", - " max=len(grains) - 1,\n", - " step=1,\n", - " description='Grain:'\n", - ")\n", - "\n", - "interact(update_frame, i=frame_slider)\n", - "\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "f,a = plt.subplots( 1,3, figsize=(15,5) )\n", - "ty, tx = utils.triangle().T\n", - "for i,title in enumerate( 'xyz' ):\n", - " ax = np.zeros(3)\n", - " ax[i] = 1.\n", - " hkl = [utils.crystal_direction_cubic( g.ubi, ax ) for g in grains]\n", - " xy = np.array([utils.hkl_to_pf_cubic(h) for h in hkl ])\n", - " rgb = np.array([utils.hkl_to_color_cubic(h) for h in hkl ])\n", - " for j in range(len(grains)):\n", - " grains[j].rgb = rgb[j]\n", - " a[i].scatter( xy[:,1], xy[:,0], c = rgb ) # Note the \"x\" axis of the plot is the 'k' direction and 'y' is h (smaller)\n", - " a[i].set(title=title, aspect='equal', facecolor='silver', xticks=[], yticks=[])\n", - " a[i].plot( tx, ty, 'k-', lw = 1 )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.4)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# plot initial output\n", - "\n", - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(rgb_array)\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(grain_labels_array) # originally 1,2,0\n", - "ax.set_title(\"Grain label map\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(raw_intensity_array)\n", - "ax.set_title(\"Raw intensity array\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# we can clean up 
these reconstructions using an MLEM iterative recon\n", - "# we can use the whole sample shape mask for this" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "save_grains_for_mlem(grains, ds, y0=y0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, n_simultaneous_jobs=50, cores_per_task=8, niter=50)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "utils.slurm_submit_and_wait(bash_script_path, 30)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# collect results into grain attributes\n", - "# the filenames are element position not gid\n", - "\n", - "for i, grain in enumerate(tqdm(grains)):\n", - " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# look at all our grains\n", - "\n", - "n_grains_to_plot = 25\n", - "\n", - "grains_step = len(grains)//n_grains_to_plot\n", - "\n", - "grid_size = np.ceil(np.sqrt(len(grains[::grains_step]))).astype(int)\n", - "nrows = (len(grains[::grains_step])+grid_size-1)//grid_size\n", - "\n", - "fig, axs = plt.subplots(grid_size, nrows, figsize=(10,10), layout=\"constrained\", sharex=True, sharey=True)\n", - "for i, ax in enumerate(axs.ravel()):\n", - " if i < len(grains[::grains_step]):\n", - " # get corresponding grain for this axis\n", - " g = grains[::grains_step][i]\n", - " ax.imshow(g.recon, vmin=0)\n", - " # ax.invert_yaxis()\n", - " ax.set_title(i)\n", - " \n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.3)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(rgb_array)\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(raw_intensity_array)\n", - "ax.set_title(\"Sinogram raw intensity map\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(grain_labels_array)\n", - "ax.set_title(\"Grain label map\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "save_grains(grains, ds)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if 1:\n", - " raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many 
samples/datasets\n", - "# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n", - "# you can add samples and datasets to skip\n", - "\n", - "skips_dict = {\n", - " \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n", - "}\n", - "\n", - "dset_prefix = \"top\"\n", - "\n", - "sample_list = [\"FeAu_0p5_tR_nscope\"]\n", - " \n", - "samples_dict = utils.find_datasets_to_process(skips_dict, dset_prefix, sample_list)\n", - " \n", - "# manual override:\n", - "# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n", - " \n", - "# now we have our samples_dict, we can process our data:\n", - "\n", - "par_path = os.path.join(processed_data_root_dir, 'Fe_refined.par')\n", - "\n", - "e2dx_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dx_E-08-0173_20231127.edf')\n", - "e2dy_path = os.path.join(processed_data_root_dir, '../../CeO2/e2dy_E-08-0173_20231127.edf')\n", - "\n", - "cf_strong_frac = 0.995\n", - "cf_strong_dstol = 0.01\n", - "\n", - "is_half_scan = False\n", - "\n", - "peak_assign_tol = 0.25\n", - "\n", - "manual_threshold = None\n", - "# manual_threshold = 0.025\n", - "\n", - "nthreads = len(os.sched_getaffinity(os.getpid()))\n", - "\n", - "pad = 50\n", - "\n", - "# y0 = -1.4\n", - "\n", - "mlem_wholesample_niter = 25\n", - "mlem_n_simultaneous_jobs = 50\n", - "mlem_cores_per_task = 8\n", - "mlem_niter = 50\n", - "\n", - "cutoff_level = 0.2\n", - "\n", - "for sample, datasets in samples_dict.items():\n", - " for dataset in datasets:\n", - " print(f\"Processing dataset {dataset} in sample {sample}\")\n", - " dset_path = os.path.join(processed_data_root_dir, sample, f\"{sample}_{dataset}\", f\"{sample}_{dataset}_dataset.h5\")\n", - " if not os.path.exists(dset_path):\n", - " print(f\"Missing DataSet file for {dataset} in sample {sample}, skipping\")\n", - " continue\n", - " \n", - " print(\"Importing DataSet object\")\n", - " \n", - " ds = ImageD11.sinograms.dataset.load(dset_path)\n", - " print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n", - " \n", - " if not os.path.exists(ds.grainsfile):\n", - " print(f\"Missing grains file for {dataset} in sample {sample}, skipping\")\n", - " continue\n", - " \n", - " cf_4d = ImageD11.columnfile.columnfile(ds.col4dfile)\n", - " cf_4d.parameters.loadparameters(par_path)\n", - " cf_4d.updateGeometry()\n", - " \n", - " grains = read_grains(ds)\n", - " \n", - " cf_strong = utils.selectpeaks(cf_4d, frac=cf_strong_frac, dsmax=cf_4d.ds.max(), dstol=cf_strong_dstol)\n", - " \n", - " if is_half_scan:\n", - " utils.correct_half_scan(ds)\n", - " \n", - " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", - " \n", - " for g in tqdm(grains):\n", - " g.mask_4d = cf_strong.grain_id == g.gid\n", - " g.peaks_4d = cf_strong.index[cf_strong.grain_id == g.gid]\n", - " \n", - " for grain in tqdm(grains):\n", - " # grain.peaks_4d_selected, grain.cen, grain.dx, grain.dy = utils.graincen(grain.gid, cf_strong, doplot=False)\n", - " grain.rgb_z = utils.grain_to_rgb(grain, ax=(0,0,1),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_y = utils.grain_to_rgb(grain, ax=(0,1,0),)# symmetry = Symmetry.cubic)\n", - " grain.rgb_x = utils.grain_to_rgb(grain, ax=(1,0,0),)# symmetry = Symmetry.cubic)\n", - " utils.fit_grain_position_from_sino(grain)\n", - " \n", - " c0 = np.median([g.cen for g in grains])\n", - " \n", - " y0 = c0/2\n", - " \n", - " whole_sample_sino, xedges, yedges = np.histogram2d(cf_4d.dty, cf_4d.omega, bins=[ds.ybinedges, ds.obinedges])\n", - " \n", - " 
print(\"Whole sample mask\")\n", - " outsize = whole_sample_sino.shape[0] + pad\n", - "\n", - " if is_half_scan:\n", - " halfmask = np.zeros_like(whole_sample_sino)\n", - "\n", - " halfmask[:len(halfmask)//2-1, :] = 1\n", - " halfmask[len(halfmask)//2-1, :] = 0.5\n", - "\n", - " ssino_to_recon = whole_sample_sino * halfmask\n", - " else:\n", - " ssino_to_recon = whole_sample_sino\n", - " \n", - " recon = ImageD11.sinograms.roi_iradon.iradon(ssino_to_recon, \n", - " theta=ds.obincens, \n", - " output_size=outsize,\n", - " projection_shifts=np.full(whole_sample_sino.shape, -y0),\n", - " filter_name='hamming',\n", - " interpolation='linear',\n", - " workers=nthreads)\n", - " \n", - " recon_man_mask = apply_manual_mask(recon)\n", - " if manual_threshold is None:\n", - " thresh = threshold_otsu(recon_man_mask)\n", - " else:\n", - " thresh = manual_threshold\n", - " \n", - " binary = recon_man_mask > thresh\n", - " whole_sample_mask = convex_hull_image(binary)\n", - " \n", - " for g in grains:\n", - " g.translation = np.array([g.dx, g.dy, 0])\n", - " \n", - " print(\"Peak 2D organise\")\n", - " pks = ImageD11.sinograms.properties.pks_table.load(ds.pksfile)\n", - " p2d = pks.pk2d(ds.omega, ds.dty)\n", - " numba_order, numba_histo = utils.counting_sort(p2d['spot3d_id'])\n", - " grain_2d_id = utils.palloc(p2d['spot3d_id'].shape, np.dtype(int))\n", - " cleanid = cf_strong.grain_id.copy()\n", - " utils.find_grain_id(cf_strong.spot3d_id, cleanid, p2d['spot3d_id'], grain_2d_id, numba_order)\n", - " gord, counts = utils.counting_sort(grain_2d_id)\n", - " inds = np.concatenate(((0,), np.cumsum(counts)))\n", - " \n", - " for grain in tqdm(grains):\n", - " i = grain.gid\n", - " grain.peaks_2d = gord[inds[i+1] : inds[i+2]]\n", - " \n", - " print(\"Making sinograms\")\n", - " with concurrent.futures.ThreadPoolExecutor(max_workers= max(1,nthreads-1)) as pool:\n", - " for i in tqdm(pool.map(do_sinos, grains), total=len(grains)):\n", - " pass\n", - " \n", - " print(\"Running iradon\")\n", - " \n", - " run_this_iradon = partial(run_iradon_id11, pad=pad, y0=y0, sample_mask=whole_sample_mask, workers=1, apply_halfmask=is_half_scan, mask_central_zingers=is_half_scan)\n", - "\n", - " with concurrent.futures.ThreadPoolExecutor( max_workers= max(1,nthreads-1) ) as pool:\n", - " for i in tqdm(pool.map(run_this_iradon, grains), total=len(grains)):\n", - " pass\n", - " \n", - " for grain in grains:\n", - " grain.og_recon = grain.recon\n", - " \n", - " save_grains_for_mlem(grains, ds, y0)\n", - " \n", - " bash_script_path, recons_path = prepare_mlem_bash(ds, grains, pad, is_half_scan, mlem_n_simultaneous_jobs, mlem_cores_per_task, mlem_niter)\n", - " \n", - " utils.slurm_submit_and_wait(bash_script_path, 30)\n", - " \n", - " for i, grain in enumerate(tqdm(grains)):\n", - " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))\n", - " \n", - " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", - " \n", - " save_grains(grains, ds)\n", - "\n", - "print(\"Done!\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (main)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - 
"version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} From 260111405df6a2db2dcf3ccb7d4e62ed45760307 Mon Sep 17 00:00:00 2001 From: James Ball Date: Tue, 27 Feb 2024 18:04:46 +0100 Subject: [PATCH 5/8] Add RGB colour output to sinogram notebooks --- ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb | 1 + ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb | 1 + ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb index 13261b2b..01fba849 100644 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb @@ -346,6 +346,7 @@ " grp = hout['slice_recon']\n", " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", + " save_array(grp, 'ipf_z_col_map', rgb_array).attrs['description'] = 'IPF Z color at each pixel'\n", " \n", " grains_group = 'grains'\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb index e8065c9b..26e116b5 100755 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb @@ -277,6 +277,7 @@ " grp = hout['slice_recon']\n", " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", + " save_array(grp, 'ipf_z_col_map', rgb_array).attrs['description'] = 'IPF Z color at each pixel'\n", " \n", " grains_group = hout.create_group('grains')\n", " for g in tqdm(grains):\n", diff --git a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb index 67a18408..a02a5975 100755 --- a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb +++ b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb @@ -588,7 +588,7 @@ }, "outputs": [], "source": [ - "save_ubi_map(ds)" + "save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map)" ] }, { From 69689a236009cba69816072081bb4c1ba92378f0 Mon Sep 17 00:00:00 2001 From: James Ball Date: Tue, 27 Feb 2024 19:15:48 +0100 Subject: [PATCH 6/8] Add widget with sliders for Frelon peaksearch data --- .../nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb | 117 +++++++++++++----- .../nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb | 22 +++- ImageD11/nbGui/3DXRD/frelon_peaksearch.py | 6 +- ImageD11/nbGui/nb_utils.py | 26 +++- 4 files changed, 136 insertions(+), 35 deletions(-) diff --git a/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb b/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb index c48715e0..132f0594 100755 --- a/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb +++ b/ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb @@ -175,51 +175,114 @@ { "cell_type": "code", "execution_count": null, - "id": "21b07c8a-7b1d-4fb7-be52-15ecde46130d", + "id": "069b343d-4695-45fe-9ead-eab2c4c4cd16", "metadata": { "tags": [] }, "outputs": [], "source": [ - "with h5py.File(ds.masterfile, 'r') as h5In:\n", - " test_image = h5In['1.1/measurement/frelon3'][0].astype('uint16')" + "#Define the initial parameters\n", + "start_worker_args = {\n", + " \"bgfile\":bg_file,\n", + " 
\"threshold\":50,\n", + " \"smoothsigma\":1.0,\n", + " \"bgc\":0.9,\n", + " \"minpx\":3,\n", + " \"m_offset_thresh\":80,\n", + " \"m_ratio_thresh\":135,\n", + "}" ] }, { "cell_type": "code", "execution_count": null, - "id": "e04317c9-f029-4167-94b0-ed68b02ed9d6", + "id": "ef30f6f8-8611-4f66-be3b-006c890b91fa", "metadata": { "tags": [] }, "outputs": [], "source": [ - "worker_args = {\n", - " \n", - "}\n", + "with h5py.File(ds.masterfile, 'r') as h5In:\n", + " test_image = h5In['1.1/measurement/frelon3'][0].astype('uint16')\n", "\n", - "test_image_worker = worker(bg_file, *worker_args)\n", + "# Display the image initially\n", + "fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, figsize=(16, 5))\n", + "test_image_worker = worker(**start_worker_args)\n", "goodpeaks = test_image_worker.peaksearch(img=test_image, omega=0)\n", - "fc, sc = goodpeaks[:, 23:25].T" + "fc, sc = goodpeaks[:, 23:25].T # 23 and 24 are the columns for fc and sc from blob properties\n", + "\n", + "im1 = axs[0].imshow(test_image, norm=LogNorm(vmax=1000))\n", + "axs[0].set_title(\"Original image\")\n", + "im2 = axs[1].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n", + "im3 = axs[2].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n", + "sc1, = axs[2].plot(fc, sc, marker='+', c=\"r\", ls=\"\")\n", + "axs[2].set_aspect(1)\n", + "plt.show()\n", + "\n", + "thresh_slider = widgets.IntSlider(value=start_worker_args[\"threshold\"], min=1, max=100, step=1, description='Threshold:')\n", + "smsig_slider = widgets.FloatSlider(value=start_worker_args[\"smoothsigma\"], min=0.0, max=1.0, step=0.05, description='Smoothsigma:')\n", + "bgc_slider = widgets.FloatSlider(value=start_worker_args[\"bgc\"], min=0.0, max=1.0, step=0.05, description='bgc:')\n", + "minpx_slider = widgets.IntSlider(value=start_worker_args[\"minpx\"], min=1, max=5, step=1, description='minpx:')\n", + "mofft_slider = widgets.IntSlider(value=start_worker_args[\"m_offset_thresh\"], min=1, max=200, step=1, description='m_offset_thresh:')\n", + "mratt_slider = widgets.IntSlider(value=start_worker_args[\"m_ratio_thresh\"], min=1, max=200, step=1, description='m_ratio_thresh:')\n", + "\n", + "\n", + "def update(threshold, smoothsigma, bgc, minpx, m_offset_thresh, m_ratio_thresh):\n", + " image_worker = worker(bg_file,\n", + " threshold,\n", + " smoothsigma,\n", + " bgc,\n", + " minpx,\n", + " m_offset_thresh,\n", + " m_ratio_thresh)\n", + " goodpeaks = image_worker.peaksearch(img=test_image, omega=0)\n", + " fc, sc = goodpeaks[:, 23:25].T\n", + " im2.set_data(image_worker.smoothed)\n", + " im3.set_data(image_worker.smoothed)\n", + " sc1.set_data(fc, sc)\n", + " plt.draw()\n", + "\n", + "interactive_plot = widgets.interactive(update,\n", + " threshold=thresh_slider,\n", + " smoothsigma=smsig_slider,\n", + " bgc=bgc_slider,\n", + " minpx=minpx_slider,\n", + " m_offset_thresh=mofft_slider,\n", + " m_ratio_thresh=mratt_slider)\n", + "\n", + "display(interactive_plot)" ] }, { "cell_type": "code", "execution_count": null, - "id": "cbde6264-a634-49f3-b858-dfdc55aa94b4", + "id": "c0a98042-065d-4d22-bd1e-e9c656432f44", "metadata": { "tags": [] }, "outputs": [], "source": [ - "fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, figsize=(16, 5))\n", - "axs[0].imshow(test_image, norm=LogNorm(vmax=1000))\n", - "axs[0].set_title(\"Original image\")\n", - "axs[1].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), 
interpolation=\"nearest\")\n", - "axs[2].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n", - "axs[2].scatter(fc, sc, marker='+', c=\"r\")\n", - "axs[2].set_aspect(1)\n", - "plt.show()" + "end_worker_args = {\n", + " \"bgfile\":bg_file,\n", + " \"threshold\":thresh_slider.value,\n", + " \"smoothsigma\":smsig_slider.value,\n", + " \"bgc\":bgc_slider.value,\n", + " \"minpx\":minpx_slider.value,\n", + " \"m_offset_thresh\":mofft_slider.value,\n", + " \"m_ratio_thresh\":mratt_slider.value,\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eee00548-3a48-44d0-b4ad-e71b71de95ca", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "print(end_worker_args)" ] }, { @@ -235,7 +298,7 @@ "\n", "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", - "cf_2d, cf_3d = process(ds, bg_file, nthreads-1, worker_args)" + "cf_2d, cf_3d = process(ds, bg_file, nthreads-1, end_worker_args)" ] }, { @@ -256,7 +319,7 @@ "hits_dict = dict(zip(unique, counts))\n", "hits_dict_max = sorted(hits_dict.items(), key=lambda x: x[1], reverse=True)\n", "\n", - "m = np.isin(cf_3d.index, [spot3d_id for spot3d_id, count in hits_dict_max[4:5]])\n", + "m = np.isin(cf_3d.index, [spot3d_id for spot3d_id, count in hits_dict_max[500:501]])\n", "cf_3d_single_peak = cf_3d.copy()\n", "cf_3d_single_peak.filter(m)\n", "\n", @@ -287,7 +350,7 @@ }, "outputs": [], "source": [ - "cf_2d = utils.apply_spatial(cf_2d, spline_file)" + "cf_2d = utils.apply_spatial(cf_2d, spline_file, nthreads)" ] }, { @@ -299,7 +362,7 @@ }, "outputs": [], "source": [ - "cf_3d = utils.apply_spatial(cf_3d, spline_file)" + "cf_3d = utils.apply_spatial(cf_3d, spline_file, nthreads)" ] }, { @@ -311,7 +374,7 @@ }, "outputs": [], "source": [ - "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/fitted.par'" + "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_tdxrd_refined.par'" ] }, { @@ -385,9 +448,7 @@ "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/fitted.par'\n", "bg_file = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/ff_bkg.edf\"\n", "\n", - "worker_args = {\n", - " \n", - "}\n", + "worker_args = end_worker_args\n", "\n", "nthreads = len(os.sched_getaffinity(os.getpid()))\n", "\n", @@ -415,8 +476,8 @@ " cf_2d, cf_3d = process(ds, bg_file, nthreads-1, worker_args)\n", " \n", " print(\"Spatially correcting peaks\")\n", - " cf_2d = utils.apply_spatial(cf_2d, spline_file)\n", - " cf_3d = utils.apply_spatial(cf_3d, spline_file)\n", + " cf_2d = utils.apply_spatial(cf_2d, spline_file, nthreads)\n", + " cf_3d = utils.apply_spatial(cf_3d, spline_file, nthreads)\n", " \n", " print(\"Saving peaks to file\")\n", " cf_2d.parameters.loadparameters(parfile)\n", diff --git a/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb b/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb index 251b4fab..edad0b1e 100755 --- a/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb +++ b/ImageD11/nbGui/3DXRD/2_3DXRD_index_z_slice.ipynb @@ -453,6 +453,19 @@ "grains2 = ImageD11.grain.read_grain_file(tmp_map_path)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# remove grains with no peaks\n", + "\n", + "grains2 = [grain for grain in grains2 if \"no peaks\" not in grain.intensity_info]" + ] + }, { "cell_type": "code", "execution_count": null, 
@@ -508,7 +521,7 @@ "outputs": [], "source": [ "# find the spike\n", - "absolute_minpks = 30" + "absolute_minpks = 25" ] }, { @@ -658,7 +671,6 @@ "\n", "parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_tdxrd_refined.par'\n", "\n", - " \n", "cf_strong_frac = 0.95\n", "cf_strong_dsmax = 0.92\n", "cf_strong_dstol = 0.01\n", @@ -675,7 +687,7 @@ "indexer_min_ring_count = 0\n", "\n", "makemap_hkl_tol_seq = [0.05, 0.025, 0.01]\n", - "makemap_import_minpks = 30\n", + "makemap_import_minpks = 25\n", "\n", "peak_assignment_hkl_tol = 0.05\n", "\n", @@ -746,6 +758,10 @@ " makemap_output = !makemap.py -p {parfile} -u {tmp_map_path} -U {tmp_map_path} -f {cf_strong_allrings_path} -F {unindexed_flt_path} -s cubic -t {makemap_hkl_tol_seq[inc]} --omega_slop={omega_slop} --no_sort\n", "\n", " grains2 = ImageD11.grain.read_grain_file(tmp_map_path)\n", + " \n", + " # remove grains with no peaks\n", + " grains2 = [grain for grain in grains2 if \"no peaks\" not in grain.intensity_info]\n", + " \n", " absolute_minpks = makemap_import_minpks\n", " grains_filtered = [grain for grain in grains2 if float(grain.npks) > absolute_minpks]\n", "\n", diff --git a/ImageD11/nbGui/3DXRD/frelon_peaksearch.py b/ImageD11/nbGui/3DXRD/frelon_peaksearch.py index afd7a275..e849b705 100755 --- a/ImageD11/nbGui/3DXRD/frelon_peaksearch.py +++ b/ImageD11/nbGui/3DXRD/frelon_peaksearch.py @@ -170,9 +170,9 @@ def get_dset(h5name, dsetname): def pps(arg): - hname, dsetname, num, omega, bgfile, worker_args = arg + hname, dsetname, num, omega, worker_args = arg if pps.worker is None: - pps.worker = worker(bgfile, *worker_args) + pps.worker = worker(**worker_args) frm = get_dset(hname, dsetname)[num] pks = pps.worker.peaksearch(frm, omega=omega) return num, pks @@ -192,7 +192,7 @@ def process(ds, bgfile, ncpu, worker_args): n_frames = omega.shape[0] - args = [(hname, frames_dset, i, omega[i], bgfile, worker_args) for i in range(n_frames)] + args = [(hname, frames_dset, i, omega[i], worker_args) for i in range(n_frames)] all_peaks = process_map(pps, args, chunksize=1) diff --git a/ImageD11/nbGui/nb_utils.py b/ImageD11/nbGui/nb_utils.py index 288eb07b..758a30a2 100644 --- a/ImageD11/nbGui/nb_utils.py +++ b/ImageD11/nbGui/nb_utils.py @@ -6,6 +6,7 @@ import numpy as np from matplotlib import pyplot as plt from tqdm import tqdm +from tqdm.contrib.concurrent import process_map import ImageD11.cImageD11 import ImageD11.columnfile @@ -14,11 +15,34 @@ import ImageD11.refinegrains import ImageD11.unitcell -from ImageD11.blobcorrector import eiger_spatial +from ImageD11.blobcorrector import eiger_spatial, correctorclass from scipy.optimize import curve_fit +def correct_pixel(pixel, spline_file): + sr, fr = pixel + sc, fc = ImageD11.blobcorrector.correctorclass(spline_file).correct(sr, fr) + return (sc, fc) + + +def apply_spatial(cf, spline_file, workers): + # sc = np.zeros(cf.nrows) + # fc = np.zeros(cf.nrows) + + print("Spatial correction...") + + raw_pixels = np.vstack((cf['s_raw'], cf['f_raw'])).T + + corrected_pixels = process_map(correct_pixel, raw_pixels, [spline_file] * len(raw_pixels), max_workers=workers, chunksize=len(raw_pixels)//workers) + + sc, fc = [list(t) for t in zip(*corrected_pixels)] + + cf.addcolumn(sc, "sc") + cf.addcolumn(fc, "fc") + + return cf + def find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list): samples_dict = {} From b780ba6a51ff13e886f1115c585f48e7943673b5 Mon Sep 17 00:00:00 2001 From: James Ball Date: Tue, 27 Feb 2024 19:59:25 +0100 Subject: [PATCH 
7/8] Add RGB IPF X,Y,Z outputs to HDF, allows viewing with H5Web --- ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb | 2 +- .../nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb | 50 ++++++----------- .../2_S3DXRD_sinograms_map_minor_phase.ipynb | 42 +++++++------- .../S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb | 56 ++++++++++++++++++- ImageD11/nbGui/nb_utils.py | 41 ++++++++++---- 5 files changed, 120 insertions(+), 71 deletions(-) diff --git a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb index 8d0ed4ce..ebf16050 100755 --- a/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb +++ b/ImageD11/nbGui/S3DXRD/1_S3DXRD_index.ipynb @@ -126,7 +126,7 @@ "# USER: pick a sample and a dataset you want to segment\n", "\n", "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = \"top_200um\"" + "dataset = \"top_250um\"" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb index 01fba849..8fdc2d32 100644 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map.ipynb @@ -86,8 +86,6 @@ "source": [ "# define our functions\n", "\n", - "# save recons and 2d properties to existing grain file\n", - "\n", "def read_grains(ds):\n", " with h5py.File(ds.grainsfile, 'r') as hin: \n", " grains_group = 'grains'\n", @@ -102,31 +100,6 @@ " \n", " return grains\n", "\n", - "# def fit_sine_wave(x_data, y_data, frequency):\n", - "# initial_guess = (ds.ymax - ds.ymin, np.mean(y_data), 0) # Initial guess for amplitude, offset, and phase\n", - " \n", - "# def sine_function(x, amplitude, offset, phase):\n", - "# return amplitude * np.sin(2 * np.pi * frequency * x + phase) + offset\n", - "\n", - "# # Fit the sine function to the data\n", - "# popt, _ = curve_fit(sine_function, x_data, y_data, p0=initial_guess, method='trf', loss='soft_l1', max_nfev=10000)\n", - "\n", - "# # Extract fitted parameters\n", - "# amplitude_fit, offset_fit, phase_fit = popt\n", - "\n", - "# return amplitude_fit, offset_fit, phase_fit\n", - "\n", - "# def get_cen_robust(grain, ds):\n", - " \n", - "# frequency = 0.5/(ds.omax - ds.omin)\n", - "# amplitude, offset, phase = fit_sine_wave(cf_strong.omega[grain.mask_4d], cf_strong.dty[grain.mask_4d], frequency)\n", - " \n", - "# x_translation = amplitude/2 * np.sin(phase)\n", - "# y_translation = amplitude/2 * np.cos(phase)\n", - " \n", - "# grain.cen_me = offset\n", - "# grain.dx_me = x_translation\n", - "# grain.dy_me = y_translation\n", "\n", "def map_grain_from_peaks(g, flt, ds):\n", " \"\"\"\n", @@ -346,13 +319,22 @@ " grp = hout['slice_recon']\n", " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", - " save_array(grp, 'ipf_z_col_map', rgb_array).attrs['description'] = 'IPF Z color at each pixel'\n", + " \n", + " ipfxdset = save_array(grp, 'ipf_x_col_map', rgb_x_array)\n", + " ipfxdset.attrs['description'] = 'IPF X color at each pixel'\n", + " ipfxdset.attrs['CLASS'] = 'IMAGE'\n", + " ipfydset = save_array(grp, 'ipf_y_col_map', rgb_y_array)\n", + " ipfydset.attrs['description'] = 'IPF Y color at each pixel'\n", + " ipfydset.attrs['CLASS'] = 'IMAGE'\n", + " ipfzdset = save_array(grp, 'ipf_z_col_map', rgb_z_array)\n", + " ipfzdset.attrs['description'] = 'IPF Z color at each pixel'\n", + " ipfzdset.attrs['CLASS'] = 'IMAGE'\n", " \n", " grains_group = 'grains'\n", "\n", " for g in tqdm(grains):\n", " gg 
= hout[grains_group][str(g.gid)]\n", - "\n", + " \n", " save_array(gg, 'recon', g.recon).attrs['description'] = 'Final reconstruction'\n", " \n", " \n", @@ -1051,7 +1033,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.4)" + "rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.4)" ] }, { @@ -1065,7 +1047,7 @@ "# plot initial output\n", "\n", "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(rgb_array)\n", + "ax.imshow(rgb_z_array)\n", "plt.show()" ] }, @@ -1190,7 +1172,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.3)" + "rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.3)" ] }, { @@ -1202,7 +1184,7 @@ "outputs": [], "source": [ "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(rgb_array)\n", + "ax.imshow(rgb_z_array)\n", "plt.show()" ] }, @@ -1435,7 +1417,7 @@ " for i, grain in enumerate(tqdm(grains)):\n", " grain.recon = np.loadtxt(os.path.join(recons_path, ds.dsname + f\"_mlem_recon_{i}.txt\"))\n", " \n", - " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", " \n", " save_grains(grains, ds)\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb index 26e116b5..205cbb47 100755 --- a/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/2_S3DXRD_sinograms_map_minor_phase.ipynb @@ -277,7 +277,16 @@ " grp = hout['slice_recon']\n", " save_array(grp, 'intensity', raw_intensity_array).attrs['description'] = 'Raw intensity array for all grains'\n", " save_array(grp, 'labels', grain_labels_array).attrs['description'] = 'Grain labels array for all grains'\n", - " save_array(grp, 'ipf_z_col_map', rgb_array).attrs['description'] = 'IPF Z color at each pixel'\n", + " \n", + " ipfxdset = save_array(grp, 'ipf_x_col_map', rgb_x_array)\n", + " ipfxdset.attrs['description'] = 'IPF X color at each pixel'\n", + " ipfxdset.attrs['CLASS'] = 'IMAGE'\n", + " ipfydset = save_array(grp, 'ipf_y_col_map', rgb_y_array)\n", + " ipfydset.attrs['description'] = 'IPF Y color at each pixel'\n", + " ipfydset.attrs['CLASS'] = 'IMAGE'\n", + " ipfzdset = save_array(grp, 'ipf_z_col_map', rgb_z_array)\n", + " ipfzdset.attrs['description'] = 'IPF Z color at each pixel'\n", + " ipfzdset.attrs['CLASS'] = 'IMAGE'\n", " \n", " grains_group = hout.create_group('grains')\n", " for g in tqdm(grains):\n", @@ -336,7 +345,7 @@ "# USER: pick a sample and a dataset you want to segment\n", "\n", "sample = \"FeAu_0p5_tR_nscope\"\n", - "dataset = \"top_100um\"" + "dataset = \"top_250um\"" ] }, { @@ -834,17 +843,6 @@ " pass" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# bad_gids = [46, 95, 102, 130, 137, 160, 123, 125, 136, 165, 174, 175, 176, 177]" - ] - }, { "cell_type": "code", "execution_count": null, @@ -869,7 +867,7 @@ "outputs": [], "source": [ "for g in grains:\n", - " g.translation = np.array([g.x_blob, g.y_blob, 0])" + " g.translation = np.array([g.x_blob, -g.y_blob, 
0])" ] }, { @@ -884,7 +882,7 @@ "fig, ax = plt.subplots(2,2, figsize=(12,12))\n", "a = ax.ravel()\n", "x = [g.x_blob for g in grains]\n", - "y = [g.y_blob for g in grains]\n", + "y = [-g.y_blob for g in grains]\n", "# s = [g.peaks_4d_selected.sum()/10 for g in grains]\n", "s = [10 for g in grains]\n", "a[0].scatter(x, y, s=s, c=[g.rgb_z for g in grains])\n", @@ -933,7 +931,7 @@ }, "outputs": [], "source": [ - "rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.7)" + "rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level=0.7)" ] }, { @@ -947,7 +945,7 @@ "# plot initial output\n", "\n", "fig, ax = plt.subplots(constrained_layout=True)\n", - "ax.imshow(rgb_array) # originally 1,2,0\n", + "ax.imshow(rgb_z_array) # originally 1,2,0\n", "plt.show()" ] }, @@ -1087,7 +1085,7 @@ "\n", "cutoff_level = 0.7\n", "\n", - "grain_too_many_px = 25\n", + "grain_too_many_px = 22\n", "\n", "for sample, datasets in samples_dict.items():\n", " for dataset in datasets:\n", @@ -1138,7 +1136,7 @@ " main_phase_grains = read_grains_main_phase(ds)\n", " whole_sample_mask = main_phase_grains[0].sample_mask\n", " y0 = main_phase_grains[0].y0\n", - " pad = ((major_phase_grains[0].recon.shape[0] - major_phase_grains[0].ssino.shape[0]))\n", + " pad = ((main_phase_grains[0].recon.shape[0] - main_phase_grains[0].ssino.shape[0]))\n", " \n", " utils.assign_peaks_to_grains(grains, cf_strong, tol=peak_assign_tol)\n", " \n", @@ -1192,16 +1190,16 @@ " grains = [grain for grain in grains if not grain.bad_recon]\n", " \n", " for g in grains:\n", - " g.translation = np.array([g.x_blob, g.y_blob, 0])\n", + " g.translation = np.array([g.x_blob, -g.y_blob, 0])\n", " \n", - " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", " \n", " labels, counts = np.unique(grain_labels_array, return_counts=True)\n", " bad_gids = [int(label) for (label, count) in zip(labels, counts) if count > grain_too_many_px and label > 0]\n", " \n", " grains = [grain for grain in grains if grain.gid not in bad_gids]\n", " \n", - " rgb_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", + " rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array = utils.build_slice_arrays(grains, cutoff_level)\n", " \n", " save_grains_minor_phase(grains, ds)\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb index a02a5975..8f50ff29 100755 --- a/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb +++ b/ImageD11/nbGui/S3DXRD/3_S3DXRD_strain_maps_pbp.ipynb @@ -119,13 +119,21 @@ " return hds\n", "\n", "\n", - "def save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map):\n", + "def save_ubi_map(ds):\n", " with h5py.File(ds.pbpubifile, 'w') as hout:\n", " grp = hout.create_group('arrays')\n", " save_array(grp, 'ubi_map', ubi_map).attrs['description'] = 'Refined UBI values at each pixel'\n", " save_array(grp, 'eps_map', eps_map).attrs['description'] = 'Strain matrices (sample ref) at each pixel'\n", " save_array(grp, 'misorientation_map', misorientation_map).attrs['description'] = 'Misorientation to grain avg at each pixel'\n", - " save_array(grp, 'ipf_z_col_map', ipf_z_col_map).attrs['description'] = 'IPF Z color at 
each pixel'" + " ipfxdset = save_array(grp, 'ipf_x_col_map', ipf_x_col_map)\n", + " ipfxdset.attrs['description'] = 'IPF X color at each pixel'\n", + " ipfxdset.attrs['CLASS'] = 'IMAGE'\n", + " ipfydset = save_array(grp, 'ipf_y_col_map', ipf_y_col_map)\n", + " ipfydset.attrs['description'] = 'IPF Y color at each pixel'\n", + " ipfydset.attrs['CLASS'] = 'IMAGE'\n", + " ipfzdset = save_array(grp, 'ipf_z_col_map', ipf_z_col_map)\n", + " ipfzdset.attrs['description'] = 'IPF Z color at each pixel'\n", + " ipfzdset.attrs['CLASS'] = 'IMAGE'" ] }, { @@ -458,6 +466,48 @@ " continue" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "21ce0f7d-6be5-4dcd-9d35-cd6178d077bc", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ipf_x_col_map = np.empty((grains[0].recon.shape + (3,)))\n", + "ipf_x_col_map.fill(np.nan)\n", + "for i in tqdm(range(grains[0].recon.shape[0])):\n", + " for j in range(grains[0].recon.shape[1]):\n", + " try:\n", + " this_grain = pixel_grain_lut[i, j]\n", + " this_ipf_x_col = utils.hkl_to_color_cubic(utils.crystal_direction_cubic(this_grain.ubi, (1, 0, 0)))\n", + " ipf_x_col_map[i, j] = this_ipf_x_col\n", + " except KeyError:\n", + " continue" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "08768b2d-0146-4163-8e8d-231fa14bd4a0", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ipf_y_col_map = np.empty((grains[0].recon.shape + (3,)))\n", + "ipf_y_col_map.fill(np.nan)\n", + "for i in tqdm(range(grains[0].recon.shape[0])):\n", + " for j in range(grains[0].recon.shape[1]):\n", + " try:\n", + " this_grain = pixel_grain_lut[i, j]\n", + " this_ipf_y_col = utils.hkl_to_color_cubic(utils.crystal_direction_cubic(this_grain.ubi, (0, 1, 0)))\n", + " ipf_y_col_map[i, j] = this_ipf_y_col\n", + " except KeyError:\n", + " continue" + ] + }, { "cell_type": "code", "execution_count": null, @@ -588,7 +638,7 @@ }, "outputs": [], "source": [ - "save_ubi_map(ds, ubi_map, eps_map, misorientation_map, ipf_z_col_map)" + "save_ubi_map(ds)" ] }, { diff --git a/ImageD11/nbGui/nb_utils.py b/ImageD11/nbGui/nb_utils.py index 758a30a2..2b2236bc 100644 --- a/ImageD11/nbGui/nb_utils.py +++ b/ImageD11/nbGui/nb_utils.py @@ -560,9 +560,18 @@ def refine_grain_positions(cf_3d, ds, grains, parfile, symmetry="cubic", cf_frac def build_slice_arrays(grains, cutoff_level=0.0): grain_labels_array = np.zeros_like(grains[0].recon) - 1 - red = np.zeros_like(grains[0].recon) - grn = np.zeros_like(grains[0].recon) - blu = np.zeros_like(grains[0].recon) + + redx = np.zeros_like(grains[0].recon) + grnx = np.zeros_like(grains[0].recon) + blux = np.zeros_like(grains[0].recon) + + redy = np.zeros_like(grains[0].recon) + grny = np.zeros_like(grains[0].recon) + bluy = np.zeros_like(grains[0].recon) + + redz = np.zeros_like(grains[0].recon) + grnz = np.zeros_like(grains[0].recon) + bluz = np.zeros_like(grains[0].recon) raw_intensity_array = np.zeros_like(grains[0].recon) @@ -579,21 +588,31 @@ def norm(r): g_raw_intensity_mask = g_raw_intensity > raw_intensity_array - g_raw_intenstiy_map = g_raw_intensity[g_raw_intensity_mask] + g_raw_intensity_map = g_raw_intensity[g_raw_intensity_mask] - raw_intensity_array[g_raw_intensity_mask] = g_raw_intenstiy_map + raw_intensity_array[g_raw_intensity_mask] = g_raw_intensity_map + + redx[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_x[0] + grnx[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_x[1] + blux[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_x[2] + + redy[g_raw_intensity_mask] = g_raw_intensity_map * 
g.rgb_y[0] + grny[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_y[1] + bluy[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_y[2] - red[g_raw_intensity_mask] = g_raw_intenstiy_map * g.rgb_z[0] - grn[g_raw_intensity_mask] = g_raw_intenstiy_map * g.rgb_z[1] - blu[g_raw_intensity_mask] = g_raw_intenstiy_map * g.rgb_z[2] + redz[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_z[0] + grnz[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_z[1] + bluz[g_raw_intensity_mask] = g_raw_intensity_map * g.rgb_z[2] grain_labels_array[g_raw_intensity_mask] = i raw_intensity_array[raw_intensity_array == cutoff_level] = 0 + + rgb_x_array = np.transpose((redx, grnx, blux), axes=(1, 2, 0)) + rgb_y_array = np.transpose((redy, grny, bluy), axes=(1, 2, 0)) + rgb_z_array = np.transpose((redz, grnz, bluz), axes=(1, 2, 0)) - rgb_array = np.transpose((red, grn, blu), axes=(1, 2, 0)) - - return rgb_array, grain_labels_array, raw_intensity_array + return rgb_x_array, rgb_y_array, rgb_z_array, grain_labels_array, raw_intensity_array def slurm_submit_and_wait(bash_script_path, wait_time_sec=60): From ebd4cff4f5dae0083ed57dcdee5e9fe350f5777e Mon Sep 17 00:00:00 2001 From: James Ball Date: Tue, 27 Feb 2024 20:01:32 +0100 Subject: [PATCH 8/8] py2 compat --- ImageD11/nbGui/nb_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ImageD11/nbGui/nb_utils.py b/ImageD11/nbGui/nb_utils.py index 2b2236bc..7ead96fb 100644 --- a/ImageD11/nbGui/nb_utils.py +++ b/ImageD11/nbGui/nb_utils.py @@ -51,7 +51,7 @@ def find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list) dsets_list = [] for folder in all_dset_folders_for_sample: if dset_prefix in folder: - dset_name = folder.split(f"{sample}_")[1] + dset_name = folder.split(sample + "_")[1] if dset_name not in skips_dict[sample]: dsets_list.append(dset_name)
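
For reference, the two nb_utils helpers touched in these patches are used together in the batch-processing cells of the notebooks: find_datasets_to_process() discovers the dataset folders to loop over, and apply_spatial() runs the spline-based spatial correction across several processes. A minimal usage sketch with hypothetical paths (signatures follow the definitions shown above):

    import os
    import ImageD11.columnfile
    import ImageD11.nbGui.nb_utils as utils

    rawdata_path = "/path/to/experiment/RAW_DATA"  # hypothetical
    skips_dict = {"FeAu_0p5_tR_nscope": []}
    sample_list = ["FeAu_0p5_tR_nscope"]

    # find every dataset folder named like "<sample>_top...", minus any skips
    samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, "top", sample_list)

    # spatially correct a peaks columnfile in parallel, one task per available core
    nthreads = len(os.sched_getaffinity(os.getpid()))
    cf_3d = ImageD11.columnfile.columnfile("peaks_3d.flt")  # hypothetical file
    cf_3d = utils.apply_spatial(cf_3d, "/path/to/frelon.spline", nthreads)
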