Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Many notebook changes, better sinogram position fitting #227

Merged
merged 8 commits into the base branch from the source branch
Feb 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
203 changes: 147 additions & 56 deletions ImageD11/nbGui/3DXRD/0_3DXRD_segment_frelon.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"source": [
"# Jupyter notebook based on ImageD11 to process 3DXRD data\n",
"# Written by Haixing Fang, Jon Wright and James Ball\n",
"## Date: 16/02/2024"
"## Date: 27/02/2024"
]
},
{
Expand Down Expand Up @@ -78,10 +78,11 @@
"from IPython.display import display\n",
"%matplotlib widget\n",
"\n",
"from ImageD11.nbGui import nb_utils as utils\n",
"\n",
"from frelon_peaksearch import worker, process\n",
"\n",
"from utils import apply_spatial"
"# from utils import apply_spatial"
]
},
{
Expand All @@ -107,17 +108,13 @@
},
"outputs": [],
"source": [
"# NEW DATASETS\n",
"\n",
"### USER: specify your experimental directory\n",
"\n",
"base_dir = \"/data/visitor/ma5837/id11/20240208\"\n",
"\n",
"rawdata_path = os.path.join(base_dir, 'RAW_DATA')\n",
"rawdata_path = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/RAW_DATA\"\n",
"\n",
"!ls -lrt {rawdata_path}\n",
"\n",
"processed_data_root_dir = os.path.join(base_dir, 'PROCESSED_DATA') # USER: modify this to change the destination folder if desired"
"processed_data_root_dir = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226\" # USER: modify this to change the destination folder if desired"
]
},
{
Expand All @@ -131,8 +128,8 @@
"source": [
"# USER: pick a sample and a dataset you want to segment\n",
"\n",
"sample = \"S12\"\n",
"dataset = \"FF_zeries_0\"\n",
"sample = \"FeAu_0p5_tR\"\n",
"dataset = \"ff1\"\n",
"\n",
"# USER: specify path to detector spline file\n",
"\n",
Expand Down Expand Up @@ -172,53 +169,120 @@
"source": [
"# USER: specify path to background file\n",
"\n",
"bg_file = os.path.join(ds.analysisroot, \"CeO2/CeO2_bkg_3dxrd_beam_shutter_open/CeO2_bkg_3dxrd_beam_shutter_open.edf\")"
"bg_file = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/ff_bkg.edf\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21b07c8a-7b1d-4fb7-be52-15ecde46130d",
"id": "069b343d-4695-45fe-9ead-eab2c4c4cd16",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"with h5py.File(ds.masterfile, 'r') as h5In:\n",
" test_image = h5In['1.1/measurement/frelon3'][0].astype('uint16')"
"#Define the initial parameters\n",
"start_worker_args = {\n",
" \"bgfile\":bg_file,\n",
" \"threshold\":50,\n",
" \"smoothsigma\":1.0,\n",
" \"bgc\":0.9,\n",
" \"minpx\":3,\n",
" \"m_offset_thresh\":80,\n",
" \"m_ratio_thresh\":135,\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e04317c9-f029-4167-94b0-ed68b02ed9d6",
"id": "ef30f6f8-8611-4f66-be3b-006c890b91fa",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"test_image_worker = worker(bgfile=bg_file)\n",
"with h5py.File(ds.masterfile, 'r') as h5In:\n",
" test_image = h5In['1.1/measurement/frelon3'][0].astype('uint16')\n",
"\n",
"# Display the image initially\n",
"fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, figsize=(16, 5))\n",
"test_image_worker = worker(**start_worker_args)\n",
"goodpeaks = test_image_worker.peaksearch(img=test_image, omega=0)\n",
"fc, sc = goodpeaks[:, 23:25].T"
"fc, sc = goodpeaks[:, 23:25].T # 23 and 24 are the columns for fc and sc from blob properties\n",
"\n",
"im1 = axs[0].imshow(test_image, norm=LogNorm(vmax=1000))\n",
"axs[0].set_title(\"Original image\")\n",
"im2 = axs[1].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n",
"im3 = axs[2].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n",
"sc1, = axs[2].plot(fc, sc, marker='+', c=\"r\", ls=\"\")\n",
"axs[2].set_aspect(1)\n",
"plt.show()\n",
"\n",
"thresh_slider = widgets.IntSlider(value=start_worker_args[\"threshold\"], min=1, max=100, step=1, description='Threshold:')\n",
"smsig_slider = widgets.FloatSlider(value=start_worker_args[\"smoothsigma\"], min=0.0, max=1.0, step=0.05, description='Smoothsigma:')\n",
"bgc_slider = widgets.FloatSlider(value=start_worker_args[\"bgc\"], min=0.0, max=1.0, step=0.05, description='bgc:')\n",
"minpx_slider = widgets.IntSlider(value=start_worker_args[\"minpx\"], min=1, max=5, step=1, description='minpx:')\n",
"mofft_slider = widgets.IntSlider(value=start_worker_args[\"m_offset_thresh\"], min=1, max=200, step=1, description='m_offset_thresh:')\n",
"mratt_slider = widgets.IntSlider(value=start_worker_args[\"m_ratio_thresh\"], min=1, max=200, step=1, description='m_ratio_thresh:')\n",
"\n",
"\n",
"def update(threshold, smoothsigma, bgc, minpx, m_offset_thresh, m_ratio_thresh):\n",
" image_worker = worker(bg_file,\n",
" threshold,\n",
" smoothsigma,\n",
" bgc,\n",
" minpx,\n",
" m_offset_thresh,\n",
" m_ratio_thresh)\n",
" goodpeaks = image_worker.peaksearch(img=test_image, omega=0)\n",
" fc, sc = goodpeaks[:, 23:25].T\n",
" im2.set_data(image_worker.smoothed)\n",
" im3.set_data(image_worker.smoothed)\n",
" sc1.set_data(fc, sc)\n",
" plt.draw()\n",
"\n",
"interactive_plot = widgets.interactive(update,\n",
" threshold=thresh_slider,\n",
" smoothsigma=smsig_slider,\n",
" bgc=bgc_slider,\n",
" minpx=minpx_slider,\n",
" m_offset_thresh=mofft_slider,\n",
" m_ratio_thresh=mratt_slider)\n",
"\n",
"display(interactive_plot)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cbde6264-a634-49f3-b858-dfdc55aa94b4",
"id": "c0a98042-065d-4d22-bd1e-e9c656432f44",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"fig, axs = plt.subplots(1, 3, sharex=True, sharey=True, figsize=(16, 5))\n",
"axs[0].imshow(test_image, norm=LogNorm(vmax=1000))\n",
"axs[0].set_title(\"Original image\")\n",
"axs[1].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n",
"axs[2].imshow(test_image_worker.smoothed, cmap=\"viridis\", norm=LogNorm(vmax=1000), interpolation=\"nearest\")\n",
"axs[2].scatter(fc, sc, marker='+', c=\"r\")\n",
"axs[2].set_aspect(1)\n",
"plt.show()"
"end_worker_args = {\n",
" \"bgfile\":bg_file,\n",
" \"threshold\":thresh_slider.value,\n",
" \"smoothsigma\":smsig_slider.value,\n",
" \"bgc\":bgc_slider.value,\n",
" \"minpx\":minpx_slider.value,\n",
" \"m_offset_thresh\":mofft_slider.value,\n",
" \"m_ratio_thresh\":mratt_slider.value,\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "eee00548-3a48-44d0-b4ad-e71b71de95ca",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"print(end_worker_args)"
]
},
{
Expand All @@ -232,7 +296,9 @@
"source": [
"# now we run the segmenter on all our data\n",
"\n",
"cf_2d, cf_3d = process(ds, bg_file, 64)"
"nthreads = len(os.sched_getaffinity(os.getpid()))\n",
"\n",
"cf_2d, cf_3d = process(ds, bg_file, nthreads-1, end_worker_args)"
]
},
{
Expand All @@ -253,7 +319,7 @@
"hits_dict = dict(zip(unique, counts))\n",
"hits_dict_max = sorted(hits_dict.items(), key=lambda x: x[1], reverse=True)\n",
"\n",
"m = np.isin(cf_3d.index, [spot3d_id for spot3d_id, count in hits_dict_max[4:5]])\n",
"m = np.isin(cf_3d.index, [spot3d_id for spot3d_id, count in hits_dict_max[500:501]])\n",
"cf_3d_single_peak = cf_3d.copy()\n",
"cf_3d_single_peak.filter(m)\n",
"\n",
Expand Down Expand Up @@ -284,7 +350,7 @@
},
"outputs": [],
"source": [
"cf_2d = apply_spatial(cf_2d, spline_file)"
"cf_2d = utils.apply_spatial(cf_2d, spline_file, nthreads)"
]
},
{
Expand All @@ -296,7 +362,7 @@
},
"outputs": [],
"source": [
"cf_3d = apply_spatial(cf_3d, spline_file)"
"cf_3d = utils.apply_spatial(cf_3d, spline_file, nthreads)"
]
},
{
Expand All @@ -308,7 +374,7 @@
},
"outputs": [],
"source": [
"parfile = 'Fe_refined.par'"
"parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/James/20240226/Fe_tdxrd_refined.par'"
]
},
{
Expand Down Expand Up @@ -340,6 +406,18 @@
"ImageD11.columnfile.colfile_to_hdf(cf_3d, ds.col3dfile)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "148fb053-420b-4534-ba48-9e6a67e7a746",
"metadata": {},
"outputs": [],
"source": [
"# change to 0 to allow all cells to be run automatically\n",
"if 1:\n",
" raise ValueError(\"Hello!\")"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -348,29 +426,31 @@
"outputs": [],
"source": [
"# Now that we're happy with our segmentation parameters, we can run the below cell to do this in bulk for many samples/datasets\n",
"# just modify samples_dict accordingly!\n",
"# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
"# you can add samples and datasets to skip\n",
"\n",
"skips_dict = {\n",
" \"FeAu_0p5_tR\": []\n",
"}\n",
"\n",
"dset_prefix = \"ff\"\n",
"\n",
"sample_list = [\"FeAu_0p5_tR\"]\n",
" \n",
"samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
" \n",
"# manual override:\n",
"# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_200um\"]}\n",
"\n",
"mask_path = '/data/id11/inhouse1/ewoks/detectors/files/Frelon2k_C36/mask.edf'\n",
"\n",
"spline_file = '/data/id11/inhouse1/ewoks/detectors/files/Frelon2k_C36/frelon36.spline'\n",
"parfile = 'Fe_refined.par'\n",
"\n",
"samples_dict = {\n",
" \"S13\" : [\n",
" \"FF_zeries_0\",\n",
" \"FF_zeries_1\",\n",
" \"FF_zeries_2\",\n",
" \"FF_zeries_3\",\n",
" \"FF_zeries_4\",\n",
" ],\n",
" \"S14\" : [\n",
" \"FF_zeries_0\",\n",
" \"FF_zeries_1\",\n",
" \"FF_zeries_2\",\n",
" \"FF_zeries_3\",\n",
" \"FF_zeries_4\",\n",
" ],\n",
"}\n",
"parfile = '/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/fitted.par'\n",
"bg_file = \"/home/esrf/james1997a/Data/ihma439/id11/20231211/PROCESSED_DATA/FeAu_0p5_tR/tdxrd_all/ff_bkg.edf\"\n",
"\n",
"worker_args = end_worker_args\n",
"\n",
"nthreads = len(os.sched_getaffinity(os.getpid()))\n",
"\n",
"for sample, datasets in samples_dict.items():\n",
" for dataset in datasets:\n",
Expand All @@ -383,18 +463,21 @@
" detector=\"frelon3\",\n",
" omegamotor=\"diffrz\",\n",
" dtymotor=\"diffty\")\n",
" \n",
" if os.path.exists(ds.col2dfile):\n",
" print(f\"Found existing cf_2d for {dataset} in {sample}, skipping\")\n",
" continue\n",
" \n",
" ds.import_all(scans=[\"1.1\"])\n",
" print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n",
" ds.save()\n",
" \n",
" bg_path = os.path.join(ds.analysisroot, \"CeO2/CeO2_bkg_3dxrd_beam_shutter_open/CeO2_bkg_3dxrd_beam_shutter_open.edf\")\n",
" \n",
"\n",
" print(\"Peaksearching\")\n",
" cf_2d, cf_3d = process(ds, bg_path, 64)\n",
" cf_2d, cf_3d = process(ds, bg_file, nthreads-1, worker_args)\n",
" \n",
" print(\"Spatially correcting peaks\")\n",
" cf_2d = apply_spatial(cf_2d, spline_file)\n",
" cf_3d = apply_spatial(cf_3d, spline_file)\n",
" cf_2d = utils.apply_spatial(cf_2d, spline_file, nthreads)\n",
" cf_3d = utils.apply_spatial(cf_3d, spline_file, nthreads)\n",
" \n",
" print(\"Saving peaks to file\")\n",
" cf_2d.parameters.loadparameters(parfile)\n",
Expand All @@ -406,6 +489,14 @@
" cf_3d.updateGeometry()\n",
" ImageD11.columnfile.colfile_to_hdf(cf_3d, ds.col3dfile)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ab892957-5ce7-4f04-a01c-c04cc9a2715c",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
Loading
Loading