Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

No more forks for the peak properties. #223

Merged
merged 3 commits into from
Feb 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
196 changes: 144 additions & 52 deletions ImageD11/sinograms/assemble_label.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@

from __future__ import print_function
import h5py, os, time, numpy as np
import numba
import fabio

"""WARNING: work in progresss"""

Expand All @@ -11,35 +12,105 @@
# ... this is not really needed, but saves redoing the pixel labelling code


SCANMOTORS = ("diffrz", "diffrz_cen360", "diffrz_center", "fpico4", "fpico3", "diffty", "diffty_center",
"rot", "rot_cen360", "rot_center", "fpico6", "dty", "dty_center")
HEADERMOTORS = ("diffty", "diffrz", "samtx", "samty", "samtz", "diffry", "samrx", "samry",
"dty", "rot", "pz", "px", "py", "shtz", "shty", "shtx")
SCANMOTORS = (
"diffrz",
"diffrz_cen360",
"diffrz_center",
"fpico4",
"fpico3",
"diffty",
"diffty_center",
"rot",
"rot_cen360",
"rot_center",
"fpico6",
"dty",
"dty_center",
)
HEADERMOTORS = (
"diffty",
"diffrz",
"samtx",
"samty",
"samtz",
"diffry",
"samrx",
"samry",
"dty",
"rot",
"pz",
"px",
"py",
"shtz",
"shty",
"shtx",
)


def testready(dsobj):
""" assume they are finish if they exist. Might be still writing ??? """
"""assume they are finish if they exist. Might be still writing ???"""
done, todo = dsobj.check_sparse()
return (done>0) and (todo == 0)
return (done > 0) and (todo == 0)


def getsparse(dsobj, num):
    """Read one sparse-segmentation lima file back into memory.

    dsobj = ImageD11 DataSet object (supplies analysispath / sparsefiles / limapath)
    num   = index into dsobj.sparsefiles selecting which file to read

    Returns (row, col, intensity, nnz):
        row, col, intensity = flat per-pixel arrays for the whole frame stack
        nnz = number of pixels belonging to each frame
    """
    fname = os.path.join(dsobj.analysispath, dsobj.sparsefiles[num])
    with h5py.File(fname, "r") as hin:
        # Hoist the group lookup: read all four datasets from the same group
        grp = hin[dsobj.limapath]
        row = grp["row"][:]
        col = grp["col"][:]
        intensity = grp["intensity"][:]
        nnz = grp["nnz"][:]
    return row, col, intensity, nnz


@numba.njit(boundscheck=True)
def filterpixels(cutimage, row, col, intensity, nnz):
    """Keep only the pixels whose intensity exceeds a per-pixel threshold image.

    cutimage = 2D array of thresholds, indexed as cutimage[row, col]
    row, col, intensity = flat pixel arrays for a stack of frames
    nnz = pixels per frame (so row/col/intensity are frame-ordered runs)

    Returns filtered (row_out, col_out, intensity_out, nnz_out) in the
    same layout as the inputs.
    """
    nframes = len(nnz)
    # Pass 1: count the surviving pixels per frame so the output
    # arrays can be allocated at exactly the right size.
    nnz_out = np.zeros_like(nnz)
    start = 0
    for frame in range(nframes):
        end = start + nnz[frame]
        kept = 0
        for k in range(start, end):
            if intensity[k] > cutimage[row[k], col[k]]:
                kept += 1
        nnz_out[frame] = kept
        start = end
    total = 0
    for frame in range(nframes):
        total += nnz_out[frame]
    row_out = np.zeros((total,), dtype=row.dtype)
    col_out = np.zeros((total,), dtype=col.dtype)
    intensity_out = np.zeros((total,), dtype=intensity.dtype)
    # Pass 2: copy the surviving pixels across, frame by frame.
    dst = 0
    start = 0
    for frame in range(nframes):
        end = start + nnz[frame]
        for k in range(start, end):
            if intensity[k] > cutimage[row[k], col[k]]:
                row_out[dst] = row[k]
                col_out[dst] = col[k]
                intensity_out[dst] = intensity[k]
                dst += 1
        start = end
    return row_out, col_out, intensity_out, nnz_out


def harvest_masterfile( dset, outname,
scanmotors=SCANMOTORS,
headermotors=HEADERMOTORS, ):
def harvest_masterfile(
dset, outname, scanmotors=SCANMOTORS, headermotors=HEADERMOTORS, cutimage=None
):
"""
dset = ImageD11.sinograms.dataset.DataSet object
outname = sparse file to write
"""

cut = image = 2D image of cutoffs for keeping pixels (e.g. keep high angle
and use a higher threshold for low angle or noisy)
"""
opts = {
"chunks": (10000,),
"maxshape": (None,),
Expand All @@ -48,7 +119,7 @@ def harvest_masterfile( dset, outname,
}
with h5py.File(outname, "a") as hout:
hout.attrs["h5input"] = dset.masterfile
print("Harvesting",dset.masterfile,end=": ")
print("Harvesting", dset.masterfile, end=": ")
with h5py.File(dset.masterfile, "r") as hin:
done = []
for scan in dset.scans:
Expand All @@ -58,9 +129,9 @@ def harvest_masterfile( dset, outname,
continue
gin = hin[scan]
bad = False
for check in ('title','measurement','measurement/'+dset.detector):
for check in ("title", "measurement", "measurement/" + dset.detector):
if check not in hin[scan]:
print(scan,"missing",check,'skipping')
print(scan, "missing", check, "skipping")
bad = True
if bad:
print("Skipping", scan)
Expand All @@ -70,14 +141,14 @@ def harvest_masterfile( dset, outname,
gm = g.require_group("measurement")
for m in scanmotors: # vary : many
if m in gin["measurement"]:
data = data=gin["measurement"][m][:]
ds = gm.require_dataset(m, shape=data.shape, dtype = data.dtype )
data = data = gin["measurement"][m][:]
ds = gm.require_dataset(m, shape=data.shape, dtype=data.dtype)
ds[()] = data
gip = g.require_group("instrument/positioners")
for m in headermotors: # fixed : scalar
if "instrument/positioners/%s"%(m) in gin:
data=gin["instrument/positioners"][m][()]
ds = gip.require_dataset(m, shape = data.shape, dtype = data.dtype )
if "instrument/positioners/%s" % (m) in gin:
data = gin["instrument/positioners"][m][()]
ds = gip.require_dataset(m, shape=data.shape, dtype=data.dtype)
ds[()] = data
try:
frms = gin["measurement"][dset.detector]
Expand All @@ -86,49 +157,70 @@ def harvest_masterfile( dset, outname,
print(list(gin))
print(list(gin["measurement"]))
print(dset.detector)
raise
raise
g.attrs["itype"] = frms.dtype.name
g.attrs["nframes"] = frms.shape[0]
g.attrs["shape0"] = frms.shape[1]
g.attrs["shape1"] = frms.shape[2]
print(scan, end=', ')
print(scan, end=", ")
done.append(scan)
print()

# Finished with master file. Now harvest the segmented files.
idx = 0
titles = ('row','col','intensity','nnz')
print('Loading pixels:',end=' ')
print("Loading pixels:", end=" ")
for scan in done:
g = hout.require_group( scan )
for name in 'row', 'col':
g = hout.require_group(scan)
for name in "row", "col":
if name not in g:
g.create_dataset(name, shape = (0,), dtype=np.uint16, **opts)
g.create_dataset(name, shape=(0,), dtype=np.uint16, **opts)
if "intensity" not in g:
g.create_dataset("intensity", shape = (0,), dtype=g.attrs['itype'], **opts)
nfrm = g.attrs['nframes']
g.require_dataset("nnz", shape = (nfrm,), dtype=np.uint32)
g.create_dataset(
"intensity", shape=(0,), dtype=g.attrs["itype"], **opts
)
nfrm = g.attrs["nframes"]
g.require_dataset("nnz", shape=(nfrm,), dtype=np.uint32)
nstart = nread = npx = pstart = 0
while nread < nfrm:
pixels = getsparse( dset, idx, titles )
idx += 1 # loop over sparse files in this scan
nread = nstart + len(pixels['nnz']) # number of frames in this limafile
g['nnz'][nstart : nread] = pixels['nnz']
row, col, intensity, nnz = getsparse(dset, idx)
if cutimage is not None:
row, col, intensity, nnz = filterpixels(
cutimage, row, col, intensity, nnz
)
idx += 1 # loop over sparse files in this scan
nread = nstart + len(nnz) # number of frames in this limafile
g["nnz"][nstart:nread] = nnz
nstart = nread
pread = pstart + len(pixels['row']) # number of pixels in this limafile
for name in 'row', 'col', 'intensity':
g[name].resize( (pread, ) )
g[name][pstart:pread] = pixels[name]
pread = pstart + len(row) # number of pixels in this limafile
g["row"].resize((pread,))
g["row"][pstart:pread] = row
g["col"].resize((pread,))
g["col"][pstart:pread] = col
g["intensity"].resize((pread,))
g["intensity"][pstart:pread] = intensity
pstart = pread
print(scan,end=', ')
print(scan, end=", ")
print()
return outname


def main(dsname, outname=None, cutimage=None):
    """Harvest a dataset's segmented pixels into a single sparse file.

    dsname = DataSet object, or the filename of a saved DataSet, describing
             the masterfile + segmentation
    outname = sparse pixels file to write. Defaults to dset.sparsefile
    cutimage = optional filename of a 2D threshold image; pixels not above
               their per-pixel threshold are discarded (see harvest_masterfile)
    """
    # Function-scope import: the module body never imported ImageD11, so
    # referencing ImageD11.sinograms.dataset at module level would raise
    # NameError. Importing here also avoids a circular import within the
    # ImageD11.sinograms package.
    import ImageD11.sinograms.dataset

    # NOTE(review): the class is spelled DataSet (CamelCase S) elsewhere in
    # this module's docstrings — confirm against ImageD11.sinograms.dataset.
    if isinstance(dsname, ImageD11.sinograms.dataset.DataSet):
        dset = dsname  # already a DataSet object: use it directly
    else:
        dset = ImageD11.sinograms.dataset.load(dsname)
    if outname is None:
        outname = dset.sparsefile
    if cutimage is not None:
        # fabio reads the threshold image from disk; pass the array along
        cutimage = fabio.open(cutimage).data
    harvest_masterfile(dset, outname, cutimage=cutimage)


if __name__ == "__main__":
    import sys

    # outname is optional now that it defaults to dset.sparsefile
    main(sys.argv[1], sys.argv[2] if len(sys.argv) > 2 else None)
Loading
Loading