Commit 703b926
Remove deprecation warnings. (#235)
* Remove deprecation warnings. Remove references to err_gehrels. Clean fpavg code so it doesn't divide by zero when calculating the conversion from counts to flux.

* Ensure numpy < 2.0, put devdeps back in for pytest
stscirij authored Feb 20, 2024
1 parent 8eb63d1 commit 703b926
Showing 9 changed files with 20 additions and 89 deletions.
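Most of the hunks below replace the deprecated NumPy alias np.bool8 with np.bool_ (np.bool8 was deprecated in NumPy 1.24 and is gone in NumPy 2.0, which is presumably also why setup.py gains the numpy<2.0 pin for now). A minimal sketch of the alias change, not part of the commit:

import numpy as np

# Old spelling, removed by this commit:
#     flags = np.zeros(10, dtype=np.bool8)
# Supported spelling used throughout the hunks below:
flags = np.zeros(10, dtype=np.bool_)
assert flags.dtype == bool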
2 changes: 1 addition & 1 deletion .github/workflows/python_testing.yml
@@ -38,4 +38,4 @@ jobs:
pytest-results-summary: true
- macos: py3-xdist
pytest-results-summary: true
- linux: py3-devdeps-xdist
- linux: py3-devdeps-xdist
10 changes: 5 additions & 5 deletions calcos/concurrent.py
@@ -864,7 +864,7 @@ def shift1Corr(self, n, i0, i1, extrapolate=False):
for segment in self.segment_list:

# Restrict the correction to the applicable regions.
- shift_flags = np.zeros(i1 - i0, dtype=np.bool8)
+ shift_flags = np.zeros(i1 - i0, dtype=np.bool_)
locn_list = self.regions[segment]
for region in locn_list:
if region[0] is None:
@@ -1323,7 +1323,7 @@ def setShiftKeywords(self, avg_dx, avg_dy):
self.ofd[1].header[key] = sum_ndf

# use self.regions for dpixel1[abc]
- shift_flags = np.zeros(len(self.eta), dtype=np.bool8)
+ shift_flags = np.zeros(len(self.eta), dtype=np.bool_)
locn_list = self.regions[segment]
# if NUV, take the region for the PSA (lower pixel numbers)
region = locn_list[0]
@@ -1699,7 +1699,7 @@ def shift2Corr(self, n, i0, i1, extrapolate=False):

# Restrict the correction to the applicable region. Note that the
# limits of the region (the active area) are not adjusted by shift2.
- shift_flags = np.zeros(i1 - i0, dtype=np.bool8)
+ shift_flags = np.zeros(i1 - i0, dtype=np.bool_)
region = self.regions[self.segment_list[0]][0]
shift_flags |= np.logical_and(self.eta[i0:i1] >= region[0],
self.eta[i0:i1] <= region[1])
@@ -1953,13 +1953,13 @@ def findShifts(self):

# Now find the mean values of xi and of eta, but restrict
# the range to the median plus or minus DX or DY.
- select = np.zeros(len(self.xi), dtype=np.bool8)
+ select = np.zeros(len(self.xi), dtype=np.bool_)
select[i0:i1] = 1
select = np.where(self.xi < x_median-DX, False, select)
select = np.where(self.xi > x_median+DX, False, select)
select = np.where(self.eta < y_median-DY, False, select)
select = np.where(self.eta > y_median+DY, False, select)
- select = select.astype(np.bool8)
+ select = select.astype(np.bool_)

x = self.xi[select].mean(dtype=np.float64)
y = self.eta[select].mean(dtype=np.float64)
25 changes: 0 additions & 25 deletions calcos/cosutil.py
@@ -3734,31 +3734,6 @@ def centerOfQuartic(x, coeff):

return x_min

- def errGehrels(counts):
- """Compute error estimate.
- The error estimate is computed using the Gehrels approximation for the
- upper confidence limit.
- Parameters
- ----------
- counts: array_like or float
- Number of counts (not necessarily integer values).
- Returns
- -------
- tuple of 2 array_like or float
- (The lower error estimate for counts,
- the upper error estimate for counts)
- """
- icounts = (counts + .5).astype(int)
- upper = (1. + np.sqrt(icounts + 0.75))
- lower = np.where(icounts > 0., Gehrels_lower(icounts), 0.)
- return (lower.astype(np.float32), upper.astype(np.float32))

- def Gehrels_lower(counts):
- return counts - counts * (1.0 - 1.0 / (9.0 * counts) - 1.0 / (3.0 * np.sqrt(counts)))**3

def errFrequentist(counts):
"""Compute errors using the 'frequentist-confidence' option of astropy's poisson_conf_interval
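With errGehrels and Gehrels_lower gone, the remaining errFrequentist (whose docstring is shown above) wraps astropy's poisson_conf_interval with the 'frequentist-confidence' option. A minimal sketch of that approach, assuming the errors are reported as distances from the counts to the confidence-interval bounds; the actual cosutil implementation may differ in dtype casting and edge-case handling:

import numpy as np
from astropy.stats import poisson_conf_interval

def err_frequentist_sketch(counts):
    # Lower and upper 1-sigma Poisson confidence limits for each element.
    limits = poisson_conf_interval(counts, interval='frequentist-confidence')
    err_lower = counts - limits[0]
    err_upper = limits[1] - counts
    return err_lower, err_upper

lower, upper = err_frequentist_sketch(np.array([0.0, 1.0, 4.0, 9.0]))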
8 changes: 5 additions & 3 deletions calcos/fpavg.py
@@ -1200,10 +1200,12 @@ def normalizeSums(self, data, sumweight):
error_frequentist_lower, error_frequentist_upper = cosutil.errFrequentist(variance)
error_frequentist_lower[zeroweight] = 0.0
error_frequentist_upper[zeroweight] = 0.0
- conversion = data.field("flux") / data.field("net")
+ bad = np.where(data.field("net") == 0.0)
+ good = np.where(data.field("net") != 0.0)
+ nelements = len(data.field("flux"))
+ conversion = np.zeros(nelements)
+ conversion[good] = data.field("flux")[good] / data.field("net")[good]
# Clean out NaNs from where flux and net are zero
- good = np.where(~np.isnan(conversion))
- bad = np.where(np.isnan(conversion))
wavelength = data.field("wavelength")
interpolated_values = np.interp(wavelength[bad], wavelength[good], conversion[good])
conversion[bad] = interpolated_values
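The reworked block above computes flux/net only where net is nonzero and fills the zero-net bins by interpolating the conversion factor over wavelength, instead of dividing by zero and cleaning up NaNs afterwards. The same pattern in isolation (plain arrays stand in for the FITS table columns):

import numpy as np

def counts_to_flux_conversion(flux, net, wavelength):
    conversion = np.zeros(len(flux))
    good = np.where(net != 0.0)
    bad = np.where(net == 0.0)
    conversion[good] = flux[good] / net[good]
    # Fill zero-net bins from neighboring wavelengths rather than dividing by zero.
    conversion[bad] = np.interp(wavelength[bad], wavelength[good], conversion[good])
    return conversion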
4 changes: 2 additions & 2 deletions calcos/timeline.py
@@ -125,7 +125,7 @@ def createTimeline(input, fd, info, reffiles,
(key.upper(), x0, x1+1, y0, y1+1), VERBOSE)
# A value of 1 (True) in region_flags means the corresponding
# event is within the area that includes the airglow line.
- region_flags = np.ones(len(xfull), dtype=np.bool8)
+ region_flags = np.ones(len(xfull), dtype=np.bool_)
region_flags = np.where(xfull > x1, False, region_flags)
region_flags = np.where(xfull < x0, False, region_flags)
if isinstance(y0, (list, tuple)):
@@ -139,7 +139,7 @@ def createTimeline(input, fd, info, reffiles,
region_flags = np.where(yfull > y1, False, region_flags)
region_flags = np.where(yfull < y0, False, region_flags)
npixels = 1.
- region_flags = region_flags.astype(np.bool8)
+ region_flags = region_flags.astype(np.bool_)
# scratch array for counts per second within each time bin
temp = np.zeros(len(tl_time), dtype=np.float32)
if time[-1] - time[0] < 1.: # e.g. ACCUM data
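For reference, region_flags above marks events that fall inside the airglow box [x0, x1] x [y0, y1]; a boolean-mask sketch of the single-region case (the real code also handles a pair of y ranges), with placeholder arrays and bounds:

import numpy as np

def airglow_region_flags(xfull, yfull, x0, x1, y0, y1):
    flags = np.ones(len(xfull), dtype=np.bool_)
    flags &= (xfull >= x0) & (xfull <= x1)
    flags &= (yfull >= y0) & (yfull <= y1)
    return flags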
7 changes: 4 additions & 3 deletions calcos/timetag.py
@@ -389,7 +389,7 @@ def setActiveArea(events, info, brftab):

xi = events.field(xcorr)
eta = events.field(ycorr)
- active_area = np.ones(len(xi), dtype=np.bool8)
+ active_area = np.ones(len(xi), dtype=np.bool_)

# A value of 1 (True) in active_area means the corresponding event
# is within the active area.
@@ -401,7 +401,7 @@ def setActiveArea(events, info, brftab):
active_area = np.where(eta > b_high, False, active_area)
active_area = np.where(eta < b_low, False, active_area)
# Make sure the data type is still boolean.
- active_area = active_area.astype(np.bool8)
+ active_area = active_area.astype(np.bool_)

def mkHeaders(phdr, events_header, extver=1):
"""Create a list of four headers for creating the flt and counts files.
@@ -3879,7 +3879,8 @@ def writeImages(x, y, epsilon, dq,
# E_rate = E_counts / t
# reciprocal_flat_field = E_counts / C_counts
# the corresponding error arrays are:
- # errC_rate = errGehrels(C_counts) / t
+ # err_lower, err_upper from cosutil.errFrequentist(C_counts)
+ # errC_rate = err_upper / t
# errE_rate = errC_rate * reciprocal_flat_field

if outcounts is not None:
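The updated comment spells out how the count errors now propagate to the rate images: the upper error from errFrequentist on the raw counts is divided by the exposure time, then scaled by the reciprocal flat field. As arithmetic, with illustrative names, nonzero counts assumed, and errFrequentist approximated by astropy directly:

import numpy as np
from astropy.stats import poisson_conf_interval

def rate_errors(C_counts, E_counts, t):
    limits = poisson_conf_interval(C_counts, interval='frequentist-confidence')
    err_upper = limits[1] - C_counts             # upper error on the raw counts
    reciprocal_flat_field = E_counts / C_counts  # E_counts = flat-fielded counts
    errC_rate = err_upper / t                    # error on the count-rate image
    errE_rate = errC_rate * reciprocal_flat_field
    return errC_rate, errE_rate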
2 changes: 1 addition & 1 deletion setup.py
@@ -37,7 +37,7 @@ def c_includes(parent, depth=1):
setup_requires=['setuptools_scm'],
install_requires=[
'astropy>=5.0.4',
- 'numpy',
+ 'numpy<2.0',
'scipy',
'stsci.tools>=4.0.0',
],
4 changes: 1 addition & 3 deletions tests/helpers.py
@@ -75,8 +75,6 @@ def calref_from_image(input_image):
@pytest.mark.bigdata
@pytest.mark.usefixtures('_jail', 'envopt')
class BaseCOS:
- # Timeout in seconds for file downloads.
- timeout = 30

instrument = 'cos'
ignore_keywords = ['DATE', 'CAL_VER']
@@ -164,7 +162,7 @@ def get_input_files(self, filenames):
os.environ[var] = ref_path # hacky hack hack

# Download reference files, if needed only.
- download_crds(ref_file, timeout=self.timeout, verbose=True)
+ download_crds(ref_file, verbose=True)

def compare_outputs(self, outputs, atol=0, rtol=1e-7, raise_error=True,
ignore_keywords_overwrite=None):
47 changes: 1 addition & 46 deletions tests/test_cosutil.py
@@ -180,51 +180,6 @@ def test_is_product():
os.remove(raw_file)


- def test_gehrels_lower():
- # Setup
- counts = 4.0
- actual = 1.9090363511659807
- # Test
- test_value = cosutil.Gehrels_lower(counts)
- # Verify
- assert actual == test_value


- def test_err_gehrels():
- """
- unit test for err_gehrels(counts)
- test ran
- - create 3 arrays, one should be random float values, the other two should be the lower (zero) and upper (one) error estimates
- - following the math for calculating the upper limit by taking the sqrt of counts + 0.5 and then adding 1 to the result.
- - similarly for the lower we add counts + 0.5 and then counts - counts * (1.0 - 1.0 / (9.0 * counts) - 1.0 / (3.0 * np.sqrt(counts))) ** 3
- we will be able to get the lower array.
- - finally assert the upper array and the lower array with the results obtained from err_gehrels().
- """
- # Setup
- # values to be tested on
- zeros = np.zeros(5)
- ones = np.ones(5)
- random_values = np.array([2.2400559, 0.85776844, 5.31731382, 8.98167105, 7.88191824]).astype(np.float32)
- # Actual results expected
- true_lower1 = np.random.uniform(low=0.0, high=0.0, size=(5,))
- true_upper1 = np.array([1.8660254, 1.8660254, 1.8660254, 1.8660254, 1.8660254]).astype(np.float32)

- true_lower2 = np.array([0.8285322, 0.8285322, 0.8285322, 0.8285322, 0.8285322]).astype(np.float32)
- true_upper2 = np.array([2.3228757, 2.3228757, 2.3228757, 2.3228757, 2.3228757]).astype(np.float32)

- true_lower3 = np.array([1.2879757, 0.8285322, 2.1544096, 2.9387457, 2.7635214]).astype(np.float32)
- true_upper3 = np.array([2.6583123, 2.3228757, 3.3979158, 4.122499, 3.95804]).astype(np.float32)
- # Test
- lower1, upper1 = cosutil.errGehrels(zeros) # should produce a warning
- lower2, upper2 = cosutil.errGehrels(ones)
- lower3, upper3 = cosutil.errGehrels(random_values)
- # Verify
- np.testing.assert_array_equal(true_lower1, lower1)
- np.testing.assert_array_equal(true_upper1, upper1)
- np.testing.assert_array_equal(true_lower2, lower2)
- np.testing.assert_array_equal(true_upper2, upper2)
- np.testing.assert_array_equal(true_lower3, lower3)
- np.testing.assert_array_equal(true_upper3, upper3)


def test_cmp_part_exception():
@@ -316,7 +271,7 @@ def test_return_gti():
def test_err_frequentist():
"""
unit test for err_frequentist(counts)
- - create 3 arrays similar to the test in err_gehrels().
+ - create 3 arrays.
- find the poisson confidence interval for each array.
- assert the result with the expected err_lower and err_upper.
"""
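With the Gehrels tests removed, test_err_frequentist is the remaining coverage for the error estimate. A sketch of such a test that derives the expected values from astropy instead of hard-coding them; it assumes errFrequentist returns error sizes rather than interval bounds and that cosutil is importable as calcos.cosutil:

import numpy as np
from astropy.stats import poisson_conf_interval
from calcos import cosutil

def test_err_frequentist_sketch():
    counts = np.array([0.0, 1.0, 4.0, 9.0], dtype=np.float32)
    limits = poisson_conf_interval(counts, interval='frequentist-confidence')
    expected_lower = counts - limits[0]
    expected_upper = limits[1] - counts
    lower, upper = cosutil.errFrequentist(counts)
    np.testing.assert_allclose(lower, expected_lower, rtol=1e-5)
    np.testing.assert_allclose(upper, expected_upper, rtol=1e-5)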
