diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d6efa7d..e93de2b 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -16,13 +16,13 @@ jobs:
       fail-fast: false
       matrix:
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.11", "3.12"]
         numpy_ver: ["latest"]
         test_config: ["latest"]
         include:
           # NEP29 compliance settings
-          - python-version: "3.9"
-            numpy_ver: "1.23"
+          - python-version: "3.10"
+            numpy_ver: "1.24"
             os: ubuntu-latest
             test_config: "NEP29"
           # Operational compliance settings
@@ -70,22 +70,25 @@ jobs:
         run: flake8 . --count --exit-zero --max-complexity=10 --statistics
 
       - name: Test with pytest
-        run: pytest
+        run: pytest --cov=pysatSpaceWeather --cov-report xml
 
-      - name: Publish results to coveralls
-        env:
-          GITHUB_TOKEN: ${{ secrets.github_token }}
-          COVERALLS_PARALLEL: true
-        run: coveralls --rcfile=pyproject.toml --service=github
+      - name: Coveralls Parallel
+        uses: coverallsapp/github-action@v2
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          flag-name: run=${{ join(matrix.*, '-') }}
+          parallel: true
+          format: cobertura
+          debug: true
 
   finish:
     name: Finish Coverage Analysis
     needs: build
+    if: ${{ always() }}
     runs-on: ubuntu-latest
     steps:
       - name: Coveralls Finished
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          pip install --upgrade coveralls
-          coveralls --service=github --finish
+        uses: coverallsapp/github-action@v2
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          parallel-finished: true
diff --git a/.github/workflows/pysat_rc.yml b/.github/workflows/pysat_rc.yml
index 3fe807c..a9afe6f 100644
--- a/.github/workflows/pysat_rc.yml
+++ b/.github/workflows/pysat_rc.yml
@@ -12,7 +12,7 @@ jobs:
       fail-fast: false
       matrix:
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     name: Python ${{ matrix.python-version }} on ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
@@ -35,22 +35,25 @@ jobs:
           python -c "import pysat; pysat.params['data_dirs'] = 'pysatData'"
 
       - name: Test with pytest
-        run: pytest
+        run: pytest --cov=pysatSpaceWeather --cov-report xml
 
-      - name: Publish results to coveralls
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          COVERALLS_PARALLEL: true
-        run: coveralls --rcfile=pyproject.toml --service=github
+      - name: Coveralls Parallel
+        uses: coverallsapp/github-action@v2
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          flag-name: run=${{ join(matrix.*, '-') }}
+          parallel: true
+          format: cobertura
+          debug: true
 
   finish:
     name: Finish Coverage Analysis
     needs: build
+    if: ${{ always() }}
     runs-on: ubuntu-latest
     steps:
       - name: Coveralls Finished
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          pip install --upgrade coveralls
-          coveralls --service=github --finish
+        uses: coverallsapp/github-action@v2
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          parallel-finished: true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b31c4d7..2f89ef1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,10 @@ This project adheres to [Semantic Versioning](https://semver.org/).
 --------------------
 * Maintenance
   * Removed unneeded keyword arguments from Kp method functions
+  * Replaced `fillna` with `asfreq` to maintain the same behaviour
+  * Implemented `iloc` in pandas Series and DataFrame index access
+  * Added `verify=False` to GFZ requests
+  * Updated documentation links and fixed intersphinx mapping
 
 [0.1.0] - 2024-02-16
 --------------------
diff --git a/docs/conf.py b/docs/conf.py
index e35e7b2..acf098b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -177,8 +177,9 @@ epub_exclude_files = ['search.html']
 
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'https://docs.python.org/': None}
+intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
 
 # Links to ignore when checking for stability
 linkcheck_ignore = ['https://lasp.colorado.edu/space_weather/dsttemerin/',
-                    'https://*QUERY']
+                    'https://*QUERY',
+                    'https://datapub.gfz-potsdam.de/download/10.5880.Kp.0001/*']
diff --git a/docs/supported_instruments.rst b/docs/supported_instruments.rst
index efb508a..04fc8e4 100644
--- a/docs/supported_instruments.rst
+++ b/docs/supported_instruments.rst
@@ -179,10 +179,9 @@ Dst
 ^^^
 
 The Disturbance Storm Time (Dst) Index is a measure of magnetic activity
-associated with the ring current. The National Geophysical Data Center (NGDC)
-maintains the
-`current database `_ from which
-the historic Dst is downloaded.
+associated with the ring current. The National Centers for Environmental
+Information (NCEI), formerly the National Geophysical Data Center (NGDC),
+maintains the current database from which the historic Dst is downloaded.
 `LASP `_ performs the
 calculations and provides the predicted Dst for the last 96 hours. You can
 learn more about the Dst Index at the
diff --git a/pyproject.toml b/pyproject.toml
index fe8ae65..2c14319 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,6 @@ classifiers = [
   "Natural Language :: English",
   "Programming Language :: Python :: 3",
   "Programming Language :: Python :: 3.6",
-  "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
@@ -50,7 +49,6 @@ dependencies = [
 
 [project.optional-dependencies]
 test = [
-  "coveralls < 3.3",
   "flake8",
   "flake8-docstrings",
   "hacking >= 1.0",
@@ -75,7 +73,6 @@ Source = "https://github.com/pysat/pysatSpaceWeather"
 [tool.coverage.report]
 
 [tool.pytest.ini_options]
-addopts = "--cov=pysatSpaceWeather"
 markers = [
   "all_inst",
   "download",
diff --git a/pysatSpaceWeather/instruments/ace_sis.py b/pysatSpaceWeather/instruments/ace_sis.py
index 61f6546..5ad0596 100644
--- a/pysatSpaceWeather/instruments/ace_sis.py
+++ b/pysatSpaceWeather/instruments/ace_sis.py
@@ -115,8 +115,10 @@ def clean(self):
 
     # Evaluate the different proton fluxes.  Replace bad values with NaN and
     # times with no valid data
-    self.data['int_pflux_10MeV'][self.data['status_10'] > max_status] = np.nan
-    self.data['int_pflux_30MeV'][self.data['status_30'] > max_status] = np.nan
+    self.data['int_pflux_10MeV'] = self.data['int_pflux_10MeV'].where(
+        (self.data['status_10'] <= max_status), other=np.nan)
+    self.data['int_pflux_30MeV'] = self.data['int_pflux_30MeV'].where(
+        (self.data['status_30'] <= max_status), other=np.nan)
 
     eval_cols = ['int_pflux_10MeV', 'int_pflux_30MeV']
 
diff --git a/pysatSpaceWeather/instruments/methods/f107.py b/pysatSpaceWeather/instruments/methods/f107.py
index 3fe7edf..2f3b4a6 100644
--- a/pysatSpaceWeather/instruments/methods/f107.py
+++ b/pysatSpaceWeather/instruments/methods/f107.py
@@ -304,7 +304,7 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None):
     # Resample the output data, filling missing values
     if (date_range.shape != f107_inst.index.shape
             or abs(date_range - f107_inst.index).max().total_seconds() > 0.0):
-        f107_inst.data = f107_inst.data.resample(freq).fillna(method=None)
+        f107_inst.data = f107_inst.data.resample(freq).asfreq()
 
         if np.isfinite(fill_val):
             f107_inst.data[np.isnan(f107_inst.data)] = fill_val
@@ -354,7 +354,7 @@ def calc_f107a(f107_inst, f107_name='f107', f107a_name='f107a', min_pnts=41):
     #
     # Ensure the data are evenly sampled at a daily frequency, since this is
     # how often F10.7 is calculated.
-    f107_fill = f107_inst.data.resample('1D').fillna(method=None)
+    f107_fill = f107_inst.data.resample('1D').asfreq()
 
     # Replace the time index with an ordinal
     time_ind = f107_fill.index
@@ -373,14 +373,14 @@ def calc_f107a(f107_inst, f107_name='f107', f107a_name='f107a', min_pnts=41):
 
     # Resample to the original frequency, if it is not equal to 1 day
     freq = pysat.utils.time.calc_freq(f107_inst.index)
-    if freq != "86400S":
+    if freq != "86400s":
         # Resample to the desired frequency
         f107_fill = f107_fill.resample(freq).ffill()
 
         # Save the output in a list
         f107a = list(f107_fill[f107a_name])
 
-        # Fill any dates that fall
+        # Fill any dates that fall just outside of the range
         time_ind = pds.date_range(f107_fill.index[0], f107_inst.index[-1],
                                   freq=freq)
         for itime in time_ind[f107_fill.index.shape[0]:]:
diff --git a/pysatSpaceWeather/instruments/methods/gfz.py b/pysatSpaceWeather/instruments/methods/gfz.py
index 12ffceb..2d5ad55 100644
--- a/pysatSpaceWeather/instruments/methods/gfz.py
+++ b/pysatSpaceWeather/instruments/methods/gfz.py
@@ -128,7 +128,7 @@ def json_downloads(date_array, data_path, local_file_prefix, local_date_fmt,
         query_url = '{:s}&status=def'.format(query_url)
 
     # The data is returned as a JSON file
-    req = requests.get(query_url)
+    req = requests.get(query_url, verify=False)
 
     # Process the JSON file
     if req.text.find('Gateway Timeout') >= 0:
@@ -249,7 +249,7 @@ def kp_ap_cp_download(platform, name, date_array, tag, inst_id, data_path,
                                            dl_date.strftime('%Y'))))
         if mock_download_dir is None:
             furl = ''.join([burl, fname])
-            req = requests.get(furl)
+            req = requests.get(furl, verify=False)
 
             raw_txt = req.text if req.ok else None
         else:
diff --git a/pysatSpaceWeather/instruments/methods/kp_ap.py b/pysatSpaceWeather/instruments/methods/kp_ap.py
index efc2eec..4dfeb4b 100644
--- a/pysatSpaceWeather/instruments/methods/kp_ap.py
+++ b/pysatSpaceWeather/instruments/methods/kp_ap.py
@@ -306,12 +306,12 @@ def calc_daily_Ap(ap_inst, ap_name='3hr_ap', daily_name='Ap',
                         index=[ap_mean.index[0] - pds.DateOffset(hours=3)])
 
     # Extract the mean that only uses data for one day
-    ap_sel = ap_pad.combine_first(ap_mean[[i for i, tt in
-                                           enumerate(ap_mean.index)
-                                           if tt.hour == 21]])
+    ap_sel = ap_pad.combine_first(ap_mean.iloc[[i for i, tt in
+                                                enumerate(ap_mean.index)
+                                                if tt.hour == 21]])
 
     # Backfill this data
-    ap_data = ap_sel.resample('3H').bfill()
+    ap_data = ap_sel.resample('3h').bfill()
 
     # Save the output for the original time range
     ap_inst[daily_name] = pds.Series(ap_data[1:], index=ap_data.index[1:])
@@ -665,9 +665,12 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None,
             good_vals = recent_inst['Kp'][good_times] != local_fill_val
 
             # Save output data and cycle time
-            kp_times.extend(list(recent_inst.index[good_times][good_vals]))
-            kp_values.extend(list(recent_inst['Kp'][good_times][good_vals]))
-            itime = kp_times[-1] + pds.DateOffset(hours=3)
+            if len(good_vals):
+                kp_times.extend(list(
+                    recent_inst.index[good_times][good_vals]))
+                kp_values.extend(list(
+                    recent_inst['Kp'][good_times][good_vals]))
+                itime = kp_times[-1] + pds.DateOffset(hours=3)
 
         inst_flag = 'forecast' if forecast_inst is not None else None
         notes += "{:})".format(itime.date())
@@ -751,7 +754,7 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None,
     # Resample the output data, filling missing values
     if (date_range.shape != kp_inst.index.shape
             or abs(date_range - kp_inst.index).max().total_seconds() > 0.0):
-        kp_inst.data = kp_inst.data.resample(freq).fillna(method=None)
+        kp_inst.data = kp_inst.data.resample(freq).asfreq()
 
         if np.isfinite(fill_val):
             kp_inst.data[np.isnan(kp_inst.data)] = fill_val
diff --git a/pysatSpaceWeather/instruments/methods/swpc.py b/pysatSpaceWeather/instruments/methods/swpc.py
index 0635a59..0d0ffc3 100644
--- a/pysatSpaceWeather/instruments/methods/swpc.py
+++ b/pysatSpaceWeather/instruments/methods/swpc.py
@@ -318,6 +318,7 @@ def rewrite_daily_solar_data_file(year, outfiles, lines):
 
         # Write out as a file
         data.to_csv(outfiles[data_name], header=True)
+        pysat.logger.info('Wrote: {:}'.format(outfiles[data_name]))
 
     return
 
@@ -478,7 +479,7 @@ def solar_geomag_predictions_download(name, date_array, data_path,
     # Process the Kp data
     hr_strs = ['00-03UT', '03-06UT', '06-09UT', '09-12UT', '12-15UT',
               '15-18UT', '18-21UT', '21-00UT']
-    data_times['kp'] = pds.date_range(pred_times[0], periods=24, freq='3H')
+    data_times['kp'] = pds.date_range(pred_times[0], periods=24, freq='3h')
 
     for line in kp_raw.split('\n'):
         if line.find("Prob_Mid") >= 0:
@@ -643,7 +644,7 @@ def geomag_forecast_download(name, date_array, data_path,
             kp_day2.append(float(cols[-2]))
             kp_day3.append(float(cols[-1]))
 
-    kp_times = pds.date_range(forecast_date, periods=24, freq='3H')
+    kp_times = pds.date_range(forecast_date, periods=24, freq='3h')
     kp_day = []
     for dd in [kp_day1, kp_day2, kp_day3]:
         kp_day.extend(dd)
@@ -784,7 +785,7 @@ def kp_ap_recent_download(name, date_array, data_path, mock_download_dir=None):
             sub_aps[i].append(np.int64(ap_sub_lines[i]))
 
     # Create times on 3 hour cadence
-    kp_times = pds.date_range(times[0], periods=(8 * 30), freq='3H')
+    kp_times = pds.date_range(times[0], periods=(8 * 30), freq='3h')
 
     # Put both data sets into DataFrames
     data = {'kp': pds.DataFrame({'mid_lat_Kp': sub_kps[0],
diff --git a/pysatSpaceWeather/instruments/sw_f107.py b/pysatSpaceWeather/instruments/sw_f107.py
index d3ca6b5..0bcf106 100644
--- a/pysatSpaceWeather/instruments/sw_f107.py
+++ b/pysatSpaceWeather/instruments/sw_f107.py
@@ -399,7 +399,7 @@ def download(date_array, tag, inst_id, data_path, update_files=False,
 
         # Cut the date from the end of the local files
         for i, lfile in enumerate(local_files):
-            local_files[i] = lfile[:-11]
+            local_files.iloc[i] = lfile[:-11]
 
         methods.swpc.old_indices_dsd_download(
             name, date_array, data_path, local_files, today,
diff --git a/pysatSpaceWeather/tests/test_methods_kp.py b/pysatSpaceWeather/tests/test_methods_kp.py
index 9f10e3c..8e6c80e 100644
--- a/pysatSpaceWeather/tests/test_methods_kp.py
+++ b/pysatSpaceWeather/tests/test_methods_kp.py
@@ -395,7 +395,7 @@ def test_convert_ap_to_kp_middle(self):
         """Test conversion of ap to Kp where ap is not an exact Kp value."""
 
         kp_ap.convert_3hr_kp_to_ap(self.testInst)
-        new_val = self.testInst['3hr_ap'][8] + 1
+        new_val = self.testInst['3hr_ap'].iloc[8] + 1
         self.testInst.data.at[self.testInst.index[8], '3hr_ap'] = new_val
 
         kp_out, kp_meta = kp_ap.convert_ap_to_kp(self.testInst['3hr_ap'])
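
As a quick, self-contained illustration of the pandas patterns this patch adopts (`.where()` instead of chained-index assignment, `resample(...).asfreq()` instead of `fillna(method=None)`, and positional access via `.iloc`), the minimal sketch below uses made-up column names and data rather than anything from pysatSpaceWeather:

import numpy as np
import pandas as pd

# Hypothetical stand-in for instrument data; 'flux' and 'status' are
# illustrative names only, not columns from the real instruments.
frame = pd.DataFrame({'flux': [1.0, 2.0, 3.0, 4.0],
                      'status': [0, 2, 1, 3]},
                     index=pd.date_range('2024-01-01', periods=4, freq='3h'))

# Mask values with an unacceptable status instead of assigning through
# chained indexing (frame['flux'][mask] = np.nan).
max_status = 1
frame['flux'] = frame['flux'].where(frame['status'] <= max_status,
                                    other=np.nan)

# Re-index to a regular cadence without filling, mirroring the
# resample(freq).asfreq() replacement for fillna(method=None).
regular = frame.resample('3h').asfreq()

# Positional access with .iloc, as used for the Series updates above.
print(regular['flux'].iloc[0])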