Commit

[FIX] Docs issues (#318)
elephaint authored and mattbuot committed Jan 31, 2025
1 parent ed1a512 commit 5ec9b3d
Showing 22 changed files with 160 additions and 755 deletions.
9 changes: 3 additions & 6 deletions README.md
@@ -81,7 +81,6 @@ from hierarchicalforecast.evaluation import evaluate
from hierarchicalforecast.methods import BottomUp, TopDown, MiddleOut
from utilsforecast.losses import mse


# Load TourismSmall dataset
Y_df, S, tags = HierarchicalData.load('./data', 'TourismSmall')
Y_df['ds'] = pd.to_datetime(Y_df['ds'])
@@ -92,10 +91,9 @@ Y_test_df = Y_df.groupby('unique_id').tail(4)
Y_train_df = Y_df.drop(Y_test_df.index)

# Compute base auto-ARIMA predictions
-fcst = StatsForecast(df=Y_train_df,
-                     models=[AutoARIMA(season_length=4), Naive()],
-                     freq='Q', n_jobs=-1)
-Y_hat_df = fcst.forecast(h=4)
+fcst = StatsForecast(models=[AutoARIMA(season_length=4), Naive()],
+                     freq='QE', n_jobs=-1)
+Y_hat_df = fcst.forecast(df=Y_train_df, h=4)

# Reconcile the base predictions
reconcilers = [
@@ -116,7 +114,6 @@ Assumes you have a test dataframe.
df = Y_rec_df.merge(Y_test_df, on=['unique_id', 'ds'])
evaluation = evaluate(df = df,
tags = tags,
-train_df = Y_train_df,
metrics = [mse],
benchmark = "Naive")
```
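For readers skimming the diff, here is a minimal end-to-end sketch of the README example as it reads after this fix: the training frame moves from the `StatsForecast` constructor into `forecast()`, the quarterly frequency alias becomes `'QE'`, and `evaluate` no longer takes `train_df` (the `mse` metric does not need it). The reconciliation step is abbreviated to `BottomUp`, and the `reconcile` keyword names are written from memory and may differ slightly across hierarchicalforecast versions.

```python
import pandas as pd

from datasetsforecast.hierarchical import HierarchicalData
from statsforecast import StatsForecast
from statsforecast.models import AutoARIMA, Naive

from hierarchicalforecast.core import HierarchicalReconciliation
from hierarchicalforecast.methods import BottomUp
from hierarchicalforecast.evaluation import evaluate
from utilsforecast.losses import mse

# Load TourismSmall and hold out the last four quarters as a test set
Y_df, S, tags = HierarchicalData.load('./data', 'TourismSmall')
Y_df['ds'] = pd.to_datetime(Y_df['ds'])
Y_test_df = Y_df.groupby('unique_id').tail(4)
Y_train_df = Y_df.drop(Y_test_df.index)

# Base forecasts: df is passed to forecast(), not the constructor,
# and the quarterly alias is 'QE'
fcst = StatsForecast(models=[AutoARIMA(season_length=4), Naive()],
                     freq='QE', n_jobs=-1)
Y_hat_df = fcst.forecast(df=Y_train_df, h=4)

# Reconcile the base predictions (keyword names may vary by version)
hrec = HierarchicalReconciliation(reconcilers=[BottomUp()])
Y_rec_df = hrec.reconcile(Y_hat_df=Y_hat_df, Y_df=Y_train_df, S=S, tags=tags)

# Evaluate against the held-out quarters; no train_df argument anymore
df = Y_rec_df.merge(Y_test_df, on=['unique_id', 'ds'])
evaluation = evaluate(df=df, tags=tags, metrics=[mse], benchmark="Naive")
```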
6 changes: 3 additions & 3 deletions hierarchicalforecast/core.py
@@ -19,7 +19,7 @@
import narwhals as nw
import numpy as np

-# %% ../nbs/src/core.ipynb 7
+# %% ../nbs/src/core.ipynb 6
def _build_fn_name(fn) -> str:
fn_name = type(fn).__name__
func_params = fn.__dict__
@@ -42,7 +42,7 @@ def _build_fn_name(fn) -> str:
fn_name += "_" + "_".join(func_params)
return fn_name

-# %% ../nbs/src/core.ipynb 11
+# %% ../nbs/src/core.ipynb 10
def _reverse_engineer_sigmah(
Y_hat_df: Frame,
y_hat: np.ndarray,
@@ -87,7 +87,7 @@ def _reverse_engineer_sigmah(

return sigmah

-# %% ../nbs/src/core.ipynb 12
+# %% ../nbs/src/core.ipynb 11
class HierarchicalReconciliation:
"""Hierarchical Reconciliation Class.
20 changes: 10 additions & 10 deletions hierarchicalforecast/evaluation.py
@@ -1,7 +1,7 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/src/evaluation.ipynb.

# %% auto 0
-__all__ = ['rel_mse', 'msse', 'scaled_crps', 'energy_score', 'log_score', 'HierarchicalEvaluation']
+__all__ = ['evaluate']

# %% ../nbs/src/evaluation.ipynb 3
import narwhals as nw
@@ -14,14 +14,14 @@
from scipy.stats import multivariate_normal
from typing import Callable, Optional, Union

-# %% ../nbs/src/evaluation.ipynb 7
+# %% ../nbs/src/evaluation.ipynb 5
def _loss_deprecation_notice(loss):
warnings.warn(
f"This loss function ({loss}) will be deprecated in future releases. Please use the `utilsforecast.losses` function instead.",
FutureWarning,
)

-# %% ../nbs/src/evaluation.ipynb 8
+# %% ../nbs/src/evaluation.ipynb 6
def _metric_protections(
y: np.ndarray, y_hat: np.ndarray, weights: Optional[np.ndarray]
) -> None:
@@ -130,7 +130,7 @@ def mqloss(

return mqloss

-# %% ../nbs/src/evaluation.ipynb 10
+# %% ../nbs/src/evaluation.ipynb 7
def rel_mse(y, y_hat, y_train, mask=None):
"""Relative Mean Squared Error
@@ -170,7 +170,7 @@ def rel_mse(y, y_hat, y_train, mask=None):
loss = loss / (norm + eps)
return loss

-# %% ../nbs/src/evaluation.ipynb 12
+# %% ../nbs/src/evaluation.ipynb 8
def msse(y, y_hat, y_train, mask=None):
"""Mean Squared Scaled Error
@@ -211,7 +211,7 @@ def msse(y, y_hat, y_train, mask=None):
loss = loss / (norm + eps)
return loss

-# %% ../nbs/src/evaluation.ipynb 14
+# %% ../nbs/src/evaluation.ipynb 9
def scaled_crps(y, y_hat, quantiles):
"""Scaled Continues Ranked Probability Score
@@ -255,7 +255,7 @@ def scaled_crps(y, y_hat, quantiles):
loss = 2 * loss * np.sum(np.ones(y.shape)) / (norm + eps)
return loss

-# %% ../nbs/src/evaluation.ipynb 16
+# %% ../nbs/src/evaluation.ipynb 10
def energy_score(y, y_sample1, y_sample2, beta=2):
"""Energy Score
@@ -302,7 +302,7 @@ def energy_score(y, y_sample1, y_sample2, beta=2):
score = np.mean(term2 - 0.5 * term1)
return score

-# %% ../nbs/src/evaluation.ipynb 17
+# %% ../nbs/src/evaluation.ipynb 11
def log_score(y, y_hat, cov, allow_singular=True):
"""Log Score.
@@ -348,7 +348,7 @@ def log_score(y, y_hat, cov, allow_singular=True):
score = np.mean(scores)
return score

-# %% ../nbs/src/evaluation.ipynb 19
+# %% ../nbs/src/evaluation.ipynb 12
class HierarchicalEvaluation:
"""Hierarchical Evaluation Class.
@@ -493,7 +493,7 @@ def evaluate(

return evaluation

-# %% ../nbs/src/evaluation.ipynb 20
+# %% ../nbs/src/evaluation.ipynb 13
def evaluate(
df: FrameT,
metrics: list[Callable],
22 changes: 11 additions & 11 deletions hierarchicalforecast/utils.py
@@ -17,14 +17,14 @@
from sklearn.preprocessing import OneHotEncoder
from typing import Optional, Union, Sequence

-# %% ../nbs/src/utils.ipynb 6
+# %% ../nbs/src/utils.ipynb 5
# Global variables
NUMBA_NOGIL = True
NUMBA_CACHE = True
NUMBA_PARALLEL = True
NUMBA_FASTMATH = True

-# %% ../nbs/src/utils.ipynb 7
+# %% ../nbs/src/utils.ipynb 6
class CodeTimer:
def __init__(self, name=None, verbose=True):
self.name = " '" + name + "'" if name else ""
@@ -43,7 +43,7 @@ def __exit__(self, exc_type, exc_value, traceback):
+ " seconds"
)

-# %% ../nbs/src/utils.ipynb 8
+# %% ../nbs/src/utils.ipynb 7
def is_strictly_hierarchical(S: np.ndarray, tags: dict[str, np.ndarray]) -> bool:
# main idea:
# if S represents a strictly hierarchical structure
@@ -60,7 +60,7 @@ def is_strictly_hierarchical(S: np.ndarray, tags: dict[str, np.ndarray]) -> bool
nodes = levels_.popitem()[1].size
return paths == nodes

-# %% ../nbs/src/utils.ipynb 10
+# %% ../nbs/src/utils.ipynb 9
def _to_upper_hierarchy(
bottom_split: list[str], bottom_values: str, upper_key: str
) -> list[str]:
@@ -73,7 +73,7 @@ def join_upper(bottom_value):

return [join_upper(val) for val in bottom_values]

-# %% ../nbs/src/utils.ipynb 11
+# %% ../nbs/src/utils.ipynb 10
def aggregate(
df: Frame,
spec: list[list[str]],
@@ -238,7 +238,7 @@ def aggregate(

return Y_df, S_df, tags

-# %% ../nbs/src/utils.ipynb 25
+# %% ../nbs/src/utils.ipynb 24
class HierarchicalPlot:
"""Hierarchical Plot
@@ -523,7 +523,7 @@ def plot_hierarchical_predictions_gap(
plt.grid()
plt.show()

-# %% ../nbs/src/utils.ipynb 46
+# %% ../nbs/src/utils.ipynb 45
# convert levels to output quantile names
def level_to_outputs(level: list[int]) -> tuple[list[float], list[str]]:
"""Converts list of levels into output names matching StatsForecast and NeuralForecast methods.
@@ -568,7 +568,7 @@ def quantiles_to_outputs(quantiles: list[float]) -> tuple[list[float], list[str]
output_names.append("-median")
return quantiles, output_names

-# %% ../nbs/src/utils.ipynb 47
+# %% ../nbs/src/utils.ipynb 46
# given input array of sample forecasts and inptut quantiles/levels,
# output a Pandas Dataframe with columns of quantile predictions
def samples_to_quantiles_df(
@@ -653,7 +653,7 @@ def samples_to_quantiles_df(

return _quantiles, df_nw.to_native()

-# %% ../nbs/src/utils.ipynb 55
+# %% ../nbs/src/utils.ipynb 53
# Masked empirical covariance matrix
@njit(
"Array(float64, 2, 'F')(Array(float64, 2, 'C'), Array(bool_, 2, 'C'))",
@@ -691,7 +691,7 @@ def _ma_cov(residuals: np.ndarray, not_nan_mask: np.ndarray):

return W

-# %% ../nbs/src/utils.ipynb 56
+# %% ../nbs/src/utils.ipynb 54
# Shrunk covariance matrix using the Schafer-Strimmer method


@@ -842,7 +842,7 @@ def _shrunk_covariance_schaferstrimmer_with_nans(

return W

-# %% ../nbs/src/utils.ipynb 58
+# %% ../nbs/src/utils.ipynb 56
# Lasso cyclic coordinate descent
@njit(
"Array(float64, 1, 'C')(Array(float64, 2, 'C'), Array(float64, 1, 'C'), float64, int64, float64)",
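As a side note on the renumbered utils cells above: `level_to_outputs` and `quantiles_to_outputs` convert confidence levels into the quantile columns and output names that StatsForecast and NeuralForecast emit. The helper below is a standalone illustration of that mapping written for this commit page, not the library code, and the exact ordering of the names returned by hierarchicalforecast may differ.

```python
# Standalone approximation of the level -> quantile mapping behind
# level_to_outputs; names follow the "-lo-{level}"/"-hi-{level}"/"-median"
# convention used in Nixtla outputs (ordering here is illustrative only).
def levels_to_quantiles(levels: list[int]) -> tuple[list[float], list[str]]:
    quantiles, names = [0.5], ["-median"]
    for lv in sorted(levels):
        lo = round((100 - lv) / 200, 4)   # e.g. level 80 -> 0.10
        hi = round(1 - lo, 4)             # e.g. level 80 -> 0.90
        quantiles += [lo, hi]
        names += [f"-lo-{lv}", f"-hi-{lv}"]
    return quantiles, names

# levels_to_quantiles([80, 90]) ->
# ([0.5, 0.1, 0.9, 0.05, 0.95], ['-median', '-lo-80', '-hi-80', '-lo-90', '-hi-90'])
```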

Large diffs are not rendered by default.

3 changes: 0 additions & 3 deletions nbs/examples/AustralianDomesticTourism-Intervals.ipynb
@@ -37,9 +37,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['NIXTLA_ID_AS_COL'] = '1'\n",
"\n",
"import pandas as pd\n",
"\n",
"# compute base forecast no coherent\n",
3 changes: 0 additions & 3 deletions nbs/examples/AustralianDomesticTourism-Permbu-Intervals.ipynb
@@ -37,9 +37,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['NIXTLA_ID_AS_COL'] = '1'\n",
"\n",
"import pandas as pd\n",
"\n",
"# compute base forecast no coherent\n",
3 changes: 0 additions & 3 deletions nbs/examples/AustralianDomesticTourism.ipynb
@@ -66,9 +66,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['NIXTLA_ID_AS_COL'] = '1'\n",
"\n",
"import numpy as np\n",
"import pandas as pd"
]
3 changes: 0 additions & 3 deletions nbs/examples/AustralianPrisonPopulation.ipynb
@@ -63,9 +63,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['NIXTLA_ID_AS_COL'] = '1'\n",
"\n",
"import numpy as np\n",
"import pandas as pd"
]
3 changes: 0 additions & 3 deletions nbs/examples/Introduction.ipynb
@@ -161,9 +161,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['NIXTLA_ID_AS_COL'] = '1'\n",
"\n",
"import numpy as np\n",
"import pandas as pd"
]
