Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Learner1D: return inf loss when the bounds aren't done #271

Merged
merged 5 commits into the main branch on
Sep 24, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 14 additions & 7 deletions adaptive/learner/learner1D.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import collections.abc
import itertools
import math
from copy import deepcopy
from copy import copy, deepcopy
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, Union

import cloudpickle
Expand Down Expand Up @@ -290,6 +290,7 @@ def __init__(
self._dx_eps = 2 * max(np.abs(bounds)) * np.finfo(float).eps

self.bounds = list(bounds)
self.__missing_bounds = set(self.bounds) # cache of missing bounds

self._vdim: Optional[int] = None

Expand Down Expand Up @@ -325,6 +326,8 @@ def npoints(self) -> int:

@cache_latest
def loss(self, real: bool = True) -> float:
if self._missing_bounds():
return np.inf
losses = self.losses if real else self.losses_combined
if not losses:
return np.inf
Expand Down Expand Up @@ -604,6 +607,15 @@ def ask(self, n: int, tell_pending: bool = True) -> Tuple[List[float], List[floa

return points, loss_improvements

def _missing_bounds(self) -> List[Real]:
    """Return, sorted, the bound points that still need to be requested.

    Bounds already present in ``self.data`` are permanently dropped from
    the cached set of missing bounds; bounds that are merely pending are
    excluded from the result but stay in the cache.
    """
    still_missing = []
    # Iterate over a snapshot: the cached set may shrink while looping.
    for bound in set(self.__missing_bounds):
        if bound in self.data:
            # The bound has been evaluated; never report it again.
            self.__missing_bounds.discard(bound)
        elif bound not in self.pending_points:
            still_missing.append(bound)
    still_missing.sort()
    return still_missing

def _ask_points_without_adding(self, n: int) -> Tuple[List[float], List[float]]:
"""Return 'n' points that are expected to maximally reduce the loss.
Without altering the state of the learner"""
Expand All @@ -619,12 +631,7 @@ def _ask_points_without_adding(self, n: int) -> Tuple[List[float], List[float]]:
return [], []

# If the bounds have not been chosen yet, we choose them first.
missing_bounds = [
b
for b in self.bounds
if b not in self.data and b not in self.pending_points
]

missing_bounds = self._missing_bounds()
if len(missing_bounds) >= n:
return missing_bounds[:n], [np.inf] * n

Expand Down
24 changes: 23 additions & 1 deletion adaptive/tests/test_learner1d.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,21 @@
import random
import time

import flaky
import numpy as np

from adaptive.learner import Learner1D
from adaptive.learner.learner1D import curvature_loss_function
from adaptive.runner import simple
from adaptive.runner import BlockingRunner, simple


def flat_middle(x):
    """Tent-shaped test function on the interval ``[0, 1e-7]``.

    The input is rescaled to ``[0, 1]`` and linearly interpolated over a
    flat-topped profile.  Evaluations whose rescaled position falls near
    either edge sleep for one second, so a parallel runner receives the
    interior results before the bounds are done.
    """
    scaled = x * 1e7
    knots = np.array([0.0, 0.1, 0.9, 1.0])
    heights = [0, 1, 1, 0]
    near_edge = scaled < knots[1] or scaled > knots[-2]
    if near_edge:
        time.sleep(1)
    return np.interp(scaled, knots, heights)


def test_pending_loss_intervals():
Expand Down Expand Up @@ -389,3 +399,15 @@ def f(x):

learner = Learner1D(f, bounds=(-1, 1))
simple(learner, lambda l: l.npoints > 100)


def test_inf_loss_with_missing_bounds():
    """The loss must stay ``inf`` until both bounds have been evaluated.

    ``flat_middle`` sleeps on points near the bounds, so a parallel runner
    receives interior results first; if the loss became finite before the
    bounds were done, the goal below could be met with almost no points.
    """
    learner = Learner1D(
        flat_middle,
        bounds=(0, 1e-7),
        loss_per_interval=curvature_loss_function(),
    )
    # must be done in parallel because otherwise the bounds will be evaluated first
    BlockingRunner(learner, goal=lambda learner: learner.loss() < 0.01)

    # BUG FIX: the original line was a bare expression (`learner.npoints > 20`)
    # that evaluated and discarded its result, so the test asserted nothing.
    assert learner.npoints > 20