Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
4d03a75
make loss function based on simplex volume in higher dimensional space
jhoofwijk Jul 17, 2018
d9a45e1
make point_in_simplex and circumcircle global methods
jhoofwijk Jul 17, 2018
5f1eb3b
change choose_point_in_simplex such that it sometimes takes the center
jhoofwijk Jul 17, 2018
b90a25d
pep8
jhoofwijk Jul 20, 2018
9ae7426
update docstring
jhoofwijk Jul 20, 2018
d73443c
remove n from _ask signature and split off _ask_bound_point
jhoofwijk Jul 13, 2018
23efe57
split _ask_point_without_known_simplices into separate function
jhoofwijk Jul 13, 2018
ebc6dc0
use SortedList instead of heap
jhoofwijk Jul 16, 2018
65e04ac
lose indentation
jhoofwijk Jul 16, 2018
d8f508a
make learnerND.ask O(log N)
jhoofwijk Jul 17, 2018
fd047a0
improve triangulation performance in 3d
jhoofwijk Jul 17, 2018
fce140a
make learner pass the tests
jhoofwijk Jul 17, 2018
bded62b
use heaps instead of sortedlist
jhoofwijk Jul 20, 2018
79ddeff
add tell_pending to _ask_point_without_known_simplices
jhoofwijk Jul 20, 2018
d136d19
make bowyer_watson faster by looking at less circumcircles
jhoofwijk Jul 20, 2018
d1c5192
move simplex_volume function to triangulation file
jhoofwijk Jul 23, 2018
01c53f4
merge lists with zip
jhoofwijk Jul 23, 2018
c1d5917
tetrahedron -> simplex
jhoofwijk Jul 23, 2018
7921d1d
add quick volume computation for dim == 2
jhoofwijk Jul 23, 2018
74601c6
try to add anisotropicity, however, this fails...
jhoofwijk Jul 23, 2018
5de6a57
Merge branch '80--evaluate-less-circumcircles' into 74--add-anisotrop…
jhoofwijk Jul 25, 2018
b1fc0bd
make anisotropicity work
jhoofwijk Jul 25, 2018
9710430
out-comment the maximum shape factor of two
jhoofwijk Aug 20, 2018
1df1216
remove dependence on RTree
jhoofwijk Aug 20, 2018
2439803
Merge branch 'master' into 74--add-anisotropicity-to-learnerND
jhoofwijk Sep 26, 2018
672247c
enable plotting of custom triangulations
jhoofwijk Sep 27, 2018
d777dc1
small style change
basnijholt Sep 27, 2018
d9d981e
change inside bounds check to have eps precision
jhoofwijk Sep 30, 2018
f5eb8ef
add checks for volume consistency
jhoofwijk Sep 30, 2018
ff8845a
add .pytest_cache to gitignore
jhoofwijk Oct 1, 2018
2efbfb6
fix: Python 3.10+ compatibility (collections.abc, math.factorial)
basnijholt Apr 3, 2026
f84cab5
fix: use uniform scale for Bowyer-Watson insertion, not anisotropic t…
basnijholt Apr 3, 2026
7859a36
Merge remote-tracking branch 'origin/main' into 74--add-anisotropicit…
basnijholt Apr 5, 2026
94214ca
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 5, 2026
4339e65
Make anisotropic keyword-only in LearnerND
basnijholt Apr 6, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ nosetests.xml
coverage.xml
*,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
Expand Down
87 changes: 72 additions & 15 deletions adaptive/learner/learnerND.py
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ class LearnerND(BaseLearner):
children based on volume.
"""

def __init__(self, func, bounds, loss_per_simplex=None):
def __init__(self, func, bounds, loss_per_simplex=None, *, anisotropic=False):
self._vdim = None
self.loss_per_simplex = loss_per_simplex or default_loss

Expand Down Expand Up @@ -363,6 +363,7 @@ def __init__(self, func, bounds, loss_per_simplex=None):

# create a private random number generator with fixed seed
self._random = random.Random(1)
self.anisotropic = anisotropic

# all real triangles that have not been subdivided and the pending
# triangles heap of tuples (-loss, real simplex, sub_simplex or None)
Expand All @@ -379,7 +380,12 @@ def __init__(self, func, bounds, loss_per_simplex=None):

def new(self) -> LearnerND:
    """Create a new learner with the same function, bounds and settings."""
    # ``anisotropic`` is keyword-only in __init__, so it must be forwarded
    # explicitly or the copy would silently fall back to the default.
    return LearnerND(
        self.function,
        self.bounds,
        self.loss_per_simplex,
        anisotropic=self.anisotropic,
    )

@property
def npoints(self):
Expand Down Expand Up @@ -529,7 +535,6 @@ def points(self):

def tell(self, point, value):
point = tuple(point)

if point in self.data:
return # we already know about the point

Expand All @@ -548,9 +553,54 @@ def tell(self, point, value):
simplex = self._pending_to_simplex.get(point)
if simplex is not None and not self._simplex_exists(simplex):
simplex = None
# Keep Bowyer-Watson in the globally scaled coordinates. The
# anisotropic per-simplex transform is only used for point
# selection, not for cavity construction.
to_delete, to_add = tri.add_point(point, simplex, transform=self._transform)
self._update_losses(to_delete, to_add)

def get_local_transform_matrix(self, simplex):
    """Return the transform used when choosing a point inside *simplex*.

    When ``self.anisotropic`` is False (or there is no triangulation /
    simplex yet) this is simply the global scaling transform
    ``self._transform``.  Otherwise the global transform is composed with
    a stretch along the local gradient direction, estimated by a linear
    least-squares fit over the vertices of all simplices neighbouring
    *simplex*.

    Parameters
    ----------
    simplex : tuple of vertex indices, or None

    Returns
    -------
    2d numpy array
        A matrix suitable as the ``transform`` argument of
        ``choose_point_in_simplex``.

    Raises
    ------
    ValueError
        If the learner's output values are vector-valued; the anisotropic
        fit only works for scalar (1D) output.
    """
    scale = self._transform

    if simplex is None or self.tri is None or not self.anisotropic:
        return scale

    # All simplices sharing a vertex with `simplex` ...
    neighbors = set.union(*[self.tri.vertex_to_simplices[i] for i in simplex])
    # ... flattened into the set of their vertex indices.
    indices = set.union(set(), *neighbors)

    points = np.array([self.points[i] for i in indices])
    values = [self.data[tuple(p)] for p in points]

    if isinstance(values[0], Iterable):
        raise ValueError("Anisotropic learner only works with 1D output")

    # Do a linear least-squares fit of an affine model in the globally
    # scaled coordinates:  A x = B, solve for x = (gradient..., constant).
    points = points @ scale
    ones = np.ones((len(points), 1))
    A = np.hstack((points, ones))
    B = np.array(values, dtype=float)
    fit = np.linalg.lstsq(A, B, rcond=None)[0]
    *gradient, _constant = fit
    # we do not need the constant, only the gradient

    # gradient is a vector with the slope of the fit in each direction
    magnitude = np.linalg.norm(gradient)

    if np.isclose(magnitude, 0):
        return scale  # there is no noticable gradient

    # Make it a 2d numpy array (row vector) and normalise it.
    gradient = np.array([gradient], dtype=float) / magnitude
    # Projection onto the gradient direction (rank-1 matrix g^T g).
    projection_matrix = gradient.T @ gradient
    identity = np.eye(self.ndim)

    # `projection_matrix * factor + identity` has eigenvalue
    # sqrt(magnitude**2 + 1) along the gradient and 1 orthogonal to it,
    # i.e. space is stretched along the gradient only.
    factor = math.sqrt(magnitude**2 + 1) - 1

    scale_along_gradient = projection_matrix * factor + identity
    m = np.dot(scale_along_gradient, scale)
    return m.T

def _simplex_exists(self, simplex):
simplex = tuple(sorted(simplex))
return simplex in self.tri.simplices
Expand Down Expand Up @@ -679,22 +729,29 @@ def _pop_highest_existing_simplex(self):
def _ask_best_point(self):
    """Pop the highest-loss (sub)simplex and suggest a new point inside it.

    Returns
    -------
    point_new : tuple of floats
        The suggested point.
    loss : float
        The loss of the simplex that the point subdivides.
    """
    assert self.tri is not None

    while True:
        loss, simplex, subsimplex = self._pop_highest_existing_simplex()

        if subsimplex is None:
            # We found a real simplex and want to subdivide it
            points = self.tri.get_vertices(simplex)
        else:
            # We found a pending simplex and want to subdivide it
            subtri = self._subtriangulations[simplex]
            points = subtri.get_vertices(subsimplex)

        # Use the locally adapted transform so that, for anisotropic
        # learners, the point is chosen in gradient-stretched coordinates.
        transform = self.get_local_transform_matrix(simplex)
        point_new = tuple(choose_point_in_simplex(points, transform=transform))

        # The priority queue may still contain stale simplices from before a
        # pending split. Skip those rather than returning duplicate work.
        if point_new in self.data or point_new in self.pending_points:
            continue

        self._pending_to_simplex[point_new] = simplex
        self.tell_pending(point_new, simplex=simplex)  # O(??)

        return point_new, loss

@property
def _bounds_available(self):
Expand Down
54 changes: 53 additions & 1 deletion adaptive/learner/triangulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from itertools import chain, combinations
from math import factorial, sqrt

import numpy as np
import scipy.spatial
from numpy import abs as np_abs
from numpy import (
Expand Down Expand Up @@ -252,6 +253,8 @@ def simplex_volume_in_embedding(vertices) -> float:
ValueError
if the vertices do not form a simplex (for example,
because they are coplanar, colinear or coincident).

Warning: this algorithm has not been tested for numerical stability.
"""
# Implements http://mathworld.wolfram.com/Cayley-MengerDeterminant.html
# Modified from https://codereview.stackexchange.com/questions/77593/calculating-the-volume-of-a-tetrahedron
Expand Down Expand Up @@ -566,7 +569,14 @@ def bowyer_watson(self, pt_index, containing_simplex=None, transform=None):
self.add_simplex(simplex)

new_triangles = self.vertex_to_simplices[pt_index]
return bad_triangles - new_triangles, new_triangles - bad_triangles
deleted_simplices = bad_triangles - new_triangles
new_simplices = new_triangles - bad_triangles

old_vol = sum([self.volume(simplex) for simplex in deleted_simplices])
new_vol = sum([self.volume(simplex) for simplex in new_simplices])
assert np.isclose(old_vol, new_vol), f"{old_vol} !== {new_vol}"

return deleted_simplices, new_simplices

def _simplex_is_almost_flat(self, simplex):
return self._relative_volume(simplex) < 1e-8
Expand Down Expand Up @@ -713,3 +723,45 @@ def hull(self):
def convex_invariant(self, vertex):
"""Hull is convex."""
raise NotImplementedError


class FakeDelaunay(scipy.spatial.Delaunay):
    """A ``scipy.spatial.Delaunay`` look-alike built from explicit data.

    Wraps pre-computed vertices and simplices in an object with the
    ``scipy.spatial.Delaunay`` interface, so tools that expect a Delaunay
    triangulation (e.g. plotting) can consume a custom triangulation.
    """

    def __init__(self, vertices, simplices):
        """Build the fake triangulation.

        Parameters
        ----------
        vertices : 2d array-like of floats
            Coordinates of the triangulation vertices.
        simplices : 2d array-like of integers
            Vertex indices of each simplex.
        """
        # Create some valid Delaunay triangulation object (any would do);
        # every attribute it computes is overwritten below.
        super().__init__([[0, 1], [1, 0], [0, 0]])

        self.ndim = len(vertices[0])
        self._points = np.array(vertices, dtype="float64")
        self.simplices = np.array(list(simplices), dtype="int32")
        self.nsimplex = len(simplices)
        self.npoints = len(vertices)
        self.min_bound = self._points.min(axis=0)
        self.max_bound = self._points.max(axis=0)
        self._transform = None  # Set it to None to recompute it when needed

        # Now the hard part: finding the neighbours. Two simplices are
        # neighbours when they share a face (all vertices but one);
        # neighbors[n, i] is the simplex opposite vertex i of simplex n,
        # or -1 when that face lies on the hull.
        self.neighbors = -np.ones_like(self.simplices)  # set all to -1

        faces_dict = {}
        for n, simplex in enumerate(self.simplices):
            simplex = tuple(simplex)
            for i, _point in enumerate(simplex):
                face = tuple(sorted(simplex[:i] + simplex[i + 1 :]))
                if face in faces_dict:
                    # Second simplex with this face: link the two.
                    other_simpl_index, other_point_index = faces_dict[face]
                    self.neighbors[other_simpl_index, other_point_index] = n
                    self.neighbors[n, i] = other_simpl_index
                else:
                    faces_dict[face] = (n, i)
28 changes: 26 additions & 2 deletions adaptive/tests/test_learnernd.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,18 @@
import scipy.spatial

from adaptive.learner import LearnerND
from adaptive.runner import replay_log, simple
from adaptive.runner import BlockingRunner, replay_log, simple

from .test_learners import generate_random_parametrization, ring_of_fire


def test_faiure_case_LearnerND():
def sphere(xyz):
    """Test function: a thin spherical shell of radius 0.75 on top of a
    smooth ``x + z**2`` background."""
    x, y, z = xyz
    width = 0.4
    radius_sq = x**2 + y**2 + z**2
    shell = np.exp(-((radius_sq - 0.75**2) ** 2) / width**4)
    return x + z**2 + shell


def test_failure_case_LearnerND():
log = [
("ask", 4),
("tell", (-1, -1, -1), 1.607873907219222e-101),
Expand All @@ -26,6 +32,24 @@ def test_faiure_case_LearnerND():
replay_log(learner, log)


def test_anisotropic_3d():
    # There was a bug where the total simplex volume would exceed the bounding
    # box volume for the anisotropic 3D learner.
    learner = LearnerND(sphere, bounds=[(-1, 1), (-1, 1), (-1, 1)], anisotropic=True)

    def goal(learner):
        if learner.tri:
            # The bounding box is [-1, 1]^3, i.e. volume 8; allow a tiny
            # floating-point margin above it.
            sum_of_simplex_volumes = np.sum(learner.tri.volumes())
            assert sum_of_simplex_volumes < 8.00000000001
        return learner.npoints >= 1000

    BlockingRunner(learner, goal, ntasks=1)

    assert learner.npoints >= 1000


def test_interior_vs_bbox_gives_same_result():
f = generate_random_parametrization(ring_of_fire)

Expand Down
5 changes: 5 additions & 0 deletions adaptive/tests/unit/test_learnernd.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,11 @@ def test_learnerND_accepts_ConvexHull_as_input():
assert np.allclose(learner._bbox, [(0, 2), (0, 1)])


def test_learnerND_anisotropic_is_keyword_only():
    # Passing ``anisotropic`` positionally (the fourth argument here) must
    # raise: the LearnerND signature declares it keyword-only.
    with pytest.raises(TypeError):
        LearnerND(ring_of_fire, [(-1, 1), (-1, 1)], None, True)


def test_learnerND_raises_if_too_many_neigbors():
@uses_nth_neighbors(2)
def loss(*args):
Expand Down
6 changes: 2 additions & 4 deletions docs/source/logo.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,11 @@ jupytext:
extension: .md
format_name: myst
format_version: 0.13
jupytext_version: 1.14.5
jupytext_version: 1.19.1
kernelspec:
display_name: Python 3 (ipykernel)
language: python
name: python3
execution:
timeout: 300
---

```{code-cell} ipython3
Expand Down Expand Up @@ -168,7 +166,7 @@ def animate_png(folder=None, nseconds=15):


def save_webp(fname_webp, ims):
(im, *_ims) = ims
im, *_ims = ims
im.save(
fname_webp,
save_all=True,
Expand Down
Loading