@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -72,7 +72,9 @@ def f_divergent_1d(x):


 learner = adaptive.Learner1D(
-    f_divergent_1d, (-1, 1), loss_per_interval=uniform_sampling_1d
+    f_divergent_1d,
+    (-1, 1),
+    loss_per_interval=uniform_sampling_1d,
 )
 runner = adaptive.BlockingRunner(learner, loss_goal=0.01)
 learner.plot().select(y=(0, 10000))
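Note (not part of the diff): `uniform_sampling_1d` is defined earlier in this tutorial file. A minimal sketch of such a `loss_per_interval` callable, assuming the `(xs, values)` signature that `Learner1D` passes to it, could look like:

```python
# Sketch of a custom loss_per_interval for Learner1D (assumed to mirror the
# tutorial's uniform_sampling_1d): ignore the function values and return the
# interval width, so points are requested uniformly in x.
def uniform_sampling_1d(xs, values):
    dx = xs[1] - xs[0]
    return dx
```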
@@ -92,12 +94,15 @@ def f_divergent_2d(xy):


 learner = adaptive.Learner2D(
-    f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=uniform_sampling_2d
+    f_divergent_2d,
+    [(-1, 1), (-1, 1)],
+    loss_per_triangle=uniform_sampling_2d,
 )

 # this takes a while, so use the async Runner so we know *something* is happening
 runner = adaptive.Runner(
-    learner, goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000
+    learner,
+    goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000,
 )
 ```

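Similarly, `uniform_sampling_2d` comes from earlier in the tutorial. A sketch, assuming `loss_per_triangle` receives the `Learner2D` interpolator object (`ip`) and that `areas(ip)` returns the per-triangle areas (both assumptions, consistent with the imports shown further down in this diff):

```python
import numpy as np


# Sketch of a custom loss_per_triangle for Learner2D (assumed to mirror the
# tutorial's uniform_sampling_2d): return an edge-length scale per triangle,
# so triangles are refined purely by size, not by the function values.
def uniform_sampling_2d(ip):
    from adaptive.learner.learner2D import areas

    A = areas(ip)
    return np.sqrt(A)
```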
@@ -134,7 +139,8 @@ After all subdomains are appropriately small it will prioritise places where the
 ```{code-cell} ipython3
 def resolution_loss_function(min_distance=0, max_distance=1):
     """min_distance and max_distance should be in between 0 and 1
-    because the total area is normalized to 1."""
+    because the total area is normalized to 1.
+    """

     def resolution_loss(ip):
         from adaptive.learner.learner2D import areas, default_loss
@@ -143,10 +149,10 @@ def resolution_loss_function(min_distance=0, max_distance=1):

         A = areas(ip)
         # Setting areas with a small area to zero such that they won't be chosen again
-        loss[A < min_distance**2] = 0
+        loss[min_distance**2 > A] = 0

         # Setting triangles that have a size larger than max_distance to infinite loss
-        loss[A > max_distance**2] = np.inf
+        loss[max_distance**2 < A] = np.inf

         return loss

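Aside on the changed lines above: `min_distance**2 > A` and `max_distance**2 < A` select exactly the same triangles as the original `A < min_distance**2` and `A > max_distance**2`; the boolean-mask assignment works like this toy NumPy example (array values are made up):

```python
import numpy as np

A = np.array([1e-6, 0.05, 2.0])   # made-up triangle areas
loss = np.array([0.1, 0.2, 0.3])  # made-up losses
min_distance, max_distance = 0.01, 1

loss[min_distance**2 > A] = 0       # same as loss[A < min_distance**2] = 0
loss[max_distance**2 < A] = np.inf  # same as loss[A > max_distance**2] = np.inf
print(loss)  # [0.  0.2 inf]
```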
@@ -158,7 +164,8 @@ loss = resolution_loss_function(min_distance=0.01)
 learner = adaptive.Learner2D(f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=loss)
 runner = adaptive.BlockingRunner(learner, loss_goal=0.02)
 learner.plot(tri_alpha=0.3).relabel("1 / (x^2 + y^2) in log scale").opts(
-    hv.opts.EdgePaths(color="w"), hv.opts.Image(logz=True, colorbar=True)
+    hv.opts.EdgePaths(color="w"),
+    hv.opts.Image(logz=True, colorbar=True),
 )
 ```
