@@ -4,7 +4,7 @@ jupytext:
4
4
extension : .md
5
5
format_name : myst
6
6
format_version : 0.13
7
- jupytext_version : 1.14.5
7
+ jupytext_version : 1.16.1
8
8
kernelspec :
9
9
display_name : python3
10
10
name : python3
@@ -72,7 +72,7 @@ def f_divergent_1d(x):
72
72
73
73
74
74
learner = adaptive.Learner1D(
75
- f_divergent_1d, (-1, 1), loss_per_interval=uniform_sampling_1d
75
+ f_divergent_1d, (-1, 1), loss_per_interval=uniform_sampling_1d,
76
76
)
77
77
runner = adaptive.BlockingRunner(learner, loss_goal=0.01)
78
78
learner.plot().select(y=(0, 10000))
@@ -92,12 +92,12 @@ def f_divergent_2d(xy):
92
92
93
93
94
94
learner = adaptive.Learner2D(
95
- f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=uniform_sampling_2d
95
+ f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=uniform_sampling_2d,
96
96
)
97
97
98
98
# this takes a while, so use the async Runner so we know *something* is happening
99
99
runner = adaptive.Runner(
100
- learner, goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000
100
+ learner, goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000,
101
101
)
102
102
```
103
103
@@ -134,7 +134,8 @@ After all subdomains are appropriately small it will prioritise places where the
134
134
``` {code-cell} ipython3
135
135
def resolution_loss_function(min_distance=0, max_distance=1):
136
136
"""min_distance and max_distance should be in between 0 and 1
137
- because the total area is normalized to 1."""
137
+ because the total area is normalized to 1.
138
+ """
138
139
139
140
def resolution_loss(ip):
140
141
from adaptive.learner.learner2D import areas, default_loss
@@ -143,10 +144,10 @@ def resolution_loss_function(min_distance=0, max_distance=1):
143
144
144
145
A = areas(ip)
145
146
# Setting areas with a small area to zero such that they won't be chosen again
146
- loss[A < min_distance**2] = 0
147
+ loss[min_distance**2 > A] = 0
147
148
148
149
# Setting triangles that have a size larger than max_distance to infinite loss
149
- loss[A > max_distance**2] = np.inf
150
+ loss[max_distance**2 < A] = np.inf
150
151
151
152
return loss
152
153
@@ -158,7 +159,7 @@ loss = resolution_loss_function(min_distance=0.01)
158
159
learner = adaptive.Learner2D(f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=loss)
159
160
runner = adaptive.BlockingRunner(learner, loss_goal=0.02)
160
161
learner.plot(tri_alpha=0.3).relabel("1 / (x^2 + y^2) in log scale").opts(
161
- hv.opts.EdgePaths(color="w"), hv.opts.Image(logz=True, colorbar=True)
162
+ hv.opts.EdgePaths(color="w"), hv.opts.Image(logz=True, colorbar=True),
162
163
)
163
164
```
164
165
0 commit comments