
Commit 58e27f5

ci fixes (#198)
1 parent 2c93207 commit 58e27f5

2 files changed: +20 -15 lines

src/resample/bootstrap.py

Lines changed: 5 additions & 4 deletions
@@ -320,8 +320,9 @@ def variance(
     >>> from resample.bootstrap import variance
     >>> import numpy as np
     >>> x = np.arange(10)
-    >>> round(variance(np.mean, x, size=10000, random_state=1), 1)
-    0.8
+    >>> v = variance(np.mean, x, size=10000, random_state=1)
+    >>> f"{v:.1f}"
+    '0.8'

     """
     thetas = bootstrap(fn, sample, *args, **kwargs)
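The move away from round() here is presumably about doctest stability rather than numerics: a doctest compares the repr of the result, and round() on a NumPy scalar returns a NumPy scalar whose repr changed in NumPy 2.0. A minimal sketch of that failure mode, assuming the NumPy 2.0 scalar repr is indeed what broke CI (the commit message only says "ci fixes"):

import numpy as np

v = np.float64(0.8123)

# round() on a NumPy scalar returns a NumPy scalar; its repr is what a
# doctest compares, and that repr is 'np.float64(0.81)' under NumPy >= 2.0
# but '0.81' under NumPy 1.x, so one expected output cannot match both.
print(repr(round(v, 2)))

# An f-string produces a plain Python str, whose repr is identical across
# NumPy versions, which keeps the doctest portable.
print(repr(f"{v:.2f}"))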
@@ -413,8 +414,8 @@ def confidence_interval(
     >>> import numpy as np
     >>> x = np.arange(10)
     >>> a, b = confidence_interval(np.mean, x, size=10000, random_state=1)
-    >>> round(a, 1), round(b, 1)
-    (2.6, 6.2)
+    >>> f"{a:.1f} to {b:.1f}"
+    '2.6 to 6.2'

     Notes
     -----
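Since these examples double as tests, the expected strings must match character for character. A quick way to check the updated outputs locally is the standard doctest runner; a sketch (the repository's actual CI invocation is not shown on this page):

import doctest
import resample.bootstrap

# Re-run the module's docstring examples the way a doctest-based CI job would.
failures, attempted = doctest.testmod(resample.bootstrap)
print(f"{failures} failures out of {attempted} doctests")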

src/resample/jackknife.py

Lines changed: 15 additions & 11 deletions
@@ -10,7 +10,10 @@
 preferred, especially when the sample is small. The computational cost of the jackknife
 increases quadratically with the sample size, but only linearly for the bootstrap. An
 advantage of the jackknife can be the deterministic outcome, since no random sampling
-is involved.
+is involved, but this can be overcome by fixing the seed for the bootstrap.
+
+The jackknife should be used to estimate the bias, since the bootstrap cannot (easily)
+estimate bias. The bootstrap should be preferred when computing the variance.
 """

 __all__ = [
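The new docstring sentence about fixing the seed can be illustrated with the two variance functions that appear in this commit's doctests; a small sketch (size=10000 and random_state=1 are just the values the doctests use, and the aliases are mine):

import numpy as np
from resample.bootstrap import variance as boot_var
from resample.jackknife import variance as jack_var

x = np.arange(10)

# The jackknife needs no seed: same input, same output, every run.
print(jack_var(np.mean, x))

# The bootstrap resamples randomly, but a fixed random_state makes it
# just as reproducible, which is the point the new docstring text adds.
print(boot_var(np.mean, x, size=10000, random_state=1))
print(boot_var(np.mean, x, size=10000, random_state=1))  # identical value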
@@ -222,10 +225,10 @@ def bias(
     >>> from resample.jackknife import bias
     >>> import numpy as np
     >>> x = np.arange(10)
-    >>> round(bias(np.var, x), 1)
-    -0.9
-    >>> round(bias(lambda x: np.var(x, ddof=1), x), 1)
-    0.0
+    >>> b1 = bias(np.var, x)
+    >>> b2 = bias(lambda x: np.var(x, ddof=1), x)
+    >>> f"bias of naive sample variance {b1:.1f}, bias of corrected variance {b2:.1f}"
+    'bias of naive sample variance -0.9, bias of corrected variance 0.0'

     """
     sample = np.atleast_1d(sample)
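For orientation, the quantity this doctest reports is the classic Quenouille jackknife bias estimate: (n - 1) times the gap between the mean leave-one-out estimate and the full-sample estimate. A standalone sketch of that textbook formula (not resample's internal implementation, which this diff does not show):

import numpy as np

def quenouille_bias(fn, sample):
    # Textbook jackknife bias estimate: (n - 1) * (mean of leave-one-out
    # estimates - full-sample estimate).
    n = len(sample)
    loo = np.array([fn(np.delete(sample, i)) for i in range(n)])
    return (n - 1) * (loo.mean() - fn(sample))

x = np.arange(10)
print(quenouille_bias(np.var, x))                       # ~ -0.917, shown as '-0.9'
print(quenouille_bias(lambda a: np.var(a, ddof=1), x))  # ~ 0.0, ddof=1 is unbiased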
@@ -270,10 +273,10 @@ def bias_corrected(
     >>> from resample.jackknife import bias_corrected
     >>> import numpy as np
     >>> x = np.arange(10)
-    >>> round(np.var(x), 1)
-    8.2
-    >>> round(bias_corrected(np.var, x), 1)
-    9.2
+    >>> v1 = np.var(x)
+    >>> v2 = bias_corrected(np.var, x)
+    >>> f"naive variance {v1:.1f}, bias-corrected variance {v2:.1f}"
+    'naive variance 8.2, bias-corrected variance 9.2'

     """
     sample = np.atleast_1d(sample)
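The two numbers in this doctest are consistent with the bias example above: for x = np.arange(10) the sum of squared deviations is 82.5, so the naive variance is 82.5 / 10 = 8.25, and subtracting the jackknife bias estimate of about -0.92 lands on roughly 9.17, the Bessel-corrected value 82.5 / 9. A quick check:

import numpy as np

x = np.arange(10)
print(np.var(x))          # 8.25, formatted as '8.2' (divides by n)
print(np.var(x, ddof=1))  # 9.166..., formatted as '9.2' (divides by n - 1)
# bias_corrected(np.var, x) subtracts the ~ -0.92 jackknife bias estimate
# from 8.25, which recovers the Bessel-corrected value within rounding.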
@@ -314,8 +317,9 @@ def variance(
     >>> from resample.jackknife import variance
     >>> import numpy as np
     >>> x = np.arange(10)
-    >>> round(variance(np.mean, x), 1)
-    0.9
+    >>> v = variance(np.mean, x)
+    >>> f"{v:.1f}"
+    '0.9'

     """
     # formula is (n - 1) / n * sum((fj - mean(fj)) ** 2)
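The comment kept as context here spells out the estimator, so the doctest value can be reproduced from first principles; a standalone sketch of that formula:

import numpy as np

def jackknife_variance(fn, sample):
    n = len(sample)
    # fj are the leave-one-out estimates of fn
    fj = np.array([fn(np.delete(sample, i)) for i in range(n)])
    # (n - 1) / n * sum((fj - mean(fj)) ** 2), as in the comment above
    return (n - 1) / n * np.sum((fj - fj.mean()) ** 2)

x = np.arange(10)
print(f"{jackknife_variance(np.mean, x):.1f}")  # '0.9' (exact value ~ 0.917)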
