import time

import outcome
import pytest

from .. import _core
from .._core.tests.tutil import slow
from .._timeouts import *
from ..testing import assert_checkpoints
9

10

11 22
async def check_takes_about(f, expected_dur):
    """Run the async callable *f*, assert that it took roughly
    *expected_dur* seconds of wall-clock time, and then return its result
    (re-raising any exception it raised).
    """
    t0 = time.perf_counter()
    captured = await outcome.acapture(f)
    elapsed = time.perf_counter() - t0
    print(elapsed / expected_dur)
    # 1.5 is an arbitrary fudge factor because there's always some delay
    # between when we become eligible to wake up and when we actually do. We
    # used to sleep for 0.05, and regularly observed overruns of 1.6x on
    # Appveyor, and then started seeing overruns of 2.3x on Travis's macOS, so
    # now we bumped up the sleep to 1 second, marked the tests as slow, and
    # hopefully now the proportional error will be less huge.
    #
    # We also allow for durations that are a hair shorter than expected. For
    # example, here's a run on Windows where a 1.0 second sleep was measured
    # to take 0.9999999999999858 seconds:
    #   https://ci.appveyor.com/project/njsmith/trio/build/1.0.768/job/3lbdyxl63q3h9s21
    # I believe that what happened here is that Windows's low clock resolution
    # meant that our calls to time.monotonic() returned exactly the same
    # values as the calls inside the actual run loop, but the two subtractions
    # returned slightly different values because the run loop's clock adds a
    # random floating point offset to both times, which should cancel out, but
    # lol floating point we got slightly different rounding errors. (That
    # value above is exactly 128 ULPs below 1.0, which would make sense if it
    # started as a 1 ULP error at a different dynamic range.)
    assert (1 - 1e-8) <= (elapsed / expected_dur) < 1.5
    return captured.unwrap()
37

38

39
# How long to (attempt to) sleep for when testing. Smaller numbers make the
# test suite go faster.
TARGET = 1.0
42

43

44 22
@slow
async def test_sleep():
    """sleep/sleep_until take about the requested time and checkpoint."""

    # sleep_until with an absolute deadline should take about TARGET seconds.
    async def sleep_until_deadline():
        await sleep_until(_core.current_time() + TARGET)

    await check_takes_about(sleep_until_deadline, TARGET)

    # ...and so should a relative sleep.
    async def sleep_relative():
        await sleep(TARGET)

    await check_takes_about(sleep_relative, TARGET)

    # Negative durations are rejected up front.
    with pytest.raises(ValueError):
        await sleep(-1)

    # sleep(0) still executes a full checkpoint.
    with assert_checkpoints():
        await sleep(0)
    # This also serves as a test of the trivial move_on_at
    with move_on_at(_core.current_time()):
        with pytest.raises(_core.Cancelled):
            await sleep(0)
65

66

67 22
@slow
async def test_move_on_after():
    """move_on_after rejects negative timeouts and cancels on schedule."""

    # A negative timeout is rejected immediately, before the body runs.
    with pytest.raises(ValueError):
        with move_on_after(-1):
            pass  # pragma: no cover

    # The scope should cancel the long sleep after about TARGET seconds.
    async def cancelled_after_target():
        with move_on_after(TARGET):
            await sleep(100)

    await check_takes_about(cancelled_after_target, TARGET)
78

79

80 22
@slow
async def test_fail():
    """fail_at/fail_after raise TooSlowError on timeout, pass through otherwise."""

    # fail_at raises TooSlowError once its absolute deadline passes...
    async def fail_at_deadline():
        with fail_at(_core.current_time() + TARGET):
            await sleep(100)

    with pytest.raises(TooSlowError):
        await check_takes_about(fail_at_deadline, TARGET)

    # ...but is transparent when the deadline is comfortably far away.
    with fail_at(_core.current_time() + 100):
        await sleep(0)

    # Same pair of checks for the relative-timeout variant.
    async def fail_after_timeout():
        with fail_after(TARGET):
            await sleep(100)

    with pytest.raises(TooSlowError):
        await check_takes_about(fail_after_timeout, TARGET)

    with fail_after(100):
        await sleep(0)

    # Negative timeouts are rejected up front.
    with pytest.raises(ValueError):
        with fail_after(-1):
            pass  # pragma: no cover
