Coverage for human_requests / pytest_plugin / _runtime.py: 85%

47 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-03-07 17:38 +0000

from __future__ import annotations

import asyncio
from collections.abc import Coroutine, Iterator
from typing import Any, TypeVar

import pytest

from ..autotest import execute_autotests, execute_autotests_with_subtests
from ._config import get_typecheck_mode, resolve_runtime_dependencies

11 

# Generic result type threaded through run_coroutine's signature.
T = TypeVar("T")

13 

14 

def run_autotest_tree_sync(request: pytest.FixtureRequest) -> None:
    """Execute every ``@autotest``-marked method found in the api tree.

    Runs the async executor on a fresh event loop (synchronous pytest
    context) and skips the test when nothing was discovered.
    """
    api, schemashot = resolve_runtime_dependencies(request)
    mode = get_typecheck_mode(request.config)
    coro = _execute_autotests_async(
        api=api,
        schemashot=schemashot,
        typecheck_mode=mode,
        subtests=_resolve_subtests_fixture(request),
    )
    if run_coroutine(coro) == 0:
        pytest.skip("No methods marked with @autotest were found in the api tree.")

29 

30 

@pytest.mark.usefixtures("_autotest_anyio_runner")
def run_autotest_tree_anyio(request: pytest.FixtureRequest) -> None:
    """Execute the ``@autotest`` tree on the active anyio backend.

    The anyio runner fixture drives the async executor; the test is
    skipped when no ``@autotest``-marked methods were discovered.
    """
    runner = request.getfixturevalue("_autotest_anyio_runner")
    api, schemashot = resolve_runtime_dependencies(request)
    kwargs = {
        "api": api,
        "schemashot": schemashot,
        "typecheck_mode": get_typecheck_mode(request.config),
        "subtests": _resolve_subtests_fixture(request),
    }
    if runner.run_test(_execute_autotests_async, kwargs) == 0:
        pytest.skip("No methods marked with @autotest were found in the api tree.")

48 

49 

@pytest.fixture
def _autotest_anyio_runner(anyio_backend: Any) -> Iterator[Any]:
    """Yield an anyio test runner bound to the backend selected by anyio.

    anyio is imported lazily so the plugin does not hard-require it
    unless the anyio execution path is actually used.

    NOTE: this is a yield-fixture (a generator), so the correct return
    annotation is ``Iterator[Any]`` rather than ``Any``.
    """
    from anyio.pytest_plugin import extract_backend_and_options, get_runner

    backend_name, backend_options = extract_backend_and_options(anyio_backend)
    # get_runner is a context manager; keep it open for the fixture's
    # entire lifetime so the runner stays usable inside the test.
    with get_runner(backend_name, backend_options) as runner:
        yield runner

57 

58 

async def _execute_autotests_async(
    api: object,
    schemashot: Any,
    typecheck_mode: str,
    subtests: Any | None = None,
) -> int:
    """Run the autotest tree, preferring the subtests-aware executor.

    Returns the number of executed ``@autotest`` methods.
    """
    if subtests is None:
        # No pytest-subtests fixture available: plain execution.
        return await execute_autotests(
            api=api,
            schemashot=schemashot,
            typecheck_mode=typecheck_mode,
        )
    return await execute_autotests_with_subtests(
        api=api,
        schemashot=schemashot,
        subtests=subtests,
        typecheck_mode=typecheck_mode,
    )

77 

78 

def _resolve_subtests_fixture(request: pytest.FixtureRequest) -> Any | None:
    """Return the ``subtests`` fixture when pytest-subtests is usable, else None."""
    if request.config.pluginmanager.has_plugin("subtests"):
        try:
            return request.getfixturevalue("subtests")
        except pytest.FixtureLookupError:
            # Plugin registered but fixture not resolvable in this scope.
            pass
    return None

86 

87 

def run_coroutine(coro: Coroutine[Any, Any, T]) -> T:
    """Drive *coro* to completion on a fresh event loop and return its result.

    Raises:
        RuntimeError: if called while an event loop is already running in
            this thread — ``asyncio.run()`` cannot nest inside an active loop.
    """
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        # No loop running in this thread: safe to spin one up.
        return asyncio.run(coro)
    # Close the never-started coroutine so CPython does not emit a
    # "coroutine ... was never awaited" RuntimeWarning on the error path.
    coro.close()
    raise RuntimeError(
        "Autotest plugin is running inside an active event loop. "
        "Run it from a synchronous pytest context."
    )