import pytest

import pytest_userver.client


async def test_basic(service_client, monitor_client):
    response = await service_client.get('/metrics')
    assert response.status_code == 200
    assert 'application/json' in response.headers['Content-Type']

    metric = await monitor_client.single_metric('sample-metrics.foo')
    assert metric.value > 0


# uservice_oneshot starts a fresh service instance for this test,
# so the metric still holds its initial value.
@pytest.mark.uservice_oneshot
async def test_initial_metrics(service_client, monitor_client):
    metric = await monitor_client.single_metric('sample-metrics.foo')
    assert metric.value == 0


async def test_metrics_reset(service_client, monitor_client):
    # Reset the service metrics to their initial values
    await service_client.reset_metrics()

    # Right after the reset the counter is back to zero and has no labels
    metric = await monitor_client.single_metric('sample-metrics.foo')
    assert metric.value == 0
    assert not metric.labels

    # The counter is incremented on each handler invocation
    response = await service_client.get('/metrics')
    assert response.status_code == 200
    assert 'application/json' in response.headers['Content-Type']

    metric = await monitor_client.single_metric('sample-metrics.foo')
    assert metric.value == 1

    # Resetting brings the counter back to zero again
    await service_client.reset_metrics()
    metric = await monitor_client.single_metric('sample-metrics.foo')
    assert metric.value == 0


async def test_engine_metrics(service_client, monitor_client: pytest_userver.client.ClientMonitor):
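    # Engine metrics are produced by the userver core itself; narrow the
    # query down with the task processor label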
    metric = await monitor_client.single_metric(
        'engine.task-processors.tasks.finished.v2',
        labels={'task_processor': 'main-task-processor'},
    )
    assert metric.value > 0
    assert metric.labels == {'task_processor': 'main-task-processor'}

    # Fetch all HTTP handler metrics for the /ping path
    metrics_dict = await monitor_client.metrics(
        prefix='http.',
        labels={'http_path': '/ping'},
    )

    assert metrics_dict
    assert 'http.handler.cancelled-by-deadline' in metrics_dict

    assert (
        metrics_dict.value_at(
            'http.handler.in-flight',
            labels={
                'http_path': '/ping',
                'http_handler': 'handler-ping',
                'version': '2',
            },
        )
        == 0
    )


async def test_engine_tasks_alive_metric(service_client, monitor_client: pytest_userver.client.ClientMonitor):
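    # A running service always has at least one alive task
    # on its main task processor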
    metric = await monitor_client.single_metric(
        'engine.task-processors.tasks.alive',
        labels={'task_processor': 'main-task-processor'},
    )
    assert metric.value > 0
    assert metric.labels == {'task_processor': 'main-task-processor'}


async def test_some_optional_metric(service_client, monitor_client: pytest_userver.client.ClientMonitor):
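    # The metric may be absent; when it is present it must be zero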
    metric = await monitor_client.single_metric_optional(
        'some.metric.error',
        labels={'task_processor': 'main-task-processor'},
    )
    assert metric is None or metric.value == 0


async def test_diff_metrics(service_client, monitor_client: pytest_userver.client.ClientMonitor):
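    # metrics_diff() captures a snapshot on entering and on leaving the block
    # and reports the difference between the two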
    async with monitor_client.metrics_diff(prefix='sample-metrics') as differ:
        response = await service_client.get('/metrics')
        assert response.status_code == 200

    # Exactly one handler invocation happened inside the block
    assert differ.value_at('foo') == 1


async def test_engine_logger_metrics(service_client, monitor_client: pytest_userver.client.ClientMonitor):
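    # Metrics of a specific logger are selected via the 'logger' label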
    metrics_dict = await monitor_client.metrics(
        prefix='logger.',
        labels={'logger': 'default'},
    )

    assert metrics_dict
    assert 'logger.total' in metrics_dict
    assert metrics_dict.value_at('logger.total') > 0

    # No log records were dropped by the default logger
    assert (
        metrics_dict.value_at(
            'logger.dropped',
            labels={
                'logger': 'default',
                'version': '2',
            },
        )
        == 0
    )