import pytest

import psycopg
from psycopg import pq
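# libpq before version 14 has no pipeline support: the status reads 0 (OFF)
# and every pipeline entry point raises NotSupportedError.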
@pytest.mark.libpq("< 14")
def test_old_libpq(pgconn):
    assert pgconn.pipeline_status == 0
    with pytest.raises(psycopg.NotSupportedError):
        pgconn.enter_pipeline_mode()
    with pytest.raises(psycopg.NotSupportedError):
        pgconn.exit_pipeline_mode()
    with pytest.raises(psycopg.NotSupportedError):
        pgconn.pipeline_sync()
    with pytest.raises(psycopg.NotSupportedError):
        pgconn.send_flush_request()

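# Exiting pipeline mode is refused while a sent query still has unread results.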
@pytest.mark.libpq(">= 14")
def test_work_in_progress(pgconn):
    assert not pgconn.nonblocking
    assert pgconn.pipeline_status == pq.PipelineStatus.OFF
    pgconn.enter_pipeline_mode()
    pgconn.send_query_params(b"select $1", [b"1"])
    with pytest.raises(psycopg.OperationalError, match="cannot exit pipeline mode"):
        pgconn.exit_pipeline_mode()

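# Two queries, each followed by its own sync point: results and PIPELINE_SYNC
# markers are read back in the order the queries were sent.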
@pytest.mark.libpq(">= 14")
def test_multi_pipelines(pgconn):
    assert pgconn.pipeline_status == pq.PipelineStatus.OFF
    pgconn.enter_pipeline_mode()
    pgconn.send_query_params(b"select $1", [b"1"], param_types=[25])
    pgconn.pipeline_sync()
    pgconn.send_query_params(b"select $1", [b"2"], param_types=[25])
    pgconn.pipeline_sync()

    # result from first query
    result1 = pgconn.get_result()
    assert result1 is not None
    assert result1.status == pq.ExecStatus.TUPLES_OK

    # NULL signals end of result
    assert pgconn.get_result() is None

    # first sync result
    sync_result = pgconn.get_result()
    assert sync_result is not None
    assert sync_result.status == pq.ExecStatus.PIPELINE_SYNC

    # result from second query
    result2 = pgconn.get_result()
    assert result2 is not None
    assert result2.status == pq.ExecStatus.TUPLES_OK

    # NULL signals end of result
    assert pgconn.get_result() is None

    # second sync result
    sync_result = pgconn.get_result()
    assert sync_result is not None
    assert sync_result.status == pq.ExecStatus.PIPELINE_SYNC

    # pipeline still ON
    assert pgconn.pipeline_status == pq.PipelineStatus.ON

    pgconn.exit_pipeline_mode()
    assert pgconn.pipeline_status == pq.PipelineStatus.OFF

    assert result1.get_value(0, 0) == b"1"
    assert result2.get_value(0, 0) == b"2"

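# A flush request makes queued results available without closing the current
# pipeline with a sync.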
@pytest.mark.libpq(">= 14")
def test_flush_request(pgconn):
    assert pgconn.pipeline_status == pq.PipelineStatus.OFF
    pgconn.enter_pipeline_mode()
    pgconn.send_query_params(b"select $1", [b"1"], param_types=[25])
    pgconn.send_flush_request()
    r = pgconn.get_result()
    assert r.status == pq.ExecStatus.TUPLES_OK
    assert r.get_value(0, 0) == b"1"
    pgconn.exit_pipeline_mode()

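# Scratch table created (and dropped) for test_pipeline_abort below.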
@pytest.fixture
def table(pgconn):
    tablename = "pipeline"
    pgconn.exec_(f"create table {tablename} (s text)".encode("ascii"))
    yield tablename
    pgconn.exec_(f"drop table if exists {tablename}".encode("ascii"))

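# An error inside a pipeline aborts the queries queued after it up to the next
# sync point; reading the PIPELINE_SYNC result clears the aborted state.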
@pytest.mark.libpq(">= 14")
def test_pipeline_abort(pgconn, table):
    assert pgconn.pipeline_status == pq.PipelineStatus.OFF
    pgconn.enter_pipeline_mode()
    pgconn.send_query_params(b"insert into pipeline values ($1)", [b"1"])
    pgconn.send_query_params(b"select no_such_function($1)", [b"1"])
    pgconn.send_query_params(b"insert into pipeline values ($1)", [b"2"])
    pgconn.pipeline_sync()
    pgconn.send_query_params(b"insert into pipeline values ($1)", [b"3"])
    pgconn.pipeline_sync()

    # result from first INSERT
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.COMMAND_OK

    # NULL signals end of result
    assert pgconn.get_result() is None

    # error result from second query (SELECT)
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.FATAL_ERROR

    # NULL signals end of result
    assert pgconn.get_result() is None

    # pipeline should be aborted, due to previous error
    assert pgconn.pipeline_status == pq.PipelineStatus.ABORTED

    # result from second INSERT, aborted due to previous error
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.PIPELINE_ABORTED

    # NULL signals end of result
    assert pgconn.get_result() is None

    # pipeline is still aborted
    assert pgconn.pipeline_status == pq.PipelineStatus.ABORTED

    # sync result
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.PIPELINE_SYNC

    # aborted flag is clear, pipeline is on again
    assert pgconn.pipeline_status == pq.PipelineStatus.ON

    # result from the third INSERT
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.COMMAND_OK

    # NULL signals end of result
    assert pgconn.get_result() is None

    # second sync result
    r = pgconn.get_result()
    assert r is not None
    assert r.status == pq.ExecStatus.PIPELINE_SYNC

    # NULL signals end of result
    assert pgconn.get_result() is None

    pgconn.exit_pipeline_mode()