import json
import threading
from uuid import uuid4
from queue import Queue
from typing import Any
import pytest
from psycopg import pq, errors as e
from psycopg.rows import namedtuple_row
pytestmark = pytest.mark.crdb
@pytest.fixture
def testfeed(svcconn):
    """Yield the name of a throwaway table suitable for changefeed tests.

    Enables rangefeeds on the cluster, creates a uniquely-named table,
    and drops it again on teardown.
    """
    table = f"test_feed_{uuid4().hex}"
    svcconn.execute("set cluster setting kv.rangefeed.enabled to true")
    svcconn.execute(f"create table {table} (id serial primary key, data text)")
    yield table
    svcconn.execute(f"drop table {table}")
@pytest.mark.slow
@pytest.mark.parametrize("fmt_out", pq.Format)
def test_changefeed(conn_cls, dsn, conn, testfeed, fmt_out):
    """End-to-end test of a CRDB experimental changefeed.

    A background thread streams changefeed rows from *testfeed* into a
    queue; the main thread inserts then deletes a row, checks the
    corresponding change events, and finally cancels the streaming
    query, verifying the worker terminates cleanly.
    """
    conn.autocommit = True
    q: "Queue[Any]" = Queue()

    def worker():
        # Use a dedicated connection: stream() blocks until the query is
        # cancelled.  Named feed_conn (not conn) so it doesn't shadow the
        # test's own connection inside this closure.
        try:
            with conn_cls.connect(dsn, autocommit=True) as feed_conn:
                cur = feed_conn.cursor(binary=fmt_out, row_factory=namedtuple_row)
                try:
                    for row in cur.stream(f"experimental changefeed for {testfeed}"):
                        q.put(row)
                except e.QueryCanceled:
                    # Cancellation is the expected way out of the stream;
                    # the connection must be idle again afterwards.
                    assert feed_conn.info.transaction_status == feed_conn.TransactionStatus.IDLE
                    q.put(None)
        except Exception as ex:
            # Surface any unexpected failure to the main thread.
            q.put(ex)

    t = threading.Thread(target=worker)
    t.start()

    cur = conn.cursor()

    # An insert must produce an event carrying the new row state.
    cur.execute(f"insert into {testfeed} (data) values ('hello') returning id")
    (key,) = cur.fetchone()
    row = q.get(timeout=1)
    assert row.table == testfeed
    assert json.loads(row.key) == [key]
    assert json.loads(row.value)["after"] == {"id": key, "data": "hello"}

    # A delete must produce an event with a null "after" state.
    cur.execute(f"delete from {testfeed} where id = %s", [key])
    row = q.get(timeout=1)
    assert row.table == testfeed
    assert json.loads(row.key) == [key]
    assert json.loads(row.value)["after"] is None

    # Find the (only) non-SHOW running statement -- the changefeed --
    # and cancel it to unblock the worker.
    cur.execute("select query_id from [show statements] where query !~ 'show'")
    (qid,) = cur.fetchone()
    cur.execute("cancel query %s", [qid])
    assert cur.statusmessage == "CANCEL QUERIES 1"

    # The worker signals clean termination with a None sentinel.
    assert q.get(timeout=1) is None
    t.join()