author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-05-04 17:41:08 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-05-04 17:41:08 +0000
commit    506ed8899b3a97e512be3fd6d44d5b11463bf9bf (patch)
tree      808913770c5e6935d3714058c2a066c57b4632ec /tools
parent    Initial commit. (diff)
Adding upstream version 3.1.7. (upstream/3.1.7, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/build/build_libpq.sh               173
-rwxr-xr-x  tools/build/build_macos_arm64.sh          93
-rwxr-xr-x  tools/build/ci_test.sh                    29
-rwxr-xr-x  tools/build/copy_to_binary.py             39
-rwxr-xr-x  tools/build/print_so_versions.sh          37
-rwxr-xr-x  tools/build/run_build_macos_arm64.sh      40
-rwxr-xr-x  tools/build/strip_wheel.sh                48
-rwxr-xr-x  tools/build/wheel_linux_before_all.sh     48
-rwxr-xr-x  tools/build/wheel_macos_before_all.sh     28
-rw-r--r--  tools/build/wheel_win32_before_build.bat   3
-rwxr-xr-x  tools/bump_version.py                    310
-rwxr-xr-x  tools/update_backer.py                   134
-rwxr-xr-x  tools/update_errors.py                   217
-rwxr-xr-x  tools/update_oids.py                     217
14 files changed, 1416 insertions, 0 deletions
diff --git a/tools/build/build_libpq.sh b/tools/build/build_libpq.sh
new file mode 100755
index 0000000..4cc79af
--- /dev/null
+++ b/tools/build/build_libpq.sh
@@ -0,0 +1,173 @@
+#!/bin/bash
+
+# Build a modern version of libpq and dependent libraries from source on CentOS 5
+
+set -euo pipefail
+set -x
+
+# Last release: https://www.postgresql.org/ftp/source/
+# IMPORTANT! Change the cache key in packages.yml when upgrading libraries
+postgres_version="${LIBPQ_VERSION:-15.0}"
+
+# last release: https://www.openssl.org/source/
+openssl_version="${OPENSSL_VERSION:-1.1.1r}"
+
+# last release: https://openldap.org/software/download/
+ldap_version="2.6.3"
+
+# last release: https://github.com/cyrusimap/cyrus-sasl/releases
+sasl_version="2.1.28"
+
+export LIBPQ_BUILD_PREFIX=${LIBPQ_BUILD_PREFIX:-/tmp/libpq.build}
+
+if [[ -f "${LIBPQ_BUILD_PREFIX}/lib/libpq.so" ]]; then
+ echo "libpq already available: build skipped" >&2
+ exit 0
+fi
+
+source /etc/os-release
+
+case "$ID" in
+ centos)
+ yum update -y
+ yum install -y zlib-devel krb5-devel pam-devel
+ ;;
+
+ alpine)
+ apk upgrade
+ apk add --no-cache zlib-dev krb5-dev linux-pam-dev openldap-dev
+ ;;
+
+ *)
+ echo "$0: unexpected Linux distribution: '$ID'" >&2
+ exit 1
+ ;;
+esac
+
+if [ "$ID" == "centos" ]; then
+
+ # Build openssl if needed
+ openssl_tag="OpenSSL_${openssl_version//./_}"
+ openssl_dir="openssl-${openssl_tag}"
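+    # GitHub tag archives extract to a "{repo}-{tag}" directory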
+    if [ ! -d "${openssl_dir}" ]; then
+        curl -sL \
+            https://github.com/openssl/openssl/archive/${openssl_tag}.tar.gz \
+            | tar xzf -
+
+ cd "${openssl_dir}"
+
+ ./config --prefix=${LIBPQ_BUILD_PREFIX} --openssldir=${LIBPQ_BUILD_PREFIX} \
+ zlib -fPIC shared
+ make depend
+ make
+ else
+ cd "${openssl_dir}"
+ fi
+
+ # Install openssl
+ make install_sw
+ cd ..
+
+fi
+
+
+if [ "$ID" == "centos" ]; then
+
+ # Build libsasl2 if needed
+ # The system package (cyrus-sasl-devel) causes an amazing error on i686:
+ # "unsupported version 0 of Verneed record"
+ # https://github.com/pypa/manylinux/issues/376
+ sasl_tag="cyrus-sasl-${sasl_version}"
+ sasl_dir="cyrus-sasl-${sasl_tag}"
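+    # the tag itself starts with "cyrus-sasl-", hence the repeated prefix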
+ if [ ! -d "${sasl_dir}" ]; then
+ curl -sL \
+ https://github.com/cyrusimap/cyrus-sasl/archive/${sasl_tag}.tar.gz \
+ | tar xzf -
+
+ cd "${sasl_dir}"
+
+ autoreconf -i
+ ./configure --prefix=${LIBPQ_BUILD_PREFIX} \
+ CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
+ make
+ else
+ cd "${sasl_dir}"
+ fi
+
+ # Install libsasl2
+    # Building the man page requires nroff, which is missing: touch it to skip
+ touch saslauthd/saslauthd.8
+ make install
+ cd ..
+
+fi
+
+
+if [ "$ID" == "centos" ]; then
+
+ # Build openldap if needed
+ ldap_tag="${ldap_version}"
+ ldap_dir="openldap-${ldap_tag}"
+ if [ ! -d "${ldap_dir}" ]; then
+ curl -sL \
+ https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${ldap_tag}.tgz \
+ | tar xzf -
+
+ cd "${ldap_dir}"
+
+ ./configure --prefix=${LIBPQ_BUILD_PREFIX} --enable-backends=no --enable-null \
+ CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
+
+ make depend
+ make -C libraries/liblutil/
+ make -C libraries/liblber/
+ make -C libraries/libldap/
+ else
+ cd "${ldap_dir}"
+ fi
+
+ # Install openldap
+ make -C libraries/liblber/ install
+ make -C libraries/libldap/ install
+ make -C include/ install
+ chmod +x ${LIBPQ_BUILD_PREFIX}/lib/{libldap,liblber}*.so*
+ cd ..
+
+fi
+
+
+# Build libpq if needed
+postgres_tag="REL_${postgres_version//./_}"
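+# e.g. version 15.0 -> tag REL_15_0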
+postgres_dir="postgres-${postgres_tag}"
+if [ ! -d "${postgres_dir}" ]; then
+ curl -sL \
+ https://github.com/postgres/postgres/archive/${postgres_tag}.tar.gz \
+ | tar xzf -
+
+ cd "${postgres_dir}"
+
+    # Match the default unix socket directory with the one defined on Ubuntu
+    # and Red Hat, which seems to be the most common location
+ sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
+'|#define DEFAULT_PGSOCKET_DIR "/var/run/postgresql"|' \
+ src/include/pg_config_manual.h
+
+ # Often needed, but currently set by the workflow
+ # export LD_LIBRARY_PATH="${LIBPQ_BUILD_PREFIX}/lib"
+
+ ./configure --prefix=${LIBPQ_BUILD_PREFIX} --sysconfdir=/etc/postgresql-common \
+ --without-readline --with-gssapi --with-openssl --with-pam --with-ldap \
+ CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
+ make -C src/interfaces/libpq
+ make -C src/bin/pg_config
+ make -C src/include
+else
+ cd "${postgres_dir}"
+fi
+
+# Install libpq
+make -C src/interfaces/libpq install
+make -C src/bin/pg_config install
+make -C src/include install
+cd ..
+
+find ${LIBPQ_BUILD_PREFIX} -name \*.so.\* -type f -exec strip --strip-unneeded {} \;
diff --git a/tools/build/build_macos_arm64.sh b/tools/build/build_macos_arm64.sh
new file mode 100755
index 0000000..f8c2fd7
--- /dev/null
+++ b/tools/build/build_macos_arm64.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Build psycopg-binary wheel packages for Apple M1 (cpNNN-macosx_arm64)
+#
+# This script is designed to run on Scaleway Apple Silicon machines.
+#
+# The script cannot be run via sudo (installing brew fails), but it requires
+# sudo itself, so in practice it can only be run by a user able to sudo.
+
+set -euo pipefail
+set -x
+
+python_versions="3.8.10 3.9.13 3.10.5 3.11.0"
+pg_version=15
+
+# Move to the root of the project
+dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd "${dir}/../../"
+
+# Add /usr/local/bin to the path: it seems not to be there in non-interactive sessions
+if ! (echo $PATH | grep -q '/usr/local/bin'); then
+ export PATH=/usr/local/bin:$PATH
+fi
+
+# Install brew, if necessary. Otherwise just make sure it's in the path
+if [[ -x /opt/homebrew/bin/brew ]]; then
+ eval "$(/opt/homebrew/bin/brew shellenv)"
+else
+ command -v brew > /dev/null || (
+ # Not necessary: already installed
+ # xcode-select --install
+ NONINTERACTIVE=1 /bin/bash -c "$(curl -fsSL \
+ https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
+ )
+ eval "$(/opt/homebrew/bin/brew shellenv)"
+fi
+
+export PGDATA=/opt/homebrew/var/postgresql@${pg_version}
+
+# Install PostgreSQL, if necessary
+command -v pg_config > /dev/null || (
+ brew install postgresql@${pg_version}
+)
+
+# From PostgreSQL 15, the bin directory is no longer added to the PATH.
+export PATH=$(ls -d1 /opt/homebrew/Cellar/postgresql@${pg_version}/*/bin):$PATH
+
+# Make sure the server is running
+
+# Currently not working
+# brew services start postgresql@${pg_version}
+
+if ! pg_ctl status; then
+ pg_ctl -l /opt/homebrew/var/log/postgresql@${pg_version}.log start
+fi
+
+
+# Install the Python versions we want to build
+for ver3 in $python_versions; do
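+    # Drop the patch level from the version, e.g. 3.11.0 -> 3.11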
+ ver2=$(echo $ver3 | sed 's/\([^\.]*\)\(\.[^\.]*\)\(.*\)/\1\2/')
+ command -v python${ver2} > /dev/null || (
+ (cd /tmp &&
+            curl -fsSL -O \
+ https://www.python.org/ftp/python/${ver3}/python-${ver3}-macos11.pkg)
+ sudo installer -pkg /tmp/python-${ver3}-macos11.pkg -target /
+ )
+done
+
+# Create a virtualenv to work in
+if [[ ! -x .venv/bin/python ]]; then
+ python3 -m venv .venv
+fi
+
+source .venv/bin/activate
+pip install cibuildwheel
+
+# Create the psycopg_binary source package
+rm -rf psycopg_binary
+python tools/build/copy_to_binary.py
+
+# Build the binary packages
+export CIBW_PLATFORM=macos
+export CIBW_ARCHS=arm64
+export CIBW_BUILD='cp{38,39,310,311}-*'
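+# Note: the cp tags above match the python_versions installed earlier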
+export CIBW_TEST_REQUIRES="./psycopg[test] ./psycopg_pool"
+export CIBW_TEST_COMMAND="pytest {project}/tests -m 'not slow and not flakey' --color yes"
+
+export PSYCOPG_IMPL=binary
+export PSYCOPG_TEST_DSN="dbname=postgres"
+export PSYCOPG_TEST_WANT_LIBPQ_BUILD=">= ${pg_version}"
+export PSYCOPG_TEST_WANT_LIBPQ_IMPORT=">= ${pg_version}"
+
+cibuildwheel psycopg_binary
diff --git a/tools/build/ci_test.sh b/tools/build/ci_test.sh
new file mode 100755
index 0000000..d1d2ee4
--- /dev/null
+++ b/tools/build/ci_test.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Run the tests in GitHub Actions.
+#
+# Failed tests are run up to three times, to take flakey tests into account.
+# The random generator is not re-seeded between runs (--randomly-seed=last),
+# in order to repeat the same test order.
+
+set -euo pipefail
+set -x
+
+# Assemble a markers expression from the MARKERS and NOT_MARKERS env vars
+markers=""
+for m in ${MARKERS:-}; do
+ [[ "$markers" != "" ]] && markers="$markers and"
+ markers="$markers $m"
+done
+for m in ${NOT_MARKERS:-}; do
+ [[ "$markers" != "" ]] && markers="$markers and"
+ markers="$markers not $m"
+done
+
+pytest="python -bb -m pytest --color=yes"
+
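+# First attempt: run the whole selected suite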
+$pytest -m "$markers" "$@" && exit 0
+
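+# Second attempt: re-run only the failed tests, with the same random seed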
+$pytest -m "$markers" --lf --randomly-seed=last "$@" && exit 0
+
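+# Third and last attempt: its exit status is the script's exit status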
+$pytest -m "$markers" --lf --randomly-seed=last "$@"
diff --git a/tools/build/copy_to_binary.py b/tools/build/copy_to_binary.py
new file mode 100755
index 0000000..7cab25c
--- /dev/null
+++ b/tools/build/copy_to_binary.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+# Create the psycopg-binary package by renaming and patching psycopg-c
+
+import os
+import re
+import shutil
+from pathlib import Path
+from typing import Union
+
+curdir = Path(__file__).parent
+pdir = curdir / "../.."
+target = pdir / "psycopg_binary"
+
+if target.exists():
+ raise Exception(f"path {target} already exists")
+
+
+def sed_i(pattern: str, repl: str, filename: Union[str, Path]) -> None:
+ with open(filename, "rb") as f:
+ data = f.read()
+ newdata = re.sub(pattern.encode("utf8"), repl.encode("utf8"), data)
+ if newdata != data:
+ with open(filename, "wb") as f:
+ f.write(newdata)
+
+
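+# Copy the psycopg_c project, rename the package to psycopg_binary, and patch references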
+shutil.copytree(pdir / "psycopg_c", target)
+shutil.move(str(target / "psycopg_c"), str(target / "psycopg_binary"))
+shutil.move(str(target / "README-binary.rst"), str(target / "README.rst"))
+sed_i("psycopg-c", "psycopg-binary", target / "setup.cfg")
+sed_i(
+ r"__impl__\s*=.*", '__impl__ = "binary"', target / "psycopg_binary/pq.pyx"
+)
+for dirpath, dirnames, filenames in os.walk(target):
+ for filename in filenames:
+ if os.path.splitext(filename)[1] not in (".pyx", ".pxd", ".py"):
+ continue
+ sed_i(r"\bpsycopg_c\b", "psycopg_binary", Path(dirpath) / filename)
diff --git a/tools/build/print_so_versions.sh b/tools/build/print_so_versions.sh
new file mode 100755
index 0000000..a3c4ecd
--- /dev/null
+++ b/tools/build/print_so_versions.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Take a .so file as input and print the distribution packages and versions
+# of the libraries it links.
+
+set -euo pipefail
+# set -x
+
+source /etc/os-release
+
+sofile="$1"
+
+case "$ID" in
+ alpine)
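+        # ldd prints lines like "libssl.so.1.1 => /lib/libssl.so.1.1 (0x...)";
+        # keep only the file path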
+ depfiles=$( (ldd "$sofile" 2>/dev/null || true) | grep '=>' | sed 's/.*=> \(.*\) (.*)/\1/')
+ (for depfile in $depfiles; do
+ echo "$(basename "$depfile") => $(apk info --who-owns "${depfile}" | awk '{print $(NF)}')"
+ done) | sort | uniq
+ ;;
+
+ debian)
+ depfiles=$(ldd "$sofile" | grep '=>' | sed 's/.*=> \(.*\) (.*)/\1/')
+ (for depfile in $depfiles; do
+ pkgname=$(dpkg -S "${depfile}" | sed 's/\(\): .*/\1/')
+ dpkg -l "${pkgname}" | grep '^ii' | awk '{print $2 " => " $3}'
+ done) | sort | uniq
+ ;;
+
+ centos)
+ echo "TODO!"
+ ;;
+
+ *)
+ echo "$0: unexpected Linux distribution: '$ID'" >&2
+ exit 1
+ ;;
+esac
diff --git a/tools/build/run_build_macos_arm64.sh b/tools/build/run_build_macos_arm64.sh
new file mode 100755
index 0000000..f5ae617
--- /dev/null
+++ b/tools/build/run_build_macos_arm64.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# Build psycopg-binary wheel packages for Apple M1 (cpNNN-macosx_arm64)
+#
+# This script is designed to run on a local machine: it will clone the repo
+# on the remote machine, execute the `build_macos_arm64.sh` script there,
+# then download the built packages. A tag to build must be specified.
+#
+# In order to run the script, the `m1` host must be specified in
+# `~/.ssh/config`; for instance:
+#
+# Host m1
+# User m1
+# HostName 1.2.3.4
+
+set -euo pipefail
+# set -x
+
+tag=${1:-}
+
+if [[ ! "${tag}" ]]; then
+ echo "Usage: $0 TAG" >&2
+ exit 2
+fi
+
+rdir=psycobuild
+
+# Clone the repos
+ssh m1 rm -rf "${rdir}"
+ssh m1 git clone https://github.com/psycopg/psycopg.git --branch ${tag} "${rdir}"
+
+# Allow sudoing without password, to allow brew to install
+ssh -t m1 bash -c \
+ 'test -f /etc/sudoers.d/m1 || echo "m1 ALL=(ALL) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/m1'
+
+# Build the wheel packages
+ssh m1 "${rdir}/tools/build/build_macos_arm64.sh"
+
+# Transfer the packages locally
+scp -r "m1:${rdir}/wheelhouse" .
diff --git a/tools/build/strip_wheel.sh b/tools/build/strip_wheel.sh
new file mode 100755
index 0000000..bfcd302
--- /dev/null
+++ b/tools/build/strip_wheel.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Strip symbols in place from the libraries in a zip archive.
+#
+# Stripping symbols is beneficial (a reduction of about 30% of the final
+# package and more than 90% of the installed libraries). However, just running
+# `auditwheel repair --strip` breaks some of the libraries included from the
+# system, which fail at import with errors such as "ELF load command
+# address/offset not properly aligned".
+#
+# System libraries are already pretty stripped. Ours go from around 24MB to
+# 1.5MB...
+#
+# This script is designed to run on a wheel archive before auditwheel.
+
+set -euo pipefail
+# set -x
+
+source /etc/os-release
+dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+wheel=$(realpath "$1")
+shift
+
+tmpdir=$(mktemp -d)
+trap "rm -r ${tmpdir}" EXIT
+
+cd "${tmpdir}"
+python -m zipfile -e "${wheel}" .
+
+echo "
+Libs before:"
+# Busybox doesn't have "find -ls"
+find . -name \*.so | xargs ls -l
+
+# Print the packages and versions the linked libraries come from
+echo "
+Dependencies versions of '_psycopg.so' library:"
+"${dir}/print_so_versions.sh" "$(find . -name \*_psycopg\*.so)"
+
+find . -name \*.so -exec strip "$@" {} \;
+
+echo "
+Libs after:"
+find . -name \*.so | xargs ls -l
+
+python -m zipfile -c ${wheel} *
+
+cd -
diff --git a/tools/build/wheel_linux_before_all.sh b/tools/build/wheel_linux_before_all.sh
new file mode 100755
index 0000000..663e3ef
--- /dev/null
+++ b/tools/build/wheel_linux_before_all.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Configure the libraries needed to build wheel packages on linux.
+# This script is designed to be used by cibuildwheel as CIBW_BEFORE_ALL_LINUX
+
+set -euo pipefail
+set -x
+
+dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+source /etc/os-release
+
+# Install PostgreSQL development files.
+case "$ID" in
+ alpine)
+ # tzdata is required for datetime tests.
+ apk update
+ apk add --no-cache tzdata
+ "${dir}/build_libpq.sh" > /dev/null
+ ;;
+
+ debian)
+        # Note that the pgdg doesn't have an aarch64 repository, so those
+        # wheels are built with the libpq packaged with Debian 9, which is 9.6.
+ if [ "$AUDITWHEEL_ARCH" != 'aarch64' ]; then
+ echo "deb http://apt.postgresql.org/pub/repos/apt $VERSION_CODENAME-pgdg main" \
+ > /etc/apt/sources.list.d/pgdg.list
+ # TODO: On 2021-11-09 curl fails on 'ppc64le' with:
+ # curl: (60) SSL certificate problem: certificate has expired
+ # Test again later if -k can be removed.
+ curl -skf https://www.postgresql.org/media/keys/ACCC4CF8.asc \
+ > /etc/apt/trusted.gpg.d/postgresql.asc
+ fi
+
+ apt-get update
+ apt-get -y upgrade
+ apt-get -y install libpq-dev
+ ;;
+
+ centos)
+ "${dir}/build_libpq.sh" > /dev/null
+ ;;
+
+ *)
+ echo "$0: unexpected Linux distribution: '$ID'" >&2
+ exit 1
+ ;;
+esac
diff --git a/tools/build/wheel_macos_before_all.sh b/tools/build/wheel_macos_before_all.sh
new file mode 100755
index 0000000..285a063
--- /dev/null
+++ b/tools/build/wheel_macos_before_all.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# Configure the environment needed to build wheel packages on Mac OS.
+# This script is designed to be used by cibuildwheel as CIBW_BEFORE_ALL_MACOS
+
+set -euo pipefail
+set -x
+
+dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+brew update
+brew install gnu-sed postgresql@14
+# Fetch 14.1 if 14.0 is still the default version
+brew reinstall postgresql
+
+# Start the database for testing
+brew services start postgresql
+
+# Wait for postgres to come up
+for i in $(seq 10 -1 0); do
+ eval pg_isready && break
+ if [ $i == 0 ]; then
+ echo "PostgreSQL service not ready, giving up"
+ exit 1
+ fi
+ echo "PostgreSQL service not ready, waiting a bit, attempts left: $i"
+ sleep 5
+done
diff --git a/tools/build/wheel_win32_before_build.bat b/tools/build/wheel_win32_before_build.bat
new file mode 100644
index 0000000..fd35f5d
--- /dev/null
+++ b/tools/build/wheel_win32_before_build.bat
@@ -0,0 +1,3 @@
+@echo on
+pip install delvewheel
+choco upgrade postgresql
diff --git a/tools/bump_version.py b/tools/bump_version.py
new file mode 100755
index 0000000..50dbe0b
--- /dev/null
+++ b/tools/bump_version.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+"""Bump the version number of the project.
+"""
+
+from __future__ import annotations
+
+import re
+import sys
+import logging
+import subprocess as sp
+from enum import Enum
+from pathlib import Path
+from argparse import ArgumentParser, Namespace
+from functools import cached_property
+from dataclasses import dataclass
+
+from packaging.version import parse as parse_version, Version
+
+PROJECT_DIR = Path(__file__).parent.parent
+
+logger = logging.getLogger()
+logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
+
+
+@dataclass
+class Package:
+ name: str
+ version_files: list[Path]
+ history_file: Path
+ tag_format: str
+
+ def __post_init__(self) -> None:
+ packages[self.name] = self
+
+
+packages: dict[str, Package] = {}
+
+Package(
+ name="psycopg",
+ version_files=[
+ PROJECT_DIR / "psycopg/psycopg/version.py",
+ PROJECT_DIR / "psycopg_c/psycopg_c/version.py",
+ ],
+ history_file=PROJECT_DIR / "docs/news.rst",
+ tag_format="{version}",
+)
+
+Package(
+ name="psycopg_pool",
+ version_files=[PROJECT_DIR / "psycopg_pool/psycopg_pool/version.py"],
+ history_file=PROJECT_DIR / "docs/news_pool.rst",
+ tag_format="pool-{version}",
+)
+
+
+class Bumper:
+ def __init__(self, package: Package, *, bump_level: str | BumpLevel):
+ self.package = package
+ self.bump_level = BumpLevel(bump_level)
+
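+        # Matches a line such as: __version__ = "3.1.7"  # optional comment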
+ self._version_regex = re.compile(
+ r"""(?ix)
+ ^
+ (?P<pre>__version__\s*=\s*(?P<quote>["']))
+ (?P<ver>[^'"]+)
+ (?P<post>(?P=quote)\s*(?:\#.*)?)
+ $
+ """
+ )
+
+ @cached_property
+ def current_version(self) -> Version:
+ versions = set(
+ self._parse_version_from_file(f) for f in self.package.version_files
+ )
+ if len(versions) > 1:
+ raise ValueError(
+ f"inconsistent versions ({', '.join(map(str, sorted(versions)))})"
+ f" in {self.package.version_files}"
+ )
+
+ return versions.pop()
+
+ @cached_property
+ def want_version(self) -> Version:
+ current = self.current_version
+ parts = [current.major, current.minor, current.micro, current.dev or 0]
+
+ match self.bump_level:
+ case BumpLevel.MAJOR:
+ # 1.2.3 -> 2.0.0
+ parts[0] += 1
+ parts[1] = parts[2] = parts[3] = 0
+ case BumpLevel.MINOR:
+ # 1.2.3 -> 1.3.0
+ parts[1] += 1
+ parts[2] = parts[3] = 0
+ case BumpLevel.PATCH:
+ # 1.2.3 -> 1.2.4
+ # 1.2.3.dev4 -> 1.2.3
+ if parts[3] == 0:
+ parts[2] += 1
+ else:
+ parts[3] = 0
+ case BumpLevel.DEV:
+ # 1.2.3 -> 1.2.4.dev1
+ # 1.2.3.dev1 -> 1.2.3.dev2
+ if parts[3] == 0:
+ parts[2] += 1
+ parts[3] += 1
+
+ sparts = [str(part) for part in parts[:3]]
+ if parts[3]:
+ sparts.append(f"dev{parts[3]}")
+ return Version(".".join(sparts))
+
+ def update_files(self) -> None:
+ for f in self.package.version_files:
+ self._update_version_in_file(f, self.want_version)
+
+ if self.bump_level != BumpLevel.DEV:
+ self._update_history_file(self.package.history_file, self.want_version)
+
+ def commit(self) -> None:
+ logger.debug("committing version changes")
+ msg = f"""\
+chore: bump {self.package.name} package version to {self.want_version}
+"""
+ files = self.package.version_files + [self.package.history_file]
+ cmdline = ["git", "commit", "-m", msg] + list(map(str, files))
+ sp.check_call(cmdline)
+
+ def create_tag(self) -> None:
+ logger.debug("tagging version %s", self.want_version)
+ tag_name = self.package.tag_format.format(version=self.want_version)
+ changes = self._get_changes_lines(
+ self.package.history_file,
+ self.want_version,
+ )
+ msg = f"""\
+{self.package.name} {self.want_version} released
+
+{''.join(changes)}
+"""
+ cmdline = ["git", "tag", "-a", "-s", "-m", msg, tag_name]
+ sp.check_call(cmdline)
+
+ def _parse_version_from_file(self, fp: Path) -> Version:
+ logger.debug("looking for version in %s", fp)
+ matches = []
+ with fp.open() as f:
+ for line in f:
+ m = self._version_regex.match(line)
+ if m:
+ matches.append(m)
+
+ if not matches:
+ raise ValueError(f"no version found in {fp}")
+ elif len(matches) > 1:
+ raise ValueError(f"more than one version found in {fp}")
+
+ vs = parse_version(matches[0].group("ver"))
+ assert isinstance(vs, Version)
+ return vs
+
+ def _update_version_in_file(self, fp: Path, version: Version) -> None:
+ logger.debug("upgrading version to %s in %s", version, fp)
+ lines = []
+ with fp.open() as f:
+ for line in f:
+ if self._version_regex.match(line):
+ line = self._version_regex.sub(f"\\g<pre>{version}\\g<post>", line)
+ lines.append(line)
+
+ with fp.open("w") as f:
+ for line in lines:
+ f.write(line)
+
+ def _update_history_file(self, fp: Path, version: Version) -> None:
+ logger.debug("upgrading history file %s", fp)
+ with fp.open() as f:
+ lines = f.readlines()
+
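+        # Look for the header of the version being released (e.g.
+        # "Psycopg 3.1.7 (unreleased)"), drop the "(unreleased)" marker
+        # and adjust the underline to the new length.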
+ vln: int = -1
+ lns = self._find_lines(
+ r"^[^\s]+ " + re.escape(str(version)) + r"\s*\(unreleased\)?$", lines
+ )
+ assert len(lns) <= 1
+ if len(lns) == 1:
+ vln = lns[0]
+ lines[vln] = lines[vln].rsplit(None, 1)[0]
+ lines[vln + 1] = lines[vln + 1][0] * len(lines[lns[0]])
+
+ lns = self._find_lines("^Future", lines)
+ assert len(lns) <= 1
+ if len(lns) == 1:
+ del lines[lns[0] : lns[0] + 3]
+ if vln > lns[0]:
+ vln -= 3
+
+ lns = self._find_lines("^Current", lines)
+ assert len(lns) <= 1
+ if len(lns) == 1 and vln >= 0:
+ clines = lines[lns[0] : lns[0] + 3]
+ del lines[lns[0] : lns[0] + 3]
+ if vln > lns[0]:
+ vln -= 3
+ lines[vln:vln] = clines
+
+ with fp.open("w") as f:
+ for line in lines:
+ f.write(line)
+ if not line.endswith("\n"):
+ f.write("\n")
+
+ def _get_changes_lines(self, fp: Path, version: Version) -> list[str]:
+ with fp.open() as f:
+ lines = f.readlines()
+
+ lns = self._find_lines(r"^[^\s]+ " + re.escape(str(version)), lines)
+ assert len(lns) == 1
+ start = end = lns[0] + 3
+ while lines[end].rstrip():
+ end += 1
+
+ return lines[start:end]
+
+ def _find_lines(self, pattern: str, lines: list[str]) -> list[int]:
+ rv = []
+ rex = re.compile(pattern)
+ for i, line in enumerate(lines):
+ if rex.match(line):
+ rv.append(i)
+
+ return rv
+
+
+def main() -> int | None:
+ opt = parse_cmdline()
+ logger.setLevel(opt.loglevel)
+ bumper = Bumper(packages[opt.package], bump_level=opt.level)
+ logger.info("current version: %s", bumper.current_version)
+ logger.info("bumping to version: %s", bumper.want_version)
+ if not opt.dry_run:
+ bumper.update_files()
+ bumper.commit()
+ if opt.level != BumpLevel.DEV:
+ bumper.create_tag()
+
+ return 0
+
+
+class BumpLevel(str, Enum):
+ MAJOR = "major"
+ MINOR = "minor"
+ PATCH = "patch"
+ DEV = "dev"
+
+
+def parse_cmdline() -> Namespace:
+ parser = ArgumentParser(description=__doc__)
+
+ parser.add_argument(
+ "--level",
+ choices=[level.value for level in BumpLevel],
+ default=BumpLevel.PATCH.value,
+ type=BumpLevel,
+ help="the level to bump [default: %(default)s]",
+ )
+
+ parser.add_argument(
+ "--package",
+ choices=list(packages.keys()),
+ default="psycopg",
+ help="the package to bump version [default: %(default)s]",
+ )
+
+ parser.add_argument(
+ "-n",
+ "--dry-run",
+ help="Just pretend",
+ action="store_true",
+ )
+
+ g = parser.add_mutually_exclusive_group()
+ g.add_argument(
+ "-q",
+ "--quiet",
+ help="Talk less",
+ dest="loglevel",
+ action="store_const",
+ const=logging.WARN,
+ default=logging.INFO,
+ )
+ g.add_argument(
+ "-v",
+ "--verbose",
+ help="Talk more",
+ dest="loglevel",
+ action="store_const",
+ const=logging.DEBUG,
+ default=logging.INFO,
+ )
+ opt = parser.parse_args()
+
+ return opt
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/update_backer.py b/tools/update_backer.py
new file mode 100755
index 0000000..0088527
--- /dev/null
+++ b/tools/update_backer.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+r"""Add or edit github users in the backers file
+"""
+
+import sys
+import logging
+import requests
+from pathlib import Path
+from ruamel.yaml import YAML # pip install ruamel.yaml
+
+logger = logging.getLogger()
+logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
+
+
+def fetch_user(username):
+ logger.info("fetching %s", username)
+ resp = requests.get(
+ f"https://api.github.com/users/{username}",
+ headers={"Accept": "application/vnd.github.v3+json"},
+ )
+ resp.raise_for_status()
+ return resp.json()
+
+
+def get_user_data(data):
+ """
+ Get the data to save from the request data
+ """
+ out = {
+ "username": data["login"],
+ "avatar": data["avatar_url"],
+ "name": data["name"],
+ }
+ if data["blog"]:
+ website = data["blog"]
+ if not website.startswith("http"):
+ website = "http://" + website
+
+ out["website"] = website
+
+ return out
+
+
+def add_entry(opt, filedata, username):
+ userdata = get_user_data(fetch_user(username))
+ if opt.top:
+ userdata["tier"] = "top"
+
+ filedata.append(userdata)
+
+
+def update_entry(opt, filedata, entry):
+    # entry is either a username or a user entry dict
+ if isinstance(entry, str):
+ username = entry
+ entry = [e for e in filedata if e["username"] == username]
+ if not entry:
+ raise Exception(f"{username} not found")
+ entry = entry[0]
+ else:
+ username = entry["username"]
+
+ userdata = get_user_data(fetch_user(username))
+ for k, v in userdata.items():
+ if entry.get("keep_" + k):
+ continue
+ entry[k] = v
+
+
+def main():
+ opt = parse_cmdline()
+ logger.info("reading %s", opt.file)
+ yaml = YAML(typ="rt")
+ filedata = yaml.load(opt.file)
+
+ for username in opt.add or ():
+ add_entry(opt, filedata, username)
+
+ for username in opt.update or ():
+ update_entry(opt, filedata, username)
+
+ if opt.update_all:
+ for entry in filedata:
+ update_entry(opt, filedata, entry)
+
+    # Keep yamllint happy
+ yaml.explicit_start = True
+ logger.info("writing %s", opt.file)
+ yaml.dump(filedata, opt.file)
+
+
+def parse_cmdline():
+ from argparse import ArgumentParser
+
+ parser = ArgumentParser(description=__doc__)
+ parser.add_argument(
+ "--file",
+ help="the file to update [default: %(default)s]",
+ default=Path(__file__).parent.parent / "BACKERS.yaml",
+ type=Path,
+ )
+ parser.add_argument(
+ "--add",
+ metavar="USERNAME",
+ nargs="+",
+ help="add USERNAME to the backers",
+ )
+
+ parser.add_argument(
+ "--top",
+ action="store_true",
+ help="add to the top tier",
+ )
+
+ parser.add_argument(
+ "--update",
+ metavar="USERNAME",
+ nargs="+",
+ help="update USERNAME data",
+ )
+
+ parser.add_argument(
+ "--update-all",
+ action="store_true",
+ help="update all the existing backers data",
+ )
+
+ opt = parser.parse_args()
+
+ return opt
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/update_errors.py b/tools/update_errors.py
new file mode 100755
index 0000000..638d352
--- /dev/null
+++ b/tools/update_errors.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# type: ignore
+"""
+Generate per-sqlstate errors from PostgreSQL source code.
+
+The script can be run at a new PostgreSQL release to refresh the module.
+"""
+
+# Copyright (C) 2020 The Psycopg Team
+
+import os
+import re
+import sys
+import logging
+from urllib.request import urlopen
+from collections import defaultdict, namedtuple
+
+from psycopg.errors import get_base_exception
+
+logger = logging.getLogger()
+logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
+
+
+def main():
+ classes, errors = fetch_errors("9.6 10 11 12 13 14 15".split())
+
+ fn = os.path.dirname(__file__) + "/../psycopg/psycopg/errors.py"
+ update_file(fn, generate_module_data(classes, errors))
+
+ fn = os.path.dirname(__file__) + "/../docs/api/errors.rst"
+ update_file(fn, generate_docs_data(classes, errors))
+
+
+def parse_errors_txt(url):
+ classes = {}
+ errors = defaultdict(dict)
+
+ page = urlopen(url)
+ for line in page.read().decode("ascii").splitlines():
+ # Strip comments and skip blanks
+ line = line.split("#")[0].strip()
+ if not line:
+ continue
+
+ # Parse a section
+ m = re.match(r"Section: (Class (..) - .+)", line)
+ if m:
+ label, class_ = m.groups()
+ classes[class_] = label
+ continue
+
+ # Parse an error
+ m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
+ if m:
+ sqlstate, macro, spec = m.groups()
+ # skip sqlstates without specs as they are not publicly visible
+ if not spec:
+ continue
+ errlabel = spec.upper()
+ errors[class_][sqlstate] = errlabel
+ continue
+
+ # We don't expect anything else
+ raise ValueError("unexpected line:\n%s" % line)
+
+ return classes, errors
+
+
+errors_txt_url = (
+ "http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob_plain;"
+ "f=src/backend/utils/errcodes.txt;hb=%s"
+)
+
+
+Error = namedtuple("Error", "sqlstate errlabel clsname basename")
+
+
+def fetch_errors(versions):
+ classes = {}
+ errors = defaultdict(dict)
+
+ for version in versions:
+ logger.info("fetching errors from version %s", version)
+ tver = tuple(map(int, version.split()[0].split(".")))
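+        # e.g. "9.6" -> REL9_6_STABLE, "15" -> REL_15_STABLE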
+ tag = "%s%s_STABLE" % (
+ (tver[0] >= 10 and "REL_" or "REL"),
+ version.replace(".", "_"),
+ )
+ c1, e1 = parse_errors_txt(errors_txt_url % tag)
+ classes.update(c1)
+
+ for c, cerrs in e1.items():
+ errors[c].update(cerrs)
+
+ # clean up data
+
+ # success and warning - never raised
+ del classes["00"]
+ del classes["01"]
+ del errors["00"]
+ del errors["01"]
+
+ specific = {
+ "38002": "ModifyingSqlDataNotPermittedExt",
+ "38003": "ProhibitedSqlStatementAttemptedExt",
+ "38004": "ReadingSqlDataNotPermittedExt",
+ "39004": "NullValueNotAllowedExt",
+ "XX000": "InternalError_",
+ }
+
+ seen = set(
+ """
+ Error Warning InterfaceError DataError DatabaseError ProgrammingError
+ IntegrityError InternalError NotSupportedError OperationalError
+ """.split()
+ )
+
+ for c, cerrs in errors.items():
+ for sqstate, errlabel in list(cerrs.items()):
+ if sqstate in specific:
+ clsname = specific[sqstate]
+ else:
+ clsname = errlabel.title().replace("_", "")
+ if clsname in seen:
+ raise Exception("class already existing: %s" % clsname)
+ seen.add(clsname)
+
+ basename = get_base_exception(sqstate).__name__
+ cerrs[sqstate] = Error(sqstate, errlabel, clsname, basename)
+
+ return classes, errors
+
+
+def generate_module_data(classes, errors):
+ yield ""
+
+ for clscode, clslabel in sorted(classes.items()):
+ yield f"""
+# {clslabel}
+"""
+ for _, e in sorted(errors[clscode].items()):
+ yield f"""\
+class {e.clsname}({e.basename},
+ code={e.sqlstate!r}, name={e.errlabel!r}):
+ pass
+"""
+ yield ""
+
+
+def generate_docs_data(classes, errors):
+    Line = namedtuple("Line", "colstate colexc colbase sqlstate")
+ lines = [Line("SQLSTATE", "Exception", "Base exception", None)]
+
+ for clscode in sorted(classes):
+ for _, error in sorted(errors[clscode].items()):
+ lines.append(
+ Line(
+ f"``{error.sqlstate}``",
+ f"`!{error.clsname}`",
+ f"`!{error.basename}`",
+ error.sqlstate,
+ )
+ )
+
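+    # Compute each column width over all the rows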
+ widths = [max(len(line[c]) for line in lines) for c in range(3)]
+ h = Line(*(["=" * w for w in widths] + [None]))
+ lines.insert(0, h)
+ lines.insert(2, h)
+ lines.append(h)
+
+ h1 = "-" * (sum(widths) + len(widths) - 1)
+ sqlclass = None
+
+ yield ""
+ for line in lines:
+ cls = line.sqlstate[:2] if line.sqlstate else None
+ if cls and cls != sqlclass:
+ yield re.sub(r"(Class\s+[^\s]+)", r"**\1**", classes[cls])
+ yield h1
+ sqlclass = cls
+
+ yield (
+ "%-*s %-*s %-*s"
+ % (
+ widths[0],
+ line.colstate,
+ widths[1],
+ line.colexc,
+ widths[2],
+ line.colbase,
+ )
+ ).rstrip()
+
+ yield ""
+
+
+def update_file(fn, new_lines):
+ logger.info("updating %s", fn)
+
+ with open(fn, "r") as f:
+ lines = f.read().splitlines()
+
+ istart, iend = [
+ i
+ for i, line in enumerate(lines)
+ if re.match(r"\s*(#|\.\.)\s*autogenerated:\s+(start|end)", line)
+ ]
+
+ lines[istart + 1 : iend] = new_lines
+
+ with open(fn, "w") as f:
+ for line in lines:
+ f.write(line + "\n")
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/update_oids.py b/tools/update_oids.py
new file mode 100755
index 0000000..df4f969
--- /dev/null
+++ b/tools/update_oids.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+"""
+Update the maps of builtin types and names.
+
+This script updates some of the files in psycopg source code with data read
+from a database catalog.
+
+Hint: use docker to update the types from a new version in isolation. Run:
+
+ docker run --rm -p 11111:5432 --name pg -e POSTGRES_PASSWORD=password postgres:TAG
+
+with a specified version tag, and then query it using:
+
+ %(prog)s "host=localhost port=11111 user=postgres password=password"
+"""
+
+import re
+import argparse
+import subprocess as sp
+from typing import List
+from pathlib import Path
+from typing_extensions import TypeAlias
+
+import psycopg
+from psycopg.rows import TupleRow
+from psycopg.crdb import CrdbConnection
+
+Connection: TypeAlias = psycopg.Connection[TupleRow]
+
+ROOT = Path(__file__).parent.parent
+
+
+def main() -> None:
+ opt = parse_cmdline()
+ conn = psycopg.connect(opt.dsn, autocommit=True)
+
+ if CrdbConnection.is_crdb(conn):
+ conn = CrdbConnection.connect(opt.dsn, autocommit=True)
+ update_crdb_python_oids(conn)
+ else:
+ update_python_oids(conn)
+ update_cython_oids(conn)
+
+
+def update_python_oids(conn: Connection) -> None:
+ fn = ROOT / "psycopg/psycopg/postgres.py"
+
+ lines = []
+ lines.extend(get_version_comment(conn))
+ lines.extend(get_py_types(conn))
+ lines.extend(get_py_ranges(conn))
+ lines.extend(get_py_multiranges(conn))
+
+ update_file(fn, lines)
+ sp.check_call(["black", "-q", fn])
+
+
+def update_cython_oids(conn: Connection) -> None:
+ fn = ROOT / "psycopg_c/psycopg_c/_psycopg/oids.pxd"
+
+ lines = []
+ lines.extend(get_version_comment(conn))
+ lines.extend(get_cython_oids(conn))
+
+ update_file(fn, lines)
+
+
+def update_crdb_python_oids(conn: Connection) -> None:
+ fn = ROOT / "psycopg/psycopg/crdb/_types.py"
+
+ lines = []
+ lines.extend(get_version_comment(conn))
+ lines.extend(get_py_types(conn))
+
+ update_file(fn, lines)
+ sp.check_call(["black", "-q", fn])
+
+
+def get_version_comment(conn: Connection) -> List[str]:
+ if conn.info.vendor == "PostgreSQL":
+        # Assume PG >= 10
+ num = conn.info.server_version
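+        # e.g. 150002 -> "15.2"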
+ version = f"{num // 10000}.{num % 100}"
+ elif conn.info.vendor == "CockroachDB":
+ assert isinstance(conn, CrdbConnection)
+ num = conn.info.server_version
+ version = f"{num // 10000}.{num % 10000 // 100}.{num % 100}"
+ else:
+ raise NotImplementedError(f"unexpected vendor: {conn.info.vendor}")
+ return ["", f" # Generated from {conn.info.vendor} {version}", ""]
+
+
+def get_py_types(conn: Connection) -> List[str]:
+ # Note: "record" is a pseudotype but still a useful one to have.
+ # "pg_lsn" is a documented public type and useful in streaming replication
+ lines = []
+ for (typname, oid, typarray, regtype, typdelim) in conn.execute(
+ """
+select typname, oid, typarray,
+ -- CRDB might have quotes in the regtype representation
+ replace(typname::regtype::text, '''', '') as regtype,
+ typdelim
+from pg_type t
+where
+ oid < 10000
+ and oid != '"char"'::regtype
+ and (typtype = 'b' or typname = 'record')
+ and (typname !~ '^(_|pg_)' or typname = 'pg_lsn')
+order by typname
+"""
+ ):
+ # Weird legacy type in postgres catalog
+ if typname == "char":
+ typname = regtype = '"char"'
+
+ # https://github.com/cockroachdb/cockroach/issues/81645
+ if typname == "int4" and conn.info.vendor == "CockroachDB":
+ regtype = typname
+
+ params = [f"{typname!r}, {oid}, {typarray}"]
+ if regtype != typname:
+ params.append(f"regtype={regtype!r}")
+ if typdelim != ",":
+ params.append(f"delimiter={typdelim!r}")
+ lines.append(f"TypeInfo({','.join(params)}),")
+
+ return lines
+
+
+def get_py_ranges(conn: Connection) -> List[str]:
+ lines = []
+ for (typname, oid, typarray, rngsubtype) in conn.execute(
+ """
+select typname, oid, typarray, rngsubtype
+from
+ pg_type t
+ join pg_range r on t.oid = rngtypid
+where
+ oid < 10000
+ and typtype = 'r'
+order by typname
+"""
+ ):
+ params = [f"{typname!r}, {oid}, {typarray}, subtype_oid={rngsubtype}"]
+ lines.append(f"RangeInfo({','.join(params)}),")
+
+ return lines
+
+
+def get_py_multiranges(conn: Connection) -> List[str]:
+ lines = []
+ for (typname, oid, typarray, rngtypid, rngsubtype) in conn.execute(
+ """
+select typname, oid, typarray, rngtypid, rngsubtype
+from
+ pg_type t
+ join pg_range r on t.oid = rngmultitypid
+where
+ oid < 10000
+ and typtype = 'm'
+order by typname
+"""
+ ):
+ params = [
+ f"{typname!r}, {oid}, {typarray},"
+ f" range_oid={rngtypid}, subtype_oid={rngsubtype}"
+ ]
+ lines.append(f"MultirangeInfo({','.join(params)}),")
+
+ return lines
+
+
+def get_cython_oids(conn: Connection) -> List[str]:
+ lines = []
+ for (typname, oid) in conn.execute(
+ """
+select typname, oid
+from pg_type
+where
+ oid < 10000
+ and (typtype = any('{b,r,m}') or typname = 'record')
+ and (typname !~ '^(_|pg_)' or typname = 'pg_lsn')
+order by typname
+"""
+ ):
+ const_name = typname.upper() + "_OID"
+ lines.append(f" {const_name} = {oid}")
+
+ return lines
+
+
+def update_file(fn: Path, new: List[str]) -> None:
+ with fn.open("r") as f:
+ lines = f.read().splitlines()
+ istart, iend = [
+ i
+ for i, line in enumerate(lines)
+ if re.match(r"\s*#\s*autogenerated:\s+(start|end)", line)
+ ]
+ lines[istart + 1 : iend] = new
+
+ with fn.open("w") as f:
+ f.write("\n".join(lines))
+ f.write("\n")
+
+
+def parse_cmdline() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+ parser.add_argument("dsn", help="where to connect to")
+ opt = parser.parse_args()
+ return opt
+
+
+if __name__ == "__main__":
+ main()