summaryrefslogtreecommitdiffstats
path: root/src/arrow/dev/release
diff options
context:
space:
mode:
Diffstat (limited to 'src/arrow/dev/release')
-rw-r--r--src/arrow/dev/release/.env.example32
-rw-r--r--src/arrow/dev/release/.gitignore21
-rw-r--r--src/arrow/dev/release/01-prepare-test.rb586
-rwxr-xr-xsrc/arrow/dev/release/01-prepare.sh103
-rw-r--r--src/arrow/dev/release/02-source-test.rb148
-rwxr-xr-xsrc/arrow/dev/release/02-source.sh164
-rwxr-xr-xsrc/arrow/dev/release/03-binary-submit.sh45
-rwxr-xr-xsrc/arrow/dev/release/04-binary-download.sh39
-rwxr-xr-xsrc/arrow/dev/release/05-binary-upload.sh122
-rw-r--r--src/arrow/dev/release/README.md24
-rw-r--r--src/arrow/dev/release/Rakefile37
-rw-r--r--src/arrow/dev/release/VERIFY.md76
-rw-r--r--src/arrow/dev/release/binary-task.rb1910
-rw-r--r--src/arrow/dev/release/binary/.dockerignore18
-rw-r--r--src/arrow/dev/release/binary/Dockerfile70
-rwxr-xr-xsrc/arrow/dev/release/binary/runner.sh36
-rw-r--r--src/arrow/dev/release/check-rat-report.py59
-rwxr-xr-xsrc/arrow/dev/release/download_rc_binaries.py184
-rwxr-xr-xsrc/arrow/dev/release/post-01-upload.sh71
-rwxr-xr-xsrc/arrow/dev/release/post-02-binary.sh101
-rwxr-xr-xsrc/arrow/dev/release/post-03-website.sh266
-rwxr-xr-xsrc/arrow/dev/release/post-04-ruby.sh92
-rwxr-xr-xsrc/arrow/dev/release/post-05-js.sh48
-rwxr-xr-xsrc/arrow/dev/release/post-06-csharp.sh60
-rwxr-xr-xsrc/arrow/dev/release/post-08-remove-rc.sh50
-rwxr-xr-xsrc/arrow/dev/release/post-09-docs.sh67
-rwxr-xr-xsrc/arrow/dev/release/post-10-python.sh52
-rwxr-xr-xsrc/arrow/dev/release/post-11-java.sh81
-rwxr-xr-xsrc/arrow/dev/release/post-12-bump-versions.sh79
-rw-r--r--src/arrow/dev/release/post-13-go.sh34
-rw-r--r--src/arrow/dev/release/rat_exclude_files.txt208
-rwxr-xr-xsrc/arrow/dev/release/run-rat.sh43
-rwxr-xr-xsrc/arrow/dev/release/run-test.rb31
-rw-r--r--src/arrow/dev/release/setup-gpg-agent.sh24
-rw-r--r--src/arrow/dev/release/test-helper.rb96
-rw-r--r--src/arrow/dev/release/utils-binary.sh86
-rw-r--r--src/arrow/dev/release/utils-prepare.sh145
-rwxr-xr-xsrc/arrow/dev/release/verify-apt.sh194
-rw-r--r--src/arrow/dev/release/verify-release-candidate-wheels.bat107
-rw-r--r--src/arrow/dev/release/verify-release-candidate.bat130
-rwxr-xr-xsrc/arrow/dev/release/verify-release-candidate.sh817
-rwxr-xr-xsrc/arrow/dev/release/verify-yum.sh204
42 files changed, 6760 insertions, 0 deletions
diff --git a/src/arrow/dev/release/.env.example b/src/arrow/dev/release/.env.example
new file mode 100644
index 000000000..50c8ec8e6
--- /dev/null
+++ b/src/arrow/dev/release/.env.example
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# The GPG key ID to sign artifacts. The GPG key ID must be registered
+# to both of the following:
+#
+# * https://dist.apache.org/repos/dist/dev/arrow/KEYS
+# * https://dist.apache.org/repos/dist/release/arrow/KEYS
+#
+# See these files for how to import your GPG key ID.
+#
+# You must set this.
+#GPG_KEY_ID=08D3564B7C6A9CAFBFF6A66791D18FCF079F8007
+
+# The Artifactory API key to upload artifacts to Artifactory.
+#
+# You must set this.
+#ARTIFACTORY_API_KEY=secret
diff --git a/src/arrow/dev/release/.gitignore b/src/arrow/dev/release/.gitignore
new file mode 100644
index 000000000..f3d708a6a
--- /dev/null
+++ b/src/arrow/dev/release/.gitignore
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+/.env
+/binary/id_rsa
+/binary/id_rsa.pub
+/binary/tmp/
diff --git a/src/arrow/dev/release/01-prepare-test.rb b/src/arrow/dev/release/01-prepare-test.rb
new file mode 100644
index 000000000..51665ec02
--- /dev/null
+++ b/src/arrow/dev/release/01-prepare-test.rb
@@ -0,0 +1,586 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+class PrepareTest < Test::Unit::TestCase
+ include GitRunnable
+ include VersionDetectable
+
+ def setup
+ @current_commit = git_current_commit
+ detect_versions
+
+ top_dir = Pathname(__dir__).parent.parent
+ @original_git_repository = top_dir + ".git"
+ Dir.mktmpdir do |dir|
+ @test_git_repository = Pathname(dir) + "arrow"
+ git("clone", @original_git_repository.to_s, @test_git_repository.to_s)
+ Dir.chdir(@test_git_repository) do
+ @tag_name = "apache-arrow-#{@release_version}"
+ @release_branch = "testing-release-#{@release_version}-rc0"
+ git("checkout", "-b", @release_branch, @current_commit)
+ yield
+ end
+ FileUtils.rm_rf(@test_git_repository)
+ end
+ end
+
+ def omit_on_release_branch
+ omit("Not for release branch") if on_release_branch?
+ end
+
+ def prepare(*targets)
+ if targets.last.is_a?(Hash)
+ additional_env = targets.pop
+ else
+ additional_env = {}
+ end
+ env = { "PREPARE_DEFAULT" => "0" }
+ targets.each do |target|
+ env["PREPARE_#{target}"] = "1"
+ end
+ env = env.merge(additional_env)
+ sh(env, "dev/release/01-prepare.sh", @release_version, @next_version, "0")
+ end
+
+ def bump_versions(*targets)
+ env = { "BUMP_DEFAULT" => "0" }
+ targets.each do |target|
+ env["BUMP_#{target}"] = "1"
+ end
+ sh(env, "dev/release/post-12-bump-versions.sh", @release_version,
+ @next_version)
+ end
+
+ def parse_patch(patch)
+ diffs = []
+ in_hunk = false
+ patch.each_line do |line|
+ case line
+ when /\A--- a\//
+ path = $POSTMATCH.chomp
+ diffs << { path: path, hunks: [] }
+ in_hunk = false
+ when /\A@@/
+ in_hunk = true
+ diffs.last[:hunks] << []
+ when /\A[-+]/
+ next unless in_hunk
+ diffs.last[:hunks].last << line.chomp
+ end
+ end
+ diffs.sort_by do |diff|
+ diff[:path]
+ end
+ end
+
+ def test_linux_packages
+ user = "Arrow Developers"
+ email = "dev@arrow.apache.org"
+ prepare("LINUX_PACKAGES", "DEBFULLNAME" => user, "DEBEMAIL" => email)
+ changes = parse_patch(git("log", "-n", "1", "-p"))
+ sampled_changes = changes.collect do |change|
+ {
+ path: change[:path],
+ sampled_hunks: change[:hunks].collect(&:first),
+ }
+ end
+ base_dir = "dev/tasks/linux-packages"
+ today = Time.now.utc.strftime("%a %b %d %Y")
+ expected_changes = [
+ {
+ path: "#{base_dir}/apache-arrow-apt-source/debian/changelog",
+ sampled_hunks: [
+ "+apache-arrow-apt-source (#{@release_version}-1) " +
+ "unstable; urgency=low",
+ ],
+ },
+ {
+ path: "#{base_dir}/apache-arrow-release/yum/apache-arrow-release.spec.in",
+ sampled_hunks: [
+ "+* #{today} #{user} <#{email}> - #{@release_version}-1",
+ ],
+ },
+ {
+ path: "#{base_dir}/apache-arrow/debian/changelog",
+ sampled_hunks: [
+ "+apache-arrow (#{@release_version}-1) unstable; urgency=low",
+ ],
+ },
+ {
+ path: "#{base_dir}/apache-arrow/yum/arrow.spec.in",
+ sampled_hunks: [
+ "+* #{today} #{user} <#{email}> - #{@release_version}-1",
+ ],
+ },
+ ]
+ assert_equal(expected_changes, sampled_changes)
+ end
+
+ def test_version_pre_tag
+ omit_on_release_branch
+ prepare("VERSION_PRE_TAG")
+ assert_equal([
+ {
+ path: "c_glib/meson.build",
+ hunks: [
+ ["-version = '#{@snapshot_version}'",
+ "+version = '#{@release_version}'"],
+ ],
+ },
+ {
+ path: "ci/scripts/PKGBUILD",
+ hunks: [
+ ["-pkgver=#{@previous_version}.9000",
+ "+pkgver=#{@release_version}"],
+ ],
+ },
+ {
+ path: "cpp/CMakeLists.txt",
+ hunks: [
+ ["-set(ARROW_VERSION \"#{@snapshot_version}\")",
+ "+set(ARROW_VERSION \"#{@release_version}\")"],
+ ],
+ },
+ {
+ path: "cpp/vcpkg.json",
+ hunks: [
+ ["- \"version-string\": \"#{@snapshot_version}\",",
+ "+ \"version-string\": \"#{@release_version}\","],
+ ],
+ },
+ {
+ path: "csharp/Directory.Build.props",
+ hunks: [
+ ["- <Version>#{@snapshot_version}</Version>",
+ "+ <Version>#{@release_version}</Version>"],
+ ],
+ },
+ {
+ path: "dev/tasks/homebrew-formulae/apache-arrow.rb",
+ hunks: [
+ ["- url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@snapshot_version}/apache-arrow-#{@snapshot_version}.tar.gz\"",
+ "+ url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@release_version}/apache-arrow-#{@release_version}.tar.gz\""],
+ ],
+ },
+ {
+ path: "dev/tasks/homebrew-formulae/autobrew/apache-arrow.rb",
+ hunks: [
+ ["- url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@previous_version}.9000/apache-arrow-#{@previous_version}.9000.tar.gz\"",
+ "+ url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@release_version}/apache-arrow-#{@release_version}.tar.gz\""],
+ ],
+ },
+ {
+ path: "java/adapter/avro/pom.xml",
+ hunks: [
+ ["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"],
+ ],
+ },
+ {
+ hunks: [
+ ["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"],
+ ],
+ path: "java/adapter/jdbc/pom.xml",
+ },
+ {
+ hunks: [
+ ["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"],
+ ],
+ path: "java/adapter/orc/pom.xml",
+ },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/algorithm/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/c/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/compression/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/dataset/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/flight/flight-core/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/flight/flight-grpc/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>", "+ <version>#{@release_version}</version>"]],
+ path: "java/format/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/gandiva/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/memory/memory-core/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/memory/memory-netty/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/memory/memory-unsafe/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/memory/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"],
+ ["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/performance/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/plasma/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>", "+ <version>#{@release_version}</version>"]],
+ path: "java/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/tools/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@release_version}</version>"]],
+ path: "java/vector/pom.xml" },
+ {
+ path: "js/package.json",
+ hunks: [
+ ["- \"version\": \"#{@snapshot_version}\"",
+ "+ \"version\": \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "matlab/CMakeLists.txt",
+ hunks: [
+ ["-set(MLARROW_VERSION \"#{@snapshot_version}\")",
+ "+set(MLARROW_VERSION \"#{@release_version}\")"],
+ ],
+ },
+ {
+ path: "python/setup.py",
+ hunks: [
+ ["-default_version = '#{@snapshot_version}'",
+ "+default_version = '#{@release_version}'"],
+ ],
+ },
+ {
+ path: "r/DESCRIPTION",
+ hunks: [
+ ["-Version: #{@previous_version}.9000",
+ "+Version: #{@release_version}"],
+ ],
+ },
+ {
+ path: "r/NEWS.md",
+ hunks: [
+ ["-\# arrow #{@previous_version}.9000",
+ "+\# arrow #{@release_version}"],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-cuda/lib/arrow-cuda/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-dataset/lib/arrow-dataset/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-flight/lib/arrow-flight/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow/lib/arrow/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-gandiva/lib/gandiva/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-parquet/lib/parquet/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-plasma/lib/plasma/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@release_version}\""],
+ ],
+ },
+ ],
+ parse_patch(git("log", "-n", "1", "-p")))
+ end
+
+ def test_version_post_tag
+ omit_on_release_branch
+ bump_versions("VERSION_POST_TAG")
+ assert_equal([
+ {
+ path: "c_glib/meson.build",
+ hunks: [
+ ["-version = '#{@snapshot_version}'",
+ "+version = '#{@next_snapshot_version}'"],
+ ],
+ },
+ {
+ path: "ci/scripts/PKGBUILD",
+ hunks: [
+ ["-pkgver=#{@previous_version}.9000",
+ "+pkgver=#{@release_version}.9000"],
+ ],
+ },
+ {
+ path: "cpp/CMakeLists.txt",
+ hunks: [
+ ["-set(ARROW_VERSION \"#{@snapshot_version}\")",
+ "+set(ARROW_VERSION \"#{@next_snapshot_version}\")"],
+ ],
+ },
+ {
+ path: "cpp/vcpkg.json",
+ hunks: [
+ ["- \"version-string\": \"#{@snapshot_version}\",",
+ "+ \"version-string\": \"#{@next_snapshot_version}\","],
+ ],
+ },
+ {
+ path: "csharp/Directory.Build.props",
+ hunks: [
+ ["- <Version>#{@snapshot_version}</Version>",
+ "+ <Version>#{@next_snapshot_version}</Version>"],
+ ],
+ },
+ {
+ path: "dev/tasks/homebrew-formulae/apache-arrow.rb",
+ hunks: [
+ ["- url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@snapshot_version}/apache-arrow-#{@snapshot_version}.tar.gz\"",
+ "+ url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@next_snapshot_version}/apache-arrow-#{@next_snapshot_version}.tar.gz\""],
+ ],
+ },
+ {
+ path: "dev/tasks/homebrew-formulae/autobrew/apache-arrow.rb",
+ hunks: [
+ ["- url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@previous_version}.9000/apache-arrow-#{@previous_version}.9000.tar.gz\"",
+ "+ url \"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-#{@release_version}.9000/apache-arrow-#{@release_version}.9000.tar.gz\""],
+ ],
+ },
+ { path: "java/adapter/avro/pom.xml",
+ hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]] },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/adapter/jdbc/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/adapter/orc/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/algorithm/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/c/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/compression/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/dataset/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/flight/flight-core/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/flight/flight-grpc/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>", "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/format/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/gandiva/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/memory/memory-core/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/memory/memory-netty/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/memory/memory-unsafe/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/memory/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"],
+ ["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/performance/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/plasma/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>", "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/tools/pom.xml" },
+ { hunks: [["- <version>#{@snapshot_version}</version>",
+ "+ <version>#{@next_snapshot_version}</version>"]],
+ path: "java/vector/pom.xml" },
+ {
+ path: "js/package.json",
+ hunks: [
+ ["- \"version\": \"#{@snapshot_version}\"",
+ "+ \"version\": \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "matlab/CMakeLists.txt",
+ hunks: [
+ ["-set(MLARROW_VERSION \"#{@snapshot_version}\")",
+ "+set(MLARROW_VERSION \"#{@next_snapshot_version}\")"],
+ ],
+ },
+ {
+ path: "python/setup.py",
+ hunks: [
+ ["-default_version = '#{@snapshot_version}'",
+ "+default_version = '#{@next_snapshot_version}'"],
+ ],
+ },
+ {
+ path: "r/DESCRIPTION",
+ hunks: [
+ ["-Version: #{@previous_version}.9000",
+ "+Version: #{@release_version}.9000"],
+ ],
+ },
+ {
+ path: "r/NEWS.md",
+ hunks: [
+ ["-# arrow #{@previous_version}.9000",
+ "+# arrow #{@release_version}.9000",
+ "+",
+ "+# arrow #{@release_version}",],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-cuda/lib/arrow-cuda/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-dataset/lib/arrow-dataset/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow-flight/lib/arrow-flight/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-arrow/lib/arrow/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-gandiva/lib/gandiva/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-parquet/lib/parquet/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ {
+ path: "ruby/red-plasma/lib/plasma/version.rb",
+ hunks: [
+ ["- VERSION = \"#{@snapshot_version}\"",
+ "+ VERSION = \"#{@next_snapshot_version}\""],
+ ],
+ },
+ ],
+ parse_patch(git("log", "-n", "1", "-p")))
+ end
+
+ def test_deb_package_names
+ bump_versions("DEB_PACKAGE_NAMES")
+ changes = parse_patch(git("log", "-n", "1", "-p"))
+ sampled_changes = changes.collect do |change|
+ first_hunk = change[:hunks][0]
+ first_removed_line = first_hunk.find { |line| line.start_with?("-") }
+ first_added_line = first_hunk.find { |line| line.start_with?("+") }
+ {
+ sampled_diff: [first_removed_line, first_added_line],
+ path: change[:path],
+ }
+ end
+ expected_changes = [
+ {
+ sampled_diff: [
+ "-dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib#{@so_version}.install",
+ "+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib#{@next_so_version}.install",
+ ],
+ path: "dev/release/rat_exclude_files.txt",
+ },
+ {
+ sampled_diff: [
+ "-Package: libarrow#{@so_version}",
+ "+Package: libarrow#{@next_so_version}",
+ ],
+ path: "dev/tasks/linux-packages/apache-arrow/debian/control.in",
+ },
+ {
+ sampled_diff: [
+ "- - libarrow-dataset-glib#{@so_version}-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb",
+ "+ - libarrow-dataset-glib#{@next_so_version}-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb",
+ ],
+ path: "dev/tasks/tasks.yml",
+ },
+ ]
+ assert_equal(expected_changes, sampled_changes)
+ end
+end
diff --git a/src/arrow/dev/release/01-prepare.sh b/src/arrow/dev/release/01-prepare.sh
new file mode 100755
index 000000000..b1e917390
--- /dev/null
+++ b/src/arrow/dev/release/01-prepare.sh
@@ -0,0 +1,103 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -ue
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 3 ]; then
+ echo "Usage: $0 <version> <next_version> <rc-num>"
+ exit 1
+fi
+
+. $SOURCE_DIR/utils-prepare.sh
+
+version=$1
+next_version=$2
+next_version_snapshot="${next_version}-SNAPSHOT"
+rc_number=$3
+
+release_tag="apache-arrow-${version}"
+release_branch="release-${version}"
+release_candidate_branch="release-${version}-rc${rc_number}"
+
+: ${PREPARE_DEFAULT:=1}
+: ${PREPARE_CHANGELOG:=${PREPARE_DEFAULT}}
+: ${PREPARE_LINUX_PACKAGES:=${PREPARE_DEFAULT}}
+: ${PREPARE_VERSION_PRE_TAG:=${PREPARE_DEFAULT}}
+: ${PREPARE_BRANCH:=${PREPARE_DEFAULT}}
+: ${PREPARE_TAG:=${PREPARE_DEFAULT}}
+
+if [ ${PREPARE_TAG} -gt 0 ]; then
+ if [ $(git tag -l "${release_tag}") ]; then
+ echo "Delete existing git tag $release_tag"
+ git tag -d "${release_tag}"
+ fi
+fi
+
+if [ ${PREPARE_BRANCH} -gt 0 ]; then
+ if [[ $(git branch -l "${release_candidate_branch}") ]]; then
+ next_rc_number=$(($rc_number+1))
+ echo "Branch ${release_candidate_branch} already exists, so create a new release candidate:"
+ echo "1. Checkout the master branch for major releases and maint-<version> for patch releases."
+ echo "2. Execute the script again with bumped RC number."
+ echo "Commands:"
+ echo " git checkout master"
+ echo " dev/release/01-prepare.sh ${version} ${next_version} ${next_rc_number}"
+ exit 1
+ fi
+
+ echo "Create local branch ${release_candidate_branch} for release candidate ${rc_number}"
+ git checkout -b ${release_candidate_branch}
+fi
+
+############################## Pre-Tag Commits ##############################
+
+if [ ${PREPARE_CHANGELOG} -gt 0 ]; then
+ echo "Updating changelog for $version"
+ # Update changelog
+ archery release changelog add $version
+ git add ${SOURCE_DIR}/../../CHANGELOG.md
+ git commit -m "[Release] Update CHANGELOG.md for $version"
+fi
+
+if [ ${PREPARE_LINUX_PACKAGES} -gt 0 ]; then
+ echo "Updating .deb/.rpm changelogs for $version"
+ cd $SOURCE_DIR/../tasks/linux-packages
+ rake \
+ version:update \
+ ARROW_RELEASE_TIME="$(date +%Y-%m-%dT%H:%M:%S%z)" \
+ ARROW_VERSION=${version}
+ git add */debian*/changelog */yum/*.spec.in
+ git commit -m "[Release] Update .deb/.rpm changelogs for $version"
+ cd -
+fi
+
+if [ ${PREPARE_VERSION_PRE_TAG} -gt 0 ]; then
+ echo "Prepare release ${version} on tag ${release_tag} then reset to version ${next_version_snapshot}"
+
+ update_versions "${version}" "${next_version}" "release"
+ git commit -m "[Release] Update versions for ${version}"
+fi
+
+############################## Tag the Release ##############################
+
+if [ ${PREPARE_TAG} -gt 0 ]; then
+ git tag -a "${release_tag}" -m "[Release] Apache Arrow Release ${version}"
+fi
diff --git a/src/arrow/dev/release/02-source-test.rb b/src/arrow/dev/release/02-source-test.rb
new file mode 100644
index 000000000..652d4c07f
--- /dev/null
+++ b/src/arrow/dev/release/02-source-test.rb
@@ -0,0 +1,148 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+class SourceTest < Test::Unit::TestCase
+ include GitRunnable
+ include VersionDetectable
+
+ def setup
+ @current_commit = git_current_commit
+ detect_versions
+ @tag_name = "apache-arrow-#{@release_version}"
+ @script = File.expand_path("dev/release/02-source.sh")
+
+ Dir.mktmpdir do |dir|
+ Dir.chdir(dir) do
+ yield
+ end
+ end
+ end
+
+ def source(*targets)
+ env = {
+ "SOURCE_DEFAULT" => "0",
+ "release_hash" => @current_commit,
+ }
+ targets.each do |target|
+ env["SOURCE_#{target}"] = "1"
+ end
+ output = sh(env, @script, @release_version, "0")
+ sh("tar", "xf", "#{@tag_name}.tar.gz")
+ output
+ end
+
+ def test_symbolic_links
+ source
+ Dir.chdir(@tag_name) do
+ assert_equal([],
+ Find.find(".").find_all {|path| File.symlink?(path)})
+ end
+ end
+
+ def test_csharp_git_commit_information
+ source
+ Dir.chdir("#{@tag_name}/csharp") do
+ FileUtils.mv("dummy.git", "../.git")
+ sh("dotnet", "pack", "-c", "Release")
+ FileUtils.mv("../.git", "dummy.git")
+ Dir.chdir("artifacts/Apache.Arrow/Release") do
+ sh("unzip", "Apache.Arrow.#{@snapshot_version}.nupkg")
+ FileUtils.chmod(0400, "Apache.Arrow.nuspec")
+ nuspec = REXML::Document.new(File.read("Apache.Arrow.nuspec"))
+ nuspec_repository = nuspec.elements["package/metadata/repository"]
+ attributes = {}
+ nuspec_repository.attributes.each do |key, value|
+ attributes[key] = value
+ end
+ assert_equal({
+ "type" => "git",
+ "url" => "https://github.com/apache/arrow",
+ "commit" => @current_commit,
+ },
+ attributes)
+ end
+ end
+ end
+
+ def test_python_version
+ source
+ Dir.chdir("#{@tag_name}/python") do
+ sh("python3", "setup.py", "sdist")
+ if on_release_branch?
+ pyarrow_source_archive = "dist/pyarrow-#{@release_version}.tar.gz"
+ else
+ pyarrow_source_archive = "dist/pyarrow-#{@release_version}a0.tar.gz"
+ end
+ assert_equal([pyarrow_source_archive],
+ Dir.glob("dist/pyarrow-*.tar.gz"))
+ end
+ end
+
+ def test_vote
+ jira_url = "https://issues.apache.org/jira"
+ jql_conditions = [
+ "project = ARROW",
+ "status in (Resolved, Closed)",
+ "fixVersion = #{@release_version}",
+ ]
+ jql = jql_conditions.join(" AND ")
+ n_resolved_issues = nil
+ search_url = URI("#{jira_url}/rest/api/2/search?jql=#{CGI.escape(jql)}")
+ search_url.open do |response|
+ n_resolved_issues = JSON.parse(response.read)["total"]
+ end
+ output = source("VOTE")
+ assert_equal(<<-VOTE.strip, output[/^-+$(.+?)^-+$/m, 1].strip)
+To: dev@arrow.apache.org
+Subject: [VOTE] Release Apache Arrow #{@release_version} - RC0
+
+Hi,
+
+I would like to propose the following release candidate (RC0) of Apache
+Arrow version #{@release_version}. This is a release consisting of #{n_resolved_issues}
+resolved JIRA issues[1].
+
+This release candidate is based on commit:
+#{@current_commit} [2]
+
+The source release rc0 is hosted at [3].
+The binary artifacts are hosted at [4][5][6][7][8][9].
+The changelog is located at [10].
+
+Please download, verify checksums and signatures, run the unit tests,
+and vote on the release. See [11] for how to validate a release candidate.
+
+The vote will be open for at least 72 hours.
+
+[ ] +1 Release this as Apache Arrow #{@release_version}
+[ ] +0
+[ ] -1 Do not release this as Apache Arrow #{@release_version} because...
+
+[1]: https://issues.apache.org/jira/issues/?jql=project%20%3D%20ARROW%20AND%20status%20in%20%28Resolved%2C%20Closed%29%20AND%20fixVersion%20%3D%20#{@release_version}
+[2]: https://github.com/apache/arrow/tree/#{@current_commit}
+[3]: https://dist.apache.org/repos/dist/dev/arrow/apache-arrow-#{@release_version}-rc0
+[4]: https://apache.jfrog.io/artifactory/arrow/amazon-linux-rc/
+[5]: https://apache.jfrog.io/artifactory/arrow/centos-rc/
+[6]: https://apache.jfrog.io/artifactory/arrow/debian-rc/
+[7]: https://apache.jfrog.io/artifactory/arrow/nuget-rc/#{@release_version}-rc0
+[8]: https://apache.jfrog.io/artifactory/arrow/python-rc/#{@release_version}-rc0
+[9]: https://apache.jfrog.io/artifactory/arrow/ubuntu-rc/
+[10]: https://github.com/apache/arrow/blob/#{@current_commit}/CHANGELOG.md
+[11]: https://cwiki.apache.org/confluence/display/ARROW/How+to+Verify+Release+Candidates
+ VOTE
+ end
+end
diff --git a/src/arrow/dev/release/02-source.sh b/src/arrow/dev/release/02-source.sh
new file mode 100755
index 000000000..156eccc1b
--- /dev/null
+++ b/src/arrow/dev/release/02-source.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set -e
+
+: ${SOURCE_DEFAULT:=1}
+: ${SOURCE_RAT:=${SOURCE_DEFAULT}}
+: ${SOURCE_UPLOAD:=${SOURCE_DEFAULT}}
+: ${SOURCE_VOTE:=${SOURCE_DEFAULT}}
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+SOURCE_TOP_DIR="$(cd "${SOURCE_DIR}/../../" && pwd)"
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc=$2
+
+tag=apache-arrow-${version}
+tagrc=${tag}-rc${rc}
+rc_url="https://dist.apache.org/repos/dist/dev/arrow/${tagrc}"
+
+echo "Preparing source for tag ${tag}"
+
+: ${release_hash:=$(cd "${SOURCE_TOP_DIR}" && git rev-list --max-count=1 ${tag})}
+
+if [ ${SOURCE_UPLOAD} -gt 0 ]; then
+ if [ -z "$release_hash" ]; then
+ echo "Cannot continue: unknown git tag: $tag"
+ exit
+ fi
+fi
+
+echo "Using commit $release_hash"
+
+tarball=${tag}.tar.gz
+
+rm -rf ${tag}
+# be conservative and use the release hash, even though git produces the same
+# archive (identical hashes) using the scm tag
+(cd "${SOURCE_TOP_DIR}" && \
+ git archive ${release_hash} --prefix ${tag}/) | \
+ tar xf -
+
+# Resolve all hard and symbolic links
+rm -rf ${tag}.tmp
+mv ${tag} ${tag}.tmp
+cp -R -L ${tag}.tmp ${tag}
+rm -rf ${tag}.tmp
+
+# Create a dummy .git/ directory to download the source files from GitHub with Source Link in C#.
+dummy_git=${tag}/csharp/dummy.git
+mkdir ${dummy_git}
+pushd ${dummy_git}
+echo ${release_hash} > HEAD
+echo '[remote "origin"] url = https://github.com/apache/arrow.git' >> config
+mkdir objects refs
+popd
+
+# Create new tarball from modified source directory
+tar czf ${tarball} ${tag}
+rm -rf ${tag}
+
+if [ ${SOURCE_RAT} -gt 0 ]; then
+ "${SOURCE_DIR}/run-rat.sh" ${tarball}
+fi
+
+if [ ${SOURCE_UPLOAD} -gt 0 ]; then
+ # sign the archive
+ gpg --armor --output ${tarball}.asc --detach-sig ${tarball}
+ shasum -a 256 $tarball > ${tarball}.sha256
+ shasum -a 512 $tarball > ${tarball}.sha512
+
+ # check out the arrow RC folder
+ svn co --depth=empty https://dist.apache.org/repos/dist/dev/arrow tmp
+
+ # add the release candidate for the tag
+ mkdir -p tmp/${tagrc}
+
+ # copy the rc tarball into the tmp dir
+ cp ${tarball}* tmp/${tagrc}
+
+ # commit to svn
+ svn add tmp/${tagrc}
+ svn ci -m "Apache Arrow ${version} RC${rc}" tmp/${tagrc}
+
+ # clean up
+ rm -rf tmp
+
+ echo "Success! The release candidate is available here:"
+ echo " ${rc_url}"
+ echo ""
+ echo "Commit SHA1: ${release_hash}"
+ echo ""
+fi
+
+if [ ${SOURCE_VOTE} -gt 0 ]; then
+ echo "The following draft email has been created to send to the"
+ echo "dev@arrow.apache.org mailing list"
+ echo ""
+ echo "---------------------------------------------------------"
+ jira_url="https://issues.apache.org/jira"
+ jql="project%20%3D%20ARROW%20AND%20status%20in%20%28Resolved%2C%20Closed%29%20AND%20fixVersion%20%3D%20${version}"
+ n_resolved_issues=$(curl "${jira_url}/rest/api/2/search/?jql=${jql}" | jq ".total")
+ cat <<MAIL
+To: dev@arrow.apache.org
+Subject: [VOTE] Release Apache Arrow ${version} - RC${rc}
+
+Hi,
+
+I would like to propose the following release candidate (RC${rc}) of Apache
+Arrow version ${version}. This is a release consisting of ${n_resolved_issues}
+resolved JIRA issues[1].
+
+This release candidate is based on commit:
+${release_hash} [2]
+
+The source release rc${rc} is hosted at [3].
+The binary artifacts are hosted at [4][5][6][7][8][9].
+The changelog is located at [10].
+
+Please download, verify checksums and signatures, run the unit tests,
+and vote on the release. See [11] for how to validate a release candidate.
+
+The vote will be open for at least 72 hours.
+
+[ ] +1 Release this as Apache Arrow ${version}
+[ ] +0
+[ ] -1 Do not release this as Apache Arrow ${version} because...
+
+[1]: ${jira_url}/issues/?jql=${jql}
+[2]: https://github.com/apache/arrow/tree/${release_hash}
+[3]: ${rc_url}
+[4]: https://apache.jfrog.io/artifactory/arrow/amazon-linux-rc/
+[5]: https://apache.jfrog.io/artifactory/arrow/centos-rc/
+[6]: https://apache.jfrog.io/artifactory/arrow/debian-rc/
+[7]: https://apache.jfrog.io/artifactory/arrow/nuget-rc/${version}-rc${rc}
+[8]: https://apache.jfrog.io/artifactory/arrow/python-rc/${version}-rc${rc}
+[9]: https://apache.jfrog.io/artifactory/arrow/ubuntu-rc/
+[10]: https://github.com/apache/arrow/blob/${release_hash}/CHANGELOG.md
+[11]: https://cwiki.apache.org/confluence/display/ARROW/How+to+Verify+Release+Candidates
+MAIL
+ echo "---------------------------------------------------------"
+fi
diff --git a/src/arrow/dev/release/03-binary-submit.sh b/src/arrow/dev/release/03-binary-submit.sh
new file mode 100755
index 000000000..b22a54bfd
--- /dev/null
+++ b/src/arrow/dev/release/03-binary-submit.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc_number=$2
+version_with_rc="${version}-rc${rc_number}"
+crossbow_job_prefix="release-${version_with_rc}"
+release_tag="apache-arrow-${version}"
+
+: ${ARROW_REPOSITORY:="apache/arrow"}
+: ${ARROW_BRANCH:=$release_tag}
+
+# archery will submit a job with id: "${crossbow_job_prefix}-0" unless there
+# are jobs submitted with the same prefix (the integer at the end is auto
+# incremented)
+archery crossbow submit \
+ --no-fetch \
+ --job-prefix ${crossbow_job_prefix} \
+ --arrow-version ${version_with_rc} \
+ --arrow-remote "https://github.com/${ARROW_REPOSITORY}" \
+ --arrow-branch ${ARROW_BRANCH} \
+ --group packaging
diff --git a/src/arrow/dev/release/04-binary-download.sh b/src/arrow/dev/release/04-binary-download.sh
new file mode 100755
index 000000000..b433a3f9c
--- /dev/null
+++ b/src/arrow/dev/release/04-binary-download.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set -e
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc_number=$2
+version_with_rc="${version}-rc${rc_number}"
+crossbow_job_prefix="release-${version_with_rc}"
+
+# archery will submit a job with id: "${crossbow_job_prefix}-0" unless there
+# are jobs submitted with the same prefix (the integer at the end is auto
+# incremented)
+: ${CROSSBOW_JOB_NUMBER:="0"}
+: ${CROSSBOW_JOB_ID:="${crossbow_job_prefix}-${CROSSBOW_JOB_NUMBER}"}
+
+archery crossbow download-artifacts ${CROSSBOW_JOB_ID} --no-fetch
diff --git a/src/arrow/dev/release/05-binary-upload.sh b/src/arrow/dev/release/05-binary-upload.sh
new file mode 100755
index 000000000..5a30fc8bd
--- /dev/null
+++ b/src/arrow/dev/release/05-binary-upload.sh
@@ -0,0 +1,122 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -u
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc=$2
+
+version_with_rc="${version}-rc${rc}"
+crossbow_job_prefix="release-${version_with_rc}"
+crossbow_package_dir="${SOURCE_DIR}/../../packages"
+
+: ${CROSSBOW_JOB_NUMBER:="0"}
+: ${CROSSBOW_JOB_ID:="${crossbow_job_prefix}-${CROSSBOW_JOB_NUMBER}"}
+artifact_dir="${crossbow_package_dir}/${CROSSBOW_JOB_ID}"
+
+if [ ! -e "$artifact_dir" ]; then
+ echo "$artifact_dir does not exist"
+ exit 1
+fi
+
+if [ ! -d "$artifact_dir" ]; then
+ echo "$artifact_dir is not a directory"
+ exit 1
+fi
+
+cd "${SOURCE_DIR}"
+
+if [ ! -f .env ]; then
+ echo "You must create $(pwd)/.env"
+ echo "You can use $(pwd)/.env.example as template"
+ exit 1
+fi
+. .env
+
+. utils-binary.sh
+
+# By default upload all artifacts.
+# To deactivate one category, deactivate the category and all of its dependents.
+# To explicitly select one category, set UPLOAD_DEFAULT=0 UPLOAD_X=1.
+: ${UPLOAD_DEFAULT:=1}
+: ${UPLOAD_ALMALINUX:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_AMAZON_LINUX:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_CENTOS:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_DEBIAN:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_NUGET:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_PYTHON:=${UPLOAD_DEFAULT}}
+: ${UPLOAD_UBUNTU:=${UPLOAD_DEFAULT}}
+
+rake_tasks=()
+apt_targets=()
+yum_targets=()
+if [ ${UPLOAD_ALMALINUX} -gt 0 ]; then
+ rake_tasks+=(yum:rc)
+ yum_targets+=(almalinux)
+fi
+if [ ${UPLOAD_AMAZON_LINUX} -gt 0 ]; then
+ rake_tasks+=(yum:rc)
+ yum_targets+=(amazon-linux)
+fi
+if [ ${UPLOAD_CENTOS} -gt 0 ]; then
+ rake_tasks+=(yum:rc)
+ yum_targets+=(centos)
+fi
+if [ ${UPLOAD_DEBIAN} -gt 0 ]; then
+ rake_tasks+=(apt:rc)
+ apt_targets+=(debian)
+fi
+if [ ${UPLOAD_NUGET} -gt 0 ]; then
+ rake_tasks+=(nuget:rc)
+fi
+if [ ${UPLOAD_PYTHON} -gt 0 ]; then
+ rake_tasks+=(python:rc)
+fi
+if [ ${UPLOAD_UBUNTU} -gt 0 ]; then
+ rake_tasks+=(apt:rc)
+ apt_targets+=(ubuntu)
+fi
+rake_tasks+=(summary:rc)
+
+tmp_dir=binary/tmp
+mkdir -p "${tmp_dir}"
+source_artifacts_dir="${tmp_dir}/artifacts"
+rm -rf "${source_artifacts_dir}"
+cp -a "${artifact_dir}" "${source_artifacts_dir}"
+
+docker_run \
+ ./runner.sh \
+ rake \
+ "${rake_tasks[@]}" \
+ APT_TARGETS=$(IFS=,; echo "${apt_targets[*]}") \
+ ARTIFACTORY_API_KEY="${ARTIFACTORY_API_KEY}" \
+ ARTIFACTS_DIR="${tmp_dir}/artifacts" \
+ RC=${rc} \
+ STAGING=${STAGING:-no} \
+ VERSION=${version} \
+ YUM_TARGETS=$(IFS=,; echo "${yum_targets[*]}")
diff --git a/src/arrow/dev/release/README.md b/src/arrow/dev/release/README.md
new file mode 100644
index 000000000..0a9cc3e04
--- /dev/null
+++ b/src/arrow/dev/release/README.md
@@ -0,0 +1,24 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+## Release management scripts
+
+To learn more, see the project wiki:
+
+https://cwiki.apache.org/confluence/display/ARROW/Release+Management+Guide
diff --git a/src/arrow/dev/release/Rakefile b/src/arrow/dev/release/Rakefile
new file mode 100644
index 000000000..ff57bad5e
--- /dev/null
+++ b/src/arrow/dev/release/Rakefile
@@ -0,0 +1,37 @@
+# -*- ruby -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+require_relative "binary-task"
+
+if File.exist?(".env")
+ File.open(".env") do |env|
+ env.each_line do |line|
+ case line.strip
+ when /\A#/
+ when /\A([^=]+)=(.*)\z/
+ key = $1
+ value = $2
+ ENV[key] ||= value
+ end
+ end
+ end
+end
+
+binary_task = BinaryTask.new
+binary_task.define
diff --git a/src/arrow/dev/release/VERIFY.md b/src/arrow/dev/release/VERIFY.md
new file mode 100644
index 000000000..5b441ac13
--- /dev/null
+++ b/src/arrow/dev/release/VERIFY.md
@@ -0,0 +1,76 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+# Verifying Arrow releases
+
+## Windows
+
+We've provided a convenience script for verifying the C++ and Python builds on
+Windows. Read the comments in `verify-release-candidate.bat` for instructions.
+
+## Linux and macOS
+
+We've provided a convenience script for verifying the C++, Python, C
+GLib, Java and JavaScript builds on Linux and macOS. Read the comments in
+`verify-release-candidate.sh` for instructions.
+
+### C GLib
+
+You need the following to verify the C GLib build:
+
+ * GLib
+ * GObject Introspection
+ * Ruby (a non-EOL version is required)
+ * gobject-introspection gem
+ * test-unit gem
+
+You can install them with the following commands on Debian GNU/Linux and Ubuntu:
+
+```console
+% sudo apt install -y -V libgirepository1.0-dev ruby-dev
+% sudo gem install gobject-introspection test-unit
+```
+
+You can install them with the following commands on CentOS:
+
+```console
+% sudo yum install -y gobject-introspection-devel
+% git clone https://github.com/sstephenson/rbenv.git ~/.rbenv
+% git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build
+% echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >> ~/.bash_profile
+% echo 'eval "$(rbenv init -)"' >> ~/.bash_profile
+% exec ${SHELL} --login
+% sudo yum install -y gcc make patch openssl-devel readline-devel zlib-devel
+% rbenv install 2.4.2
+% rbenv global 2.4.2
+% gem install gobject-introspection test-unit
+```
+
+You can install them with the following commands on macOS:
+
+```console
+% brew install gobject-introspection
+% gem install gobject-introspection test-unit
+```
+
+You need to set `PKG_CONFIG_PATH` to find libffi on macOS:
+
+```console
+% export PKG_CONFIG_PATH=$(brew --prefix libffi)/lib/pkgconfig:$PKG_CONFIG_PATH
+```
diff --git a/src/arrow/dev/release/binary-task.rb b/src/arrow/dev/release/binary-task.rb
new file mode 100644
index 000000000..5f88e477e
--- /dev/null
+++ b/src/arrow/dev/release/binary-task.rb
@@ -0,0 +1,1910 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+require "cgi/util"
+require "digest/sha2"
+require "io/console"
+require "json"
+require "net/http"
+require "pathname"
+require "tempfile"
+require "thread"
+require "time"
+
+begin
+ require "apt-dists-merge"
+rescue LoadError
+ warn("apt-dists-merge is needed for apt:* tasks")
+end
+
+class BinaryTask
+ include Rake::DSL
+
+ class ThreadPool
+ def initialize(use_case, &worker)
+ @n_workers = choose_n_workers(use_case)
+ @worker = worker
+ @jobs = Thread::Queue.new
+ @workers = @n_workers.times.collect do
+ Thread.new do
+ loop do
+ job = @jobs.pop
+ break if job.nil?
+ @worker.call(job)
+ end
+ end
+ end
+ end
+
+ def <<(job)
+ @jobs << job
+ end
+
+ def join
+ @n_workers.times do
+ @jobs << nil
+ end
+ @workers.each(&:join)
+ end
+
+ private
+ def choose_n_workers(use_case)
+ case use_case
+ when :artifactory
+ # Too many workers cause Artifactory error.
+ 6
+ when :gpg
+ # Too many workers cause gpg-agent error.
+ 2
+ else
+ raise "Unknown use case: #{use_case}"
+ end
+ end
+ end
+
+ class ProgressReporter
+ def initialize(label, count_max=0)
+ @label = label
+ @count_max = count_max
+
+ @mutex = Thread::Mutex.new
+
+ @time_start = Time.now
+ @time_previous = Time.now
+ @count_current = 0
+ @count_previous = 0
+ end
+
+ def advance
+ @mutex.synchronize do
+ @count_current += 1
+
+ return if @count_max.zero?
+
+ time_current = Time.now
+ if time_current - @time_previous <= 1
+ return
+ end
+
+ show_progress(time_current)
+ end
+ end
+
+ def increment_max
+ @mutex.synchronize do
+ @count_max += 1
+ show_progress(Time.now) if @count_max == 1
+ end
+ end
+
+ def finish
+ @mutex.synchronize do
+ return if @count_max.zero?
+ show_progress(Time.now)
+ $stderr.puts
+ end
+ end
+
+ private
+ def show_progress(time_current)
+ n_finishes = @count_current - @count_previous
+ throughput = n_finishes.to_f / (time_current - @time_previous)
+ @time_previous = time_current
+ @count_previous = @count_current
+
+ message = build_message(time_current, throughput)
+ $stderr.print("\r#{message}") if message
+ end
+
+ def build_message(time_current, throughput)
+ percent = (@count_current / @count_max.to_f) * 100
+ formatted_count = "[%s/%s]" % [
+ format_count(@count_current),
+ format_count(@count_max),
+ ]
+ elapsed_second = time_current - @time_start
+ if throughput.zero?
+ rest_second = 0
+ else
+ rest_second = (@count_max - @count_current) / throughput
+ end
+ separator = " - "
+ progress = "%5.1f%% %s %s %s %s" % [
+ percent,
+ formatted_count,
+ format_time_interval(elapsed_second),
+ format_time_interval(rest_second),
+ format_throughput(throughput),
+ ]
+ label = @label
+
+ width = guess_terminal_width
+ return "#{label}#{separator}#{progress}" if width.nil?
+
+ return nil if progress.size > width
+
+ label_width = width - progress.size - separator.size
+ if label.size > label_width
+ ellipsis = "..."
+ shorten_label_width = label_width - ellipsis.size
+ if shorten_label_width < 1
+ return progress
+ else
+ label = label[0, shorten_label_width] + ellipsis
+ end
+ end
+ "#{label}#{separator}#{progress}"
+ end
+
+ def format_count(count)
+ "%d" % count
+ end
+
+ def format_time_interval(interval)
+ if interval < 60
+ "00:00:%02d" % interval
+ elsif interval < (60 * 60)
+ minute, second = interval.divmod(60)
+ "00:%02d:%02d" % [minute, second]
+ elsif interval < (60 * 60 * 24)
+ minute, second = interval.divmod(60)
+ hour, minute = minute.divmod(60)
+ "%02d:%02d:%02d" % [hour, minute, second]
+ else
+ minute, second = interval.divmod(60)
+ hour, minute = minute.divmod(60)
+ day, hour = hour.divmod(24)
+ "%dd %02d:%02d:%02d" % [day, hour, minute, second]
+ end
+ end
+
+ def format_throughput(throughput)
+ "%2d/s" % throughput
+ end
+
+ def guess_terminal_width
+ guess_terminal_width_from_io ||
+ guess_terminal_width_from_command ||
+ guess_terminal_width_from_env ||
+ 80
+ end
+
+ def guess_terminal_width_from_io
+ if IO.respond_to?(:console) and IO.console
+ IO.console.winsize[1]
+ elsif $stderr.respond_to?(:winsize)
+ begin
+ $stderr.winsize[1]
+ rescue SystemCallError
+ nil
+ end
+ else
+ nil
+ end
+ end
+
+ def guess_terminal_width_from_command
+ IO.pipe do |input, output|
+ begin
+ pid = spawn("tput", "cols", {:out => output, :err => output})
+ rescue SystemCallError
+ return nil
+ end
+
+ output.close
+ _, status = Process.waitpid2(pid)
+ return nil unless status.success?
+
+ result = input.read.chomp
+ begin
+ Integer(result, 10)
+ rescue ArgumentError
+ nil
+ end
+ end
+ end
+
+ def guess_terminal_width_from_env
+ env = ENV["COLUMNS"] || ENV["TERM_WIDTH"]
+ return nil if env.nil?
+
+ begin
+ Integer(env, 10)
+ rescue ArgumentError
+ nil
+ end
+ end
+ end
+
+ class ArtifactoryClient
+ class Error < StandardError
+ attr_reader :request
+ attr_reader :response
+ def initialize(request, response, message)
+ @request = request
+ @response = response
+ super(message)
+ end
+ end
+
+ def initialize(prefix, api_key)
+ @prefix = prefix
+ @api_key = api_key
+ @http = nil
+ restart
+ end
+
+ def restart
+ close
+ @http = start_http(build_url(""))
+ end
+
+ private def start_http(url, &block)
+ http = Net::HTTP.new(url.host, url.port)
+ http.set_debug_output($stderr) if ENV["DEBUG"]
+ http.use_ssl = true
+ if block_given?
+ http.start(&block)
+ else
+ http
+ end
+ end
+
+ def close
+ return if @http.nil?
+ @http.finish if @http.started?
+ @http = nil
+ end
+
+ def request(method, headers, url, body: nil, &block)
+ request = build_request(method, url, headers, body: body)
+ if ENV["DRY_RUN"]
+ case request
+ when Net::HTTP::Get, Net::HTTP::Head
+ else
+ p [method, url]
+ return
+ end
+ end
+ request_internal(@http, request, &block)
+ end
+
+ private def request_internal(http, request, &block)
+ http.request(request) do |response|
+ case response
+ when Net::HTTPSuccess,
+ Net::HTTPNotModified
+ if block_given?
+ return yield(response)
+ else
+ response.read_body
+ return response
+ end
+ when Net::HTTPRedirection
+ redirected_url = URI(response["Location"])
+ redirected_request = Net::HTTP::Get.new(redirected_url, {})
+ start_http(redirected_url) do |redirected_http|
+ request_internal(redirected_http, redirected_request, &block)
+ end
+ else
+ message = "failed to request: "
+ message << "#{request.uri}: #{request.method}: "
+ message << "#{response.message} #{response.code}"
+ if response.body
+ message << "\n"
+ message << response.body
+ end
+ raise Error.new(request, response, message)
+ end
+ end
+ end
+
+ def files
+ _files = []
+ directories = [""]
+ until directories.empty?
+ directory = directories.shift
+ list(directory).each do |path|
+ resolved_path = "#{directory}#{path}"
+ case path
+ when "../"
+ when /\/\z/
+ directories << resolved_path
+ else
+ _files << resolved_path
+ end
+ end
+ end
+ _files
+ end
+
+ def list(path)
+ url = build_url(path)
+ with_retry(3, url) do
+ begin
+ request(:get, {}, url) do |response|
+ response.body.scan(/<a href="(.+?)"/).flatten
+ end
+ rescue Error => error
+ case error.response
+ when Net::HTTPNotFound
+ return []
+ else
+ raise
+ end
+ end
+ end
+ end
+
+ def head(path)
+ url = build_url(path)
+ with_retry(3, url) do
+ request(:head, {}, url)
+ end
+ end
+
+ def exist?(path)
+ begin
+ head(path)
+ true
+ rescue Error => error
+ case error.response
+ when Net::HTTPNotFound
+ false
+ else
+ raise
+ end
+ end
+ end
+
+ def upload(path, destination_path)
+ destination_url = build_url(destination_path)
+ with_retry(3, destination_url) do
+ sha1 = Digest::SHA1.file(path).hexdigest
+ sha256 = Digest::SHA256.file(path).hexdigest
+ headers = {
+ "X-Artifactory-Last-Modified" => File.mtime(path).rfc2822,
+ "X-Checksum-Deploy" => "false",
+ "X-Checksum-Sha1" => sha1,
+ "X-Checksum-Sha256" => sha256,
+ "Content-Length" => File.size(path).to_s,
+ "Content-Type" => "application/octet-stream",
+ }
+ File.open(path, "rb") do |input|
+ request(:put, headers, destination_url, body: input)
+ end
+ end
+ end
+
+ def download(path, output_path)
+ url = build_url(path)
+ with_retry(5, url) do
+ begin
+ begin
+ headers = {}
+ if File.exist?(output_path)
+ headers["If-Modified-Since"] = File.mtime(output_path).rfc2822
+ end
+ request(:get, headers, url) do |response|
+ case response
+ when Net::HTTPNotModified
+ else
+ File.open(output_path, "wb") do |output|
+ response.read_body do |chunk|
+ output.write(chunk)
+ end
+ end
+ last_modified = response["Last-Modified"]
+ if last_modified
+ FileUtils.touch(output_path,
+ mtime: Time.rfc2822(last_modified))
+ end
+ end
+ end
+ rescue Error => error
+ case error.response
+ when Net::HTTPNotFound
+ $stderr.puts(error.message)
+ return
+ else
+ raise
+ end
+ end
+ end
+ rescue
+ FileUtils.rm_f(output_path)
+ raise
+ end
+ end
+
+ def delete(path)
+ url = build_url(path)
+ with_retry(3, url) do
+ request(:delete, {}, url)
+ end
+ end
+
+ def copy(source, destination)
+ uri = build_api_url("copy/arrow/#{source}",
+ "to" => "/arrow/#{destination}")
+ with_read_timeout(300) do
+ request(:post, {}, uri)
+ end
+ end
+
+ private
+ def build_url(path)
+ uri_string = "https://apache.jfrog.io/artifactory/arrow"
+ uri_string << "/#{@prefix}" unless @prefix.nil?
+ uri_string << "/#{path}"
+ URI(uri_string)
+ end
+
+ def build_api_url(path, parameters)
+ uri_string = "https://apache.jfrog.io/artifactory/api/#{path}"
+ unless parameters.empty?
+ uri_string << "?"
+ escaped_parameters = parameters.collect do |key, value|
+ "#{CGI.escape(key)}=#{CGI.escape(value)}"
+ end
+ uri_string << escaped_parameters.join("&")
+ end
+ URI(uri_string)
+ end
+
+ def build_request(method, url, headers, body: nil)
+ need_auth = false
+ case method
+ when :head
+ request = Net::HTTP::Head.new(url, headers)
+ when :get
+ request = Net::HTTP::Get.new(url, headers)
+ when :post
+ need_auth = true
+ request = Net::HTTP::Post.new(url, headers)
+ when :put
+ need_auth = true
+ request = Net::HTTP::Put.new(url, headers)
+ when :delete
+ need_auth = true
+ request = Net::HTTP::Delete.new(url, headers)
+ else
+ raise "unsupported HTTP method: #{method.inspect}"
+ end
+ request["Connection"] = "Keep-Alive"
+ request["X-JFrog-Art-Api"] = @api_key if need_auth
+ if body
+ if body.is_a?(String)
+ request.body = body
+ else
+ request.body_stream = body
+ end
+ end
+ request
+ end
+
+ def with_retry(max_n_retries, target)
+ n_retries = 0
+ begin
+ yield
+ rescue Net::OpenTimeout,
+ OpenSSL::OpenSSLError,
+ SocketError,
+ SystemCallError,
+ Timeout::Error => error
+ n_retries += 1
+ if n_retries <= max_n_retries
+ $stderr.puts
+ $stderr.puts("Retry #{n_retries}: #{target}: " +
+ "#{error.class}: #{error.message}")
+ restart
+ retry
+ else
+ raise
+ end
+ end
+ end
+
+ def with_read_timeout(timeout)
+ current_timeout = @http.read_timeout
+ begin
+ @http.read_timeout = timeout
+ yield
+ ensure
+ @http.read_timeout = current_timeout
+ end
+ end
+ end
+
+ class ArtifactoryClientPool
+ class << self
+ def open(prefix, api_key)
+ pool = new(prefix, api_key)
+ begin
+ yield(pool)
+ ensure
+ pool.close
+ end
+ end
+ end
+
+ def initialize(prefix, api_key)
+ @prefix = prefix
+ @api_key = api_key
+ @mutex = Thread::Mutex.new
+ @clients = []
+ end
+
+ def pull
+ client = @mutex.synchronize do
+ if @clients.empty?
+ ArtifactoryClient.new(@prefix, @api_key)
+ else
+ @clients.pop
+ end
+ end
+ begin
+ yield(client)
+ ensure
+ release(client)
+ end
+ end
+
+ def release(client)
+ @mutex.synchronize do
+ @clients << client
+ end
+ end
+
+ def close
+ @clients.each(&:close)
+ end
+ end
+
+ module ArtifactoryPath
+ private
+ def base_path
+ path = @distribution
+ path += "-staging" if @staging
+ path += "-rc" if @rc
+ path
+ end
+ end
+
+ class ArtifactoryDownloader
+ include ArtifactoryPath
+
+ def initialize(api_key:,
+ destination:,
+ distribution:,
+ list: nil,
+ pattern: nil,
+ prefix: nil,
+ rc: nil,
+ staging: false)
+ @api_key = api_key
+ @destination = destination
+ @distribution = distribution
+ @list = list
+ @pattern = pattern
+ @prefix = prefix
+ @rc = rc
+ @staging = staging
+ end
+
+ def download
+ progress_label = "Downloading: #{base_path}"
+ progress_reporter = ProgressReporter.new(progress_label)
+ prefix = [base_path, @prefix].compact.join("/")
+ ArtifactoryClientPool.open(prefix, @api_key) do |client_pool|
+ thread_pool = ThreadPool.new(:artifactory) do |path, output_path|
+ client_pool.pull do |client|
+ client.download(path, output_path)
+ end
+ progress_reporter.advance
+ end
+ files = client_pool.pull do |client|
+ if @list
+ list_output_path = "#{@destination}/#{@list}"
+ client.download(@list, list_output_path)
+ File.readlines(list_output_path, chomp: true)
+ else
+ client.files
+ end
+ end
+ files.each do |path|
+ output_path = "#{@destination}/#{path}"
+ if @pattern
+ next unless @pattern.match?(path)
+ end
+ yield(output_path)
+ output_dir = File.dirname(output_path)
+ FileUtils.mkdir_p(output_dir)
+ progress_reporter.increment_max
+ thread_pool << [path, output_path]
+ end
+ thread_pool.join
+ end
+ progress_reporter.finish
+ end
+ end
+
+ class ArtifactoryUploader
+ include ArtifactoryPath
+
+ def initialize(api_key:,
+ destination_prefix: nil,
+ distribution:,
+ rc: nil,
+ source:,
+ staging: false,
+ sync: false,
+ sync_pattern: nil)
+ @api_key = api_key
+ @destination_prefix = destination_prefix
+ @distribution = distribution
+ @rc = rc
+ @source = source
+ @staging = staging
+ @sync = sync
+ @sync_pattern = sync_pattern
+ end
+
+ def upload
+ progress_label = "Uploading: #{base_path}"
+ progress_reporter = ProgressReporter.new(progress_label)
+ prefix = base_path
+ prefix += "/#{@destination_prefix}" if @destination_prefix
+ ArtifactoryClientPool.open(prefix, @api_key) do |client_pool|
+ if @sync
+ existing_files = client_pool.pull do |client|
+ client.files
+ end
+ else
+ existing_files = []
+ end
+
+ thread_pool = ThreadPool.new(:artifactory) do |path, relative_path|
+ client_pool.pull do |client|
+ client.upload(path, relative_path)
+ end
+ progress_reporter.advance
+ end
+
+ source = Pathname(@source)
+ source.glob("**/*") do |path|
+ next if path.directory?
+ destination_path = path.relative_path_from(source)
+ progress_reporter.increment_max
+ existing_files.delete(destination_path.to_s)
+ thread_pool << [path, destination_path]
+ end
+ thread_pool.join
+
+ if @sync
+ thread_pool = ThreadPool.new(:artifactory) do |path|
+ client_pool.pull do |client|
+ client.delete(path)
+ end
+ progress_reporter.advance
+ end
+ existing_files.each do |path|
+ if @sync_pattern
+ next unless @sync_pattern.match?(path)
+ end
+ progress_reporter.increment_max
+ thread_pool << path
+ end
+ thread_pool.join
+ end
+ end
+ progress_reporter.finish
+ end
+ end
+
+ def define
+ define_apt_tasks
+ define_yum_tasks
+ define_python_tasks
+ define_nuget_tasks
+ define_summary_tasks
+ end
+
+ private
+ def env_value(name)
+ value = ENV[name]
+ value = yield(name) if value.nil? and block_given?
+ raise "Specify #{name} environment variable" if value.nil?
+ value
+ end
+
+ def verbose?
+ ENV["VERBOSE"] == "yes"
+ end
+
+ def default_output
+ if verbose?
+ $stdout
+ else
+ IO::NULL
+ end
+ end
+
+ def gpg_key_id
+ env_value("GPG_KEY_ID")
+ end
+
+ def shorten_gpg_key_id(id)
+ id[-8..-1]
+ end
+
+ def rpm_gpg_key_package_name(id)
+ "gpg-pubkey-#{shorten_gpg_key_id(id).downcase}"
+ end
+
+ def artifactory_api_key
+ env_value("ARTIFACTORY_API_KEY")
+ end
+
+ def artifacts_dir
+ env_value("ARTIFACTS_DIR")
+ end
+
+ def version
+ env_value("VERSION")
+ end
+
+ def rc
+ env_value("RC")
+ end
+
+ def staging?
+ ENV["STAGING"] == "yes"
+ end
+
+ def full_version
+ "#{version}-rc#{rc}"
+ end
+
+ def valid_sign?(path, sign_path)
+ IO.pipe do |input, output|
+ begin
+ sh({"LANG" => "C"},
+ "gpg",
+ "--verify",
+ sign_path,
+ path,
+ out: default_output,
+ err: output,
+ verbose: false)
+ rescue
+ return false
+ end
+ output.close
+ /Good signature/ === input.read
+ end
+ end
+
+ def sign(source_path, destination_path)
+ if File.exist?(destination_path)
+ return if valid_sign?(source_path, destination_path)
+ rm(destination_path, verbose: false)
+ end
+ sh("gpg",
+ "--detach-sig",
+ "--local-user", gpg_key_id,
+ "--output", destination_path,
+ source_path,
+ out: default_output,
+ verbose: verbose?)
+ end
+
+ def sha512(source_path, destination_path)
+ if File.exist?(destination_path)
+ sha512 = File.read(destination_path).split[0]
+ return if Digest::SHA512.file(source_path).hexdigest == sha512
+ end
+ absolute_destination_path = File.expand_path(destination_path)
+ Dir.chdir(File.dirname(source_path)) do
+ sh("shasum",
+ "--algorithm", "512",
+ File.basename(source_path),
+ out: absolute_destination_path,
+ verbose: verbose?)
+ end
+ end
+
+ def sign_dir(label, dir)
+ progress_label = "Signing: #{label}"
+ progress_reporter = ProgressReporter.new(progress_label)
+
+ target_paths = []
+ Pathname(dir).glob("**/*") do |path|
+ next if path.directory?
+ case path.extname
+ when ".asc", ".sha512"
+ next
+ end
+ progress_reporter.increment_max
+ target_paths << path.to_s
+ end
+ target_paths.each do |path|
+ sign(path, "#{path}.asc")
+ sha512(path, "#{path}.sha512")
+ progress_reporter.advance
+ end
+ progress_reporter.finish
+ end
+
+ def download_distribution(distribution,
+ destination,
+ target,
+ list: nil,
+ pattern: nil,
+ prefix: nil)
+ mkdir_p(destination, verbose: verbose?) unless File.exist?(destination)
+ existing_paths = {}
+ Pathname(destination).glob("**/*") do |path|
+ next if path.directory?
+ existing_paths[path.to_s] = true
+ end
+ options = {
+ api_key: artifactory_api_key,
+ destination: destination,
+ distribution: distribution,
+ list: list,
+ pattern: pattern,
+ prefix: prefix,
+ staging: staging?,
+ }
+ options[:rc] = rc if target == :rc
+ downloader = ArtifactoryDownloader.new(**options)
+ downloader.download do |output_path|
+ existing_paths.delete(output_path)
+ end
+ existing_paths.each_key do |path|
+ rm_f(path, verbose: verbose?)
+ end
+ end
+
+ def same_content?(path1, path2)
+ File.exist?(path1) and
+ File.exist?(path2) and
+ Digest::SHA256.file(path1) == Digest::SHA256.file(path2)
+ end
+
+ def copy_artifact(source_path,
+ destination_path,
+ progress_reporter)
+ return if same_content?(source_path, destination_path)
+ progress_reporter.increment_max
+ destination_dir = File.dirname(destination_path)
+ unless File.exist?(destination_dir)
+ mkdir_p(destination_dir, verbose: verbose?)
+ end
+ cp(source_path, destination_path, verbose: verbose?)
+ progress_reporter.advance
+ end
+
+ def prepare_staging(base_path)
+ client = ArtifactoryClient.new(nil, artifactory_api_key)
+ ["", "-rc"].each do |suffix|
+ path = "#{base_path}#{suffix}"
+ progress_reporter = ProgressReporter.new("Preparing staging for #{path}")
+ progress_reporter.increment_max
+ begin
+ staging_path = "#{base_path}-staging#{suffix}"
+ if client.exist?(staging_path)
+ client.delete(staging_path)
+ end
+ if client.exist?(path)
+ client.copy(path, staging_path)
+ end
+ ensure
+ progress_reporter.advance
+ progress_reporter.finish
+ end
+ end
+ end
+
+ def delete_staging(base_path)
+ client = ArtifactoryClient.new(nil, artifactory_api_key)
+ ["", "-rc"].each do |suffix|
+ path = "#{base_path}#{suffix}"
+ progress_reporter = ProgressReporter.new("Deleting staging for #{path}")
+ progress_reporter.increment_max
+ begin
+ staging_path = "#{base_path}-staging#{suffix}"
+ if client.exist?(staging_path)
+ client.delete(staging_path)
+ end
+ ensure
+ progress_reporter.advance
+ progress_reporter.finish
+ end
+ end
+ end
+
+ def uploaded_files_name
+ "uploaded-files.txt"
+ end
+
+ def write_uploaded_files(dir)
+ dir = Pathname(dir)
+ uploaded_files = []
+ dir.glob("**/*") do |path|
+ next if path.directory?
+ uploaded_files << path.relative_path_from(dir).to_s
+ end
+ File.open("#{dir}/#{uploaded_files_name}", "w") do |output|
+ output.puts(uploaded_files.sort)
+ end
+ end
+
+ def tmp_dir
+ "binary/tmp"
+ end
+
+ def rc_dir
+ "#{tmp_dir}/rc"
+ end
+
+ def release_dir
+ "#{tmp_dir}/release"
+ end
+
+ def apt_repository_label
+ "Apache Arrow"
+ end
+
+ def apt_repository_description
+ "Apache Arrow packages"
+ end
+
+ def apt_rc_repositories_dir
+ "#{rc_dir}/apt/repositories"
+ end
+
+ def apt_release_repositories_dir
+ "#{release_dir}/apt/repositories"
+ end
+
+ def available_apt_targets
+ [
+ ["debian", "buster", "main"],
+ ["debian", "bullseye", "main"],
+ ["debian", "bookworm", "main"],
+ ["ubuntu", "bionic", "main"],
+ ["ubuntu", "focal", "main"],
+ ["ubuntu", "hirsute", "main"],
+ ["ubuntu", "impish", "main"],
+ ]
+ end
+
+ def apt_targets
+ env_apt_targets = (ENV["APT_TARGETS"] || "").split(",")
+ if env_apt_targets.empty?
+ available_apt_targets
+ else
+ available_apt_targets.select do |distribution, code_name, component|
+ env_apt_targets.any? do |env_apt_target|
+ if env_apt_target.include?("-")
+ env_apt_target.start_with?("#{distribution}-#{code_name}")
+ else
+ env_apt_target == distribution
+ end
+ end
+ end
+ end
+ end
+
+ def apt_distributions
+ apt_targets.collect(&:first).uniq
+ end
+
+ def apt_architectures
+ [
+ "amd64",
+ "arm64",
+ ]
+ end
+
+ def generate_apt_release(dists_dir, code_name, component, architecture)
+ dir = "#{dists_dir}/#{component}/"
+ if architecture == "source"
+ dir << architecture
+ else
+ dir << "binary-#{architecture}"
+ end
+
+ mkdir_p(dir, verbose: verbose?)
+ File.open("#{dir}/Release", "w") do |release|
+ release.puts(<<-RELEASE)
+Archive: #{code_name}
+Component: #{component}
+Origin: #{apt_repository_label}
+Label: #{apt_repository_label}
+Architecture: #{architecture}
+ RELEASE
+ end
+ end
+
+ def generate_apt_ftp_archive_generate_conf(code_name, component)
+ conf = <<-CONF
+Dir::ArchiveDir ".";
+Dir::CacheDir ".";
+TreeDefault::Directory "pool/#{code_name}/#{component}";
+TreeDefault::SrcDirectory "pool/#{code_name}/#{component}";
+Default::Packages::Extensions ".deb";
+Default::Packages::Compress ". gzip xz";
+Default::Sources::Compress ". gzip xz";
+Default::Contents::Compress "gzip";
+ CONF
+
+ apt_architectures.each do |architecture|
+ conf << <<-CONF
+
+BinDirectory "dists/#{code_name}/#{component}/binary-#{architecture}" {
+ Packages "dists/#{code_name}/#{component}/binary-#{architecture}/Packages";
+ Contents "dists/#{code_name}/#{component}/Contents-#{architecture}";
+ SrcPackages "dists/#{code_name}/#{component}/source/Sources";
+};
+ CONF
+ end
+
+ conf << <<-CONF
+
+Tree "dists/#{code_name}" {
+ Sections "#{component}";
+ Architectures "#{apt_architectures.join(" ")} source";
+};
+ CONF
+
+ conf
+ end
+
+ def generate_apt_ftp_archive_release_conf(code_name, component)
+ <<-CONF
+APT::FTPArchive::Release::Origin "#{apt_repository_label}";
+APT::FTPArchive::Release::Label "#{apt_repository_label}";
+APT::FTPArchive::Release::Architectures "#{apt_architectures.join(" ")}";
+APT::FTPArchive::Release::Codename "#{code_name}";
+APT::FTPArchive::Release::Suite "#{code_name}";
+APT::FTPArchive::Release::Components "#{component}";
+APT::FTPArchive::Release::Description "#{apt_repository_description}";
+ CONF
+ end
+
+ def apt_update(base_dir, incoming_dir, merged_dir)
+ apt_targets.each do |distribution, code_name, component|
+ distribution_dir = "#{incoming_dir}/#{distribution}"
+ pool_dir = "#{distribution_dir}/pool/#{code_name}"
+ next unless File.exist?(pool_dir)
+ dists_dir = "#{distribution_dir}/dists/#{code_name}"
+ rm_rf(dists_dir, verbose: verbose?)
+ generate_apt_release(dists_dir, code_name, component, "source")
+ apt_architectures.each do |architecture|
+ generate_apt_release(dists_dir, code_name, component, architecture)
+ end
+
+ generate_conf_file = Tempfile.new("apt-ftparchive-generate.conf")
+ File.open(generate_conf_file.path, "w") do |conf|
+ conf.puts(generate_apt_ftp_archive_generate_conf(code_name,
+ component))
+ end
+ cd(distribution_dir, verbose: verbose?) do
+ sh("apt-ftparchive",
+ "generate",
+ generate_conf_file.path,
+ out: default_output,
+ verbose: verbose?)
+ end
+
+ Dir.glob("#{dists_dir}/Release*") do |release|
+ rm_f(release, verbose: verbose?)
+ end
+ Dir.glob("#{distribution_dir}/*.db") do |db|
+ rm_f(db, verbose: verbose?)
+ end
+ release_conf_file = Tempfile.new("apt-ftparchive-release.conf")
+ File.open(release_conf_file.path, "w") do |conf|
+ conf.puts(generate_apt_ftp_archive_release_conf(code_name,
+ component))
+ end
+ release_file = Tempfile.new("apt-ftparchive-release")
+ sh("apt-ftparchive",
+ "-c", release_conf_file.path,
+ "release",
+ dists_dir,
+ out: release_file.path,
+ verbose: verbose?)
+ mv(release_file.path, "#{dists_dir}/Release", verbose: verbose?)
+
+ base_dists_dir = "#{base_dir}/#{distribution}/dists/#{code_name}"
+ merged_dists_dir = "#{merged_dir}/#{distribution}/dists/#{code_name}"
+ rm_rf(merged_dists_dir)
+ merger = APTDistsMerge::Merger.new(base_dists_dir,
+ dists_dir,
+ merged_dists_dir)
+ merger.merge
+
+ in_release_path = "#{merged_dists_dir}/InRelease"
+ release_path = "#{merged_dists_dir}/Release"
+ signed_release_path = "#{release_path}.gpg"
+ sh("gpg",
+ "--sign",
+ "--detach-sign",
+ "--armor",
+ "--local-user", gpg_key_id,
+ "--output", signed_release_path,
+ release_path,
+ out: default_output,
+ verbose: verbose?)
+ sh("gpg",
+ "--clear-sign",
+ "--local-user", gpg_key_id,
+ "--output", in_release_path,
+ release_path,
+ out: default_output,
+ verbose: verbose?)
+ end
+ end
+
+ def define_apt_staging_tasks
+ namespace :apt do
+ namespace :staging do
+ desc "Prepare staging environment for APT repositories"
+ task :prepare do
+ apt_distributions.each do |distribution|
+ prepare_staging(distribution)
+ end
+ end
+
+ desc "Delete staging environment for APT repositories"
+ task :delete do
+ apt_distributions.each do |distribution|
+ delete_staging(distribution)
+ end
+ end
+ end
+ end
+ end
+
+ def define_apt_rc_tasks
+ namespace :apt do
+ namespace :rc do
+ base_dir = "#{apt_rc_repositories_dir}/base"
+ incoming_dir = "#{apt_rc_repositories_dir}/incoming"
+ merged_dir = "#{apt_rc_repositories_dir}/merged"
+ upload_dir = "#{apt_rc_repositories_dir}/upload"
+
+ desc "Copy .deb packages"
+ task :copy do
+ apt_targets.each do |distribution, code_name, component|
+ progress_label = "Copying: #{distribution} #{code_name}"
+ progress_reporter = ProgressReporter.new(progress_label)
+
+ distribution_dir = "#{incoming_dir}/#{distribution}"
+ pool_dir = "#{distribution_dir}/pool/#{code_name}"
+ rm_rf(pool_dir, verbose: verbose?)
+ mkdir_p(pool_dir, verbose: verbose?)
+ source_dir_prefix = "#{artifacts_dir}/#{distribution}-#{code_name}"
+ Dir.glob("#{source_dir_prefix}*/**/*") do |path|
+ next if File.directory?(path)
+ base_name = File.basename(path)
+ if base_name.start_with?("apache-arrow-apt-source")
+ package_name = "apache-arrow-apt-source"
+ else
+ package_name = "apache-arrow"
+ end
+ destination_path = [
+ pool_dir,
+ component,
+ package_name[0],
+ package_name,
+ base_name,
+ ].join("/")
+ copy_artifact(path,
+ destination_path,
+ progress_reporter)
+ case base_name
+ when /\A[^_]+-apt-source_.*\.deb\z/
+ latest_apt_source_package_path = [
+ distribution_dir,
+ "#{package_name}-latest-#{code_name}.deb"
+ ].join("/")
+ copy_artifact(path,
+ latest_apt_source_package_path,
+ progress_reporter)
+ end
+ end
+ progress_reporter.finish
+ end
+ end
+
+ desc "Download dists/ for RC APT repositories"
+ task :download do
+ apt_distributions.each do |distribution|
+ not_checksum_pattern = /.+(?<!\.asc|\.sha512)\z/
+ base_distribution_dir = "#{base_dir}/#{distribution}"
+ pattern = /\Adists\/#{not_checksum_pattern}/
+ download_distribution(distribution,
+ base_distribution_dir,
+ :base,
+ pattern: pattern)
+ end
+ end
+
+ desc "Sign .deb packages"
+ task :sign do
+ apt_distributions.each do |distribution|
+ distribution_dir = "#{incoming_dir}/#{distribution}"
+ Dir.glob("#{distribution_dir}/**/*.dsc") do |path|
+ begin
+ sh({"LANG" => "C"},
+ "gpg",
+ "--verify",
+ path,
+ out: IO::NULL,
+ err: IO::NULL,
+ verbose: false)
+ rescue
+ sh("debsign",
+ "--no-re-sign",
+ "-k#{gpg_key_id}",
+ path,
+ out: default_output,
+ verbose: verbose?)
+ end
+ end
+ sign_dir(distribution, distribution_dir)
+ end
+ end
+
+ desc "Update RC APT repositories"
+ task :update do
+ apt_update(base_dir, incoming_dir, merged_dir)
+ apt_targets.each do |distribution, code_name, component|
+ dists_dir = "#{merged_dir}/#{distribution}/dists/#{code_name}"
+ next unless File.exist?(dists_dir)
+ sign_dir("#{distribution} #{code_name}",
+ dists_dir)
+ end
+ end
+
+ desc "Upload .deb packages and RC APT repositories"
+ task :upload do
+ apt_distributions.each do |distribution|
+ upload_distribution_dir = "#{upload_dir}/#{distribution}"
+ incoming_distribution_dir = "#{incoming_dir}/#{distribution}"
+ merged_dists_dir = "#{merged_dir}/#{distribution}/dists"
+
+ rm_rf(upload_distribution_dir, verbose: verbose?)
+ mkdir_p(upload_distribution_dir, verbose: verbose?)
+ Dir.glob("#{incoming_distribution_dir}/*") do |path|
+ next if File.basename(path) == "dists"
+ cp_r(path,
+ upload_distribution_dir,
+ preserve: true,
+ verbose: verbose?)
+ end
+ cp_r(merged_dists_dir,
+ upload_distribution_dir,
+ preserve: true,
+ verbose: verbose?)
+ write_uploaded_files(upload_distribution_dir)
+ uploader = ArtifactoryUploader.new(api_key: artifactory_api_key,
+ distribution: distribution,
+ rc: rc,
+ source: upload_distribution_dir,
+ staging: staging?)
+ uploader.upload
+ end
+ end
+ end
+
+ desc "Release RC APT repositories"
+ apt_rc_tasks = [
+ "apt:rc:copy",
+ "apt:rc:download",
+ "apt:rc:sign",
+ "apt:rc:update",
+ "apt:rc:upload",
+ ]
+ apt_rc_tasks.unshift("apt:staging:prepare") if staging?
+ task :rc => apt_rc_tasks
+ end
+ end
+
+ def define_apt_release_tasks
+ directory apt_release_repositories_dir
+
+ namespace :apt do
+ namespace :release do
+ desc "Download RC APT repositories"
+ task :download => apt_release_repositories_dir do
+ apt_distributions.each do |distribution|
+ distribution_dir = "#{apt_release_repositories_dir}/#{distribution}"
+ download_distribution(distribution,
+ distribution_dir,
+ :rc,
+ list: uploaded_files_name)
+ end
+ end
+
+ desc "Upload release APT repositories"
+ task :upload => apt_release_repositories_dir do
+ apt_distributions.each do |distribution|
+ distribution_dir = "#{apt_release_repositories_dir}/#{distribution}"
+ uploader = ArtifactoryUploader.new(api_key: artifactory_api_key,
+ distribution: distribution,
+ source: distribution_dir,
+ staging: staging?)
+ uploader.upload
+ end
+ end
+ end
+
+ desc "Release APT repositories"
+ apt_release_tasks = [
+ "apt:release:download",
+ "apt:release:upload",
+ ]
+ task :release => apt_release_tasks
+ end
+ end
+
+ def define_apt_tasks
+ define_apt_staging_tasks
+ define_apt_rc_tasks
+ define_apt_release_tasks
+ end
+
+ def yum_rc_repositories_dir
+ "#{rc_dir}/yum/repositories"
+ end
+
+ def yum_release_repositories_dir
+ "#{release_dir}/yum/repositories"
+ end
+
+ def available_yum_targets
+ [
+ ["almalinux", "8"],
+ ["amazon-linux", "2"],
+ ["centos", "7"],
+ ["centos", "8"],
+ ]
+ end
+
+ def yum_targets
+ env_yum_targets = (ENV["YUM_TARGETS"] || "").split(",")
+ if env_yum_targets.empty?
+ available_yum_targets
+ else
+ available_yum_targets.select do |distribution, distribution_version|
+ env_yum_targets.any? do |env_yum_target|
+ if /\d/.match?(env_yum_target)
+ env_yum_target.start_with?("#{distribution}-#{distribution_version}")
+ else
+ env_yum_target == distribution
+ end
+ end
+ end
+ end
+ end
+
+ def yum_distributions
+ yum_targets.collect(&:first).uniq
+ end
+
+ def yum_architectures
+ [
+ "aarch64",
+ "x86_64",
+ ]
+ end
+
+ def signed_rpm?(rpm)
+ IO.pipe do |input, output|
+ system("rpm", "--checksig", rpm, out: output)
+ output.close
+ signature = input.gets.sub(/\A#{Regexp.escape(rpm)}: /, "")
+ signature.split.include?("signatures")
+ end
+ end
+
+ def sign_rpms(directory)
+ thread_pool = ThreadPool.new(:gpg) do |rpm|
+ unless signed_rpm?(rpm)
+ sh("rpm",
+ "-D", "_gpg_name #{gpg_key_id}",
+ "-D", "__gpg /usr/bin/gpg",
+ "-D", "__gpg_check_password_cmd /bin/true true",
+ "--resign",
+ rpm,
+ out: default_output,
+ verbose: verbose?)
+ end
+ end
+ Dir.glob("#{directory}/**/*.rpm") do |rpm|
+ thread_pool << rpm
+ end
+ thread_pool.join
+ end
+
+ def rpm_sign(directory)
+ unless system("rpm", "-q",
+ rpm_gpg_key_package_name(gpg_key_id),
+ out: IO::NULL)
+ gpg_key = Tempfile.new(["apache-arrow-binary", ".asc"])
+ sh("gpg",
+ "--armor",
+ "--export", gpg_key_id,
+ out: gpg_key.path,
+ verbose: verbose?)
+ sh("rpm",
+ "--import", gpg_key.path,
+ out: default_output,
+ verbose: verbose?)
+ gpg_key.close!
+ end
+
+ yum_targets.each do |distribution, distribution_version|
+ source_dir = [
+ directory,
+ distribution,
+ distribution_version,
+ ].join("/")
+ sign_rpms(source_dir)
+ end
+ end
+
+ def yum_update(base_dir, incoming_dir)
+ yum_targets.each do |distribution, distribution_version|
+ target_dir = "#{incoming_dir}/#{distribution}/#{distribution_version}"
+ target_dir = Pathname(target_dir)
+ next unless target_dir.directory?
+ Dir.glob("#{target_dir}/**/repodata") do |repodata|
+ rm_rf(repodata, verbose: verbose?)
+ end
+ target_dir.glob("*") do |arch_dir|
+ next unless arch_dir.directory?
+ base_repodata_dir = [
+ base_dir,
+ distribution,
+ distribution_version,
+ File.basename(arch_dir),
+ "repodata",
+ ].join("/")
+ if File.exist?(base_repodata_dir)
+ cp_r(base_repodata_dir,
+ arch_dir.to_s,
+ preserve: true,
+ verbose: verbose?)
+ end
+ packages = Tempfile.new("createrepo-c-packages")
+ Pathname.glob("#{arch_dir}/*/*.rpm") do |rpm|
+ relative_rpm = rpm.relative_path_from(arch_dir)
+ packages.puts(relative_rpm.to_s)
+ end
+ packages.close
+ sh("createrepo_c",
+ "--pkglist", packages.path,
+ "--recycle-pkglist",
+ "--retain-old-md-by-age=0",
+ "--skip-stat",
+ "--update",
+ arch_dir.to_s,
+ out: default_output,
+ verbose: verbose?)
+ end
+ end
+ end
+
+ def define_yum_staging_tasks
+ namespace :yum do
+ namespace :staging do
+ desc "Prepare staging environment for Yum repositories"
+ task :prepare do
+ yum_distributions.each do |distribution|
+ prepare_staging(distribution)
+ end
+ end
+
+ desc "Delete staging environment for Yum repositories"
+ task :delete do
+ yum_distributions.each do |distribution|
+ delete_staging(distribution)
+ end
+ end
+ end
+ end
+ end
+
+ def define_yum_rc_tasks
+ namespace :yum do
+ namespace :rc do
+ base_dir = "#{yum_rc_repositories_dir}/base"
+ incoming_dir = "#{yum_rc_repositories_dir}/incoming"
+ upload_dir = "#{yum_rc_repositories_dir}/upload"
+
+ desc "Copy RPM packages"
+ task :copy do
+ yum_targets.each do |distribution, distribution_version|
+ progress_label = "Copying: #{distribution} #{distribution_version}"
+ progress_reporter = ProgressReporter.new(progress_label)
+
+ destination_prefix = [
+ incoming_dir,
+ distribution,
+ distribution_version,
+ ].join("/")
+ rm_rf(destination_prefix, verbose: verbose?)
+ source_dir_prefix =
+ "#{artifacts_dir}/#{distribution}-#{distribution_version}"
+ Dir.glob("#{source_dir_prefix}*/**/*") do |path|
+ next if File.directory?(path)
+ base_name = File.basename(path)
+ type = base_name.split(".")[-2]
+ destination_paths = []
+ case type
+ when "src"
+ destination_paths << [
+ destination_prefix,
+ "Source",
+ "SPackages",
+ base_name,
+ ].join("/")
+ when "noarch"
+ yum_architectures.each do |architecture|
+ destination_paths << [
+ destination_prefix,
+ architecture,
+ "Packages",
+ base_name,
+ ].join("/")
+ end
+ else
+ destination_paths << [
+ destination_prefix,
+ type,
+ "Packages",
+ base_name,
+ ].join("/")
+ end
+ destination_paths.each do |destination_path|
+ copy_artifact(path,
+ destination_path,
+ progress_reporter)
+ end
+ case base_name
+ when /\A(apache-arrow-release)-.*\.noarch\.rpm\z/
+ package_name = $1
+ latest_release_package_path = [
+ destination_prefix,
+ "#{package_name}-latest.rpm"
+ ].join("/")
+ copy_artifact(path,
+ latest_release_package_path,
+ progress_reporter)
+ end
+ end
+
+ progress_reporter.finish
+ end
+ end
+
+ desc "Download repodata for RC Yum repositories"
+ task :download do
+ yum_distributions.each do |distribution|
+ distribution_dir = "#{base_dir}/#{distribution}"
+ download_distribution(distribution,
+ distribution_dir,
+ :base,
+ pattern: /\/repodata\//)
+ end
+ end
+
+ desc "Sign RPM packages"
+ task :sign do
+ rpm_sign(incoming_dir)
+ yum_targets.each do |distribution, distribution_version|
+ source_dir = [
+ incoming_dir,
+ distribution,
+ distribution_version,
+ ].join("/")
+ sign_dir("#{distribution}-#{distribution_version}",
+ source_dir)
+ end
+ end
+
+ desc "Update RC Yum repositories"
+ task :update do
+ yum_update(base_dir, incoming_dir)
+ yum_targets.each do |distribution, distribution_version|
+ target_dir = [
+ incoming_dir,
+ distribution,
+ distribution_version,
+ ].join("/")
+ target_dir = Pathname(target_dir)
+ next unless target_dir.directory?
+ target_dir.glob("*") do |arch_dir|
+ next unless arch_dir.directory?
+ sign_label =
+ "#{distribution}-#{distribution_version} #{arch_dir.basename}"
+ sign_dir(sign_label,
+ arch_dir.to_s)
+ end
+ end
+ end
+
+ desc "Upload RC Yum repositories"
+ task :upload => yum_rc_repositories_dir do
+ yum_distributions.each do |distribution|
+ incoming_target_dir = "#{incoming_dir}/#{distribution}"
+ upload_target_dir = "#{upload_dir}/#{distribution}"
+
+ rm_rf(upload_target_dir, verbose: verbose?)
+ mkdir_p(upload_target_dir, verbose: verbose?)
+ cp_r(Dir.glob("#{incoming_target_dir}/*"),
+ upload_target_dir.to_s,
+ preserve: true,
+ verbose: verbose?)
+ write_uploaded_files(upload_target_dir)
+
+ uploader = ArtifactoryUploader.new(api_key: artifactory_api_key,
+ distribution: distribution,
+ rc: rc,
+ source: upload_target_dir,
+ staging: staging?,
+ sync: true,
+ sync_pattern: /\/repodata\//)
+ uploader.upload
+ end
+ end
+ end
+
+ desc "Release RC Yum packages"
+ yum_rc_tasks = [
+ "yum:rc:copy",
+ "yum:rc:download",
+ "yum:rc:sign",
+ "yum:rc:update",
+ "yum:rc:upload",
+ ]
+ yum_rc_tasks.unshift("yum:staging:prepare") if staging?
+ task :rc => yum_rc_tasks
+ end
+ end
+
+ def define_yum_release_tasks
+ directory yum_release_repositories_dir
+
+ namespace :yum do
+ namespace :release do
+ desc "Download RC Yum repositories"
+ task :download => yum_release_repositories_dir do
+ yum_distributions.each do |distribution|
+ distribution_dir = "#{yum_release_repositories_dir}/#{distribution}"
+ download_distribution(distribution,
+ distribution_dir,
+ :rc,
+ list: uploaded_files_name)
+ end
+ end
+
+ desc "Upload release Yum repositories"
+ task :upload => yum_release_repositories_dir do
+ yum_distributions.each do |distribution|
+ distribution_dir = "#{yum_release_repositories_dir}/#{distribution}"
+ uploader =
+ ArtifactoryUploader.new(api_key: artifactory_api_key,
+ distribution: distribution,
+ source: distribution_dir,
+ staging: staging?,
+ sync: true,
+ sync_pattern: /\/repodata\//)
+ uploader.upload
+ end
+ end
+ end
+
+ desc "Release Yum packages"
+ yum_release_tasks = [
+ "yum:release:download",
+ "yum:release:upload",
+ ]
+ task :release => yum_release_tasks
+ end
+ end
+
+ def define_yum_tasks
+ define_yum_staging_tasks
+ define_yum_rc_tasks
+ define_yum_release_tasks
+ end
+
+ def define_generic_data_rc_tasks(label,
+ id,
+ rc_dir,
+ target_files_glob)
+ directory rc_dir
+
+ namespace id do
+ namespace :rc do
+ desc "Copy #{label} packages"
+ task :copy => rc_dir do
+ progress_label = "Copying: #{label}"
+ progress_reporter = ProgressReporter.new(progress_label)
+
+ Pathname(artifacts_dir).glob(target_files_glob) do |path|
+ next if path.directory?
+ destination_path = [
+ rc_dir,
+ path.basename.to_s,
+ ].join("/")
+ copy_artifact(path, destination_path, progress_reporter)
+ end
+
+ progress_reporter.finish
+ end
+
+ desc "Sign #{label} packages"
+ task :sign => rc_dir do
+ sign_dir(label, rc_dir)
+ end
+
+ desc "Upload #{label} packages"
+ task :upload do
+ uploader =
+ ArtifactoryUploader.new(api_key: artifactory_api_key,
+ destination_prefix: full_version,
+ distribution: id.to_s,
+ rc: rc,
+ source: rc_dir,
+ staging: staging?)
+ uploader.upload
+ end
+ end
+
+ desc "Release RC #{label} packages"
+ rc_tasks = [
+ "#{id}:rc:copy",
+ "#{id}:rc:sign",
+ "#{id}:rc:upload",
+ ]
+ task :rc => rc_tasks
+ end
+ end
+
+ def define_generic_data_release_tasks(label, id, release_dir)
+ directory release_dir
+
+ namespace id do
+ namespace :release do
+ desc "Download RC #{label} packages"
+ task :download => release_dir do
+ download_distribution(id.to_s,
+ release_dir,
+ :rc,
+ prefix: "#{full_version}")
+ end
+
+ desc "Upload release #{label} packages"
+ task :upload => release_dir do
+ uploader = ArtifactoryUploader.new(api_key: artifactory_api_key,
+ destination_prefix: version,
+ distribution: id.to_s,
+ source: release_dir,
+ staging: staging?)
+ uploader.upload
+ end
+ end
+
+ desc "Release #{label} packages"
+ release_tasks = [
+ "#{id}:release:download",
+ "#{id}:release:upload",
+ ]
+ task :release => release_tasks
+ end
+ end
+
+ def define_generic_data_tasks(label,
+ id,
+ rc_dir,
+ release_dir,
+ target_files_glob)
+ define_generic_data_rc_tasks(label, id, rc_dir, target_files_glob)
+ define_generic_data_release_tasks(label, id, release_dir)
+ end
+
+ def define_python_tasks
+ define_generic_data_tasks("Python",
+ :python,
+ "#{rc_dir}/python/#{full_version}",
+ "#{release_dir}/python/#{full_version}",
+ "{python-sdist,wheel-*}/**/*")
+ end
+
+ def define_nuget_tasks
+ define_generic_data_tasks("NuGet",
+ :nuget,
+ "#{rc_dir}/nuget/#{full_version}",
+ "#{release_dir}/nuget/#{full_version}",
+ "nuget/**/*")
+ end
+
+ def define_summary_tasks
+ namespace :summary do
+ desc "Show RC summary"
+ task :rc do
+ suffix = ""
+ suffix << "-staging" if staging?
+ puts(<<-SUMMARY)
+Success! The release candidate binaries are available here:
+ https://apache.jfrog.io/artifactory/arrow/almalinux#{suffix}-rc/
+ https://apache.jfrog.io/artifactory/arrow/amazon-linux#{suffix}-rc/
+ https://apache.jfrog.io/artifactory/arrow/centos#{suffix}-rc/
+ https://apache.jfrog.io/artifactory/arrow/debian#{suffix}-rc/
+ https://apache.jfrog.io/artifactory/arrow/nuget#{suffix}-rc/#{full_version}
+ https://apache.jfrog.io/artifactory/arrow/python#{suffix}-rc/#{full_version}
+ https://apache.jfrog.io/artifactory/arrow/ubuntu#{suffix}-rc/
+ SUMMARY
+ end
+
+ desc "Show release summary"
+ task :release do
+ suffix = ""
+ suffix << "-staging" if staging?
+ puts(<<-SUMMARY)
+Success! The release binaries are available here:
+ https://apache.jfrog.io/artifactory/arrow/almalinux#{suffix}/
+ https://apache.jfrog.io/artifactory/arrow/amazon-linux#{suffix}/
+ https://apache.jfrog.io/artifactory/arrow/centos#{suffix}/
+ https://apache.jfrog.io/artifactory/arrow/debian#{suffix}/
+ https://apache.jfrog.io/artifactory/arrow/nuget#{suffix}/#{version}
+ https://apache.jfrog.io/artifactory/arrow/python#{suffix}/#{version}
+ https://apache.jfrog.io/artifactory/arrow/ubuntu#{suffix}/
+ SUMMARY
+ end
+ end
+ end
+end
diff --git a/src/arrow/dev/release/binary/.dockerignore b/src/arrow/dev/release/binary/.dockerignore
new file mode 100644
index 000000000..f2c46d8ce
--- /dev/null
+++ b/src/arrow/dev/release/binary/.dockerignore
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+/tmp/
diff --git a/src/arrow/dev/release/binary/Dockerfile b/src/arrow/dev/release/binary/Dockerfile
new file mode 100644
index 000000000..a21b32dd7
--- /dev/null
+++ b/src/arrow/dev/release/binary/Dockerfile
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+FROM debian:bullseye
+
+ENV DEBIAN_FRONTEND noninteractive
+
+ARG DEBUG
+
+RUN \
+ quiet=$([ "${DEBUG}" = "yes" ] || echo "-qq") && \
+ apt update ${quiet} && \
+ apt install -y -V ${quiet} \
+ apt-utils \
+ createrepo-c \
+ devscripts \
+ gpg \
+ locales \
+ openssh-server \
+ rake \
+ rpm \
+ ruby \
+ sudo && \
+ apt clean && \
+ rm -rf /var/lib/apt/lists/*
+
+RUN gem install apt-dists-merge -v ">= 1.0.2"
+
+RUN locale-gen en_US.UTF-8
+
+RUN mkdir -p /run/sshd
+RUN echo "StreamLocalBindUnlink yes" >> /etc/ssh/sshd_config
+
+ENV ARROW_USER arrow
+ENV ARROW_UID 10000
+
+RUN \
+ groupadd --gid ${ARROW_UID} ${ARROW_USER} && \
+ useradd --uid ${ARROW_UID} --gid ${ARROW_UID} --create-home ${ARROW_USER} && \
+ mkdir -p /home/arrow/.gnupg /home/arrow/.ssh && \
+ chown -R arrow: /home/arrow/.gnupg /home/arrow/.ssh && \
+ chmod -R og-rwx /home/arrow/.gnupg /home/arrow/.ssh && \
+ echo "${ARROW_USER} ALL=(ALL:ALL) NOPASSWD:ALL" | \
+ EDITOR=tee visudo -f /etc/sudoers.d/arrow
+
+COPY id_rsa.pub /home/arrow/.ssh/authorized_keys
+RUN \
+ chown -R arrow: /home/arrow/.ssh && \
+ chmod -R og-rwx /home/arrow/.ssh
+
+COPY runner.sh /home/arrow/runner.sh
+RUN \
+ chown -R arrow: /home/arrow/runner.sh && \
+ chmod +x /home/arrow/runner.sh
+
+EXPOSE 22
diff --git a/src/arrow/dev/release/binary/runner.sh b/src/arrow/dev/release/binary/runner.sh
new file mode 100755
index 000000000..465d60d62
--- /dev/null
+++ b/src/arrow/dev/release/binary/runner.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -u
+
+export LANG=C
+
+target_dir=/host/binary/tmp
+original_owner=$(stat --format=%u ${target_dir})
+original_group=$(stat --format=%g ${target_dir})
+
+sudo -H chown -R ${USER}: ${target_dir}
+restore_owner() {
+ sudo -H chown -R ${original_owner}:${original_group} ${target_dir}
+}
+trap restore_owner EXIT
+
+cd /host
+
+"$@"
diff --git a/src/arrow/dev/release/check-rat-report.py b/src/arrow/dev/release/check-rat-report.py
new file mode 100644
index 000000000..a5718103a
--- /dev/null
+++ b/src/arrow/dev/release/check-rat-report.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+##############################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+##############################################################################
+import fnmatch
+import re
+import sys
+import xml.etree.ElementTree as ET
+
+if len(sys.argv) != 3:
+ sys.stderr.write("Usage: %s exclude_globs.lst rat_report.xml\n" %
+ sys.argv[0])
+ sys.exit(1)
+
+exclude_globs_filename = sys.argv[1]
+xml_filename = sys.argv[2]
+
+globs = [line.strip() for line in open(exclude_globs_filename, "r")]
+
+tree = ET.parse(xml_filename)
+root = tree.getroot()
+resources = root.findall('resource')
+
+all_ok = True
+for r in resources:
+ approvals = r.findall('license-approval')
+ if not approvals or approvals[0].attrib['name'] == 'true':
+ continue
+ clean_name = re.sub('^[^/]+/', '', r.attrib['name'])
+ excluded = False
+ for g in globs:
+ if fnmatch.fnmatch(clean_name, g):
+ excluded = True
+ break
+ if not excluded:
+ sys.stdout.write("NOT APPROVED: %s (%s): %s\n" % (
+ clean_name, r.attrib['name'], approvals[0].attrib['name']))
+ all_ok = False
+
+if not all_ok:
+ sys.exit(1)
+
+print('OK')
+sys.exit(0)
diff --git a/src/arrow/dev/release/download_rc_binaries.py b/src/arrow/dev/release/download_rc_binaries.py
new file mode 100755
index 000000000..3e3d0f7d3
--- /dev/null
+++ b/src/arrow/dev/release/download_rc_binaries.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+import argparse
+import concurrent.futures as cf
+import functools
+import os
+import subprocess
+import urllib.request
+
+
+ARTIFACTORY_ROOT = "https://apache.jfrog.io/artifactory/arrow"
+DEFAULT_PARALLEL_DOWNLOADS = 8
+
+
+class Artifactory:
+
+ def get_file_list(self, prefix):
+ def traverse(directory, files, directories):
+ url = f'{ARTIFACTORY_ROOT}/{directory}'
+ response = urllib.request.urlopen(url).read().decode()
+ paths = re.findall('<a href="(.+?)"', response)
+ for path in paths:
+ if path == '../':
+ continue
+ resolved_path = f'{directory}{path}'
+ if path.endswith('/'):
+ directories.append(resolved_path)
+ else:
+ files.append(resolved_path)
+ files = []
+ if not prefix.endswith('/'):
+ prefix += '/'
+ directories = [prefix]
+ while len(directories) > 0:
+ directory = directories.pop()
+ traverse(directory, files, directories)
+ return files
+
+ def download_files(self, files, dest=None, num_parallel=None,
+ re_match=None):
+ """
+        Download files from Artifactory in parallel. If a file already
+        exists locally, it is overwritten by the newly downloaded copy.
+
+        Parameters
+        ----------
+        files : List[str]
+            File paths listed from Artifactory
+ dest : str, default None
+ Defaults to current working directory
+ num_parallel : int, default 8
+ Number of files to download in parallel. If set to None, uses
+ default
+ """
+ if dest is None:
+ dest = os.getcwd()
+ if num_parallel is None:
+ num_parallel = DEFAULT_PARALLEL_DOWNLOADS
+
+ if re_match is not None:
+ regex = re.compile(re_match)
+ files = [x for x in files if regex.match(x)]
+
+ if num_parallel == 1:
+ for path in files:
+ self._download_file(dest, path)
+ else:
+ parallel_map_terminate_early(
+ functools.partial(self._download_file, dest),
+ files,
+ num_parallel
+ )
+
+ def _download_file(self, dest, path):
+ base, filename = os.path.split(path)
+
+ dest_dir = os.path.join(dest, base)
+ os.makedirs(dest_dir, exist_ok=True)
+
+ dest_path = os.path.join(dest_dir, filename)
+
+ print("Downloading {} to {}".format(path, dest_path))
+
+ url = f'{ARTIFACTORY_ROOT}/{path}'
+
+ cmd = [
+ 'curl', '--fail', '--location', '--retry', '5',
+ '--output', dest_path, url
+ ]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = proc.communicate()
+ if proc.returncode != 0:
+ raise Exception("Downloading {} failed\nstdout: {}\nstderr: {}"
+ .format(path, stdout, stderr))
+
+
+def parallel_map_terminate_early(f, iterable, num_parallel):
+ tasks = []
+ with cf.ProcessPoolExecutor(num_parallel) as pool:
+ for v in iterable:
+ tasks.append(pool.submit(functools.partial(f, v)))
+
+ for task in cf.as_completed(tasks):
+ if task.exception() is not None:
+ e = task.exception()
+ for task in tasks:
+ task.cancel()
+ raise e
+
+
+ARROW_REPOSITORY_PACKAGE_TYPES = ['centos', 'debian', 'ubuntu']
+ARROW_STANDALONE_PACKAGE_TYPES = ['nuget', 'python']
+ARROW_PACKAGE_TYPES = \
+ ARROW_REPOSITORY_PACKAGE_TYPES + \
+ ARROW_STANDALONE_PACKAGE_TYPES
+
+
+def download_rc_binaries(version, rc_number, re_match=None, dest=None,
+ num_parallel=None, target_package_type=None):
+ artifactory = Artifactory()
+
+ version_string = '{}-rc{}'.format(version, rc_number)
+ if target_package_type:
+ package_types = [target_package_type]
+ else:
+ package_types = ARROW_PACKAGE_TYPES
+ for package_type in package_types:
+ if package_type in ARROW_REPOSITORY_PACKAGE_TYPES:
+ prefix = f'{package_type}-rc'
+ else:
+ prefix = f'{package_type}-rc/{version_string}'
+ files = artifactory.get_file_list(prefix)
+ if package_type in ARROW_REPOSITORY_PACKAGE_TYPES:
+ version_pattern = re.compile(r'\d+\.\d+\.\d+')
+
+ def is_old_release(path):
+ match = version_pattern.search(path)
+ if not match:
+ return False
+ return match[0] != version
+ files = [x for x in files if not is_old_release(x)]
+ artifactory.download_files(files, re_match=re_match, dest=dest,
+ num_parallel=num_parallel)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='Download release candidate binaries'
+ )
+ parser.add_argument('version', type=str, help='The version number')
+ parser.add_argument('rc_number', type=int,
+ help='The release candidate number, e.g. 0, 1, etc')
+ parser.add_argument('-e', '--regexp', type=str, default=None,
+ help=('Regular expression to match on file names '
+ 'to only download certain files'))
+ parser.add_argument('--dest', type=str, default=os.getcwd(),
+ help='The output folder for the downloaded files')
+ parser.add_argument('--num_parallel', type=int, default=8,
+ help='The number of concurrent downloads to do')
+ parser.add_argument('--package_type', type=str, default=None,
+ help='The package type to be downloaded')
+ args = parser.parse_args()
+
+ download_rc_binaries(args.version, args.rc_number, dest=args.dest,
+ re_match=args.regexp, num_parallel=args.num_parallel,
+ target_package_type=args.package_type)
diff --git a/src/arrow/dev/release/post-01-upload.sh b/src/arrow/dev/release/post-01-upload.sh
new file mode 100755
index 000000000..5671c3746
--- /dev/null
+++ b/src/arrow/dev/release/post-01-upload.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -e
+set -u
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc=$2
+
+tmp_dir=tmp-apache-arrow-dist
+
+echo "Recreate temporary directory: ${tmp_dir}"
+rm -rf ${tmp_dir}
+mkdir -p ${tmp_dir}
+
+echo "Clone dev dist repository"
+svn \
+ co \
+ https://dist.apache.org/repos/dist/dev/arrow/apache-arrow-${version}-rc${rc} \
+ ${tmp_dir}/dev
+
+echo "Clone release dist repository"
+svn co https://dist.apache.org/repos/dist/release/arrow ${tmp_dir}/release
+
+echo "Copy ${version}-rc${rc} to release working copy"
+release_version=arrow-${version}
+mkdir -p ${tmp_dir}/release/${release_version}
+cp -r ${tmp_dir}/dev/* ${tmp_dir}/release/${release_version}/
+svn add ${tmp_dir}/release/${release_version}
+
+echo "Keep only the three most recent versions"
+old_releases=$(
+ svn ls ${tmp_dir}/release/ | \
+ grep -E '^arrow-[0-9\.]+' | \
+ sort --version-sort --reverse | \
+ tail -n +4
+)
+for old_release_version in $old_releases; do
+ echo "Remove old release ${old_release_version}"
+ svn delete ${tmp_dir}/release/${old_release_version}
+done
+
+echo "Commit release"
+svn ci -m "Apache Arrow ${version}" ${tmp_dir}/release
+
+echo "Clean up"
+rm -rf ${tmp_dir}
+
+echo "Success! The release is available here:"
+echo " https://dist.apache.org/repos/dist/release/arrow/${release_version}"
diff --git a/src/arrow/dev/release/post-02-binary.sh b/src/arrow/dev/release/post-02-binary.sh
new file mode 100755
index 000000000..b1b41f9fb
--- /dev/null
+++ b/src/arrow/dev/release/post-02-binary.sh
@@ -0,0 +1,101 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <version> <rc-num>"
+ exit
+fi
+
+version=$1
+rc=$2
+
+cd "${SOURCE_DIR}"
+
+if [ ! -f .env ]; then
+ echo "You must create $(pwd)/.env"
+ echo "You can use $(pwd)/.env.example as template"
+ exit 1
+fi
+. .env
+
+. utils-binary.sh
+
+# By default deploy all artifacts.
+# To deactivate one category, deactivate the category and all of its dependents.
+# To explicitly select one category, set DEPLOY_DEFAULT=0 DEPLOY_X=1.
+: ${DEPLOY_DEFAULT:=1}
+: ${DEPLOY_ALMALINUX:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_AMAZON_LINUX:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_CENTOS:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_DEBIAN:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_NUGET:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_PYTHON:=${DEPLOY_DEFAULT}}
+: ${DEPLOY_UBUNTU:=${DEPLOY_DEFAULT}}
+
+rake_tasks=()
+apt_targets=()
+yum_targets=()
+if [ ${DEPLOY_ALMALINUX} -gt 0 ]; then
+ rake_tasks+=(yum:release)
+ yum_targets+=(almalinux)
+fi
+if [ ${DEPLOY_AMAZON_LINUX} -gt 0 ]; then
+ rake_tasks+=(yum:release)
+ yum_targets+=(amazon-linux)
+fi
+if [ ${DEPLOY_CENTOS} -gt 0 ]; then
+ rake_tasks+=(yum:release)
+ yum_targets+=(centos)
+fi
+if [ ${DEPLOY_DEBIAN} -gt 0 ]; then
+ rake_tasks+=(apt:release)
+ apt_targets+=(debian)
+fi
+if [ ${DEPLOY_NUGET} -gt 0 ]; then
+ rake_tasks+=(nuget:release)
+fi
+if [ ${DEPLOY_PYTHON} -gt 0 ]; then
+ rake_tasks+=(python:release)
+fi
+if [ ${DEPLOY_UBUNTU} -gt 0 ]; then
+ rake_tasks+=(apt:release)
+ apt_targets+=(ubuntu)
+fi
+rake_tasks+=(summary:release)
+
+tmp_dir=binary/tmp
+mkdir -p "${tmp_dir}"
+
+docker_run \
+ ./runner.sh \
+ rake \
+ --trace \
+ "${rake_tasks[@]}" \
+ APT_TARGETS=$(IFS=,; echo "${apt_targets[*]}") \
+ ARTIFACTORY_API_KEY="${ARTIFACTORY_API_KEY}" \
+ ARTIFACTS_DIR="${tmp_dir}/artifacts" \
+ RC=${rc} \
+ STAGING=${STAGING:-no} \
+ VERSION=${version} \
+ YUM_TARGETS=$(IFS=,; echo "${yum_targets[*]}")
diff --git a/src/arrow/dev/release/post-03-website.sh b/src/arrow/dev/release/post-03-website.sh
new file mode 100755
index 000000000..7aceeaf59
--- /dev/null
+++ b/src/arrow/dev/release/post-03-website.sh
@@ -0,0 +1,266 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -u
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ARROW_DIR="${SOURCE_DIR}/../.."
+ARROW_SITE_DIR="${ARROW_DIR}/../arrow-site"
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <previous-version> <version>"
+ exit 1
+fi
+
+previous_version=$1
+version=$2
+
+branch_name=release-note-${version}
+release_dir="${ARROW_SITE_DIR}/_release"
+announce_file="${release_dir}/${version}.md"
+versions_yml="${ARROW_SITE_DIR}/_data/versions.yml"
+
+pushd "${ARROW_SITE_DIR}"
+git checkout master
+git checkout -b ${branch_name}
+popd
+
+pushd "${ARROW_DIR}"
+
+release_date=$(LANG=C date "+%-d %B %Y")
+previous_tag_date=$(git log -n 1 --pretty=%aI apache-arrow-${previous_version})
+rough_previous_release_date=$(date --date "${previous_tag_date}" +%s)
+rough_release_date=$(date +%s)
+rough_n_development_months=$((
+ (${rough_release_date} - ${rough_previous_release_date}) / (60 * 60 * 24 * 30)
+))
+
+git_tag=apache-arrow-${version}
+git_range=apache-arrow-${previous_version}..${git_tag}
+
+committers_command_line="git shortlog -csn ${git_range}"
+contributors_command_line="git shortlog -sn ${git_range}"
+
+committers=$(${committers_command_line})
+contributors=$(${contributors_command_line})
+
+n_commits=$(git log --pretty=oneline ${git_range} | wc -l)
+n_contributors=$(${contributors_command_line} | wc -l)
+
+git_tag_hash=$(git log -n 1 --pretty=%H ${git_tag})
+
+popd
+
+pushd "${ARROW_SITE_DIR}"
+
+# Add announce for the current version
+cat <<ANNOUNCE > "${announce_file}"
+---
+layout: default
+title: Apache Arrow ${version} Release
+permalink: /release/${version}.html
+---
+<!--
+{% comment %}
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+{% endcomment %}
+-->
+
+# Apache Arrow ${version} (${release_date})
+
+This is a major release covering more than ${rough_n_development_months} months of development.
+
+## Download
+
+* [**Source Artifacts**][1]
+* **Binary Artifacts**
+ * [For CentOS][2]
+ * [For Debian][3]
+ * [For Python][4]
+ * [For Ubuntu][5]
+* [Git tag][6]
+
+## Contributors
+
+This release includes ${n_commits} commits from ${n_contributors} distinct contributors.
+
+\`\`\`console
+$ ${contributors_command_line}
+ANNOUNCE
+
+echo "${contributors}" >> "${announce_file}"
+
+cat <<ANNOUNCE >> "${announce_file}"
+\`\`\`
+
+## Patch Committers
+
+The following Apache committers merged contributed patches to the repository.
+
+\`\`\`console
+$ ${committers_command_line}
+ANNOUNCE
+
+echo "${committers}" >> "${announce_file}"
+
+cat <<ANNOUNCE >> "${announce_file}"
+\`\`\`
+
+## Changelog
+
+ANNOUNCE
+
+archery release changelog generate ${version} | \
+ sed -e 's/^#/##/g' >> "${announce_file}"
+
+cat <<ANNOUNCE >> "${announce_file}"
+[1]: https://www.apache.org/dyn/closer.lua/arrow/arrow-${version}/
+[2]: https://apache.jfrog.io/artifactory/arrow/centos/
+[3]: https://apache.jfrog.io/artifactory/arrow/debian/
+[4]: https://apache.jfrog.io/artifactory/arrow/python/${version}/
+[5]: https://apache.jfrog.io/artifactory/arrow/ubuntu/
+[6]: https://github.com/apache/arrow/releases/tag/apache-arrow-${version}
+ANNOUNCE
+git add "${announce_file}"
+
+
+# Update index
+pushd "${release_dir}"
+
+index_file=index.md
+rm -f ${index_file}
+announce_files="$(ls | sort --version-sort --reverse)"
+cat <<INDEX > ${index_file}
+---
+layout: default
+title: Releases
+permalink: /release/index.html
+---
+<!--
+{% comment %}
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+{% endcomment %}
+-->
+
+# Apache Arrow Releases
+
+Navigate to the release page for downloads and the changelog.
+
+INDEX
+
+i=0
+for md_file in ${announce_files}; do
+ i=$((i + 1))
+ title=$(grep '^# Apache Arrow' ${md_file} | sed -e 's/^# Apache Arrow //')
+ echo "* [${title}][${i}]" >> ${index_file}
+done
+echo >> ${index_file}
+
+i=0
+for md_file in ${announce_files}; do
+ i=$((i + 1))
+ html_file=$(echo ${md_file} | sed -e 's/md$/html/')
+ echo "[${i}]: {{ site.baseurl }}/release/${html_file}" >> ${index_file}
+done
+
+git add ${index_file}
+
+popd
+
+
+# Update versions.yml
+pinned_version=$(echo ${version} | sed -e 's/\.[^.]*$/.*/')
+
+apache_download_url=https://downloads.apache.org
+
+cat <<YAML > "${versions_yml}"
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Database of the current version
+#
+current:
+ number: '${version}'
+ pinned_number: '${pinned_version}'
+ date: '${release_date}'
+ git-tag: '${git_tag_hash}'
+ github-tag-link: 'https://github.com/apache/arrow/releases/tag/${git_tag}'
+ release-notes: 'https://arrow.apache.org/release/${version}.html'
+ mirrors: 'https://www.apache.org/dyn/closer.lua/arrow/arrow-${version}/'
+ tarball-name: 'apache-arrow-${version}.tar.gz'
+ tarball-url: 'https://www.apache.org/dyn/closer.lua?action=download&filename=arrow/arrow-${version}/apache-arrow-${version}.tar.gz'
+ java-artifacts: 'http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.arrow%22%20AND%20v%3A%22${version}%22'
+ asc: '${apache_download_url}/arrow/arrow-${version}/apache-arrow-${version}.tar.gz.asc'
+ sha256: '${apache_download_url}/arrow/arrow-${version}/apache-arrow-${version}.tar.gz.sha256'
+ sha512: '${apache_download_url}/arrow/arrow-${version}/apache-arrow-${version}.tar.gz.sha512'
+YAML
+git add "${versions_yml}"
+
+git commit -m "[Website] Add release note for ${version}"
+git push -u origin ${branch_name}
+
+github_url=$(git remote get-url origin | \
+ sed \
+ -e 's,^git@github.com:,https://github.com/,' \
+ -e 's,\.git$,,')
+
+echo "Success!"
+echo "Create a pull request:"
+echo " ${github_url}/pull/new/${branch_name}"
+
+popd
diff --git a/src/arrow/dev/release/post-04-ruby.sh b/src/arrow/dev/release/post-04-ruby.sh
new file mode 100755
index 000000000..edcb54c13
--- /dev/null
+++ b/src/arrow/dev/release/post-04-ruby.sh
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash
+# -*- indent-tabs-mode: nil; sh-indentation: 2; sh-basic-offset: 2 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -e
+set -o pipefail
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage: $0 <version>"
+ exit
+fi
+
+version=$1
+archive_name=apache-arrow-${version}
+tar_gz=${archive_name}.tar.gz
+
+echo "NOTE: We should release RubyGems after Homebrew and MSYS2 packages are updated!!!"
+
+echo "Checking Homebrew package..."
+homebrew_version=$(
+ curl \
+ --fail \
+ --no-progress-meter \
+ https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/apache-arrow-glib.rb | \
+ grep url | \
+ grep -o "[0-9]*\.[0-9]*\.[0-9]*" | \
+ head -n 1)
+echo "Homebrew package version: ${homebrew_version}"
+if [ "${version}" = "${homebrew_version}" ]; then
+ echo "OK!"
+else
+ echo "Different!"
+ exit 1
+fi
+
+
+echo "Checking MSYS2 package..."
+msys2_version=$(
+ curl \
+ --fail \
+ --no-progress-meter \
+ https://packages.msys2.org/base/mingw-w64-arrow | \
+ grep -A 1 ">Version:<" | \
+ grep -o "[0-9]*\.[0-9]*\.[0-9]*")
+echo "MSYS2 package version: ${msys2_version}"
+if [ "${version}" = "${msys2_version}" ]; then
+ echo "OK!"
+else
+ echo "Different!"
+ exit 1
+fi
+
+
+rm -f ${tar_gz}
+curl \
+ --remote-name \
+ --fail \
+ https://downloads.apache.org/arrow/arrow-${version}/${tar_gz}
+rm -rf ${archive_name}
+tar xf ${tar_gz}
+modules=()
+for module in ${archive_name}/ruby/red-*; do
+ pushd ${module}
+ rake release
+ modules+=($(basename ${module}))
+ popd
+done
+rm -rf ${archive_name}
+rm -f ${tar_gz}
+
+echo "Success! The released RubyGems are available here:"
+for module in ${modules[@]}; do
+ echo " https://rubygems.org/gems/${module}/versions/${version}"
+done
diff --git a/src/arrow/dev/release/post-05-js.sh b/src/arrow/dev/release/post-05-js.sh
new file mode 100755
index 000000000..edc5fe20b
--- /dev/null
+++ b/src/arrow/dev/release/post-05-js.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+# -*- indent-tabs-mode: nil; sh-indentation: 2; sh-basic-offset: 2 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -e
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage: $0 <version>"
+ exit
+fi
+
+version=$1
+archive_name=apache-arrow-${version}
+tar_gz=${archive_name}.tar.gz
+
+rm -f ${tar_gz}
+curl \
+ --remote-name \
+ --fail \
+ https://downloads.apache.org/arrow/arrow-${version}/${tar_gz}
+rm -rf ${archive_name}
+tar xf ${tar_gz}
+pushd ${archive_name}/js
+./npm-release.sh
+popd
+rm -rf ${archive_name}
+rm -f ${tar_gz}
+
+echo "Success! The released npm packages are available here:"
+echo " https://www.npmjs.com/package/apache-arrow/v/${version}"
diff --git a/src/arrow/dev/release/post-06-csharp.sh b/src/arrow/dev/release/post-06-csharp.sh
new file mode 100755
index 000000000..d2968a5d5
--- /dev/null
+++ b/src/arrow/dev/release/post-06-csharp.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+# -*- indent-tabs-mode: nil; sh-indentation: 2; sh-basic-offset: 2 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set -eux
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage: $0 <version>"
+ exit
+fi
+
+version=$1
+
+if [ -z "${NUGET_API_KEY}" ]; then
+ echo "NUGET_API_KEY is empty"
+ exit 1
+fi
+
+base_names=()
+base_names+=(Apache.Arrow.${version})
+base_names+=(Apache.Arrow.Flight.${version})
+base_names+=(Apache.Arrow.Flight.AspNetCore.${version})
+for base_name in ${base_names[@]}; do
+ for extension in nupkg snupkg; do
+ path=${base_name}.${extension}
+ rm -f ${path}
+ curl \
+ --fail \
+ --location \
+ --remote-name \
+ https://apache.jfrog.io/artifactory/arrow/nuget/${version}/${path}
+ done
+ dotnet nuget push \
+ ${base_name}.nupkg \
+ -k ${NUGET_API_KEY} \
+ -s https://api.nuget.org/v3/index.json
+ rm -f ${base_name}.{nupkg,snupkg}
+done
+
+echo "Success! The released NuGet package is available here:"
+echo " https://www.nuget.org/packages/Apache.Arrow/${version}"
diff --git a/src/arrow/dev/release/post-08-remove-rc.sh b/src/arrow/dev/release/post-08-remove-rc.sh
new file mode 100755
index 000000000..8e02b7e95
--- /dev/null
+++ b/src/arrow/dev/release/post-08-remove-rc.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set -e
+set -u
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [ "$#" -ne 1 ]; then
+  echo "Usage: $0 <version>"
+  exit 1
+fi
+
+version=$1
+
+base_url=https://dist.apache.org/repos/dist/dev/arrow
+pattern="^apache-arrow-${version}-rc"
+# Remove every RC directory for this version from the dev dist area, if any.
+if svn ls ${base_url} | grep "${pattern}" > /dev/null 2>&1; then
+  rc_paths=$(svn ls ${base_url} | grep "${pattern}")
+  rc_urls=()
+  for rc_path in ${rc_paths}; do
+    rc_urls+=(${base_url}/${rc_path})
+  done
+  svn rm --message "Remove RC for ${version}" ${rc_urls[@]}
+  echo "Removed RC artifacts:"
+  for rc_url in ${rc_urls[@]}; do
+    echo "  ${rc_url}"
+  done
+else
+  echo "No RC artifacts at ${base_url}"
+fi
diff --git a/src/arrow/dev/release/post-09-docs.sh b/src/arrow/dev/release/post-09-docs.sh
new file mode 100755
index 000000000..9c0b77bb5
--- /dev/null
+++ b/src/arrow/dev/release/post-09-docs.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -u
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ARROW_DIR="${SOURCE_DIR}/../.."
+ARROW_SITE_DIR="${ARROW_DIR}/../arrow-site"
+
+if [ "$#" -ne 1 ]; then
+  echo "Usage: $0 <version>"
+  exit 1
+fi
+
+version=$1
+release_tag="apache-arrow-${version}"
+branch_name=release-docs-${version}
+
+# Prepare a fresh docs branch in the arrow-site checkout, keeping only
+# the hand-maintained c_glib index page.
+pushd "${ARROW_SITE_DIR}"
+git checkout asf-site
+git checkout -b "${branch_name}"
+rm -rf docs/*
+git checkout docs/c_glib/index.html
+popd
+
+pushd "${ARROW_DIR}"
+git checkout "${release_tag}"
+
+# Build the versioned documentation directly into the site checkout.
+UBUNTU=20.10 archery docker run \
+  -v "${ARROW_SITE_DIR}/docs:/build/docs" \
+  -e ARROW_DOCS_VERSION="${version}" \
+  ubuntu-docs
+
+: ${PUSH:=1}
+
+if [ ${PUSH} -gt 0 ]; then
+  pushd "${ARROW_SITE_DIR}"
+  git add docs
+  git commit -m "[Website] Update documentations for ${version}"
+  git push -u origin "${branch_name}"
+  github_url=$(git remote get-url origin | \
+    sed \
+      -e 's,^git@github.com:,https://github.com/,' \
+      -e 's,\.git$,,')
+  popd
+
+  echo "Success!"
+  echo "Create a pull request:"
+  echo "  ${github_url}/pull/new/${branch_name}"
+fi
diff --git a/src/arrow/dev/release/post-10-python.sh b/src/arrow/dev/release/post-10-python.sh
new file mode 100755
index 000000000..a014239ea
--- /dev/null
+++ b/src/arrow/dev/release/post-10-python.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -ex
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+: ${TEST_PYPI:=0}
+
+if [ "$#" -ne 2 ]; then
+  echo "Usage: $0 <version> <rc-num>"
+  exit 1
+fi
+
+version=$1
+rc=$2
+
+# Download the approved wheels and sdist for this RC into a scratch dir.
+tmp=$(mktemp -d -t "arrow-post-python.XXXXX")
+${PYTHON:-python} \
+  "${SOURCE_DIR}/download_rc_binaries.py" \
+  ${version} \
+  ${rc} \
+  --dest="${tmp}" \
+  --package_type=python \
+  --regex=".*\.(whl|tar\.gz)$"
+
+# Upload to the test index instead of production PyPI when requested.
+TWINE_ARGS=""
+if [ ${TEST_PYPI} -gt 0 ]; then
+  TWINE_ARGS="--repository-url https://test.pypi.org/legacy/"
+fi
+
+twine upload ${TWINE_ARGS} ${tmp}/python-rc/${version}-rc${rc}/*.{whl,tar.gz}
+
+rm -rf "${tmp}"
+
+echo "Success! The released PyPI packages are available here:"
+echo "  https://pypi.org/project/pyarrow/${version}"
diff --git a/src/arrow/dev/release/post-11-java.sh b/src/arrow/dev/release/post-11-java.sh
new file mode 100755
index 000000000..86e6e9b57
--- /dev/null
+++ b/src/arrow/dev/release/post-11-java.sh
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -o pipefail
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 1 ]; then
+  echo "Usage: $0 <version>"
+  exit 1
+fi
+
+version=$1
+archive_name=apache-arrow-${version}
+tar_gz=${archive_name}.tar.gz
+
+# download and unpack the released source archive
+rm -f ${tar_gz}
+curl \
+  --remote-name \
+  --fail \
+  https://downloads.apache.org/arrow/arrow-${version}/${tar_gz}
+rm -rf ${archive_name}
+tar xf ${tar_gz}
+
+pushd ${archive_name}
+
+# clone the testing data to the appropriate directories
+git clone https://github.com/apache/arrow-testing.git testing
+git clone https://github.com/apache/parquet-testing.git cpp/submodules/parquet-testing
+
+# build the jni bindings similarly to what 01-perform.sh does
+mkdir -p cpp/java-build
+pushd cpp/java-build
+cmake \
+  -DARROW_DATASET=ON \
+  -DARROW_FILESYSTEM=ON \
+  -DARROW_GANDIVA_JAVA=ON \
+  -DARROW_GANDIVA=ON \
+  -DARROW_JNI=ON \
+  -DARROW_ORC=ON \
+  -DARROW_PARQUET=ON \
+  -DCMAKE_BUILD_TYPE=release \
+  -G Ninja \
+  ..
+ninja
+popd
+
+# go in the java subfolder
+pushd java
+# stage the artifacts using both the apache-release and arrow-jni profiles
+# Note: on ORC checkstyle failure use -Dcheckstyle.skip=true until https://issues.apache.org/jira/browse/ARROW-12552 gets resolved
+mvn -Papache-release,arrow-jni -Darrow.cpp.build.dir=$(realpath ../cpp/java-build/release) deploy
+popd
+
+popd
+
+echo "Success! The maven artifacts have been staged. Proceed with the following steps:"
+echo "1. Login to the apache repository: https://repository.apache.org/#stagingRepositories"
+echo "2. Select the arrow staging repository you just created: orgapachearrow-100x"
+echo "3. Click the \"close\" button"
+echo "4. Once validation has passed, click the \"release\" button"
+echo ""
+echo "Note, that you must set up Maven to be able to publish to Apache's repositories."
+echo "Read more at https://www.apache.org/dev/publishing-maven-artifacts.html."
diff --git a/src/arrow/dev/release/post-12-bump-versions.sh b/src/arrow/dev/release/post-12-bump-versions.sh
new file mode 100755
index 000000000..8474f03d2
--- /dev/null
+++ b/src/arrow/dev/release/post-12-bump-versions.sh
@@ -0,0 +1,85 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -ue
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 2 ]; then
+  echo "Usage: $0 <version> <next_version>"
+  exit 1
+fi
+
+: ${BUMP_DEFAULT:=1}
+: ${BUMP_VERSION_POST_TAG:=${BUMP_DEFAULT}}
+: ${BUMP_DEB_PACKAGE_NAMES:=${BUMP_DEFAULT}}
+
+. $SOURCE_DIR/utils-prepare.sh
+
+version=$1
+next_version=$2
+next_version_snapshot="${next_version}-SNAPSHOT"
+
+if [ ${BUMP_VERSION_POST_TAG} -gt 0 ]; then
+  echo "Updating versions for ${next_version_snapshot}"
+  update_versions "${version}" "${next_version}" "snapshot"
+  git commit -m "[Release] Update versions for ${next_version_snapshot}"
+fi
+
+if [ ${BUMP_DEB_PACKAGE_NAMES} -gt 0 ]; then
+  echo "Updating .deb package names for ${next_version}"
+  # The .deb library packages embed the SO version (major * 100 + minor)
+  # in their names, so a major/minor bump requires renaming the packages.
+  so_version() {
+    local version=$1
+    local major_version=$(echo $version | sed -E -e 's/^([0-9]+)\.[0-9]+\.[0-9]+$/\1/')
+    local minor_version=$(echo $version | sed -E -e 's/^[0-9]+\.([0-9]+)\.[0-9]+$/\1/')
+    expr ${major_version} \* 100 + ${minor_version}
+  }
+  deb_lib_suffix=$(so_version $version)
+  next_deb_lib_suffix=$(so_version $next_version)
+  if [ "${deb_lib_suffix}" != "${next_deb_lib_suffix}" ]; then
+    cd $SOURCE_DIR/../tasks/linux-packages/apache-arrow
+    # Rename the per-library .install files to the new SO version.
+    for target in debian*/lib*${deb_lib_suffix}.install; do
+      git mv \
+        ${target} \
+        $(echo $target | sed -e "s/${deb_lib_suffix}/${next_deb_lib_suffix}/")
+    done
+    # Rewrite the package names in the Debian control files...
+    deb_lib_suffix_substitute_pattern="s/(lib(arrow|gandiva|parquet|plasma)[-a-z]*)${deb_lib_suffix}/\\1${next_deb_lib_suffix}/g"
+    sed -i.bak -E -e "${deb_lib_suffix_substitute_pattern}" debian*/control*
+    rm -f debian*/control*.bak
+    git add debian*/control*
+    cd -
+    # ...in the crossbow task definitions...
+    cd $SOURCE_DIR/../tasks/
+    sed -i.bak -E -e "${deb_lib_suffix_substitute_pattern}" tasks.yml
+    rm -f tasks.yml.bak
+    git add tasks.yml
+    cd -
+    # ...and in the RAT license-check exclusion list.
+    cd $SOURCE_DIR
+    sed -i.bak -E -e "${deb_lib_suffix_substitute_pattern}" rat_exclude_files.txt
+    rm -f rat_exclude_files.txt.bak
+    git add rat_exclude_files.txt
+    git commit -m "[Release] Update .deb package names for $next_version"
+    cd -
+  fi
+fi
diff --git a/src/arrow/dev/release/post-13-go.sh b/src/arrow/dev/release/post-13-go.sh
new file mode 100644
index 000000000..7c6034837
--- /dev/null
+++ b/src/arrow/dev/release/post-13-go.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set -ue
+
+SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+if [ "$#" -ne 1 ]; then
+  echo "Usage: $0 <version>"
+  exit 1
+fi
+
+version=$1
+version_tag="apache-arrow-${version}"
+go_arrow_tag="go/v${version}"
+
+# Tag the release commit with the Go module tag (go/vX.Y.Z) and publish it.
+git tag "${go_arrow_tag}" "${version_tag}"
+git push apache "${go_arrow_tag}"
diff --git a/src/arrow/dev/release/rat_exclude_files.txt b/src/arrow/dev/release/rat_exclude_files.txt
new file mode 100644
index 000000000..47fcf618f
--- /dev/null
+++ b/src/arrow/dev/release/rat_exclude_files.txt
@@ -0,0 +1,208 @@
+*.npmrc
+*.gitignore
+.gitmodules
+*_generated.h
+*_generated.js
+*_generated.ts
+*.csv
+*.json
+*.snap
+.github/ISSUE_TEMPLATE/question.md
+ci/etc/rprofile
+ci/etc/*.patch
+ci/vcpkg/*.patch
+CHANGELOG.md
+cpp/CHANGELOG_PARQUET.md
+cpp/src/arrow/io/mman.h
+cpp/src/arrow/util/random.h
+cpp/src/arrow/status.cc
+cpp/src/arrow/status.h
+cpp/src/arrow/vendored/*
+cpp/build-support/asan_symbolize.py
+cpp/build-support/cpplint.py
+cpp/build-support/lint_exclusions.txt
+cpp/build-support/iwyu/*
+cpp/cmake_modules/FindPythonLibsNew.cmake
+cpp/cmake_modules/SnappyCMakeLists.txt
+cpp/cmake_modules/SnappyConfig.h
+cpp/examples/parquet/parquet-arrow/cmake_modules/FindArrow.cmake
+cpp/src/parquet/.parquetcppversion
+cpp/src/generated/parquet_constants.cpp
+cpp/src/generated/parquet_constants.h
+cpp/src/generated/parquet_types.cpp
+cpp/src/generated/parquet_types.h
+cpp/src/plasma/thirdparty/ae/ae.c
+cpp/src/plasma/thirdparty/ae/ae.h
+cpp/src/plasma/thirdparty/ae/ae_epoll.c
+cpp/src/plasma/thirdparty/ae/ae_evport.c
+cpp/src/plasma/thirdparty/ae/ae_kqueue.c
+cpp/src/plasma/thirdparty/ae/ae_select.c
+cpp/src/plasma/thirdparty/ae/config.h
+cpp/src/plasma/thirdparty/ae/zmalloc.h
+cpp/src/plasma/thirdparty/dlmalloc.c
+cpp/thirdparty/flatbuffers/include/flatbuffers/base.h
+cpp/thirdparty/flatbuffers/include/flatbuffers/flatbuffers.h
+cpp/thirdparty/flatbuffers/include/flatbuffers/stl_emulation.h
+dev/requirements*.txt
+dev/archery/MANIFEST.in
+dev/archery/requirements*.txt
+dev/archery/archery/tests/fixtures/*
+dev/archery/archery/crossbow/tests/fixtures/*
+dev/release/rat_exclude_files.txt
+dev/tasks/homebrew-formulae/apache-arrow.rb
+dev/tasks/linux-packages/apache-arrow-apt-source/debian/apache-arrow-apt-source.install
+dev/tasks/linux-packages/apache-arrow-apt-source/debian/compat
+dev/tasks/linux-packages/apache-arrow-apt-source/debian/control
+dev/tasks/linux-packages/apache-arrow-apt-source/debian/rules
+dev/tasks/linux-packages/apache-arrow-apt-source/debian/source/format
+dev/tasks/linux-packages/apache-arrow/debian/compat
+dev/tasks/linux-packages/apache-arrow/debian/control.in
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-arrow-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-arrow-cuda-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-arrow-dataset-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-arrow-flight-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-gandiva-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-parquet-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/gir1.2-plasma-1.0.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-python-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-python-flight-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-python-flight600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow-python600.install
+dev/tasks/linux-packages/apache-arrow/debian/libarrow600.install
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libgandiva600.install
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libparquet600.install
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-glib-dev.install
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-glib-doc.doc-base
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-glib-doc.install
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-glib-doc.links
+dev/tasks/linux-packages/apache-arrow/debian/libplasma-glib600.install
+dev/tasks/linux-packages/apache-arrow/debian/libplasma600.install
+dev/tasks/linux-packages/apache-arrow/debian/patches/series
+dev/tasks/linux-packages/apache-arrow/debian/plasma-store-server.install
+dev/tasks/linux-packages/apache-arrow/debian/rules
+dev/tasks/linux-packages/apache-arrow/debian/source/format
+dev/tasks/linux-packages/apache-arrow/debian/watch
+dev/tasks/requirements*.txt
+dev/tasks/conda-recipes/*
+docs/requirements.txt
+go/arrow/flight/Flight_grpc.pb.go
+go/go.sum
+go/arrow/Gopkg.lock
+go/arrow/flight/Flight.pb.go
+go/arrow/flight/Flight_grpc.pb.go
+go/arrow/internal/cpu/*
+go/arrow/type_string.go
+go/arrow/cdata/test/go.sum
+go/*.tmpldata
+go/*.s
+go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go
+go/parquet/internal/gen-go/parquet/parquet-consts.go
+go/parquet/internal/gen-go/parquet/parquet.go
+js/.npmignore
+js/closure-compiler-scripts/*
+js/src/fb/*.ts
+js/yarn.lock
+js/.eslintignore
+python/cmake_modules
+python/cmake_modules/FindPythonLibsNew.cmake
+python/cmake_modules/SnappyCMakeLists.txt
+python/cmake_modules/SnappyConfig.h
+python/MANIFEST.in
+python/manylinux1/.dockerignore
+python/pyarrow/includes/__init__.pxd
+python/pyarrow/tests/__init__.py
+python/pyarrow/vendored/*
+python/requirements*.txt
+pax_global_header
+MANIFEST.in
+__init__.pxd
+__init__.py
+requirements.txt
+csharp/.gitattributes
+csharp/dummy.git/*
+csharp/src/Apache.Arrow/Flatbuf/*
+csharp/Apache.Arrow.sln
+csharp/examples/FluentBuilderExample/FluentBuilderExample.csproj
+csharp/examples/Examples.sln
+csharp/src/Apache.Arrow/Apache.Arrow.csproj
+csharp/src/Apache.Arrow/Properties/Resources.Designer.cs
+csharp/src/Apache.Arrow/Properties/Resources.resx
+csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj
+csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj
+csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj
+csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj
+csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj
+csharp/test/Apache.Arrow.IntegrationTest/Apache.Arrow.IntegrationTest.csproj
+csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj
+csharp/test/Apache.Arrow.Tests/app.config
+*.html
+*.sgml
+*.css
+*.png
+*.ico
+*.svg
+*.devhelp2
+*.scss
+r/R/arrowExports.R
+r/src/arrowExports.cpp
+r/DESCRIPTION
+r/LICENSE.md
+r/NAMESPACE
+r/.Rbuildignore
+r/arrow.Rproj
+r/README.md
+r/README.Rmd
+r/man/*.Rd
+r/cran-comments.md
+r/vignettes/*.Rmd
+r/tests/testthat/test-*.txt
+r/inst/include/cpp11.hpp
+r/inst/include/cpp11/*.hpp
+.gitattributes
+ruby/red-arrow/.yardopts
+julia/Arrow/Project.toml
+julia/Arrow/README.md
+julia/Arrow/docs/Manifest.toml
+julia/Arrow/docs/Project.toml
+julia/Arrow/docs/make.jl
+julia/Arrow/docs/mkdocs.yml
+julia/Arrow/docs/src/index.md
+julia/Arrow/docs/src/manual.md
+julia/Arrow/docs/src/reference.md
diff --git a/src/arrow/dev/release/run-rat.sh b/src/arrow/dev/release/run-rat.sh
new file mode 100755
index 000000000..2596a284c
--- /dev/null
+++ b/src/arrow/dev/release/run-rat.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+RAT_VERSION=0.13
+
+# download Apache RAT (release audit tool) unless it is already cached
+if [ ! -f apache-rat-${RAT_VERSION}.jar ]; then
+  curl -s https://repo1.maven.org/maven2/org/apache/rat/apache-rat/${RAT_VERSION}/apache-rat-${RAT_VERSION}.jar > apache-rat-${RAT_VERSION}.jar
+fi
+
+RAT="java -jar apache-rat-${RAT_VERSION}.jar -x "
+
+RELEASE_DIR=$(cd "$(dirname "$BASH_SOURCE")"; pwd)
+
+# generate the rat report and drop the known exclusions from it
+$RAT "$1" > rat.txt
+python $RELEASE_DIR/check-rat-report.py $RELEASE_DIR/rat_exclude_files.txt rat.txt > filtered_rat.txt
+cat filtered_rat.txt
+UNAPPROVED=$(grep -c "NOT APPROVED" filtered_rat.txt)
+
+if [ "0" -eq "${UNAPPROVED}" ]; then
+  echo "No unapproved licenses"
+else
+  echo "${UNAPPROVED} unapproved licenses. Check rat report: rat.txt"
+  exit 1
+fi
diff --git a/src/arrow/dev/release/run-test.rb b/src/arrow/dev/release/run-test.rb
new file mode 100755
index 000000000..90df39b13
--- /dev/null
+++ b/src/arrow/dev/release/run-test.rb
@@ -0,0 +1,33 @@
+#!/usr/bin/env ruby
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+$VERBOSE = true
+
+require "pathname"
+
+# Directory that contains this script and the *-test.rb files to run.
+test_dir = Pathname.new(__dir__)
+
+require "test-unit"
+require_relative "test-helper"
+
+# Show full diffs in assertion failure messages.
+ENV["TEST_UNIT_MAX_DIFF_TARGET_STRING_SIZE"] = "10000"
+
+exit(Test::Unit::AutoRunner.run(true, test_dir.to_s))
diff --git a/src/arrow/dev/release/setup-gpg-agent.sh b/src/arrow/dev/release/setup-gpg-agent.sh
new file mode 100644
index 000000000..9ff84f6f0
--- /dev/null
+++ b/src/arrow/dev/release/setup-gpg-agent.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# source me
+# Start a gpg-agent that allows passphrase presetting, then prime its
+# passphrase cache by signing a throw-away file so that later release
+# scripts can sign artifacts non-interactively.
+eval $(gpg-agent --daemon --allow-preset-passphrase)
+gpg --use-agent -s LICENSE.txt
+rm -f LICENSE.txt.gpg
diff --git a/src/arrow/dev/release/test-helper.rb b/src/arrow/dev/release/test-helper.rb
new file mode 100644
index 000000000..8a272ddfe
--- /dev/null
+++ b/src/arrow/dev/release/test-helper.rb
@@ -0,0 +1,105 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+require "English"
+require "cgi/util"
+require "fileutils"
+require "find"
+require "json"
+require "open-uri"
+require "rexml/document"
+require "tempfile"
+require "tmpdir"
+
+# Runs external commands, capturing stdout/stderr into temporary files.
+module CommandRunnable
+  class Error < StandardError
+  end
+
+  # Runs *command_line* (optionally preceded by an environment Hash) and
+  # returns its standard output.  When check_result is true, raises
+  # Error with the captured stdout/stderr if the command fails.
+  def sh(*command_line, check_result: true)
+    if command_line[0].is_a?(Hash)
+      env = command_line.shift
+    else
+      env = {}
+    end
+    stdout = Tempfile.new("command-stdout.log")
+    stderr = Tempfile.new("command-stderr.log")
+    success = system(env, *command_line, out: stdout.path, err: stderr.path)
+    if check_result
+      unless success
+        message = "Failed to run: #{command_line.join(" ")}\n"
+        message << "stdout:\n #{stdout.read}\n"
+        message << "stderr:\n #{stderr.read}"
+        raise Error, message
+      end
+    end
+    stdout.read
+  end
+end
+
+# Convenience wrappers around the "git" command line tool.
+module GitRunnable
+  include CommandRunnable
+
+  def git(*args)
+    if args[0].is_a?(Hash)
+      env = args.shift
+    else
+      env = {}
+    end
+    sh(env, "git", *args)
+  end
+
+  def git_current_commit
+    git("rev-parse", "HEAD").chomp
+  end
+
+  def git_tags
+    # "git tag" (singular) is the subcommand that lists existing tags.
+    git("tag").lines(chomp: true)
+  end
+end
+
+# Detects the version numbers used by the release scripts from the
+# source tree (cpp/CMakeLists.txt and r/DESCRIPTION).
+module VersionDetectable
+  def detect_versions
+    top_dir = Pathname(__dir__).parent.parent
+    cpp_cmake_lists = top_dir + "cpp" + "CMakeLists.txt"
+    @snapshot_version = cpp_cmake_lists.read[/ARROW_VERSION "(.+?)"/, 1]
+    @release_version = @snapshot_version.gsub(/-SNAPSHOT\z/, "")
+    @so_version = compute_so_version(@release_version)
+    @next_version = @release_version.gsub(/\A\d+/) {|major| major.succ}
+    @next_snapshot_version = "#{@next_version}-SNAPSHOT"
+    @next_so_version = compute_so_version(@next_version)
+    r_description = top_dir + "r" + "DESCRIPTION"
+    @previous_version = r_description.read[/^Version: (.+?)\.9000$/, 1]
+  end
+
+  # The shared library version is <major> * 100 + <minor>.
+  def compute_so_version(version)
+    major, minor, _patch = version.split(".")
+    Integer(major, 10) * 100 + Integer(minor, 10)
+  end
+
+  def on_release_branch?
+    @snapshot_version == @release_version
+  end
+end
diff --git a/src/arrow/dev/release/utils-binary.sh b/src/arrow/dev/release/utils-binary.sh
new file mode 100644
index 000000000..31ebcd8e9
--- /dev/null
+++ b/src/arrow/dev/release/utils-binary.sh
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+docker_image_name=apache-arrow/release-binary
+# Extra agent socket that can be forwarded into the signing container.
+gpg_agent_extra_socket="$(gpgconf --list-dirs agent-extra-socket)"
+if [ $(uname) = "Darwin" ]; then
+  # Docker Desktop on macOS runs in a VM, so the host UID/GID don't map.
+  docker_uid=10000
+  docker_gid=10000
+else
+  docker_uid=$(id -u)
+  docker_gid=$(id -g)
+fi
+docker_ssh_key="${SOURCE_DIR}/binary/id_rsa"
+
+if [ ! -f "${docker_ssh_key}" ]; then
+  ssh-keygen -N "" -f "${docker_ssh_key}"
+fi
+
+# Runs a command in the signing container over SSH, reverse-forwarding
+# the host's gpg-agent socket so signing uses the host's keys.
+docker_gpg_ssh() {
+  local ssh_port=$1
+  shift
+  local known_hosts_file=$(mktemp -t "arrow-binary-gpg-ssh-known-hosts.XXXXX")
+  local exit_code=
+  if ssh \
+    -o StrictHostKeyChecking=no \
+    -o UserKnownHostsFile=${known_hosts_file} \
+    -i "${docker_ssh_key}" \
+    -p ${ssh_port} \
+    -R "/home/arrow/.gnupg/S.gpg-agent:${gpg_agent_extra_socket}" \
+    arrow@127.0.0.1 \
+    "$@"; then
+    exit_code=$?;
+  else
+    exit_code=$?;
+  fi
+  rm -f ${known_hosts_file}
+  return ${exit_code}
+}
+
+# Starts a detached signing container, waits for sshd to come up,
+# imports the GPG public key, runs "$@" inside it, then tears it down.
+docker_run() {
+  local container_id_dir=$(mktemp -d -t "arrow-binary-gpg-container.XXXXX")
+  local container_id_file=${container_id_dir}/id
+  docker \
+    run \
+    --cidfile ${container_id_file} \
+    --detach \
+    --publish-all \
+    --rm \
+    --volume "$PWD":/host \
+    ${docker_image_name} \
+    bash -c "
+if [ \$(id -u) -ne ${docker_uid} ]; then
+  usermod --uid ${docker_uid} arrow
+  chown -R arrow: ~arrow
+fi
+/usr/sbin/sshd -D
+"
+  local container_id=$(cat ${container_id_file})
+  local ssh_port=$(docker port ${container_id} | grep -E -o '[0-9]+$' | head -n 1)
+  # Wait for sshd available
+  while ! docker_gpg_ssh ${ssh_port} : > /dev/null 2>&1; do
+    sleep 0.1
+  done
+  gpg --export ${GPG_KEY_ID} | docker_gpg_ssh ${ssh_port} gpg --import
+  docker_gpg_ssh ${ssh_port} "$@"
+  docker kill ${container_id}
+  rm -rf ${container_id_dir}
+}
+
+docker build -t ${docker_image_name} "${SOURCE_DIR}/binary"
+
+# Keep the generated SSH key private (ssh refuses world-readable keys).
+chmod go-rwx "${docker_ssh_key}"
diff --git a/src/arrow/dev/release/utils-prepare.sh b/src/arrow/dev/release/utils-prepare.sh
new file mode 100644
index 000000000..7ba786a75
--- /dev/null
+++ b/src/arrow/dev/release/utils-prepare.sh
@@ -0,0 +1,145 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+ARROW_DIR="${SOURCE_DIR}/../.."
+
+update_versions() {
+ local base_version=$1
+ local next_version=$2
+ local type=$3
+
+ case ${type} in
+ release)
+ local version=${base_version}
+ local r_version=${base_version}
+ ;;
+ snapshot)
+ local version=${next_version}-SNAPSHOT
+ local r_version=${base_version}.9000
+ ;;
+ esac
+
+ pushd "${ARROW_DIR}/c_glib"
+ sed -i.bak -E -e \
+ "s/^version = '.+'/version = '${version}'/" \
+ meson.build
+ rm -f meson.build.bak
+ git add meson.build
+ popd
+
+ pushd "${ARROW_DIR}/ci/scripts"
+ sed -i.bak -E -e \
+ "s/^pkgver=.+/pkgver=${r_version}/" \
+ PKGBUILD
+ rm -f PKGBUILD.bak
+ git add PKGBUILD
+ popd
+
+ pushd "${ARROW_DIR}/cpp"
+ sed -i.bak -E -e \
+ "s/^set\(ARROW_VERSION \".+\"\)/set(ARROW_VERSION \"${version}\")/" \
+ CMakeLists.txt
+ rm -f CMakeLists.txt.bak
+ git add CMakeLists.txt
+
+ sed -i.bak -E -e \
+ "s/\"version-string\": \".+\"/\"version-string\": \"${version}\"/" \
+ vcpkg.json
+ rm -f vcpkg.json.bak
+ git add vcpkg.json
+ popd
+
+ pushd "${ARROW_DIR}/java"
+ mvn versions:set -DnewVersion=${version}
+ find . -type f -name pom.xml.versionsBackup -delete
+ git add "pom.xml"
+ git add "**/pom.xml"
+ popd
+
+ pushd "${ARROW_DIR}/csharp"
+ sed -i.bak -E -e \
+ "s/^ <Version>.+<\/Version>/ <Version>${version}<\/Version>/" \
+ Directory.Build.props
+ rm -f Directory.Build.props.bak
+ git add Directory.Build.props
+ popd
+
+ pushd "${ARROW_DIR}/dev/tasks/homebrew-formulae"
+ sed -i.bak -E -e \
+ "s/arrow-[0-9.]+[0-9]+/arrow-${r_version}/g" \
+ autobrew/apache-arrow.rb
+ rm -f autobrew/apache-arrow.rb.bak
+ git add autobrew/apache-arrow.rb
+ sed -i.bak -E -e \
+ "s/arrow-[0-9.\-]+[0-9SNAPSHOT]+/arrow-${version}/g" \
+ apache-arrow.rb
+ rm -f apache-arrow.rb.bak
+ git add apache-arrow.rb
+ popd
+
+ pushd "${ARROW_DIR}/js"
+ sed -i.bak -E -e \
+ "s/^ \"version\": \".+\"/ \"version\": \"${version}\"/" \
+ package.json
+ rm -f package.json.bak
+ git add package.json
+ popd
+
+ pushd "${ARROW_DIR}/matlab"
+ sed -i.bak -E -e \
+ "s/^set\(MLARROW_VERSION \".+\"\)/set(MLARROW_VERSION \"${version}\")/" \
+ CMakeLists.txt
+ rm -f CMakeLists.txt.bak
+ git add CMakeLists.txt
+ popd
+
+ pushd "${ARROW_DIR}/python"
+ sed -i.bak -E -e \
+ "s/^default_version = '.+'/default_version = '${version}'/" \
+ setup.py
+ rm -f setup.py.bak
+ git add setup.py
+ popd
+
+ pushd "${ARROW_DIR}/r"
+ sed -i.bak -E -e \
+ "s/^Version: .+/Version: ${r_version}/" \
+ DESCRIPTION
+ rm -f DESCRIPTION.bak
+ git add DESCRIPTION
+ # Replace dev version with release version
+ sed -i.bak -E -e \
+ "0,/^# arrow /s/^# arrow .+/# arrow ${base_version}/" \
+ NEWS.md
+ if [ ${type} = "snapshot" ]; then
+ # Add a news entry for the new dev version
+ sed -i.bak -E -e \
+ "0,/^# arrow /s/^(# arrow .+)/# arrow ${r_version}\n\n\1/" \
+ NEWS.md
+ fi
+ rm -f NEWS.md.bak
+ git add NEWS.md
+ popd
+
+ pushd "${ARROW_DIR}/ruby"
+ sed -i.bak -E -e \
+ "s/^ VERSION = \".+\"/ VERSION = \"${version}\"/g" \
+ */*/*/version.rb
+ rm -f */*/*/version.rb.bak
+ git add */*/*/version.rb
+ popd
+}
diff --git a/src/arrow/dev/release/verify-apt.sh b/src/arrow/dev/release/verify-apt.sh
new file mode 100755
index 000000000..3773e27fa
--- /dev/null
+++ b/src/arrow/dev/release/verify-apt.sh
@@ -0,0 +1,194 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -exu
+
+if [ $# -lt 2 ]; then
+ echo "Usage: $0 VERSION rc"
+ echo " $0 VERSION staging-rc"
+ echo " $0 VERSION release"
+ echo " $0 VERSION staging-release"
+ echo " $0 VERSION local"
+ echo " e.g.: $0 0.13.0 rc # Verify 0.13.0 RC"
+ echo " e.g.: $0 0.13.0 staging-rc # Verify 0.13.0 RC on staging"
+ echo " e.g.: $0 0.13.0 release # Verify 0.13.0"
+ echo " e.g.: $0 0.13.0 staging-release # Verify 0.13.0 on staging"
+ echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
+ exit 1
+fi
+
+VERSION="$1"
+TYPE="$2"
+
+local_prefix="/arrow/dev/tasks/linux-packages"
+
+
+echo "::group::Prepare repository"
+
+export DEBIAN_FRONTEND=noninteractive
+
+APT_INSTALL="apt install -y -V --no-install-recommends"
+
+apt update
+${APT_INSTALL} \
+ ca-certificates \
+ curl \
+ lsb-release
+
+code_name="$(lsb_release --codename --short)"
+distribution="$(lsb_release --id --short | tr 'A-Z' 'a-z')"
+artifactory_base_url="https://apache.jfrog.io/artifactory/arrow/${distribution}"
+case "${TYPE}" in
+ rc|staging-rc|staging-release)
+ suffix=${TYPE%-release}
+ artifactory_base_url+="-${suffix}"
+ ;;
+esac
+
+have_flight=yes
+have_plasma=yes
+workaround_missing_packages=()
+case "${distribution}-${code_name}" in
+ debian-*)
+ sed \
+ -i"" \
+ -e "s/ main$/ main contrib non-free/g" \
+ /etc/apt/sources.list
+ ;;
+esac
+if [ "$(arch)" = "aarch64" ]; then
+ have_plasma=no
+fi
+
+if [ "${TYPE}" = "local" ]; then
+ case "${VERSION}" in
+ *-dev*)
+ package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/~dev\1/g')"
+ ;;
+ *-rc*)
+ package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
+ ;;
+ *)
+ package_version="${VERSION}"
+ ;;
+ esac
+ package_version+="-1"
+ apt_source_path="${local_prefix}/apt/repositories"
+ apt_source_path+="/${distribution}/pool/${code_name}/main"
+ apt_source_path+="/a/apache-arrow-apt-source"
+ apt_source_path+="/apache-arrow-apt-source_${package_version}_all.deb"
+ ${APT_INSTALL} "${apt_source_path}"
+else
+ package_version="${VERSION}-1"
+ apt_source_base_name="apache-arrow-apt-source-latest-${code_name}.deb"
+ curl \
+ --output "${apt_source_base_name}" \
+ "${artifactory_base_url}/${apt_source_base_name}"
+ ${APT_INSTALL} "./${apt_source_base_name}"
+fi
+
+if [ "${TYPE}" = "local" ]; then
+ sed \
+ -i"" \
+ -e "s,^URIs: .*$,URIs: file://${local_prefix}/apt/repositories/${distribution},g" \
+ /etc/apt/sources.list.d/apache-arrow.sources
+ keys="${local_prefix}/KEYS"
+ if [ -f "${keys}" ]; then
+ gpg \
+ --no-default-keyring \
+ --keyring /usr/share/keyrings/apache-arrow-apt-source.gpg \
+ --import "${keys}"
+ fi
+else
+ case "${TYPE}" in
+ rc|staging-rc|staging-release)
+ suffix=${TYPE%-release}
+ sed \
+ -i"" \
+ -e "s,^URIs: \\(.*\\)/,URIs: \\1-${suffix}/,g" \
+ /etc/apt/sources.list.d/apache-arrow.sources
+ ;;
+ esac
+fi
+
+apt update
+
+echo "::endgroup::"
+
+
+echo "::group::Test Apache Arrow C++"
+${APT_INSTALL} libarrow-dev=${package_version}
+required_packages=()
+required_packages+=(cmake)
+required_packages+=(g++)
+required_packages+=(git)
+required_packages+=(make)
+required_packages+=(pkg-config)
+required_packages+=(${workaround_missing_packages[@]})
+${APT_INSTALL} ${required_packages[@]}
+mkdir -p build
+cp -a /arrow/cpp/examples/minimal_build build
+pushd build/minimal_build
+cmake .
+make -j$(nproc)
+./arrow_example
+c++ -std=c++11 -o arrow_example example.cc $(pkg-config --cflags --libs arrow)
+./arrow_example
+popd
+echo "::endgroup::"
+
+
+echo "::group::Test Apache Arrow GLib"
+${APT_INSTALL} libarrow-glib-dev=${package_version}
+${APT_INSTALL} libarrow-glib-doc=${package_version}
+echo "::endgroup::"
+
+
+if [ "${have_flight}" = "yes" ]; then
+ echo "::group::Test Apache Arrow Flight"
+ ${APT_INSTALL} libarrow-flight-glib-dev=${package_version}
+ ${APT_INSTALL} libarrow-flight-glib-doc=${package_version}
+ echo "::endgroup::"
+fi
+
+
+echo "::group::Test libarrow-python"
+${APT_INSTALL} libarrow-python-dev=${package_version}
+echo "::endgroup::"
+
+
+if [ "${have_plasma}" = "yes" ]; then
+ echo "::group::Test Plasma"
+ ${APT_INSTALL} libplasma-glib-dev=${package_version}
+ ${APT_INSTALL} libplasma-glib-doc=${package_version}
+ ${APT_INSTALL} plasma-store-server=${package_version}
+ echo "::endgroup::"
+fi
+
+
+echo "::group::Test Gandiva"
+${APT_INSTALL} libgandiva-glib-dev=${package_version}
+${APT_INSTALL} libgandiva-glib-doc=${package_version}
+echo "::endgroup::"
+
+
+echo "::group::Test Parquet"
+${APT_INSTALL} libparquet-glib-dev=${package_version}
+${APT_INSTALL} libparquet-glib-doc=${package_version}
+echo "::endgroup::"
diff --git a/src/arrow/dev/release/verify-release-candidate-wheels.bat b/src/arrow/dev/release/verify-release-candidate-wheels.bat
new file mode 100644
index 000000000..5bcefe80d
--- /dev/null
+++ b/src/arrow/dev/release/verify-release-candidate-wheels.bat
@@ -0,0 +1,107 @@
+@rem Licensed to the Apache Software Foundation (ASF) under one
+@rem or more contributor license agreements. See the NOTICE file
+@rem distributed with this work for additional information
+@rem regarding copyright ownership. The ASF licenses this file
+@rem to you under the Apache License, Version 2.0 (the
+@rem "License"); you may not use this file except in compliance
+@rem with the License. You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing,
+@rem software distributed under the License is distributed on an
+@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@rem KIND, either express or implied. See the License for the
+@rem specific language governing permissions and limitations
+@rem under the License.
+
+@rem This script downloads and installs all Windows wheels for a release
+@rem candidate into temporary conda environments and makes sure that imports
+@rem work
+
+@rem To run the script:
+@rem verify-release-candidate-wheels.bat VERSION RC_NUM
+
+@echo on
+
+set _CURRENT_DIR=%CD%
+set _VERIFICATION_DIR=C:\tmp\arrow-verify-release-wheels
+
+if not exist "C:\tmp\" mkdir C:\tmp
+if exist %_VERIFICATION_DIR% rd %_VERIFICATION_DIR% /s /q
+if not exist %_VERIFICATION_DIR% mkdir %_VERIFICATION_DIR%
+
+cd %_VERIFICATION_DIR%
+
+@rem clone Arrow repository to obtain test requirements
+set GIT_ENV_PATH=%_VERIFICATION_DIR%\_git
+call conda create -p %GIT_ENV_PATH% ^
+ --no-shortcuts -f -q -y git ^
+ || EXIT /B 1
+call activate %GIT_ENV_PATH%
+
+git clone https://github.com/apache/arrow.git || EXIT /B 1
+pushd arrow
+git submodule update --init
+popd
+
+set ARROW_VERSION=%1
+set RC_NUMBER=%2
+
+python arrow\dev\release\download_rc_binaries.py %ARROW_VERSION% %RC_NUMBER% ^
+ --package_type python ^
+ --regex=".*win_amd64.*" || EXIT /B 1
+
+call deactivate
+
+set ARROW_TEST_DATA=%cd%\arrow\testing\data
+
+CALL :verify_wheel 3.6 m
+if errorlevel 1 GOTO error
+
+CALL :verify_wheel 3.7 m
+if errorlevel 1 GOTO error
+
+CALL :verify_wheel 3.8
+if errorlevel 1 GOTO error
+
+:done
+cd %_CURRENT_DIR%
+
+EXIT /B %ERRORLEVEL%
+
+:error
+call deactivate
+cd %_CURRENT_DIR%
+
+EXIT /B 1
+
+@rem a batch function to verify a single wheel
+:verify_wheel
+
+set PY_VERSION=%1
+set ABI_TAG=%2
+set PY_VERSION_NO_PERIOD=%PY_VERSION:.=%
+
+set CONDA_ENV_PATH=%_VERIFICATION_DIR%\_verify-wheel-%PY_VERSION%
+call conda create -p %CONDA_ENV_PATH% ^
+ --no-shortcuts -f -q -y python=%PY_VERSION% ^
+ || EXIT /B 1
+call activate %CONDA_ENV_PATH%
+
+set WHEEL_FILENAME=pyarrow-%ARROW_VERSION%-cp%PY_VERSION_NO_PERIOD%-cp%PY_VERSION_NO_PERIOD%%ABI_TAG%-win_amd64.whl
+
+pip install python-rc\%ARROW_VERSION%-rc%RC_NUMBER%\%WHEEL_FILENAME% || EXIT /B 1
+python -c "import pyarrow" || EXIT /B 1
+python -c "import pyarrow.parquet" || EXIT /B 1
+python -c "import pyarrow.flight" || EXIT /B 1
+python -c "import pyarrow.dataset" || EXIT /B 1
+
+pip install -r arrow\python\requirements-test.txt || EXIT /B 1
+pytest %CONDA_ENV_PATH%\Lib\site-packages\pyarrow --pdb -v || EXIT /B 1
+
+:done
+
+call deactivate
+
+EXIT /B 0
diff --git a/src/arrow/dev/release/verify-release-candidate.bat b/src/arrow/dev/release/verify-release-candidate.bat
new file mode 100644
index 000000000..fee8c01bc
--- /dev/null
+++ b/src/arrow/dev/release/verify-release-candidate.bat
@@ -0,0 +1,130 @@
+@rem Licensed to the Apache Software Foundation (ASF) under one
+@rem or more contributor license agreements. See the NOTICE file
+@rem distributed with this work for additional information
+@rem regarding copyright ownership. The ASF licenses this file
+@rem to you under the Apache License, Version 2.0 (the
+@rem "License"); you may not use this file except in compliance
+@rem with the License. You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing,
+@rem software distributed under the License is distributed on an
+@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@rem KIND, either express or implied. See the License for the
+@rem specific language governing permissions and limitations
+@rem under the License.
+
+@rem To run the script:
+@rem verify-release-candidate.bat VERSION RC_NUM
+
+@echo on
+
+if not exist "C:\tmp\" mkdir C:\tmp
+if exist "C:\tmp\arrow-verify-release" rd C:\tmp\arrow-verify-release /s /q
+if not exist "C:\tmp\arrow-verify-release" mkdir C:\tmp\arrow-verify-release
+
+set _VERIFICATION_DIR=C:\tmp\arrow-verify-release
+set _VERIFICATION_DIR_UNIX=C:/tmp/arrow-verify-release
+set _VERIFICATION_CONDA_ENV=%_VERIFICATION_DIR%\conda-env
+set _DIST_URL=https://dist.apache.org/repos/dist/dev/arrow
+set _TARBALL=apache-arrow-%1.tar.gz
+set ARROW_SOURCE=%_VERIFICATION_DIR%\apache-arrow-%1
+set INSTALL_DIR=%_VERIFICATION_DIR%\install
+
+@rem Requires GNU Wget for Windows
+wget --no-check-certificate -O %_TARBALL% %_DIST_URL%/apache-arrow-%1-rc%2/%_TARBALL% || exit /B 1
+
+tar xf %_TARBALL% -C %_VERIFICATION_DIR_UNIX%
+
+set PYTHON=3.6
+
+@rem Using call with conda.bat seems necessary to avoid terminating the batch
+@rem script execution
+call conda create --no-shortcuts -c conda-forge -f -q -y -p %_VERIFICATION_CONDA_ENV% ^
+ --file=ci\conda_env_cpp.txt ^
+ --file=ci\conda_env_python.txt ^
+ git ^
+ python=%PYTHON% ^
+ || exit /B 1
+
+call activate %_VERIFICATION_CONDA_ENV% || exit /B 1
+
+set GENERATOR=Visual Studio 15 2017 Win64
+set CONFIGURATION=release
+
+pushd %ARROW_SOURCE%
+
+set ARROW_HOME=%INSTALL_DIR%
+set PARQUET_HOME=%INSTALL_DIR%
+set PATH=%INSTALL_DIR%\bin;%PATH%
+
+@rem Build and test Arrow C++ libraries
+mkdir %ARROW_SOURCE%\cpp\build
+pushd %ARROW_SOURCE%\cpp\build
+
+@rem This is the path for Visual Studio Community 2017
+call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=amd64
+
+@rem NOTE(wesm): not using Ninja for now to be able to more easily control the
+@rem generator used
+
+cmake -G "%GENERATOR%" ^
+ -DARROW_BOOST_USE_SHARED=ON ^
+ -DARROW_BUILD_STATIC=OFF ^
+ -DARROW_BUILD_TESTS=ON ^
+ -DARROW_CXXFLAGS="/MP" ^
+ -DARROW_DATASET=ON ^
+ -DARROW_FLIGHT=ON ^
+ -DARROW_MIMALLOC=ON ^
+ -DARROW_PARQUET=ON ^
+ -DARROW_PYTHON=ON ^
+ -DARROW_WITH_BROTLI=ON ^
+ -DARROW_WITH_BZ2=ON ^
+ -DARROW_WITH_LZ4=ON ^
+ -DARROW_WITH_SNAPPY=ON ^
+ -DARROW_WITH_ZLIB=ON ^
+ -DARROW_WITH_ZSTD=ON ^
+ -DCMAKE_BUILD_TYPE=%CONFIGURATION% ^
+ -DCMAKE_INSTALL_PREFIX=%ARROW_HOME% ^
+ -DCMAKE_UNITY_BUILD=ON ^
+ -DGTest_SOURCE=BUNDLED ^
+ .. || exit /B
+
+cmake --build . --target INSTALL --config Release || exit /B 1
+
+@rem NOTE(wesm): Building googletest is flaky for me with ninja. Building it
+@rem first fixes the problem
+
+@rem ninja googletest_ep || exit /B 1
+@rem ninja install || exit /B 1
+
+@rem Get testing datasets for Parquet unit tests
+git clone https://github.com/apache/parquet-testing.git %_VERIFICATION_DIR%\parquet-testing
+set PARQUET_TEST_DATA=%_VERIFICATION_DIR%\parquet-testing\data
+
+git clone https://github.com/apache/arrow-testing.git %_VERIFICATION_DIR%\arrow-testing
+set ARROW_TEST_DATA=%_VERIFICATION_DIR%\arrow-testing\data
+
+@rem Needed so python-test.exe works
+set PYTHONPATH_ORIGINAL=%PYTHONPATH%
+set PYTHONPATH=%CONDA_PREFIX%\Lib;%CONDA_PREFIX%\Lib\site-packages;%CONDA_PREFIX%\DLLs;%CONDA_PREFIX%;%PYTHONPATH%
+ctest -VV || exit /B 1
+set PYTHONPATH=%PYTHONPATH_ORIGINAL%
+popd
+
+@rem Build and import pyarrow
+pushd %ARROW_SOURCE%\python
+
+pip install -r requirements-test.txt || exit /B 1
+
+set PYARROW_CMAKE_GENERATOR=%GENERATOR%
+set PYARROW_WITH_FLIGHT=1
+set PYARROW_WITH_PARQUET=1
+set PYARROW_WITH_DATASET=1
+python setup.py build_ext --inplace --bundle-arrow-cpp bdist_wheel || exit /B 1
+pytest pyarrow -v -s --enable-parquet || exit /B 1
+
+popd
+
+call deactivate
diff --git a/src/arrow/dev/release/verify-release-candidate.sh b/src/arrow/dev/release/verify-release-candidate.sh
new file mode 100755
index 000000000..3da89360c
--- /dev/null
+++ b/src/arrow/dev/release/verify-release-candidate.sh
@@ -0,0 +1,817 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Requirements
+# - Ruby >= 2.3
+# - Maven >= 3.3.9
+# - JDK >=7
+# - gcc >= 4.8
+# - Node.js >= 11.12 (best way is to use nvm)
+# - Go >= 1.15
+#
+# If using a non-system Boost, set BOOST_ROOT and add Boost libraries to
+# LD_LIBRARY_PATH.
+#
+# To reuse build artifacts between runs set ARROW_TMPDIR environment variable to
+# a directory where the temporary files should be placed to, note that this
+# directory is not cleaned up automatically.
+
+case $# in
+ 3) ARTIFACT="$1"
+ VERSION="$2"
+ RC_NUMBER="$3"
+ case $ARTIFACT in
+ source|binaries|wheels) ;;
+ *) echo "Invalid argument: '${ARTIFACT}', valid options are \
+'source', 'binaries', or 'wheels'"
+ exit 1
+ ;;
+ esac
+ ;;
+ *) echo "Usage: $0 source|binaries|wheels X.Y.Z RC_NUMBER"
+ exit 1
+ ;;
+esac
+
+set -e
+set -x
+set -o pipefail
+
+SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd)"
+ARROW_DIR="$(dirname $(dirname ${SOURCE_DIR}))"
+
+detect_cuda() {
+ if ! (which nvcc && which nvidia-smi) > /dev/null; then
+ return 1
+ fi
+
+ local n_gpus=$(nvidia-smi --list-gpus | wc -l)
+ return $((${n_gpus} < 1))
+}
+
+# Build options for the C++ library
+
+if [ -z "${ARROW_CUDA:-}" ] && detect_cuda; then
+ ARROW_CUDA=ON
+fi
+: ${ARROW_CUDA:=OFF}
+: ${ARROW_FLIGHT:=ON}
+: ${ARROW_GANDIVA:=ON}
+
+ARROW_DIST_URL='https://dist.apache.org/repos/dist/dev/arrow'
+
+download_dist_file() {
+ curl \
+ --silent \
+ --show-error \
+ --fail \
+ --location \
+ --remote-name $ARROW_DIST_URL/$1
+}
+
+download_rc_file() {
+ download_dist_file apache-arrow-${VERSION}-rc${RC_NUMBER}/$1
+}
+
+import_gpg_keys() {
+ download_dist_file KEYS
+ gpg --import KEYS
+}
+
+fetch_archive() {
+ local dist_name=$1
+ download_rc_file ${dist_name}.tar.gz
+ download_rc_file ${dist_name}.tar.gz.asc
+ download_rc_file ${dist_name}.tar.gz.sha256
+ download_rc_file ${dist_name}.tar.gz.sha512
+ gpg --verify ${dist_name}.tar.gz.asc ${dist_name}.tar.gz
+ shasum -a 256 -c ${dist_name}.tar.gz.sha256
+ shasum -a 512 -c ${dist_name}.tar.gz.sha512
+}
+
+verify_dir_artifact_signatures() {
+ # verify the signature and the checksums of each artifact
+ find $1 -name '*.asc' | while read sigfile; do
+ artifact=${sigfile/.asc/}
+ gpg --verify $sigfile $artifact || exit 1
+
+ # go into the directory because the checksum files contain only the
+ # basename of the artifact
+ pushd $(dirname $artifact)
+ base_artifact=$(basename $artifact)
+ if [ -f $base_artifact.sha256 ]; then
+ shasum -a 256 -c $base_artifact.sha256 || exit 1
+ fi
+ shasum -a 512 -c $base_artifact.sha512 || exit 1
+ popd
+ done
+}
+
+test_binary() {
+ local download_dir=binaries
+ mkdir -p ${download_dir}
+
+ ${PYTHON:-python} $SOURCE_DIR/download_rc_binaries.py $VERSION $RC_NUMBER \
+ --dest=${download_dir}
+
+ verify_dir_artifact_signatures ${download_dir}
+}
+
+test_apt() {
+ for target in "debian:buster" \
+ "arm64v8/debian:buster" \
+ "debian:bullseye" \
+ "arm64v8/debian:bullseye" \
+ "debian:bookworm" \
+ "arm64v8/debian:bookworm" \
+ "ubuntu:bionic" \
+ "arm64v8/ubuntu:bionic" \
+ "ubuntu:focal" \
+ "arm64v8/ubuntu:focal" \
+ "ubuntu:hirsute" \
+ "arm64v8/ubuntu:hirsute" \
+ "ubuntu:impish" \
+ "arm64v8/ubuntu:impish"; do \
+ case "${target}" in
+ arm64v8/*)
+ if [ "$(arch)" = "aarch64" -o -e /usr/bin/qemu-aarch64-static ]; then
+ case "${target}" in
+ arm64v8/debian:buster|arm64v8/ubuntu:bionic|arm64v8/ubuntu:focal)
+ ;; # OK
+ *)
+ # qemu-user-static in Ubuntu 20.04 has a crash bug:
+ # https://bugs.launchpad.net/qemu/+bug/1749393
+ continue
+ ;;
+ esac
+ else
+ continue
+ fi
+ ;;
+ esac
+ if ! docker run --rm -v "${SOURCE_DIR}"/../..:/arrow:delegated \
+ "${target}" \
+ /arrow/dev/release/verify-apt.sh \
+ "${VERSION}" \
+ "rc"; then
+ echo "Failed to verify the APT repository for ${target}"
+ exit 1
+ fi
+ done
+}
+
+test_yum() {
+ for target in "almalinux:8" \
+ "arm64v8/almalinux:8" \
+ "amazonlinux:2" \
+ "centos:7" \
+ "centos:8" \
+ "arm64v8/centos:8"; do
+ case "${target}" in
+ arm64v8/*)
+ if [ "$(arch)" = "aarch64" -o -e /usr/bin/qemu-aarch64-static ]; then
+ : # OK
+ else
+ continue
+ fi
+ ;;
+ esac
+ if ! docker run --rm -v "${SOURCE_DIR}"/../..:/arrow:delegated \
+ "${target}" \
+ /arrow/dev/release/verify-yum.sh \
+ "${VERSION}" \
+ "rc"; then
+ echo "Failed to verify the Yum repository for ${target}"
+ exit 1
+ fi
+ done
+}
+
+
+setup_tempdir() {
+ cleanup() {
+ if [ "${TEST_SUCCESS}" = "yes" ]; then
+ rm -fr "${ARROW_TMPDIR}"
+ else
+ echo "Failed to verify release candidate. See ${ARROW_TMPDIR} for details."
+ fi
+ }
+
+ if [ -z "${ARROW_TMPDIR}" ]; then
+ # clean up automatically if ARROW_TMPDIR is not defined
+ ARROW_TMPDIR=$(mktemp -d -t "$1.XXXXX")
+ trap cleanup EXIT
+ else
+ # don't clean up automatically
+ mkdir -p "${ARROW_TMPDIR}"
+ fi
+}
+
+setup_miniconda() {
+ # Setup short-lived miniconda for Python and integration tests
+ OS="$(uname)"
+ if [ "${OS}" == "Darwin" ]; then
+ OS=MacOSX
+ fi
+ ARCH="$(uname -m)"
+ MINICONDA_URL="https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-${OS}-${ARCH}.sh"
+
+ MINICONDA=$PWD/test-miniconda
+
+ if [ ! -d "${MINICONDA}" ]; then
+ # Setup miniconda only if the directory doesn't exist yet
+ wget -O miniconda.sh $MINICONDA_URL
+ bash miniconda.sh -b -p $MINICONDA
+ rm -f miniconda.sh
+ fi
+ echo "Installed miniconda at ${MINICONDA}"
+
+ . $MINICONDA/etc/profile.d/conda.sh
+
+ conda create -n arrow-test -y -q -c conda-forge \
+ python=3.8 \
+ nomkl \
+ numpy \
+ pandas \
+ cython
+ conda activate arrow-test
+ echo "Using conda environment ${CONDA_PREFIX}"
+}
+
+# Build and test Java (Requires newer Maven -- I used 3.3.9)
+
+test_package_java() {
+ pushd java
+
+ mvn test
+ mvn package
+
+ popd
+}
+
+# Build and test C++
+
+test_and_install_cpp() {
+ mkdir -p cpp/build
+ pushd cpp/build
+
+ ARROW_CMAKE_OPTIONS="
+${ARROW_CMAKE_OPTIONS:-}
+-DCMAKE_INSTALL_PREFIX=$ARROW_HOME
+-DCMAKE_INSTALL_LIBDIR=lib
+-DARROW_FLIGHT=${ARROW_FLIGHT}
+-DARROW_PLASMA=ON
+-DARROW_ORC=ON
+-DARROW_PYTHON=ON
+-DARROW_GANDIVA=${ARROW_GANDIVA}
+-DARROW_PARQUET=ON
+-DARROW_DATASET=ON
+-DPARQUET_REQUIRE_ENCRYPTION=ON
+-DARROW_VERBOSE_THIRDPARTY_BUILD=ON
+-DARROW_WITH_BZ2=ON
+-DARROW_WITH_ZLIB=ON
+-DARROW_WITH_ZSTD=ON
+-DARROW_WITH_LZ4=ON
+-DARROW_WITH_SNAPPY=ON
+-DARROW_WITH_BROTLI=ON
+-DARROW_BOOST_USE_SHARED=ON
+-DCMAKE_BUILD_TYPE=release
+-DARROW_BUILD_TESTS=ON
+-DARROW_BUILD_INTEGRATION=ON
+-DARROW_CUDA=${ARROW_CUDA}
+-DARROW_DEPENDENCY_SOURCE=AUTO
+"
+ cmake $ARROW_CMAKE_OPTIONS ..
+
+ make -j$NPROC install
+
+ # TODO: ARROW-5036: plasma-serialization_tests broken
+ # TODO: ARROW-5054: libgtest.so link failure in flight-server-test
+ LD_LIBRARY_PATH=$PWD/release:$LD_LIBRARY_PATH ctest \
+ --exclude-regex "plasma-serialization_tests" \
+ -j$NPROC \
+ --output-on-failure \
+ -L unittest
+ popd
+}
+
+test_csharp() {
+ pushd csharp
+
+ local csharp_bin=${PWD}/bin
+ mkdir -p ${csharp_bin}
+
+ if which dotnet > /dev/null 2>&1; then
+ if ! which sourcelink > /dev/null 2>&1; then
+ local dotnet_tools_dir=$HOME/.dotnet/tools
+ if [ -d "${dotnet_tools_dir}" ]; then
+ PATH="${dotnet_tools_dir}:$PATH"
+ fi
+ fi
+ else
+ local dotnet_version=3.1.405
+ local dotnet_platform=
+ case "$(uname)" in
+ Linux)
+ dotnet_platform=linux
+ ;;
+ Darwin)
+ dotnet_platform=macos
+ ;;
+ esac
+ local dotnet_download_thank_you_url=https://dotnet.microsoft.com/download/thank-you/dotnet-sdk-${dotnet_version}-${dotnet_platform}-x64-binaries
+ local dotnet_download_url=$( \
+ curl --location ${dotnet_download_thank_you_url} | \
+ grep 'window\.open' | \
+ grep -E -o '[^"]+' | \
+ sed -n 2p)
+ curl ${dotnet_download_url} | \
+ tar xzf - -C ${csharp_bin}
+ PATH=${csharp_bin}:${PATH}
+ fi
+
+ dotnet test
+ mv dummy.git ../.git
+ dotnet pack -c Release
+ mv ../.git dummy.git
+
+ if ! which sourcelink > /dev/null 2>&1; then
+ dotnet tool install --tool-path ${csharp_bin} sourcelink
+ PATH=${csharp_bin}:${PATH}
+ if ! sourcelink --help > /dev/null 2>&1; then
+ export DOTNET_ROOT=${csharp_bin}
+ fi
+ fi
+
+ sourcelink test artifacts/Apache.Arrow/Release/netstandard1.3/Apache.Arrow.pdb
+ sourcelink test artifacts/Apache.Arrow/Release/netcoreapp2.1/Apache.Arrow.pdb
+
+ popd
+}
+
+# Build and test Python
+
+test_python() {
+ pushd python
+
+ pip install -r requirements-build.txt -r requirements-test.txt
+
+ export PYARROW_WITH_DATASET=1
+ export PYARROW_WITH_PARQUET=1
+ export PYARROW_WITH_PLASMA=1
+ if [ "${ARROW_CUDA}" = "ON" ]; then
+ export PYARROW_WITH_CUDA=1
+ fi
+ if [ "${ARROW_FLIGHT}" = "ON" ]; then
+ export PYARROW_WITH_FLIGHT=1
+ fi
+ if [ "${ARROW_GANDIVA}" = "ON" ]; then
+ export PYARROW_WITH_GANDIVA=1
+ fi
+
+ python setup.py build_ext --inplace
+ pytest pyarrow -v --pdb
+
+ popd
+}
+
+test_glib() {
+ pushd c_glib
+
+ pip install meson
+
+ meson build --prefix=$ARROW_HOME --libdir=lib
+ ninja -C build
+ ninja -C build install
+
+ export GI_TYPELIB_PATH=$ARROW_HOME/lib/girepository-1.0:$GI_TYPELIB_PATH
+
+ if ! bundle --version; then
+ gem install --no-document bundler
+ fi
+
+ bundle install --path vendor/bundle
+ bundle exec ruby test/run-test.rb
+
+ popd
+}
+
+test_js() {
+ pushd js
+
+ if [ "${INSTALL_NODE}" -gt 0 ]; then
+ export NVM_DIR="`pwd`/.nvm"
+ mkdir -p $NVM_DIR
+ curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | \
+ PROFILE=/dev/null bash
+ [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
+
+ nvm install --lts
+ npm install -g yarn
+ fi
+
+ yarn --frozen-lockfile
+ yarn run-s clean:all lint build
+ yarn test
+ popd
+}
+
+test_ruby() {
+ pushd ruby
+
+ local modules="red-arrow red-arrow-dataset red-plasma red-parquet"
+ if [ "${ARROW_CUDA}" = "ON" ]; then
+ modules="${modules} red-arrow-cuda"
+ fi
+ if [ "${ARROW_FLIGHT}" = "ON" ]; then
+ modules="${modules} red-arrow-flight"
+ fi
+ if [ "${ARROW_GANDIVA}" = "ON" ]; then
+ modules="${modules} red-gandiva"
+ fi
+
+ for module in ${modules}; do
+ pushd ${module}
+ bundle install --path vendor/bundle
+ bundle exec ruby test/run-test.rb
+ popd
+ done
+
+ popd
+}
+
+test_go() {
+ local VERSION=1.15.14
+ local ARCH=amd64
+
+ if [ "$(uname)" == "Darwin" ]; then
+ local OS=darwin
+ else
+ local OS=linux
+ fi
+
+ local GO_ARCHIVE=go$VERSION.$OS-$ARCH.tar.gz
+ wget https://dl.google.com/go/$GO_ARCHIVE
+
+ mkdir -p local-go
+ tar -xzf $GO_ARCHIVE -C local-go
+ rm -f $GO_ARCHIVE
+
+ export GOROOT=`pwd`/local-go/go
+ export GOPATH=`pwd`/local-go/gopath
+ export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
+
+ pushd go/arrow
+
+ go get -v ./...
+ go test ./...
+ go clean -modcache
+
+ popd
+}
+
+# Run integration tests
+test_integration() {
+ JAVA_DIR=$PWD/java
+ CPP_BUILD_DIR=$PWD/cpp/build
+
+ export ARROW_JAVA_INTEGRATION_JAR=$JAVA_DIR/tools/target/arrow-tools-$VERSION-jar-with-dependencies.jar
+ export ARROW_CPP_EXE_PATH=$CPP_BUILD_DIR/release
+
+ pip install -e dev/archery
+
+ INTEGRATION_TEST_ARGS=""
+
+ if [ "${ARROW_FLIGHT}" = "ON" ]; then
+ INTEGRATION_TEST_ARGS="${INTEGRATION_TEST_ARGS} --run-flight"
+ fi
+
+ # Flight integration test executable have runtime dependency on
+ # release/libgtest.so
+ LD_LIBRARY_PATH=$ARROW_CPP_EXE_PATH:$LD_LIBRARY_PATH \
+ archery integration \
+ --with-cpp=${TEST_INTEGRATION_CPP} \
+ --with-java=${TEST_INTEGRATION_JAVA} \
+ --with-js=${TEST_INTEGRATION_JS} \
+ --with-go=${TEST_INTEGRATION_GO} \
+ $INTEGRATION_TEST_ARGS
+}
+
+clone_testing_repositories() {
+ # Clone testing repositories if not cloned already
+ if [ ! -d "arrow-testing" ]; then
+ git clone https://github.com/apache/arrow-testing.git
+ fi
+ if [ ! -d "parquet-testing" ]; then
+ git clone https://github.com/apache/parquet-testing.git
+ fi
+ export ARROW_TEST_DATA=$PWD/arrow-testing/data
+ export PARQUET_TEST_DATA=$PWD/parquet-testing/data
+}
+
+test_source_distribution() {
+ export ARROW_HOME=$ARROW_TMPDIR/install
+ export PARQUET_HOME=$ARROW_TMPDIR/install
+ export LD_LIBRARY_PATH=$ARROW_HOME/lib:${LD_LIBRARY_PATH:-}
+ export PKG_CONFIG_PATH=$ARROW_HOME/lib/pkgconfig:${PKG_CONFIG_PATH:-}
+
+ if [ "$(uname)" == "Darwin" ]; then
+ NPROC=$(sysctl -n hw.ncpu)
+ else
+ NPROC=$(nproc)
+ fi
+
+ clone_testing_repositories
+
+ if [ ${TEST_JAVA} -gt 0 ]; then
+ test_package_java
+ fi
+ if [ ${TEST_CPP} -gt 0 ]; then
+ test_and_install_cpp
+ fi
+ if [ ${TEST_CSHARP} -gt 0 ]; then
+ test_csharp
+ fi
+ if [ ${TEST_PYTHON} -gt 0 ]; then
+ test_python
+ fi
+ if [ ${TEST_GLIB} -gt 0 ]; then
+ test_glib
+ fi
+ if [ ${TEST_RUBY} -gt 0 ]; then
+ test_ruby
+ fi
+ if [ ${TEST_JS} -gt 0 ]; then
+ test_js
+ fi
+ if [ ${TEST_GO} -gt 0 ]; then
+ test_go
+ fi
+ if [ ${TEST_INTEGRATION} -gt 0 ]; then
+ test_integration
+ fi
+}
+
+test_binary_distribution() {
+ if [ ${TEST_BINARY} -gt 0 ]; then
+ test_binary
+ fi
+ if [ ${TEST_APT} -gt 0 ]; then
+ test_apt
+ fi
+ if [ ${TEST_YUM} -gt 0 ]; then
+ test_yum
+ fi
+}
+
+test_linux_wheels() {
+ if [ "$(uname -m)" = "aarch64" ]; then
+ local arch="aarch64"
+ else
+ local arch="x86_64"
+ fi
+
+ local py_arches="3.6m 3.7m 3.8 3.9"
+ local platform_tags="manylinux_2_12_${arch}.manylinux2010_${arch} manylinux_2_17_${arch}.manylinux2014_${arch}"
+
+ for py_arch in ${py_arches}; do
+ local env=_verify_wheel-${py_arch}
+ conda create -yq -n ${env} python=${py_arch//[mu]/}
+ conda activate ${env}
+ pip install -U pip
+
+ for tag in ${platform_tags}; do
+ # check the mandatory and optional imports
+ pip install python-rc/${VERSION}-rc${RC_NUMBER}/pyarrow-${VERSION}-cp${py_arch//[mu.]/}-cp${py_arch//./}-${tag}.whl
+ INSTALL_PYARROW=OFF ${ARROW_DIR}/ci/scripts/python_wheel_unix_test.sh ${ARROW_DIR}
+ done
+
+ conda deactivate
+ done
+}
+
+test_macos_wheels() {
+  # Verify macOS pyarrow wheels: install each wheel into a fresh conda
+  # environment (and, on Apple silicon, into per-arch virtualenvs) and run
+  # the upstream unix wheel smoke test against it.
+  local py_arches="3.6m 3.7m 3.8 3.9"
+  local macos_version=$(sw_vers -productVersion)
+  # Keep only the major.minor components ("10.13.6" -> "10.13",
+  # "11.2.6" -> "11.2").  The previous ${macos_version:0:5} slice produced
+  # wrong results such as "11.2." for three-component versions.
+  local macos_short_version=$(echo "${macos_version}" | cut -d. -f1,2)
+
+  local check_s3=ON
+  local check_flight=ON
+
+  # S3 support is skipped on macOS <= 10.13.  printf is required here:
+  # plain bash echo does not interpret "\n", so the old
+  # echo "...\n10.14" fed sort -V a single line and this check never fired.
+  # The explicit != guard keeps S3 enabled on exactly 10.14.
+  if [ "${macos_short_version}" != "10.14" ] && \
+     [ "$(printf '%s\n10.14\n' "${macos_short_version}" | sort -V | head -n1)" = "${macos_short_version}" ]; then
+    check_s3=OFF
+  fi
+  # Apple silicon: only 3.8/3.9 wheels are verified and Flight is skipped.
+  if [ "$(uname -m)" = "arm64" ]; then
+    py_arches="3.8 3.9"
+    check_flight=OFF
+  fi
+
+  # verify arch-native wheels inside an arch-native conda environment
+  for py_arch in ${py_arches}; do
+    local env=_verify_wheel-${py_arch}
+    conda create -yq -n ${env} python=${py_arch//m/}
+    conda activate ${env}
+    pip install -U pip
+
+    # check the mandatory and optional imports
+    pip install --find-links python-rc/${VERSION}-rc${RC_NUMBER} pyarrow==${VERSION}
+    INSTALL_PYARROW=OFF ARROW_FLIGHT=${check_flight} ARROW_S3=${check_s3} \
+      ${ARROW_DIR}/ci/scripts/python_wheel_unix_test.sh ${ARROW_DIR}
+
+    conda deactivate
+  done
+
+  # verify arm64 and universal2 wheels using an universal2 python binary
+  # the interpreter should be installed from python.org:
+  # https://www.python.org/ftp/python/3.9.6/python-3.9.6-macosx10.9.pkg
+  if [ "$(uname -m)" = "arm64" ]; then
+    for py_arch in "3.9"; do
+      local pyver=${py_arch//m/}
+      local python="/Library/Frameworks/Python.framework/Versions/${pyver}/bin/python${pyver}"
+
+      # create and activate a virtualenv for testing as arm64
+      for arch in "arm64" "x86_64"; do
+        local venv="${ARROW_TMPDIR}/test-${arch}-virtualenv"
+        $python -m virtualenv $venv
+        source $venv/bin/activate
+        pip install -U pip
+
+        # install pyarrow's universal2 wheel
+        pip install \
+          --find-links python-rc/${VERSION}-rc${RC_NUMBER} \
+          --target $(python -c 'import site; print(site.getsitepackages()[0])') \
+          --platform macosx_11_0_universal2 \
+          --only-binary=:all: \
+          pyarrow==${VERSION}
+        # check the imports and execute the unittests
+        INSTALL_PYARROW=OFF ARROW_FLIGHT=${check_flight} ARROW_S3=${check_s3} \
+          arch -${arch} ${ARROW_DIR}/ci/scripts/python_wheel_unix_test.sh ${ARROW_DIR}
+
+        deactivate
+      done
+    done
+  fi
+}
+
+test_wheels() {
+ clone_testing_repositories
+
+ local download_dir=binaries
+ mkdir -p ${download_dir}
+
+ if [ "$(uname)" == "Darwin" ]; then
+ local filter_regex=.*macosx.*
+ else
+ local filter_regex=.*manylinux.*
+ fi
+
+ python $SOURCE_DIR/download_rc_binaries.py $VERSION $RC_NUMBER \
+ --package_type python \
+ --regex=${filter_regex} \
+ --dest=${download_dir}
+
+ verify_dir_artifact_signatures ${download_dir}
+
+ pushd ${download_dir}
+
+ if [ "$(uname)" == "Darwin" ]; then
+ test_macos_wheels
+ else
+ test_linux_wheels
+ fi
+
+ popd
+}
+
+# By default test all functionalities.
+# To deactivate one test, deactivate the test and all of its dependents
+# To explicitly select one test, set TEST_DEFAULT=0 TEST_X=1
+
+# Install NodeJS locally for running the JavaScript tests rather than using the
+# system Node installation, which may be too old.
+: ${INSTALL_NODE:=1}
+
+# Choose the top-level verification mode from the requested artifact type;
+# anything not enabled here defaults to off below.
+if [ "${ARTIFACT}" == "source" ]; then
+  : ${TEST_SOURCE:=1}
+elif [ "${ARTIFACT}" == "wheels" ]; then
+  TEST_WHEELS=1
+else
+  TEST_BINARY_DISTRIBUTIONS=1
+fi
+: ${TEST_SOURCE:=0}
+: ${TEST_WHEELS:=0}
+: ${TEST_BINARY_DISTRIBUTIONS:=0}
+
+# Per-language switches; each defaults to TEST_DEFAULT (1 unless overridden).
+: ${TEST_DEFAULT:=1}
+: ${TEST_JAVA:=${TEST_DEFAULT}}
+: ${TEST_CPP:=${TEST_DEFAULT}}
+: ${TEST_CSHARP:=${TEST_DEFAULT}}
+: ${TEST_GLIB:=${TEST_DEFAULT}}
+: ${TEST_RUBY:=${TEST_DEFAULT}}
+: ${TEST_PYTHON:=${TEST_DEFAULT}}
+: ${TEST_JS:=${TEST_DEFAULT}}
+: ${TEST_GO:=${TEST_DEFAULT}}
+: ${TEST_INTEGRATION:=${TEST_DEFAULT}}
+# Binary/apt/yum checks only default on when binary distributions are tested.
+if [ ${TEST_BINARY_DISTRIBUTIONS} -gt 0 ]; then
+  TEST_BINARY_DISTRIBUTIONS_DEFAULT=${TEST_DEFAULT}
+else
+  TEST_BINARY_DISTRIBUTIONS_DEFAULT=0
+fi
+: ${TEST_BINARY:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
+: ${TEST_APT:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
+: ${TEST_YUM:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
+
+# For selective Integration testing, set TEST_DEFAULT=0 TEST_INTEGRATION_X=1 TEST_INTEGRATION_Y=1
+: ${TEST_INTEGRATION_CPP:=${TEST_INTEGRATION}}
+: ${TEST_INTEGRATION_JAVA:=${TEST_INTEGRATION}}
+: ${TEST_INTEGRATION_JS:=${TEST_INTEGRATION}}
+: ${TEST_INTEGRATION_GO:=${TEST_INTEGRATION}}
+
+# Automatically test if its activated by a dependent
+# (a flag ends up > 0 when it was requested directly or by any dependent).
+TEST_GLIB=$((${TEST_GLIB} + ${TEST_RUBY}))
+TEST_CPP=$((${TEST_CPP} + ${TEST_GLIB} + ${TEST_PYTHON} + ${TEST_INTEGRATION_CPP}))
+TEST_JAVA=$((${TEST_JAVA} + ${TEST_INTEGRATION_JAVA}))
+TEST_JS=$((${TEST_JS} + ${TEST_INTEGRATION_JS}))
+TEST_GO=$((${TEST_GO} + ${TEST_INTEGRATION_GO}))
+TEST_INTEGRATION=$((${TEST_INTEGRATION} + ${TEST_INTEGRATION_CPP} + ${TEST_INTEGRATION_JAVA} + ${TEST_INTEGRATION_JS} + ${TEST_INTEGRATION_GO}))
+
+# Provision miniconda only when an enabled test actually needs a conda env.
+if [ "${ARTIFACT}" == "source" ]; then
+  NEED_MINICONDA=$((${TEST_CPP} + ${TEST_INTEGRATION}))
+elif [ "${ARTIFACT}" == "wheels" ]; then
+  NEED_MINICONDA=$((${TEST_WHEELS}))
+else
+  if [ -z "${PYTHON:-}" ]; then
+    NEED_MINICONDA=$((${TEST_BINARY}))
+  else
+    NEED_MINICONDA=0
+  fi
+fi
+
+# TEST_ARCHIVE may point at a pre-downloaded tarball; make the path absolute
+# because the script chdirs into the sandbox below.
+: ${TEST_ARCHIVE:=apache-arrow-${VERSION}.tar.gz}
+case "${TEST_ARCHIVE}" in
+  /*)
+    ;;
+  *)
+    TEST_ARCHIVE=${PWD}/${TEST_ARCHIVE}
+    ;;
+esac
+
+# Flipped to "yes" only if the whole run completes.
+# NOTE(review): presumably consulted by an exit trap defined earlier in this
+# script — confirm.
+TEST_SUCCESS=no
+
+setup_tempdir "arrow-${VERSION}"
+echo "Working in sandbox ${ARROW_TMPDIR}"
+cd ${ARROW_TMPDIR}
+
+if [ ${NEED_MINICONDA} -gt 0 ]; then
+  setup_miniconda
+fi
+
+# Dispatch on the artifact type selected above.
+if [ "${ARTIFACT}" == "source" ]; then
+  dist_name="apache-arrow-${VERSION}"
+  if [ ${TEST_SOURCE} -gt 0 ]; then
+    # Download, verify and unpack the official RC source archive.
+    import_gpg_keys
+    if [ ! -d "${dist_name}" ]; then
+      fetch_archive ${dist_name}
+      tar xf ${dist_name}.tar.gz
+    fi
+  else
+    # Verify a locally supplied archive (TEST_ARCHIVE) instead of downloading.
+    mkdir -p ${dist_name}
+    if [ ! -f ${TEST_ARCHIVE} ]; then
+      echo "${TEST_ARCHIVE} not found"
+      exit 1
+    fi
+    tar xf ${TEST_ARCHIVE} -C ${dist_name} --strip-components=1
+  fi
+  pushd ${dist_name}
+  test_source_distribution
+  popd
+elif [ "${ARTIFACT}" == "wheels" ]; then
+  import_gpg_keys
+  test_wheels
+else
+  import_gpg_keys
+  test_binary_distribution
+fi
+
+TEST_SUCCESS=yes
+echo 'Release candidate looks good!'
+exit 0
diff --git a/src/arrow/dev/release/verify-yum.sh b/src/arrow/dev/release/verify-yum.sh
new file mode 100755
index 000000000..a7f572a44
--- /dev/null
+++ b/src/arrow/dev/release/verify-yum.sh
@@ -0,0 +1,204 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -exu
+
+# Require the version to verify and the repository type to verify it from.
+if [ $# -lt 2 ]; then
+  echo "Usage: $0 VERSION rc"
+  echo "       $0 VERSION staging-rc"
+  echo "       $0 VERSION release"
+  echo "       $0 VERSION staging-release"
+  echo "       $0 VERSION local"
+  echo " e.g.: $0 0.13.0 rc           # Verify 0.13.0 RC"
+  echo " e.g.: $0 0.13.0 staging-rc   # Verify 0.13.0 RC on staging"
+  echo " e.g.: $0 0.13.0 release      # Verify 0.13.0"
+  echo " e.g.: $0 0.13.0 staging-release # Verify 0.13.0 on staging"
+  echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
+  exit 1
+fi
+
+VERSION="$1"
+TYPE="$2"
+
+# Root of the locally built packages (only used with TYPE=local).
+local_prefix="/arrow/dev/tasks/linux-packages"
+
+# Published RC/release packages are served from Artifactory.
+artifactory_base_url="https://apache.jfrog.io/artifactory/arrow"
+
+# Detect the RPM distribution this runs on (e.g. centos-7, amzn-2); only the
+# numeric major version is kept.
+distribution=$(. /etc/os-release && echo "${ID}")
+distribution_version=$(. /etc/os-release && echo "${VERSION_ID}" | grep -o "^[0-9]*")
+distribution_prefix="centos"
+
+# Defaults assume a modern RHEL-like distribution: dnf with the "powertools"
+# repository and all optional components enabled.
+cmake_package=cmake
+cmake_command=cmake
+have_flight=yes
+have_gandiva=yes
+have_glib=yes
+have_parquet=yes
+have_python=yes
+install_command="dnf install -y --enablerepo=powertools"
+
+# Per-distribution overrides: older distributions use yum and cmake3 and have
+# some components (Flight, Gandiva, Python) disabled here.
+case "${distribution}-${distribution_version}" in
+  almalinux-*)
+    distribution_prefix="almalinux"
+    ;;
+  amzn-2)
+    cmake_package=cmake3
+    cmake_command=cmake3
+    have_flight=no
+    have_gandiva=no
+    have_python=no
+    install_command="yum install -y"
+    distribution_prefix="amazon-linux"
+    # Enable EPEL on Amazon Linux; some dependencies are installed from it
+    # with --enablerepo=epel below.
+    amazon-linux-extras install epel -y
+    ;;
+  centos-7)
+    cmake_package=cmake3
+    cmake_command=cmake3
+    have_flight=no
+    have_gandiva=no
+    install_command="yum install -y"
+    ;;
+esac
+# Gandiva packages are not verified on 64-bit ARM.
+if [ "$(arch)" = "aarch64" ]; then
+  have_gandiva=no
+fi
+
+if [ "${TYPE}" = "local" ]; then
+  # Derive the RPM package version from the requested VERSION:
+  #   X.Y.Z-devYYYYMMDD -> X.Y.Z-0.devYYYYMMDD
+  #   X.Y.Z-rcN         -> X.Y.Z-1
+  #   X.Y.Z             -> X.Y.Z-1
+  case "${VERSION}" in
+    *-dev*)
+      package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/-0.dev\1/g')"
+      ;;
+    *-rc*)
+      package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
+      package_version+="-1"
+      ;;
+    *)
+      package_version="${VERSION}-1"
+      ;;
+  esac
+  # Install the apache-arrow-release RPM straight out of the locally built
+  # repository tree; the dist tag (.el8, .amzn2, ...) is part of the filename.
+  release_path="${local_prefix}/yum/repositories"
+  case "${distribution}" in
+    almalinux)
+      package_version+=".el${distribution_version}"
+      release_path+="/almalinux"
+      ;;
+    amzn)
+      package_version+=".${distribution}${distribution_version}"
+      release_path+="/amazon-linux"
+      amazon-linux-extras install -y epel
+      ;;
+    *)
+      package_version+=".el${distribution_version}"
+      release_path+="/centos"
+      ;;
+  esac
+  release_path+="/${distribution_version}/$(arch)/Packages"
+  release_path+="/apache-arrow-release-${package_version}.noarch.rpm"
+  ${install_command} "${release_path}"
+else
+  package_version="${VERSION}"
+  # RC and staging artifacts live under a suffixed repository prefix:
+  # rc -> "-rc", staging-rc -> "-staging-rc", staging-release -> "-staging".
+  case "${TYPE}" in
+    rc|staging-rc|staging-release)
+      suffix=${TYPE%-release}
+      distribution_prefix+="-${suffix}"
+      ;;
+  esac
+  ${install_command} \
+    ${artifactory_base_url}/${distribution_prefix}/${distribution_version}/apache-arrow-release-latest.rpm
+fi
+
+if [ "${TYPE}" = "local" ]; then
+  # Point the installed .repo file at the local repository tree and, when a
+  # KEYS file is present, use the local signing key.
+  sed \
+    -i"" \
+    -e "s,baseurl=https://apache\.jfrog\.io/artifactory/arrow/,baseurl=file://${local_prefix}/yum/repositories/,g" \
+    /etc/yum.repos.d/Apache-Arrow.repo
+  keys="${local_prefix}/KEYS"
+  if [ -f "${keys}" ]; then
+    cp "${keys}" /etc/pki/rpm-gpg/RPM-GPG-KEY-Apache-Arrow
+  fi
+else
+  # Rewrite the repository URLs to their RC/staging variants when needed.
+  case "${TYPE}" in
+    rc|staging-rc|staging-release)
+      suffix=${TYPE%-release}
+      sed \
+        -i"" \
+        -e "s,/almalinux/,/almalinux-${suffix}/,g" \
+        -e "s,/centos/,/centos-${suffix}/,g" \
+        -e "s,/amazon-linux/,/amazon-linux-${suffix}/,g" \
+        /etc/yum.repos.d/Apache-Arrow.repo
+      ;;
+  esac
+fi
+
+# Install the Arrow C++ development package plus a toolchain, then build the
+# bundled minimal example twice: once with CMake and once with pkg-config.
+# Under set -e, any failing install or build aborts the verification.
+${install_command} --enablerepo=epel arrow-devel-${package_version}
+${install_command} \
+  ${cmake_package} \
+  gcc-c++ \
+  git \
+  libarchive \
+  make \
+  pkg-config
+mkdir -p build
+cp -a /arrow/cpp/examples/minimal_build build
+pushd build/minimal_build
+${cmake_command} .
+make -j$(nproc)
+./arrow_example
+c++ -std=c++11 -o arrow_example example.cc $(pkg-config --cflags --libs arrow)
+./arrow_example
+popd
+
+# Install each optional component expected on this distribution; a missing
+# package makes the install command (and hence the script) fail.
+if [ "${have_glib}" = "yes" ]; then
+  ${install_command} --enablerepo=epel arrow-glib-devel-${package_version}
+  ${install_command} --enablerepo=epel arrow-glib-doc-${package_version}
+fi
+
+if [ "${have_python}" = "yes" ]; then
+  ${install_command} --enablerepo=epel arrow-python-devel-${package_version}
+fi
+
+# Plasma: GLib wrapper packages when GLib is available, plain C++ otherwise.
+if [ "${have_glib}" = "yes" ]; then
+  ${install_command} --enablerepo=epel plasma-glib-devel-${package_version}
+  ${install_command} --enablerepo=epel plasma-glib-doc-${package_version}
+else
+  ${install_command} --enablerepo=epel plasma-devel-${package_version}
+fi
+
+if [ "${have_flight}" = "yes" ]; then
+  ${install_command} --enablerepo=epel arrow-flight-glib-devel-${package_version}
+  ${install_command} --enablerepo=epel arrow-flight-glib-doc-${package_version}
+fi
+
+if [ "${have_gandiva}" = "yes" ]; then
+  if [ "${have_glib}" = "yes" ]; then
+    ${install_command} --enablerepo=epel gandiva-glib-devel-${package_version}
+    ${install_command} --enablerepo=epel gandiva-glib-doc-${package_version}
+  else
+    ${install_command} --enablerepo=epel gandiva-devel-${package_version}
+  fi
+fi
+
+if [ "${have_parquet}" = "yes" ]; then
+  if [ "${have_glib}" = "yes" ]; then
+    ${install_command} --enablerepo=epel parquet-glib-devel-${package_version}
+    ${install_command} --enablerepo=epel parquet-glib-doc-${package_version}
+  else
+    ${install_command} --enablerepo=epel parquet-devel-${package_version}
+  fi
+fi