author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:54:28 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:54:28 +0000
commit     e6918187568dbd01842d8d1d2c808ce16a894239 (patch)
tree       64f88b554b444a49f656b6c656111a145cbbaa28  /src/arrow/docker-compose.yml
parent     Initial commit. (diff)
Adding upstream version 18.2.2. (upstream/18.2.2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/arrow/docker-compose.yml')
-rw-r--r--  src/arrow/docker-compose.yml  1579
1 file changed, 1579 insertions, 0 deletions
diff --git a/src/arrow/docker-compose.yml b/src/arrow/docker-compose.yml
new file mode 100644
index 000000000..de4d9b997
--- /dev/null
+++ b/src/arrow/docker-compose.yml
@@ -0,0 +1,1579 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Usage
+# -----
+#
+# The docker-compose file is parametrized using environment variables; the
+# defaults are set in the .env file.
+#
+# Example:
+# $ ARCH=arm64v8 docker-compose build ubuntu-cpp
+# $ ARCH=arm64v8 docker-compose run ubuntu-cpp
+#
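+# The same parameters can also be pinned in the .env file instead of being
+# exported for every invocation. A minimal sketch (the variable names mirror
+# the ones used throughout this file; the values are only illustrative):
+#
+#   # .env
+#   ARCH=amd64
+#   UBUNTU=20.04
+#   PYTHON=3.8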
+#
+# Coredumps
+# ---------
+#
+# In order to enable coredumps for the C++ tests run by CTest (either via
+# `make unittest` or `ctest --output-on-failure`), the correct coredump
+# pattern must be set.
+# The kernel settings come from the host, so while the pattern can be changed
+# from a running container using the --privileged option, the change will
+# affect all other containers as well; prefer setting it explicitly, directly
+# on the host.
+# WARNING: setting this will affect the host machine.
+#
+# Linux host:
+# $ sudo sysctl -w kernel.core_pattern=core.%e.%p
+#
+# macOS host running Docker for Mac (won't persist between restarts):
+# $ screen ~/Library/Containers/com.docker.docker/Data/vms/0/tty
+# # echo "core.%e.%p" > /proc/sys/kernel/core_pattern
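+#
+# To check which pattern is currently in effect (read-only), run on the Linux
+# host or inside the Docker for Mac VM:
+#   $ cat /proc/sys/kernel/core_pattern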
+#
+# The setup attempts to generate coredumps by default, but the correct
+# pattern above must be set. In order to disable coredump generation, set the
+# ULIMIT_CORE environment variable to 0 before running docker-compose
+# (or set it in the .env file):
+#
+# ULIMIT_CORE=0 docker-compose run --rm conda-cpp
+#
+# See more in cpp/build-support/run-test.sh::print_coredumps
+
+version: '3.5'
+
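+# ccache settings shared across the builds below; each service merges this
+# anchor into its environment with "<<: *ccache".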
+x-ccache: &ccache
+ CCACHE_COMPILERCHECK: content
+ CCACHE_COMPRESS: 1
+ CCACHE_COMPRESSLEVEL: 6
+ CCACHE_MAXSIZE: 500M
+ CCACHE_DIR: /ccache
+
+# CPU/memory limit presets to pass to Docker.
+#
+# Usage: archery docker run --resource-limit=github <image>
+#
+# Note that exporting ARCHERY_DOCKER_BIN="sudo docker" is likely required,
+# unless Docker is configured with cgroups v2 (else Docker will silently
+# ignore the limits).
+x-limit-presets:
+ # These values emulate GitHub Actions:
+ # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
+ github:
+ # Note we use cpuset and not cpus since Ninja only detects and limits
+ # parallelism given the former
+ cpuset_cpus: [0, 1]
+ memory: 7g
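+  # A quick way to check which cgroup version the local Docker daemon uses
+  # (assumes a reasonably recent Docker release that reports this field):
+  #   $ docker info --format '{{.CgroupVersion}}'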
+
+x-with-gpus:
+ - ubuntu-cuda-cpp
+ - ubuntu-cuda-python
+
+x-hierarchy:
+ # This section is used by the archery tool to enable building nested images,
+ # so it is enough to call:
+ # archery run debian-ruby
+  # instead of a sequence of docker-compose commands:
+ # docker-compose build debian-cpp
+ # docker-compose build debian-c-glib
+ # docker-compose build debian-ruby
+ # docker-compose run --rm debian-ruby
+ #
+  # Each node must be either a string scalar or a list containing the
+  # descendant images, if any. Archery checks that every node has a
+  # corresponding service entry, so any new image/service must be listed here
+  # (see the illustrative example at the end of this list).
+ - conda:
+ - conda-cpp:
+ - conda-cpp-hiveserver2
+ - conda-cpp-valgrind
+ - conda-python:
+ - conda-python-pandas
+ - conda-python-dask
+ - conda-python-hdfs
+ - conda-python-jpype
+ - conda-python-turbodbc
+ - conda-python-kartothek
+ - conda-python-spark
+ - conda-integration
+ - debian-cpp:
+ - debian-c-glib:
+ - debian-ruby
+ - debian-python
+ - debian-go:
+ - debian-go-cgo
+ - debian-go-cgo-python
+ - debian-java:
+ - debian-java-jni
+ - debian-js
+ - fedora-cpp:
+ - fedora-python
+ - ubuntu-cpp:
+ - ubuntu-c-glib:
+ - ubuntu-ruby
+ - ubuntu-lint
+ - ubuntu-python:
+ - ubuntu-docs
+ - ubuntu-python-sdist-test
+ - ubuntu-r
+ - ubuntu-r-only-r
+ - ubuntu-cpp-bundled
+ - ubuntu-cuda-cpp:
+ - ubuntu-cuda-python
+ - ubuntu-csharp
+ - ubuntu-cpp-sanitizer
+ - ubuntu-cpp-thread-sanitizer
+ - ubuntu-r-sanitizer
+ - ubuntu-r-valgrind
+ - python-sdist
+ - r
+ - r-revdepcheck
+ # helper services
+ - impala
+ - postgres
+ - python-wheel-manylinux-2010
+ - python-wheel-manylinux-2014:
+ - java-jni-manylinux-2014
+ - python-wheel-manylinux-test-imports
+ - python-wheel-manylinux-test-unittests
+ - python-wheel-windows-vs2017
+ - python-wheel-windows-test
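+  # For example, a hypothetical new image built on top of ubuntu-cpp would be
+  # registered by nesting it under its parent ("ubuntu-new-image" is only
+  # illustrative; the matching service must also be defined below):
+  #
+  #   - ubuntu-cpp:
+  #       - ubuntu-new-image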
+
+volumes:
+ conda-ccache:
+ name: ${ARCH}-conda-ccache
+ debian-ccache:
+ name: ${ARCH}-debian-${DEBIAN}-ccache
+ ubuntu-ccache:
+ name: ${ARCH}-ubuntu-${UBUNTU}-ccache
+ fedora-ccache:
+ name: ${ARCH}-fedora-${FEDORA}-ccache
+ debian-rust:
+ name: ${ARCH}-debian-${DEBIAN}-rust
+ maven-cache:
+ name: maven-cache
+ python-wheel-manylinux2010-ccache:
+ name: python-wheel-manylinux2010-ccache
+ python-wheel-manylinux2014-ccache:
+ name: python-wheel-manylinux2014-ccache
+ python-wheel-windows-clcache:
+ name: python-wheel-windows-clcache
+
+services:
+
+ ################################# C++ #######################################
+ # Release build:
+ # docker-compose run -e ARROW_BUILD_TYPE=release conda-cpp|debian-cpp|...
+ # Shared only:
+ # docker-compose run -e ARROW_BUILD_STATIC=OFF conda-cpp|debian-cpp|...
+ # Static only:
+ # docker-compose run \
+ # -e ARROW_BUILD_SHARED=OFF \
+ # -e ARROW_TEST_LINKAGE=static \
+ # conda-cpp|debian-cpp|...
+
+ conda:
+ # Base image for conda builds.
+ #
+ # Usage:
+    #   docker-compose build conda
+ # docker-compose run --rm conda
+ # Parameters:
+ # ARCH: amd64, arm32v7
+ image: ${REPO}:${ARCH}-conda
+ build:
+ context: .
+ dockerfile: ci/docker/conda.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda
+ args:
+ arch: ${ARCH}
+ prefix: /opt/conda
+ volumes:
+ - .:/arrow:delegated
+
+ conda-cpp:
+ # C++ build in conda environment, including the doxygen docs.
+ #
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose run --rm conda-cpp
+ # Parameters:
+ # ARCH: amd64, arm32v7
+ image: ${REPO}:${ARCH}-conda-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/conda-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-cpp
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ shm_size: &shm-size 2G
+ ulimits: &ulimits
+ core: ${ULIMIT_CORE}
+ environment:
+ <<: *ccache
+ ARROW_BUILD_BENCHMARKS: "ON"
+ ARROW_BUILD_EXAMPLES: "ON"
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_MIMALLOC: "ON"
+ ARROW_USE_LD_GOLD: "ON"
+ ARROW_USE_PRECOMPILED_HEADERS: "ON"
+ volumes: &conda-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
+ command: &cpp-conda-command
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build true &&
+ /arrow/ci/scripts/cpp_test.sh /arrow /build"]
+
+ conda-cpp-valgrind:
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose run --rm conda-cpp-valgrind
+ # Parameters:
+ # ARCH: amd64, arm32v7
+ image: ${REPO}:${ARCH}-conda-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/conda-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-cpp
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ prefix: /opt/conda
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ ARROW_CXXFLAGS: "-Og" # Shrink test runtime by enabling minimal optimizations
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_FLIGHT: "OFF"
+ ARROW_GANDIVA: "OFF"
+ ARROW_JEMALLOC: "OFF"
+ ARROW_RUNTIME_SIMD_LEVEL: "AVX2" # AVX512 not supported by Valgrind (ARROW-9851)
+ ARROW_S3: "OFF"
+ ARROW_TEST_MEMCHECK: "ON"
+ ARROW_USE_LD_GOLD: "ON"
+ BUILD_WARNING_LEVEL: "PRODUCTION"
+ volumes: *conda-volumes
+ command: *cpp-conda-command
+
+ debian-cpp:
+ # Usage:
+ # docker-compose build debian-cpp
+ # docker-compose run --rm debian-cpp
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # DEBIAN: 10, 11
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/debian-${DEBIAN}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
+ args:
+ arch: ${ARCH}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_MIMALLOC: "ON"
+ volumes: &debian-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
+ command: &cpp-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/cpp_test.sh /arrow /build"
+
+ ubuntu-cpp:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose run --rm ubuntu-cpp
+ # Parameters:
+ # ARCH: amd64, arm64v8, s390x, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ args:
+ arch: ${ARCH}
+ base: "${ARCH}/ubuntu:${UBUNTU}"
+ clang_tools: ${CLANG_TOOLS}
+ llvm: ${LLVM}
+ gcc_version: ${GCC_VERSION}
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_MIMALLOC: "ON"
+ volumes: &ubuntu-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}ubuntu-ccache:/ccache:delegated
+ command: *cpp-command
+
+ ubuntu-cpp-bundled:
+ # Arrow build with BUNDLED dependencies
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp-minimal.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
+ args:
+ arch: ${ARCH}
+ base: "${ARCH}/ubuntu:${UBUNTU}"
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_DEPENDENCY_SOURCE: BUNDLED
+ CMAKE_GENERATOR: "Unix Makefiles"
+ volumes: *ubuntu-volumes
+ command: *cpp-command
+
+ ubuntu-cuda-cpp:
+ # Usage:
+    #   docker-compose build ubuntu-cuda-cpp
+    #   docker-compose run --rm ubuntu-cuda-cpp
+ # Also need to edit the host docker configuration as follows:
+ # https://github.com/docker/compose/issues/6691#issuecomment-561504928
+ # Parameters:
+ # ARCH: amd64
+ # CUDA: 9.1, 10.0, 10.1
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
+ args:
+ arch: ${ARCH}
+ base: nvidia/cuda:${CUDA}-devel-ubuntu${UBUNTU}
+ clang_tools: ${CLANG_TOOLS}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_CUDA: "ON"
+ volumes: *ubuntu-volumes
+ command: *cpp-command
+
+ ubuntu-cpp-sanitizer:
+ # Usage:
+ # docker-compose build ubuntu-cpp-sanitizer
+ # docker-compose run --rm ubuntu-cpp-sanitizer
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ cap_add:
+ # For LeakSanitizer
+ - SYS_PTRACE
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ args:
+ arch: ${ARCH}
+ clang_tools: ${CLANG_TOOLS}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ volumes: *ubuntu-volumes
+ environment:
+ <<: *ccache
+ CC: clang-${CLANG_TOOLS}
+ CXX: clang++-${CLANG_TOOLS}
+ ARROW_BUILD_STATIC: "OFF"
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_FUZZING: "ON" # Check fuzz regressions
+ ARROW_JEMALLOC: "OFF"
+ ARROW_ORC: "OFF"
+ ARROW_S3: "OFF"
+ ARROW_USE_ASAN: "ON"
+ ARROW_USE_UBSAN: "ON"
+ # utf8proc 2.1.0 in Ubuntu Bionic has test failures
+ utf8proc_SOURCE: "BUNDLED"
+ command: *cpp-command
+
+ ubuntu-cpp-thread-sanitizer:
+ # Usage:
+ # docker-compose build ubuntu-cpp-thread-sanitizer
+ # docker-compose run --rm ubuntu-cpp-thread-sanitizer
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ args:
+ arch: ${ARCH}
+ clang_tools: ${CLANG_TOOLS}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ volumes: *ubuntu-volumes
+ environment:
+ <<: *ccache
+ CC: clang-${CLANG_TOOLS}
+ CXX: clang++-${CLANG_TOOLS}
+ ARROW_BUILD_STATIC: "OFF"
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_DATASET: "ON"
+ ARROW_JEMALLOC: "OFF"
+ ARROW_ORC: "OFF"
+ ARROW_S3: "OFF"
+ ARROW_USE_TSAN: "ON"
+ command: *cpp-command
+
+ fedora-cpp:
+ # Usage:
+ # docker-compose build fedora-cpp
+ # docker-compose run --rm fedora-cpp
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # FEDORA: 33
+ image: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
+ build:
+ context: .
+ dockerfile: ci/docker/fedora-${FEDORA}-cpp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
+ args:
+ arch: ${ARCH}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_ENABLE_TIMING_TESTS: # inherit
+ ARROW_MIMALLOC: "ON"
+ Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
+ volumes: &fedora-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}fedora-ccache:/ccache:delegated
+ command: *cpp-command
+
+ ############################### C GLib ######################################
+
+ debian-c-glib:
+ # Usage:
+ # docker-compose build debian-cpp
+ # docker-compose build debian-c-glib
+ # docker-compose run --rm debian-c-glib
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # DEBIAN: 10, 11
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-c-glib.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
+ args:
+ base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_GLIB_GTK_DOC: "true"
+ volumes: *debian-volumes
+ command: &c-glib-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
+ /arrow/ci/scripts/c_glib_test.sh /arrow /build"
+
+ ubuntu-c-glib:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-c-glib
+ # docker-compose run --rm ubuntu-c-glib
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-c-glib.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ ARROW_GLIB_GTK_DOC: "true"
+ volumes: *ubuntu-volumes
+ command: *c-glib-command
+
+ ############################### Ruby ########################################
+  # As long as Ruby is the only implementation built on top of C GLib, we can
+  # test C GLib and Ruby in one pass. This is an optimization to avoid
+  # redundant builds on CI services (one build for C GLib and one for Ruby
+  # doing the same work twice).
+
+ debian-ruby:
+ # Usage:
+ # docker-compose build debian-cpp
+ # docker-compose build debian-c-glib
+ # docker-compose build debian-ruby
+ # docker-compose run --rm debian-ruby
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # DEBIAN: 10, 11
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-ruby.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
+ args:
+ base: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ volumes: *debian-volumes
+ command: &ruby-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
+ /arrow/ci/scripts/c_glib_test.sh /arrow /build &&
+ /arrow/ci/scripts/ruby_test.sh /arrow /build"
+
+ ubuntu-ruby:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-c-glib
+ # docker-compose build ubuntu-ruby
+ # docker-compose run --rm ubuntu-ruby
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-ruby.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
+ shm_size: *shm-size
+ ulimits: *ulimits
+ environment:
+ <<: *ccache
+ volumes: *ubuntu-volumes
+ command: *ruby-command
+
+ ############################### Python ######################################
+
+ conda-python:
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose run --rm conda-python
+ # Parameters:
+ # ARCH: amd64, arm32v7
+ # PYTHON: 3.6, 3.7, 3.8, 3.9
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-volumes
+ command: &python-conda-command
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_test.sh /arrow"]
+
+ ubuntu-cuda-python:
+ # Usage:
+    #   docker-compose build ubuntu-cuda-cpp
+    #   docker-compose build ubuntu-cuda-python
+    #   docker-compose run --rm ubuntu-cuda-python
+ # Parameters:
+ # ARCH: amd64
+ # CUDA: 8.0, 10.0, ...
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-python-3.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ ARROW_CUDA: "ON"
+ volumes: *ubuntu-volumes
+ command: &python-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_test.sh /arrow"
+
+ debian-python:
+ # Usage:
+ # docker-compose build debian-cpp
+ # docker-compose build debian-python
+ # docker-compose run --rm debian-python
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # DEBIAN: 10, 11
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-python-3.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
+ args:
+ base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *debian-volumes
+ command: *python-command
+
+ ubuntu-python:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-python
+ # docker-compose run --rm ubuntu-python
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-python-3.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *ubuntu-volumes
+ command: *python-command
+
+ fedora-python:
+ # Usage:
+ # docker-compose build fedora-cpp
+ # docker-compose build fedora-python
+ # docker-compose run --rm fedora-python
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # FEDORA: 33
+ image: ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
+ build:
+ context: .
+ dockerfile: ci/docker/linux-dnf-python-3.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
+ args:
+ base: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
+ volumes: *fedora-volumes
+ command: *python-command
+
+ ############################ Python sdist ###################################
+
+ python-sdist:
+ # Usage:
+ # docker-compose build python-sdist
+ # docker-compose run --rm python-sdist
+ # Parameters:
+ # PYARROW_VERSION: The pyarrow version for sdist such as "3.0.0"
+ image: ${REPO}:python-sdist
+ build:
+ context: .
+ dockerfile: ci/docker/python-sdist.dockerfile
+ cache_from:
+ - ${REPO}:python-sdist
+ environment:
+ PYARROW_VERSION: ${PYARROW_VERSION:-}
+ volumes:
+ - .:/arrow:delegated
+ command: /arrow/ci/scripts/python_sdist_build.sh /arrow
+
+ ubuntu-python-sdist-test:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-python-sdist-test
+ # docker-compose run --rm ubuntu-python-sdist-test
+ # Parameters:
+ # ARCH: amd64, arm64v8, ...
+ # PYARROW_VERSION: The test target pyarrow version such as "3.0.0"
+ # UBUNTU: 18.04, 20.04
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-python-3.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ PYARROW_VERSION: ${PYARROW_VERSION:-}
+ volumes: *ubuntu-volumes
+ command: >
+ /bin/bash -c "
+ apt remove -y git &&
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_sdist_test.sh /arrow"
+
+ ############################ Python wheels ##################################
+
+ # See available versions at:
+ # https://quay.io/repository/pypa/manylinux2010_x86_64?tab=tags
+ # only amd64 arch is supported
+ python-wheel-manylinux-2010:
+ image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
+ build:
+ args:
+ arch_alias: ${ARCH_ALIAS}
+ arch_short_alias: ${ARCH_SHORT_ALIAS}
+ base: quay.io/pypa/manylinux2010_${ARCH_ALIAS}:2021-10-11-14ac00e
+ vcpkg: ${VCPKG}
+ python: ${PYTHON}
+ context: .
+ dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
+ environment:
+ <<: *ccache
+ MANYLINUX_VERSION: 2010
+ volumes:
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2010-ccache:/ccache:delegated
+ command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
+
+ # See available versions at:
+ # https://quay.io/repository/pypa/manylinux2014_x86_64?tab=tags
+ python-wheel-manylinux-2014:
+ image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
+ build:
+ args:
+ arch_alias: ${ARCH_ALIAS}
+ arch_short_alias: ${ARCH_SHORT_ALIAS}
+ base: quay.io/pypa/manylinux2014_${ARCH_ALIAS}:2021-10-11-14ac00e
+ vcpkg: ${VCPKG}
+ python: ${PYTHON}
+ context: .
+ dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
+ environment:
+ <<: *ccache
+ MANYLINUX_VERSION: 2014
+ volumes:
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
+ command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
+
+ python-wheel-manylinux-test-imports:
+ image: ${ARCH}/python:${PYTHON}
+ shm_size: 2G
+ volumes:
+ - .:/arrow:delegated
+ environment:
+ CHECK_IMPORTS: "ON"
+ CHECK_UNITTESTS: "OFF"
+ command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
+
+ python-wheel-manylinux-test-unittests:
+ image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
+ build:
+ args:
+ arch: ${ARCH}
+ python: ${PYTHON}
+ context: .
+ dockerfile: ci/docker/python-wheel-manylinux-test.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
+ shm_size: 2G
+ volumes:
+ - .:/arrow:delegated
+ environment:
+ CHECK_IMPORTS: "OFF"
+ CHECK_UNITTESTS: "ON"
+ command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
+
+ python-wheel-windows-vs2017:
+ # The windows images must be built locally and pushed to a remote registry:
+ # export REPO=ghcr.io/ursacomputing/arrow
+ # PYTHON=3.6 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
+ # PYTHON=3.7 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
+ # PYTHON=3.8 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
+ # PYTHON=3.9 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
+ # PYTHON=3.6 archery docker push python-wheel-windows-vs2017
+ # PYTHON=3.7 archery docker push python-wheel-windows-vs2017
+ # PYTHON=3.8 archery docker push python-wheel-windows-vs2017
+ # PYTHON=3.9 archery docker push python-wheel-windows-vs2017
+ image: ${REPO}:python-${PYTHON}-wheel-windows-vs2017-vcpkg-${VCPKG}
+ build:
+ args:
+ vcpkg: ${VCPKG}
+ python: ${PYTHON}
+ context: .
+ dockerfile: ci/docker/python-wheel-windows-vs2017.dockerfile
+ # This should make the pushed images reusable, but the image gets rebuilt.
+ # Uncomment if no local cache is available.
+ # cache_from:
+ # - mcr.microsoft.com/windows/servercore:ltsc2019
+ # - ${REPO}:wheel-windows-vs2017
+ volumes:
+ - "${DOCKER_VOLUME_PREFIX}python-wheel-windows-clcache:C:/clcache"
+ - type: bind
+ source: .
+ target: "C:/arrow"
+ command: arrow\\ci\\scripts\\python_wheel_windows_build.bat
+
+ python-wheel-windows-test:
+ image: python:${PYTHON}-windowsservercore-1809
+ volumes:
+ - type: bind
+ source: .
+ target: "C:/arrow"
+ command: arrow\\ci\\scripts\\python_wheel_windows_test.bat
+
+ java-jni-manylinux-2014:
+ image: ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
+ build:
+ args:
+ base: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
+ java: 1.8.0
+ context: .
+ dockerfile: ci/docker/java-jni-manylinux-201x.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
+ environment:
+ <<: *ccache
+ volumes:
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
+ command:
+ ["pip install -e /arrow/dev/archery &&
+ /arrow/ci/scripts/java_cdata_build.sh /arrow /java-native-build /arrow/java-dist &&
+ /arrow/ci/scripts/java_jni_manylinux_build.sh /arrow /build /arrow/java-dist"]
+
+ ############################## Integration #################################
+
+ conda-python-pandas:
+ # Possible $PANDAS parameters:
+ # - `latest`: latest release
+    # - `master`: git master branch, use `docker-compose build --no-cache`
+ # - `<version>`: specific version available on conda-forge
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-pandas
+ # docker-compose run --rm conda-python-pandas
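+    # For example, to test against the pandas development branch, the image
+    # can be rebuilt with the parameter set explicitly (the invocation below
+    # is illustrative, following the parametrization described at the top of
+    # this file):
+    #   PANDAS=master docker-compose build --no-cache conda-python-pandas
+    #   PANDAS=master docker-compose run --rm conda-python-pandas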
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-pandas.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ numpy: ${NUMPY}
+ pandas: ${PANDAS}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-volumes
+ command: *python-conda-command
+
+ conda-python-dask:
+ # Possible $DASK parameters:
+ # - `latest`: latest release
+    # - `master`: git master branch, use `docker-compose build --no-cache`
+ # - `<version>`: specific version available on conda-forge
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-dask
+ # docker-compose run --rm conda-python-dask
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-dask.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ dask: ${DASK}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_dask.sh"]
+
+ conda-python-jpype:
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-jpype
+ # docker-compose run --rm conda-python-jpype
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-jpype.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ ARROW_FLIGHT: "OFF"
+ ARROW_GANDIVA: "OFF"
+ volumes: *conda-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_test.sh /arrow"]
+
+ conda-python-turbodbc:
+ # Possible $TURBODBC parameters:
+ # - `latest`: latest release
+    # - `master`: git master branch, use `docker-compose build --no-cache`
+ # - `<version>`: specific version available under github releases
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-turbodbc
+ # docker-compose run --rm conda-python-turbodbc
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-turbodbc.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ turbodbc: ${TURBODBC}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_turbodbc.sh /turbodbc /build"]
+
+ conda-python-kartothek:
+ # Possible $KARTOTHEK parameters:
+ # - `latest`: latest release
+    # - `master`: git master branch, use `docker-compose build --no-cache`
+ # - `<version>`: specific version available under github releases
+ # Usage:
+ # docker-compose build conda
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-kartothek
+ # docker-compose run --rm conda-python-kartothek
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-kartothek.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ kartothek: ${KARTOTHEK}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_kartothek.sh /kartothek /build"]
+
+ ################################## R ########################################
+
+ ubuntu-r:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-r
+ # docker-compose run ubuntu-r
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-r.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
+ args:
+ arch: ${ARCH}
+ r: ${R}
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ gcc_version: ${GCC_VERSION}
+ tz: ${TZ}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ ARROW_R_CXXFLAGS: '-Werror'
+ LIBARROW_BUILD: 'false'
+ NOT_CRAN: 'true'
+ ARROW_R_DEV: ${ARROW_R_DEV}
+ volumes: *ubuntu-volumes
+ command: >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/r_test.sh /arrow"
+
+ ubuntu-r-only-r:
+ environment:
+ ARROW_DEPENDENCY_SOURCE: ''
+ extends: ubuntu-r
+ command: >
+ /bin/bash -c "
+ /arrow/ci/scripts/r_test.sh /arrow"
+
+ r:
+ # This lets you test building/installing the arrow R package
+ # (including building the C++ library) on any Docker image that contains R
+ #
+ # Usage:
+ # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose build r
+ # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose run r
+ image: ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
+ build:
+ context: .
+ dockerfile: ci/docker/linux-r.dockerfile
+ cache_from:
+ - ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
+ args:
+ base: ${R_ORG}/${R_IMAGE}:${R_TAG}
+ r_dev: ${ARROW_R_DEV}
+ devtoolset_version: ${DEVTOOLSET_VERSION}
+ tz: ${TZ}
+ shm_size: *shm-size
+ environment:
+ LIBARROW_DOWNLOAD: "false"
+ ARROW_SOURCE_HOME: "/arrow"
+ ARROW_R_DEV: ${ARROW_R_DEV}
+ # To test for CRAN release, delete ^^ these two env vars so we download the Apache release
+ ARROW_USE_PKG_CONFIG: "false"
+ devtoolset_version: ${DEVTOOLSET_VERSION}
+ volumes:
+ - .:/arrow:delegated
+ command: >
+ /bin/bash -c "/arrow/ci/scripts/r_test.sh /arrow"
+
+ ubuntu-r-sanitizer:
+ # Only 18.04 and amd64 supported
+ # Usage:
+ # docker-compose build ubuntu-r-sanitizer
+ # docker-compose run ubuntu-r-sanitizer
+ image: ${REPO}:amd64-ubuntu-18.04-r-sanitizer
+ cap_add:
+      # LeakSanitizer and gdb require ptrace(2)
+ - SYS_PTRACE
+ build:
+ context: .
+ dockerfile: ci/docker/linux-r.dockerfile
+ cache_from:
+ - ${REPO}:amd64-ubuntu-18.04-r-sanitizer
+ args:
+ base: wch1/r-debug:latest
+ r_bin: RDsan
+ tz: ${TZ}
+ environment:
+ <<: *ccache
+ volumes: *ubuntu-volumes
+ command: >
+ /bin/bash -c "
+ /arrow/ci/scripts/r_sanitize.sh /arrow"
+
+ ubuntu-r-valgrind:
+ # Only 18.04 and amd64 supported
+ # Usage:
+ # docker-compose build ubuntu-r-valgrind
+ # docker-compose run ubuntu-r-valgrind
+ image: ${REPO}:amd64-ubuntu-18.04-r-valgrind
+ build:
+ context: .
+ dockerfile: ci/docker/linux-r.dockerfile
+ cache_from:
+ - ${REPO}:amd64-ubuntu-18.04-r-valgrind
+ args:
+ base: wch1/r-debug:latest
+ r_bin: RDvalgrind
+ tz: ${TZ}
+ environment:
+ <<: *ccache
+ ARROW_R_DEV: ${ARROW_R_DEV}
+      # AVX512 is not supported by Valgrind (similar to ARROW-9851). Some runners
+      # support AVX512 and some do not, so some builds might pass without this
+      # setting, but we want to ensure that we stay at AVX2 regardless of the runner.
+ EXTRA_CMAKE_FLAGS: "-DARROW_RUNTIME_SIMD_LEVEL=AVX2"
+ volumes: *ubuntu-volumes
+ command: >
+ /bin/bash -c "
+ /arrow/ci/scripts/r_valgrind.sh /arrow"
+
+ r-revdepcheck:
+ # Usage:
+ # docker-compose build r-revdepcheck
+ # docker-compose run r-revdepcheck
+ image: ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
+ build:
+ context: .
+ dockerfile: ci/docker/linux-r.dockerfile
+ cache_from:
+ - ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
+ args:
+ base: rstudio/r-base:4.0-focal
+ r_dev: ${ARROW_R_DEV}
+ tz: ${TZ}
+ shm_size: *shm-size
+ environment:
+ LIBARROW_DOWNLOAD: "true"
+ LIBARROW_MINIMAL: "false"
+ ARROW_SOURCE_HOME: "/arrow"
+ ARROW_R_DEV: "true"
+ volumes: *ubuntu-volumes
+ command: >
+ /bin/bash -c "/arrow/ci/scripts/r_revdepcheck.sh /arrow"
+
+
+
+ ################################# Go ########################################
+
+ debian-go:
+ # Usage:
+ # docker-compose build debian-go
+ # docker-compose run debian-go
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
+ build:
+ context: .
+ dockerfile: ci/docker/debian-${DEBIAN}-go.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
+ args:
+ arch: ${ARCH}
+ go: ${GO}
+ shm_size: *shm-size
+ volumes: *debian-volumes
+ command: &go-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/go_build.sh /arrow &&
+ /arrow/ci/scripts/go_test.sh /arrow"
+
+ debian-go-cgo:
+ # Usage:
+ # docker-compose build debian-go-cgo
+ # docker-compose run debian-go-cgo
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
+ build:
+ context: .
+ dockerfile: ci/docker/debian-go-cgo.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
+ args:
+ base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
+ shm_size: *shm-size
+ volumes: *debian-volumes
+ environment:
+ ARROW_GO_TESTCGO: "1"
+ command: *go-command
+
+ debian-go-cgo-python:
+ # Usage:
+ # docker-compose build debian-go-cgo-python
+ # docker-compose run debian-go-cgo-python
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
+ build:
+ context: .
+ dockerfile: ci/docker/debian-${DEBIAN}-go-cgo-python.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
+ args:
+ base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
+ shm_size: *shm-size
+ volumes: *debian-volumes
+ command: &go-cgo-python-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/go_cgo_python_test.sh /arrow"
+
+ ############################# JavaScript ####################################
+
+ debian-js:
+ # Usage:
+ # docker-compose build debian-js
+ # docker-compose run debian-js
+ image: ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
+ build:
+ context: .
+ dockerfile: ci/docker/debian-${DEBIAN}-js.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
+ args:
+ arch: ${ARCH}
+ node: ${NODE}
+ shm_size: *shm-size
+ volumes: *debian-volumes
+ command: &js-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/js_build.sh /arrow &&
+ /arrow/ci/scripts/js_test.sh /arrow"
+
+ #################################### C# #####################################
+
+ ubuntu-csharp:
+ # Usage:
+ # docker-compose build ubuntu-csharp
+ # docker-compose run ubuntu-csharp
+ image: ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
+ build:
+ context: .
+ dockerfile: ci/docker/ubuntu-18.04-csharp.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
+ args:
+ dotnet: ${DOTNET}
+ platform: bionic # use bionic-arm64v8 for ARM
+ shm_size: *shm-size
+ volumes: *ubuntu-volumes
+ command: &csharp-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/csharp_build.sh /arrow &&
+ /arrow/ci/scripts/csharp_test.sh /arrow &&
+ /arrow/ci/scripts/csharp_pack.sh /arrow"
+
+ ################################ Java #######################################
+
+ debian-java:
+ # Usage:
+ # docker-compose build debian-java
+ # docker-compose run debian-java
+ image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
+ build:
+ context: .
+ dockerfile: ci/docker/debian-9-java.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
+ args:
+ arch: ${ARCH}
+ jdk: ${JDK}
+ maven: ${MAVEN}
+ shm_size: *shm-size
+ volumes: &java-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
+ command: &java-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/java_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_test.sh /arrow /build"
+
+ debian-java-jni:
+ # Includes plasma test, jni for gandiva and orc, and C data interface.
+ # Usage:
+ # docker-compose build debian-java
+ # docker-compose build debian-java-jni
+ # docker-compose run debian-java-jni
+ image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-jni.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
+ args:
+ base: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
+ llvm: ${LLVM}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes:
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
+ - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
+ command:
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_cdata_build.sh /arrow /build/java/c/build /build/java/c &&
+ /arrow/ci/scripts/java_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_test.sh /arrow /build"
+
+ ############################## Integration ##################################
+
+ conda-integration:
+ # Usage:
+ # docker-compose build conda-cpp
+ # docker-compose build conda-integration
+ # docker-compose run conda-integration
+ image: ${REPO}:${ARCH}-conda-integration
+ build:
+ context: .
+ dockerfile: ci/docker/conda-integration.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-integration
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ jdk: ${JDK}
+ # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
+ # be set to ${MAVEN}
+ maven: 3.5
+ node: ${NODE}
+ go: ${GO}
+ volumes: *conda-volumes
+ environment:
+ <<: *ccache
+ # tell archery where the arrow binaries are located
+ ARROW_CPP_EXE_PATH: /build/cpp/debug
+ ARCHERY_INTEGRATION_WITH_RUST: 0
+ command:
+ ["/arrow/ci/scripts/rust_build.sh /arrow /build &&
+ /arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/csharp_build.sh /arrow /build &&
+ /arrow/ci/scripts/go_build.sh /arrow &&
+ /arrow/ci/scripts/java_build.sh /arrow /build &&
+ /arrow/ci/scripts/js_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_arrow.sh /arrow /build"]
+
+ ################################ Docs #######################################
+
+ ubuntu-docs:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-python
+ # docker-compose build ubuntu-docs
+ # docker-compose run --rm ubuntu-docs
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-docs.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
+ args:
+ r: ${R}
+ jdk: ${JDK}
+ node: ${NODE}
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
+ environment:
+ <<: *ccache
+ ARROW_CUDA: "ON"
+ ARROW_GLIB_GTK_DOC: "true"
+ Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
+ volumes: *ubuntu-volumes
+ command: &docs-command >
+ /bin/bash -c "
+ /arrow/ci/scripts/cpp_build.sh /arrow /build true &&
+ /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_build.sh /arrow /build true &&
+ /arrow/ci/scripts/js_build.sh /arrow true &&
+ /arrow/ci/scripts/r_build.sh /arrow true &&
+ /arrow/ci/scripts/docs_build.sh /arrow /build"
+
+ ################################# Tools #####################################
+
+ ubuntu-lint:
+ # Usage:
+ # docker-compose build ubuntu-cpp
+ # docker-compose build ubuntu-lint
+ # docker-compose run ubuntu-lint
+ image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
+ build:
+ context: .
+ dockerfile: ci/docker/linux-apt-lint.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
+ args:
+ base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
+ clang_tools: ${CLANG_TOOLS}
+ environment:
+ <<: *ccache
+ volumes: *ubuntu-volumes
+ command: archery lint --all --no-clang-tidy --no-iwyu --no-numpydoc
+
+ ######################### Integration Tests #################################
+
+ postgres:
+ # required for the impala service
+ image: postgres
+ ports:
+ - 5432:5432
+ environment:
+ POSTGRES_PASSWORD: postgres
+
+ impala:
+ # required for the hiveserver and hdfs tests
+ image: ibisproject/impala:latest
+ hostname: impala
+ links:
+ - postgres:postgres
+ environment:
+ PGPASSWORD: postgres
+ ports:
+ # HDFS
+ - 9020:9020
+ - 50070:50070
+ - 50075:50075
+ - 8020:8020
+ - 8042:8042
+ # Hive
+ - 9083:9083
+ # Impala
+ - 21000:21000
+ - 21050:21050
+ - 25000:25000
+ - 25010:25010
+ - 25020:25020
+
+ conda-cpp-hiveserver2:
+ # Usage:
+ # docker-compose build conda-cpp
+ # docker-compose build conda-cpp-hiveserver2
+ # docker-compose run conda-cpp-hiveserver2
+ image: ${REPO}:${ARCH}-conda-cpp
+ links:
+ - impala:impala
+ environment:
+ <<: *ccache
+ ARROW_FLIGHT: "OFF"
+ ARROW_GANDIVA: "OFF"
+ ARROW_PLASMA: "OFF"
+ ARROW_HIVESERVER2: "ON"
+ ARROW_HIVESERVER2_TEST_HOST: impala
+ shm_size: *shm-size
+ volumes: *conda-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_hiveserver2.sh /arrow /build"]
+
+ conda-python-hdfs:
+ # Usage:
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-hdfs
+ # docker-compose run conda-python-hdfs
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-hdfs.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ jdk: ${JDK}
+ # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
+ # be set to ${MAVEN}
+ maven: 3.5
+ hdfs: ${HDFS}
+ links:
+ - impala:impala
+ environment:
+ <<: *ccache
+ ARROW_HDFS: "ON"
+ ARROW_HDFS_TEST_HOST: impala
+ ARROW_HDFS_TEST_PORT: 8020
+ ARROW_HDFS_TEST_USER: hdfs
+ ARROW_S3: "OFF"
+ CMAKE_UNITY_BUILD: "ON"
+ shm_size: *shm-size
+ volumes: &conda-maven-volumes
+ - .:/arrow:delegated
+ - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
+ - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_hdfs.sh /arrow /build"]
+
+ conda-python-spark:
+ # Usage:
+ # docker-compose build conda-cpp
+ # docker-compose build conda-python
+ # docker-compose build conda-python-spark
+ # docker-compose run conda-python-spark
+ image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
+ build:
+ context: .
+ dockerfile: ci/docker/conda-python-spark.dockerfile
+ cache_from:
+ - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
+ args:
+ repo: ${REPO}
+ arch: ${ARCH}
+ python: ${PYTHON}
+ jdk: ${JDK}
+ # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
+ # be set to ${MAVEN}
+ maven: 3.5
+ spark: ${SPARK}
+ shm_size: *shm-size
+ environment:
+ <<: *ccache
+ volumes: *conda-maven-volumes
+ command:
+ ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+ /arrow/ci/scripts/python_build.sh /arrow /build &&
+ /arrow/ci/scripts/java_build.sh /arrow /build &&
+ /arrow/ci/scripts/integration_spark.sh /arrow /spark ${TEST_PYARROW_ONLY:-false}"]