summaryrefslogtreecommitdiffstats
path: root/src/seastar/fmt/support
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-27 18:24:20 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-27 18:24:20 +0000
commit483eb2f56657e8e7f419ab1a4fab8dce9ade8609 (patch)
treee5d88d25d870d5dedacb6bbdbe2a966086a0a5cf /src/seastar/fmt/support
parentInitial commit. (diff)
downloadceph-483eb2f56657e8e7f419ab1a4fab8dce9ade8609.tar.xz
ceph-483eb2f56657e8e7f419ab1a4fab8dce9ade8609.zip
Adding upstream version 14.2.21.upstream/14.2.21upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/seastar/fmt/support')
-rw-r--r--src/seastar/fmt/support/Android.mk15
-rw-r--r--src/seastar/fmt/support/AndroidManifest.xml1
-rw-r--r--src/seastar/fmt/support/README6
-rw-r--r--src/seastar/fmt/support/appveyor-build.py40
-rw-r--r--src/seastar/fmt/support/appveyor.yml34
-rw-r--r--src/seastar/fmt/support/build.gradle105
-rw-r--r--src/seastar/fmt/support/cmake/FindSetEnv.cmake7
-rw-r--r--src/seastar/fmt/support/cmake/cxx14.cmake102
-rw-r--r--src/seastar/fmt/support/cmake/fmt-config.cmake.in4
-rw-r--r--src/seastar/fmt/support/cmake/fmt.pc.in11
-rw-r--r--src/seastar/fmt/support/cmake/run-cmake.bat11
-rwxr-xr-xsrc/seastar/fmt/support/compute-powers.py53
-rw-r--r--src/seastar/fmt/support/docopt.py581
-rw-r--r--src/seastar/fmt/support/fmt.pro27
-rwxr-xr-xsrc/seastar/fmt/support/manage.py261
-rw-r--r--src/seastar/fmt/support/rst2md.py151
-rw-r--r--src/seastar/fmt/support/rtd/conf.py7
-rw-r--r--src/seastar/fmt/support/rtd/index.rst2
-rw-r--r--src/seastar/fmt/support/rtd/theme/layout.html17
-rw-r--r--src/seastar/fmt/support/rtd/theme/theme.conf2
-rwxr-xr-xsrc/seastar/fmt/support/travis-build.py113
-rwxr-xr-xsrc/seastar/fmt/support/update-coverity-branch.py30
22 files changed, 1580 insertions, 0 deletions
diff --git a/src/seastar/fmt/support/Android.mk b/src/seastar/fmt/support/Android.mk
new file mode 100644
index 00000000..84a3e32f
--- /dev/null
+++ b/src/seastar/fmt/support/Android.mk
@@ -0,0 +1,15 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fmt_static
+LOCAL_MODULE_FILENAME := libfmt
+
+LOCAL_SRC_FILES := ../src/format.cc
+
+LOCAL_C_INCLUDES := $(LOCAL_PATH)
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
+
+LOCAL_CFLAGS += -std=c++11 -fexceptions
+
+include $(BUILD_STATIC_LIBRARY)
+
diff --git a/src/seastar/fmt/support/AndroidManifest.xml b/src/seastar/fmt/support/AndroidManifest.xml
new file mode 100644
index 00000000..b5281fee
--- /dev/null
+++ b/src/seastar/fmt/support/AndroidManifest.xml
@@ -0,0 +1 @@
+<manifest package="net.fmtlib" />
diff --git a/src/seastar/fmt/support/README b/src/seastar/fmt/support/README
new file mode 100644
index 00000000..e7fbacc7
--- /dev/null
+++ b/src/seastar/fmt/support/README
@@ -0,0 +1,6 @@
+This directory contains build support files such as
+
+* CMake modules
+* Build scripts
+* qmake (static build with dynamic libc only)
+
diff --git a/src/seastar/fmt/support/appveyor-build.py b/src/seastar/fmt/support/appveyor-build.py
new file mode 100644
index 00000000..2cfcb03a
--- /dev/null
+++ b/src/seastar/fmt/support/appveyor-build.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Build the project on AppVeyor.
+
+import os
+from subprocess import check_call
+
+build = os.environ['BUILD']
+config = os.environ['CONFIGURATION']
+platform = os.environ['PLATFORM']
+path = os.environ['PATH']
+image = os.environ['APPVEYOR_BUILD_WORKER_IMAGE']
+jobid = os.environ['APPVEYOR_JOB_ID']
+cmake_command = ['cmake', '-DFMT_PEDANTIC=ON', '-DCMAKE_BUILD_TYPE=' + config, '..']
+if build == 'mingw':
+ cmake_command.append('-GMinGW Makefiles')
+ build_command = ['mingw32-make', '-j4']
+ test_command = ['mingw32-make', 'test']
+ # Remove the path to Git bin directory from $PATH because it breaks
+ # MinGW config.
+ path = path.replace(r'C:\Program Files (x86)\Git\bin', '')
+ os.environ['PATH'] = r'C:\MinGW\bin;' + path
+else:
+ # Add MSBuild 14.0 to PATH as described in
+ # http://help.appveyor.com/discussions/problems/2229-v140-not-found-on-vs2105rc.
+ os.environ['PATH'] = r'C:\Program Files (x86)\MSBuild\15.0\Bin;' + path
+ if image == 'Visual Studio 2013':
+ generator = 'Visual Studio 12 2013'
+ elif image == 'Visual Studio 2015':
+ generator = 'Visual Studio 14 2015'
+ elif image == 'Visual Studio 2017':
+ generator = 'Visual Studio 15 2017'
+ if platform == 'x64':
+ generator += ' Win64'
+ cmake_command.append('-G' + generator)
+ build_command = ['cmake', '--build', '.', '--config', config, '--', '/m:4']
+ test_command = ['ctest', '-C', config]
+
+check_call(cmake_command)
+check_call(build_command)
+check_call(test_command)
diff --git a/src/seastar/fmt/support/appveyor.yml b/src/seastar/fmt/support/appveyor.yml
new file mode 100644
index 00000000..af298cf7
--- /dev/null
+++ b/src/seastar/fmt/support/appveyor.yml
@@ -0,0 +1,34 @@
+configuration:
+ - Debug
+ - Release
+
+clone_depth: 1
+
+platform:
+ - Win32
+ - x64
+
+image:
+ - Visual Studio 2013
+ - Visual Studio 2015
+ - Visual Studio 2017
+
+environment:
+ CTEST_OUTPUT_ON_FAILURE: 1
+ MSVC_DEFAULT_OPTIONS: ON
+ BUILD: msvc
+
+before_build:
+ - mkdir build
+ - cd build
+
+build_script:
+ - python ../support/appveyor-build.py
+
+on_failure:
+ - appveyor PushArtifact Testing/Temporary/LastTest.log
+ - appveyor AddTest test
+
+# Uncomment this to debug AppVeyor failures.
+#on_finish:
+# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
diff --git a/src/seastar/fmt/support/build.gradle b/src/seastar/fmt/support/build.gradle
new file mode 100644
index 00000000..797cf491
--- /dev/null
+++ b/src/seastar/fmt/support/build.gradle
@@ -0,0 +1,105 @@
+
+// General gradle arguments for root project
+buildscript {
+ repositories {
+ google()
+ jcenter()
+ }
+ dependencies {
+ //
+ // https://developer.android.com/studio/releases/gradle-plugin
+ //
+ // Notice that 3.1.3 here is the version of [Android Gradle Plugin]
+ // According to URL above you will need Gradle 4.4 or higher
+ //
+ classpath 'com.android.tools.build:gradle:3.1.3'
+ }
+}
+repositories {
+ google()
+ jcenter()
+}
+
+// Output: Shared library (.so) for Android
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 25 // Android 7.0
+
+ // Target ABI
+ // - This option controls target platform of module
+ // - The platform might be limited by compiler's support
+ // some can work with Clang(default), but some can work only with GCC...
+ // if bad, both toolchains might not support it
+ splits {
+ abi {
+ enable true
+ // Specify platforms for Application
+ reset()
+ include "arm64-v8a", "armeabi-v7a", "x86_64"
+ }
+ }
+
+ defaultConfig {
+ minSdkVersion 21 // Android 5.0+
+ targetSdkVersion 25 // Follow Compile SDK
+ versionCode 20 // Follow release count
+ versionName "5.2.1" // Follow Official version
+ testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+
+ externalNativeBuild {
+ cmake {
+ arguments "-DANDROID_STL=c++_shared" // Specify Android STL
+ arguments "-DBUILD_SHARED_LIBS=true" // Build shared object
+ arguments "-DFMT_TEST=false" // Skip test
+ arguments "-DFMT_DOC=false" // Skip document
+ cppFlags "-std=c++17"
+ }
+ }
+ println("Gradle CMake Plugin: ")
+ println(externalNativeBuild.cmake.cppFlags)
+ println(externalNativeBuild.cmake.arguments)
+ }
+
+ // External Native build
+ // - Use existing CMakeList.txt
+ // - Give path to CMake. This gradle file should be
+ // neighbor of the top level cmake
+ externalNativeBuild {
+ cmake {
+ path "../CMakeLists.txt"
+ // buildStagingDirectory "./build" // Custom path for cmake output
+ }
+ //println(cmake.path)
+ }
+
+ sourceSets{
+ // Android Manifest for Gradle
+ main {
+ manifest.srcFile 'AndroidManifest.xml'
+ }
+ }
+}
+
+assemble.doLast
+{
+ // Instead of `ninja install`, Gradle will deploy the files.
+ // We are doing this since FMT is dependent on the ANDROID_STL after build
+ copy {
+ from 'build/intermediates/cmake'
+ into '../libs'
+ }
+ // Copy debug binaries
+ copy {
+ from '../libs/debug/obj'
+ into '../libs/debug'
+ }
+ // Copy Release binaries
+ copy {
+ from '../libs/release/obj'
+ into '../libs/release'
+ }
+ // Remove empty directory
+ delete '../libs/debug/obj'
+ delete '../libs/release/obj'
+}
diff --git a/src/seastar/fmt/support/cmake/FindSetEnv.cmake b/src/seastar/fmt/support/cmake/FindSetEnv.cmake
new file mode 100644
index 00000000..4e2da540
--- /dev/null
+++ b/src/seastar/fmt/support/cmake/FindSetEnv.cmake
@@ -0,0 +1,7 @@
+# A CMake script to find SetEnv.cmd.
+
+find_program(WINSDK_SETENV NAMES SetEnv.cmd
+ PATHS "[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows;CurrentInstallFolder]/bin")
+if (WINSDK_SETENV AND PRINT_PATH)
+ execute_process(COMMAND ${CMAKE_COMMAND} -E echo "${WINSDK_SETENV}")
+endif ()
diff --git a/src/seastar/fmt/support/cmake/cxx14.cmake b/src/seastar/fmt/support/cmake/cxx14.cmake
new file mode 100644
index 00000000..6aa9e21d
--- /dev/null
+++ b/src/seastar/fmt/support/cmake/cxx14.cmake
@@ -0,0 +1,102 @@
+# C++14 feature support detection
+
+include(CheckCXXSourceCompiles)
+include(CheckCXXCompilerFlag)
+
+if (CMAKE_CXX_STANDARD)
+ message(STATUS "CXX_STANDARD: ${CMAKE_CXX_STANDARD}")
+elseif (CMAKE_CXX_FLAGS MATCHES "-std=(c|gnu)\\+\\+")
+ string(REGEX MATCH "-std=(c|gnu)\\+\\+[^ ]+" CXX_STANDARD_FLAG "${CMAKE_CXX_FLAGS}")
+ message(STATUS "CXX_STANDARD_FLAG: ${CXX_STANDARD_FLAG}")
+else()
+ set(CMAKE_CXX_STANDARD 11)
+ message(STATUS "CXX_STANDARD: ${CMAKE_CXX_STANDARD}")
+endif()
+
+if (CMAKE_CXX_STANDARD EQUAL 20)
+ check_cxx_compiler_flag(-std=c++20 has_std_20_flag)
+ check_cxx_compiler_flag(-std=c++2a has_std_2a_flag)
+
+ if (has_std_20_flag)
+ set(CXX_STANDARD_FLAG -std=c++20)
+ elseif (has_std_2a_flag)
+ set(CXX_STANDARD_FLAG -std=c++2a)
+ endif ()
+elseif (CMAKE_CXX_STANDARD EQUAL 17)
+ check_cxx_compiler_flag(-std=c++17 has_std_17_flag)
+ check_cxx_compiler_flag(-std=c++1z has_std_1z_flag)
+
+ if (has_std_17_flag)
+ set(CXX_STANDARD_FLAG -std=c++17)
+ elseif (has_std_1z_flag)
+ set(CXX_STANDARD_FLAG -std=c++1z)
+ endif ()
+elseif (CMAKE_CXX_STANDARD EQUAL 14)
+ check_cxx_compiler_flag(-std=c++14 has_std_14_flag)
+ check_cxx_compiler_flag(-std=c++1y has_std_1y_flag)
+
+ if (has_std_14_flag)
+ set(CXX_STANDARD_FLAG -std=c++14)
+ elseif (has_std_1y_flag)
+ set(CXX_STANDARD_FLAG -std=c++1y)
+ endif ()
+elseif (CMAKE_CXX_STANDARD EQUAL 11)
+ check_cxx_compiler_flag(-std=c++11 has_std_11_flag)
+ check_cxx_compiler_flag(-std=c++0x has_std_0x_flag)
+
+ if (has_std_11_flag)
+ set(CXX_STANDARD_FLAG -std=c++11)
+ elseif (has_std_0x_flag)
+ set(CXX_STANDARD_FLAG -std=c++0x)
+ endif ()
+endif ()
+
+set(CMAKE_REQUIRED_FLAGS ${CXX_STANDARD_FLAG})
+
+# Check if variadic templates are working and not affected by GCC bug 39653:
+# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=39653
+check_cxx_source_compiles("
+ template <class T, class ...Types>
+ struct S { typedef typename S<Types...>::type type; };
+ int main() {}" SUPPORTS_VARIADIC_TEMPLATES)
+if (NOT SUPPORTS_VARIADIC_TEMPLATES)
+ set (SUPPORTS_VARIADIC_TEMPLATES OFF)
+endif ()
+
+# Check if initializer lists are supported.
+check_cxx_source_compiles("
+ #include <initializer_list>
+ int main() {}" SUPPORTS_INITIALIZER_LIST)
+if (NOT SUPPORTS_INITIALIZER_LIST)
+ set (SUPPORTS_INITIALIZER_LIST OFF)
+endif ()
+
+# Check if enum bases are available
+check_cxx_source_compiles("
+ enum C : char {A};
+ int main() {}"
+ SUPPORTS_ENUM_BASE)
+if (NOT SUPPORTS_ENUM_BASE)
+ set (SUPPORTS_ENUM_BASE OFF)
+endif ()
+
+# Check if type traits are available
+check_cxx_source_compiles("
+ #include <type_traits>
+ class C { void operator=(const C&); };
+ int main() { static_assert(!std::is_copy_assignable<C>::value, \"\"); }"
+ SUPPORTS_TYPE_TRAITS)
+if (NOT SUPPORTS_TYPE_TRAITS)
+ set (SUPPORTS_TYPE_TRAITS OFF)
+endif ()
+
+# Check if user-defined literals are available
+check_cxx_source_compiles("
+ void operator\"\" _udl(long double);
+ int main() {}"
+ SUPPORTS_USER_DEFINED_LITERALS)
+if (NOT SUPPORTS_USER_DEFINED_LITERALS)
+ set (SUPPORTS_USER_DEFINED_LITERALS OFF)
+endif ()
+
+set(CMAKE_REQUIRED_FLAGS )
diff --git a/src/seastar/fmt/support/cmake/fmt-config.cmake.in b/src/seastar/fmt/support/cmake/fmt-config.cmake.in
new file mode 100644
index 00000000..71e30286
--- /dev/null
+++ b/src/seastar/fmt/support/cmake/fmt-config.cmake.in
@@ -0,0 +1,4 @@
+@PACKAGE_INIT@
+
+include(${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake)
+check_required_components(fmt)
diff --git a/src/seastar/fmt/support/cmake/fmt.pc.in b/src/seastar/fmt/support/cmake/fmt.pc.in
new file mode 100644
index 00000000..79de0b6d
--- /dev/null
+++ b/src/seastar/fmt/support/cmake/fmt.pc.in
@@ -0,0 +1,11 @@
+prefix=@CMAKE_INSTALL_PREFIX@
+exec_prefix=@CMAKE_INSTALL_PREFIX@
+libdir=@CMAKE_INSTALL_FULL_LIBDIR@
+includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
+
+Name: fmt
+Description: A modern formatting library
+Version: @FMT_VERSION@
+Libs: -L${libdir} -lfmt
+Cflags: -I${includedir}
+
diff --git a/src/seastar/fmt/support/cmake/run-cmake.bat b/src/seastar/fmt/support/cmake/run-cmake.bat
new file mode 100644
index 00000000..f18bb055
--- /dev/null
+++ b/src/seastar/fmt/support/cmake/run-cmake.bat
@@ -0,0 +1,11 @@
+@echo on
+rem This script configures the build environment and runs CMake.
+rem Use it instead of running CMake directly when building with
+rem the Microsoft SDK toolchain rather than Visual Studio.
+rem It is used in the same way as cmake, for example:
+rem
+rem run-cmake -G "Visual Studio 10 Win64" .
+
+for /F "delims=" %%i IN ('cmake "-DPRINT_PATH=1" -P %~dp0/FindSetEnv.cmake') DO set setenv=%%i
+if NOT "%setenv%" == "" call "%setenv%"
+cmake %*
diff --git a/src/seastar/fmt/support/compute-powers.py b/src/seastar/fmt/support/compute-powers.py
new file mode 100755
index 00000000..601063d4
--- /dev/null
+++ b/src/seastar/fmt/support/compute-powers.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Compute 10 ** exp with exp in the range [min_exponent, max_exponent] and print
+# normalized (with most-significant bit equal to 1) significands in hexadecimal.
+
+from __future__ import print_function
+
+min_exponent = -348
+max_exponent = 340
+step = 8
+significand_size = 64
+exp_offset = 2000
+
+class fp:
+ pass
+
+powers = []
+for i, exp in enumerate(range(min_exponent, max_exponent + 1, step)):
+ result = fp()
+ n = 10 ** exp if exp >= 0 else 2 ** exp_offset / 10 ** -exp
+ k = significand_size + 1
+ # Convert to binary and round.
+ binary = '{:b}'.format(n)
+ result.f = (int('{:0<{}}'.format(binary[:k], k), 2) + 1) / 2
+ result.e = len(binary) - (exp_offset if exp < 0 else 0) - significand_size
+ powers.append(result)
+ # Sanity check.
+ exp_offset10 = 400
+ actual = result.f * 10 ** exp_offset10
+ if result.e > 0:
+ actual *= 2 ** result.e
+ else:
+ for j in range(-result.e):
+ actual /= 2
+ expected = 10 ** (exp_offset10 + exp)
+ precision = len('{}'.format(expected)) - len('{}'.format(actual - expected))
+ if precision < 19:
+ print('low precision:', precision)
+ exit(1)
+
+print('Significands:', end='')
+for i, fp in enumerate(powers):
+ if i % 3 == 0:
+ print(end='\n ')
+ print(' {:0<#16x}'.format(fp.f, ), end=',')
+
+print('\n\nExponents:', end='')
+for i, fp in enumerate(powers):
+ if i % 11 == 0:
+ print(end='\n ')
+ print(' {:5}'.format(fp.e), end=',')
+
+print('\n\nMax exponent difference:',
+ max([x.e - powers[i - 1].e for i, x in enumerate(powers)][1:]))
diff --git a/src/seastar/fmt/support/docopt.py b/src/seastar/fmt/support/docopt.py
new file mode 100644
index 00000000..2e43f7ce
--- /dev/null
+++ b/src/seastar/fmt/support/docopt.py
@@ -0,0 +1,581 @@
+"""Pythonic command-line interface parser that will make you smile.
+
+ * http://docopt.org
+ * Repository and issue-tracker: https://github.com/docopt/docopt
+ * Licensed under terms of MIT license (see LICENSE-MIT)
+ * Copyright (c) 2013 Vladimir Keleshev, vladimir@keleshev.com
+
+"""
+import sys
+import re
+
+
+__all__ = ['docopt']
+__version__ = '0.6.1'
+
+
+class DocoptLanguageError(Exception):
+
+ """Error in construction of usage-message by developer."""
+
+
+class DocoptExit(SystemExit):
+
+ """Exit in case user invoked program with incorrect arguments."""
+
+ usage = ''
+
+ def __init__(self, message=''):
+ SystemExit.__init__(self, (message + '\n' + self.usage).strip())
+
+
+class Pattern(object):
+
+ def __eq__(self, other):
+ return repr(self) == repr(other)
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def fix(self):
+ self.fix_identities()
+ self.fix_repeating_arguments()
+ return self
+
+ def fix_identities(self, uniq=None):
+ """Make pattern-tree tips point to same object if they are equal."""
+ if not hasattr(self, 'children'):
+ return self
+ uniq = list(set(self.flat())) if uniq is None else uniq
+ for i, child in enumerate(self.children):
+ if not hasattr(child, 'children'):
+ assert child in uniq
+ self.children[i] = uniq[uniq.index(child)]
+ else:
+ child.fix_identities(uniq)
+
+ def fix_repeating_arguments(self):
+ """Fix elements that should accumulate/increment values."""
+ either = [list(child.children) for child in transform(self).children]
+ for case in either:
+ for e in [child for child in case if case.count(child) > 1]:
+ if type(e) is Argument or type(e) is Option and e.argcount:
+ if e.value is None:
+ e.value = []
+ elif type(e.value) is not list:
+ e.value = e.value.split()
+ if type(e) is Command or type(e) is Option and e.argcount == 0:
+ e.value = 0
+ return self
+
+
+def transform(pattern):
+ """Expand pattern into an (almost) equivalent one, but with single Either.
+
+ Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d)
+ Quirks: [-a] => (-a), (-a...) => (-a -a)
+
+ """
+ result = []
+ groups = [[pattern]]
+ while groups:
+ children = groups.pop(0)
+ parents = [Required, Optional, OptionsShortcut, Either, OneOrMore]
+ if any(t in map(type, children) for t in parents):
+ child = [c for c in children if type(c) in parents][0]
+ children.remove(child)
+ if type(child) is Either:
+ for c in child.children:
+ groups.append([c] + children)
+ elif type(child) is OneOrMore:
+ groups.append(child.children * 2 + children)
+ else:
+ groups.append(child.children + children)
+ else:
+ result.append(children)
+ return Either(*[Required(*e) for e in result])
+
+
+class LeafPattern(Pattern):
+
+ """Leaf/terminal node of a pattern tree."""
+
+ def __init__(self, name, value=None):
+ self.name, self.value = name, value
+
+ def __repr__(self):
+ return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.value)
+
+ def flat(self, *types):
+ return [self] if not types or type(self) in types else []
+
+ def match(self, left, collected=None):
+ collected = [] if collected is None else collected
+ pos, match = self.single_match(left)
+ if match is None:
+ return False, left, collected
+ left_ = left[:pos] + left[pos + 1:]
+ same_name = [a for a in collected if a.name == self.name]
+ if type(self.value) in (int, list):
+ if type(self.value) is int:
+ increment = 1
+ else:
+ increment = ([match.value] if type(match.value) is str
+ else match.value)
+ if not same_name:
+ match.value = increment
+ return True, left_, collected + [match]
+ same_name[0].value += increment
+ return True, left_, collected
+ return True, left_, collected + [match]
+
+
+class BranchPattern(Pattern):
+
+ """Branch/inner node of a pattern tree."""
+
+ def __init__(self, *children):
+ self.children = list(children)
+
+ def __repr__(self):
+ return '%s(%s)' % (self.__class__.__name__,
+ ', '.join(repr(a) for a in self.children))
+
+ def flat(self, *types):
+ if type(self) in types:
+ return [self]
+ return sum([child.flat(*types) for child in self.children], [])
+
+
+class Argument(LeafPattern):
+
+ def single_match(self, left):
+ for n, pattern in enumerate(left):
+ if type(pattern) is Argument:
+ return n, Argument(self.name, pattern.value)
+ return None, None
+
+ @classmethod
+ def parse(class_, source):
+ name = re.findall('(<\S*?>)', source)[0]
+ value = re.findall('\[default: (.*)\]', source, flags=re.I)
+ return class_(name, value[0] if value else None)
+
+
+class Command(Argument):
+
+ def __init__(self, name, value=False):
+ self.name, self.value = name, value
+
+ def single_match(self, left):
+ for n, pattern in enumerate(left):
+ if type(pattern) is Argument:
+ if pattern.value == self.name:
+ return n, Command(self.name, True)
+ else:
+ break
+ return None, None
+
+
+class Option(LeafPattern):
+
+ def __init__(self, short=None, long=None, argcount=0, value=False):
+ assert argcount in (0, 1)
+ self.short, self.long, self.argcount = short, long, argcount
+ self.value = None if value is False and argcount else value
+
+ @classmethod
+ def parse(class_, option_description):
+ short, long, argcount, value = None, None, 0, False
+ options, _, description = option_description.strip().partition(' ')
+ options = options.replace(',', ' ').replace('=', ' ')
+ for s in options.split():
+ if s.startswith('--'):
+ long = s
+ elif s.startswith('-'):
+ short = s
+ else:
+ argcount = 1
+ if argcount:
+ matched = re.findall('\[default: (.*)\]', description, flags=re.I)
+ value = matched[0] if matched else None
+ return class_(short, long, argcount, value)
+
+ def single_match(self, left):
+ for n, pattern in enumerate(left):
+ if self.name == pattern.name:
+ return n, pattern
+ return None, None
+
+ @property
+ def name(self):
+ return self.long or self.short
+
+ def __repr__(self):
+ return 'Option(%r, %r, %r, %r)' % (self.short, self.long,
+ self.argcount, self.value)
+
+
+class Required(BranchPattern):
+
+ def match(self, left, collected=None):
+ collected = [] if collected is None else collected
+ l = left
+ c = collected
+ for pattern in self.children:
+ matched, l, c = pattern.match(l, c)
+ if not matched:
+ return False, left, collected
+ return True, l, c
+
+
+class Optional(BranchPattern):
+
+ def match(self, left, collected=None):
+ collected = [] if collected is None else collected
+ for pattern in self.children:
+ m, left, collected = pattern.match(left, collected)
+ return True, left, collected
+
+
+class OptionsShortcut(Optional):
+
+ """Marker/placeholder for [options] shortcut."""
+
+
+class OneOrMore(BranchPattern):
+
+ def match(self, left, collected=None):
+ assert len(self.children) == 1
+ collected = [] if collected is None else collected
+ l = left
+ c = collected
+ l_ = None
+ matched = True
+ times = 0
+ while matched:
+ # could it be that something didn't match but changed l or c?
+ matched, l, c = self.children[0].match(l, c)
+ times += 1 if matched else 0
+ if l_ == l:
+ break
+ l_ = l
+ if times >= 1:
+ return True, l, c
+ return False, left, collected
+
+
+class Either(BranchPattern):
+
+ def match(self, left, collected=None):
+ collected = [] if collected is None else collected
+ outcomes = []
+ for pattern in self.children:
+ matched, _, _ = outcome = pattern.match(left, collected)
+ if matched:
+ outcomes.append(outcome)
+ if outcomes:
+ return min(outcomes, key=lambda outcome: len(outcome[1]))
+ return False, left, collected
+
+
+class Tokens(list):
+
+ def __init__(self, source, error=DocoptExit):
+ self += source.split() if hasattr(source, 'split') else source
+ self.error = error
+
+ @staticmethod
+ def from_pattern(source):
+ source = re.sub(r'([\[\]\(\)\|]|\.\.\.)', r' \1 ', source)
+ source = [s for s in re.split('\s+|(\S*<.*?>)', source) if s]
+ return Tokens(source, error=DocoptLanguageError)
+
+ def move(self):
+ return self.pop(0) if len(self) else None
+
+ def current(self):
+ return self[0] if len(self) else None
+
+
+def parse_long(tokens, options):
+ """long ::= '--' chars [ ( ' ' | '=' ) chars ] ;"""
+ long, eq, value = tokens.move().partition('=')
+ assert long.startswith('--')
+ value = None if eq == value == '' else value
+ similar = [o for o in options if o.long == long]
+ if tokens.error is DocoptExit and similar == []: # if no exact match
+ similar = [o for o in options if o.long and o.long.startswith(long)]
+ if len(similar) > 1: # might be simply specified ambiguously 2+ times?
+ raise tokens.error('%s is not a unique prefix: %s?' %
+ (long, ', '.join(o.long for o in similar)))
+ elif len(similar) < 1:
+ argcount = 1 if eq == '=' else 0
+ o = Option(None, long, argcount)
+ options.append(o)
+ if tokens.error is DocoptExit:
+ o = Option(None, long, argcount, value if argcount else True)
+ else:
+ o = Option(similar[0].short, similar[0].long,
+ similar[0].argcount, similar[0].value)
+ if o.argcount == 0:
+ if value is not None:
+ raise tokens.error('%s must not have an argument' % o.long)
+ else:
+ if value is None:
+ if tokens.current() in [None, '--']:
+ raise tokens.error('%s requires argument' % o.long)
+ value = tokens.move()
+ if tokens.error is DocoptExit:
+ o.value = value if value is not None else True
+ return [o]
+
+
+def parse_shorts(tokens, options):
+ """shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;"""
+ token = tokens.move()
+ assert token.startswith('-') and not token.startswith('--')
+ left = token.lstrip('-')
+ parsed = []
+ while left != '':
+ short, left = '-' + left[0], left[1:]
+ similar = [o for o in options if o.short == short]
+ if len(similar) > 1:
+ raise tokens.error('%s is specified ambiguously %d times' %
+ (short, len(similar)))
+ elif len(similar) < 1:
+ o = Option(short, None, 0)
+ options.append(o)
+ if tokens.error is DocoptExit:
+ o = Option(short, None, 0, True)
+ else: # why copying is necessary here?
+ o = Option(short, similar[0].long,
+ similar[0].argcount, similar[0].value)
+ value = None
+ if o.argcount != 0:
+ if left == '':
+ if tokens.current() in [None, '--']:
+ raise tokens.error('%s requires argument' % short)
+ value = tokens.move()
+ else:
+ value = left
+ left = ''
+ if tokens.error is DocoptExit:
+ o.value = value if value is not None else True
+ parsed.append(o)
+ return parsed
+
+
+def parse_pattern(source, options):
+ tokens = Tokens.from_pattern(source)
+ result = parse_expr(tokens, options)
+ if tokens.current() is not None:
+ raise tokens.error('unexpected ending: %r' % ' '.join(tokens))
+ return Required(*result)
+
+
+def parse_expr(tokens, options):
+ """expr ::= seq ( '|' seq )* ;"""
+ seq = parse_seq(tokens, options)
+ if tokens.current() != '|':
+ return seq
+ result = [Required(*seq)] if len(seq) > 1 else seq
+ while tokens.current() == '|':
+ tokens.move()
+ seq = parse_seq(tokens, options)
+ result += [Required(*seq)] if len(seq) > 1 else seq
+ return [Either(*result)] if len(result) > 1 else result
+
+
+def parse_seq(tokens, options):
+ """seq ::= ( atom [ '...' ] )* ;"""
+ result = []
+ while tokens.current() not in [None, ']', ')', '|']:
+ atom = parse_atom(tokens, options)
+ if tokens.current() == '...':
+ atom = [OneOrMore(*atom)]
+ tokens.move()
+ result += atom
+ return result
+
+
+def parse_atom(tokens, options):
+ """atom ::= '(' expr ')' | '[' expr ']' | 'options'
+ | long | shorts | argument | command ;
+ """
+ token = tokens.current()
+ result = []
+ if token in '([':
+ tokens.move()
+ matching, pattern = {'(': [')', Required], '[': [']', Optional]}[token]
+ result = pattern(*parse_expr(tokens, options))
+ if tokens.move() != matching:
+ raise tokens.error("unmatched '%s'" % token)
+ return [result]
+ elif token == 'options':
+ tokens.move()
+ return [OptionsShortcut()]
+ elif token.startswith('--') and token != '--':
+ return parse_long(tokens, options)
+ elif token.startswith('-') and token not in ('-', '--'):
+ return parse_shorts(tokens, options)
+ elif token.startswith('<') and token.endswith('>') or token.isupper():
+ return [Argument(tokens.move())]
+ else:
+ return [Command(tokens.move())]
+
+
+def parse_argv(tokens, options, options_first=False):
+ """Parse command-line argument vector.
+
+ If options_first:
+ argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
+ else:
+ argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
+
+ """
+ parsed = []
+ while tokens.current() is not None:
+ if tokens.current() == '--':
+ return parsed + [Argument(None, v) for v in tokens]
+ elif tokens.current().startswith('--'):
+ parsed += parse_long(tokens, options)
+ elif tokens.current().startswith('-') and tokens.current() != '-':
+ parsed += parse_shorts(tokens, options)
+ elif options_first:
+ return parsed + [Argument(None, v) for v in tokens]
+ else:
+ parsed.append(Argument(None, tokens.move()))
+ return parsed
+
+
+def parse_defaults(doc):
+ defaults = []
+ for s in parse_section('options:', doc):
+ # FIXME corner case "bla: options: --foo"
+ _, _, s = s.partition(':') # get rid of "options:"
+ split = re.split('\n[ \t]*(-\S+?)', '\n' + s)[1:]
+ split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
+ options = [Option.parse(s) for s in split if s.startswith('-')]
+ defaults += options
+ return defaults
+
+
+def parse_section(name, source):
+ pattern = re.compile('^([^\n]*' + name + '[^\n]*\n?(?:[ \t].*?(?:\n|$))*)',
+ re.IGNORECASE | re.MULTILINE)
+ return [s.strip() for s in pattern.findall(source)]
+
+
+def formal_usage(section):
+ _, _, section = section.partition(':') # drop "usage:"
+ pu = section.split()
+ return '( ' + ' '.join(') | (' if s == pu[0] else s for s in pu[1:]) + ' )'
+
+
+def extras(help, version, options, doc):
+ if help and any((o.name in ('-h', '--help')) and o.value for o in options):
+ print(doc.strip("\n"))
+ sys.exit()
+ if version and any(o.name == '--version' and o.value for o in options):
+ print(version)
+ sys.exit()
+
+
+class Dict(dict):
+ def __repr__(self):
+ return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
+
+
+def docopt(doc, argv=None, help=True, version=None, options_first=False):
+ """Parse `argv` based on command-line interface described in `doc`.
+
+ `docopt` creates your command-line interface based on its
+ description that you pass as `doc`. Such description can contain
+ --options, <positional-argument>, commands, which could be
+ [optional], (required), (mutually | exclusive) or repeated...
+
+ Parameters
+ ----------
+ doc : str
+ Description of your command-line interface.
+ argv : list of str, optional
+ Argument vector to be parsed. sys.argv[1:] is used if not
+ provided.
+ help : bool (default: True)
+ Set to False to disable automatic help on -h or --help
+ options.
+ version : any object
+ If passed, the object will be printed if --version is in
+ `argv`.
+ options_first : bool (default: False)
+ Set to True to require options precede positional arguments,
+ i.e. to forbid options and positional arguments intermix.
+
+ Returns
+ -------
+ args : dict
+ A dictionary, where keys are names of command-line elements
+ such as e.g. "--verbose" and "<path>", and values are the
+ parsed values of those elements.
+
+ Example
+ -------
+ >>> from docopt import docopt
+ >>> doc = '''
+ ... Usage:
+ ... my_program tcp <host> <port> [--timeout=<seconds>]
+ ... my_program serial <port> [--baud=<n>] [--timeout=<seconds>]
+ ... my_program (-h | --help | --version)
+ ...
+ ... Options:
+ ... -h, --help Show this screen and exit.
+ ... --baud=<n> Baudrate [default: 9600]
+ ... '''
+ >>> argv = ['tcp', '127.0.0.1', '80', '--timeout', '30']
+ >>> docopt(doc, argv)
+ {'--baud': '9600',
+ '--help': False,
+ '--timeout': '30',
+ '--version': False,
+ '<host>': '127.0.0.1',
+ '<port>': '80',
+ 'serial': False,
+ 'tcp': True}
+
+ See also
+ --------
+ * For video introduction see http://docopt.org
+ * Full documentation is available in README.rst as well as online
+ at https://github.com/docopt/docopt#readme
+
+ """
+ argv = sys.argv[1:] if argv is None else argv
+
+ usage_sections = parse_section('usage:', doc)
+ if len(usage_sections) == 0:
+ raise DocoptLanguageError('"usage:" (case-insensitive) not found.')
+ if len(usage_sections) > 1:
+ raise DocoptLanguageError('More than one "usage:" (case-insensitive).')
+ DocoptExit.usage = usage_sections[0]
+
+ options = parse_defaults(doc)
+ pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
+ # [default] syntax for argument is disabled
+ #for a in pattern.flat(Argument):
+ # same_name = [d for d in arguments if d.name == a.name]
+ # if same_name:
+ # a.value = same_name[0].value
+ argv = parse_argv(Tokens(argv), list(options), options_first)
+ pattern_options = set(pattern.flat(Option))
+ for options_shortcut in pattern.flat(OptionsShortcut):
+ doc_options = parse_defaults(doc)
+ options_shortcut.children = list(set(doc_options) - pattern_options)
+ #if any_options:
+ # options_shortcut.children += [Option(o.short, o.long, o.argcount)
+ # for o in argv if type(o) is Option]
+ extras(help, version, argv, doc)
+ matched, left, collected = pattern.fix().match(argv)
+ if matched and left == []: # better error message if left?
+ return Dict((a.name, a.value) for a in (pattern.flat() + collected))
+ raise DocoptExit()
diff --git a/src/seastar/fmt/support/fmt.pro b/src/seastar/fmt/support/fmt.pro
new file mode 100644
index 00000000..c555d0b4
--- /dev/null
+++ b/src/seastar/fmt/support/fmt.pro
@@ -0,0 +1,27 @@
+# Staticlib configuration for qmake builds
+# For some reason qmake 3.1 fails to identify source dependencies and excludes format.cc and printf.cc
+# from compilation so it _MUST_ be called as qmake -nodepend
+# A workaround is implemented below: a custom compiler is defined which does not track dependencies
+
+TEMPLATE = lib
+
+TARGET = fmt
+
+# fmt uses the .cc extension rather than qmake's default .cpp.
+QMAKE_EXT_CPP = .cc
+
+CONFIG = staticlib warn_on c++11
+
+FMT_SOURCES = \
+    ../src/format.cc \
+    ../src/posix.cc
+
+# Custom "compiler" that builds every FMT_SOURCES entry without dependency
+# tracking -- this is the workaround described in the header comment.
+fmt.name = libfmt
+fmt.input = FMT_SOURCES
+fmt.output = ${QMAKE_FILE_BASE}$$QMAKE_EXT_OBJ
+fmt.clean = ${QMAKE_FILE_BASE}$$QMAKE_EXT_OBJ
+fmt.depends = ${QMAKE_FILE_IN}
+# QMAKE_RUN_CXX will not be expanded
+fmt.commands = $$QMAKE_CXX -c $$QMAKE_CXXFLAGS $$QMAKE_CXXFLAGS_WARN_ON $$QMAKE_CXXFLAGS_RELEASE_WITH_DEBUGINFO $$QMAKE_CXXFLAGS_CXX11 ${QMAKE_FILE_IN}
+fmt.variable_out = OBJECTS
+fmt.CONFIG = no_dependencies no_link
+QMAKE_EXTRA_COMPILERS += fmt
diff --git a/src/seastar/fmt/support/manage.py b/src/seastar/fmt/support/manage.py
new file mode 100755
index 00000000..9bd2e489
--- /dev/null
+++ b/src/seastar/fmt/support/manage.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+
+"""Manage site and releases.
+
+Usage:
+ manage.py release [<branch>]
+ manage.py site
+"""
+
+from __future__ import print_function
+import datetime, docopt, errno, fileinput, json, os
+import re, requests, shutil, sys, tempfile
+from contextlib import contextmanager
+from distutils.version import LooseVersion
+from subprocess import check_call
+
+
+class Git:
+    """Thin wrapper that runs git subcommands inside a fixed directory.
+
+    Each helper method shells out via ``check_call`` and therefore raises
+    ``subprocess.CalledProcessError`` if git exits with a nonzero status.
+    """
+
+    def __init__(self, dir):
+        # Working directory (and clone target) for all subsequent commands.
+        self.dir = dir
+
+    def call(self, method, args, **kwargs):
+        # Run `git <method> <args...>`; callers pass cwd=self.dir where needed.
+        return check_call(['git', method] + list(args), **kwargs)
+
+    def add(self, *args):
+        return self.call('add', args, cwd=self.dir)
+
+    def checkout(self, *args):
+        return self.call('checkout', args, cwd=self.dir)
+
+    def clean(self, *args):
+        return self.call('clean', args, cwd=self.dir)
+
+    def clone(self, *args):
+        # clone runs outside self.dir: the directory does not exist yet and is
+        # appended as the clone destination.
+        return self.call('clone', list(args) + [self.dir])
+
+    def commit(self, *args):
+        return self.call('commit', args, cwd=self.dir)
+
+    def pull(self, *args):
+        return self.call('pull', args, cwd=self.dir)
+
+    def push(self, *args):
+        return self.call('push', args, cwd=self.dir)
+
+    def reset(self, *args):
+        return self.call('reset', args, cwd=self.dir)
+
+    def update(self, *args):
+        # Clone only if the directory is absent (repos are cached between
+        # runs); returns True when a fresh clone was made.
+        clone = not os.path.exists(self.dir)
+        if clone:
+            self.clone(*args)
+        return clone
+
+
+def clean_checkout(repo, branch):
+    """Discard all local changes/untracked files in *repo*, then check out *branch*."""
+    repo.clean('-f', '-d')
+    repo.reset('--hard')
+    repo.checkout(branch)
+
+
+class Runner:
+    """Callable that runs external commands with a default working directory."""
+
+    def __init__(self, cwd):
+        self.cwd = cwd
+
+    def __call__(self, *args, **kwargs):
+        # Use the stored cwd unless the caller overrides it explicitly.
+        kwargs['cwd'] = kwargs.get('cwd', self.cwd)
+        check_call(args, **kwargs)
+
+
+def create_build_env():
+    """Create a build environment.
+
+    Returns an ad-hoc object with fmt_dir, build_dir, versions (taken from
+    doc/build.py) and fmt_repo (a Git handle on the cached fmt clone).
+    """
+    class Env:
+        pass
+    env = Env()
+
+    # Import the documentation build module.
+    env.fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+    # doc/build.py is not on the default path; make it importable as 'build'.
+    sys.path.insert(0, os.path.join(env.fmt_dir, 'doc'))
+    import build
+
+    env.build_dir = 'build'
+    env.versions = build.versions
+
+    # Virtualenv and repos are cached to speed up builds.
+    build.create_build_env(os.path.join(env.build_dir, 'virtualenv'))
+
+    env.fmt_repo = Git(os.path.join(env.build_dir, 'fmt'))
+    return env
+
+
+@contextmanager
+def rewrite(filename):
+    """Context manager for in-place text edits.
+
+    Yields a buffer whose ``data`` attribute holds the file contents; the
+    (possibly modified) data is written back on exit.  If the file does not
+    exist, an empty buffer is yielded and any changes are deliberately
+    discarded (the early return skips the write-back).
+    """
+    class Buffer:
+        pass
+    buffer = Buffer()
+    if not os.path.exists(filename):
+        buffer.data = ''
+        yield buffer
+        return
+    with open(filename) as f:
+        buffer.data = f.read()
+    yield buffer
+    with open(filename, 'w') as f:
+        f.write(buffer.data)
+
+
+fmt_repo_url = 'git@github.com:fmtlib/fmt'
+
+
+def update_site(env):
+    """Rebuild the docs for every known fmt version and copy them into the
+    fmtlib.github.io working tree (one subdirectory per version)."""
+    env.fmt_repo.update(fmt_repo_url)
+
+    doc_repo = Git(os.path.join(env.build_dir, 'fmtlib.github.io'))
+    doc_repo.update('git@github.com:fmtlib/fmtlib.github.io')
+
+    for version in env.versions:
+        clean_checkout(env.fmt_repo, version)
+        target_doc_dir = os.path.join(env.fmt_repo.dir, 'doc')
+        # Remove the old theme.
+        for entry in os.listdir(target_doc_dir):
+            path = os.path.join(target_doc_dir, entry)
+            if os.path.isdir(path):
+                shutil.rmtree(path)
+        # Copy the new theme.
+        for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
+                      'conf.py', 'fmt.less']:
+            src = os.path.join(env.fmt_dir, 'doc', entry)
+            dst = os.path.join(target_doc_dir, entry)
+            copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
+            copy(src, dst)
+        # Rename index to contents.
+        contents = os.path.join(target_doc_dir, 'contents.rst')
+        if not os.path.exists(contents):
+            os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
+        # Fix issues in reference.rst/api.rst.  rewrite() silently skips
+        # whichever of the two files is absent in this version.
+        for filename in ['reference.rst', 'api.rst']:
+            pattern = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.M)
+            with rewrite(os.path.join(target_doc_dir, filename)) as b:
+                b.data = b.data.replace('std::ostream &', 'std::ostream&')
+                b.data = re.sub(pattern, r'doxygenfunction:: \1(int)', b.data)
+                b.data = b.data.replace('std::FILE*', 'std::FILE *')
+                b.data = b.data.replace('unsigned int', 'unsigned')
+                b.data = b.data.replace('operator""_', 'operator"" _')
+        # Fix a broken link in index.rst.
+        index = os.path.join(target_doc_dir, 'index.rst')
+        with rewrite(index) as b:
+            b.data = b.data.replace(
+                'doc/latest/index.html#format-string-syntax', 'syntax.html')
+        # Build the docs.
+        html_dir = os.path.join(env.build_dir, 'html')
+        if os.path.exists(html_dir):
+            shutil.rmtree(html_dir)
+        include_dir = env.fmt_repo.dir
+        # The header location moved across major versions; pick the right one.
+        if LooseVersion(version) >= LooseVersion('5.0.0'):
+            include_dir = os.path.join(include_dir, 'include', 'fmt')
+        elif LooseVersion(version) >= LooseVersion('3.0.0'):
+            include_dir = os.path.join(include_dir, 'fmt')
+        import build
+        build.build_docs(version, doc_dir=target_doc_dir,
+                         include_dir=include_dir, work_dir=env.build_dir)
+        shutil.rmtree(os.path.join(html_dir, '.doctrees'))
+        # Create symlinks for older versions.
+        for link, target in {'index': 'contents', 'api': 'reference'}.items():
+            link = os.path.join(html_dir, link) + '.html'
+            target += '.html'
+            if os.path.exists(os.path.join(html_dir, target)) and \
+               not os.path.exists(link):
+                os.symlink(target, link)
+        # Copy docs to the website.
+        version_doc_dir = os.path.join(doc_repo.dir, version)
+        try:
+            shutil.rmtree(version_doc_dir)
+        except OSError as e:
+            # A missing version directory is fine (first publish).
+            if e.errno != errno.ENOENT:
+                raise
+        shutil.move(html_dir, version_doc_dir)
+
+
+def release(args):
+    """Cut a release: bump versions, regenerate docs/site, and publish a
+    draft GitHub release with the source package attached.
+
+    *args* is the docopt dictionary; ``<branch>`` defaults to master.
+    Requires the FMT_TOKEN environment variable for the GitHub API.
+    """
+    env = create_build_env()
+    fmt_repo = env.fmt_repo
+
+    branch = args.get('<branch>')
+    if branch is None:
+        branch = 'master'
+    # If the cached clone already existed, make sure it is pristine.
+    if not fmt_repo.update('-b', branch, fmt_repo_url):
+        clean_checkout(fmt_repo, branch)
+
+    # Convert changelog from RST to GitHub-flavored Markdown and get the
+    # version.
+    changelog = 'ChangeLog.rst'
+    changelog_path = os.path.join(fmt_repo.dir, changelog)
+    import rst2md
+    changes, version = rst2md.convert(changelog_path)
+    cmakelists = 'CMakeLists.txt'
+    # Rewrite the FMT_VERSION line in CMakeLists.txt in place.
+    for line in fileinput.input(os.path.join(fmt_repo.dir, cmakelists),
+                                inplace=True):
+        prefix = 'set(FMT_VERSION '
+        if line.startswith(prefix):
+            line = prefix + version + ')\n'
+        sys.stdout.write(line)
+
+    # Update the version in the changelog: replace "<version> - TBD" with
+    # today's date and resize the RST underline on the following line.
+    title_len = 0
+    for line in fileinput.input(changelog_path, inplace=True):
+        # NOTE(review): line.decode assumes Python 2 bytes from fileinput;
+        # on Python 3 lines are already str and this would raise -- confirm
+        # the intended interpreter.
+        if line.decode('utf-8').startswith(version + ' - TBD'):
+            line = version + ' - ' + datetime.date.today().isoformat()
+            title_len = len(line)
+            line += '\n'
+        elif title_len:
+            line = '-' * title_len + '\n'
+            title_len = 0
+        sys.stdout.write(line)
+
+    # Add the version to the build script.
+    script = os.path.join('doc', 'build.py')
+    script_path = os.path.join(fmt_repo.dir, script)
+    for line in fileinput.input(script_path, inplace=True):
+        m = re.match(r'( *versions = )\[(.+)\]', line)
+        if m:
+            line = '{}[{}, \'{}\']\n'.format(m.group(1), m.group(2), version)
+        sys.stdout.write(line)
+
+    # Commit the version bumps on a (re-created) 'release' branch.
+    fmt_repo.checkout('-B', 'release')
+    fmt_repo.add(changelog, cmakelists, script)
+    fmt_repo.commit('-m', 'Update version')
+
+    # Build the docs and package.
+    run = Runner(fmt_repo.dir)
+    run('cmake', '.')
+    run('make', 'doc', 'package_source')
+    update_site(env)
+
+    # Create a release on GitHub.
+    fmt_repo.push('origin', 'release')
+    # NOTE(review): authenticating via the access_token query parameter has
+    # been deprecated by GitHub in favor of the Authorization header -- verify.
+    params = {'access_token': os.getenv('FMT_TOKEN')}
+    r = requests.post('https://api.github.com/repos/fmtlib/fmt/releases',
+                      params=params,
+                      data=json.dumps({'tag_name': version,
+                                       'target_commitish': 'release',
+                                       'body': changes, 'draft': True}))
+    if r.status_code != 201:
+        raise Exception('Failed to create a release ' + str(r))
+    id = r.json()['id']
+    uploads_url = 'https://uploads.github.com/repos/fmtlib/fmt/releases'
+    package = 'fmt-{}.zip'.format(version)
+    # Attach the source package produced by 'make package_source'.
+    r = requests.post(
+        '{}/{}/assets?name={}'.format(uploads_url, id, package),
+        headers={'Content-Type': 'application/zip'},
+        params=params, data=open('build/fmt/' + package, 'rb'))
+    if r.status_code != 201:
+        raise Exception('Failed to upload an asset ' + str(r))
+
+
+if __name__ == '__main__':
+    # Parse the command from the usage string in the module docstring and
+    # dispatch to the matching subcommand.
+    args = docopt.docopt(__doc__)
+    if args.get('release'):
+        release(args)
+    elif args.get('site'):
+        update_site(create_build_env())
diff --git a/src/seastar/fmt/support/rst2md.py b/src/seastar/fmt/support/rst2md.py
new file mode 100644
index 00000000..b1a193ce
--- /dev/null
+++ b/src/seastar/fmt/support/rst2md.py
@@ -0,0 +1,151 @@
+# reStructuredText (RST) to GitHub-flavored Markdown converter
+
+import re
+from docutils import core, nodes, writers
+
+
+def is_github_ref(node):
+    """Return a truthy match if *node* links to a GitHub issue or pull request."""
+    return re.match('https://github.com/.*/(issues|pull)/.*', node['refuri'])
+
+
+class Translator(nodes.NodeVisitor):
+    """docutils visitor that renders the first changelog section as
+    GitHub-flavored Markdown into ``self.output`` and records the release
+    version (parsed from the section title) in ``self.version``."""
+
+    def __init__(self, document):
+        nodes.NodeVisitor.__init__(self, document)
+        self.output = ''
+        # Current list-item indentation applied after every newline.
+        self.indent = 0
+        # True inside literal blocks, where newlines must be kept verbatim.
+        self.preserve_newlines = False
+
+    def write(self, text):
+        # Re-indent continuation lines to the current list nesting level.
+        self.output += text.replace('\n', '\n' + ' ' * self.indent)
+
+    def visit_document(self, node):
+        pass
+
+    def depart_document(self, node):
+        pass
+
+    def visit_section(self, node):
+        pass
+
+    def depart_section(self, node):
+        # Skip all sections except the first one.
+        raise nodes.StopTraversal
+
+    def visit_title(self, node):
+        # The section title starts with the release version, e.g. "5.3.0 - ...".
+        self.version = re.match(r'(\d+\.\d+\.\d+).*', node.children[0]).group(1)
+        raise nodes.SkipChildren
+
+    def depart_title(self, node):
+        pass
+
+    def visit_Text(self, node):
+        # Outside literal blocks, reflow wrapped source lines into one line.
+        if not self.preserve_newlines:
+            node = node.replace('\n', ' ')
+        self.write(node)
+
+    def depart_Text(self, node):
+        pass
+
+    def visit_bullet_list(self, node):
+        pass
+
+    def depart_bullet_list(self, node):
+        pass
+
+    def visit_list_item(self, node):
+        # Markdown bullet; indent children by the width of "* ".
+        self.write('* ')
+        self.indent += 2
+
+    def depart_list_item(self, node):
+        self.indent -= 2
+        self.write('\n\n')
+
+    def visit_paragraph(self, node):
+        pass
+
+    def depart_paragraph(self, node):
+        pass
+
+    def visit_reference(self, node):
+        # GitHub issue/PR URLs are auto-linked by GitHub, so only wrap other
+        # references in explicit [text](url) syntax.
+        if not is_github_ref(node):
+            self.write('[')
+
+    def depart_reference(self, node):
+        if not is_github_ref(node):
+            self.write('](' + node['refuri'] + ')')
+
+    def visit_target(self, node):
+        pass
+
+    def depart_target(self, node):
+        pass
+
+    def visit_literal(self, node):
+        self.write('`')
+
+    def depart_literal(self, node):
+        self.write('`')
+
+    def visit_literal_block(self, node):
+        # Fenced code block; tag C++ snippets for syntax highlighting.
+        self.write('\n\n```')
+        if 'c++' in node['classes']:
+            self.write('c++')
+        self.write('\n')
+        self.preserve_newlines = True
+
+    def depart_literal_block(self, node):
+        self.write('\n```\n')
+        self.preserve_newlines = False
+
+    def visit_inline(self, node):
+        pass
+
+    def depart_inline(self, node):
+        pass
+
+    def visit_image(self, node):
+        self.write('![](' + node['uri'] + ')')
+
+    def depart_image(self, node):
+        pass
+
+    def write_row(self, row, widths):
+        # Emit one pipe-separated Markdown table row, padding each cell to
+        # the column width from the RST colspec.
+        for i, entry in enumerate(row):
+            text = entry[0][0] if len(entry) > 0 else ''
+            if i != 0:
+                self.write('|')
+            self.write('{:{}}'.format(text, widths[i]))
+        self.write('\n')
+
+    def visit_table(self, node):
+        # Render the whole table here (header, separator, body) and skip
+        # docutils' per-cell traversal.
+        table = node.children[0]
+        colspecs = table[:-2]
+        thead = table[-2]
+        tbody = table[-1]
+        widths = [int(cs['colwidth']) for cs in colspecs]
+        sep = '|'.join(['-' * w for w in widths]) + '\n'
+        self.write('\n\n')
+        self.write_row(thead[0], widths)
+        self.write(sep)
+        for row in tbody:
+            self.write_row(row, widths)
+        raise nodes.SkipChildren
+
+    def depart_table(self, node):
+        pass
+
+class MDWriter(writers.Writer):
+    """GitHub-flavored markdown writer"""
+
+    supported = ('md',)
+    """Formats this writer supports."""
+
+    def translate(self):
+        translator = Translator(self.document)
+        self.document.walkabout(translator)
+        # The writer output is a (markdown, version) pair, not a plain string.
+        self.output = (translator.output, translator.version)
+
+
+def convert(rst_path):
+    """Convert an RST file to Markdown; returns a (markdown, version) tuple
+    as produced by MDWriter.translate."""
+    return core.publish_file(source_path=rst_path, writer=MDWriter())
diff --git a/src/seastar/fmt/support/rtd/conf.py b/src/seastar/fmt/support/rtd/conf.py
new file mode 100644
index 00000000..124fb9d4
--- /dev/null
+++ b/src/seastar/fmt/support/rtd/conf.py
@@ -0,0 +1,7 @@
+# Sphinx configuration for readthedocs.
+
+# NOTE(review): os and sys appear unused in this config -- confirm whether
+# they can be dropped or are kept for RTD conventions.
+import os, sys
+
+master_doc = 'index'
+# Use the local redirect-only theme in ./theme.
+html_theme = 'theme'
+html_theme_path = ["."]
diff --git a/src/seastar/fmt/support/rtd/index.rst b/src/seastar/fmt/support/rtd/index.rst
new file mode 100644
index 00000000..4a59e9be
--- /dev/null
+++ b/src/seastar/fmt/support/rtd/index.rst
@@ -0,0 +1,2 @@
+If you are not redirected automatically, follow the
+`link to the fmt documentation <http://fmtlib.net/latest/>`_.
diff --git a/src/seastar/fmt/support/rtd/theme/layout.html b/src/seastar/fmt/support/rtd/theme/layout.html
new file mode 100644
index 00000000..ee140868
--- /dev/null
+++ b/src/seastar/fmt/support/rtd/theme/layout.html
@@ -0,0 +1,17 @@
+{% extends "basic/layout.html" %}
+
+{# Redirect-only page: forwards visitors to fmtlib.net via a meta refresh and
+   JavaScript, with a plain link in the body as the final fallback. #}
+{% block extrahead %}
+<meta charset="UTF-8">
+<meta http-equiv="refresh" content="1;url=http://fmtlib.net/latest/">
+<script type="text/javascript">
+  window.location.href = "http://fmtlib.net/latest/"
+</script>
+<title>Page Redirection</title>
+{% endblock %}
+
+{% block document %}
+If you are not redirected automatically, follow the <a href='http://fmtlib.net/latest/'>link to the fmt documentation</a>.
+{% endblock %}
+
+{# Suppress the default footer. #}
+{% block footer %}
+{% endblock %}
diff --git a/src/seastar/fmt/support/rtd/theme/theme.conf b/src/seastar/fmt/support/rtd/theme/theme.conf
new file mode 100644
index 00000000..89e03bbd
--- /dev/null
+++ b/src/seastar/fmt/support/rtd/theme/theme.conf
@@ -0,0 +1,2 @@
+# Minimal Sphinx theme that exists only to host the redirecting layout.html.
+[theme]
+inherit = basic
diff --git a/src/seastar/fmt/support/travis-build.py b/src/seastar/fmt/support/travis-build.py
new file mode 100755
index 00000000..d71a7ae6
--- /dev/null
+++ b/src/seastar/fmt/support/travis-build.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# Build the project on Travis CI.
+
+from __future__ import print_function
+import errno, os, shutil, subprocess, sys, urllib
+from subprocess import call, check_call, Popen, PIPE, STDOUT
+
+def rmtree_if_exists(dir):
+ try:
+ shutil.rmtree(dir)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ pass
+
+def makedirs_if_not_exist(dir):
+    """Create *dir* (and parents), tolerating an already-existing directory;
+    any other OSError is re-raised."""
+    try:
+        os.makedirs(dir)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+
+def install_dependencies():
+    """Install the doc-build toolchain (virtualenv, node/less, doxygen) on a
+    Travis Ubuntu worker; exits the whole process for non-master branches."""
+    branch = os.environ['TRAVIS_BRANCH']
+    if branch != 'master':
+        print('Branch: ' + branch)
+        exit(0) # Ignore non-master branches
+    # Add the NodeSource apt repository to get a usable nodejs.
+    check_call('curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key ' +
+               '| sudo apt-key add -', shell=True)
+    check_call('echo "deb https://deb.nodesource.com/node_0.10 precise main" ' +
+               '| sudo tee /etc/apt/sources.list.d/nodesource.list', shell=True)
+    check_call(['sudo', 'apt-get', 'update'])
+    check_call(['sudo', 'apt-get', 'install', 'python-virtualenv', 'nodejs'])
+    check_call(['sudo', 'npm', 'install', '-g', 'less@2.6.1', 'less-plugin-clean-css'])
+    # Pin the doxygen version used to build the docs.
+    # NOTE(review): urllib.urlretrieve is Python 2 only (moved to
+    # urllib.request in Python 3) -- confirm the intended interpreter.
+    deb_file = 'doxygen_1.8.6-2_amd64.deb'
+    urllib.urlretrieve('http://mirrors.kernel.org/ubuntu/pool/main/d/doxygen/' +
+                       deb_file, deb_file)
+    check_call(['sudo', 'dpkg', '-i', deb_file])
+
+# Top-level CI driver: BUILD=Doc builds and publishes the documentation;
+# any other BUILD value configures, builds, tests and installs the library.
+fmt_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+build = os.environ['BUILD']
+if build == 'Doc':
+    travis = 'TRAVIS' in os.environ
+    if travis:
+        install_dependencies()
+    # Note: this rebinds 'build' from the BUILD env string to the doc/build.py
+    # module imported below.
+    sys.path.insert(0, os.path.join(fmt_dir, 'doc'))
+    import build
+    build.create_build_env()
+    html_dir = build.build_docs()
+    repo = 'fmtlib.github.io'
+    if travis and 'KEY' not in os.environ:
+        # Don't update the repo if building on Travis from an account that
+        # doesn't have push access.
+        print('Skipping update of ' + repo)
+        exit(0)
+    # Clone the fmtlib.github.io repo.
+    rmtree_if_exists(repo)
+    git_url = 'https://github.com/' if travis else 'git@github.com:'
+    check_call(['git', 'clone', git_url + 'fmtlib/{}.git'.format(repo)])
+    # Copy docs to the repo.
+    target_dir = os.path.join(repo, 'dev')
+    rmtree_if_exists(target_dir)
+    shutil.copytree(html_dir, target_dir, ignore=shutil.ignore_patterns('.*'))
+    if travis:
+        check_call(['git', 'config', '--global', 'user.name', 'amplbot'])
+        check_call(['git', 'config', '--global', 'user.email', 'viz@ampl.com'])
+    # Push docs to GitHub pages.
+    check_call(['git', 'add', '--all'], cwd=repo)
+    # git diff-index exits nonzero when there are staged changes to commit.
+    if call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=repo):
+        check_call(['git', 'commit', '-m', 'Update documentation'], cwd=repo)
+    cmd = 'git push'
+    if travis:
+        cmd += ' https://$KEY@github.com/fmtlib/fmtlib.github.io.git master'
+    p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, cwd=repo)
+    # Print the output without the key.
+    # NOTE(review): os.environ['KEY'] raises KeyError on the non-Travis path,
+    # and communicate() returns bytes on Python 3 -- confirm intended usage.
+    print(p.communicate()[0].replace(os.environ['KEY'], '$KEY'))
+    if p.returncode != 0:
+        raise subprocess.CalledProcessError(p.returncode, cmd)
+    exit(0)
+
+standard = os.environ['STANDARD']
+install_dir    = os.path.join(fmt_dir, "_install")
+build_dir      = os.path.join(fmt_dir, "_build")
+test_build_dir = os.path.join(fmt_dir, "_build_test")
+
+# Configure library.
+makedirs_if_not_exist(build_dir)
+cmake_flags = [
+  '-DCMAKE_INSTALL_PREFIX=' + install_dir, '-DCMAKE_BUILD_TYPE=' + build,
+  '-DCMAKE_CXX_STANDARD=' + standard
+]
+check_call(['cmake', '-DFMT_DOC=OFF', '-DFMT_PEDANTIC=ON', '-DFMT_WERROR=ON', fmt_dir] +
+           cmake_flags, cwd=build_dir)
+
+# Build library.
+check_call(['make', '-j4'], cwd=build_dir)
+
+# Test library.
+env = os.environ.copy()
+env['CTEST_OUTPUT_ON_FAILURE'] = '1'
+# On test failure, dump the CTest log so the failure is visible in CI output.
+if call(['make', 'test'], env=env, cwd=build_dir):
+    with open(os.path.join(build_dir, 'Testing', 'Temporary', 'LastTest.log'), 'r') as f:
+        print(f.read())
+    sys.exit(-1)
+
+# Install library.
+check_call(['make', 'install'], cwd=build_dir)
+
+# Test installation: build the find-package test against the installed fmt.
+makedirs_if_not_exist(test_build_dir)
+check_call(['cmake', os.path.join(fmt_dir, "test", "find-package-test")] +
+           cmake_flags, cwd=test_build_dir)
+check_call(['make', '-j4'], cwd=test_build_dir)
diff --git a/src/seastar/fmt/support/update-coverity-branch.py b/src/seastar/fmt/support/update-coverity-branch.py
new file mode 100755
index 00000000..519f5d00
--- /dev/null
+++ b/src/seastar/fmt/support/update-coverity-branch.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# Update the coverity branch from the master branch.
+# It is not done automatically because Coverity Scan limits
+# the number of submissions per day.
+
+from __future__ import print_function
+import shutil, tempfile
+from subprocess import check_output, STDOUT
+
+class Git:
+    """Callable git runner bound to a directory; echoes and returns the
+    combined stdout/stderr of each command."""
+
+    def __init__(self, dir):
+        self.dir = dir
+
+    def __call__(self, *args):
+        # stderr is merged into the captured output so it is both printed
+        # and available to the caller.
+        output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
+        print(output)
+        return output
+
+# Clone the coverity branch into a temp dir, merge master into it preferring
+# master's changes, and push; the temp dir is always removed afterwards.
+dir = tempfile.mkdtemp()
+try:
+    git = Git(dir)
+    git('clone', '-b', 'coverity', 'git@github.com:fmtlib/fmt.git', dir)
+    output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
+    # NOTE(review): check_output returns bytes on Python 3, so this substring
+    # test assumes Python 2 -- confirm the intended interpreter.
+    if 'Fast-forward' not in output:
+        # A real merge: keep the coverity branch's own .travis.yml and commit.
+        git('reset', 'HEAD', '.travis.yml')
+        git('checkout', '--', '.travis.yml')
+        git('commit', '-m', 'Update coverity branch')
+    git('push')
+finally:
+    shutil.rmtree(dir)