summaryrefslogtreecommitdiffstats
path: root/comm/third_party/botan/src/scripts
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 17:32:43 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 17:32:43 +0000
commit6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
treea68f146d7fa01f0134297619fbe7e33db084e0aa /comm/third_party/botan/src/scripts
parentInitial commit. (diff)
downloadthunderbird-6bf0a5cb5034a7e684dcc3500e841785237ce2dd.tar.xz
thunderbird-6bf0a5cb5034a7e684dcc3500e841785237ce2dd.zip
Adding upstream version 1:115.7.0.upstream/1%115.7.0upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'comm/third_party/botan/src/scripts')
-rw-r--r--comm/third_party/botan/src/scripts/Dockerfile.android17
-rwxr-xr-xcomm/third_party/botan/src/scripts/bench.py216
-rwxr-xr-xcomm/third_party/botan/src/scripts/build_docs.py242
-rw-r--r--comm/third_party/botan/src/scripts/check.py89
-rw-r--r--comm/third_party/botan/src/scripts/ci/appveyor.yml90
-rw-r--r--comm/third_party/botan/src/scripts/ci/codecov.yml15
-rw-r--r--comm/third_party/botan/src/scripts/ci/lgtm.yml31
-rw-r--r--comm/third_party/botan/src/scripts/ci/setup_appveyor.bat19
-rwxr-xr-xcomm/third_party/botan/src/scripts/ci/setup_gh_actions.sh69
-rwxr-xr-xcomm/third_party/botan/src/scripts/ci/setup_travis.sh89
-rw-r--r--comm/third_party/botan/src/scripts/ci/travis.yml50
-rwxr-xr-xcomm/third_party/botan/src/scripts/ci_build.py620
-rwxr-xr-xcomm/third_party/botan/src/scripts/ci_check_install.py104
-rwxr-xr-xcomm/third_party/botan/src/scripts/cleanup.py133
-rwxr-xr-xcomm/third_party/botan/src/scripts/comba.py126
-rwxr-xr-xcomm/third_party/botan/src/scripts/create_corpus_zip.py48
-rwxr-xr-xcomm/third_party/botan/src/scripts/dist.py466
-rwxr-xr-xcomm/third_party/botan/src/scripts/docker-android.sh11
-rwxr-xr-xcomm/third_party/botan/src/scripts/ffi_decls.py113
-rw-r--r--comm/third_party/botan/src/scripts/fuzzer.xml17
-rwxr-xr-xcomm/third_party/botan/src/scripts/gen_os_features.py95
-rwxr-xr-xcomm/third_party/botan/src/scripts/install.py261
-rwxr-xr-xcomm/third_party/botan/src/scripts/macro_checks.py42
-rwxr-xr-xcomm/third_party/botan/src/scripts/monty.py98
-rwxr-xr-xcomm/third_party/botan/src/scripts/oids.py337
-rwxr-xr-xcomm/third_party/botan/src/scripts/python_unittests.py224
-rwxr-xr-xcomm/third_party/botan/src/scripts/python_unittests_unix.py67
-rwxr-xr-xcomm/third_party/botan/src/scripts/run_tls_attacker.py138
-rwxr-xr-xcomm/third_party/botan/src/scripts/run_tls_fuzzer.py98
-rwxr-xr-xcomm/third_party/botan/src/scripts/show_dependencies.py213
-rwxr-xr-xcomm/third_party/botan/src/scripts/test_all_configs.py136
-rwxr-xr-xcomm/third_party/botan/src/scripts/test_cli.py1429
-rwxr-xr-xcomm/third_party/botan/src/scripts/test_cli_crypt.py220
-rwxr-xr-xcomm/third_party/botan/src/scripts/test_fuzzers.py187
-rw-r--r--comm/third_party/botan/src/scripts/test_python.py695
-rw-r--r--comm/third_party/botan/src/scripts/tls_scanner/boa.txt1
-rw-r--r--comm/third_party/botan/src/scripts/tls_scanner/policy.txt19
-rw-r--r--comm/third_party/botan/src/scripts/tls_scanner/readme.txt5
-rwxr-xr-xcomm/third_party/botan/src/scripts/tls_scanner/tls_scanner.py60
-rw-r--r--comm/third_party/botan/src/scripts/tls_scanner/urls.txt58
-rwxr-xr-xcomm/third_party/botan/src/scripts/tls_suite_info.py342
-rwxr-xr-xcomm/third_party/botan/src/scripts/website.py166
42 files changed, 7456 insertions, 0 deletions
diff --git a/comm/third_party/botan/src/scripts/Dockerfile.android b/comm/third_party/botan/src/scripts/Dockerfile.android
new file mode 100644
index 0000000000..124d5d4f6d
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/Dockerfile.android
@@ -0,0 +1,17 @@
+FROM devnexen/android-ndk:r20 AS android-ndk
+ARG ANDROID_ARCH
+ARG ANDROID_TOOLCHAIN_SUF
+ARG ANDROID_ARCH_SUF
+ARG ANDROID_SDK_VER
+RUN apt-get update && apt-get install -y --no-install-recommends python
+RUN mkdir -p /botan/android
+WORKDIR /botan
+COPY configure.py configure.py
+COPY src src
+COPY doc doc
+COPY license.txt license.txt
+COPY news.rst news.rst
+ENV PATH=$PATH:/opt/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/
+RUN ./configure.py --prefix=android/arm --os=android --cpu=${ANDROID_ARCH} --cc=clang --cc-bin=${ANDROID_ARCH}${ANDROID_ARCH_SUF}-linux-android${ANDROID_TOOLCHAIN_SUF}${ANDROID_SDK_VER}-clang++ --ar-command=${ANDROID_ARCH}${ANDROID_ARCH_SUF}-linux-android${ANDROID_TOOLCHAIN_SUF}-ar
+RUN make -j`getconf _NPROCESSORS_ONLN`
+RUN make install
diff --git a/comm/third_party/botan/src/scripts/bench.py b/comm/third_party/botan/src/scripts/bench.py
new file mode 100755
index 0000000000..1cc626366f
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/bench.py
@@ -0,0 +1,216 @@
+#!/usr/bin/python
+
+"""
+Compare Botan with OpenSSL using their respective benchmark utils
+
+(C) 2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+
+TODO
+ - Also compare RSA, ECDSA, ECDH
+ - Output pretty graphs with matplotlib
+"""
+
+import logging
+import os
+import sys
+import optparse # pylint: disable=deprecated-module
+import subprocess
+import re
+import json
+
+def setup_logging(options):
+ if options.verbose:
+ log_level = logging.DEBUG
+ elif options.quiet:
+ log_level = logging.WARNING
+ else:
+ log_level = logging.INFO
+
+ class LogOnErrorHandler(logging.StreamHandler, object):
+ def emit(self, record):
+ super(LogOnErrorHandler, self).emit(record)
+ if record.levelno >= logging.ERROR:
+ sys.exit(1)
+
+ lh = LogOnErrorHandler(sys.stdout)
+ lh.setFormatter(logging.Formatter('%(levelname) 7s: %(message)s'))
+ logging.getLogger().addHandler(lh)
+ logging.getLogger().setLevel(log_level)
+
+def run_command(cmd):
+ logging.debug("Running '%s'", ' '.join(cmd))
+
+ proc = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True)
+ stdout, stderr = proc.communicate()
+
+ if proc.returncode != 0:
+ logging.error("Running command %s failed ret %d", ' '.join(cmd), proc.returncode)
+
+ return stdout + stderr
+
+def get_openssl_version(openssl):
+ output = run_command([openssl, 'version'])
+
+ openssl_version_re = re.compile(r'OpenSSL ([0-9a-z\.]+) .*')
+
+ match = openssl_version_re.match(output)
+
+ if match:
+ return match.group(1)
+ else:
+ logging.warning("Unable to parse OpenSSL version output %s", output)
+ return output
+
+def get_botan_version(botan):
+ return run_command([botan, 'version']).strip()
+
+EVP_MAP = {
+ 'Blowfish': 'bf-ecb',
+ 'AES-128/GCM': 'aes-128-gcm',
+ 'AES-256/GCM': 'aes-256-gcm',
+ 'ChaCha20': 'chacha20',
+ 'MD5': 'md5',
+ 'SHA-1': 'sha1',
+ 'RIPEMD-160': 'ripemd160',
+ 'SHA-256': 'sha256',
+ 'SHA-384': 'sha384',
+ 'SHA-512': 'sha512'
+ }
+
+def run_openssl_bench(openssl, algo):
+
+ logging.info('Running OpenSSL benchmark for %s', algo)
+
+ cmd = [openssl, 'speed', '-mr']
+
+ if algo in EVP_MAP:
+ cmd += ['-evp', EVP_MAP[algo]]
+ else:
+ cmd += [algo]
+
+ output = run_command(cmd)
+
+ buf_header = re.compile(r'\+DT:([a-z0-9-]+):([0-9]+):([0-9]+)$')
+ res_header = re.compile(r'\+R:([0-9]+):[a-z0-9-]+:([0-9]+\.[0-9]+)$')
+ ignored = re.compile(r'\+(H|F):.*')
+
+ results = []
+
+ result = None
+
+ for l in output.splitlines():
+ if ignored.match(l):
+ continue
+
+ if result is None:
+ match = buf_header.match(l)
+ if match is None:
+ logging.error("Unexpected output from OpenSSL %s", l)
+
+ result = {'algo': algo, 'buf_size': int(match.group(3))}
+ else:
+ match = res_header.match(l)
+
+ result['bytes'] = int(match.group(1)) * result['buf_size']
+ result['runtime'] = float(match.group(2))
+ result['bps'] = int(result['bytes'] / result['runtime'])
+ results.append(result)
+ result = None
+
+ return results
+
+def run_botan_bench(botan, runtime, buf_sizes, algo):
+
+ runtime = .05
+
+ cmd = [botan, 'speed', '--format=json', '--msec=%d' % int(runtime * 1000),
+ '--buf-size=%s' % (','.join([str(i) for i in buf_sizes])), algo]
+ output = run_command(cmd)
+ output = json.loads(output)
+
+ return output
+
+class BenchmarkResult(object):
+ def __init__(self, algo, buf_sizes, openssl_results, botan_results):
+ self.algo = algo
+ self.results = {}
+
+ def find_result(results, sz):
+ for r in results:
+ if 'buf_size' in r and r['buf_size'] == sz:
+ return r['bps']
+ raise Exception("Could not find expected result in data")
+
+ for buf_size in buf_sizes:
+ self.results[buf_size] = {
+ 'openssl': find_result(openssl_results, buf_size),
+ 'botan': find_result(botan_results, buf_size)
+ }
+
+ def result_string(self):
+
+ out = ""
+ for (k, v) in self.results.items():
+ out += "algo %s buf_size % 6d botan % 12d bps openssl % 12d bps adv %.02f\n" % (
+ self.algo, k, v['botan'], v['openssl'], float(v['botan']) / v['openssl'])
+ return out
+
+def bench_algo(openssl, botan, algo):
+ openssl_results = run_openssl_bench(openssl, algo)
+
+ buf_sizes = sorted([x['buf_size'] for x in openssl_results])
+ runtime = sum(x['runtime'] for x in openssl_results) / len(openssl_results)
+
+ botan_results = run_botan_bench(botan, runtime, buf_sizes, algo)
+
+ return BenchmarkResult(algo, buf_sizes, openssl_results, botan_results)
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ parser = optparse.OptionParser()
+
+ parser.add_option('--verbose', action='store_true', default=False, help="be noisy")
+ parser.add_option('--quiet', action='store_true', default=False, help="be very quiet")
+
+ parser.add_option('--openssl-cli', metavar='PATH',
+ default='/usr/bin/openssl',
+ help='Path to openssl binary (default %default)')
+
+ parser.add_option('--botan-cli', metavar='PATH',
+ default='/usr/bin/botan',
+ help='Path to botan binary (default %default)')
+
+ (options, args) = parser.parse_args(args)
+
+ setup_logging(options)
+
+ openssl = options.openssl_cli
+ botan = options.botan_cli
+
+ if os.access(openssl, os.X_OK) is False:
+ logging.error("Unable to access openssl binary at %s", openssl)
+
+ if os.access(botan, os.X_OK) is False:
+ logging.error("Unable to access botan binary at %s", botan)
+
+ openssl_version = get_openssl_version(openssl)
+ botan_version = get_botan_version(botan)
+
+ logging.info("Comparing Botan %s with OpenSSL %s", botan_version, openssl_version)
+
+ for algo in sorted(EVP_MAP.keys()):
+ result = bench_algo(openssl, botan, algo)
+ print(result.result_string())
+
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/build_docs.py b/comm/third_party/botan/src/scripts/build_docs.py
new file mode 100755
index 0000000000..6eb9b656c9
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/build_docs.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python
+
+"""
+Botan doc generation script
+
+(C) 2014,2015,2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import sys
+import optparse # pylint: disable=deprecated-module
+import subprocess
+import shutil
+import logging
+import json
+import tempfile
+import os
+import stat
+
+def get_concurrency():
+ """
+ Get default concurrency level of build
+ """
+ def_concurrency = 2
+
+ try:
+ import multiprocessing
+ return max(def_concurrency, multiprocessing.cpu_count())
+ except ImportError:
+ return def_concurrency
+
+def have_prog(prog):
+ """
+ Check if some named program exists in the path
+ """
+ for path in os.environ['PATH'].split(os.pathsep):
+ exe_file = os.path.join(path, prog)
+ if os.path.exists(exe_file) and os.access(exe_file, os.X_OK):
+ return True
+ return False
+
+def find_rst2man():
+ possible_names = ['rst2man', 'rst2man.py']
+
+ for name in possible_names:
+ if have_prog(name):
+ return name
+ raise Exception("Was configured with rst2man but could not be located in PATH")
+
+def touch(fname):
+ try:
+ os.utime(fname, None)
+ except OSError:
+ open(fname, 'a').close()
+
+def copy_files(src_path, dest_dir):
+
+ logging.debug("Copying %s to %s", src_path, dest_dir)
+
+ file_mode = os.stat(src_path).st_mode
+
+ try:
+ os.mkdir(dest_dir)
+ except OSError:
+ pass
+
+ if stat.S_ISREG(file_mode):
+ logging.debug("Copying file %s to %s", src_path, dest_dir)
+ shutil.copy(src_path, dest_dir)
+ else:
+ for f in os.listdir(src_path):
+ src_file = os.path.join(src_path, f)
+ file_mode = os.stat(src_file).st_mode
+ if stat.S_ISREG(file_mode):
+ dest_file = os.path.join(dest_dir, f)
+ shutil.copyfile(src_file, dest_file)
+ elif stat.S_ISDIR(file_mode):
+ copy_files(os.path.join(src_path, f), os.path.join(dest_dir, f))
+
+def run_and_check(cmd_line, cwd=None):
+
+ logging.info("Starting %s", ' '.join(cmd_line))
+
+ try:
+ proc = subprocess.Popen(cmd_line, cwd=cwd)
+
+ proc.communicate()
+ except OSError as e:
+ logging.error("Executing %s failed (%s)", ' '.join(cmd_line), e)
+
+ if proc.returncode != 0:
+ logging.error("Error running %s", ' '.join(cmd_line))
+ sys.exit(1)
+
+
+def parse_options(args):
+ parser = optparse.OptionParser()
+
+ parser.add_option('--verbose', action='store_true', default=False,
+ help='Show debug messages')
+ parser.add_option('--quiet', action='store_true', default=False,
+ help='Show only warnings and errors')
+
+ parser.add_option('--build-dir', metavar='DIR', default='build',
+ help='Location of build output (default \'%default\')')
+ parser.add_option('--dry-run', default=False, action='store_true',
+ help='Just display what would be done')
+
+ (options, args) = parser.parse_args(args)
+
+ if len(args) > 1:
+ logging.error("Unknown arguments")
+ return None
+
+ def log_level():
+ if options.verbose:
+ return logging.DEBUG
+ if options.quiet:
+ return logging.WARNING
+ return logging.INFO
+
+ logging.getLogger().setLevel(log_level())
+
+ return options
+
+def sphinx_supports_concurrency():
+ import re
+ from distutils.version import StrictVersion
+
+ proc = subprocess.Popen(['sphinx-build', '--version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ output, _ = proc.communicate()
+ if isinstance(output, bytes):
+ output = output.decode('ascii')
+ output = output.strip()
+
+ # Sphinx v1.1.3
+ # sphinx-build 1.7.4
+ match = re.match(r'^(?:[a-zA-Z_-]+) v?(([0-9]+)\.([0-9]+))', output)
+
+ if match is None:
+ # If regex doesn't match, disable by default
+ logging.warning("Did not recognize sphinx version from '%s'", output)
+ return False
+
+ version = StrictVersion(match.group(1))
+
+ if version < StrictVersion('1.4'):
+ # not supported
+ return False
+ if version == StrictVersion('3.0'):
+ # Bug in Sphinx 3.0 https://github.com/sphinx-doc/sphinx/issues/7438
+ return False
+ return True
+
+def read_config(config):
+ try:
+ f = open(config)
+ cfg = json.load(f)
+ f.close()
+ except OSError:
+ raise Exception('Failed to load build config %s - is build dir correct?' % (config))
+
+ return cfg
+
+def main(args=None):
+ # pylint: disable=too-many-branches
+
+ if args is None:
+ args = sys.argv
+
+ logging.basicConfig(stream=sys.stdout,
+ format='%(levelname) 7s: %(message)s')
+
+ options = parse_options(args)
+
+ if options is None:
+ return 1
+
+ cfg = read_config(os.path.join(options.build_dir, 'build_config.json'))
+
+ with_docs = bool(cfg['with_documentation'])
+ with_sphinx = bool(cfg['with_sphinx'])
+ with_pdf = bool(cfg['with_pdf'])
+ with_rst2man = bool(cfg['with_rst2man'])
+ with_doxygen = bool(cfg['with_doxygen'])
+
+ doc_stamp_file = cfg['doc_stamp_file']
+
+ handbook_src = cfg['doc_dir']
+ handbook_output = cfg['handbook_output_dir']
+
+ if with_docs is False:
+ logging.debug('Documentation build disabled')
+ return 0
+
+ cmds = []
+
+ if with_doxygen:
+ cmds.append(['doxygen', os.path.join(cfg['build_dir'], 'botan.doxy')])
+
+ if with_sphinx:
+ sphinx_build = ['sphinx-build', '-q', '-c', cfg['sphinx_config_dir']]
+ if sphinx_supports_concurrency():
+ sphinx_build += ['-j', str(get_concurrency())]
+
+ cmds.append(sphinx_build + ['-b', 'html', handbook_src, handbook_output])
+
+ if with_pdf:
+ latex_output = tempfile.mkdtemp(prefix='botan_latex_')
+ cmds.append(sphinx_build + ['-b', 'latex', handbook_src, latex_output])
+ cmds.append(['make', '-C', latex_output])
+ cmds.append(['cp', os.path.join(latex_output, 'botan.pdf'), handbook_output])
+ else:
+ # otherwise just copy it
+ cmds.append(['cp', handbook_src, handbook_output])
+
+ if with_rst2man:
+ cmds.append([find_rst2man(),
+ os.path.join(cfg['build_dir'], 'botan.rst'),
+ os.path.join(cfg['build_dir'], 'botan.1')])
+
+ cmds.append(['touch', doc_stamp_file])
+
+ for cmd in cmds:
+ if options.dry_run:
+ print(' '.join(cmd))
+ else:
+ if cmd[0] == 'cp':
+ assert len(cmd) == 3
+ copy_files(cmd[1], cmd[2])
+ elif cmd[0] == 'touch':
+ assert len(cmd) == 2
+ touch(cmd[1])
+ else:
+ run_and_check(cmd)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/check.py b/comm/third_party/botan/src/scripts/check.py
new file mode 100644
index 0000000000..ea80d552d2
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/check.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+"""
+Implements the "make check" target
+
+(C) 2020 Jack Lloyd, Rene Meusel
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import json
+import logging
+import optparse # pylint: disable=deprecated-module
+import os
+import subprocess
+import sys
+
+def run_and_check(cmd_line, env=None, cwd=None):
+
+ logging.info("Starting %s", ' '.join(cmd_line))
+
+ try:
+ proc = subprocess.Popen(cmd_line, cwd=cwd, env=env)
+ proc.communicate()
+ except OSError as e:
+ logging.error("Executing %s failed (%s)", ' '.join(cmd_line), e)
+
+ if proc.returncode != 0:
+ logging.error("Error running %s", ' '.join(cmd_line))
+ sys.exit(1)
+
+
+def make_environment(build_shared_lib):
+ if not build_shared_lib:
+ return None
+
+ env = os.environ.copy()
+
+ def extend_colon_list(k, n):
+ env[k] = n if k not in env else ":".join([env[k], n])
+
+ extend_colon_list("DYLD_LIBRARY_PATH", os.path.abspath("."))
+ extend_colon_list("LD_LIBRARY_PATH", os.path.abspath("."))
+
+ return env
+
+
+def parse_options(args):
+ parser = optparse.OptionParser()
+ parser.add_option('--build-dir', default='build', metavar='DIR',
+ help='specify the botan build directory (default %default)')
+
+ (options, args) = parser.parse_args(args)
+
+ if len(args) > 1:
+ raise Exception("Unknown arguments")
+
+ return options
+
+
+def read_config(config):
+ try:
+ with open(config) as f:
+ return json.load(f)
+ except OSError:
+ raise Exception('Failed to load build config %s - is build dir correct?' % (config))
+
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ options = parse_options(args)
+
+ cfg = read_config(os.path.join(options.build_dir, 'build_config.json'))
+
+ test_exe = cfg.get('test_exe')
+ build_shared_lib = cfg.get('build_shared_lib')
+
+ if not os.path.isfile(test_exe) or not os.access(test_exe, os.X_OK):
+ raise Exception("Test binary not built")
+
+ run_and_check([test_exe, "--data-dir=%s" % cfg.get('test_data_dir')],
+ make_environment(build_shared_lib))
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/ci/appveyor.yml b/comm/third_party/botan/src/scripts/ci/appveyor.yml
new file mode 100644
index 0000000000..35345841a4
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/appveyor.yml
@@ -0,0 +1,90 @@
+
+clone_depth: 5
+
+environment:
+ SCCACHE_CACHE_SIZE: 160M
+ SCCACHE_VERSION: 0.2.12
+ APPVEYOR_SAVE_CACHE_ON_ERROR: true
+
+ matrix:
+
+ # MSVC 2015 DLL x86-32
+ - CC: VC2015
+ PLATFORM: x86
+ TARGET: shared
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
+ BOOST_ROOT: "C:\\Libraries\\boost_1_69_0"
+ BOOST_LIBRARYDIR: "C:\\Libraries\\boost_1_69_0\\lib32-msvc-14.0"
+ BOOST_SYSTEM_LIBRARY: "libboost_system-vc140-mt-x32-1_69"
+ MAKE_TOOL: nmake
+ TARGET_CC: msvc
+ EXTRA_FLAGS: "--disable-werror"
+ DISABLED_TESTS: "certstor_system" # requires 'ISRG Root X1' / not in this AppVeyor image
+
+ # MSVC 2017 DLL x86-32
+ - CC: VC2017
+ PLATFORM: x86
+ TARGET: shared
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+ BOOST_ROOT: "C:\\Libraries\\boost_1_69_0"
+ BOOST_LIBRARYDIR: "C:\\Libraries\\boost_1_69_0\\lib32-msvc-14.1"
+ BOOST_SYSTEM_LIBRARY: "libboost_system-vc141-mt-x32-1_69"
+ MAKE_TOOL: jom
+ TARGET_CC: msvc
+ DISABLED_TESTS: "certstor_system" # requires 'ISRG Root X1' / not in this AppVeyor image
+
+ # MSVC 2017 DLL x86-64
+ - CC: VC2017
+ PLATFORM: x86_amd64
+ TARGET: shared
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+ BOOST_ROOT: "C:\\Libraries\\boost_1_69_0"
+ BOOST_LIBRARYDIR: "C:\\Libraries\\boost_1_69_0\\lib64-msvc-14.1"
+ BOOST_SYSTEM_LIBRARY: "libboost_system-vc141-mt-x64-1_69"
+ MAKE_TOOL: jom
+ TARGET_CC: msvc
+ DISABLED_TESTS: "certstor_system" # requires 'ISRG Root X1' / not in this AppVeyor image
+
+ # MSVC 2017 static x86-64
+ - CC: VC2017
+ PLATFORM: x86_amd64
+ TARGET: static
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+ BOOST_ROOT: "C:\\Libraries\\boost_1_69_0"
+ BOOST_LIBRARYDIR: "C:\\Libraries\\boost_1_69_0\\lib64-msvc-14.1"
+ BOOST_SYSTEM_LIBRARY: "libboost_system-vc141-mt-x64-1_69"
+ MAKE_TOOL: jom
+ TARGET_CC: msvc
+ EXTRA_FLAGS: "--extra-cxxflags=/DUNICODE --extra-cxxflags=/D_UNICODE"
+ DISABLED_TESTS: "certstor_system" # requires 'ISRG Root X1' / not in this AppVeyor image
+
+ # MSVC 2019 static x86-64 w/debug iterators
+ - CC: VC2019
+ PLATFORM: x86_amd64
+ TARGET: sanitizer
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
+ MAKE_TOOL: jom
+ TARGET_CC: msvc
+
+ # MinGW GCC
+ - CC: MinGW
+ PLATFORM: x86_amd64
+ TARGET: static
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
+ MAKE_TOOL: mingw32-make
+ TARGET_CC: gcc
+
+install:
+ - call src\scripts\ci\setup_appveyor.bat
+
+build_script:
+ - python src\scripts\ci_build.py --os=windows --cc=%TARGET_CC% --without-python3 --compiler-cache=sccache --make-tool=%MAKE_TOOL% --cpu=%PLATFORM% --disabled-tests=%DISABLED_TESTS% %EXTRA_FLAGS% %TARGET%
+
+# whitelist branches to avoid testing feature branches twice (as branch and as pull request)
+branches:
+ only:
+ - master
+ - release-2
+
+cache:
+ - C:\Users\appveyor\AppData\Local\Mozilla\sccache\cache
diff --git a/comm/third_party/botan/src/scripts/ci/codecov.yml b/comm/third_party/botan/src/scripts/ci/codecov.yml
new file mode 100644
index 0000000000..db26a6bd55
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/codecov.yml
@@ -0,0 +1,15 @@
+---
+
+# Documentation
+# https://github.com/codecov/support/wiki/Codecov-Yaml#full-yaml
+#
+# Validate this file
+# curl --data-binary @codecov.yml https://codecov.io/validate
+
+coverage:
+ status:
+ project:
+ default:
+ # Random seeds in tests lead to a +/-0.05% coverage span even for PRs
+ # that do not change source code
+ threshold: 0.05
diff --git a/comm/third_party/botan/src/scripts/ci/lgtm.yml b/comm/third_party/botan/src/scripts/ci/lgtm.yml
new file mode 100644
index 0000000000..fa2423858b
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/lgtm.yml
@@ -0,0 +1,31 @@
+
+queries:
+ - include: cpp/inconsistent-null-check
+ - include: cpp/overrunning-write
+ - include: cpp/unbounded-write
+ - include: cpp/offset-use-before-range-check
+ - include: cpp/incomplete-parity-check
+ - include: cpp/mistyped-function-arguments
+ - include: cpp/comparison-with-wider-type
+ - include: cpp/inconsistent-call-on-result
+ - include: cpp/incorrect-not-operator-usage
+ - include: cpp/stack-address-escape
+ - include: cpp/nested-loops-with-same-variable
+ - include: cpp/suspicious-allocation-size
+ - include: cpp/allocation-too-small
+ - include: cpp/uninitialized-local
+ - include: cpp/static-buffer-overflow
+ - include: cpp/suspicious-sizeof
+ - include: cpp/suspicious-pointer-scaling-void
+ - include: cpp/declaration-hides-variable
+ - include: cpp/empty-if
+ - include: cpp/unused-local-variable
+ - include: cpp/unused-static-function
+ - include: cpp/unused-static-variable
+ - exclude: cpp/fixme-comment
+
+extraction:
+ cpp:
+ configure:
+ command:
+ - ./configure.py --build-targets="static,shared,cli,tests,bogo_shim" --build-fuzzers=test --with-zlib --with-bzip2 --with-lzma --with-openssl --with-sqlite3 --no-store-vc-rev
diff --git a/comm/third_party/botan/src/scripts/ci/setup_appveyor.bat b/comm/third_party/botan/src/scripts/ci/setup_appveyor.bat
new file mode 100644
index 0000000000..5b48e78d8f
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/setup_appveyor.bat
@@ -0,0 +1,19 @@
+
+echo Current build setup CC="%CC%" PLATFORM="%PLATFORM%" TARGET="%TARGET%"
+
+if %CC% == VC2015 call "%ProgramFiles(x86)%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %PLATFORM%
+if %CC% == VC2017 call "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" %PLATFORM%
+if %CC% == VC2019 call "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat" %PLATFORM%
+if %CC% == MinGW set PATH=%PATH%;C:\msys64\mingw64\bin
+
+rem check compiler version
+if %CC% == MinGW g++ -v
+if not %CC% == MinGW cl
+
+appveyor DownloadFile https://github.com/mozilla/sccache/releases/download/%SCCACHE_VERSION%/sccache-%SCCACHE_VERSION%-x86_64-pc-windows-msvc.tar.gz
+tar -xf sccache-%SCCACHE_VERSION%-x86_64-pc-windows-msvc.tar.gz
+
+appveyor DownloadFile http://download.qt.io/official_releases/jom/jom.zip -FileName jom.zip
+7z e jom.zip
+
+set PATH=%PATH%;sccache-%SCCACHE_VERSION%-x86_64-pc-windows-msvc
diff --git a/comm/third_party/botan/src/scripts/ci/setup_gh_actions.sh b/comm/third_party/botan/src/scripts/ci/setup_gh_actions.sh
new file mode 100755
index 0000000000..33737ffe37
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/setup_gh_actions.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+# GitHub Actions setup script for Botan build
+#
+# (C) 2015,2017 Simon Warta
+# (C) 2016,2017,2018,2020 Jack Lloyd
+#
+# Botan is released under the Simplified BSD License (see license.txt)
+
+command -v shellcheck > /dev/null && shellcheck "$0" # Run shellcheck on this if available
+
+set -ex
+
+TARGET=$1
+
+if type -p "apt-get"; then
+ sudo apt-get -qq update
+ sudo apt-get -qq install ccache
+
+ if [ "$TARGET" = "valgrind" ]; then
+ sudo apt-get -qq install valgrind
+
+ elif [ "$TARGET" = "clang" ]; then
+ sudo apt-get -qq install clang
+
+ elif [ "$TARGET" = "cross-i386" ]; then
+ sudo apt-get -qq install g++-multilib linux-libc-dev libc6-dev-i386
+
+ elif [ "$TARGET" = "cross-win64" ]; then
+ sudo apt-get -qq install wine-development g++-mingw-w64-x86-64
+
+ elif [ "$TARGET" = "cross-arm64" ]; then
+ sudo apt-get -qq install qemu-user g++-aarch64-linux-gnu
+
+ elif [ "$TARGET" = "cross-ppc64" ]; then
+ sudo apt-get -qq install qemu-user g++-powerpc64le-linux-gnu
+
+ elif [ "$TARGET" = "cross-android-arm32" ] || [ "$TARGET" = "cross-android-arm64" ]; then
+ wget -nv https://dl.google.com/android/repository/"$ANDROID_NDK"-linux-x86_64.zip
+ unzip -qq "$ANDROID_NDK"-linux-x86_64.zip
+
+ elif [ "$TARGET" = "baremetal" ]; then
+ sudo apt-get -qq install gcc-arm-none-eabi libstdc++-arm-none-eabi-newlib
+
+ echo 'extern "C" void __sync_synchronize() {}' >> src/tests/main.cpp
+ echo 'extern "C" void __sync_synchronize() {}' >> src/cli/main.cpp
+
+ elif [ "$TARGET" = "lint" ]; then
+ sudo apt-get -qq install pylint
+
+ elif [ "$TARGET" = "coverage" ]; then
+ sudo apt-get -qq install g++-8 softhsm2 libtspi-dev lcov python-coverage libboost-all-dev gdb
+ pip install --user codecov
+ echo "$HOME/.local/bin" >> "$GITHUB_PATH"
+
+ git clone --depth 1 --branch runner-changes https://github.com/randombit/boringssl.git
+
+ sudo chgrp -R "$(id -g)" /var/lib/softhsm/ /etc/softhsm
+ sudo chmod g+w /var/lib/softhsm/tokens
+
+ softhsm2-util --init-token --free --label test --pin 123456 --so-pin 12345678
+ echo "PKCS11_LIB=/usr/lib/softhsm/libsofthsm2.so" >> "$GITHUB_ENV"
+
+ elif [ "$TARGET" = "docs" ]; then
+ sudo apt-get -qq install doxygen python-docutils python3-sphinx
+ fi
+else
+ HOMEBREW_NO_AUTO_UPDATE=1 brew install ccache
+fi
diff --git a/comm/third_party/botan/src/scripts/ci/setup_travis.sh b/comm/third_party/botan/src/scripts/ci/setup_travis.sh
new file mode 100755
index 0000000000..f039e574dd
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/setup_travis.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+# Travis CI setup script for Botan build
+#
+# (C) 2015,2017 Simon Warta
+# (C) 2016,2017,2018 Jack Lloyd
+
+command -v shellcheck > /dev/null && shellcheck "$0" # Run shellcheck on this if available
+
+set -ev
+
+if [ "$TRAVIS_OS_NAME" = "linux" ]; then
+
+ if [ "$TARGET" = "valgrind" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install valgrind
+
+ elif [ "$TARGET" = "gcc4.8" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install g++-4.8
+
+ elif [ "$TARGET" = "clang8" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install clang-8
+
+ elif [ "$TARGET" = "cross-i386" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install g++-multilib linux-libc-dev libc6-dev-i386
+
+ elif [ "$TARGET" = "cross-win64" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install wine-development g++-mingw-w64-x86-64
+
+ elif [ "$TARGET" = "cross-arm32" ]; then
+ sudo dpkg --add-architecture armhf
+ sudo apt-get -qq update
+ sudo apt-get install g++-arm-linux-gnueabihf
+ sudo apt-get install -o APT::Immediate-Configure=0 libc6:armhf libstdc++6:armhf
+
+ elif [ "$TARGET" = "cross-arm64" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install qemu-user g++-aarch64-linux-gnu
+
+ elif [ "$TARGET" = "cross-ppc32" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install qemu-user g++-powerpc-linux-gnu
+
+ elif [ "$TARGET" = "cross-ppc64" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install qemu-user g++-powerpc64le-linux-gnu
+
+ elif [ "$TARGET" = "cross-mips64" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install qemu-user g++-mips64-linux-gnuabi64
+
+ elif [ "$TARGET" = "cross-android-arm32" ] || [ "$TARGET" = "cross-android-arm64" ]; then
+ wget -nv https://dl.google.com/android/repository/"$ANDROID_NDK"-linux-x86_64.zip
+ unzip -qq "$ANDROID_NDK"-linux-x86_64.zip
+
+ elif [ "$TARGET" = "baremetal" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install gcc-arm-none-eabi libstdc++-arm-none-eabi-newlib
+
+ echo 'extern "C" void __sync_synchronize() {}' >> src/tests/main.cpp
+ echo 'extern "C" void __sync_synchronize() {}' >> src/cli/main.cpp
+
+ elif [ "$TARGET" = "lint" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install pylint
+
+ elif [ "$TARGET" = "coverage" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install g++-8 softhsm2 libtspi-dev lcov python-coverage libboost-all-dev gdb
+ pip install --user codecov
+ git clone --depth 1 --branch runner-changes-golang1.10 https://github.com/randombit/boringssl.git
+
+ sudo chgrp -R "$(id -g)" /var/lib/softhsm/ /etc/softhsm
+ sudo chmod g+w /var/lib/softhsm/tokens
+
+ softhsm2-util --init-token --free --label test --pin 123456 --so-pin 12345678
+
+ elif [ "$TARGET" = "docs" ]; then
+ sudo apt-get -qq update
+ sudo apt-get install doxygen python-docutils python3-sphinx
+ fi
+
+elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
+ HOMEBREW_NO_AUTO_UPDATE=1 brew install ccache
+fi
diff --git a/comm/third_party/botan/src/scripts/ci/travis.yml b/comm/third_party/botan/src/scripts/ci/travis.yml
new file mode 100644
index 0000000000..15609864c4
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci/travis.yml
@@ -0,0 +1,50 @@
+language: cpp
+os: linux
+dist: focal
+compiler: gcc
+
+jobs:
+ include:
+ - name: Linux ppc64le (GCC)
+ arch: ppc64le
+ env:
+ - TARGET="shared"
+
+ - name: Linux arm64 (GCC)
+ arch: arm64
+ env:
+ - TARGET="shared"
+
+ - name: Linux GCC 4.8
+ dist: bionic
+ env:
+ - TARGET="gcc4.8"
+ - EXTRA_FLAGS="--disable-werror"
+
+install:
+ - ./src/scripts/ci/setup_travis.sh
+
+script:
+ - ./src/scripts/ci_build.py --os=$TRAVIS_OS_NAME --cc=$CC --cc-bin=$CXX --without-pylint3 --pkcs11-lib=$PKCS11_LIB $EXTRA_FLAGS $TARGET
+
+# whitelist branches to avoid testing feature branches twice (as branch and as pull request)
+branches:
+ only:
+ - master
+ - release-2
+ - coverity_scan
+
+git:
+ depth: 10
+
+cache:
+ ccache: true
+
+addons:
+ coverity_scan:
+ project:
+ name: "randombit/botan"
+ notification_email: jack@randombit.net
+ build_command_prepend: "./configure.py --no-optimizations --with-zlib --with-openssl"
+ build_command: "make -j2"
+ branch_pattern: coverity_scan
diff --git a/comm/third_party/botan/src/scripts/ci_build.py b/comm/third_party/botan/src/scripts/ci_build.py
new file mode 100755
index 0000000000..93a5ec3626
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci_build.py
@@ -0,0 +1,620 @@
+#!/usr/bin/env python
+
+"""
+CI build script
+(C) 2017,2020 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import os
+import platform
+import subprocess
+import sys
+import time
+import tempfile
+import optparse # pylint: disable=deprecated-module
+
+def get_concurrency():
+ def_concurrency = 2
+
+ try:
+ import multiprocessing
+ return multiprocessing.cpu_count()
+ except ImportError:
+ return def_concurrency
+
+def build_targets(target, target_os):
+ if target in ['shared', 'minimized', 'bsi', 'nist']:
+ yield 'shared'
+ elif target in ['static', 'fuzzers', 'baremetal']:
+ yield 'static'
+ elif target_os in ['windows']:
+ yield 'shared'
+ elif target_os in ['ios', 'mingw']:
+ yield 'static'
+ else:
+ yield 'shared'
+ yield 'static'
+
+ yield 'cli'
+ yield 'tests'
+
+ if target in ['coverage']:
+ yield 'bogo_shim'
+
+def determine_flags(target, target_os, target_cpu, target_cc, cc_bin,
+ ccache, root_dir, pkcs11_lib, use_gdb, disable_werror, extra_cxxflags,
+ disabled_tests):
+ # pylint: disable=too-many-branches,too-many-statements,too-many-arguments,too-many-locals
+
+ """
+ Return the configure.py flags as well as make/test running prefixes
+ """
+ is_cross_target = target.startswith('cross-')
+
+ if target_os not in ['linux', 'osx', 'windows', 'freebsd']:
+ print('Error unknown OS %s' % (target_os))
+ return (None, None, None)
+
+ if is_cross_target:
+ if target_os == 'osx':
+ target_os = 'ios'
+ elif target == 'cross-win64':
+ target_os = 'mingw'
+ elif target in ['cross-android-arm32', 'cross-android-arm64']:
+ target_os = 'android'
+
+ if target_os == 'windows' and target_cc == 'gcc':
+ target_os = 'mingw'
+
+ if target == 'baremetal':
+ target_os = 'none'
+
+ make_prefix = []
+ test_prefix = []
+ test_cmd = [os.path.join(root_dir, 'botan-test')]
+
+ install_prefix = tempfile.mkdtemp(prefix='botan-install-')
+
+ flags = ['--prefix=%s' % (install_prefix),
+ '--cc=%s' % (target_cc),
+ '--os=%s' % (target_os),
+ '--build-targets=%s' % ','.join(build_targets(target, target_os))]
+
+ if ccache is not None:
+ flags += ['--no-store-vc-rev', '--compiler-cache=%s' % (ccache)]
+
+ if target_os != 'osx' and not disable_werror:
+ flags += ['--werror-mode']
+
+ if target_cpu is not None:
+ flags += ['--cpu=%s' % (target_cpu)]
+
+ for flag in extra_cxxflags:
+ flags += ['--extra-cxxflags=%s' % (flag)]
+
+ if target in ['minimized']:
+ flags += ['--minimized-build', '--enable-modules=system_rng,sha2_32,sha2_64,aes']
+
+ if target in ['bsi', 'nist']:
+ # tls is optional for bsi/nist but add it so verify tests work with these minimized configs
+ flags += ['--module-policy=%s' % (target), '--enable-modules=tls']
+
+ if target == 'docs':
+ flags += ['--with-doxygen', '--with-sphinx', '--with-rst2man']
+ test_cmd = None
+
+ if target == 'cross-win64':
+ # this test compiles under MinGW but fails when run under Wine
+ disabled_tests.append('certstor_system')
+
+ if target == 'coverage':
+ flags += ['--with-coverage-info', '--with-debug-info', '--test-mode']
+
+ if target == 'valgrind':
+ flags += ['--with-valgrind']
+ test_prefix = ['valgrind', '--error-exitcode=9', '-v', '--leak-check=full', '--show-reachable=yes']
+ # valgrind is single threaded anyway
+ test_cmd += ['--test-threads=1']
+ # valgrind is slow
+ slow_tests = [
+ 'cryptobox', 'dh_invalid', 'dh_kat', 'dh_keygen',
+ 'dl_group_gen', 'dlies', 'dsa_param', 'ecc_basemul',
+ 'ecdsa_verify_wycheproof', 'mce_keygen', 'passhash9',
+ 'rsa_encrypt', 'rsa_pss', 'rsa_pss_raw', 'scrypt',
+ 'srp6_kat', 'x509_path_bsi', 'xmss_keygen', 'xmss_sign',
+ 'pbkdf', 'argon2', 'bcrypt', 'bcrypt_pbkdf', 'compression',
+ 'ed25519_sign', 'elgamal_keygen', 'x509_path_rsa_pss']
+
+ disabled_tests += slow_tests
+
+ if target == 'fuzzers':
+ flags += ['--unsafe-fuzzer-mode']
+
+ if target in ['fuzzers', 'coverage']:
+ flags += ['--build-fuzzers=test']
+
+ if target in ['fuzzers', 'sanitizer']:
+ flags += ['--with-debug-asserts']
+
+ if target_cc in ['clang', 'gcc']:
+ flags += ['--enable-sanitizers=address,undefined']
+ else:
+ flags += ['--with-sanitizers']
+
+ if target in ['valgrind', 'sanitizer', 'fuzzers']:
+ flags += ['--disable-modules=locking_allocator']
+
+ if target == 'baremetal':
+ cc_bin = 'arm-none-eabi-c++'
+ flags += ['--cpu=arm32', '--disable-neon', '--without-stack-protector', '--ldflags=-specs=nosys.specs']
+ test_cmd = None
+
+ if is_cross_target:
+ if target_os == 'ios':
+ make_prefix = ['xcrun', '--sdk', 'iphoneos']
+ test_cmd = None
+ if target == 'cross-ios-arm64':
+ flags += ['--cpu=arm64', '--cc-abi-flags=-arch arm64 -stdlib=libc++']
+ else:
+ raise Exception("Unknown cross target '%s' for iOS" % (target))
+ elif target_os == 'android':
+
+ ndk = os.getenv('ANDROID_NDK')
+ if ndk is None:
+ raise Exception('Android CI build requires ANDROID_NDK env variable be set')
+
+ api_lvl = int(os.getenv('ANDROID_API_LEVEL', '0'))
+ if api_lvl == 0:
+ # If not set, arbitrarily choose API 16 (Android 4.1) for ARMv7 and 28 (Android 9) for AArch64
+ api_lvl = 16 if target == 'cross-android-arm32' else 28
+
+ toolchain_dir = os.path.join(ndk, 'toolchains/llvm/prebuilt/linux-x86_64/bin')
+ test_cmd = None
+
+ if target == 'cross-android-arm32':
+ cc_bin = os.path.join(toolchain_dir, 'armv7a-linux-androideabi%d-clang++' % (api_lvl))
+ flags += ['--cpu=armv7',
+ '--ar-command=%s' % (os.path.join(toolchain_dir, 'arm-linux-androideabi-ar'))]
+ elif target == 'cross-android-arm64':
+ cc_bin = os.path.join(toolchain_dir, 'aarch64-linux-android%d-clang++' % (api_lvl))
+ flags += ['--cpu=arm64',
+ '--ar-command=%s' % (os.path.join(toolchain_dir, 'aarch64-linux-android-ar'))]
+
+ if api_lvl < 18:
+ flags += ['--without-os-features=getauxval']
+ if api_lvl >= 28:
+ flags += ['--with-os-features=getentropy']
+
+ elif target == 'cross-i386':
+ flags += ['--cpu=x86_32']
+
+ elif target == 'cross-win64':
+ # MinGW in 16.04 is lacking std::mutex for an unknown reason
+ cc_bin = 'x86_64-w64-mingw32-g++'
+ flags += ['--cpu=x86_64', '--cc-abi-flags=-static',
+ '--ar-command=x86_64-w64-mingw32-ar', '--without-os-feature=threads']
+ test_cmd = [os.path.join(root_dir, 'botan-test.exe')] + test_cmd[1:]
+ test_prefix = ['wine']
+ else:
+ if target == 'cross-arm32':
+ flags += ['--cpu=armv7']
+ cc_bin = 'arm-linux-gnueabihf-g++'
+ # Currently arm32 CI only runs on native AArch64
+ #test_prefix = ['qemu-arm', '-L', '/usr/arm-linux-gnueabihf/']
+ elif target == 'cross-arm64':
+ flags += ['--cpu=aarch64']
+ cc_bin = 'aarch64-linux-gnu-g++'
+ test_prefix = ['qemu-aarch64', '-L', '/usr/aarch64-linux-gnu/']
+ elif target == 'cross-ppc32':
+ flags += ['--cpu=ppc32']
+ cc_bin = 'powerpc-linux-gnu-g++'
+ test_prefix = ['qemu-ppc', '-L', '/usr/powerpc-linux-gnu/']
+ elif target == 'cross-ppc64':
+ flags += ['--cpu=ppc64', '--with-endian=little']
+ cc_bin = 'powerpc64le-linux-gnu-g++'
+ test_prefix = ['qemu-ppc64le', '-cpu', 'POWER8', '-L', '/usr/powerpc64le-linux-gnu/']
+ elif target == 'cross-mips64':
+ flags += ['--cpu=mips64', '--with-endian=big']
+ cc_bin = 'mips64-linux-gnuabi64-g++'
+ test_prefix = ['qemu-mips64', '-L', '/usr/mips64-linux-gnuabi64/']
+ test_cmd.remove('simd_32') # no SIMD on MIPS
+ else:
+ raise Exception("Unknown cross target '%s' for Linux" % (target))
+ else:
+ # Flags specific to native targets
+
+ if target_os in ['osx', 'linux']:
+ flags += ['--with-bzip2', '--with-sqlite', '--with-zlib']
+
+ if target_os in ['osx', 'ios']:
+ flags += ['--with-commoncrypto']
+
+ if target == 'coverage':
+ flags += ['--with-boost']
+
+ if target_os == 'windows' and target in ['shared', 'static']:
+ # ./configure.py needs extra hand-holding for boost on windows
+ boost_root = os.environ.get('BOOST_ROOT')
+ boost_libs = os.environ.get('BOOST_LIBRARYDIR')
+ boost_system = os.environ.get('BOOST_SYSTEM_LIBRARY')
+
+ if boost_root and boost_libs and boost_system:
+ flags += ['--with-boost',
+ '--with-external-includedir', boost_root,
+ '--with-external-libdir', boost_libs,
+ '--boost-library-name', boost_system]
+
+ if target_os == 'linux':
+ flags += ['--with-lzma']
+
+ if target_os == 'linux':
+ if target not in ['sanitizer', 'valgrind', 'minimized']:
+ # Avoid OpenSSL when using dynamic checkers, or on OS X where it sporadically
+ # is not installed on the CI image
+ flags += ['--with-openssl']
+
+ if target in ['coverage']:
+ flags += ['--with-tpm']
+ test_cmd += ['--run-online-tests']
+ if pkcs11_lib and os.access(pkcs11_lib, os.R_OK):
+ test_cmd += ['--pkcs11-lib=%s' % (pkcs11_lib)]
+
+ if target in ['coverage', 'sanitizer']:
+ test_cmd += ['--run-long-tests']
+
+ flags += ['--cc-bin=%s' % (cc_bin)]
+
+ if test_cmd is None:
+ run_test_command = None
+ else:
+ if use_gdb:
+ disabled_tests.append("os_utils")
+
+ # render 'disabled_tests' array into test_cmd
+ if disabled_tests:
+ test_cmd += ['--skip-tests=%s' % (','.join(disabled_tests))]
+
+ if use_gdb:
+ (cmd, args) = test_cmd[0], test_cmd[1:]
+ run_test_command = test_prefix + ['gdb', cmd,
+ '-ex', 'run %s' % (' '.join(args)),
+ '-ex', 'bt',
+ '-ex', 'quit']
+ else:
+ run_test_command = test_prefix + test_cmd
+
+ return flags, run_test_command, make_prefix
+
+def run_cmd(cmd, root_dir):
+ """
+ Execute a command, die if it failed
+ """
+ print("Running '%s' ..." % (' '.join(cmd)))
+ sys.stdout.flush()
+
+ start = time.time()
+
+ cmd = [os.path.expandvars(elem) for elem in cmd]
+ sub_env = os.environ.copy()
+ sub_env['LD_LIBRARY_PATH'] = os.path.abspath(root_dir)
+ sub_env['DYLD_LIBRARY_PATH'] = os.path.abspath(root_dir)
+ sub_env['PYTHONPATH'] = os.path.abspath(os.path.join(root_dir, 'src/python'))
+ cwd = None
+
+ redirect_stdout = None
+ if len(cmd) >= 3 and cmd[-2] == '>':
+ redirect_stdout = open(cmd[-1], 'w')
+ cmd = cmd[:-2]
+ if len(cmd) > 1 and cmd[0].startswith('indir:'):
+ cwd = cmd[0][6:]
+ cmd = cmd[1:]
+ while len(cmd) > 1 and cmd[0].startswith('env:') and cmd[0].find('=') > 0:
+ env_key, env_val = cmd[0][4:].split('=')
+ sub_env[env_key] = env_val
+ cmd = cmd[1:]
+
+ proc = subprocess.Popen(cmd, cwd=cwd, close_fds=True, env=sub_env, stdout=redirect_stdout)
+ proc.communicate()
+
+ time_taken = int(time.time() - start)
+
+ if time_taken > 10:
+ print("Ran for %d seconds" % (time_taken))
+
+ if proc.returncode != 0:
+ print("Command '%s' failed with error code %d" % (' '.join(cmd), proc.returncode))
+
+ if cmd[0] not in ['lcov']:
+ sys.exit(proc.returncode)
+
+def default_os():
+ platform_os = platform.system().lower()
+ if platform_os == 'darwin':
+ return 'osx'
+ return platform_os
+
+def parse_args(args):
+ """
+ Parse arguments
+ """
+ parser = optparse.OptionParser()
+
+ parser.add_option('--os', default=default_os(),
+ help='Set the target os (default %default)')
+ parser.add_option('--cc', default='gcc',
+ help='Set the target compiler type (default %default)')
+ parser.add_option('--cc-bin', default=None,
+ help='Set path to compiler')
+ parser.add_option('--root-dir', metavar='D', default='.',
+ help='Set directory to execute from (default %default)')
+
+ parser.add_option('--make-tool', metavar='TOOL', default='make',
+ help='Specify tool to run to build source (default %default)')
+
+ parser.add_option('--extra-cxxflags', metavar='FLAGS', default=[], action='append',
+ help='Specify extra build flags')
+
+ parser.add_option('--cpu', default=None,
+ help='Specify a target CPU platform')
+
+ parser.add_option('--with-debug', action='store_true', default=False,
+ help='Include debug information')
+ parser.add_option('--amalgamation', action='store_true', default=False,
+ help='Build via amalgamation')
+ parser.add_option('--disable-shared', action='store_true', default=False,
+ help='Disable building shared libraries')
+ parser.add_option('--disabled-tests', metavar='DISABLED_TESTS', default=[], action='append',
+ help='Comma separated list of tests that should not be run')
+
+ parser.add_option('--branch', metavar='B', default=None,
+ help='Specify branch being built')
+
+ parser.add_option('--dry-run', action='store_true', default=False,
+ help='Just show commands to be executed')
+ parser.add_option('--build-jobs', metavar='J', default=get_concurrency(),
+ help='Set number of jobs to run in parallel (default %default)')
+
+ parser.add_option('--compiler-cache', default=None, metavar='CC',
+ help='Set a compiler cache to use (ccache, sccache)')
+
+ parser.add_option('--pkcs11-lib', default=os.getenv('PKCS11_LIB'), metavar='LIB',
+ help='Set PKCS11 lib to use for testing')
+
+ parser.add_option('--with-python3', dest='use_python3', action='store_true', default=None,
+ help='Enable using python3')
+ parser.add_option('--without-python3', dest='use_python3', action='store_false',
+ help='Disable using python3')
+
+ parser.add_option('--with-pylint3', dest='use_pylint3', action='store_true', default=True,
+ help='Enable using python3 pylint')
+ parser.add_option('--without-pylint3', dest='use_pylint3', action='store_false',
+ help='Disable using python3 pylint')
+
+ parser.add_option('--disable-werror', action='store_true', default=False,
+ help='Allow warnings to compile')
+
+ parser.add_option('--run-under-gdb', dest='use_gdb', action='store_true', default=False,
+ help='Run test suite under gdb and capture backtrace')
+
+ return parser.parse_args(args)
+
+def have_prog(prog):
+ """
+ Check if some named program exists in the path
+ """
+ for path in os.environ['PATH'].split(os.pathsep):
+ exe_file = os.path.join(path, prog)
+ if os.path.exists(exe_file) and os.access(exe_file, os.X_OK):
+ return True
+ return False
+
+def main(args=None):
+ # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements
+ """
+ Parse options, do the things
+ """
+
+ if os.getenv('COVERITY_SCAN_BRANCH') == '1':
+ print('Skipping build COVERITY_SCAN_BRANCH set in environment')
+ return 0
+
+ if args is None:
+ args = sys.argv
+ print("Invoked as '%s'" % (' '.join(args)))
+ (options, args) = parse_args(args)
+
+ if len(args) != 2:
+ print('Usage: %s [options] target' % (args[0]))
+ return 1
+
+ target = args[1]
+
+ if options.use_python3 is None:
+ use_python3 = have_prog('python3')
+ else:
+ use_python3 = options.use_python3
+
+ py_interp = 'python'
+ if use_python3:
+ py_interp = 'python3'
+
+ if options.cc_bin is None:
+ if options.cc == 'gcc':
+ options.cc_bin = 'g++'
+ elif options.cc == 'clang':
+ options.cc_bin = 'clang++'
+ elif options.cc == 'msvc':
+ options.cc_bin = 'cl'
+ else:
+ print('Error unknown compiler %s' % (options.cc))
+ return 1
+
+ if options.compiler_cache is None and options.cc != 'msvc':
+ # Autodetect ccache
+ if have_prog('ccache'):
+ options.compiler_cache = 'ccache'
+
+ if options.compiler_cache not in [None, 'ccache', 'sccache']:
+ raise Exception("Don't know about %s as a compiler cache" % (options.compiler_cache))
+
+ root_dir = options.root_dir
+
+ if not os.access(root_dir, os.R_OK):
+ raise Exception('Bad root dir setting, dir %s not readable' % (root_dir))
+
+ cmds = []
+
+ if target == 'lint':
+
+ pylint_rc = '--rcfile=%s' % (os.path.join(root_dir, 'src/configs/pylint.rc'))
+ pylint_flags = [pylint_rc, '--reports=no']
+
+ # Some disabled rules specific to Python3
+ # useless-object-inheritance: complains about code still useful in Python2
+ py3_flags = '--disable=useless-object-inheritance'
+
+ py_scripts = [
+ 'configure.py',
+ 'src/python/botan2.py',
+ 'src/scripts/ci_build.py',
+ 'src/scripts/install.py',
+ 'src/scripts/ci_check_install.py',
+ 'src/scripts/dist.py',
+ 'src/scripts/cleanup.py',
+ 'src/scripts/check.py',
+ 'src/scripts/build_docs.py',
+ 'src/scripts/website.py',
+ 'src/scripts/bench.py',
+ 'src/scripts/test_python.py',
+ 'src/scripts/test_fuzzers.py',
+ 'src/scripts/test_cli.py',
+ 'src/scripts/python_unittests.py',
+ 'src/scripts/python_unittests_unix.py']
+
+ full_paths = [os.path.join(root_dir, s) for s in py_scripts]
+
+ if use_python3 and options.use_pylint3:
+ cmds.append(['python3', '-m', 'pylint'] + pylint_flags + [py3_flags] + full_paths)
+
+ else:
+ config_flags, run_test_command, make_prefix = determine_flags(
+ target, options.os, options.cpu, options.cc,
+ options.cc_bin, options.compiler_cache, root_dir,
+ options.pkcs11_lib, options.use_gdb, options.disable_werror,
+ options.extra_cxxflags, options.disabled_tests)
+
+ cmds.append([py_interp, os.path.join(root_dir, 'configure.py')] + config_flags)
+
+ make_cmd = [options.make_tool]
+ if root_dir != '.':
+ make_cmd += ['-C', root_dir]
+ if options.build_jobs > 1 and options.make_tool != 'nmake':
+ make_cmd += ['-j%d' % (options.build_jobs)]
+ make_cmd += ['-k']
+
+ if target == 'docs':
+ cmds.append(make_cmd + ['docs'])
+ else:
+ if options.compiler_cache is not None:
+ cmds.append([options.compiler_cache, '--show-stats'])
+
+ make_targets = ['libs', 'tests', 'cli']
+
+ if target in ['coverage', 'fuzzers']:
+ make_targets += ['fuzzer_corpus_zip', 'fuzzers']
+
+ if target in ['coverage']:
+ make_targets += ['bogo_shim']
+
+ cmds.append(make_prefix + make_cmd + make_targets)
+
+ if options.compiler_cache is not None:
+ cmds.append([options.compiler_cache, '--show-stats'])
+
+ if run_test_command is not None:
+ cmds.append(run_test_command)
+
+ if target == 'coverage':
+ runner_dir = os.path.abspath(os.path.join(root_dir, 'boringssl', 'ssl', 'test', 'runner'))
+
+ cmds.append(['indir:%s' % (runner_dir),
+ 'go', 'test', '-pipe',
+ '-num-workers', str(4*get_concurrency()),
+ '-shim-path', os.path.abspath(os.path.join(root_dir, 'botan_bogo_shim')),
+ '-shim-config', os.path.abspath(os.path.join(root_dir, 'src', 'bogo_shim', 'config.json'))])
+
+ if target in ['coverage', 'fuzzers']:
+ cmds.append([py_interp, os.path.join(root_dir, 'src/scripts/test_fuzzers.py'),
+ os.path.join(root_dir, 'fuzzer_corpus'),
+ os.path.join(root_dir, 'build/fuzzer')])
+
+ if target in ['shared', 'coverage'] and options.os != 'windows':
+ botan_exe = os.path.join(root_dir, 'botan-cli.exe' if options.os == 'windows' else 'botan')
+
+ args = ['--threads=%d' % (options.build_jobs)]
+ test_scripts = ['test_cli.py', 'test_cli_crypt.py']
+ for script in test_scripts:
+ cmds.append([py_interp, os.path.join(root_dir, 'src/scripts', script)] +
+ args + [botan_exe])
+
+ python_tests = os.path.join(root_dir, 'src/scripts/test_python.py')
+
+ if target in ['shared', 'coverage']:
+
+ if options.os == 'windows':
+ if options.cpu == 'x86':
+ # Python on AppVeyor is a 32-bit binary so only test for 32-bit
+ cmds.append([py_interp, '-b', python_tests])
+ else:
+ if use_python3:
+ cmds.append(['python3', '-b', python_tests])
+
+ if target in ['shared', 'static', 'bsi', 'nist']:
+ cmds.append(make_cmd + ['install'])
+ build_config = os.path.join(root_dir, 'build', 'build_config.json')
+ cmds.append([py_interp, os.path.join(root_dir, 'src/scripts/ci_check_install.py'), build_config])
+
+ if target in ['coverage']:
+ if not have_prog('lcov'):
+ print('Error: lcov not found in PATH (%s)' % (os.getenv('PATH')))
+ return 1
+
+ if not have_prog('gcov'):
+ print('Error: gcov not found in PATH (%s)' % (os.getenv('PATH')))
+ return 1
+
+ cov_file = 'coverage.info'
+ raw_cov_file = 'coverage.info.raw'
+
+ cmds.append(['lcov', '--capture', '--directory', options.root_dir,
+ '--output-file', raw_cov_file])
+ cmds.append(['lcov', '--remove', raw_cov_file, '/usr/*', '--output-file', cov_file])
+ cmds.append(['lcov', '--list', cov_file])
+
+ if have_prog('coverage'):
+ cmds.append(['coverage', 'run', '--branch',
+ '--rcfile', os.path.join(root_dir, 'src/configs/coverage.rc'),
+ python_tests])
+
+ if have_prog('codecov'):
+ # If codecov exists assume we are in CI and report to codecov.io
+ cmds.append(['codecov', '>', 'codecov_stdout.log'])
+ else:
+ # Otherwise generate a local HTML report
+ cmds.append(['genhtml', cov_file, '--output-directory', 'lcov-out'])
+
+ cmds.append(make_cmd + ['clean'])
+ cmds.append(make_cmd + ['distclean'])
+
+ for cmd in cmds:
+ if options.dry_run:
+ print('$ ' + ' '.join(cmd))
+ else:
+ run_cmd(cmd, root_dir)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/ci_check_install.py b/comm/third_party/botan/src/scripts/ci_check_install.py
new file mode 100755
index 0000000000..c0a3727629
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ci_check_install.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Botan CI check installation script
+This script is used to validate the results of `make install`
+
+(C) 2020 Jack Lloyd, René Meusel, Hannes Rantzsch
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import os
+import sys
+import json
+import re
+
+def verify_library(build_config):
+ lib_dir = build_config['libdir']
+ if not os.path.isdir(lib_dir):
+ print('Error: libdir "%s" is not a directory' % lib_dir)
+ return False
+
+ found_libs = set([])
+
+ major_version = int(build_config["version_major"])
+
+ if build_config['compiler'] == 'msvc':
+ expected_lib_format = r'^botan\.(dll|lib)$'
+ elif build_config['os'] == 'macos':
+ expected_lib_format = r'^libbotan-%d\.(a|dylib)$' % (major_version)
+ else:
+ expected_lib_format = r'^libbotan-%d\.(a|so)$' % (major_version)
+
+ lib_re = re.compile(expected_lib_format)
+
+ # Unlike the include dir this may have other random libs in it
+ for (_, _, filenames) in os.walk(lib_dir):
+ for filename in filenames:
+ if lib_re.match(filename) is not None:
+ found_libs.add(filename)
+
+ if len(found_libs) == 0:
+ print("Could not find any libraries from us")
+ return False
+
+ # This should match up the count and names of libraries installed
+ # vs the build configuration (eg static lib installed or not)
+
+ return True
+
+def verify_includes(build_config):
+ include_dir = build_config['installed_include_dir']
+ if not os.path.isdir(include_dir):
+ print('Error: installed_include_dir "%s" is not a directory' % include_dir)
+ return False
+
+ expected_headers = set(build_config['public_headers'] + build_config['external_headers'])
+ found_headers = set([])
+
+ for (_, _, filenames) in os.walk(include_dir):
+ for filename in filenames:
+ found_headers.add(filename)
+
+ if found_headers != expected_headers:
+ missing = expected_headers - found_headers
+ extra = found_headers - expected_headers
+
+ if len(missing) > 0:
+ print("Missing expected headers: %s" % (" ".join(sorted(missing))))
+
+ if len(extra) > 0:
+ print("Have unexpected headers: %s" % (" ".join(sorted(extra))))
+ return False
+
+ return True
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ if len(args) < 2:
+ print("Usage: %s <build_config.json>" % args[0])
+ return 1
+
+ with open(os.path.join(args[1])) as f:
+ build_config = json.load(f)
+
+ install_prefix = build_config['prefix']
+
+ if not os.path.isdir(install_prefix):
+ print('Error: install_prefix "%s" is not a directory' % install_prefix)
+ return 1
+
+ if not verify_includes(build_config):
+ return 1
+
+ if not verify_library(build_config):
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/cleanup.py b/comm/third_party/botan/src/scripts/cleanup.py
new file mode 100755
index 0000000000..3e8142bab0
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/cleanup.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+
+"""
+Implements the "make clean" target
+
+(C) 2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import os
+import sys
+import stat
+import re
+import optparse # pylint: disable=deprecated-module
+import logging
+import json
+import shutil
+import errno
+
+def remove_dir(d):
+ try:
+ if os.access(d, os.X_OK):
+ logging.debug('Removing directory "%s"', d)
+ shutil.rmtree(d)
+ else:
+ logging.debug('Directory %s was missing', d)
+ except Exception as e: # pylint: disable=broad-except
+ logging.error('Failed removing directory "%s": %s', d, e)
+
+def remove_file(f):
+ try:
+ logging.debug('Removing file "%s"', f)
+ os.unlink(f)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ logging.error('Failed removing file "%s": %s', f, e)
+
+def remove_all_in_dir(d):
+ if os.access(d, os.X_OK):
+ logging.debug('Removing all files in directory "%s"', d)
+
+ for f in os.listdir(d):
+ full_path = os.path.join(d, f)
+ mode = os.lstat(full_path).st_mode
+
+ if stat.S_ISDIR(mode):
+ remove_dir(full_path)
+ else:
+ remove_file(full_path)
+
+def parse_options(args):
+ parser = optparse.OptionParser()
+ parser.add_option('--build-dir', default='build', metavar='DIR',
+ help='specify build dir to clean (default %default)')
+
+ parser.add_option('--distclean', action='store_true', default=False,
+ help='clean everything')
+ parser.add_option('--verbose', action='store_true', default=False,
+ help='noisy logging')
+
+ (options, args) = parser.parse_args(args)
+
+ if len(args) > 1:
+ raise Exception("Unknown arguments")
+
+ return options
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ options = parse_options(args)
+
+ logging.basicConfig(stream=sys.stderr,
+ format='%(levelname) 7s: %(message)s',
+ level=logging.DEBUG if options.verbose else logging.INFO)
+
+ build_dir = options.build_dir
+
+ if not os.access(build_dir, os.X_OK):
+ logging.debug('No build directory found')
+ # No build dir: clean enough!
+ return 0
+
+ build_config_path = os.path.join(build_dir, 'build_config.json')
+ build_config_str = None
+
+ try:
+ build_config_file = open(build_config_path)
+ build_config_str = build_config_file.read()
+ build_config_file.close()
+ except Exception: # pylint: disable=broad-except
+ # Have to do a generic catch, as a different exception type is thrown in Python 2
+ logging.error("Unable to access build_config.json in build dir")
+ return 1
+
+ build_config = json.loads(build_config_str)
+
+ if options.distclean:
+ build_dir = build_config['build_dir']
+ remove_file(build_config['makefile_path'])
+ remove_dir(build_dir)
+ else:
+ for dir_type in ['libobj_dir', 'cliobj_dir', 'testobj_dir', 'handbook_output_dir', 'doc_output_dir_doxygen']:
+ dir_path = build_config[dir_type]
+ if dir_path:
+ remove_all_in_dir(dir_path)
+
+ remove_file(build_config['doc_stamp_file'])
+
+ remove_file(build_config['cli_exe'])
+ remove_file(build_config['test_exe'])
+
+ lib_basename = build_config['lib_prefix'] + build_config['libname']
+ matches_libname = re.compile('^' + lib_basename + '.([a-z]+)((\\.[0-9\\.]+)|$)')
+
+ known_suffix = ['a', 'so', 'dll', 'manifest', 'exp']
+
+ for f in os.listdir(build_config['out_dir']):
+ match = matches_libname.match(f)
+ if match and match.group(1) in known_suffix:
+ remove_file(os.path.join(build_config['out_dir'], f))
+
+ if options.distclean:
+ if 'generated_files' in build_config:
+ for f in build_config['generated_files'].split(' '):
+ remove_file(f)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/comba.py b/comm/third_party/botan/src/scripts/comba.py
new file mode 100755
index 0000000000..309dca0821
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/comba.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python3
+
+import sys
+import datetime
+
+# (C) 2011,2014,2015,2016 Jack Lloyd
+# Botan is released under the Simplified BSD License (see license.txt)
+
+# Used to generate src/lib/math/mp/mp_comba.cpp
+
+def comba_indexes(N):
+
+ indexes = []
+
+ for i in range(0, 2*N):
+ x = []
+
+ for j in range(max(0, i-N+1), min(N, i+1)):
+ x += [(j,i-j)]
+ indexes += [sorted(x)]
+
+ return indexes
+
+def comba_sqr_indexes(N):
+
+ indexes = []
+
+ for i in range(0, 2*N):
+ x = []
+
+ for j in range(max(0, i-N+1), min(N, i+1)):
+ if j < i-j:
+ x += [(j,i-j)]
+ else:
+ x += [(i-j,j)]
+ indexes += [sorted(x)]
+
+ return indexes
+
+def comba_multiply_code(N):
+ indexes = comba_indexes(N)
+
+ w2 = 'w2'
+ w1 = 'w1'
+ w0 = 'w0'
+
+ for (i,idx) in zip(range(0, len(indexes)), indexes):
+ for pair in idx:
+ print(" word3_muladd(&%s, &%s, &%s, x[%2d], y[%2d]);" % (w2, w1, w0, pair[0], pair[1]))
+
+ if i < 2*N-2:
+ print(" z[%2d] = %s; %s = 0;\n" % (i, w0, w0))
+ else:
+ print(" z[%2d] = %s;" % (i, w0))
+ (w0,w1,w2) = (w1,w2,w0)
+ #print("z[%2d] = w0; w0 = w1; w1 = w2; w2 = 0;" % (i))
+
+def comba_square_code(N):
+ indexes = comba_sqr_indexes(N)
+
+ w2 = 'w2'
+ w1 = 'w1'
+ w0 = 'w0'
+
+ for (rnd,idx) in zip(range(0, len(indexes)), indexes):
+ for (i,pair) in zip(range(0, len(idx)), idx):
+ if pair[0] == pair[1]:
+ print(" word3_muladd (&%s, &%s, &%s, x[%2d], x[%2d]);" % (w2, w1, w0, pair[0], pair[1]))
+ elif i % 2 == 0:
+ print(" word3_muladd_2(&%s, &%s, &%s, x[%2d], x[%2d]);" % (w2, w1, w0, pair[0], pair[1]))
+
+ if rnd < 2*N-2:
+ print(" z[%2d] = %s; %s = 0;\n" % (rnd, w0, w0))
+ else:
+ print(" z[%2d] = %s;" % (rnd, w0))
+
+ (w0,w1,w2) = (w1,w2,w0)
+
+def main(args = None):
+ if args is None:
+ args = sys.argv
+
+ if len(args) <= 1:
+ sizes = [4, 6, 8, 9, 16, 24]
+ else:
+ sizes = map(int, args[1:])
+
+ print("""/*
+* Comba Multiplication and Squaring
+*
+* This file was automatically generated by %s on %s
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/internal/mp_core.h>
+#include <botan/internal/mp_asmi.h>
+
+namespace Botan {
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d")))
+
+ for n in sizes:
+ print("/*\n* Comba %dx%d Squaring\n*/" % (n, n))
+ print("void bigint_comba_sqr%d(word z[%d], const word x[%d])" % (n, 2*n, n))
+ print(" {")
+ print(" word w2 = 0, w1 = 0, w0 = 0;\n")
+
+ comba_square_code(n)
+
+ print(" }\n")
+
+ print("/*\n* Comba %dx%d Multiplication\n*/" % (n, n))
+ print("void bigint_comba_mul%d(word z[%d], const word x[%d], const word y[%d])" % (n, 2*n, n, n))
+ print(" {")
+ print(" word w2 = 0, w1 = 0, w0 = 0;\n")
+
+ comba_multiply_code(n)
+
+ print(" }\n")
+
+ print("}")
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/create_corpus_zip.py b/comm/third_party/botan/src/scripts/create_corpus_zip.py
new file mode 100755
index 0000000000..5faee3b526
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/create_corpus_zip.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+
+# This is used to create fuzzer corpus zip files
+
+# This is primarily used by OSS-Fuzz but might be useful if you were
+# deploying the binaries in a custom fuzzer deployment system.
+
+import sys
+import os
+import zipfile
+import stat
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ if len(args) != 2 and len(args) != 3:
+ print("Usage: %s corpus_dir <output_dir>" % (args[0]))
+ return 1
+
+ root_dir = args[1]
+
+ if len(args) == 3:
+ output_dir = args[2]
+ else:
+ output_dir = ''
+
+ if not os.access(root_dir, os.R_OK):
+ print("Error could not access directory '%s'" % (root_dir))
+ return 1
+
+ for corpus_dir in os.listdir(root_dir):
+ if corpus_dir == '.git':
+ continue
+ subdir = os.path.join(root_dir, corpus_dir)
+ if not stat.S_ISDIR(os.stat(subdir).st_mode):
+ continue
+
+ zipfile_path = os.path.join(output_dir, '%s.zip' % (corpus_dir))
+ zf = zipfile.ZipFile(zipfile_path, 'w', zipfile.ZIP_DEFLATED)
+ for f in os.listdir(subdir):
+ zf.write(os.path.join(subdir, f), f)
+ zf.close()
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/dist.py b/comm/third_party/botan/src/scripts/dist.py
new file mode 100755
index 0000000000..ce072ec105
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/dist.py
@@ -0,0 +1,466 @@
+#!/usr/bin/env python
+
+"""
+Release script for botan (https://botan.randombit.net/)
+
+This script requires Python 2.7 or 3.6
+
+(C) 2011,2012,2013,2015,2016,2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import datetime
+import errno
+import hashlib
+import io
+import logging
+import optparse # pylint: disable=deprecated-module
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import time
+import traceback
+
+# This is horrible, but there is no way to override tarfile's use of time.time
+# in setting the gzip header timestamp, which breaks deterministic archives
+
# Timestamp written into gzip headers; main() sets it to the release epoch
GZIP_HEADER_TIME = 0

def fake_time():
    # Deterministic replacement for time.time() so archives are reproducible
    return GZIP_HEADER_TIME
time.time = fake_time
+
+
def check_subprocess_results(subproc, name):
    """Wait for *subproc* to finish and return its raw stdout bytes.

    On a non-zero exit status, logs any captured output and raises an
    Exception naming *name*. On success, stderr output (if any) is
    logged as a warning.
    """
    (raw_stdout, raw_stderr) = subproc.communicate()

    stderr_text = raw_stderr.decode('utf-8')

    if subproc.returncode == 0:
        if stderr_text != '':
            logging.warning(stderr_text)
        return raw_stdout

    # Failure: surface whatever the child printed, then abort
    stdout_text = raw_stdout.decode('utf-8')
    for output in (stdout_text, stderr_text):
        if output != '':
            logging.error(output)
    raise Exception('Running %s failed' % (name))
+
def run_git(args):
    """Run 'git' with *args*, returning raw stdout bytes.

    Raises (via check_subprocess_results) if git exits non-zero.
    """
    cmd = ['git'] + args
    logging.debug('Running %s' % (' '.join(cmd)))
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return check_subprocess_results(proc, 'git')
+
def maybe_gpg(val):
    """Decode git output bytes, unwrapping a clearsigned PGP block if present.

    For clearsigned output the payload is taken to be the second-to-last
    line; otherwise the whole text is returned stripped.
    """
    text = val.decode('ascii')
    if 'BEGIN PGP SIGNATURE' not in text:
        return text.strip()
    return text.split('\n')[-2]
+
def rel_time_to_epoch(year, month, day, hour, minute, second):
    """Convert a naive UTC calendar time to seconds since the Unix epoch."""
    epoch = datetime.datetime(1970, 1, 1)
    moment = datetime.datetime(year, month, day, hour, minute, second)
    return (moment - epoch).total_seconds()
+
def datestamp(tag):
    """Return (YYYYMMDD int, epoch seconds) of the commit *tag* points at.

    Returns (0, 0) when git's timestamp cannot be parsed: callers always
    unpack two values (and main() checks for the 0/0 sentinel), so the
    previous scalar `return 0` would have raised a confusing TypeError
    instead of the intended error message.
    """
    ts = maybe_gpg(run_git(['show', '--no-patch', '--format=%ai', tag]))

    ts_matcher = re.compile(r'^(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}) .*')

    logging.debug('Git returned timestamp of %s for tag %s' % (ts, tag))
    match = ts_matcher.match(ts)

    if match is None:
        logging.error('Failed parsing timestamp "%s" of tag %s' % (ts, tag))
        return 0, 0

    rel_date = int(match.group(1) + match.group(2) + match.group(3))
    rel_epoch = rel_time_to_epoch(*[int(match.group(i)) for i in range(1, 7)])

    return rel_date, rel_epoch
+
def revision_of(tag):
    """Return the full commit hash *tag* resolves to, as a string."""
    return maybe_gpg(run_git(['show', '--no-patch', '--format=%H', tag]))
+
def extract_revision(revision, to):
    """Extract the tree at *revision* into directory *to* under the CWD."""
    # 'git archive' writes a tarball to stdout; unpack it from memory
    tar_val = run_git(['archive', '--format=tar', '--prefix=%s/' % (to), revision])
    tar_f = tarfile.open(fileobj=io.BytesIO(tar_val))
    tar_f.extractall()
+
def gpg_sign(keyid, passphrase_file, files, detached=True):
    """Sign each of *files* with PGP key *keyid*; return the .asc names.

    detached=True makes armored detached signatures, otherwise the file
    is clearsigned. passphrase_file, if given, supplies the key's
    passphrase for batch mode. Raises if gpg exits non-zero.
    """

    options = ['--armor', '--detach-sign'] if detached else ['--clearsign']

    gpg_cmd = ['gpg', '--batch'] + options + ['--local-user', keyid]
    if passphrase_file is not None:
        # Splice right after 'gpg' so it precedes the signing options
        gpg_cmd[1:1] = ['--passphrase-file', passphrase_file]

    for filename in files:
        logging.info('Signing %s using PGP id %s' % (filename, keyid))

        cmd = gpg_cmd + [filename]

        logging.debug('Running %s' % (' '.join(cmd)))

        gpg = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)

        check_subprocess_results(gpg, 'gpg')

    # gpg writes <input>.asc for both detached and clearsign modes
    return [filename + '.asc' for filename in files]
+
def parse_args(args):
    """Build the release-script option parser; return (options, leftover)."""
    parser = optparse.OptionParser(
        "usage: %prog [options] <version tag>\n" +
        " %prog [options] snapshot <branch>"
    )

    add = parser.add_option

    add('--verbose', action='store_true',
        default=False, help='Extra debug output')

    add('--quiet', action='store_true',
        default=False, help='Only show errors')

    add('--output-dir', metavar='DIR', default='.',
        help='Where to place output (default %default)')

    add('--print-output-names', action='store_true',
        help='Print output archive filenames to stdout')

    add('--archive-types', metavar='LIST', default='txz',
        help='Set archive types to generate (default %default)')

    add('--pgp-key-id', metavar='KEYID',
        default='EFBADFBC',
        help='PGP signing key (default %default, "none" to disable)')

    add('--pgp-passphrase-file', metavar='FILE',
        default=None,
        help='PGP signing key passphrase file')

    add('--write-hash-file', metavar='FILE', default=None,
        help='Write a file with checksums')

    return parser.parse_args(args)
+
def remove_file_if_exists(fspath):
    """Delete *fspath*, treating an already-missing file as success."""
    try:
        os.unlink(fspath)
    except OSError as e:
        if e.errno == errno.ENOENT:
            return  # nothing to remove
        raise
+
def rewrite_version_file(version_file, target_version, snapshot_branch, rev_id, rel_date):
    """Patch the extracted version file with release metadata.

    Fills in the VC revision, datestamp, and release type fields in place.
    For tagged releases (snapshot_branch is None) it also parses the
    major/minor/patch/suffix fields and raises Exception if they do not
    reassemble into *target_version*.
    """

    if snapshot_branch:
        assert target_version == snapshot_branch

    contents = open(version_file).readlines()

    version_re = re.compile('release_(major|minor|patch) = ([0-9]+)')
    version_suffix_re = re.compile('release_suffix = \'(-(alpha|beta|rc)[0-9]+)\'')

    # NOTE(review): the parameter shadows the outer target_version; both
    # always hold the same value at the single call site below
    def content_rewriter(target_version):
        version_info = {}

        release_type = 'release'

        # Not included in old version files so set a default
        version_info["suffix"] = ""

        for line in contents:

            if not snapshot_branch:
                match = version_re.match(line)
                if match:
                    version_info[match.group(1)] = int(match.group(2))

                match = version_suffix_re.match(line)
                if match:
                    suffix = match.group(1)
                    version_info['suffix'] = suffix
                    # Suffix like -alpha2/-beta1/-rc3 determines release type
                    if suffix.find('alpha') >= 0:
                        release_type = 'alpha'
                    elif suffix.find('beta') >= 0:
                        release_type = 'beta'
                    elif suffix.find('rc') >= 0:
                        release_type = 'release candidate'

            # Replace the placeholder lines with real release metadata
            if line == 'release_vc_rev = None\n':
                yield 'release_vc_rev = \'git:%s\'\n' % (rev_id)
            elif line == 'release_datestamp = 0\n':
                yield 'release_datestamp = %d\n' % (rel_date)
            elif line == "release_type = \'unreleased\'\n":
                if target_version == snapshot_branch:
                    yield "release_type = 'snapshot:%s'\n" % (snapshot_branch)
                else:
                    yield "release_type = '%s'\n" % (release_type)
            else:
                yield line

        # Sanity check: the version recorded in the file must match the tag
        if not snapshot_branch:
            for req_var in ["major", "minor", "patch", "suffix"]:
                if req_var not in version_info.keys():
                    raise Exception('Missing version field for %s in version file' % (req_var))

            marked_version = "%d.%d.%d%s" % (version_info["major"],
                                             version_info["minor"],
                                             version_info["patch"],
                                             version_info["suffix"])

            if marked_version != target_version:
                raise Exception('Release version file %s does not match tagged version %s' % (
                    marked_version, target_version))

    new_contents = ''.join(list(content_rewriter(target_version)))
    open(version_file, 'w').write(new_contents)
+
def write_archive(version, output_basename, archive_type, rel_epoch, all_files, hash_file):
    """Write *all_files* into a deterministic tarball; return its filename.

    Owner/group names and mtimes are normalized so identical trees
    produce byte-identical archives. The SHA-256 of the result is logged
    and, when *hash_file* is not None, appended to it.
    """
    # pylint: disable=too-many-locals
    def archive_suffix(archive_type):
        if archive_type == 'tgz':
            return 'tgz'
        elif archive_type == 'tbz':
            return 'tar.bz2'
        elif archive_type == 'txz':
            return 'tar.xz'
        elif archive_type == 'tar':
            return 'tar'
        else:
            raise Exception("Unknown archive type '%s'" % (archive_type))

    output_archive = output_basename + '.' + archive_suffix(archive_type)
    logging.info('Writing archive "%s"' % (output_archive))

    remove_file_if_exists(output_archive)
    remove_file_if_exists(output_archive + '.asc')

    def write_mode(archive_type):
        if archive_type == 'tgz':
            return 'w:gz'
        elif archive_type == 'tbz':
            return 'w:bz2'
        elif archive_type == 'txz':
            return 'w:xz'
        elif archive_type == 'tar':
            return 'w'
        else:
            raise Exception("Unknown archive type '%s'" % (archive_type))

    # gzip format embeds the original filename, tarfile.py does the wrong
    # thing unless the output name ends in .gz. So pass an explicit
    # fileobj in that case, and supply a name in the form tarfile expects.
    # NOTE(review): this rebinds the helper name archive_suffix to a string;
    # harmless only because the helper is never called again below.
    archive_suffix = '.tar.gz' if archive_type == 'tgz' else '.tar'

    def archive_format(version):
        # A change in Python meant that 2.14 and 2.15 were released with a
        # tarfile using POSIX pax format (the new default for tarfile module)
        # instead of the previously used GNU format.
        if version in ['2.14.0', '2.15.0']:
            return tarfile.PAX_FORMAT
        else:
            return tarfile.GNU_FORMAT

    archive = tarfile.open(output_basename + archive_suffix,
                           write_mode(archive_type),
                           format=archive_format(version),
                           fileobj=open(output_archive, 'wb'))

    for f in all_files:
        # Normalize metadata so the archive is reproducible
        tarinfo = archive.gettarinfo(f)
        tarinfo.uid = 500
        tarinfo.gid = 500
        tarinfo.uname = "botan"
        tarinfo.gname = "botan"
        tarinfo.mtime = rel_epoch
        archive.addfile(tarinfo, open(f, 'rb'))
    archive.close()

    archive_contents = open(output_archive, 'rb').read()

    sha256 = hashlib.new('sha256')
    sha256.update(archive_contents)
    archive_hash = sha256.hexdigest().upper()

    logging.info('%s is %.2f MiB' % (output_archive, len(archive_contents) / (1024.0*1024.0)))
    logging.info('SHA-256(%s) = %s' % (output_archive, archive_hash))
    if hash_file is not None:
        hash_file.write("%s %s\n" % (archive_hash, output_archive))

    return output_archive
+
def configure_logging(options):
    """Install a stderr log handler that terminates the script on ERROR."""
    class ExitOnErrorLogHandler(logging.StreamHandler, object):
        def emit(self, record):
            super(ExitOnErrorLogHandler, self).emit(record)
            # Exit script if an ERROR or worse occurred
            if record.levelno >= logging.ERROR:
                # Include the traceback when called from an exception handler
                if sys.exc_info()[2] is not None:
                    logging.info(traceback.format_exc())
                sys.exit(1)

    def log_level():
        if options.verbose:
            return logging.DEBUG
        if options.quiet:
            return logging.ERROR
        return logging.INFO

    lh = ExitOnErrorLogHandler(sys.stderr)
    lh.setFormatter(logging.Formatter('%(levelname) 7s: %(message)s'))
    logging.getLogger().addHandler(lh)
    logging.getLogger().setLevel(log_level())
+
def main(args=None):
    """Drive the release process: extract, stamp, archive, hash, and sign.

    args defaults to sys.argv[1:]. Note that logging.error() aborts the
    script via the ExitOnErrorLogHandler installed by configure_logging,
    so error branches below do not fall through.
    """
    # pylint: disable=too-many-branches,too-many-locals,too-many-statements
    if args is None:
        args = sys.argv[1:]

    (options, args) = parse_args(args)

    configure_logging(options)

    if len(args) != 1 and len(args) != 2:
        logging.error('Usage: %s [options] <version tag>' % (sys.argv[0]))

    snapshot_branch = None
    target_version = None

    archives = options.archive_types.split(',') if options.archive_types != '' else []
    for archive_type in archives:
        if archive_type not in ['tar', 'tgz', 'tbz', 'txz']:
            logging.error('Unknown archive type "%s"' % (archive_type))

    if args[0] == 'snapshot':
        if len(args) != 2:
            logging.error('Missing branch name for snapshot command')
        snapshot_branch = args[1]
    else:
        if len(args) != 1:
            logging.error('Usage error, try --help')
        target_version = args[0]

    if snapshot_branch:
        logging.info('Creating snapshot release from branch %s', snapshot_branch)
        target_version = snapshot_branch
    elif len(args) == 1:
        # NOTE(review): logging.info cannot raise ValueError; this try looks
        # like a leftover from when the version string was parsed here
        try:
            logging.info('Creating release for version %s' % (target_version))
        except ValueError as e:
            logging.error('Invalid version number %s' % (target_version))

    rev_id = revision_of(target_version)
    if rev_id == '':
        logging.error('No tag matching %s found' % (target_version))

    rel_date, rel_epoch = datestamp(target_version)
    if rel_date == 0 or rel_epoch == 0:
        logging.error('No date found for version, git error?')

    logging.info('Found %s at revision id %s released %d' % (target_version, rev_id, rel_date))

    # Make gzip headers use the release date (see fake_time above)
    global GZIP_HEADER_TIME # pylint: disable=global-statement
    GZIP_HEADER_TIME = rel_epoch

    def output_name():
        if snapshot_branch:
            if snapshot_branch == 'master':
                return 'Botan-snapshot-%s' % (rel_date)
            else:
                return 'Botan-snapshot-%s-%s' % (snapshot_branch, rel_date)
        else:
            return 'Botan-' + target_version

    output_basename = output_name()

    logging.debug('Output basename %s' % (output_basename))

    if os.access(output_basename, os.X_OK):
        logging.info('Removing existing output dir %s' % (output_basename))
        shutil.rmtree(output_basename)

    extract_revision(rev_id, output_basename)

    # Sort for a deterministic archive member order
    all_files = []
    for (curdir, _, files) in os.walk(output_basename):
        all_files += [os.path.join(curdir, f) for f in files]
    all_files.sort(key=lambda f: (os.path.dirname(f), os.path.basename(f)))

    def find_version_file():

        # location of file with version information has moved over time
        for possible_version_file in ['src/build-data/version.txt', 'version.txt', 'botan_version.py']:
            full_path = os.path.join(output_basename, possible_version_file)
            if os.access(full_path, os.R_OK):
                return full_path

        logging.error('Cannot locate version file')
        return None

    version_file = find_version_file()

    if not os.access(version_file, os.R_OK):
        logging.error('Cannot read %s' % (version_file))

    rewrite_version_file(version_file, target_version, snapshot_branch, rev_id, rel_date)

    try:
        os.makedirs(options.output_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            logging.error('Creating dir %s failed %s' % (options.output_dir, e))

    output_files = []

    hash_file = None
    if options.write_hash_file is not None:
        hash_file = open(options.write_hash_file, 'w')

    for archive_type in archives:
        output_files.append(write_archive(target_version,
                                          output_basename,
                                          archive_type,
                                          rel_epoch,
                                          all_files,
                                          hash_file))

    if hash_file is not None:
        hash_file.close()

    shutil.rmtree(output_basename)

    # Sign either the hash file (clearsign) or each archive (detached)
    if options.pgp_key_id != 'none':
        if options.write_hash_file is not None:
            output_files += gpg_sign(options.pgp_key_id, options.pgp_passphrase_file,
                                     [options.write_hash_file], False)
        else:
            output_files += gpg_sign(options.pgp_key_id, options.pgp_passphrase_file,
                                     output_files, True)

    if options.output_dir != '.':
        for output_file in output_files:
            logging.debug('Moving %s to %s' % (output_file, options.output_dir))
            shutil.move(output_file, os.path.join(options.output_dir, output_file))

    if options.print_output_names:
        for output_file in output_files:
            print(output_file)

    return 0
+
if __name__ == '__main__':
    try:
        sys.exit(main())
    except Exception as e: # pylint: disable=broad-except
        # Keep the full traceback at info level, summarize at error
        logging.info(traceback.format_exc())
        logging.error(e)
        sys.exit(1)
diff --git a/comm/third_party/botan/src/scripts/docker-android.sh b/comm/third_party/botan/src/scripts/docker-android.sh
new file mode 100755
index 0000000000..22bbc41f43
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/docker-android.sh
@@ -0,0 +1,11 @@
+VERSION=`./configure.py --version`
+mkdir -p docker-builds
+docker build -f src/scripts/Dockerfile.android --force-rm -t botan-android-${VERSION} \
+ --build-arg ANDROID_ARCH=${ANDROID_ARCH} \
+ --build-arg ANDROID_ARCH_SUF=${ANDROID_ARCH_SUF} \
+ --build-arg ANDROID_SDK_VER=${ANDROID_SDK_VER} \
+ --build-arg ANDROID_TOOLCHAIN_SUF=${ANDROID_TOOLCHAIN_SUF} \
+ .
+docker create --name botan-android-${VERSION} botan-android-${VERSION}
+docker cp botan-android-${VERSION}:/botan/android docker-builds
+docker rm -f botan-android-${VERSION}
diff --git a/comm/third_party/botan/src/scripts/ffi_decls.py b/comm/third_party/botan/src/scripts/ffi_decls.py
new file mode 100755
index 0000000000..b336e61f53
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/ffi_decls.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+
+"""
+Automatically generate declarations for the FFI layer
+
+(C) 2019 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+from pycparser import c_ast, parse_file
+
+ffi_header = 'src/lib/ffi/ffi.h'
+
def to_ctype(typ, is_ptr):
    """Map a C type name from ffi.h to the matching ctypes type name.

    Botan object handles (botan_*_t) are opaque pointers regardless of
    *is_ptr*. Raises Exception for any type with no mapping.
    """
    if typ.startswith('botan_') and typ.endswith('_t'):
        return 'c_void_p'

    value_types = {
        'uint32': 'c_uint32',
        'size_t': 'c_size_t',
        'uint8_t': 'c_uint8',
        'uint32_t': 'c_uint32',
        'uint64_t': 'c_uint64',
        'int': 'c_int',
        'unsigned': 'c_uint',
    }

    pointer_types = {
        'void': 'c_void_p',
        'char': 'c_char_p',
        'uint8_t': 'c_char_p',  # hack: byte buffers are passed as char*
        'size_t': 'POINTER(c_size_t)',
        'uint32_t': 'POINTER(c_uint32)',
        'uint64_t': 'POINTER(c_uint64)',
        'int': 'POINTER(c_int)',
    }

    table = pointer_types if is_ptr else value_types
    if typ in table:
        return table[typ]

    raise Exception("Unknown type %s/%d" % (typ, is_ptr))
+
+GROUP = None
+
class FuncDefVisitor(c_ast.NodeVisitor):
    """pycparser visitor that prints a ctypes ffi_api() binding per function.

    Only declarations returning plain int (the FFI error-code convention)
    are emitted. Output is grouped by API family, with a comment header
    printed when the group changes (tracked via the GROUP global).
    """
    def visit_FuncDecl(self, node):

        if not isinstance(node.type, c_ast.TypeDecl):
            #print("ignoring", node.type)
            return

        if node.type.type.names != ['int']:
            #print("ignoring", node.type)
            return

        # all functions returning ints:
        fn_name = node.type.declname

        # Group key is the second component of botan_<group>_... names
        fn_group = fn_name.split('_')[1]
        if fn_group == 'privkey':
            fn_group = 'pubkey' # hack

        global GROUP

        if fn_group != GROUP:
            if fn_group in ['rng', 'hash', 'mac', 'cipher', 'block', 'mp', 'pubkey', 'pk', 'x509', 'hotp', 'totp', 'fpe']:
                print("\n    # ", fn_group.upper())
            else:
                print("")
            GROUP = fn_group


        fn_args = []

        for param in node.args.params:

            # Pointer and array parameters both map to pointer ctypes
            is_ptr = False
            typ = None
            if isinstance(param.type, c_ast.PtrDecl):
                is_ptr = True
                typ = param.type.type.type.names[0]
            elif isinstance(param.type, c_ast.ArrayDecl):
                is_ptr = True
                typ = param.type.type.type.names[0]
            else:
                typ = param.type.type.names[0]

            ctype = to_ctype(typ, is_ptr)
            fn_args.append(ctype)

        # Break the argument list onto its own line for long signatures
        decl = "    ffi_api(dll.%s," % (fn_name)
        if len(fn_args) > 4:
            decl += "\n            "
        else:
            decl += ' '

        decl += '[' + ', '.join(fn_args) + '])'

        print(decl)
+
# Parse the preprocessed FFI header and print one binding per declaration
ast = parse_file(ffi_header, use_cpp=True, cpp_args=['-Ibuild/include', '-std=c89', '-DBOTAN_DLL='])
v = FuncDefVisitor()
v.visit(ast)
diff --git a/comm/third_party/botan/src/scripts/fuzzer.xml b/comm/third_party/botan/src/scripts/fuzzer.xml
new file mode 100644
index 0000000000..686059f8c8
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/fuzzer.xml
@@ -0,0 +1,17 @@
+<!-- This is a template for a config file for fuzzing with TLS-Attacker -->
+
+<startupCommandsHolder>
+ <serverCommand>$botan_cli </serverCommand>
+ <serverPort>$tls_port</serverPort>
+ <workflowFolder>$workflow_dir</workflowFolder>
+ <modifiedVariableTypes>TLS_CONSTANT,LENGTH,COUNT,PUBLIC_KEY,PADDING,SIGNATURE,PLAIN_PROTOCOL_MESSAGE</modifiedVariableTypes>
+ <outputFolder>/tmp/</outputFolder>
+ <startupCommandsList>
+ <startupCommands>
+ <fuzzerCommand>simple_fuzzer -connect localhost:$PORT</fuzzerCommand>
+ <serverCommandParameters>tls_server $rsa_cert $rsa_key --port=$PORT --policy=$fuzz_policy --dump-traces=/tmp/tls/ --output=/tmp/botan_output.log --error-output=/tmp/botan_error_output.log </serverCommandParameters>
+ <shortName>botan-rsa</shortName>
+ </startupCommands>
+ <!-- TODO ECDSA -->
+ </startupCommandsList>
+</startupCommandsHolder>
diff --git a/comm/third_party/botan/src/scripts/gen_os_features.py b/comm/third_party/botan/src/scripts/gen_os_features.py
new file mode 100755
index 0000000000..bfcfac7d19
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/gen_os_features.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+
+"""
+A script to automatically write docs to /docs. Currently it generates
+os.rst, a feature table of OS features.
+
+Requires Python 3.
+
+(C) 2015 Simon Warta (Kullo GmbH)
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+# global
+import argparse
+import glob
+import os
+import sys
+
# Assume this script is in botan/src/scripts
botan_root = os.path.join(os.path.dirname(sys.argv[0]), "..", "..")

# local: make the top-level configure.py importable
sys.path.append(botan_root)
from configure import OsInfo

# --verbose prints each OS and its feature list while the table is built
parser = argparse.ArgumentParser(description="")
parser.add_argument('--verbose', dest='verbose', action='store_const',
                    const=True, default=False,
                    help='Verbose output (default: false)')
args = parser.parse_args()
+
def update_os():
    """Generate doc/dev_ref/os.rst: a feature matrix of OS target features.

    Reads every src/build-data/os/*.txt info file via configure.OsInfo and
    writes a CSV table with one single-letter column per OS and one row
    per feature. Exits with status 1 if no info files are found.
    """
    TABLE_TITLE="OS Features"

    files = sorted(glob.glob(botan_root + '/src/build-data/os/*.txt'))

    if len(files) == 0:
        print("No info.txt files found.")
        sys.exit(1)

    all_features = set()
    oss = {}

    for filename in files:
        o = OsInfo(filename)
        oss[o.basename] = o
        all_features |= set(o.target_features)
        if args.verbose:
            print(o.basename)
            print(o.target_features)

    featurelist = sorted(all_features)
    oslist = sorted(oss.keys())

    if args.verbose:
        print(featurelist)

    # 'with' guarantees the output file is flushed and closed even if an
    # OsInfo attribute access fails midway (the original leaked the handle)
    with open(os.path.join(botan_root, 'doc', 'dev_ref', 'os.rst'), 'w+') as f1:
        print(TABLE_TITLE, file=f1)
        print("========================================", file=f1)
        print("", file=f1)

        print("A summary of OS features as defined in ``src/build-data/os``.", file=f1)
        print("", file=f1)

        # Legend mapping the one-letter column headers to full OS names
        print("::", file=f1)
        print("", file=f1)
        for o in oslist:
            print(" %s: %s" % (o[0:1], o), file=f1)
        print("", file=f1)

        print('.. csv-table::', file=f1)
        print(' :header: "Feature", "' + '", "'.join([o[0:1] for o in oslist]) + '"', file=f1)
        print('', file=f1)

        for f in featurelist:
            line = ' "' + f + '"'
            for o in oslist:
                line += ', "'
                line += 'X' if f in oss[o].target_features else ' '
                line += '"'
            print(line, file=f1)
        print("", file=f1)
        print(".. note::", file=f1)
        print(" This file is auto generated by ``src/scripts/%s``. Dont modify it manually."
              % os.path.basename(sys.argv[0]), file=f1)

if __name__ == '__main__':
    update_os()
diff --git a/comm/third_party/botan/src/scripts/install.py b/comm/third_party/botan/src/scripts/install.py
new file mode 100755
index 0000000000..32a7a7e9e8
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/install.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+
+"""
+Botan install script
+
+(C) 2014,2015,2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import errno
+import json
+import logging
+import optparse # pylint: disable=deprecated-module
+import os
+import shutil
+import sys
+import traceback
+
def parse_command_line(args):
    """Parse installer options; return (options, positional_args).

    Also sets the root logger level from --verbose/--quiet.
    """
    parser = optparse.OptionParser()

    parser.add_option('--verbose', action='store_true', default=False,
                      help='Show debug messages')
    parser.add_option('--quiet', action='store_true', default=False,
                      help='Show only warnings and errors')

    build_group = optparse.OptionGroup(parser, 'Source options')
    build_group.add_option('--build-dir', metavar='DIR', default='build',
                           help='Location of build output (default \'%default\')')
    parser.add_option_group(build_group)

    install_group = optparse.OptionGroup(parser, 'Installation options')
    install_group.add_option('--prefix', default='/usr/local',
                             help='Set output directory (default %default)')
    # The install subdirectories share a common option shape
    for flag, default, what in [('--bindir', 'bin', 'binary'),
                                ('--libdir', 'lib', 'library'),
                                ('--includedir', 'include', 'include'),
                                ('--docdir', 'share/doc', 'documentation'),
                                ('--pkgconfigdir', 'pkgconfig', 'pkgconfig')]:
        install_group.add_option(flag, default=default, metavar='DIR',
                                 help='Set %s subdir (default %%default)' % (what))

    install_group.add_option('--umask', metavar='MASK', default='022',
                             help='Umask to set (default %default)')
    parser.add_option_group(install_group)

    (options, leftover) = parser.parse_args(args)

    if options.verbose:
        level = logging.DEBUG
    elif options.quiet:
        level = logging.WARNING
    else:
        level = logging.INFO

    logging.getLogger().setLevel(level)

    return (options, leftover)
+
+
class PrependDestdirError(Exception):
    """Raised when DESTDIR staging is unsupported or a path escapes DESTDIR."""
    pass
+
+
def is_subdir(path, subpath):
    """Return True if walking from *subpath* to *path* must ascend directories.

    True in particular when *subpath* is nested under *path*.
    NOTE(review): also true for unrelated sibling trees; sufficient for the
    DESTDIR escape check in prepend_destdir but not a general subdir test.
    """
    rel = os.path.relpath(path, start=subpath)
    return rel.startswith("..")
+
+
def prepend_destdir(path):
    """
    Needed because os.path.join() discards the first path if the
    second one is absolute, which is usually the case here. Still, we
    want relative paths to work and leverage the os awareness of
    os.path.join().
    """
    destdir = os.environ.get('DESTDIR', "")

    if not destdir:
        return path

    # Staged (DESTDIR) installs are only supported with '/' separators
    if os.path.sep != "/":
        raise PrependDestdirError("Only UNIX-like file systems using forward slash " \
                                  "separator supported when DESTDIR is set.")
    if not os.path.isabs(path):
        raise PrependDestdirError("--prefix must be an absolute path when DESTDIR is set.")

    # Strip the leading separator so os.path.join() keeps destdir
    stripped = os.path.normpath(path).lstrip(os.path.sep)
    staged = os.path.join(destdir, stripped)

    if not is_subdir(destdir, staged):
        raise PrependDestdirError("path escapes DESTDIR (path='%s', destdir='%s')" % (staged, destdir))

    return staged
+
+
def makedirs(dirname, exist_ok=True):
    """Create *dirname* and any missing parents.

    With exist_ok (the default) an already-existing directory is not an
    error — equivalent to os.makedirs(dirname, exist_ok=True) without
    requiring that keyword's availability.
    """
    logging.debug('Creating directory %s' % (dirname))
    try:
        os.makedirs(dirname)
    except OSError as e:
        if exist_ok and e.errno == errno.EEXIST:
            return
        raise e
+
+# Clear link and create new one
def force_symlink(target, linkname):
    """Create the symlink *linkname* -> *target*.

    Any pre-existing entry at *linkname* is unlinked first, so repeated
    installs do not fail with EEXIST.
    """
    try:
        os.unlink(linkname)
    except OSError as e:
        if e.errno == errno.ENOENT:
            pass  # nothing to replace
        else:
            raise e
    os.symlink(target, linkname)
+
def calculate_exec_mode(options):
    """Return the permission bits for installed executables (0o777 & ~umask).

    Side effect: applies options.umask (an octal string) to the process
    where the platform supports os.umask.
    """
    mode = 0o777
    if 'umask' in os.__dict__:
        umask = int(options.umask, 8)
        logging.debug('Setting umask to %s' % oct(umask))
        os.umask(umask)
        mode &= ~umask & 0o777
    return mode
+
def main(args):
    """Install the built library, headers, tools and docs.

    Reads <build-dir>/build_config.json (written by configure.py) for all
    build facts; honors the DESTDIR environment variable via
    prepend_destdir() for staged installs. Returns 0 on success.
    """
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements

    logging.basicConfig(stream=sys.stdout,
                        format='%(levelname) 7s: %(message)s')

    (options, args) = parse_command_line(args)

    exe_mode = calculate_exec_mode(options)

    def copy_file(src, dst):
        logging.debug('Copying %s to %s' % (src, dst))
        shutil.copyfile(src, dst)

    def copy_executable(src, dst):
        copy_file(src, dst)
        logging.debug('Make %s executable' % dst)
        os.chmod(dst, exe_mode)

    with open(os.path.join(options.build_dir, 'build_config.json')) as f:
        cfg = json.load(f)

    ver_major = int(cfg['version_major'])
    ver_minor = int(cfg['version_minor'])
    ver_patch = int(cfg['version_patch'])
    target_os = cfg['os']
    build_shared_lib = bool(cfg['build_shared_lib'])
    build_static_lib = bool(cfg['build_static_lib'])
    build_cli = bool(cfg['build_cli_exe'])
    out_dir = cfg['out_dir']

    bin_dir = options.bindir
    lib_dir = options.libdir
    # Headers go under <prefix>/<includedir>/botan-<major>/botan
    target_include_dir = os.path.join(options.prefix,
                                      options.includedir,
                                      'botan-%d' % (ver_major),
                                      'botan')

    for d in [options.prefix, lib_dir, bin_dir, target_include_dir]:
        makedirs(prepend_destdir(d))

    build_include_dir = os.path.join(options.build_dir, 'include', 'botan')

    # Public headers only; 'internal' headers are not installed
    for include in sorted(os.listdir(build_include_dir)):
        if include == 'internal':
            continue
        copy_file(os.path.join(build_include_dir, include),
                  prepend_destdir(os.path.join(target_include_dir, include)))

    build_external_include_dir = os.path.join(options.build_dir, 'include', 'external')

    for include in sorted(os.listdir(build_external_include_dir)):
        copy_file(os.path.join(build_external_include_dir, include),
                  prepend_destdir(os.path.join(target_include_dir, include)))

    # On Windows the import library is always needed alongside the DLL
    if build_static_lib or target_os == 'windows':
        static_lib = cfg['static_lib_name']
        copy_file(os.path.join(out_dir, static_lib),
                  prepend_destdir(os.path.join(lib_dir, os.path.basename(static_lib))))

    if build_shared_lib:
        if target_os == "windows":
            libname = cfg['libname']
            soname_base = libname + '.dll'
            copy_executable(os.path.join(out_dir, soname_base),
                            prepend_destdir(os.path.join(bin_dir, soname_base)))
        else:
            soname_patch = cfg['soname_patch']
            soname_abi = cfg['soname_abi']
            soname_base = cfg['soname_base']

            copy_executable(os.path.join(out_dir, soname_patch),
                            prepend_destdir(os.path.join(lib_dir, soname_patch)))

            # Create the usual soname symlink chain; chdir so the links
            # are relative to the library directory
            if target_os != "openbsd":
                prev_cwd = os.getcwd()
                try:
                    os.chdir(prepend_destdir(lib_dir))
                    force_symlink(soname_patch, soname_abi)
                    force_symlink(soname_patch, soname_base)
                finally:
                    os.chdir(prev_cwd)

    if build_cli:
        copy_executable(cfg['cli_exe'], prepend_destdir(os.path.join(bin_dir, cfg['cli_exe_name'])))

    if 'botan_pkgconfig' in cfg:
        pkgconfig_dir = os.path.join(options.prefix, options.libdir, options.pkgconfigdir)
        makedirs(prepend_destdir(pkgconfig_dir))
        copy_file(cfg['botan_pkgconfig'],
                  prepend_destdir(os.path.join(pkgconfig_dir, os.path.basename(cfg['botan_pkgconfig']))))

    # The ctypes wrapper only works against the shared library
    if 'ffi' in cfg['mod_list'] and cfg['build_shared_lib'] is True and cfg['install_python_module'] is True:
        for ver in cfg['python_version'].split(','):
            py_lib_path = os.path.join(lib_dir, 'python%s' % (ver), 'site-packages')
            logging.debug('Installing python module to %s' % (py_lib_path))
            makedirs(prepend_destdir(py_lib_path))

            py_dir = cfg['python_dir']

            copy_file(os.path.join(py_dir, 'botan2.py'),
                      prepend_destdir(os.path.join(py_lib_path, 'botan2.py')))

    if cfg['with_documentation']:
        target_doc_dir = os.path.join(options.prefix, options.docdir,
                                      'botan-%d.%d.%d' % (ver_major, ver_minor, ver_patch))

        # Replace any docs from a previous install of this exact version
        shutil.rmtree(prepend_destdir(target_doc_dir), True)
        shutil.copytree(cfg['doc_output_dir'], prepend_destdir(target_doc_dir))

        copy_file(os.path.join(cfg['base_dir'], 'license.txt'),
                  prepend_destdir(os.path.join(target_doc_dir, 'license.txt')))
        copy_file(os.path.join(cfg['base_dir'], 'news.rst'),
                  prepend_destdir(os.path.join(target_doc_dir, 'news.txt')))
        for f in [f for f in os.listdir(cfg['doc_dir']) if f.endswith('.txt')]:
            copy_file(os.path.join(cfg['doc_dir'], f), prepend_destdir(os.path.join(target_doc_dir, f)))

        if cfg['with_rst2man']:
            man1_dir = prepend_destdir(os.path.join(options.prefix, os.path.join(cfg['mandir'], 'man1')))
            makedirs(man1_dir)

            copy_file(os.path.join(cfg['build_dir'], 'botan.1'),
                      os.path.join(man1_dir, 'botan.1'))

    logging.info('Botan %s installation complete', cfg['version'])
    return 0
+
if __name__ == '__main__':
    try:
        sys.exit(main(sys.argv))
    except Exception as e: # pylint: disable=broad-except
        # Summarize at error level, keep the full traceback at info
        logging.error('Failure: %s' % (e))
        logging.info(traceback.format_exc())
        sys.exit(1)
diff --git a/comm/third_party/botan/src/scripts/macro_checks.py b/comm/third_party/botan/src/scripts/macro_checks.py
new file mode 100755
index 0000000000..df1a503c6c
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/macro_checks.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# (C) 2018 Jack Lloyd
+# Botan is released under the Simplified BSD License (see license.txt)
+
+# Scans all source and test files and makes sure we are not using a
+# BOTAN_HAS_xxx macro which is not actually defined anywhere.
+
+from configure import ModuleInfo, load_info_files
+import os
+import re
+import logging
+
src_dir = 'src'
lib_dir = os.path.join(src_dir, 'lib')

# Collect every BOTAN_HAS_* macro declared by a module's info.txt
info_modules = load_info_files(lib_dir, 'Modules', "info.txt", ModuleInfo)

all_defines = set()

for module in info_modules.values():
    # NOTE(review): reaches into ModuleInfo's private _defines attribute;
    # confirm against configure.py if that field is renamed
    for define in module._defines:
        all_defines.add(define)

# Macros defined outside of module info files (configure.py flags etc.)
extras = ['MP_DWORD', 'VALGRIND', 'SANITIZER_UNDEFINED',
          'ONLINE_REVOCATION_CHECKS', 'NIST_PRIME_REDUCERS_W32']

for extra in extras:
    all_defines.add(extra)

macro = re.compile('BOTAN_HAS_([A-Z0-9_]+)')

# Scan all C++ sources/headers for BOTAN_HAS_* uses with no known definition
for dirname, subdirs, files in os.walk(src_dir):
    for fname in files:
        if fname.endswith('.h') or fname.endswith('.cpp'):
            contents = open(os.path.join(dirname, fname)).read()

            for m in re.finditer(macro, contents):

                if m.group(1) not in all_defines:
                    logging.error('In %s found unknown feature macro %s' % (fname, m.group(1)))
+
diff --git a/comm/third_party/botan/src/scripts/monty.py b/comm/third_party/botan/src/scripts/monty.py
new file mode 100755
index 0000000000..f253da3f67
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/monty.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python3
+
+import sys
+import datetime
+
+# (C) 2018 Jack Lloyd
+# Botan is released under the Simplified BSD License (see license.txt)
+
+# Used to generate src/lib/math/mp/mp_monty_n.cpp
+
+def monty_redc_code(n):
+    """Print the body of an unrolled Montgomery reduction for n-word values.
+
+    Emits C++ statements (word3_muladd/word3_add chains over a 3-word
+    accumulator w2:w1:w0) that reduce the double-width value in z[] modulo
+    the n-word prime p[], using p_dash and the workspace ws[]. Output goes
+    to stdout, indented for inclusion inside a function body.
+    """
+
+    lines = []
+
+    lines.append("word w2 = 0, w1 = 0, w0 = 0;")
+    lines.append("w0 = z[0];")
+    lines.append("ws[0] = w0 * p_dash;")
+
+    lines.append("word3_muladd(&w2, &w1, &w0, ws[0], p[0]);")
+    lines.append("w0 = w1; w1 = w2; w2 = 0;")
+
+    # First half: compute the Montgomery factors ws[i] column by column.
+    for i in range(1, n):
+        for j in range(0, i):
+            lines.append("word3_muladd(&w2, &w1, &w0, ws[%d], p[%d]);" % (j, i-j))
+
+        lines.append("word3_add(&w2, &w1, &w0, z[%d]);" % (i))
+        lines.append("ws[%d] = w0 * p_dash;" % (i))
+
+        lines.append("word3_muladd(&w2, &w1, &w0, ws[%d], p[0]);" % (i))
+        lines.append("w0 = w1; w1 = w2; w2 = 0;")
+
+    # Second half: accumulate the high columns into ws[0..n].
+    for i in range(0, n):
+        for j in range(i + 1, n):
+            lines.append("word3_muladd(&w2, &w1, &w0, ws[%d], p[%d]);" % (j, n + i-j))
+
+        lines.append("word3_add(&w2, &w1, &w0, z[%d]);" % (n+i))
+        lines.append("ws[%d] = w0;" % (i))
+        lines.append("w0 = w1; w1 = w2; w2 = 0;")
+
+    lines.append("word3_add(&w2, &w1, &w0, z[%d]);" % (2*(n+1) - 1))
+
+    lines.append("ws[%d] = w0;" % (n))
+    lines.append("ws[%d] = w1;" % (n+1))
+
+    # Final conditional subtraction of p: unrolled word_sub chain for small
+    # n, delegated to bigint_sub3 otherwise.
+    if n < 16:
+        lines.append("word borrow = 0;")
+        for i in range(n):
+            lines.append("ws[%d] = word_sub(ws[%d], p[%d], &borrow);" % (n + 1 + i, i, i))
+        lines.append("ws[%d] = word_sub(ws[%d], 0, &borrow);" % (2*n+1, n))
+    else:
+        lines.append("word borrow = bigint_sub3(ws + %d + 1, ws, %d + 1, p, %d);" % (n, n, n))
+
+    # Constant-time select of the reduced value, then scrub the upper words.
+    lines.append("CT::conditional_copy_mem(borrow, z, ws, ws + %d, %d);" % (n + 1, n + 1))
+    lines.append("clear_mem(z + %d, 2*(%d+1) - %d);" % (n, n, n))
+
+    for line in lines:
+        print("   %s" % (line))
+
+def main(args = None):
+    """Generate mp_monty_n.cpp: one unrolled Montgomery reduction per size.
+
+    Sizes may be given as command line arguments; defaults to the set the
+    library dispatches on. The generated C++ is written to stdout.
+    """
+    if args is None:
+        args = sys.argv
+
+    if len(args) <= 1:
+        sizes = [4, 6, 8, 16, 24, 32]
+    else:
+        sizes = map(int, args[1:])
+
+    print("""/*
+* This file was automatically generated by %s on %s
+* All manual changes will be lost. Edit the script instead.
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/internal/mp_monty.h>
+#include <botan/internal/mp_core.h>
+#include <botan/internal/mp_asmi.h>
+#include <botan/internal/ct_utils.h>
+
+namespace Botan {
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d")))
+
+    for n in sizes:
+        print("void bigint_monty_redc_%d(word z[], const word p[%d], word p_dash, word ws[])" % (n, n))
+        print("   {")
+
+        monty_redc_code(n)
+
+        print("   }\n")
+
+    print("}")
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
+
+
diff --git a/comm/third_party/botan/src/scripts/oids.py b/comm/third_party/botan/src/scripts/oids.py
new file mode 100755
index 0000000000..323b6efb50
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/oids.py
@@ -0,0 +1,337 @@
+#!/usr/bin/python
+
+"""
+(C) 2016 Jack Lloyd
+(C) 2017 Fabian Weissberg, Rohde & Schwarz Cybersecurity
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import sys
+import datetime
+import re
+from collections import defaultdict
+
+def format_oid(oid):
+    """Convert a dotted OID string ("1.2.3") to a C++ initializer list "{1,2,3}"."""
+    #return '"' + oid + '"'
+    return "{" + oid.replace('.', ',') + '}'
+
+def format_map(m, for_oid = False):
+    """Render dict m as C++ map initializer entries, one per line, sorted by key.
+
+    With for_oid set, the value is wrapped in an OID(...) constructor call.
+    """
+    s = ''
+    for k in sorted(m.keys()):
+        v = m[k]
+
+        if len(s) > 0:
+            s += '   '
+
+        if for_oid:
+            s += '{ "%s", OID(%s) },\n' % (k,format_oid(v))
+        else:
+            s += '{ "%s", "%s" },\n' % (k,v)
+
+    s = s[:-2] # chomp last two chars
+
+    return s
+
+
+def format_as_map(oid2str, str2oid):
+    """Return the full C++ source for oids.cpp using unordered_map tables."""
+    return """/*
+* OID maps
+*
+* This file was automatically generated by %s on %s
+*
+* All manual edits to this file will be lost. Edit the script
+* then regenerate this source file.
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/oids.h>
+#include <unordered_map>
+
+namespace Botan {
+
+std::unordered_map<std::string, std::string> OIDS::load_oid2str_map()
+   {
+   return std::unordered_map<std::string,std::string>{
+      %s
+      };
+   }
+
+std::unordered_map<std::string, OID> OIDS::load_str2oid_map()
+   {
+   return std::unordered_map<std::string,OID>{
+      %s
+      };
+   }
+
+}
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d"),
+       format_map(oid2str), format_map(str2oid, True))
+
+
+def format_if(m, nm,t=False):
+    """Render dict m as a chain of C++ 'if' statements comparing against nm.
+
+    With t set, a match returns an OID(...) constructor, otherwise a string
+    literal.
+    """
+    s = ''
+    for k in sorted(m.keys()):
+        v = m[k]
+
+        if t:
+            s += '   if(%s == "%s") return OID(%s);\n' % (nm,k, format_oid(v))
+        else:
+            s += '   if(%s == "%s") return "%s";\n' % (nm,k, v)
+
+    s = s[:-1]
+
+    return s
+
+def format_as_ifs(oid2str, str2oid):
+    """Return alternative C++ source for oids.cpp using if-chains instead of maps."""
+    return """/*
+* OID maps
+*
+* This file was automatically generated by %s on %s
+*
+* All manual edits to this file will be lost. Edit the script
+* then regenerate this source file.
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/oids.h>
+
+namespace Botan {
+
+namespace OIDS {
+
+std::string lookup(const OID& oid)
+   {
+   const std::string oid_str = oid.to_string();
+%s
+
+   return std::string();
+   }
+
+OID lookup(const std::string& name)
+   {
+%s
+
+   return OID();
+   }
+
+}
+
+}
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d"),
+       format_if(oid2str,"oid_str"), format_if(str2oid, "name", True))
+
+
+def format_dn_ub_map(dn_ub, oid2str):
+    """Render the DN upper-bound table as std::map initializer entries."""
+    s = ''
+    for k in sorted(dn_ub.keys()):
+        v = dn_ub[k]
+
+        s += '   { Botan::OID({%s}), %s }, // %s\n' % (k.replace('.',','),v,oid2str[k])
+
+    # delete last ',' and \n
+    idx = s.rfind(',')
+    if idx != -1:
+        s = s[:idx] + s[idx+1:-1]
+
+    return s
+
+
+def format_dn_ub_as_map(dn_ub, oid2str):
+    """Return the full C++ source for X509_dn_ub.cpp (RFC 5280 DN length bounds)."""
+    return """/*
+* DN_UB maps: Upper bounds on the length of DN strings
+*
+* This file was automatically generated by %s on %s
+*
+* All manual edits to this file will be lost. Edit the script
+* then regenerate this source file.
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/x509_dn.h>
+#include <botan/asn1_oid.h>
+#include <map>
+
+namespace {
+
+/**
+ * Upper bounds for the length of distinguished name fields as given in RFC 5280, Appendix A.
+ * Only OIDS recognized by botan are considered, so far.
+ * Maps OID string representations instead of human readable strings in order
+ * to avoid an additional lookup.
+ */
+static const std::map<Botan::OID, size_t> DN_UB =
+   {
+%s
+   };
+}
+
+namespace Botan {
+
+//static
+size_t X509_DN::lookup_ub(const OID& oid)
+   {
+   auto ub_entry = DN_UB.find(oid);
+   if(ub_entry != DN_UB.end())
+      {
+      return ub_entry->second;
+      }
+   else
+      {
+      return 0;
+      }
+   }
+}
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d"),
+       format_dn_ub_map(dn_ub,oid2str))
+
+
+def format_set_map(m):
+ s = ''
+ for k in sorted(m.keys()):
+ v = m[k]
+
+ if len(s) > 0:
+ s += ' '
+
+ s += '{ "%s", {' % k
+ for pad in v:
+ s += '"%s", ' % pad
+ if len(v) is not 0:
+ s = s[:-2]
+ s += '} },\n'
+ s = s[:-1]
+ return s
+
+
+def format_pads_as_map(sig_dict):
+    """Return the full C++ source for padding.cpp (allowed signature paddings)."""
+    return """/*
+* Sets of allowed padding schemes for public key types
+*
+* This file was automatically generated by %s on %s
+*
+* All manual edits to this file will be lost. Edit the script
+* then regenerate this source file.
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+#include <botan/internal/padding.h>
+#include <unordered_map>
+#include <vector>
+#include <string>
+#include <algorithm>
+
+namespace Botan {
+
+namespace {
+
+const std::unordered_map<const std::string, std::vector<std::string>> allowed_signature_paddings =
+   {
+   %s
+   };
+
+}
+
+const std::vector<std::string> get_sig_paddings(const std::string algo)
+   {
+   auto i = allowed_signature_paddings.find(algo);
+   if(i != allowed_signature_paddings.end())
+      return i->second;
+   return {};
+   }
+
+bool sig_algo_and_pad_ok(const std::string algo, std::string padding)
+   {
+   const std::vector<std::string> pads = get_sig_paddings(algo);
+   return std::find(pads.begin(), pads.end(), padding) != pads.end();
+   }
+
+}
+""" % (sys.argv[0], datetime.date.today().strftime("%Y-%m-%d"),
+       format_set_map(sig_dict))
+
+
+def main(args = None):
+ """ Print header files (oids.cpp, dn_ub.cpp) depending on the first argument and on src/build-data/oids.txt
+
+ Choose 'oids' to print oids.cpp, needs to be written to src/lib/asn1/oids.cpp
+ Choose 'dn_ub' to print dn_ub.cpp, needs to be written to src/lib/x509/X509_dn_ub.cpp
+ Choose 'pads' to print padding.cpp, needs to be written to src/lib/pk_pad/padding.cpp
+ """
+ if args is None:
+ args = sys.argv
+ if len(args) < 2:
+ raise Exception("Use either 'oids', 'dn_ub', 'pads' as first argument")
+
+ oid_lines = open('./src/build-data/oids.txt').readlines()
+
+ oid_re = re.compile("^([0-9][0-9.]+) = ([A-Za-z0-9_\./\(\), -]+)(?: = )?([0-9]+)?$")
+ hdr_re = re.compile("^\[([a-z0-9_]+)\]$")
+ pad_re = re.compile("^([A-Za-z0-9_\., -]+)/([A-Za-z0-9_-]+)[A-Za-z0-9_\.\(\), -]*$")
+
+ oid2str = {}
+ str2oid = {}
+ dn_ub = {}
+ sig2pads = defaultdict(set)
+ cur_hdr = None
+
+ for line in oid_lines:
+ line = line.strip()
+ if len(line) == 0:
+ continue
+
+ if line[0] == '#':
+ continue
+
+ match = hdr_re.match(line)
+ if match is not None:
+ cur_hdr = match.group(1)
+ continue
+
+ match = oid_re.match(line)
+ if match is None:
+ raise Exception(line)
+
+ oid = match.group(1)
+ nam = match.group(2)
+
+ if oid in str2oid:
+ print("Duplicated OID", oid, name, oid2str[oid])
+ sys.exit() # hard error
+ else:
+ oid2str[oid] = nam
+
+ # parse upper bounds for DNs
+ if cur_hdr == "dn":
+ if match.lastindex < 3:
+ raise Exception("Could not find an upper bound for DN " + match.group(1))
+ dn_ub[oid] = match.group(3)
+ # parse signature paddings
+ elif cur_hdr == "signature":
+ pad_match = pad_re.search(nam)
+ if pad_match is not None:
+ sig2pads[pad_match.group(1)].add(pad_match.group(2))
+
+ if nam in str2oid:
+ #str2oid[nam] = oid
+ pass
+ else:
+ str2oid[nam] = oid
+
+ if args[1] == "oids":
+ print(format_as_map(oid2str, str2oid))
+ elif args[1] == "dn_ub":
+ print(format_dn_ub_as_map(dn_ub,oid2str))
+ elif args[1] == "pads":
+ print(format_pads_as_map(sig2pads))
+ else:
+ print("Unknown command: try oids, dn_ub, or pads")
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/python_unittests.py b/comm/third_party/botan/src/scripts/python_unittests.py
new file mode 100755
index 0000000000..a6a22f3f0b
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/python_unittests.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python3
+
+"""
+Unittests for Botan Python scripts.
+
+Requires Python 3.
+
+(C) 2017 Simon Warta (Kullo GmbH)
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import sys
+import unittest
+
+sys.path.append("../..") # Botan repo root
+from configure import AmalgamationHelper # pylint: disable=wrong-import-position
+from configure import ModulesChooser # pylint: disable=wrong-import-position
+
+class AmalgamationHelperTests(unittest.TestCase):
+    """Tests for the #include-line classifiers used by the amalgamation build."""
+
+    def test_matcher_std_includes(self):
+        # Matches only unindented system includes that are not .h files
+        self.assertEqual(AmalgamationHelper.is_unconditional_std_include("#include <string>"), "string")
+        self.assertEqual(AmalgamationHelper.is_unconditional_std_include("#include <string> // comment"), "string")
+
+        self.assertEqual(AmalgamationHelper.is_unconditional_std_include("#include <myfile.h>"), None)
+        self.assertEqual(AmalgamationHelper.is_unconditional_std_include("#include <unistd.h>"), None)
+        self.assertEqual(AmalgamationHelper.is_unconditional_std_include(" #include <string>"), None)
+
+    def test_matcher_botan_include(self):
+        # Returns the path relative to the botan/ include prefix
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <botan/oids.h>"),
+                         "oids.h")
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <botan/internal/socket.h>"),
+                         "internal/socket.h")
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <botan/oids.h> // comment"),
+                         "oids.h")
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <botan/internal/socket.h> // comment"),
+                         "internal/socket.h")
+        self.assertEqual(AmalgamationHelper.is_botan_include(" #include <botan/oids.h>"),
+                         "oids.h")
+        self.assertEqual(AmalgamationHelper.is_botan_include(" #include <botan/internal/socket.h>"),
+                         "internal/socket.h")
+
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <string>"), None)
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <myfile.h>"), None)
+        self.assertEqual(AmalgamationHelper.is_botan_include("#include <unistd.h>"), None)
+
+    def test_matcher_any_includes(self):
+        # Matches every angle-bracket include, indented or not
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <string>"), "string")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <myfile.h>"), "myfile.h")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <unistd.h>"), "unistd.h")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <botan/oids.h>"),
+                         "botan/oids.h")
+        self.assertEqual(AmalgamationHelper.is_any_include(" #include <string>"), "string")
+        self.assertEqual(AmalgamationHelper.is_any_include(" #include <myfile.h>"), "myfile.h")
+        self.assertEqual(AmalgamationHelper.is_any_include(" #include <unistd.h>"), "unistd.h")
+        self.assertEqual(AmalgamationHelper.is_any_include(" #include <botan/oids.h>"),
+                         "botan/oids.h")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <string> // comment"), "string")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <myfile.h> // comment"), "myfile.h")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <unistd.h> // comment"), "unistd.h")
+        self.assertEqual(AmalgamationHelper.is_any_include("#include <botan/oids.h> // comment"),
+                         "botan/oids.h")
+
+class ModulesChooserResolveDependencies(unittest.TestCase):
+    """Tests for ModulesChooser.resolve_dependencies.
+
+    resolve_dependencies(available, table, start) returns (ok, modules):
+    ok is False when a required dependency is unavailable; 'B|C' entries
+    denote alternatives, where the first available option is taken.
+    """
+
+    def test_base(self):
+        available_modules = set(["A", "B"])
+        table = {
+            "A": [],
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A"]))
+
+    def test_no_dependencies_defined(self):
+        # Modules missing from the table raise KeyError when visited
+        available_modules = set(["A", "B"])
+        table = {
+            "A": [],
+        }
+        with self.assertRaises(KeyError):
+            ModulesChooser.resolve_dependencies(available_modules, table, "B")
+
+        available_modules = set(["A", "B"])
+        table = {
+            "A": ["B"],
+        }
+        with self.assertRaises(KeyError):
+            ModulesChooser.resolve_dependencies(available_modules, table, "A")
+
+    def test_add_dependency(self):
+        available_modules = set(["A", "B"])
+        table = {
+            "A": ["B"],
+            "B": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "B"]))
+
+    def test_add_dependencies_two_levels(self):
+        available_modules = set(["A", "B", "C"])
+        table = {
+            "A": ["B"],
+            "B": ["C"],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "B", "C"]))
+
+    def test_circular(self):
+        # Dependency cycles must terminate, not recurse forever
+        available_modules = set(["A", "B", "C"])
+        table = {
+            "A": ["B"],
+            "B": ["C"],
+            "C": ["A"]
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "B", "C"]))
+
+    def test_not_available(self):
+        available_modules = set(["A", "C"])
+        table = {
+            "A": ["B"],
+            "B": ["C"],
+            "C": ["A"]
+        }
+        ok, _ = ModulesChooser.resolve_dependencies(available_modules, table, "B")
+        self.assertFalse(ok)
+
+    def test_dependency_not_available(self):
+        available_modules = set(["A", "C"])
+        table = {
+            "A": ["B"],
+            "B": ["C"],
+            "C": ["A"]
+        }
+        ok, _ = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertFalse(ok)
+
+    def test_dependency2_not_available(self):
+        available_modules = set(["A", "B"])
+        table = {
+            "A": ["B"],
+            "B": ["C"],
+            "C": ["A"]
+        }
+        ok, _ = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertFalse(ok)
+
+    def test_dependency_choices(self):
+        # 'B|C' means either alternative satisfies the dependency
+        available_modules = set(["A", "B", "C"])
+        table = {
+            "A": ["B|C"],
+            "B": [],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertTrue(modules == set(["A", "B"]) or modules == set(["A", "C"]))
+
+    def test_dependency_prefer_existing(self):
+        # An alternative already selected elsewhere is preferred
+        available_modules = set(["A", "B", "C"])
+        table = {
+            "A": ["C", "B|C"],
+            "B": [],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "C"]))
+
+    def test_dependency_prefer_existing2(self):
+        available_modules = set(["A", "B", "C"])
+        table = {
+            "A": ["B", "B|C"],
+            "B": [],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "B"]))
+
+    def test_dependency_choices_impossible(self):
+        # Only the available alternative can be chosen
+        available_modules = set(["A", "C"])
+        table = {
+            "A": ["B|C"],
+            "B": [],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "C"]))
+
+    def test_dependency_choices_impossible2(self):
+        available_modules = set(["A", "B"])
+        table = {
+            "A": ["B|C"],
+            "B": [],
+            "C": []
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "A")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["A", "B"]))
+
+    def test_deep(self):
+        available_modules = set(["A", "B", "C", "E", "G"])
+        table = {
+            "A": ["B|C"],
+            "B": ["D"],
+            "C": ["E"],
+            "D": [],
+            "E": ["F|G"],
+            "F": ["A", "B"],
+            "G": ["A", "G"]
+        }
+        ok, modules = ModulesChooser.resolve_dependencies(available_modules, table, "G")
+        self.assertTrue(ok)
+        self.assertEqual(modules, set(["G", "A", "C", "E"]))
+
+
+if __name__ == '__main__':
+    unittest.TestCase.longMessage = True
+    unittest.main()
diff --git a/comm/third_party/botan/src/scripts/python_unittests_unix.py b/comm/third_party/botan/src/scripts/python_unittests_unix.py
new file mode 100755
index 0000000000..fe9f06a62a
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/python_unittests_unix.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+"""
+Unittests for Botan Python scripts. Those tests only need to pass un UNIX-like
+operating systems.
+
+Requires Python 3.
+
+(C) 2017 Simon Warta (Kullo GmbH)
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import os
+import sys
+import unittest
+
+sys.path.append("../..") # Botan repo root
+from install import prepend_destdir # pylint: disable=wrong-import-position
+from install import PrependDestdirError # pylint: disable=wrong-import-position
+
+
+class PrependDestdir(unittest.TestCase):
+    """Tests for install.prepend_destdir and its DESTDIR handling.
+
+    prepend_destdir normalizes the path, joins it under $DESTDIR, and
+    raises PrependDestdirError for relative or escaping inputs when a
+    DESTDIR is set.
+    """
+
+    def test_absolute_destdir(self):
+        os.environ["DESTDIR"] = "/"
+        self.assertEqual(prepend_destdir("/home/me"), "/home/me")
+        self.assertEqual(prepend_destdir("/home/me/"), "/home/me")
+        self.assertEqual(prepend_destdir("/home/me/../me2"), "/home/me2")
+
+        os.environ["DESTDIR"] = "/opt"
+        self.assertEqual(prepend_destdir("/home/me"), "/opt/home/me")
+        self.assertEqual(prepend_destdir("/home/me/"), "/opt/home/me")
+        self.assertEqual(prepend_destdir("/home/me/../me2"), "/opt/home/me2")
+
+    def test_relative_destdir(self):
+        os.environ["DESTDIR"] = "."
+        self.assertEqual(prepend_destdir("/home/me"), "./home/me")
+        self.assertEqual(prepend_destdir("/home/me/"), "./home/me")
+        self.assertEqual(prepend_destdir("/home/me/../me2"), "./home/me2")
+
+        os.environ["DESTDIR"] = "bar"
+        self.assertEqual(prepend_destdir("/home/me"), "bar/home/me")
+        self.assertEqual(prepend_destdir("/home/me/"), "bar/home/me")
+        self.assertEqual(prepend_destdir("/home/me/../me2"), "bar/home/me2")
+
+    def test_relative(self):
+        # No destdir set
+        os.environ["DESTDIR"] = ""
+        self.assertEqual(prepend_destdir("foo"), "foo")
+        self.assertEqual(prepend_destdir("../foo"), "../foo")
+
+        # Destdir set: relative install paths are rejected
+        os.environ["DESTDIR"] = "/opt"
+        with self.assertRaises(PrependDestdirError):
+            prepend_destdir("foo")
+        with self.assertRaises(PrependDestdirError):
+            prepend_destdir("../foo")
+
+    def test_escaping(self):
+        # Paths that resolve outside DESTDIR are rejected
+        os.environ["DESTDIR"] = "/opt"
+        with self.assertRaises(PrependDestdirError):
+            prepend_destdir("/foo/../..")
+
+
+if __name__ == '__main__':
+    unittest.TestCase.longMessage = True
+    unittest.main()
diff --git a/comm/third_party/botan/src/scripts/run_tls_attacker.py b/comm/third_party/botan/src/scripts/run_tls_attacker.py
new file mode 100755
index 0000000000..a773646334
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/run_tls_attacker.py
@@ -0,0 +1,138 @@
+#!/usr/bin/python
+
+import os
+import sys
+import subprocess
+import tempfile
+import time
+import random
+import optparse
+import string
+
+def run_subprocess(cmd):
+    """Run cmd to completion; exit the whole script on nonzero status."""
+    print("Running '%s'" % (' '.join(cmd)))
+
+    proc = subprocess.Popen(cmd, bufsize=-1)
+    proc.communicate()
+
+    if proc.returncode != 0:
+        print('Running "%s" failed rc %d' % (' '.join(cmd), proc.returncode))
+        sys.exit(proc.returncode)
+
+def spawn_server(cmd):
+    """Start cmd in the background and return its Popen handle."""
+    print("Spawning '%s'" % (' '.join(cmd)))
+    return subprocess.Popen(cmd, bufsize=-1)#,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+
+def main(args=None):
+ if args is None:
+ args = sys.argv
+
+ parser = optparse.OptionParser()
+
+ parser.add_option('--type', default='tests',
+ help='Which TLS-Attacker tests to run (tests, policy, fuzzer)')
+ parser.add_option('--src-dir', metavar='DIR', default='./src',
+ help='Specify path to botan sources (default "%default")')
+ parser.add_option('--verbose', action='store_true',
+ help='Be noisy')
+
+ (options, args) = parser.parse_args(args)
+
+ if len(args) != 3:
+ print("Usage: %s botan_cli_exe botan_ci_tools" % (args[0]))
+ return 1
+
+ cli_exe = args[1]
+ ci_tools = args[2]
+ test_type = options.type
+ src_dir = options.src_dir
+
+ if test_type not in ['tests', 'policy', 'fuzzer']:
+ print("Unknown --type %s" % (options.test_type))
+ return 1
+
+ if os.access(cli_exe, os.X_OK) != True:
+ print("Unable to find CLI tool at %s" % (cli_exe))
+ return 1
+
+ if os.access(src_dir, os.X_OK) != True:
+ print("Unable to find src dir at %s" % (src_dir))
+ return 1
+
+ test_data_dir = os.path.join(src_dir, 'tests/data')
+
+ lax_policy_txt = os.path.join(test_data_dir, 'tls-policy/compat.txt')
+ bsi_policy_txt = os.path.join(test_data_dir, 'tls-policy/bsi.txt')
+
+ tls_attacker_dir = os.path.join(ci_tools, 'TLS-Attacker')
+ tls_attacker_jar = os.path.join(tls_attacker_dir, 'TLS-Attacker-1.2.jar')
+ tls_attacker_testsuites = os.path.join(tls_attacker_dir, 'resources/testsuite')
+ tls_fuzzer_workflows = os.path.join(tls_attacker_dir, 'resources/fuzzing/workflows')
+
+ if os.access(tls_attacker_jar, os.R_OK) != True:
+ print("Unable to find TLS-Attacker jar at %s" % (tls_attacker_jar))
+ return 1
+
+ rsa_key = tempfile.NamedTemporaryFile(prefix='rsa_key_')
+ rsa_crt = tempfile.NamedTemporaryFile(prefix='rsa_crt_')
+
+ run_subprocess([cli_exe, 'keygen', '--algo=RSA', '--params=2048', '--output=%s' % (rsa_key.name)])
+ run_subprocess([cli_exe, 'gen_self_signed', rsa_key.name, 'localhost', '--output=%s' % (rsa_crt.name)])
+
+ server_log = 'botan_log.txt'
+ server_err_log = 'botan_err_log.txt'
+
+ tls_port = random.randint(50000, 60000)
+
+ botan_server_cmd = [cli_exe, 'tls_server', rsa_crt.name, rsa_key.name,
+ '--port=%d' % (tls_port),
+ '--output='+server_log,
+ '--error-output='+server_err_log]
+
+ java_tls_attacker = ['java', '-jar', tls_attacker_jar,
+ '-loglevel', 'DEBUG' if options.verbose else 'ERROR']
+ tls_attacker_opts = ['-tls_timeout', '300', '-connect', 'localhost:%d' % (tls_port)]
+
+ if test_type == 'tests':
+ try:
+ server_process = spawn_server(botan_server_cmd +
+ ['--policy=%s' % (lax_policy_txt)])
+ time.sleep(1)
+ run_subprocess(java_tls_attacker + ['testsuite_server'] + tls_attacker_opts +
+ ['-folder', tls_attacker_testsuites])
+ finally:
+ server_process.terminate()
+ elif test_type == 'policy':
+ try:
+ server_process = spawn_server(botan_server_cmd +
+ ['--policy=%s' % (bsi_policy_txt)])
+ time.sleep(1)
+ run_subprocess(java_tls_attacker + ['testtls_server'] + tls_attacker_opts +
+ ['-policy', bsi_policy_txt])
+ finally:
+ server_process.terminate()
+ elif test_type == 'fuzzer':
+
+ template_mapping = {
+ 'rsa_key': rsa_key.name,
+ 'rsa_cert': rsa_crt.name,
+ 'botan_cli': cli_exe,
+ 'workflow_dir': tls_fuzzer_workflows,
+ 'fuzz_policy': lax_policy_txt,
+ 'tls_port': str(tls_port),
+ 'PORT': '$PORT' # this is a var for TLS-Attacker don't touch it
+ }
+
+ template_txt = open(os.path.join(src_dir, 'scripts/fuzzer.xml')).read()
+
+ config = string.Template(template_txt).substitute(template_mapping)
+
+ fuzzer_config = tempfile.NamedTemporaryFile(prefix='fuzzer_cfg_', delete=False)
+ fuzzer_config.write(config.encode('ascii'))
+ fuzzer_config.close()
+
+ run_subprocess(java_tls_attacker + ['multi_fuzzer'] +
+ ['-startup_command_file', fuzzer_config.name])
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/run_tls_fuzzer.py b/comm/third_party/botan/src/scripts/run_tls_fuzzer.py
new file mode 100755
index 0000000000..b4ee91d247
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/run_tls_fuzzer.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+
+import argparse
+import subprocess
+import logging
+import sys
+import os
+import time
+
+def script_is_disabled(script_name):
+    """Return True if the given tlsfuzzer script should not be run.
+
+    Skips TLS 1.3 and SSLv2 scripts (features not supported), a fixed set
+    of incompatible scripts, and scripts too slow for CI.
+    """
+    if script_name.find('tls13') >= 0:
+        return True
+    if script_name.find('sslv2') >= 0:
+        return True
+
+    disabled = {
+        'test-SSLv3-padding.py',
+        'test-serverhello-random.py', # assumes support for SSLv2 hello
+        'test-x25519.py', # assumes support for X448 (!)
+        }
+
+    if script_name in disabled:
+        return True
+
+    slow = {
+        'test-bleichenbacher-workaround.py',
+        'test-client-compatibility.py',
+        'test-dhe-key-share-random.py',
+        'test-dhe-no-shared-secret-padding.py',
+        'test-ecdhe-padded-shared-secret.py',
+        'test-ecdhe-rsa-key-share-random.py',
+        'test-fuzzed-plaintext.py',
+        'test-invalid-client-hello-w-record-overflow.py',
+        'test-invalid-client-hello.py',
+        'test-large-hello.py',
+        }
+    if script_name in slow:
+        return True
+
+    return False
+
+def main(args = None):
+ if args is None:
+ args = sys.argv[1:]
+
+ parser = argparse.ArgumentParser()
+
+ # TODO generate key and spawn the server on some random port in tmp dir
+ # TODO support running tls_server binary under valgrind
+
+ parser.add_argument('--verbose', action='store_true', default=False)
+ parser.add_argument('tls-fuzzer-dir')
+
+ args = vars(parser.parse_args(args))
+
+ tlsfuzzer_dir = args['tls-fuzzer-dir']
+
+ if not os.access(tlsfuzzer_dir, os.X_OK):
+ raise Exception("Unable to read TLS fuzzer dir")
+
+ tls_scripts_dir = os.path.join(tlsfuzzer_dir, 'scripts')
+ if not os.access(tlsfuzzer_dir, os.X_OK):
+ raise Exception("Unable to read TLS fuzzer scripts dir")
+
+ scripts = sorted(os.listdir(tls_scripts_dir))
+
+ procs = {}
+
+ for script in scripts:
+ if script_is_disabled(script):
+ logging.debug('Skipping %s' % (script))
+ continue
+
+ procs[script] = subprocess.Popen([sys.executable, os.path.join(tls_scripts_dir, script)],
+ cwd=tlsfuzzer_dir,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ results = {}
+
+ while len(results) != len(procs):
+ time.sleep(.5)
+ for (script, proc) in procs.items():
+
+ if script in results:
+ continue
+
+ if proc.poll() != None:
+ rv = proc.returncode
+ results[script] = rv
+ if rv == 0:
+ print("PASS %s" % (script))
+ else:
+ print("FAIL %s" % (script))
+ sys.stdout.flush()
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/show_dependencies.py b/comm/third_party/botan/src/scripts/show_dependencies.py
new file mode 100755
index 0000000000..edf2d91e01
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/show_dependencies.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+
+"""
+Show Botan module dependencies as a list or graph.
+
+Requires graphviz from pip when graphical output is selected:
+https://pypi.python.org/pypi/graphviz
+
+(C) 2015,2018 Simon Warta (Kullo GmbH)
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+# global
+import argparse
+import copy
+import sys
+import subprocess
+from collections import OrderedDict
+import glob
+import os
+
+# Assume this script is in botan/src/scripts
+botan_root = os.path.join(os.path.dirname(sys.argv[0]), "..", "..")
+
+# locale
+sys.path.append(botan_root)
+from configure import ModuleInfo
+
+parser = argparse.ArgumentParser(description=
+                                 'Show Botan module dependencies. '
+                                 'The output is reduced by indirect dependencies, '
+                                 'i.e. you must look at the result recursively to get all dependencies.')
+
+parser.add_argument('mode',
+                    choices=["list", "draw"],
+                    help='The output mode')
+parser.add_argument('--format',
+                    nargs='?',
+                    choices=["pdf", "png"],
+                    default="pdf",
+                    help='The file format (drawing mode only)')
+parser.add_argument('--engine',
+                    nargs='?',
+                    choices=["fdp", "dot"],
+                    default="dot",
+                    help='The graph engine (drawing mode only)')
+parser.add_argument('--all', dest='all', action='store_const',
+                    const=True, default=False,
+                    help='Show all dependencies. Default: direct dependencies only. (list mode only)')
+parser.add_argument('--verbose', dest='verbose', action='store_const',
+                    const=True, default=False,
+                    help='Verbose output (default: false)')
+args = parser.parse_args()
+
+# One glob pattern per nesting depth, since this predates recursive '**'
+# glob support.
+files = []
+files += glob.glob(botan_root + '/src/lib/*/*/*/*/*/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/*/*/*/*/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/*/*/*/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/*/*/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/*/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/*/info.txt')
+files += glob.glob(botan_root + '/src/lib/info.txt')
+files.sort()
+
+if len(files) == 0:
+    print("No info.txt files found.")
+    sys.exit(1)
+
+modules = []
+
+# NOTE(review): dicts() and paths() appear unused by the rest of this
+# script — confirm before removing.
+def dicts(t): return {k: dicts(t[k]) for k in t}
+
+def paths(t, path = [], level=0):
+    # Return every root-to-node key path through the nested dict t.
+    ret = []
+    for key in t:
+        ret.append(path + [key])
+        ret += paths(t[key], path + [key], level+1)
+    return ret
+
+if args.verbose:
+    print("Getting dependencies from into.txt files ...")
+
+# Parse each info.txt into a ModuleInfo and collect them
+for filename in files:
+    (rest, info_txt) = os.path.split(filename)
+    (rest, modname) = os.path.split(rest)
+    module = ModuleInfo(filename)
+    modules.append(module)
+    if args.verbose:
+        print(module.basename)
+        print("\t" + str(set(module.dependencies(None))))
+
+if args.verbose:
+    print(str(len(modules)) + " modules:")
+    names=[m.basename for m in modules]
+    names.sort()
+    print(names)
+    print("")
+
+if args.verbose:
+    print("resolving dependencies ...")
+
+def cartinality(depdict):
+    # Total number of (module, dependency) edges; used below to detect
+    # when the iterative passes have reached a fixpoint.
+    return sum([len(depdict[k]) for k in depdict])
+
+registered_dependencies = dict()
+all_dependencies = dict()
+direct_dependencies = dict()
+
+# Seed with each module's declared dependencies, excluding itself
+for module in modules:
+    lst = module.dependencies(None)
+    registered_dependencies[module.basename] = set(lst) - set([module.basename])
+
+# Get all_dependencies from registered_dependencies
+def add_dependency():
+    # One step of transitive closure: find the first key whose dependency
+    # set can grow, merge in that dependency's own dependencies, return.
+    for key in all_dependencies:
+        potentially_new_modules_for_key = None
+        new_modules_for_key = None
+        for currently_in in all_dependencies[key]:
+            if currently_in in all_dependencies:
+                potentially_new_modules_for_key = all_dependencies[currently_in] - set([key])
+                if not potentially_new_modules_for_key <= all_dependencies[key]:
+                    new_modules_for_key = potentially_new_modules_for_key.copy()
+                    break
+        if new_modules_for_key:
+            all_dependencies[key] |= new_modules_for_key
+            return
+
+
+all_dependencies = copy.deepcopy(registered_dependencies)
+direct_dependencies = copy.deepcopy(registered_dependencies)
+
+# Sort
+all_dependencies = OrderedDict(sorted(all_dependencies.items()))
+direct_dependencies = OrderedDict(sorted(direct_dependencies.items()))
+
+#print(direct_dependencies)
+
+# Iterate add_dependency() until the edge count stops changing (fixpoint)
+last_card = -1
+while True:
+    card = cartinality(all_dependencies)
+    # print(card)
+    if card == last_card:
+        break
+    last_card = card
+    add_dependency()
+
+# Return true iff a depends on b,
+# i.e. b is in the dependencies of a
+def depends_on(a, b):
+    if not a in direct_dependencies:
+        return False
+    else:
+        return b in direct_dependencies[a]
+
+def remove_indirect_dependencies():
+    # Drop one dependency that is already implied transitively, then
+    # return; the caller loops until a fixpoint is reached.
+    for mod in direct_dependencies:
+        for one in direct_dependencies[mod]:
+            others = direct_dependencies[mod] - set([one])
+            for other in others:
+                if depends_on(other, one):
+                    direct_dependencies[mod].remove(one)
+                    return
+    # Go to next mod
+
+# Iterate remove_indirect_dependencies() to a fixpoint as well
+last_card = -1
+while True:
+    card = cartinality(direct_dependencies)
+    # print(card)
+    if card == last_card:
+        break
+    last_card = card
+    remove_indirect_dependencies()
+
+def openfile(f):
+    # pylint: disable=no-member
+    # os.startfile is available on Windows only
+    if sys.platform.startswith('linux'):
+        subprocess.call(["xdg-open", f])
+    else:
+        os.startfile(f)
+
+if args.verbose:
+    print("Done resolving dependencies.")
+
+if args.mode == "list":
+    if args.all:
+        for key in all_dependencies:
+            print(key.ljust(17) + " : " + ", ".join(sorted(all_dependencies[key])))
+    else:
+        for key in direct_dependencies:
+            print(key.ljust(17) + " : " + ", ".join(sorted(direct_dependencies[key])))
+
+if args.mode == "draw":
+    # Imported lazily so list mode works without graphviz installed
+    import graphviz as gv
+    import tempfile
+
+    tmpdir = tempfile.mkdtemp(prefix="botan-")
+
+    g2 = gv.Digraph(format=args.format, engine=args.engine)
+    g2.attr('graph', rankdir='RL') # draw horizontally
+    for key in direct_dependencies:
+        g2.node(key)
+        for dep in direct_dependencies[key]:
+            g2.edge(key, dep)
+
+    if args.verbose:
+        print("Rendering graph ...")
+    filename = g2.render(filename='graph', directory=tmpdir)
+
+    if args.verbose:
+        print("Opening " + filename + " ...")
+    openfile(filename)
diff --git a/comm/third_party/botan/src/scripts/test_all_configs.py b/comm/third_party/botan/src/scripts/test_all_configs.py
new file mode 100755
index 0000000000..227abd952f
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/test_all_configs.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+
+"""
+This configures and builds with many different sub-configurations
+in an attempt to flush out missing feature macro checks, etc.
+
+There is probably no reason for you to run this. Unless you want to.
+
+(C) 2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import optparse # pylint: disable=deprecated-module
+import sys
+import subprocess
+
+def get_module_list(configure_py):
+ configure = subprocess.Popen([configure_py, '--list-modules'], stdout=subprocess.PIPE)
+
+ (stdout, _) = configure.communicate()
+
+ if configure.returncode != 0:
+ raise Exception("Running configure.py --list-modules failed")
+
+ modules = [s.decode('ascii') for s in stdout.split()]
+ modules.remove('tpm') # can't test
+ modules.remove('base') # can't remove
+ return modules
+
+def get_concurrency():
+ def_concurrency = 2
+
+ try:
+ import multiprocessing
+ return max(def_concurrency, multiprocessing.cpu_count())
+ except ImportError:
+ return def_concurrency
+
+def try_to_run(cmdline):
+ print("Running %s ... " % (' '.join(cmdline)))
+ sys.stdout.flush()
+
+ cmd = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = cmd.communicate()
+
+ failed = (cmd.returncode != 0)
+
+ if failed:
+ print("FAILURE")
+ print(stdout)
+ print(stderr)
+ sys.stdout.flush()
+
+ return not failed
+
+def run_test_build(configure_py, modules, include, jobs, run_tests):
+ config = [configure_py, '--without-documentation']
+
+ if include:
+ config.append('--minimized')
+ if modules:
+ config.append('--enable-modules=' + ','.join(modules))
+ else:
+ config.append('--disable-modules=' + ','.join(modules))
+
+ if try_to_run(config) is False:
+ return False
+
+ if try_to_run(['make', '-j', str(jobs)]) is False:
+ return False
+
+ if run_tests is False:
+ return True
+
+ # Flaky test causing errors when running tests
+ tests_to_skip = []
+
+ cmdline = ['./botan-test', '--test-threads=%d' % (jobs)]
+
+ if len(tests_to_skip) > 0:
+ cmdline.append('--skip-tests=%s' % (','.join(tests_to_skip)))
+
+ return try_to_run(cmdline)
+
+def main(args):
+
+ # TODO take configure.py and botan-test paths via options
+
+ parser = optparse.OptionParser()
+
+ parser.add_option('--run-tests', default=False, action='store_true')
+ parser.add_option('--jobs', default=get_concurrency(),
+ help="jobs to run (default %default)")
+
+ (options, args) = parser.parse_args(args)
+
+ run_tests = options.run_tests
+ jobs = int(options.jobs)
+
+ configure_py = './configure.py'
+ modules = get_module_list(configure_py)
+
+ cant_disable = ['block', 'hash', 'hex', 'mac', 'modes', 'rng', 'stream', 'utils', 'cpuid', 'entropy']
+ always_include = ['thread_utils', 'sha2_64']#, 'sha2_64', 'aes']
+
+ fails = 0
+ failed = []
+
+ for module in sorted(modules):
+ if (module in always_include) or (module in cant_disable):
+ continue # already testing it
+
+ extra = []
+ if module == 'auto_rng':
+ extra.append('dev_random')
+ if run_test_build(configure_py, [module] + always_include + extra, True, jobs, run_tests) is False:
+ failed.append(module)
+ fails += 1
+
+ for module in sorted(modules):
+ if module in cant_disable or module in always_include:
+ continue
+ if run_test_build(configure_py, [module], False, jobs, run_tests) is False:
+ failed.append(module)
+ fails += 1
+
+ if len(failed) > 0:
+ print("Failed building with %s" % (' '.join(failed)))
+ else:
+ print("All configurations ok")
+
+ return fails
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/comm/third_party/botan/src/scripts/test_cli.py b/comm/third_party/botan/src/scripts/test_cli.py
new file mode 100755
index 0000000000..4e0f8ab830
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/test_cli.py
@@ -0,0 +1,1429 @@
+#!/usr/bin/python
+
+"""
+(C) 2018,2019 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import subprocess
+import sys
+import os
+import logging
+import optparse # pylint: disable=deprecated-module
+import time
+import shutil
+import tempfile
+import re
+import random
+import json
+import binascii
+import multiprocessing
+from multiprocessing.pool import ThreadPool
+
+# pylint: disable=global-statement,unused-argument
+
+CLI_PATH = None
+TESTS_RUN = 0
+TESTS_FAILED = 0
+
+class TestLogHandler(logging.StreamHandler, object):
+ def emit(self, record):
+ # Do the default stuff first
+ super(TestLogHandler, self).emit(record)
+ if record.levelno >= logging.ERROR:
+ global TESTS_FAILED
+ TESTS_FAILED += 1
+
+def setup_logging(options):
+ if options.verbose:
+ log_level = logging.DEBUG
+ elif options.quiet:
+ log_level = logging.WARNING
+ else:
+ log_level = logging.INFO
+
+ lh = TestLogHandler(sys.stdout)
+ lh.setFormatter(logging.Formatter('%(levelname) 7s: %(message)s'))
+ logging.getLogger().addHandler(lh)
+ logging.getLogger().setLevel(log_level)
+
+def random_port_number():
+ return random.randint(1024, 65535)
+
+def test_cli(cmd, cmd_options, expected_output=None, cmd_input=None, expected_stderr=None, use_drbg=True):
+ global TESTS_RUN
+
+ TESTS_RUN += 1
+
+ opt_list = []
+
+ if isinstance(cmd_options, str):
+ opt_list = cmd_options.split(' ')
+ elif isinstance(cmd_options, list):
+ opt_list = cmd_options
+
+ if use_drbg:
+ fixed_drbg_seed = "802" * 32
+ drbg_options = ['--rng-type=drbg', '--drbg-seed=' + fixed_drbg_seed]
+ else:
+ drbg_options = []
+
+ cmdline = [CLI_PATH, cmd] + drbg_options + opt_list
+
+ logging.debug("Executing '%s'" % (' '.join([CLI_PATH, cmd] + opt_list)))
+
+ stdout = None
+ stderr = None
+
+ if cmd_input is None:
+ proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ else:
+ proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate(cmd_input.encode())
+
+ if stderr:
+ if expected_stderr is None:
+ logging.error("Got output on stderr %s (stdout was %s)", stderr, stdout)
+ else:
+ if stderr != expected_stderr:
+ logging.error("Got output on stderr %s which did not match expected value %s", stderr, expected_stderr)
+ else:
+ if expected_stderr is not None:
+ logging.error('Expected output on stderr but got nothing')
+
+ output = stdout.decode('ascii').strip()
+
+ if expected_output is not None:
+ if output != expected_output:
+ logging.error("Got unexpected output running cmd %s %s", cmd, cmd_options)
+ logging.info("Output lengths %d vs expected %d", len(output), len(expected_output))
+ logging.info("Got %s", output)
+ logging.info("Exp %s", expected_output)
+
+ return output
+
+def check_for_command(cmd):
+ cmdline = [CLI_PATH, 'has_command', cmd]
+ proc = subprocess.Popen(cmdline)
+ proc.communicate()
+
+ return proc.returncode == 0
+
+def cli_config_tests(_tmp_dir):
+ prefix = test_cli("config", "prefix")
+ cflags = test_cli("config", "cflags")
+ ldflags = test_cli("config", "ldflags")
+ libs = test_cli("config", "libs")
+
+ if len(prefix) < 4 or prefix[0] != '/':
+ logging.error("Bad prefix %s" % (prefix))
+ if ("-I%s/include/botan-2" % (prefix)) not in cflags:
+ logging.error("Bad cflags %s" % (cflags))
+ if not ldflags.endswith(("-L%s/lib" % (prefix))):
+ logging.error("Bad ldflags %s" % (ldflags))
+ if "-lbotan-2" not in libs:
+ logging.error("Bad libs %s" % (libs))
+
+def cli_help_tests(_tmp_dir):
+ output = test_cli("help", None, None)
+
+ # Maybe test format somehow??
+ if len(output) < 500:
+ logging.error("Help output seems very short")
+
+def cli_version_tests(_tmp_dir):
+ output = test_cli("version", None, None)
+
+ version_re = re.compile(r'[0-9]\.[0-9]+\.[0-9]')
+ if not version_re.match(output):
+ logging.error("Unexpected version output %s" % (output))
+
+ output = test_cli("version", ["--full"], None, None)
+ version_full_re = re.compile(r'Botan [0-9]\.[0-9]+\.[0-9] \(.* revision .*, distribution .*\)$')
+ if not version_full_re.match(output):
+ logging.error("Unexpected version output %s" % (output))
+
+def cli_is_prime_tests(_tmp_dir):
+ test_cli("is_prime", "5", "5 is probably prime")
+ test_cli("is_prime", "9", "9 is composite")
+ test_cli("is_prime", "548950623407687320763", "548950623407687320763 is probably prime")
+
+def cli_gen_prime_tests(_tmp_dir):
+ test_cli("gen_prime", "64", "15568813029901363163")
+ test_cli("gen_prime", "128", "287193909494025008847286845478788766073")
+
+def cli_cycle_counter(_tmp_dir):
+ output = test_cli("cpu_clock", None, None)
+
+ if output.startswith('No CPU cycle counter on this machine'):
+ return
+
+ have_clock_re = re.compile(r'Estimated CPU clock [0-9\.]+ (M|G)Hz')
+
+ if have_clock_re.match(output):
+ return
+
+ logging.error('Unexpected output from cpu_clock: %s', output)
+
+def cli_entropy_tests(_tmp_dir):
+ output = test_cli("entropy", ["all"], None)
+
+ status_re = re.compile('Polling [a-z0-9_]+ gathered [0-9]+ bytes in [0-9]+ outputs with estimated entropy [0-9]+')
+ unavail_re = re.compile('Source [a-z0-9_]+ is unavailable')
+ comp_re = re.compile('Sample from [a-z0-9_]+ was .* compressed from [0-9]+ bytes to [0-9]+ bytes')
+ output_re = re.compile(r'[A-F0-9]+(...)?')
+
+ status_next = True
+
+ for line in output.split('\n'):
+ if comp_re.match(line):
+ continue
+
+ if status_next:
+ if status_re.match(line) is not None:
+ status_next = False
+ elif unavail_re.match(line) is not None:
+ pass
+ else:
+ logging.error('Unexpected status line %s', line)
+ status_next = False
+ else:
+ if output_re.match(line) is None:
+ logging.error('Unexpected sample line %s', line)
+ status_next = True
+
+def cli_factor_tests(_tmp_dir):
+ test_cli("factor", "97", "97: 97")
+ test_cli("factor", "9753893489562389", "9753893489562389: 21433 455087644733")
+ test_cli("factor", "12019502040659149507", "12019502040659149507: 3298628633 3643787579")
+
+def cli_mod_inverse_tests(_tmp_dir):
+ test_cli("mod_inverse", "97 802", "339")
+ test_cli("mod_inverse", "98 802", "0")
+
+def cli_base64_tests(_tmp_dir):
+ test_cli("base64_enc", "-", "YmVlcyE=", "bees!")
+ test_cli("base64_dec", "-", "bees!", "YmVlcyE=")
+
+def cli_base32_tests(_tmp_dir):
+ test_cli("base32_enc", "-", "MJSWK4ZB", "bees!")
+ test_cli("base32_dec", "-", "bees!", "MJSWK4ZB")
+
+def cli_base58_tests(_tmp_dir):
+ test_cli("base58_enc", "-", "C6sRAr4", "bees!")
+ test_cli("base58_dec", "-", "bees!", "C6sRAr4")
+
+ test_cli("base58_enc", ["--check", "-"], "Cjv15cdjaBc", "F00F")
+ test_cli("base58_dec", ["--check", "-"], "F00F", "Cjv15cdjaBc")
+
+def cli_hex_tests(_tmp_dir):
+ test_cli("hex_enc", "-", "6265657321", "bees!")
+ test_cli("hex_dec", "-", "bees!", "6265657321")
+
+def cli_hash_tests(_tmp_dir):
+ test_cli("hash", "--algo=SHA-256",
+ "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855 -", "")
+
+ test_cli("hash", "--algo=SHA-256",
+ "BA7816BF8F01CFEA414140DE5DAE2223B00361A396177A9CB410FF61F20015AD -", "abc")
+
+ test_cli("hash", ["--algo=SHA-256", "--format=base64"],
+ "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= -", "abc")
+
+ test_cli("hash", ["--algo=SHA-224", "--format=base58", "--no-fsname"],
+ "MuGc8HkSVyJjfMjPM5UQikPToBTzNucEghcGLe", "abc")
+
+ test_cli("hash", ["--algo=SHA-224", "--format=base58check", "--no-fsname"],
+ "3MmfMqgrhemdVa9bDAGfooukbviWtKMBx2xauL2RsyAe", "abc")
+
+def cli_hmac_tests(tmp_dir):
+ key_file = os.path.join(tmp_dir, 'hmac.key')
+
+ test_cli("rng", ["64", "--output=%s" % (key_file)], "")
+
+ test_cli("hmac", ["--no-fsname", "--hash=SHA-384", key_file, key_file],
+ "E3A8529377030B28A7DBDFC50DDEC8E4ECEFB6EA850D95EB785938CD3E3AFEF9EF8B08AF219C1496633193468AB755CB")
+
+def cli_bcrypt_tests(_tmp_dir):
+ test_cli("gen_bcrypt", "--work-factor=4 s3kr1t",
+ "$2a$04$0.8G7o08XYwvBBWA3l0WUujtwoGZgGDzVSN8fNkNqXikcK4A3lHPS")
+
+ test_cli("check_bcrypt", "s3kr1t $2a$04$gHX4Qg7pDSJuXiPXnmt8leyb.FFzX1Bv4rXwIj2cPSakJ8zNnhIka",
+ "Password is valid")
+
+ test_cli("check_bcrypt", "santa $2a$04$gHX4Qg7pDSJuXiPXnmt8leyb.FFzX1Bv4rXwIj2cPSakJ8zNnhIka",
+ "Password is NOT valid")
+
+def cli_argon2_tests(_tmp_dir):
+ password = "s3kr1t"
+ expected = "$argon2id$v=19$m=8,t=1,p=1$2A+I9q2+ZayxDDYC5n2YWw$/Lhx+Jbtlpw+Kxpskfv7+AKhBL/5ebalTJkVC1O5+1E"
+ test_cli("gen_argon2", ['--mem=8', password], expected)
+ test_cli("gen_argon2", ['--mem=8', '--t=1', password], expected)
+ test_cli("gen_argon2", ['--mem=8', '--t=1', '--p=1', password], expected)
+
+ test_cli("check_argon2", [password, expected], "Password is valid")
+ test_cli("check_argon2", ["guessing", expected], "Password is NOT valid")
+
+def cli_gen_dl_group_tests(_tmp_dir):
+
+ pem = """-----BEGIN X9.42 DH PARAMETERS-----
+MIIBJAKBgwTw7LQiLkXJsrgMVQxTPlWaQlYz/raZ+5RtIZe4YluQgRQGPFADLZ/t
+TOYzuIzZJFOcdKtEtrVkxZRGSkjZwKFKLUD6fzSjoC2M2EHktK/y5HsvxBxL4tKr
+q1ffbyPQi+iBLYTZAXygvxj2vWyrvA+/w4nbt1fStCHTDhWjLWqFpV9nAoGDAKzA
+HUu/IRl7OiUtW/dz36gzEJnaYtz4ZtJl0FG8RJiOe02lD8myqW2sVzYqMvKD0LGx
+x9fdSKC1G+aZ/NWtqrQjb66Daf7b0ddDx+bfWTWJ2dOtZd8IL2rmQQJm+JogDi9i
+huVYFicDNQGzi+nEKAzrZ1L/VxtiSiw/qw0IyOuVtz8CFjgPiPatvmWssQw2AuZ9
+mFvAZ/8wal0=
+-----END X9.42 DH PARAMETERS-----"""
+
+ test_cli("gen_dl_group", "--pbits=1043", pem)
+
+ dsa_grp = """-----BEGIN X9.42 DH PARAMETERS-----
+MIIBHgKBgQCyP1vosC/axliM2hmJ9EOSdd1zBkuzMP25CYD8PFkRVrPLr1ClSUtn
+eXTIsHToJ7d7sRwtidQGW9BrvUEyiAWE06W/wnLPxB3/g2/l/P2EhbNmNHAO7rV7
+ZVz/uKR4Xcvzxg9uk5MpT1VsxA8H6VEwzefNF1Rya92rqGgBTNT3/wKBgC7HLL8A
+Gu3tqJxTk1iNgojjOiSreLn6ihA8R8kQnRXDTNtDKz996KHGInfMBurUI1zPM3xq
+bHc0CvU1Nf87enhPIretzJcFgiCWrNFUIC25zPEjp0s3/ERHT4Bi1TABZ3j6YUEQ
+fnnj+9XriKKHf2WtX0T4FXorvnKq30m934rzAhUAvwhWDK3yZEmphc7dwl4/J3Zp
++MU=
+-----END X9.42 DH PARAMETERS-----"""
+
+ test_cli("gen_dl_group", ["--type=dsa", "--pbits=1024"], dsa_grp)
+
+
+def cli_key_tests(tmp_dir):
+
+ pem = """-----BEGIN PRIVATE KEY-----
+MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQg2A+I9q2+ZayxDDYC5n2Y
+W8Bn/zBm4D3mwS5qMwADRDehRANCAATwnDFqsjXL9SD/Rr1Vy4pb79PswXdQNZBN
+mlLtJ5JvZ0/p6zP3x+Y9yPIrAR8L/acG5ItSrAKXzzuqQQZMv4aN
+-----END PRIVATE KEY-----"""
+
+ priv_key = os.path.join(tmp_dir, 'priv.pem')
+ pub_key = os.path.join(tmp_dir, 'pub.pem')
+ pub_der_key = os.path.join(tmp_dir, 'pub.der')
+ enc_pem = os.path.join(tmp_dir, 'priv_enc.pem')
+ enc_der = os.path.join(tmp_dir, 'priv_enc.der')
+ ca_cert = os.path.join(tmp_dir, 'ca.crt')
+ crt_req = os.path.join(tmp_dir, 'crt.req')
+ user_cert = os.path.join(tmp_dir, 'user.crt')
+
+ test_cli("keygen", ["--algo=ECDSA", "--params=secp256k1"], pem)
+
+ test_cli("keygen", ["--algo=ECDSA", "--params=secp256r1", "--output=" + priv_key], "")
+
+ test_cli("pkcs8", "--pub-out --output=%s %s" % (pub_key, priv_key), "")
+ test_cli("pkcs8", "--pub-out --der-out --output=%s %s" % (pub_der_key, priv_key), "")
+
+ test_cli("pkcs8", "--pass-out=foof --der-out --output=%s %s" % (enc_der, priv_key), "")
+ test_cli("pkcs8", "--pass-out=foof --output=%s %s" % (enc_pem, priv_key), "")
+
+ dec_pem = test_cli("pkcs8", ["--pass-in=foof", enc_pem], None)
+ dec_der = test_cli("pkcs8", ["--pass-in=foof", enc_der], None)
+
+ if dec_pem != dec_der:
+ logging.error("Problem decrypting PKCS8 key")
+
+ test_cli("fingerprint", ['--no-fsname', pub_key],
+ "83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4")
+
+ test_cli("fingerprint", ['--no-fsname', pub_der_key],
+ "83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4")
+
+ test_cli("fingerprint", ['--no-fsname', pub_key, pub_der_key],
+ "83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4\n"
+ "83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4")
+
+ test_cli("fingerprint", [pub_der_key],
+ pub_der_key +
+ ": 83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4")
+
+ test_cli("fingerprint", ['-'],
+ "83:FC:67:87:30:C7:0C:9C:54:9A:E7:A1:FA:25:83:4C:77:A4:43:16:33:6D:47:3C:CE:4B:91:62:30:97:62:D4",
+ open(pub_key, 'rb').read().decode())
+
+ valid_sig = "nI4mI1ec14Y7nYUWs2edysAVvkob0TWpmGh5rrYWDA+/W9Fj0ZM21qJw8qa3/avAOIVBO6hoMEVmfJYXlS+ReA=="
+
+ test_cli("sign", "--provider=base %s %s" % (priv_key, pub_key), valid_sig)
+
+ test_cli("verify", [pub_key, pub_key, '-'],
+ "Signature is valid", valid_sig)
+
+ test_cli("verify", [pub_key, pub_key, '-'],
+ "Signature is invalid",
+ valid_sig.replace("G", "H"))
+
+ test_cli("gen_self_signed",
+ [priv_key, "CA", "--ca", "--country=VT",
+ "--dns=ca.example", "--hash=SHA-384", "--output="+ca_cert],
+ "")
+
+ test_cli("cert_verify", ca_cert, "Certificate did not validate - Cannot establish trust")
+
+ cert_info = test_cli("cert_info", ['--fingerprint', ca_cert], None)
+
+ if cert_info.find('Subject: CN="CA",C="VT"') < 0:
+ logging.error('Unexpected output for cert_info command %s', cert_info)
+ if cert_info.find('Subject keyid: 69DD911C9EEE3400C67CBC3F3056CBE711BD56AF9495013F') < 0:
+ logging.error('Unexpected output for cert_info command %s', cert_info)
+
+ test_cli("gen_pkcs10", "%s User --output=%s" % (priv_key, crt_req))
+
+ test_cli("sign_cert", "%s %s %s --output=%s" % (ca_cert, priv_key, crt_req, user_cert))
+
+ test_cli("cert_verify", [user_cert, ca_cert],
+ "Certificate passes validation checks")
+
+ test_cli("cert_verify", user_cert,
+ "Certificate did not validate - Certificate issuer not found")
+
+def cli_xmss_sign_tests(tmp_dir):
+ priv_key = os.path.join(tmp_dir, 'priv.pem')
+ pub_key = os.path.join(tmp_dir, 'pub.pem')
+ pub_key2 = os.path.join(tmp_dir, 'pub2.pem')
+ msg = os.path.join(tmp_dir, 'input')
+ sig1 = os.path.join(tmp_dir, 'sig1')
+ sig2 = os.path.join(tmp_dir, 'sig2')
+
+ test_cli("rng", ['--output=%s' % (msg)], "")
+ test_cli("hash", ["--no-fsname", msg], "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855")
+
+ test_cli("keygen", ["--algo=XMSS", "--output=%s" % (priv_key)], "")
+ test_cli("hash", ["--no-fsname", priv_key], "5B38F737BA41BE7F40433DB30EAEF7C41ABB0F7D9E7A09DEB5FDCE7B6811693F")
+
+ test_cli("pkcs8", "--pub-out --output=%s %s" % (pub_key, priv_key), "")
+ test_cli("fingerprint", ['--no-fsname', pub_key],
+ "B0:F4:98:6E:D8:4E:05:63:A1:D8:4B:37:61:5A:A0:41:78:7E:DE:0E:72:46:E0:A8:D6:CF:09:54:08:DA:A4:22")
+
+ # verify the key is updated after each signature:
+ test_cli("sign", [priv_key, msg, "--output=%s" % (sig1)], "")
+ test_cli("verify", [pub_key, msg, sig1], "Signature is valid")
+ test_cli("hash", ["--no-fsname", sig1], "04AF45451C7A9AF2D828E1AD6EC262E012436F4087C5DA6F32C689D781E597D0")
+ test_cli("hash", ["--no-fsname", priv_key], "67929FAEC636E43DE828C1CD7E2D11CE7C3388CE90DD0A0F687C6627FFA850CD")
+
+ test_cli("sign", [priv_key, msg, "--output=%s" % (sig2)], "")
+ test_cli("verify", [pub_key, msg, sig2], "Signature is valid")
+ test_cli("hash", ["--no-fsname", sig2], "0785A6AD54CC7D01F2BE2BC6463A3EAA1159792E52210ED754992C5068E8F24F")
+ test_cli("hash", ["--no-fsname", priv_key], "1940945D68B1CF54D79E05DD7913A4D0B4959183F1E12B81A4E43EF4E63FBD20")
+
+ # private key updates, public key is unchanged:
+ test_cli("pkcs8", "--pub-out --output=%s %s" % (pub_key2, priv_key), "")
+ test_cli("fingerprint", ['--no-fsname', pub_key2],
+ "B0:F4:98:6E:D8:4E:05:63:A1:D8:4B:37:61:5A:A0:41:78:7E:DE:0E:72:46:E0:A8:D6:CF:09:54:08:DA:A4:22")
+
+def cli_pbkdf_tune_tests(_tmp_dir):
+ if not check_for_command("pbkdf_tune"):
+ return
+
+ expected = re.compile(r'For (default|[1-9][0-9]*) ms selected Scrypt\([0-9]+,[0-9]+,[0-9]+\) using [0-9]+ MiB')
+
+ output = test_cli("pbkdf_tune", ["--check", "1", "10", "50", "default"], None).split('\n')
+
+ for line in output:
+ if expected.match(line) is None:
+ logging.error("Unexpected line '%s'" % (line))
+
+ expected_pbkdf2 = re.compile(r'For (default|[1-9][0-9]*) ms selected PBKDF2\(HMAC\(SHA-256\),[0-9]+\)')
+
+ output = test_cli("pbkdf_tune", ["--algo=PBKDF2(SHA-256)", "--check", "1", "10", "50", "default"], None).split('\n')
+
+ for line in output:
+ if expected_pbkdf2.match(line) is None:
+ logging.error("Unexpected line '%s'" % (line))
+
+ expected_argon2 = re.compile(r'For (default|[1-9][0-9]*) ms selected Argon2id\([0-9]+,[0-9]+,[0-9]+\)')
+
+ output = test_cli("pbkdf_tune", ["--algo=Argon2id", "--check", "1", "10", "50", "default"], None).split('\n')
+
+ for line in output:
+ if expected_argon2.match(line) is None:
+ logging.error("Unexpected line '%s'" % (line))
+
+def cli_psk_db_tests(tmp_dir):
+ if not check_for_command("psk_get"):
+ return
+
+ psk_db = os.path.join(tmp_dir, 'psk.db')
+ db_key1 = "909"*32
+ db_key2 = "451"*32
+
+ test_cli("psk_set", [psk_db, db_key1, "name", "F00FEE"], "")
+ test_cli("psk_set", [psk_db, db_key2, "name", "C00FEE11"], "")
+ test_cli("psk_set", [psk_db, db_key1, "name2", "50051029"], "")
+
+ test_cli("psk_get", [psk_db, db_key1, "name"], "F00FEE")
+ test_cli("psk_get", [psk_db, db_key2, "name"], "C00FEE11")
+
+ test_cli("psk_list", [psk_db, db_key1], "name\nname2")
+ test_cli("psk_list", [psk_db, db_key2], "name")
+
+def cli_compress_tests(tmp_dir):
+
+ if not check_for_command("compress"):
+ return
+
+ input_file = os.path.join(tmp_dir, 'input.txt')
+ output_file = os.path.join(tmp_dir, 'input.txt.gz')
+
+ with open(input_file, 'w') as f:
+ f.write("hi there")
+ f.close()
+
+ test_cli("compress", input_file)
+
+ if not os.access(output_file, os.R_OK):
+ logging.error("Compression did not created expected output file")
+
+ is_py3 = sys.version_info[0] == 3
+
+ output_hdr = open(output_file, 'rb').read(2)
+
+ if is_py3:
+ if output_hdr[0] != 0x1F or output_hdr[1] != 0x8B:
+ logging.error("Did not see expected gzip header")
+ else:
+ if ord(output_hdr[0]) != 0x1F or ord(output_hdr[1]) != 0x8B:
+ logging.error("Did not see expected gzip header")
+
+ os.unlink(input_file)
+
+ test_cli("decompress", output_file)
+
+ if not os.access(input_file, os.R_OK):
+ logging.error("Decompression did not created expected output file")
+
+ recovered = open(input_file).read()
+ if recovered != "hi there":
+ logging.error("Decompression did not recover original input")
+
+def cli_rng_tests(_tmp_dir):
+ test_cli("rng", "10", "D80F88F6ADBE65ACB10C")
+ test_cli("rng", "16", "D80F88F6ADBE65ACB10C3602E67D985B")
+ test_cli("rng", "10 6", "D80F88F6ADBE65ACB10C\n1B119CC068AF")
+
+ test_cli("rng", ['--format=base64', '10'], "2A+I9q2+ZayxDA==")
+ test_cli("rng", ['--format=base58', '10'], "D93XRyVfxqs7oR")
+ test_cli("rng", ['--format=base58check', '10'], "2NS1jYUq92TyGFVnhVLa")
+
+ hex_10 = re.compile('[A-F0-9]{20}')
+
+ for rng in ['system', 'auto', 'entropy']:
+ output = test_cli("rng", ["10", '--%s' % (rng)], use_drbg=False)
+ if output == "D80F88F6ADBE65ACB10C":
+ logging.error('RNG produced DRBG output')
+ if hex_10.match(output) is None:
+ logging.error('Unexpected RNG output %s' % (output))
+
+ has_rdrand = test_cli("cpuid", []).find(' rdrand ') > 0
+
+ if has_rdrand:
+ output = test_cli("rng", ["10", '--rdrand'], use_drbg=False)
+
+ if output == "D80F88F6ADBE65ACB10C":
+ logging.error('RDRAND produced DRBG output')
+ if hex_10.match(output) is None:
+ logging.error('Unexpected RNG output %s' % (output))
+
+def cli_roughtime_check_tests(tmp_dir):
+ # pylint: disable=line-too-long
+ if not check_for_command("roughtime_check"):
+ return
+ chain = os.path.join(tmp_dir, 'roughtime-chain')
+
+ with open(chain, 'w') as f:
+ f.write("""\
+ed25519 bbT+RPS7zKX6w71ssPibzmwWqU9ffRV5oj2OresSmhE= eu9yhsJfVfguVSqGZdE8WKIxaBBM0ZG3Vmuc+IyZmG2YVmrIktUByDdwIFw6F4rZqmSFsBO85ljoVPz5bVPCOw== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWBnGOEajOwPA6G7oL47seBP4C7eEpr57H43C2/fK/kMA0UGZVUdf4KNX8oxOK6JIcsbVk8qhghTwA70qtwpYmQkDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AJrA8tEqPBQAqisiuAxgy2Pj7UJAiWbCdzGz1xcCnja3T+AqhC8fwpeIwW4GPy/vEb/awXW2DgSLKJfzWIAz+2lsR7t4UjNPvAgAAAEAAAABTSUcAREVMRes9Ch4X0HIw5KdOTB8xK4VDFSJBD/G9t7Et/CU7UW61OiTBXYYQTG2JekWZmGa0OHX1JPGG+APkpbsNw0BKUgYDAAAAIAAAACgAAABQVUJLTUlOVE1BWFR/9BWjpsWTQ1f6iUJea3EfZ1MkX3ftJiV3ABqNLpncFwAAAAAAAAAA//////////8AAAAA
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= uLeTON9D+2HqJMzK6sYWLNDEdtBl9t/9yw1cVAOm0/sONH5Oqdq9dVPkC9syjuWbglCiCPVF+FbOtcxCkrgMmA== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWOw1jl0uSiBEH9HE8/6r7zxoSc01f48vw+UzH8+VJoPelnvVJBj4lnH8uRLh5Aw0i4Du7XM1dp2u0r/I5PzhMQoDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AUBo+tEqPBQC47l77to7ESFTVhlw1SC74P5ssx6gpuJ6eP+1916GuUiySGE/x3Fp0c3otUGAdsRQou5p9PDTeane/YEeVq4/8AgAAAEAAAABTSUcAREVMRe5T1ml8wHyWAcEtHP/U5Rg/jFXTEXOSglngSa4aI/CECVdy4ZNWeP6vv+2//ZW7lQsrWo7ZkXpvm9BdBONRSQIDAAAAIAAAACgAAABQVUJLTUlOVE1BWFQpXlenV0OfVisvp9jDHXLw8vymZVK9Pgw9k6Edf8ZEhUgSGEc5jwUASHLvZE2PBQAAAAAA
+ed25519 etPaaIxcBMY1oUeGpwvPMCJMwlRVNxv51KK/tktoJTQ= U53wX99JzZwy4BXa9C6R04bPu4yqFB5w5/wTgG8Mw5wm+VLrY70ECxJ9ZHnpdHVHaLEU3aeLnQFZyZPRAEOCyw== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWMh3mPWCCbOlX8xDWbU9qdfKoReJX/XLsivom8bJJYmcC7T03tyXrtWUheEJweHtg4qMgSyifQS1MjHJSy1jPAsDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8Akxw/tEqPBQBfOsOuciR7jiAW5itQ39y8yVr/ZJmgMwvTjqaU4/wA05ZqG4RqoLdvDXh5bCNySL6LrrnBNSAHwn5COt0CItNuAgAAAEAAAABTSUcAREVMRVP3BIOzsZmuxqMi+ScIBPyKtzFfK7ZlPFNP0JrNwln2QYtAcQFIKywDdNAAL+n8i3dz1p99K50FJjCkCl2J6AMDAAAAIAAAACgAAABQVUJLTUlOVE1BWFQKC/kZVdjiNT2NCSGfnpot4eqipyMFsyMjiIQmqqqXqQCAa245jwUAAGCgA56PBQAAAAAA
+ed25519 AW5uAoTSTDfG5NfY1bTh08GUnOqlRb+HVhbJ3ODJvsE= IcZcXFuaLKYYhWcK3sT/6PrVeXMmabCRbf9hvVfkMkqEW1PFL++ZnHJ1/m+G8azITxvktwsfP1YAOOxWdbf9XQ== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWL5DAl8GPNUQ/mSXl0tI4N9yZAO+PiXTodJOTDL+WU/x26iqgyyQRikSSocRMzAEVLDGasdyW19mVC6H/6vfXggDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8Av/JAtEqPBQBIP346SHhCdDfughzeH+uYSbxngDYxqHzBDtZt0obUKrzxfRWzD1oR61B1reLvoPVCKSfzEngi/g1NSQjTrzNMAgAAAEAAAABTSUcAREVMRTQLLplQv0rN4p77Bo59qT8bbquV6MKSwILI/Tw2LLGo9noaZegUFmM+rNu1d1AVOEVQ01j6/2xDmBvp0d6MZgEDAAAAIAAAACgAAABQVUJLTUlOVE1BWFS4a1dYoIB5u/zkbR3sIteuhVrQkszzj+Gng9ywo6O9VgAAAAAAAAAA//////////8AAAAA
+ed25519 cj8GsiNlRkqiDElAeNMSBBMwrAl15hYPgX50+GWX/lA= Tsy82BBU2xxVqNe1ip11OyEGoKWhKoSggWjBmDTSBmKbTs7bPPCEidYc5TQ23sQUWe62G35fQOVU28q+Eq5uhQ== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWDAmi7zgXAqLgQXVfbjeqnUZRiXCZI64QIoAKFL83CQHbyXgB4cNwHfQ9mSg0hYxTp1M8QxOuzusnUpk05DIRwwDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AcOBCtEqPBQBhsr1mKOxxCf4VDFzAtYB4Nhs332AN1LrJU/8+VqktzfPd2R7awJHEVEWugvSvOrr+9d332mQObAkYfKfDtbSFAgAAAEAAAABTSUcAREVMRUjnhDvkIjFzTEYtgHOfMpRHtnNZj4P31RFtapkwzGjOtc93pYDd7zqQCw2AVcfbSnPqa8k26z96Q9fVRzq0pw8DAAAAIAAAACgAAABQVUJLTUlOVE1BWFR7qp2oerjpbN8Y23nUGARIlsgkodW4owH29ZKhxDMn8AAAAAAAAAAA//////////8AAAAA
+""")
+
+ test_cli("roughtime_check", chain, """\
+1: UTC 2019-08-04T13:38:17 (+-1000000us)
+ 2: UTC 2019-08-04T13:38:17 (+-1000000us)
+ 3: UTC 2019-08-04T13:38:17 (+-1000000us)
+ 4: UTC 2019-08-04T13:38:18 (+-1000000us)
+ 5: UTC 2019-08-04T13:38:18 (+-1000000us)""")
+
+ with open(chain, 'w') as f:
+ f.write("ed25519 bbT+RPS7zKX6w71ssPibzmwWqU9ffRV5oj2OresSmhE= eu9yhsJfVfguVSqGZdE8WKIxaBBM0ZG3Vmuc+IyZmG2YVmrIktUByDdwIFw6F4rZqmSFsBO85ljoVPz5bVPCOw== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWBnGOEajOwPA6G7oL47seBP4C7eEpr57H43C2/fK/kMA0UGZVUdf4KNX8oxOK6JIcsbVk8qhghTwA70qtwpYmQkDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AJrA8tEqPBQAqisiuAxgy2Pj7UJAiWbCdzGz1xcCnja3T+AqhC8fwpeIwW4GPy/vEb/awXW2DgSLKJfzWIAz+2lsR7t4UjNPvAgAAAEAAAABTSUcAREVMRes9Ch4X0HIw5KdOTB8xK4VDFSJBD/G9t7Et/CU7UW61OiTBXYYQTG2JekWZmGa0OHX1JPGG+APkpbsNw0BKUgYDAAAAIAAAACgAAABQVUJLTUlOVE1BWFR/9BWjpsWTQ1f6iUJea3EfZ1MkX3ftJiV3ABqNLpncFwAAAAAAAAAA//////////8AAAAA")
+ test_cli("roughtime_check", [chain, "--raw-time"], "1: UTC 1564925897781286 (+-1000000us)")
+
+ with open(chain, 'w') as f:
+ f.write("ed25519 cbT+RPS7zKX6w71ssPibzmwWqU9ffRV5oj2OresSmhE= eu9yhsJfVfguVSqGZdE8WKIxaBBM0ZG3Vmuc+IyZmG2YVmrIktUByDdwIFw6F4rZqmSFsBO85ljoVPz5bVPCOw== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWBnGOEajOwPA6G7oL47seBP4C7eEpr57H43C2/fK/kMA0UGZVUdf4KNX8oxOK6JIcsbVk8qhghTwA70qtwpYmQkDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AJrA8tEqPBQAqisiuAxgy2Pj7UJAiWbCdzGz1xcCnja3T+AqhC8fwpeIwW4GPy/vEb/awXW2DgSLKJfzWIAz+2lsR7t4UjNPvAgAAAEAAAABTSUcAREVMRes9Ch4X0HIw5KdOTB8xK4VDFSJBD/G9t7Et/CU7UW61OiTBXYYQTG2JekWZmGa0OHX1JPGG+APkpbsNw0BKUgYDAAAAIAAAACgAAABQVUJLTUlOVE1BWFR/9BWjpsWTQ1f6iUJea3EfZ1MkX3ftJiV3ABqNLpncFwAAAAAAAAAA//////////8AAAAA")
+ test_cli("roughtime_check", chain, expected_stderr=b'Error: Roughtime Invalid signature or public key\n')
+
+def cli_roughtime_tests(tmp_dir):
+ # pylint: disable=line-too-long
+ # pylint: disable=too-many-locals
+ import socket
+ import base64
+ import threading
+
+ if not check_for_command("roughtime"):
+ return
+
+ server_port = random_port_number()
+ chain_file = os.path.join(tmp_dir, 'roughtime-chain')
+ ecosystem = os.path.join(tmp_dir, 'ecosystem')
+
+ def run_udp_server():
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ server_address = ('127.0.0.1', server_port)
+ sock.bind(server_address)
+
+ while True:
+ data, address = sock.recvfrom(4096)
+
+ if data:
+ if data != base64.b64decode(server_request):
+ logging.error("unexpected request")
+
+ sock.sendto(base64.b64decode(server_response), address)
+
+ udp_thread = threading.Thread(target=run_udp_server)
+ udp_thread.daemon = True
+ udp_thread.start()
+
+ chain = [
+ """\
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= 2A+I9q2+ZayxDDYC5n2YW8Bn/zBm4D3mwS5qMwADRDcbFpBcf3yPOyeZiqpLBTkxo8GT8zMQFeApv4ScffjC8A== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWDwlo/AkUnTrecAW4Ci5Tkh3KOqs6R7KLTsFtq16RXN5F7G5ckGv11UtzHoZTbKbEk03a6ogAOK54Q2CI/7XGA8DAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AWDLihlaSBQAoq/5gEjRCrhfH16X2GYjQJSG/CgSuGhYeCsrw7XkphLI3cxw2unJRDW8DAJrYqEGaW0NPKZk7bbpPjU/Q6Es1AgAAAEAAAABTSUcAREVMRUJbs67Sb5Wx/jzWyT1PhWR0c4kg59tjSGofo8R3eHzcA9CGwavuRdxOArhVWWODG99gYgfmjcRLgt9/jH+99w4DAAAAIAAAACgAAABQVUJLTUlOVE1BWFRXRfQ1RHLWGOgqABUTYfVBDZrv3OL2nPLYve9ldfNVLOjdPVFFkgUA6D0Vb1mSBQAAAAAA
+""",
+ """\
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= 2A+I9q2+ZayxDDYC5n2YW8Bn/zBm4D3mwS5qMwADRDcbFpBcf3yPOyeZiqpLBTkxo8GT8zMQFeApv4ScffjC8A== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWDwlo/AkUnTrecAW4Ci5Tkh3KOqs6R7KLTsFtq16RXN5F7G5ckGv11UtzHoZTbKbEk03a6ogAOK54Q2CI/7XGA8DAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AWDLihlaSBQAoq/5gEjRCrhfH16X2GYjQJSG/CgSuGhYeCsrw7XkphLI3cxw2unJRDW8DAJrYqEGaW0NPKZk7bbpPjU/Q6Es1AgAAAEAAAABTSUcAREVMRUJbs67Sb5Wx/jzWyT1PhWR0c4kg59tjSGofo8R3eHzcA9CGwavuRdxOArhVWWODG99gYgfmjcRLgt9/jH+99w4DAAAAIAAAACgAAABQVUJLTUlOVE1BWFRXRfQ1RHLWGOgqABUTYfVBDZrv3OL2nPLYve9ldfNVLOjdPVFFkgUA6D0Vb1mSBQAAAAAA
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= 2A+I9q2+ZayxDDYC5n2YW8Bn/zBm4D3mwS5qMwADRDcbFpBcf3yPOyeZiqpLBTkxo8GT8zMQFeApv4ScffjC8A== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWHH5Ofs4HciIFXjE9egjDbistJptoMXIC7ugCgHhI4NPJqfYY256NpULXKc9c30ul7oHXQyKLfGd84mIAxC3UwQDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AuOoUh1aSBQANeC4gGGG3a23PpmF+y6CrUS9VWjyj0Ydpl2tMVDLaK2vd5QtYKKJ3UOyprGKk0D/aPn4E3Bk2rE3BKBZRXM1AAgAAAEAAAABTSUcAREVMRci9uvioJssgd8txxFlqz9RqPx+YLVMkHmm24fMUtYGWF/nhkoEYVGT7O+tXSfHHY/KHcUZjVaZpEt/tmXlXBAUDAAAAIAAAACgAAABQVUJLTUlOVE1BWFSxhKhavdriTvCAtNVcK5yr0cAbsWp2MsrwUV5YTc+7V0CsaLZSkgUAQAxA1GaSBQAAAAAA
+""",
+ """\
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= SbWKPilWYrt+1vgFU3jlxGNOH6I/1npX8wl+KoraN3S6VDsyM6EfCV+JPEK8BsNoM2VIpMcSdjcVna/GwXwZkg== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWHH5Ofs4HciIFXjE9egjDbistJptoMXIC7ugCgHhI4NPJqfYY256NpULXKc9c30ul7oHXQyKLfGd84mIAxC3UwQDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AuOoUh1aSBQANeC4gGGG3a23PpmF+y6CrUS9VWjyj0Ydpl2tMVDLaK2vd5QtYKKJ3UOyprGKk0D/aPn4E3Bk2rE3BKBZRXM1AAgAAAEAAAABTSUcAREVMRci9uvioJssgd8txxFlqz9RqPx+YLVMkHmm24fMUtYGWF/nhkoEYVGT7O+tXSfHHY/KHcUZjVaZpEt/tmXlXBAUDAAAAIAAAACgAAABQVUJLTUlOVE1BWFSxhKhavdriTvCAtNVcK5yr0cAbsWp2MsrwUV5YTc+7V0CsaLZSkgUAQAxA1GaSBQAAAAAA
+ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= 2A+I9q2+ZayxDDYC5n2YW8Bn/zBm4D3mwS5qMwADRDcbFpBcf3yPOyeZiqpLBTkxo8GT8zMQFeApv4ScffjC8A== BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWN5Y0b2irPS1JgqJFQMciPg4aWd9qj1ZqcJc5bGXe1m4ZdAXa5OIhXa0+680MgpyhEHhqYJDIwH1XRa1OZx5YAUDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AgBW3iFaSBQD9WI+Qr6NOZsDmP0PsnCo66mstM3ac5ZON+I+ZeEK8lZWBASvsD2JIfq3v4d1QH5g4STs3wOazQPc25Puy659ZAgAAAEAAAABTSUcAREVMRUJbs67Sb5Wx/jzWyT1PhWR0c4kg59tjSGofo8R3eHzcA9CGwavuRdxOArhVWWODG99gYgfmjcRLgt9/jH+99w4DAAAAIAAAACgAAABQVUJLTUlOVE1BWFRXRfQ1RHLWGOgqABUTYfVBDZrv3OL2nPLYve9ldfNVLOjdPVFFkgUA6D0Vb1mSBQAAAAAA
+""",
+ ]
+ request = [
+ "AgAAAEAAAABOT05DUEFE/9gPiPatvmWssQw2AuZ9mFvAZ/8wZuA95sEuajMAA0Q3GxaQXH98jzsnmYqqSwU5MaPBk/MzEBXgKb+EnH34wvAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
+ "AgAAAEAAAABOT05DUEFE/0m1ij4pVmK7ftb4BVN45cRjTh+iP9Z6V/MJfiqK2jd0ulQ7MjOhHwlfiTxCvAbDaDNlSKTHEnY3FZ2vxsF8GZIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
+ "AgAAAEAAAABOT05DUEFE/0AcDP0F/L7NTiOCQlHovyMlovVtG4lBRqAgydNYk9WOoanOwclZuV8z2b/SCHj5thxbSNxuLNZoDQ2b6TWgPfsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
+ ]
+ response = [
+ "BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWDwlo/AkUnTrecAW4Ci5Tkh3KOqs6R7KLTsFtq16RXN5F7G5ckGv11UtzHoZTbKbEk03a6ogAOK54Q2CI/7XGA8DAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AWDLihlaSBQAoq/5gEjRCrhfH16X2GYjQJSG/CgSuGhYeCsrw7XkphLI3cxw2unJRDW8DAJrYqEGaW0NPKZk7bbpPjU/Q6Es1AgAAAEAAAABTSUcAREVMRUJbs67Sb5Wx/jzWyT1PhWR0c4kg59tjSGofo8R3eHzcA9CGwavuRdxOArhVWWODG99gYgfmjcRLgt9/jH+99w4DAAAAIAAAACgAAABQVUJLTUlOVE1BWFRXRfQ1RHLWGOgqABUTYfVBDZrv3OL2nPLYve9ldfNVLOjdPVFFkgUA6D0Vb1mSBQAAAAAA",
+ "BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWHH5Ofs4HciIFXjE9egjDbistJptoMXIC7ugCgHhI4NPJqfYY256NpULXKc9c30ul7oHXQyKLfGd84mIAxC3UwQDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AuOoUh1aSBQANeC4gGGG3a23PpmF+y6CrUS9VWjyj0Ydpl2tMVDLaK2vd5QtYKKJ3UOyprGKk0D/aPn4E3Bk2rE3BKBZRXM1AAgAAAEAAAABTSUcAREVMRci9uvioJssgd8txxFlqz9RqPx+YLVMkHmm24fMUtYGWF/nhkoEYVGT7O+tXSfHHY/KHcUZjVaZpEt/tmXlXBAUDAAAAIAAAACgAAABQVUJLTUlOVE1BWFSxhKhavdriTvCAtNVcK5yr0cAbsWp2MsrwUV5YTc+7V0CsaLZSkgUAQAxA1GaSBQAAAAAA",
+ "BQAAAEAAAABAAAAApAAAADwBAABTSUcAUEFUSFNSRVBDRVJUSU5EWN5Y0b2irPS1JgqJFQMciPg4aWd9qj1ZqcJc5bGXe1m4ZdAXa5OIhXa0+680MgpyhEHhqYJDIwH1XRa1OZx5YAUDAAAABAAAAAwAAABSQURJTUlEUFJPT1RAQg8AgBW3iFaSBQD9WI+Qr6NOZsDmP0PsnCo66mstM3ac5ZON+I+ZeEK8lZWBASvsD2JIfq3v4d1QH5g4STs3wOazQPc25Puy659ZAgAAAEAAAABTSUcAREVMRUJbs67Sb5Wx/jzWyT1PhWR0c4kg59tjSGofo8R3eHzcA9CGwavuRdxOArhVWWODG99gYgfmjcRLgt9/jH+99w4DAAAAIAAAACgAAABQVUJLTUlOVE1BWFRXRfQ1RHLWGOgqABUTYfVBDZrv3OL2nPLYve9ldfNVLOjdPVFFkgUA6D0Vb1mSBQAAAAAA",
+ ]
+
+ server_request = request[0]
+ server_response = response[0]
+ test_cli("roughtime", [], expected_stderr=b'Please specify either --servers-file or --host and --pubkey\n')
+
+ with open(ecosystem, 'w') as f:
+ f.write("Cloudflare-Roughtime ed25519 gD63hSj4ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= udp 127.0.0.1:" + str(server_port))
+
+ test_cli("roughtime", [
+ "--check-local-clock=0",
+ "--chain-file=",
+ "--servers-file=" + ecosystem]
+ , expected_stderr=b'ERROR: Public key does not match!\n')
+
+ with open(ecosystem, 'w') as f:
+ f.write("Cloudflare-Roughtime ed25519 gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo= udp 127.0.0.1:" + str(server_port))
+
+ test_cli("roughtime", [
+ "--chain-file=",
+ "--servers-file=" + ecosystem]
+ , expected_stderr=b'ERROR: Local clock mismatch\n')
+
+ test_cli("roughtime", [
+ "--check-local-clock=0",
+ "--chain-file=" + chain_file,
+ "--servers-file=" + ecosystem]
+ , "Cloudflare-Roughtime : UTC 2019-09-12T08:00:11 (+-1000000us)")
+
+ with open(chain_file, 'r') as f:
+ read_data = f.read()
+ if read_data != chain[0]:
+ logging.error("unexpected chain")
+
+ server_request = request[1]
+ server_response = response[1]
+ test_cli("roughtime", [
+ "--check-local-clock=0",
+ "--chain-file=" + chain_file,
+ "--host=127.0.0.1:" + str(server_port),
+ "--pubkey=gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo=",
+ "--raw-time"]
+ , "UTC 1568275214691000 (+-1000000us)")
+
+ with open(chain_file, 'r') as f:
+ read_data = f.read()
+ if read_data != chain[1]:
+ logging.error("unexpected chain")
+
+ server_request = request[2]
+ server_response = response[2]
+ test_cli("roughtime", [
+ "--check-local-clock=0",
+ "--chain-file=" + chain_file,
+ "--host=127.0.0.1:" + str(server_port),
+ "--pubkey=gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo=",
+ "--max-chain-size=2"]
+ , "UTC 2019-09-12T08:00:42 (+-1000000us)")
+
+ with open(chain_file, 'r') as f:
+ read_data = f.read()
+ if read_data != chain[2]:
+ logging.error("unexpected chain")
+
def cli_pk_workfactor_tests(_tmp_dir):
    """Check pk_workfactor's estimated security bits for known sizes."""
    cases = [
        ("1024", "80"),
        ("2048", "111"),
        (["--type=rsa", "512"], "58"),
        (["--type=dl", "512"], "58"),
        (["--type=dl_exp", "512"], "128"),
    ]
    for cli_args, expected_bits in cases:
        test_cli("pk_workfactor", cli_args, expected_bits)
+
def cli_dl_group_info_tests(_tmp_dir):
    """Verify dl_group_info prints a P line and a G line for each MODP group."""
    line_format = re.compile('(P|G) = [A-F0-9]+')

    for group_bits in [1024, 1536, 2048, 3072, 4096, 6144, 8192]:
        group_output = test_cli("dl_group_info", "modp/ietf/%d" % (group_bits))
        group_lines = group_output.split('\n')

        # Exactly two lines expected: P then G
        if len(group_lines) != 2:
            logging.error('Unexpected output from dl_group_info')

        for group_line in group_lines:
            if line_format.match(group_line) is None:
                logging.error('Unexpected output from dl_group_info')
+
+
+
def cli_ec_group_info_tests(_tmp_dir):
    """Check ec_group_info output for secp256r1 in text and PEM form."""

    # pylint: disable=line-too-long
    expected_info = """P = FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF
A = FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC
B = 5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B
N = FFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551
G = 6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296,4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5"""

    expected_pem = """-----BEGIN EC PARAMETERS-----
MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////
/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6
k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDydwN9
gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1AiEA
/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=
-----END EC PARAMETERS-----"""

    test_cli("ec_group_info", "secp256r1", expected_info)
    test_cli("ec_group_info", "--pem secp256r1", expected_pem)
+
def cli_cpuid_tests(_tmp_dir):
    """Sanity-check the cpuid command's flag listing."""
    output = test_cli("cpuid", [])

    if not output.startswith('CPUID flags:'):
        logging.error('Unexpected cpuid output "%s"' % (output))

    # Skip past the 13-character "CPUID flags: " prefix, then validate
    # each whitespace-separated flag name
    valid_flag = re.compile('[a-z0-9_]+')
    for cpu_flag in output[13:].split(' '):
        if cpu_flag != '' and valid_flag.match(cpu_flag) is None:
            logging.error('Unexpected CPUID flag name "%s"' % (cpu_flag))
+
def cli_cc_enc_tests(_tmp_dir):
    """Round-trip the credit-card format-preserving encryption commands.

    NOTE(review): the decrypt output ends in ...027 while the encrypt
    input ends in ...029 — presumably the FPE scheme recomputes the
    checksum digit; these values match the recorded vectors, confirm
    against upstream before "fixing".
    """
    plaintext_cc = "8028028028028029"
    ciphertext_cc = "4308989841607208"
    test_cli("cc_encrypt", [plaintext_cc, "pass"], ciphertext_cc)
    test_cli("cc_decrypt", [ciphertext_cc, "pass"], "8028028028028027")
+
def cli_cert_issuance_tests(tmp_dir):
    """Issue a three-level chain (root -> intermediate -> leaf) and verify it."""
    root_key = os.path.join(tmp_dir, 'root.key')
    root_crt = os.path.join(tmp_dir, 'root.crt')
    int_key = os.path.join(tmp_dir, 'int.key')
    int_crt = os.path.join(tmp_dir, 'int.crt')
    int_csr = os.path.join(tmp_dir, 'int.csr')
    leaf_key = os.path.join(tmp_dir, 'leaf.key')
    leaf_crt = os.path.join(tmp_dir, 'leaf.crt')
    leaf_csr = os.path.join(tmp_dir, 'leaf.csr')

    # One key per chain level
    test_cli("keygen", ["--params=2048", "--output=" + root_key], "")
    test_cli("keygen", ["--params=2048", "--output=" + int_key], "")
    test_cli("keygen", ["--params=2048", "--output=" + leaf_key], "")

    # Self-signed root CA; path-limit=2 allows two subordinate levels
    test_cli("gen_self_signed",
             [root_key, "Root", "--ca", "--path-limit=2", "--output="+root_crt], "")

    # Intermediate CA: CSR signed by the root
    test_cli("gen_pkcs10", "%s Intermediate --ca --output=%s" % (int_key, int_csr))
    test_cli("sign_cert", "%s %s %s --output=%s" % (root_crt, root_key, int_csr, int_crt))

    # Leaf: CSR signed by the intermediate
    test_cli("gen_pkcs10", "%s Leaf --output=%s" % (leaf_key, leaf_csr))
    test_cli("sign_cert", "%s %s %s --output=%s" % (int_crt, int_key, leaf_csr, leaf_crt))

    # BUG FIX: the original omitted the comma after "cert_verify", so
    # adjacent-string concatenation fused the command name with its
    # argument format string and the verification never ran as intended.
    test_cli("cert_verify", "%s %s %s" % (leaf_crt, int_crt, root_crt),
             "Certificate passes validation checks")
+
def cli_timing_test_tests(_tmp_dir):
    """Run each timing-leak test suite briefly and validate the CSV-ish output."""
    suites = ["bleichenbacher", "manger",
              "ecdsa", "ecc_mul", "inverse_mod", "pow_mod",
              "lucky13sec3", "lucky13sec4sha1",
              "lucky13sec4sha256", "lucky13sec4sha384"]

    # Each result line is "<id>;<class>;<nanoseconds>"
    line_format = re.compile('[0-9]+;[0-9];[0-9]+')

    for suite_name in suites:
        suite_output = test_cli("timing_test",
                                [suite_name, "--measurement-runs=16", "--warmup-runs=3"],
                                None)

        for result_line in suite_output.split('\n'):
            if line_format.match(result_line) is None:
                logging.error("Unexpected output in timing_test %s: %s", suite_name, result_line)
+
def cli_tls_ciphersuite_tests(_tmp_dir):
    """List ciphersuites for each policy/version pair and validate the names."""
    suite_name_format = re.compile('^[A-Z0-9_]+$')

    for tls_policy in ['default', 'suiteb_128', 'suiteb_192', 'strict', 'all']:
        for tls_version in ['tls1.0', 'tls1.1', 'tls1.2']:

            # Only exercise pre-1.2 versions under the permissive 'all' policy
            if tls_version != 'tls1.2' and tls_policy != 'all':
                continue

            suite_lines = test_cli("tls_ciphers",
                                   ["--version=" + tls_version, "--policy=" + tls_policy],
                                   None).split('\n')

            for suite_line in suite_lines:
                if suite_name_format.match(suite_line) is None:
                    logging.error("Unexpected ciphersuite line %s", suite_line)
+
def cli_asn1_tests(_tmp_dir):
    """Decode a PEM-wrapped DER blob on stdin and compare the pretty-print.

    NOTE(review): the internal spacing of these literals (and the extra
    sixth dash on the END marker) may have been altered in transit —
    verify against the upstream file before changing either string.
    """
    input_pem = """-----BEGIN BLOB-----
MCACAQUTBnN0cmluZzEGAQH/AgFjBAUAAAAAAAMEAP///w==
-----END BLOB------
"""

    # Expected asn1print dump of the blob above
    expected = """d= 0, l= 32: SEQUENCE
 d= 1, l= 1: INTEGER 05
 d= 1, l= 6: PRINTABLE STRING string
 d= 1, l= 6: SET
 d= 2, l= 1: BOOLEAN true
 d= 2, l= 1: INTEGER 63
 d= 1, l= 5: OCTET STRING 0000000000
 d= 1, l= 4: BIT STRING FFFFFF"""

    # "-" makes asn1print read the PEM from stdin (final argument)
    test_cli("asn1print", "--pem -", expected, input_pem)
+
def cli_tls_socket_tests(tmp_dir):
    """End-to-end test of the tls_server and tls_client commands.

    Generates a throwaway CA and server certificate, starts a tls_server
    subprocess, connects with tls_client, writes a random message and
    checks it appears in the client's stdout.
    """
    # Random payload so each run sends a fresh message
    client_msg = b'Client message %d\n' % (random.randint(0, 2**128))
    server_port = random_port_number()

    priv_key = os.path.join(tmp_dir, 'priv.pem')
    ca_cert = os.path.join(tmp_dir, 'ca.crt')
    crt_req = os.path.join(tmp_dir, 'crt.req')
    server_cert = os.path.join(tmp_dir, 'server.crt')

    test_cli("keygen", ["--algo=ECDSA", "--params=secp256r1", "--output=" + priv_key], "")

    test_cli("gen_self_signed",
             [priv_key, "CA", "--ca", "--country=VT",
              "--dns=ca.example", "--hash=SHA-384", "--output="+ca_cert],
             "")

    # The self-signed CA is not in any trust store, so verification is
    # expected to fail with exactly this message
    test_cli("cert_verify", ca_cert, "Certificate did not validate - Cannot establish trust")

    test_cli("gen_pkcs10", "%s localhost --output=%s" % (priv_key, crt_req))

    test_cli("sign_cert", "%s %s %s --output=%s" % (ca_cert, priv_key, crt_req, server_cert))

    tls_server = subprocess.Popen([CLI_PATH, 'tls_server', '--max-clients=1',
                                   '--port=%d' % (server_port), server_cert, priv_key],
                                  stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Fixed sleeps stand in for real synchronization with the server
    wait_time = 1.0

    time.sleep(wait_time)

    tls_client = subprocess.Popen([CLI_PATH, 'tls_client', 'localhost',
                                   '--port=%d' % (server_port), '--trusted-cas=%s' % (ca_cert)],
                                  stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    time.sleep(wait_time)

    tls_client.stdin.write(client_msg)
    tls_client.stdin.flush()

    time.sleep(wait_time)

    (stdout, stderr) = tls_client.communicate()

    if stderr:
        logging.error("Got unexpected stderr output %s" % (stderr))

    if b'Handshake complete' not in stdout:
        logging.error('Failed to complete handshake: %s' % (stdout))

    # The message must come back in the client's stdout (presumably the
    # server echoes it — TODO confirm against the tls_server command)
    if client_msg not in stdout:
        logging.error("Missing client message from stdout %s" % (stdout))

    tls_server.communicate()
+
def cli_tls_http_server_tests(tmp_dir):
    """Spin up the tls_http_server command and probe it over HTTPS."""
    if not check_for_command("tls_http_server"):
        return

    # Python 3 import first, Python 2 fallback; skip test if neither works
    try:
        from http.client import HTTPSConnection
    except ImportError:
        try:
            from httplib import HTTPSConnection
        except ImportError:
            return
    import ssl

    server_port = random_port_number()

    priv_key = os.path.join(tmp_dir, 'priv.pem')
    ca_cert = os.path.join(tmp_dir, 'ca.crt')
    crt_req = os.path.join(tmp_dir, 'crt.req')
    server_cert = os.path.join(tmp_dir, 'server.crt')

    # Build a throwaway CA plus a server certificate for localhost
    test_cli("keygen", ["--algo=ECDSA", "--params=secp384r1", "--output=" + priv_key], "")

    test_cli("gen_self_signed",
             [priv_key, "CA", "--ca", "--country=VT",
              "--dns=ca.example", "--hash=SHA-384", "--output="+ca_cert],
             "")

    test_cli("gen_pkcs10", "%s localhost --output=%s" % (priv_key, crt_req))

    test_cli("sign_cert", "%s %s %s --output=%s" % (ca_cert, priv_key, crt_req, server_cert))

    # --max-clients=2 matches the two requests made below, so the server
    # exits on its own afterwards
    tls_server = subprocess.Popen([CLI_PATH, 'tls_http_server', '--max-clients=2',
                                   '--port=%d' % (server_port), server_cert, priv_key],
                                  stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Fixed sleep to let the server begin listening (no explicit sync)
    wait_time = 1.0
    time.sleep(wait_time)

    context = ssl.create_default_context(cafile=ca_cert)
    conn = HTTPSConnection('localhost', port=server_port, context=context)
    conn.request("GET", "/")
    resp = conn.getresponse()

    if resp.status != 200:
        logging.error('Unexpected response status %d' % (resp.status))

    body = str(resp.read())

    # The index page is expected to mention the Botan 2.x version string
    if body.find('TLS negotiation with Botan 2.') < 0:
        logging.error('Unexpected response body')

    # POST is expected to be rejected with 405 Method Not Allowed
    conn.request("POST", "/logout")
    resp = conn.getresponse()

    if resp.status != 405:
        logging.error('Unexpected response status %d' % (resp.status))

    # Python 3's wait() accepts a timeout; Python 2's does not
    if sys.version_info.major >= 3:
        rc = tls_server.wait(5) # pylint: disable=too-many-function-args
    else:
        rc = tls_server.wait()

    if rc != 0:
        logging.error("Unexpected return code from https_server %d", rc)
+
def cli_tls_proxy_tests(tmp_dir):
    """Run tls_proxy in front of a local HTTP server and fetch through it."""
    # pylint: disable=too-many-locals,too-many-statements
    if not check_for_command("tls_proxy"):
        return

    # Python 3 imports with Python 2 fallbacks; skip test if unavailable
    try:
        from http.client import HTTPSConnection
    except ImportError:
        try:
            from httplib import HTTPSConnection
        except ImportError:
            return

    try:
        from http.server import HTTPServer, BaseHTTPRequestHandler
    except ImportError:
        try:
            from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
        except ImportError:
            return

    import ssl
    import threading

    # Need two distinct ports: one for the backend, one for the proxy
    server_port = random_port_number()
    proxy_port = random_port_number()

    while server_port == proxy_port:
        proxy_port = random_port_number()

    priv_key = os.path.join(tmp_dir, 'priv.pem')
    ca_cert = os.path.join(tmp_dir, 'ca.crt')
    crt_req = os.path.join(tmp_dir, 'crt.req')
    server_cert = os.path.join(tmp_dir, 'server.crt')

    # Throwaway CA plus a certificate for the proxy's TLS endpoint
    test_cli("keygen", ["--algo=ECDSA", "--params=secp384r1", "--output=" + priv_key], "")

    test_cli("gen_self_signed",
             [priv_key, "CA", "--ca", "--country=VT",
              "--dns=ca.example", "--hash=SHA-384", "--output="+ca_cert],
             "")

    test_cli("gen_pkcs10", "%s localhost --output=%s" % (priv_key, crt_req))

    test_cli("sign_cert", "%s %s %s --output=%s" % (ca_cert, priv_key, crt_req, server_cert))

    # --max-clients=2 so the proxy exits after the two requests below
    tls_proxy = subprocess.Popen([CLI_PATH, 'tls_proxy', str(proxy_port), '127.0.0.1', str(server_port),
                                  server_cert, priv_key, '--output=/tmp/proxy.err', '--max-clients=2'],
                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Fixed sleeps stand in for real startup synchronization
    wait_time = 1.0

    time.sleep(wait_time)

    # Random body so each run verifies a fresh payload end to end
    server_response = binascii.hexlify(os.urandom(32))

    def run_http_server():
        class Handler(BaseHTTPRequestHandler):

            def do_GET(self): # pylint: disable=invalid-name
                self.send_response(200)
                self.end_headers()
                self.wfile.write(server_response)

        httpd = HTTPServer(('', server_port), Handler)
        httpd.serve_forever()

    # Daemon thread so the backend server dies with the test process
    http_thread = threading.Thread(target=run_http_server)
    http_thread.daemon = True
    http_thread.start()

    time.sleep(wait_time)

    context = ssl.create_default_context(cafile=ca_cert)

    # Two round trips through the proxy (matches --max-clients=2)
    for _i in range(2):
        conn = HTTPSConnection('localhost', port=proxy_port, context=context)
        conn.request("GET", "/")
        resp = conn.getresponse()

        if resp.status != 200:
            logging.error('Unexpected response status %d' % (resp.status))

        body = resp.read()

        if body != server_response:
            logging.error('Unexpected response from server %s' % (body))

    # Python 3's wait() accepts a timeout; Python 2's does not
    if sys.version_info.major >= 3:
        rc = tls_proxy.wait(5) # pylint: disable=too-many-function-args
    else:
        rc = tls_proxy.wait()

    if rc != 0:
        logging.error('Unexpected return code %d', rc)
+
def cli_trust_root_tests(tmp_dir):
    """Exercise trust_roots in both DN-only and PEM output modes."""
    pem_file = os.path.join(tmp_dir, 'pems')
    dn_file = os.path.join(tmp_dir, 'dns')

    test_cli("trust_roots", ['--dn-only', '--output=%s' % (dn_file)], "")

    # Each line should be a comma-separated list of quoted RDN values
    dn_re = re.compile(r'(.+=".+")(,.+=".+")')

    # FIX: use a context manager — the original iterated an anonymous
    # open() and leaked the file handle
    with open(dn_file) as dns:
        for line in dns:
            if dn_re.match(line) is None:
                logging.error("Unexpected DN line %s", line)

    test_cli("trust_roots", ['--output=%s' % (pem_file)], "")
+
def cli_tss_tests(tmp_dir):
    """Split a random secret with TSS and recover it from share subsets."""
    data_file = os.path.join(tmp_dir, 'data')

    exp_hash = "53B3C59276AE30EA7FD882268E80FD96AD80CC9FEB15F9FB940E7C4B5CF80B9E"

    # The fixed-seed RNG makes the 32-byte secret (and its hash) reproducible
    test_cli("rng", ["32", "--output=%s" % (data_file)], "")
    test_cli("hash", ["--no-fsname", data_file], exp_hash)

    threshold = 3
    share_count = 5

    test_cli("tss_split", [str(threshold), str(share_count), data_file,
                           "--share-prefix=%s/split" % (tmp_dir)], "")

    share_files = []
    for share_idx in range(1, share_count + 1):
        share_path = os.path.join(tmp_dir, "split%d.tss" % (share_idx))
        if not os.access(share_path, os.R_OK):
            logging.error("Failed to create expected split file %s", share_path)
        share_files.append(share_path)

    # Recovery succeeds with 5, 4 or 3 shares (threshold is 3)...
    for shares_used in (5, 4, 3):
        recovered = os.path.join(tmp_dir, "recovered_%d" % (shares_used))
        subset = share_files[share_count - shares_used:]
        test_cli("tss_recover", subset + ["--output=%s" % (recovered)], "")
        test_cli("hash", ["--no-fsname", recovered], exp_hash)

    # ...but must fail with only 2 shares
    rec2 = os.path.join(tmp_dir, "recovered_2")
    test_cli("tss_recover", share_files[3:] + ["--output=%s" % (rec2)], "", None,
             b'Error: Insufficient shares to do TSS reconstruction\n')
+
+
def cli_pk_encrypt_tests(tmp_dir):
    """RSA keygen plus pk_encrypt/pk_decrypt round trip with fixed-DRBG values."""
    input_file = os.path.join(tmp_dir, 'input')
    ctext_file = os.path.join(tmp_dir, 'ctext')
    recovered_file = os.path.join(tmp_dir, 'recovered')
    rsa_priv_key = os.path.join(tmp_dir, 'rsa.priv')
    rsa_pub_key = os.path.join(tmp_dir, 'rsa.pub')

    test_cli("keygen", ["--algo=RSA", "--provider=base", "--params=2048", "--output=%s" % (rsa_priv_key)], "")

    # The fixed DRBG makes the generated key deterministic
    test_cli("hash", ["--no-fsname", "--algo=SHA-256", rsa_priv_key],
             "72AF3227EF57A728E894D54623EB8E2C0CD11A4A98BF2DF32DB052BF60897873")

    test_cli("pkcs8", ["--pub-out", "%s/rsa.priv" % (tmp_dir), "--output=%s" % (rsa_pub_key)], "")

    # Generate a random input file
    test_cli("rng", ["10", "16", "32", "--output=%s" % (input_file)], "")

    # Because we used a fixed DRBG for each invocation the same ctext is generated each time
    rng_output_hash = "32F5E7B61357DE8397EFDA1E598379DFD5EE21767BDF4E2A435F05117B836AC6"
    ctext_hash = "FF1F0EEC2C42DD61D78505C5DF624A19AE6FE2BAB0B8F7D878C7655D54C68FE0"

    test_cli("hash", ["--no-fsname", "--algo=SHA-256", input_file], rng_output_hash)

    # Encrypt, then confirm the ciphertext matches the expected digest
    test_cli("pk_encrypt", [rsa_pub_key, input_file, "--output=%s" % (ctext_file)], "")
    test_cli("hash", ["--no-fsname", "--algo=SHA-256", ctext_file], ctext_hash)

    # Decrypt and confirm the original plaintext comes back
    test_cli("pk_decrypt", [rsa_priv_key, ctext_file, "--output=%s" % (recovered_file)], "")
    test_cli("hash", ["--no-fsname", "--algo=SHA-256", recovered_file], rng_output_hash)
+
def cli_uuid_tests(_tmp_dir):
    """Check uuid output: a fixed-RNG known value plus a format check."""
    # With the CLI's fixed test RNG the first UUID is a known constant
    test_cli("uuid", [], "D80F88F6-ADBE-45AC-B10C-3602E67D985B")

    uuid_format = re.compile(r'[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}')

    generated = test_cli("uuid", [])

    if uuid_format.match(generated) is None:
        logging.error('Bad uuid output %s' % (generated))
+
def cli_tls_client_hello_tests(_tmp_dir):
    """Parse a captured TLS client hello and hash the pretty-printed output."""

    # pylint: disable=line-too-long
    hello_hex = "16030100cf010000cb03035b3cf2457b864d7bef2a4b1f84fc3ced2b68d9551f3455ffdd305af277a91bb200003a16b816b716ba16b9cca9cca8c02cc030c02bc02fc0adc0acc024c00ac028c014c023c009c027c013ccaa009f009ec09fc09e006b003900670033010000680000000e000c000009676d61696c2e636f6d000500050100000000000a001a0018001d0017001a0018001b0019001c01000101010201030104000b00020100000d00140012080508040806050106010401050306030403001600000017000000230000ff01000100"

    parsed = test_cli("tls_client_hello", ["--hex", "-"], None, hello_hex)

    # Hash the decoded dump rather than comparing it literally
    expected_hash = "8EBFC3205ACFA98461128FE5D081D19254237AF84F7DAF000A3C992C3CF6DE44"
    test_cli("hash", ["--no-fsname", "--algo=SHA-256", "-"], expected_hash, parsed)
+
def cli_speed_pk_tests(_tmp_dir):
    """Smoke-test speed for each public key algorithm and check line format."""
    pk_algos = ["ECDSA", "ECDH", "SM2", "ECKCDSA", "ECGDSA", "GOST-34.10",
                "DH", "DSA", "ElGamal", "Ed25519", "Curve25519", "NEWHOPE", "McEliece",
                "RSA", "RSA_keygen", "XMSS"]

    speed_lines = test_cli("speed", ["--msec=1"] + pk_algos, None).split('\n')

    # e.g. ECDSA-secp256r1 106 keygen/sec; 9.35 ms/op 37489733 cycles/op (1 op in 9 ms)
    line_format = re.compile(r'^.* [0-9]+ ([A-Za-z ]+)/sec; [0-9]+\.[0-9]+ ms/op .*\([0-9]+ (op|ops) in [0-9\.]+ ms\)')
    for speed_line in speed_lines:
        if line_format.match(speed_line) is None:
            logging.error("Unexpected line %s", speed_line)
+
def cli_speed_pbkdf_tests(_tmp_dir):
    """Smoke-test speed for the password hashing algorithms."""
    line_format = re.compile(r'^.* [0-9]+ /sec; [0-9]+\.[0-9]+ ms/op .*\([0-9]+ (op|ops) in [0-9]+(\.[0-9]+)? ms\)')

    for pbkdf_algo in ['bcrypt', 'passhash9', 'argon2']:
        algo_lines = test_cli("speed", ["--msec=1", pbkdf_algo], None).split('\n')
        for algo_line in algo_lines:
            if line_format.match(algo_line) is None:
                logging.error("Unexpected line %s", algo_line)
+
def cli_speed_table_tests(_tmp_dir):
    """Check the structure of `speed --format=table` output line by line."""
    msec = 1

    # One regex per expected section of the output
    version_re = re.compile(r'^Botan 2\.[0-9]+\.[0-9] \(.*, revision .*, distribution .*\)')
    cpuid_re = re.compile(r'^CPUID: [a-z_0-9 ]*$')
    format_re = re.compile(r'^AES-128 .* buffer size [0-9]+ bytes: [0-9]+\.[0-9]+ MiB\/sec .*\([0-9]+\.[0-9]+ MiB in [0-9]+\.[0-9]+ ms\)')
    tbl_hdr_re = re.compile(r'^algo +operation +1024 bytes$')
    tbl_val_re = re.compile(r'^AES-128 +(encrypt|decrypt) +[0-9]+(\.[0-9]{2})$')

    output = test_cli("speed", ["--format=table", "--provider=base", "--msec=%d" % (msec), "AES-128"], None).split('\n')

    # Expected layout (11 lines): version, cpuid, blank, two speed lines,
    # blank, table header, two table rows, blank, trailing note
    if len(output) != 11:
        logging.error('Unexpected number of lines from table output')

    if version_re.match(output[0]) is None:
        logging.error("Unexpected version line %s", output[0])

    # The CPUID line may be empty on platforms reporting no flags
    if output[1] != '':
        if cpuid_re.match(output[1]) is None:
            logging.error("Unexpected cpuid line %s", output[1])
    elif output[2] != '':
        logging.error("Expected newline got %s", output[2])

    if format_re.match(output[3]) is None:
        logging.error("Unexpected line %s", output[3])
    if format_re.match(output[4]) is None:
        logging.error("Unexpected line %s", output[4])
    if output[5] != '':
        logging.error("Expected newline got %s", output[5])

    if tbl_hdr_re.match(output[6]) is None:
        logging.error("Unexpected table header %s", output[6])
    if tbl_val_re.match(output[7]) is None:
        logging.error("Unexpected table header %s", output[7])
    if tbl_val_re.match(output[8]) is None:
        logging.error("Unexpected table header %s", output[8])
    if output[9] != '':
        logging.error("Expected newline got %s", output[9])
    if output[10].find('results are the number of 1000s bytes processed per second') < 0:
        logging.error("Unexpected trailing message got %s", output[10])
+
def cli_speed_invalid_option_tests(_tmp_dir):
    """Confirm speed rejects bad options with the expected diagnostics."""
    speed_usage = b"Usage: speed --msec=500 --format=default --ecc-groups= --provider= --buf-size=1024 --clear-cpuid= --cpu-clock-speed=0 --cpu-clock-ratio=1.0 *algos\n"

    error_cases = [
        ("--buf-size=0",
         b"Usage error: Cannot have a zero-sized buffer\n%s" % (speed_usage)),
        ("--buf-size=F00F",
         b"Usage error: Invalid integer value 'F00F' for option buf-size\n%s" % (speed_usage)),
        ("--buf-size=90000000",
         b"Usage error: Specified buffer size is too large\n%s" % (speed_usage)),
        # Unknown CPUID flags only warn; no usage message follows
        ("--clear-cpuid=goku",
         b"Warning don't know CPUID flag 'goku'\n"),
    ]

    for bad_option, expected in error_cases:
        test_cli("speed", [bad_option, "--msec=1", "AES-128"],
                 expected_stderr=expected)
+
def cli_speed_math_tests(_tmp_dir):
    """Smoke-test speed for the low-level math operations."""
    # These all share a common output format
    math_ops = ['mp_mul', 'mp_div', 'mp_div10', 'modexp', 'random_prime', 'inverse_mod',
                'rfc3394', 'fpe_fe1', 'ecdsa_recovery', 'ecc_init', 'poly_dbl',
                'bn_redc', 'nistp_redc', 'ecc_mult', 'ecc_ops', 'os2ecp', 'primality_test']

    line_format = re.compile(r'^.* [0-9]+ /sec; [0-9]+\.[0-9]+ ms/op .*\([0-9]+ (op|ops) in [0-9]+(\.[0-9]+)? ms\)')
    for math_op in math_ops:
        op_lines = test_cli("speed", ["--msec=1", math_op], None).split('\n')
        for op_line in op_lines:
            if line_format.match(op_line) is None:
                logging.error("Unexpected line %s", op_line)
+
def cli_speed_tests(_tmp_dir):
    """Check the output format of `speed` across several algorithm classes."""
    # pylint: disable=too-many-branches

    msec = 1

    output = test_cli("speed", ["--msec=%d" % (msec), "--buf-size=64,512", "AES-128"], None).split('\n')

    # Only divisibility by 4 is checked — presumably 2 buffer sizes times
    # encrypt/decrypt lines; confirm against the speed command's output
    if len(output) % 4 != 0:
        logging.error("Unexpected number of lines for AES-128 speed test")

    # pylint: disable=line-too-long
    format_re = re.compile(r'^AES-128 .* buffer size [0-9]+ bytes: [0-9]+\.[0-9]+ MiB\/sec .*\([0-9]+\.[0-9]+ MiB in [0-9]+\.[0-9]+ ms\)')
    for line in output:
        if format_re.match(line) is None:
            logging.error("Unexpected line %s", line)

    output = test_cli("speed", ["--msec=%d" % (msec), "ChaCha20", "SHA-256", "HMAC(SHA-256)"], None).split('\n')

    # pylint: disable=line-too-long
    format_re = re.compile(r'^.* buffer size [0-9]+ bytes: [0-9]+\.[0-9]+ MiB\/sec .*\([0-9]+\.[0-9]+ MiB in [0-9]+\.[0-9]+ ms\)')
    for line in output:
        if format_re.match(line) is None:
            logging.error("Unexpected line %s", line)

    # AEAD modes additionally report key schedule timing, so each line
    # must match one of two formats
    output = test_cli("speed", ["--msec=%d" % (msec), "AES-128/GCM"], None).split('\n')
    format_re_ks = re.compile(r'^AES-128/GCM\(16\).* [0-9]+ key schedule/sec; [0-9]+\.[0-9]+ ms/op .*\([0-9]+ (op|ops) in [0-9\.]+ ms\)')
    format_re_cipher = re.compile(r'^AES-128/GCM\(16\) .* buffer size [0-9]+ bytes: [0-9]+\.[0-9]+ MiB\/sec .*\([0-9]+\.[0-9]+ MiB in [0-9]+\.[0-9]+ ms\)')
    for line in output:
        if format_re_ks.match(line) is None:
            if format_re_cipher.match(line) is None:
                logging.error('Unexpected line %s', line)

    output = test_cli("speed", ["--msec=%d" % (msec), "scrypt"], None).split('\n')

    format_re = re.compile(r'^scrypt-[0-9]+-[0-9]+-[0-9]+ \([0-9]+ MiB\) [0-9]+ /sec; [0-9]+\.[0-9]+ ms/op .*\([0-9]+ (op|ops) in [0-9\.]+ ms\)')

    for line in output:
        if format_re.match(line) is None:
            logging.error("Unexpected line %s", line)

    output = test_cli("speed", ["--msec=%d" % (msec), "RNG"], None).split('\n')

    # ChaCha_RNG generate buffer size 1024 bytes: 954.431 MiB/sec 4.01 cycles/byte (477.22 MiB in 500.00 ms)
    format_re = re.compile(r'^.* generate buffer size [0-9]+ bytes: [0-9]+\.[0-9]+ MiB/sec .*\([0-9]+\.[0-9]+ MiB in [0-9]+\.[0-9]+ ms')
    for line in output:
        if format_re.match(line) is None:
            logging.error("Unexpected line %s", line)

    # Entropy source rdseed output 128 bytes estimated entropy 0 in 0.02168 ms total samples 32
    output = test_cli("speed", ["--msec=%d" % (msec), "entropy"], None).split('\n')
    format_re = re.compile(r'^Entropy source [_a-z0-9]+ output [0-9]+ bytes estimated entropy [0-9]+ in [0-9]+\.[0-9]+ ms .*total samples [0-9]+')
    for line in output:
        if format_re.match(line) is None:
            logging.error("Unexpected line %s", line)

    # JSON output must parse and each record must carry the known fields
    output = test_cli("speed", ["--msec=%d" % (msec), "--format=json", "AES-128"], None)

    json_blob = json.loads(output)
    if len(json_blob) < 2:
        logging.error("Unexpected size for JSON output")

    for b in json_blob:
        for field in ['algo', 'op', 'events', 'bps', 'buf_size', 'nanos']:
            if field not in b:
                logging.error('Missing field %s in JSON record %s' % (field, b))
+
def run_test(fn_name, fn):
    """Run a single CLI test function in its own scratch directory.

    Creates a fresh temporary directory, times the run, and logs the
    duration. Exceptions raised by the test are caught and logged as
    failures rather than propagated.

    FIX: the scratch directory is now removed in a finally block, so it
    is cleaned up even if the test raises a non-Exception BaseException
    (e.g. KeyboardInterrupt) — previously such an exit leaked the dir.
    """
    start = time.time()
    tmp_dir = tempfile.mkdtemp(prefix='botan_cli_')
    try:
        fn(tmp_dir)
    except Exception as e: # pylint: disable=broad-except
        logging.error("Test %s threw exception: %s", fn_name, e)
    finally:
        shutil.rmtree(tmp_dir)
    end = time.time()
    logging.info("Ran %s in %.02f sec", fn_name, end-start)
+
def main(args=None):
    """Entry point: parse options, select matching tests and run them.

    Usage: <script> [options] path_to_botan_cli [test_regex]
    Returns a process exit code (0 on success, non-zero on failure).
    """
    # pylint: disable=too-many-branches,too-many-locals
    if args is None:
        args = sys.argv

    parser = optparse.OptionParser(
        formatter=optparse.IndentedHelpFormatter(max_help_position=50))

    parser.add_option('--verbose', action='store_true', default=False)
    parser.add_option('--quiet', action='store_true', default=False)
    # 0 means "auto": one worker thread per CPU (see below)
    parser.add_option('--threads', action='store', type='int', default=0)

    (options, args) = parser.parse_args(args)

    setup_logging(options)

    if len(args) < 2:
        logging.error("Usage: %s path_to_botan_cli [test_regex]", args[0])
        return 1

    if not os.access(args[1], os.X_OK):
        logging.error("Could not access/execute %s", args[1])
        return 2

    threads = options.threads
    if threads == 0:
        threads = multiprocessing.cpu_count()

    # The CLI binary path is shared with the test helpers via this global
    global CLI_PATH
    CLI_PATH = args[1]

    # Optional regex restricting which test functions run
    test_regex = None
    if len(args) == 3:
        try:
            test_regex = re.compile(args[2])
        except re.error as e:
            logging.error("Invalid regex: %s", str(e))
            return 1

    # some of the slowest tests are grouped up front
    test_fns = [
        cli_speed_tests,
        cli_speed_pk_tests,
        cli_speed_math_tests,
        cli_speed_pbkdf_tests,
        cli_speed_table_tests,
        cli_speed_invalid_option_tests,
        cli_xmss_sign_tests,

        cli_argon2_tests,
        cli_asn1_tests,
        cli_base32_tests,
        cli_base58_tests,
        cli_base64_tests,
        cli_bcrypt_tests,
        cli_cc_enc_tests,
        cli_cycle_counter,
        cli_cert_issuance_tests,
        cli_compress_tests,
        cli_config_tests,
        cli_cpuid_tests,
        cli_dl_group_info_tests,
        cli_ec_group_info_tests,
        cli_entropy_tests,
        cli_factor_tests,
        cli_gen_dl_group_tests,
        cli_gen_prime_tests,
        cli_hash_tests,
        cli_help_tests,
        cli_hex_tests,
        cli_hmac_tests,
        cli_is_prime_tests,
        cli_key_tests,
        cli_mod_inverse_tests,
        cli_pbkdf_tune_tests,
        cli_pk_encrypt_tests,
        cli_pk_workfactor_tests,
        cli_psk_db_tests,
        cli_rng_tests,
        cli_roughtime_check_tests,
        cli_roughtime_tests,
        cli_timing_test_tests,
        cli_tls_ciphersuite_tests,
        cli_tls_client_hello_tests,
        cli_tls_http_server_tests,
        cli_tls_proxy_tests,
        cli_tls_socket_tests,
        cli_trust_root_tests,
        cli_tss_tests,
        cli_uuid_tests,
        cli_version_tests,
        ]

    # Filter the registry by the user-supplied regex, if any
    tests_to_run = []
    for fn in test_fns:
        fn_name = fn.__name__

        if test_regex is None or test_regex.search(fn_name) is not None:
            tests_to_run.append((fn_name, fn))

    start_time = time.time()

    if threads > 1:
        # Run tests concurrently; run_test gives each its own tmp dir
        pool = ThreadPool(processes=threads)
        results = []
        for test in tests_to_run:
            results.append(pool.apply_async(run_test, test))

        for result in results:
            result.get()
    else:
        for test in tests_to_run:
            run_test(test[0], test[1])

    end_time = time.time()

    print("Ran %d tests with %d failures in %.02f seconds" % (
        TESTS_RUN, TESTS_FAILED, end_time - start_time))

    # Non-zero exit status if anything failed
    if TESTS_FAILED > 0:
        return 1
    return 0
+
# Script entry point: exit status is main()'s return code
if __name__ == '__main__':
    sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/test_cli_crypt.py b/comm/third_party/botan/src/scripts/test_cli_crypt.py
new file mode 100755
index 0000000000..6160d03690
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/test_cli_crypt.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python
+
+import binascii
+import argparse
+import re
+import subprocess
+import sys
+import os.path
+import logging
+import time
+from collections import OrderedDict
+import multiprocessing
+from multiprocessing.pool import ThreadPool
+
# Maps algorithm names as they appear in the .vec test-vector files to the
# mode names accepted by `botan encryption --mode=...`. Test vectors for
# algorithms not listed here are skipped by get_testdata().
SUPPORTED_ALGORITHMS = {
    "AES-128/CFB": "aes-128-cfb",
    "AES-192/CFB": "aes-192-cfb",
    "AES-256/CFB": "aes-256-cfb",
    "AES-128/GCM": "aes-128-gcm",
    "AES-192/GCM": "aes-192-gcm",
    "AES-256/GCM": "aes-256-gcm",
    "AES-128/OCB": "aes-128-ocb",
    "AES-128/XTS": "aes-128-xts",
    "AES-256/XTS": "aes-256-xts",
    "ChaCha20Poly1305": "chacha20poly1305",
}
+
class VecDocument:
    """Parser for Botan's .vec test vector files.

    The format consists of group headers like ``[AES-128/GCM]`` followed
    by ``Key = value`` lines; test cases within a group are separated by
    blank lines and ``#`` starts a comment. Parsed data is exposed via
    get_data() as an OrderedDict mapping group name -> list of dicts
    (one dict of key/value strings per test case).
    """

    # Compiled once (and as raw strings) instead of re-matching every
    # line twice with non-raw "\s"/"\[" escapes as before.
    _GROUP_HEADER = re.compile(r"^\[(.+)\]$")
    _KEY_VALUE = re.compile(r"^\s*([a-zA-Z]+)\s*=(.*)$")

    def __init__(self, filepath):
        self.data = OrderedDict()
        last_testcase_number = 1
        current_testcase_number = 1
        current_group_name = ""
        last_group_name = ""
        current_testcase = {}

        with open(filepath, 'r') as f:
            # Append one empty line so the final test case is flushed too
            lines = f.read().splitlines() + ["\n"]

        for line in lines:
            line = line.strip()
            if line.startswith("#"):
                pass  # comment, skip
            elif line == "":
                # A blank line terminates the current test case
                current_testcase_number += 1
            else:
                group_match = self._GROUP_HEADER.match(line)
                if group_match:
                    current_group_name = group_match.group(1)
                else:
                    kv_match = self._KEY_VALUE.match(line)
                    if kv_match:
                        current_testcase[kv_match.group(1)] = kv_match.group(2).strip()

            if current_testcase_number != last_testcase_number:
                # Flush the completed test case into its group
                if current_group_name not in self.data:
                    self.data[current_group_name] = []
                if current_testcase:
                    self.data[current_group_name].append(current_testcase)
                current_testcase = {}
                last_testcase_number = current_testcase_number

            if current_group_name != last_group_name:
                last_group_name = current_group_name
                # Test case numbering restarts with each new group
                last_testcase_number = 1
                current_testcase_number = 1

    def get_data(self):
        """Return the parsed OrderedDict of group name -> test case list."""
        return self.data
+
# Global counters; TESTS_FAILED is incremented by TestLogHandler whenever an
# ERROR-level record is logged. (TESTS_RUN is not incremented anywhere in
# the visible script — presumably kept for symmetry with test_cli.py.)
TESTS_RUN = 0
TESTS_FAILED = 0

class TestLogHandler(logging.StreamHandler, object):
    # Logging handler that treats every ERROR (or worse) record as a
    # test failure, in addition to normal stream output.
    def emit(self, record):
        # Do the default stuff first
        super(TestLogHandler, self).emit(record)
        if record.levelno >= logging.ERROR:
            global TESTS_FAILED
            TESTS_FAILED += 1
+
def setup_logging(options):
    """Attach a TestLogHandler to the root logger.

    The log level is DEBUG with --verbose, WARNING with --quiet and
    INFO otherwise (--verbose wins if both flags are given).
    """
    if options.verbose:
        level = logging.DEBUG
    else:
        level = logging.WARNING if options.quiet else logging.INFO

    handler = TestLogHandler(sys.stdout)
    handler.setFormatter(logging.Formatter('%(levelname) 7s: %(message)s'))

    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(level)
+
def test_cipher_kat(cli_binary, data):
    """Run one cipher known-answer test through `botan encryption`.

    data is a dict with Key/Nonce/In/Out plus the Algorithm and
    Direction fields added by get_testdata(). A mismatch is reported
    via logging.error (which bumps the global failure counter).
    """
    iv = data['Nonce']
    key = data['Key']
    ad = data.get('AD', "")
    plaintext = data['In'].lower()
    ciphertext = data['Out'].lower()
    algorithm = data['Algorithm']
    direction = data['Direction']

    mode = SUPPORTED_ALGORITHMS.get(algorithm)
    if mode is None:
        raise Exception("Unknown algorithm: '" + algorithm + "'")

    cmd = [
        cli_binary,
        "encryption",
        "--mode=%s" % mode,
        "--iv=%s" % iv,
        "--ad=%s" % ad,
        "--key=%s" % key]

    # Merged the previously duplicated direction checks into one branch
    if direction == "decrypt":
        cmd.append('--decrypt')
        invalue = ciphertext
        expected = plaintext
    else:
        invalue = plaintext
        expected = ciphertext

    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
    out_raw = proc.communicate(input=binascii.unhexlify(invalue))[0]
    output = binascii.hexlify(out_raw).decode("UTF-8").lower()

    if expected != output:
        logging.error("For test %s got %s expected %s" % (data['testname'], output, expected))
+
def get_testdata(document, max_tests):
    """Flatten parsed vector data into a list of encrypt/decrypt test dicts.

    document: OrderedDict as produced by VecDocument.get_data()
    max_tests: cap on test cases taken per algorithm (<= 0 means no cap)

    Each supported test case yields two dicts (one per direction) with
    the vector's key/values plus 'testname', 'Algorithm', 'Direction'.
    """
    out = []
    for algorithm in document:
        if algorithm not in SUPPORTED_ALGORITHMS:
            continue
        testcase_number = 0
        for testcase in document[algorithm]:
            testcase_number += 1
            for direction in ['encrypt', 'decrypt']:
                # Normalize the test name down to [a-z0-9_-]
                testname = "{} no {:0>3} ({})".format(
                    algorithm.lower(), testcase_number, direction)
                testname = re.sub("[^a-z0-9-]", "_", testname)
                testname = re.sub("_+", "_", testname)
                testname = testname.strip("_")
                test = {'testname': testname}
                test.update(testcase)
                test['Algorithm'] = algorithm
                test['Direction'] = direction

                out.append(test)

            # Fixed off-by-one: the previous `>` comparison let
            # max_tests + 1 test cases through per algorithm.
            if max_tests > 0 and testcase_number >= max_tests:
                break
    return out
+
def main(args=None):
    """Load cipher KAT vectors and run each through the botan CLI binary.

    Returns 1 if any KAT failed, 0 otherwise.
    """
    if args is None:
        args = sys.argv

    parser = argparse.ArgumentParser(description="")
    parser.add_argument('cli_binary', help='path to the botan cli binary')
    parser.add_argument('--max-tests', type=int, default=50, metavar="M")
    parser.add_argument('--threads', type=int, default=0, metavar="T")
    parser.add_argument('--verbose', action='store_true', default=False)
    parser.add_argument('--quiet', action='store_true', default=False)
    # NOTE(review): parse_args() reads sys.argv directly, so the `args`
    # parameter is effectively ignored; kept as-is for interface stability.
    args = parser.parse_args()

    setup_logging(args)

    cli_binary = args.cli_binary
    max_tests = args.max_tests
    threads = args.threads

    if threads == 0:
        threads = multiprocessing.cpu_count()

    # Vector files are located relative to the source root
    test_data_dir = os.path.join('src', 'tests', 'data')

    mode_test_data = [os.path.join(test_data_dir, 'modes', 'cfb.vec'),
                      os.path.join(test_data_dir, 'aead', 'gcm.vec'),
                      os.path.join(test_data_dir, 'aead', 'ocb.vec'),
                      os.path.join(test_data_dir, 'modes', 'xts.vec'),
                      os.path.join(test_data_dir, 'aead', 'chacha20poly1305.vec')]

    kats = []
    for f in mode_test_data:
        vecfile = VecDocument(f)
        kats += get_testdata(vecfile.get_data(), max_tests)

    start_time = time.time()

    if threads > 1:
        pool = ThreadPool(processes=threads)
        results = []
        for test in kats:
            results.append(pool.apply_async(test_cipher_kat, (cli_binary, test)))

        for result in results:
            result.get()
    else:
        for test in kats:
            # Bug fix: this path previously called test_cipher_kat(test)
            # without the cli_binary argument, raising a TypeError.
            test_cipher_kat(cli_binary, test)

    end_time = time.time()

    print("Ran %d tests with %d failures in %.02f seconds" % (
        len(kats), TESTS_FAILED, end_time - start_time))

    if TESTS_FAILED > 0:
        return 1
    return 0

if __name__ == '__main__':
    sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/test_fuzzers.py b/comm/third_party/botan/src/scripts/test_fuzzers.py
new file mode 100755
index 0000000000..01c202f236
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/test_fuzzers.py
@@ -0,0 +1,187 @@
+#!/usr/bin/python
+
+# (C) 2017,2018 Jack Lloyd
+
+import sys
+import os
+import subprocess
+import optparse # pylint: disable=deprecated-module
+import stat
+import multiprocessing
+import time
+
def run_fuzzer_gdb(args):
    """Run one fuzzer binary on one corpus file under gdb.

    Returns (corpus_file, returncode, stdout, stderr); on success the
    output fields are empty since gdb's own chatter is only interesting
    when the fuzzer crashed.
    """
    (fuzzer_bin, corpus_file) = args

    gdb_proc = subprocess.Popen(['gdb', '--quiet', '--return-child-result', fuzzer_bin],
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                close_fds=True)

    # Feed the input via gdb's `run <` redirection, then grab a backtrace
    gdb_commands = ('run < %s\nbt\nquit\n' % (corpus_file)).encode('ascii')

    (stdout, stderr) = gdb_proc.communicate(gdb_commands)

    if gdb_proc.returncode == 0:
        # Bug fix: this used to return a 3-tuple (0, '', '') while every
        # caller unpacks 4 values, causing a ValueError on success.
        return (corpus_file, 0, '', '')

    return (corpus_file, gdb_proc.returncode, stdout.decode('ascii'), stderr.decode('ascii'))
+
def run_fuzzer(args):
    """Run one fuzzer binary on one corpus file, fed via stdin.

    Returns (corpus_file, returncode, stdout, stderr).
    """
    (fuzzer_bin, corpus_file) = args
    # Context manager ensures the corpus fd is closed even if Popen throws
    with open(corpus_file, 'r') as corpus_fd:
        fuzzer_proc = subprocess.Popen([fuzzer_bin], stdin=corpus_fd,
                                       stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        (stdout, stderr) = fuzzer_proc.communicate()
    return (corpus_file, fuzzer_proc.returncode, stdout.decode('ascii'), stderr.decode('ascii'))
+
def run_fuzzer_many_files(fuzzer_bin, corpus_files):
    """Invoke the fuzzer once, passing all corpus files on its command line.

    Returns (returncode, stdout, stderr).
    """
    proc = subprocess.Popen([fuzzer_bin] + corpus_files,
                            stdin=None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            close_fds=True)
    out, err = proc.communicate()
    return (proc.returncode, out.decode('ascii'), err.decode('ascii'))
+
def main(args=None):
    #pylint: disable=too-many-branches
    #pylint: disable=too-many-statements
    #pylint: disable=too-many-locals
    """Run every fuzzer binary against its corpus directory.

    Returns 0 on a clean run, 1 on usage/access errors, and 2 if any
    fuzzer crashed or produced output on stdout/stderr.
    """
    if args is None:
        args = sys.argv

    parser = optparse.OptionParser(
        usage='Usage: %prog [options] corpus_dir fuzzers_dir',
    )

    parser.add_option('--gdb', action='store_true',
                      help='Run under GDB and capture backtraces')

    parser.add_option('--one-at-a-time', action='store_true', default=False,
                      help='Test one corpus input at a time')

    (options, args) = parser.parse_args(args)

    if len(args) != 3:  # args[0] is the script name itself
        parser.print_usage()
        return 1

    if options.gdb and not options.one_at_a_time:
        print("Option --gdb requires --one-at-a-time")
        return 1

    corpus_dir = args[1]
    fuzzer_dir = args[2]

    if not os.access(corpus_dir, os.R_OK):
        print("Error could not access corpus directory '%s'" % (corpus_dir))
        return 1

    if not os.access(fuzzer_dir, os.R_OK):
        print("Error could not access fuzzers directory '%s'" % (fuzzer_dir))
        return 1

    fuzzers = set([])
    for fuzzer in os.listdir(fuzzer_dir):
        if fuzzer.endswith('.zip'):
            continue
        fuzzers.add(fuzzer)

    corpora = set([])
    for corpus in os.listdir(corpus_dir):
        # Ignore regular files in the toplevel dir, and the .git metadata
        if not stat.S_ISDIR(os.stat(os.path.join(corpus_dir, corpus)).st_mode):
            continue

        if corpus == '.git':
            continue

        corpora.add(corpus)

    for f in sorted(list(fuzzers - corpora)):
        print("Warning: Fuzzer %s has no corpus" % (f))
    for c in sorted(list(corpora - fuzzers)):
        print("Warning: Corpus %s has no fuzzer" % (c))

    fuzzers_with_corpus = fuzzers & corpora

    # Totals across all fuzzers. Bug fix: the crash counter was previously
    # reset to zero after each fuzzer, so crashes alone never triggered a
    # nonzero exit status or appeared in the final summary line.
    crash_count = 0
    stderr_count = 0
    stdout_count = 0

    if options.one_at_a_time:
        pool = multiprocessing.Pool(multiprocessing.cpu_count() * 2)
        chunk_size = 32 # arbitrary

        run_fuzzer_func = run_fuzzer_gdb if options.gdb else run_fuzzer

        for fuzzer in sorted(list(fuzzers_with_corpus)):
            fuzzer_bin = os.path.join(fuzzer_dir, fuzzer)
            corpus_subdir = os.path.join(corpus_dir, fuzzer)
            corpus_files = [os.path.join(corpus_subdir, l) for l in sorted(list(os.listdir(corpus_subdir)))]

            # We have to do this hack because multiprocessing's Pool.map doesn't support
            # passing any initial arguments, just the single iterable
            map_args = [(fuzzer_bin, f) for f in corpus_files]

            start = time.time()
            fuzzer_crashes = 0

            for result in pool.map(run_fuzzer_func, map_args, chunk_size):
                (corpus_file, retcode, stdout, stderr) = result

                if retcode != 0:
                    print("Fuzzer %s crashed with input %s returncode %d" % (fuzzer, corpus_file, retcode))
                    fuzzer_crashes += 1

                if stdout:
                    print("Fuzzer %s produced stdout on input %s:\n%s" % (fuzzer, corpus_file, stdout))
                    stdout_count += 1

                if stderr:
                    print("Fuzzer %s produced stderr on input %s:\n%s" % (fuzzer, corpus_file, stderr))
                    stderr_count += 1

            duration = time.time() - start
            print("Tested fuzzer %s with %d test cases, %d crashes in %.02f seconds" % (
                fuzzer, len(corpus_files), fuzzer_crashes, duration))
            crash_count += fuzzer_crashes
            sys.stdout.flush()
    else:
        for fuzzer in sorted(list(fuzzers_with_corpus)):
            fuzzer_bin = os.path.join(fuzzer_dir, fuzzer)
            corpus_subdir = os.path.join(corpus_dir, fuzzer)
            corpus_files = [os.path.join(corpus_subdir, l) for l in sorted(list(os.listdir(corpus_subdir)))]

            start = time.time()

            (retcode, stdout, stderr) = run_fuzzer_many_files(fuzzer_bin, corpus_files)

            fuzzer_crashes = 0
            if retcode != 0:
                print("Fuzzer %s crashed returncode %d" % (fuzzer, retcode))
                fuzzer_crashes += 1

            if stdout:
                print("Fuzzer %s produced stdout:\n%s" % (fuzzer, stdout))
                stdout_count += 1

            if stderr:
                print("Fuzzer %s produced stderr:\n%s" % (fuzzer, stderr))
                stderr_count += 1

            duration = time.time() - start

            print("Tested fuzzer %s with %d test cases, %d crashes in %.02f seconds" % (
                fuzzer, len(corpus_files), fuzzer_crashes, duration))
            crash_count += fuzzer_crashes

    if crash_count > 0 or stderr_count > 0 or stdout_count > 0:
        print("Ran fuzzer tests, %d crashes %d stdout %d stderr" % (crash_count, stdout_count, stderr_count))
        return 2
    return 0

if __name__ == '__main__':
    sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/test_python.py b/comm/third_party/botan/src/scripts/test_python.py
new file mode 100644
index 0000000000..2202c0e4bc
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/test_python.py
@@ -0,0 +1,695 @@
+#!/usr/bin/env python
+
+"""
+(C) 2015,2017,2018,2019 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import unittest
+import binascii
+import botan2
+
def hex_encode(buf):
    """Return the lowercase hex string representation of *buf* (bytes)."""
    encoded = binascii.hexlify(buf)
    return encoded.decode('ascii')
+
def hex_decode(buf):
    """Decode a hex string into the corresponding bytes."""
    raw = buf.encode('ascii')
    return binascii.a2b_hex(raw)
+
class BotanPythonTests(unittest.TestCase):
    # pylint: disable=too-many-public-methods,too-many-locals
    """Unit tests for the botan2 Python FFI binding.

    test_certs and test_mceliece expect the test data files under
    src/tests/data relative to the working directory (run from the
    source root).
    """

    def test_version(self):
        """Version accessors report a 2.8+ library and a recent FFI API."""
        version_str = botan2.version_string()
        self.assertTrue(version_str.startswith('Botan '))

        self.assertEqual(botan2.version_major(), 2)
        self.assertGreaterEqual(botan2.version_minor(), 8)

        self.assertGreaterEqual(botan2.ffi_api_version(), 20180713)

    def test_compare(self):
        """Constant-time comparison handles equal, unequal, and length-mismatched inputs."""
        x = "1234"
        y = "1234"
        z = "1233"
        self.assertTrue(botan2.const_time_compare(x, y))
        self.assertFalse(botan2.const_time_compare(x, z))
        self.assertFalse(botan2.const_time_compare(x, x + z))

    def test_block_cipher(self):
        """AES-128 raw block encryption matches the FIPS-197 example vector."""
        aes = botan2.BlockCipher("AES-128")
        self.assertEqual(aes.algo_name(), "AES-128")
        self.assertEqual(aes.block_size(), 16)
        self.assertEqual(aes.minimum_keylength(), 16)
        self.assertEqual(aes.maximum_keylength(), 16)

        aes.set_key(hex_decode("000102030405060708090a0b0c0d0e0f"))
        ct = aes.encrypt(hex_decode("00112233445566778899aabbccddeeff"))

        self.assertEqual(hex_encode(ct), "69c4e0d86a7b0430d8cdb78070b4c55a")

        pt = aes.decrypt(ct)

        self.assertEqual(hex_encode(pt), "00112233445566778899aabbccddeeff")

    def test_kdf(self):
        """KDF2(SHA-1) produces the expected derived key."""
        secret = hex_decode('6FD4C3C0F38E5C7A6F83E99CD9BD')
        salt = hex_decode('DBB986')
        label = hex_decode('')
        expected = hex_decode('02AEB40A3D4B66FBA540F9D4B20006F2046E0F3A029DEAB201FC692B79EB27CEF7E16069046A')

        produced = botan2.kdf('KDF2(SHA-1)', secret, 38, salt, label)

        self.assertEqual(hex_encode(produced), hex_encode(expected))

    def test_pbkdf(self):
        """PBKDF2 with fixed params is reproducible; timed variant round-trips."""
        (salt, iterations, pbkdf) = botan2.pbkdf('PBKDF2(SHA-1)', '', 32, 10000, hex_decode('0001020304050607'))

        self.assertEqual(iterations, 10000)
        self.assertEqual(hex_encode(pbkdf),
                         '59b2b1143b4cb1059ec58d9722fb1c72471e0d85c6f7543ba5228526375b0127')

        # pbkdf_timed chooses its own iteration count; re-deriving with the
        # returned salt/iterations must give the same key
        (salt, iterations, pbkdf) = botan2.pbkdf_timed('PBKDF2(SHA-256)', 'xyz', 32, 200)

        cmp_pbkdf = botan2.pbkdf('PBKDF2(SHA-256)', 'xyz', 32, iterations, salt)[2]

        self.assertEqual(pbkdf, cmp_pbkdf)

    def test_scrypt(self):
        """scrypt matches the RFC 7914 test vectors."""
        scrypt = botan2.scrypt(10, '', '', 16, 1, 1)
        self.assertEqual(hex_encode(scrypt), "77d6576238657b203b19")

        scrypt = botan2.scrypt(32, 'password', 'NaCl', 1024, 8, 16)
        self.assertEqual(hex_encode(scrypt), "fdbabe1c9d3472007856e7190d01e9fe7c6ad7cbc8237830e77376634b373162")

    def test_bcrypt(self):
        """bcrypt hashes verify correctly and reject wrong passwords."""
        r = botan2.RandomNumberGenerator()
        phash = botan2.bcrypt('testing', r)
        self.assertTrue(isinstance(phash, str))
        self.assertTrue(phash.startswith("$2a$"))

        self.assertTrue(botan2.check_bcrypt('testing', phash))
        self.assertFalse(botan2.check_bcrypt('live fire', phash))

        self.assertTrue(botan2.check_bcrypt('test', '$2a$04$wjen1fAA.UW6UxthpKK.huyOoxvCR7ATRCVC4CBIEGVDOCtr8Oj1C'))

    def test_mac(self):
        """HMAC(SHA-256) produces the expected tag for a known key/message."""
        hmac = botan2.MsgAuthCode('HMAC(SHA-256)')
        self.assertEqual(hmac.algo_name(), 'HMAC(SHA-256)')
        self.assertEqual(hmac.minimum_keylength(), 0)
        self.assertEqual(hmac.maximum_keylength(), 4096)
        hmac.set_key(hex_decode('0102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F20'))
        hmac.update(hex_decode('616263'))

        expected = hex_decode('A21B1F5D4CF4F73A4DD939750F7A066A7F98CC131CB16A6692759021CFAB8181')
        produced = hmac.final()

        self.assertEqual(hex_encode(expected), hex_encode(produced))

    def test_rng(self):
        """RNG produces distinct output of the requested lengths and accepts reseeding."""
        user_rng = botan2.RandomNumberGenerator("user")

        output1 = user_rng.get(32)
        output2 = user_rng.get(32)

        self.assertEqual(len(output1), 32)
        self.assertEqual(len(output2), 32)
        self.assertNotEqual(output1, output2)

        output3 = user_rng.get(1021)
        self.assertEqual(len(output3), 1021)

        system_rng = botan2.RandomNumberGenerator('system')

        user_rng.reseed_from_rng(system_rng, 256)

        user_rng.add_entropy('seed material...')

    def test_hash(self):
        """Hash objects support clear/copy_state and reject unknown names."""
        # Previously a try/except that silently passed if no exception
        # was raised; assertRaises makes the expectation explicit.
        with self.assertRaises(botan2.BotanException) as ctx:
            botan2.HashFunction('NoSuchHash')
        self.assertEqual(str(ctx.exception), "botan_hash_init failed: -40 (Not implemented)")

        sha256 = botan2.HashFunction('SHA-256')
        self.assertEqual(sha256.algo_name(), 'SHA-256')
        self.assertEqual(sha256.output_length(), 32)
        self.assertEqual(sha256.block_size(), 64)
        sha256.update('ignore this please')
        sha256.clear()
        sha256.update('a')
        hash1 = sha256.final()

        self.assertEqual(hex_encode(hash1), "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb")

        # copy_state forks the hash midway: one branch finishes "a",
        # the other "aabc" ... wait, "a"+"a"="aa"? No: final() reset the
        # state, so this hashes "a" then forks before adding "bc".
        sha256.update(hex_decode('61'))
        sha256_2 = sha256.copy_state()
        sha256.update(hex_decode('6263'))
        h2 = sha256.final()
        self.assertEqual(hex_encode(h2), "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")

        self.assertEqual(hex_encode(sha256_2.final()), hex_encode(hash1))

    def test_cipher(self):
        """Authenticated/stream modes round-trip random plaintexts."""
        for mode in ['AES-128/CTR-BE', 'Serpent/GCM', 'ChaCha20Poly1305']:
            enc = botan2.SymmetricCipher(mode, encrypt=True)

            if mode == 'AES-128/CTR-BE':
                self.assertEqual(enc.algo_name(), 'CTR-BE(AES-128)')
            elif mode == 'Serpent/GCM':
                self.assertEqual(enc.algo_name(), 'Serpent/GCM(16)')
            else:
                self.assertEqual(enc.algo_name(), mode)

            (kmin, kmax) = enc.key_length()

            self.assertLessEqual(kmin, kmax)

            rng = botan2.RandomNumberGenerator()
            iv = rng.get(enc.default_nonce_length())
            key = rng.get(kmax)
            pt = rng.get(21)

            enc.set_key(key)
            enc.start(iv)

            update_result = enc.update('')
            # was a bare `assert`, which is stripped under python -O
            self.assertFalse(update_result)

            ct = enc.finish(pt)

            dec = botan2.SymmetricCipher(mode, encrypt=False)
            dec.set_key(key)
            dec.start(iv)
            decrypted = dec.finish(ct)

            self.assertEqual(decrypted, pt)

    def test_mceliece(self):
        """McEliece keygen plus MCEIES encrypt/decrypt round-trips."""
        rng = botan2.RandomNumberGenerator()
        mce_priv = botan2.PrivateKey.create('McEliece', '2960,57', rng)
        mce_pub = mce_priv.get_public_key()
        self.assertEqual(mce_pub.estimated_strength(), 128)

        mce_plaintext = rng.get(16)
        mce_ad = rng.get(48)
        mce_ciphertext = botan2.mceies_encrypt(mce_pub, rng, 'ChaCha20Poly1305', mce_plaintext, mce_ad)

        mce_decrypt = botan2.mceies_decrypt(mce_priv, 'ChaCha20Poly1305', mce_ciphertext, mce_ad)

        self.assertEqual(mce_plaintext, mce_decrypt)

    def test_rsa_load_store(self):
        """RSA keys load from PEM / raw (n, e) and re-encode identically."""
        rsa_priv_pem = """-----BEGIN PRIVATE KEY-----
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBALWtiBjcofJW/4+r
CIjQZn2V3yCYsNIBpMdVkNPr36FZ3ZHGSv2ggmCe+IWy0fTcBVyP+fo3HC8zmOC2
EsYDFRExyB2zIsjRXlPrVrTfcyXwUEaInLJQId5CguFrmyj1y7K43ezg+OTop39n
TyaukrciCSCh++Q/UQOanHnR8ctrAgMBAAECgYBPfKySgBmk31ZyA7k4rsFgye01
JEkcoNZ41iGG7ujJffl4maLew9a3MmZ2jI3azVbVMDMFPA5rQm5tRowBMYEJ5oBc
LP4AP41Lujfa+vua6l3t94bAV+CufZiY0297FcPbGqNu+xSQ2Bol2uHh9mrcgQUs
fevA50KOLR9hv4zH6QJBAPCOKiExONtVhJn8qVPCBlJ8Vjjnt9Uno5EzMBAKMbZi
OySkGwo9/9LUWO03r7tjrGSy5jJk+iOrcLeDl6zETfkCQQDBV6PpD/3ccQ1IfWcw
jG8yik0bIuXgrD0uW4g8Cvj+05wrv7RYPHuFtj3Rtb94YjtgYn7QvjH7y88XmTC4
2k2DAkEA4E9Ae7kBUoz42/odDswyxwHICMIRyoJu5Ht9yscmufH5Ql6AFFnhzf9S
eMjfZfY4j6G+Q6mjElXQAl+DtIdMSQJBAJzdMkuBggI8Zv6NYA9voThsJSsDIWcr
12epM9sjO+nkXizQmM2OJNnThkyDHRna+Tm2MBXEemFEdn06+ODBnWkCQQChAbG4
255RiCuYdrfiTPF/WLtvRyGd1LRwHcYIW4mJFPzxYAMTwQKbppLAnxw73vyef/zC
2BgXEW02tjRBtgZ+
-----END PRIVATE KEY-----
"""

        rsa_pub_pem = """-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC1rYgY3KHyVv+PqwiI0GZ9ld8g
mLDSAaTHVZDT69+hWd2Rxkr9oIJgnviFstH03AVcj/n6NxwvM5jgthLGAxURMcgd
syLI0V5T61a033Ml8FBGiJyyUCHeQoLha5so9cuyuN3s4Pjk6Kd/Z08mrpK3Igkg
ofvkP1EDmpx50fHLawIDAQAB
-----END PUBLIC KEY-----
"""

        rsapriv = botan2.PrivateKey.load(rsa_priv_pem)

        self.assertEqual(rsapriv.to_pem(), rsa_priv_pem)

        rsapub = rsapriv.get_public_key()
        self.assertEqual(rsapub.to_pem(), rsa_pub_pem)

        rsapub = botan2.PublicKey.load(rsa_pub_pem)
        self.assertEqual(rsapub.to_pem(), rsa_pub_pem)

        n = 0xB5AD8818DCA1F256FF8FAB0888D0667D95DF2098B0D201A4C75590D3EBDFA159DD91C64AFDA082609EF885B2D1F4DC055C8FF9FA371C2F3398E0B612C603151131C81DB322C8D15E53EB56B4DF7325F05046889CB25021DE4282E16B9B28F5CBB2B8DDECE0F8E4E8A77F674F26AE92B7220920A1FBE43F51039A9C79D1F1CB6B # pylint: disable=line-too-long
        e = 0x10001

        rsapub2 = botan2.PublicKey.load_rsa(n, e)
        self.assertEqual(rsapub2.to_pem(), rsa_pub_pem)

        self.assertEqual(rsapub2.get_field("n"), n)
        self.assertEqual(rsapub2.get_field("e"), e)

    def test_key_crypto(self):
        """Encrypted private key export/import round-trips for several PBE settings."""
        rng = botan2.RandomNumberGenerator()
        priv = botan2.PrivateKey.create('RSA', '1024', rng)
        passphrase = "super secret tell noone"

        for is_pem in [True, False]:
            ref_val = priv.export(is_pem)

            enc1 = priv.export_encrypted(passphrase, rng, True, msec=10)
            dec1 = botan2.PrivateKey.load(enc1, passphrase)
            self.assertEqual(dec1.export(is_pem), ref_val)

            pem2 = priv.export_encrypted(passphrase, rng, True, msec=10, cipher="AES-128/SIV")
            dec2 = botan2.PrivateKey.load(pem2, passphrase)
            self.assertEqual(dec2.export(is_pem), ref_val)

            pem3 = priv.export_encrypted(passphrase, rng, True, msec=10, cipher="AES-128/GCM", pbkdf="Scrypt")
            dec3 = botan2.PrivateKey.load(pem3, passphrase)
            self.assertEqual(dec3.export(is_pem), ref_val)

    def test_check_key(self):
        """check_key accepts a valid RSA modulus and load_rsa rejects an invalid one."""
        # valid (if rather small) RSA key
        n = 273279220906618527352827457840955116141
        e = 0x10001

        rng = botan2.RandomNumberGenerator()

        rsapub = botan2.PublicKey.load_rsa(n, e)
        self.assertTrue(rsapub.check_key(rng))

        # invalid: previously a try/except that silently passed when no
        # exception was raised; assertRaises enforces the failure
        with self.assertRaises(botan2.BotanException) as ctx:
            botan2.PublicKey.load_rsa(n - 1, e)
        self.assertEqual(str(ctx.exception), "botan_pubkey_load_rsa failed: -1 (Invalid input)")

    def test_rsa(self):
        # pylint: disable=too-many-locals
        """RSA keygen, OAEP encrypt/decrypt, and PSS sign/verify work end to end."""
        rng = botan2.RandomNumberGenerator()
        rsapriv = botan2.PrivateKey.create('RSA', '1024', rng)
        self.assertEqual(rsapriv.algo_name(), 'RSA')

        priv_pem = rsapriv.to_pem()
        priv_der = rsapriv.to_der()

        self.assertEqual(priv_pem[0:28], "-----BEGIN PRIVATE KEY-----\n")
        self.assertGreater(len(priv_pem), len(priv_der))

        rsapub = rsapriv.get_public_key()
        self.assertEqual(rsapub.algo_name(), 'RSA')
        self.assertEqual(rsapub.estimated_strength(), 80)

        pub_pem = rsapub.to_pem()
        pub_der = rsapub.to_der()

        self.assertEqual(pub_pem[0:27], "-----BEGIN PUBLIC KEY-----\n")
        self.assertGreater(len(pub_pem), len(pub_der))

        enc = botan2.PKEncrypt(rsapub, "OAEP(SHA-256)")
        dec = botan2.PKDecrypt(rsapriv, "OAEP(SHA-256)")

        symkey = rng.get(32)
        ctext = enc.encrypt(symkey, rng)

        ptext = dec.decrypt(ctext)

        self.assertEqual(ptext, symkey)

        signer = botan2.PKSign(rsapriv, 'EMSA4(SHA-384)')

        # update() may be called incrementally; splits must not matter
        signer.update('messa')
        signer.update('ge')
        sig = signer.finish(botan2.RandomNumberGenerator())

        verify = botan2.PKVerify(rsapub, 'EMSA4(SHA-384)')

        verify.update('mess')
        verify.update('age')
        self.assertTrue(verify.check_signature(sig))

        verify.update('mess of things')
        verify.update('age')
        self.assertFalse(verify.check_signature(sig))

        verify.update('message')
        self.assertTrue(verify.check_signature(sig))

    def test_ecdsa(self):
        """ECDSA sign/verify including DER vs raw signature encodings."""
        rng = botan2.RandomNumberGenerator()

        hash_fn = 'EMSA1(SHA-256)'
        group = 'secp256r1'
        msg = 'test message'

        priv = botan2.PrivateKey.create('ECDSA', group, rng)
        pub = priv.get_public_key()
        self.assertEqual(pub.get_field('public_x'), priv.get_field('public_x'))
        self.assertEqual(pub.get_field('public_y'), priv.get_field('public_y'))

        signer = botan2.PKSign(priv, hash_fn, True)
        signer.update(msg)
        signature = signer.finish(rng)

        verifier = botan2.PKVerify(pub, hash_fn)
        verifier.update(msg)
        #fails because DER/not-DER mismatch
        self.assertFalse(verifier.check_signature(signature))

        verifier = botan2.PKVerify(pub, hash_fn, True)
        verifier.update(msg)
        self.assertTrue(verifier.check_signature(signature))

        pub_x = pub.get_field('public_x')
        pub_y = priv.get_field('public_y')
        pub2 = botan2.PublicKey.load_ecdsa(group, pub_x, pub_y)
        verifier = botan2.PKVerify(pub2, hash_fn, True)
        verifier.update(msg)
        self.assertTrue(verifier.check_signature(signature))

        priv2 = botan2.PrivateKey.load_ecdsa(group, priv.get_field('x'))
        signer = botan2.PKSign(priv2, hash_fn, True)
        # sign empty message
        signature = signer.finish(rng)

        # verify empty message
        self.assertTrue(verifier.check_signature(signature))

    def test_sm2(self):
        """SM2 sign/verify, including keys reloaded from raw field values."""
        rng = botan2.RandomNumberGenerator()

        hash_fn = 'EMSA1(SM3)'
        group = 'sm2p256v1'
        msg = 'test message'

        priv = botan2.PrivateKey.create('SM2', group, rng)
        pub = priv.get_public_key()
        self.assertEqual(pub.get_field('public_x'), priv.get_field('public_x'))
        self.assertEqual(pub.get_field('public_y'), priv.get_field('public_y'))

        signer = botan2.PKSign(priv, hash_fn)
        signer.update(msg)
        signature = signer.finish(rng)

        verifier = botan2.PKVerify(pub, hash_fn)
        verifier.update(msg)
        self.assertTrue(verifier.check_signature(signature))

        pub_x = pub.get_field('public_x')
        pub_y = priv.get_field('public_y')
        pub2 = botan2.PublicKey.load_sm2(group, pub_x, pub_y)
        verifier = botan2.PKVerify(pub2, hash_fn)
        verifier.update(msg)
        self.assertTrue(verifier.check_signature(signature))

        priv2 = botan2.PrivateKey.load_sm2(group, priv.get_field('x'))
        signer = botan2.PKSign(priv2, hash_fn)
        # sign empty message
        signature = signer.finish(rng)

        # verify empty message
        self.assertTrue(verifier.check_signature(signature))

    def test_ecdh(self):
        # pylint: disable=too-many-locals
        """ECDH key agreement produces matching shared keys on both sides."""
        a_rng = botan2.RandomNumberGenerator('user')
        b_rng = botan2.RandomNumberGenerator('user')

        kdf = 'KDF2(SHA-384)'

        for grp in ['secp256r1', 'secp384r1', 'brainpool256r1']:
            a_priv = botan2.PrivateKey.create('ECDH', grp, a_rng)
            b_priv = botan2.PrivateKey.create('ECDH', grp, b_rng)

            a_op = botan2.PKKeyAgreement(a_priv, kdf)
            b_op = botan2.PKKeyAgreement(b_priv, kdf)

            a_pub = a_op.public_value()
            b_pub = b_op.public_value()

            salt = a_rng.get(8) + b_rng.get(8)

            a_key = a_op.agree(b_pub, 32, salt)
            b_key = b_op.agree(a_pub, 32, salt)

            self.assertEqual(a_key, b_key)

            a_pem = a_priv.to_pem()

            a_priv_x = a_priv.get_field('x')

            new_a = botan2.PrivateKey.load_ecdh(grp, a_priv_x)

            self.assertEqual(a_pem, new_a.to_pem())

    def test_certs(self):
        # pylint: disable=too-many-statements
        """X.509 certificate accessors and path validation against NIST test data."""
        cert = botan2.X509Cert(filename="src/tests/data/x509/ecc/CSCA.CSCA.csca-germany.1.crt")
        pubkey = cert.subject_public_key()

        self.assertEqual(pubkey.algo_name(), 'ECDSA')
        self.assertEqual(pubkey.estimated_strength(), 112)

        self.assertEqual(cert.fingerprint("SHA-1"),
                         "32:42:1C:C3:EC:54:D7:E9:43:EC:51:F0:19:23:BD:85:1D:F2:1B:B9")

        self.assertEqual(hex_encode(cert.serial_number()), "01")
        self.assertEqual(hex_encode(cert.authority_key_id()),
                         "0096452de588f966c4ccdf161dd1f3f5341b71e7")

        self.assertEqual(cert.subject_dn('Name', 0), 'csca-germany')
        self.assertEqual(cert.subject_dn('Email', 0), 'csca-germany@bsi.bund.de')
        self.assertEqual(cert.subject_dn('Organization', 0), 'bund')
        self.assertEqual(cert.subject_dn('Organizational Unit', 0), 'bsi')
        self.assertEqual(cert.subject_dn('Country', 0), 'DE')

        self.assertTrue(cert.to_string().startswith("Version: 3"))

        self.assertEqual(cert.issuer_dn('Name', 0), 'csca-germany')
        self.assertEqual(cert.issuer_dn('Organization', 0), 'bund')
        self.assertEqual(cert.issuer_dn('Organizational Unit', 0), 'bsi')
        self.assertEqual(cert.issuer_dn('Country', 0), 'DE')

        self.assertTrue(cert.hostname_match('csca-germany'))
        self.assertFalse(cert.hostname_match('csca-slovakia'))

        self.assertEqual(cert.not_before(), 1184858838)
        self.assertEqual(cert.not_after(), 1831907880)

        self.assertTrue(cert.allowed_usage(["CRL_SIGN", "KEY_CERT_SIGN"]))
        self.assertTrue(cert.allowed_usage(["KEY_CERT_SIGN"]))
        self.assertFalse(cert.allowed_usage(["DIGITAL_SIGNATURE"]))
        self.assertFalse(cert.allowed_usage(["DIGITAL_SIGNATURE", "CRL_SIGN"]))

        root = botan2.X509Cert("src/tests/data/x509/nist/root.crt")

        # verify() returns numeric validation codes; 0 means verified
        int09 = botan2.X509Cert("src/tests/data/x509/nist/test09/int.crt")
        end09 = botan2.X509Cert("src/tests/data/x509/nist/test09/end.crt")
        self.assertEqual(end09.verify([int09], [root]), 2001)

        end04 = botan2.X509Cert("src/tests/data/x509/nist/test04/end.crt")
        int04_1 = botan2.X509Cert("src/tests/data/x509/nist/test04/int1.crt")
        int04_2 = botan2.X509Cert("src/tests/data/x509/nist/test04/int2.crt")
        self.assertEqual(end04.verify([int04_1, int04_2], [], "src/tests/data/x509/nist/", required_strength=80), 0)
        self.assertEqual(end04.verify([int04_1, int04_2], [], required_strength=80), 3000)
        self.assertEqual(end04.verify([int04_1, int04_2], [root], required_strength=80, hostname="User1-CP.02.01"), 0)
        self.assertEqual(end04.verify([int04_1, int04_2], [root], required_strength=80, hostname="invalid"), 4008)
        self.assertEqual(end04.verify([int04_1, int04_2], [root], required_strength=80, reference_time=1), 2000)

        self.assertEqual(botan2.X509Cert.validation_status(0), 'Verified')
        self.assertEqual(botan2.X509Cert.validation_status(3000), 'Certificate issuer not found')
        self.assertEqual(botan2.X509Cert.validation_status(4008), 'Certificate does not match provided name')

        rootcrl = botan2.X509CRL("src/tests/data/x509/nist/root.crl")

        end01 = botan2.X509Cert("src/tests/data/x509/nist/test01/end.crt")
        self.assertEqual(end01.verify([], [root], required_strength=80, crls=[rootcrl]), 0)

        int20 = botan2.X509Cert("src/tests/data/x509/nist/test20/int.crt")
        end20 = botan2.X509Cert("src/tests/data/x509/nist/test20/end.crt")
        int20crl = botan2.X509CRL("src/tests/data/x509/nist/test20/int.crl")

        self.assertEqual(end20.verify([int20], [root], required_strength=80, crls=[int20crl, rootcrl]), 5000)
        self.assertEqual(botan2.X509Cert.validation_status(5000), 'Certificate is revoked')

        int21 = botan2.X509Cert("src/tests/data/x509/nist/test21/int.crt")
        end21 = botan2.X509Cert("src/tests/data/x509/nist/test21/end.crt")
        int21crl = botan2.X509CRL("src/tests/data/x509/nist/test21/int.crl")
        self.assertEqual(end21.verify([int21], [root], required_strength=80, crls=[int21crl, rootcrl]), 5000)

        self.assertTrue(int20.is_revoked(rootcrl))
        self.assertFalse(int04_1.is_revoked(rootcrl))
        self.assertTrue(end21.is_revoked(int21crl))

    def test_mpi(self):
        # pylint: disable=too-many-statements,too-many-locals
        """MPI construction, arithmetic, bit operations, and modular math."""
        z = botan2.MPI()
        self.assertEqual(z.bit_count(), 0)
        five = botan2.MPI('5')
        self.assertEqual(five.bit_count(), 3)
        big = botan2.MPI('0x85839682368923476892367235')
        self.assertEqual(big.bit_count(), 104)
        small = botan2.MPI(0xDEADBEEF)
        radix = botan2.MPI("DEADBEEF", 16)

        self.assertEqual(hex_encode(small.to_bytes()), "deadbeef")
        self.assertEqual(hex_encode(big.to_bytes()), "85839682368923476892367235")

        self.assertEqual(int(small), 0xDEADBEEF)
        self.assertEqual(int(radix), int(small))

        self.assertEqual(int(small >> 16), 0xDEAD)

        small >>= 15

        self.assertEqual(int(small), 0x1BD5B)

        small <<= 15

        self.assertEqual(int(small), 0xDEAD8000)

        ten = botan2.MPI(10)

        self.assertEqual(ten, five + five)
        self.assertNotEqual(ten, five)
        self.assertLess(five, ten)
        self.assertLessEqual(five, ten)

        x = botan2.MPI(five)

        self.assertEqual(x, five)

        x += botan2.MPI(1)
        self.assertNotEqual(x, five)

        self.assertEqual(int(x * five), 30)

        x *= five
        x *= five
        self.assertEqual(int(x), 150)

        self.assertFalse(x.is_negative())

        x.flip_sign()
        self.assertTrue(x.is_negative())
        self.assertEqual(int(x), -150)

        x.flip_sign()

        x.set_bit(0)
        # Bug fix: this was assertTrue(int(x), 151), where 151 was silently
        # treated as the failure *message* and the check passed vacuously.
        self.assertEqual(int(x), 151)
        self.assertTrue(x.get_bit(0))
        self.assertTrue(x.get_bit(4))
        self.assertFalse(x.get_bit(6))

        x.clear_bit(4)
        self.assertEqual(int(x), 135)

        rng = botan2.RandomNumberGenerator()
        self.assertFalse(x.is_prime(rng))

        two = botan2.MPI(2)

        x += two
        self.assertTrue(x.is_prime(rng))

        mod = x + two

        inv = x.inverse_mod(mod)
        self.assertEqual(int(inv), 69)
        self.assertEqual(int((inv * x) % mod), 1)

        p = inv.pow_mod(botan2.MPI(46), mod)
        self.assertEqual(int(p), 42)

        one = botan2.MPI(1)
        twelve = botan2.MPI("C", 16)
        eight = botan2.MPI(8)

        mul = twelve.mod_mul(eight, inv)
        self.assertEqual(int(mul), 27)

        gcd = one.gcd(one)
        self.assertEqual(one, gcd)
        gcd = one.gcd(twelve)
        self.assertEqual(one, gcd)
        gcd = twelve.gcd(eight)
        self.assertEqual(4, int(gcd))

    def test_mpi_random(self):
        """Random MPIs respect the requested bit size and range bounds."""
        rng = botan2.RandomNumberGenerator()

        u = botan2.MPI.random(rng, 512)
        self.assertEqual(u.bit_count(), 512)

        l = u >> 32
        self.assertEqual(l.bit_count(), 512-32)

        for _i in range(10):
            x = botan2.MPI.random_range(rng, l, u)
            self.assertLess(x, u)
            self.assertGreater(x, l)

    def test_fpe(self):
        """FE1 format-preserving encryption round-trips within the modulus."""
        modulus = botan2.MPI('1000000000')
        key = b'001122334455'

        fpe = botan2.FormatPreservingEncryptionFE1(modulus, key)

        value = botan2.MPI('392910392')
        tweak = 'tweak value'

        ctext = fpe.encrypt(value, tweak)

        ptext = fpe.decrypt(ctext, tweak)

        self.assertEqual(value, ptext)

    def test_keywrap(self):
        """NIST AES key wrap matches the RFC 3394 test vector and unwraps."""
        key = hex_decode('00112233445566778899aabbccddeeff')
        kek = hex_decode('000102030405060708090a0b0c0d0e0f')

        wrapped = botan2.nist_key_wrap(kek, key)
        self.assertEqual(hex_encode(wrapped), '1fa68b0a8112b447aef34bd8fb5a7b829d3e862371d2cfe5')

        self.assertEqual(len(wrapped), 16+8)
        unwrapped = botan2.nist_key_unwrap(kek, wrapped)
        self.assertEqual(hex_encode(unwrapped), '00112233445566778899aabbccddeeff')

    def test_hotp(self):
        """HOTP codes match RFC 4226 vectors; check() honors the resync window."""
        hotp = botan2.HOTP(b'12345678901234567890')

        self.assertEqual(hotp.generate(0), 755224)
        self.assertEqual(hotp.generate(1), 287082)
        self.assertEqual(hotp.generate(9), 520489)

        self.assertEqual(hotp.check(520489, 8), (False, 8))
        self.assertEqual(hotp.check(520489, 8, 1), (True, 10))
        self.assertEqual(hotp.check(520489, 7, 2), (True, 10))
        self.assertEqual(hotp.check(520489, 0, 9), (True, 10))

    def test_totp(self):
        """TOTP codes match RFC 6238 vectors; check() honors clock drift."""
        totp = botan2.TOTP(b'12345678901234567890', digest="SHA-1", digits=8)

        self.assertEqual(totp.generate(59), 94287082)
        self.assertEqual(totp.generate(1111111109), 7081804)
        self.assertEqual(totp.generate(1111111111), 14050471)
        self.assertEqual(totp.generate(1234567890), 89005924)
        self.assertEqual(totp.generate(1234567890), 89005924)
        self.assertEqual(totp.generate(2000000000), 69279037)

        self.assertTrue(totp.check(7081804, 1111111109))
        self.assertTrue(totp.check(7081804, 1111111109 - 29))
        self.assertFalse(totp.check(7081804, 1111111109 + 1))
        self.assertTrue(totp.check(7081804, 1111111109 + 30, 1))
+
if __name__ == '__main__':
    # Run all BotanPythonTests via the standard unittest runner.
    unittest.main()
diff --git a/comm/third_party/botan/src/scripts/tls_scanner/boa.txt b/comm/third_party/botan/src/scripts/tls_scanner/boa.txt
new file mode 100644
index 0000000000..436b785728
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_scanner/boa.txt
@@ -0,0 +1 @@
+bankofamerica.com
diff --git a/comm/third_party/botan/src/scripts/tls_scanner/policy.txt b/comm/third_party/botan/src/scripts/tls_scanner/policy.txt
new file mode 100644
index 0000000000..ddd7a7c57d
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_scanner/policy.txt
@@ -0,0 +1,19 @@
+allow_tls10=true
+allow_tls11=true
+allow_tls12=true
+allow_dtls10=false
+allow_dtls12=false
+
+# Camellia first just to see if there is anyone out there who will negotiate it with us
+ciphers=Camellia-128 Camellia-256 Camellia-128/GCM Camellia-256/GCM ChaCha20Poly1305 AES-256/GCM AES-128/GCM AES-256 AES-128
+signature_hashes=SHA-384 SHA-256 SHA-1
+macs=AEAD SHA-384 SHA-256 SHA-1
+key_exchange_methods=CECPQ1 ECDH DH RSA
+signature_methods=ECDSA RSA DSA IMPLICIT
+ecc_curves=x25519 secp256r1 secp384r1
+minimum_dh_group_size=1024
+minimum_ecdh_group_size=255
+minimum_rsa_bits=2048
+
+allow_insecure_renegotiation=false
+allow_server_initiated_renegotiation=false
diff --git a/comm/third_party/botan/src/scripts/tls_scanner/readme.txt b/comm/third_party/botan/src/scripts/tls_scanner/readme.txt
new file mode 100644
index 0000000000..a4754b02df
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_scanner/readme.txt
@@ -0,0 +1,5 @@
+
+Simple script to scan hosts to check basic TLS client compatibility.
+
+URL list chosen mostly from large tech/software vendors, feel free to
+send suggestions.
diff --git a/comm/third_party/botan/src/scripts/tls_scanner/tls_scanner.py b/comm/third_party/botan/src/scripts/tls_scanner/tls_scanner.py
new file mode 100755
index 0000000000..8fdf046ca7
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_scanner/tls_scanner.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python2
+
+import sys
+import time
+import subprocess
+import re
+
+def format_report(client_output):
+ version_re = re.compile('TLS (v1\.[0-2]) using ([A-Z0-9_]+)')
+
+ version_match = version_re.search(client_output)
+
+ #print client_output
+
+ if version_match:
+ return "Established %s %s" % (version_match.group(1), version_match.group(2))
+ else:
+ return client_output
+
+def scanner(args = None):
+ if args is None:
+ args = sys.argv
+
+ if len(args) != 2:
+ print "Error: Usage tls_scanner.py host_file"
+ return 2
+
+ scanners = {}
+
+ for url in [s.strip() for s in open(args[1]).readlines()]:
+ scanners[url] = subprocess.Popen(['../../../botan', 'tls_client', '--policy=policy.txt', url],
+ stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ for url in scanners.keys():
+ scanners[url].stdin.close()
+
+ report = {}
+ timeout = 10
+
+ for url in scanners.keys():
+ print "waiting for", url
+
+ for i in range(timeout):
+ scanners[url].poll()
+ if scanners[url].returncode != None:
+ break
+ #print "Waiting %d more seconds for %s" % (timeout-i, url)
+ time.sleep(1)
+
+ if scanners[url].returncode != None:
+ output = scanners[url].stdout.read() + scanners[url].stderr.read()
+ report[url] = format_report(output)
+
+ for url in report.keys():
+ print url, ":", report[url]
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(scanner())
diff --git a/comm/third_party/botan/src/scripts/tls_scanner/urls.txt b/comm/third_party/botan/src/scripts/tls_scanner/urls.txt
new file mode 100644
index 0000000000..3be7276b32
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_scanner/urls.txt
@@ -0,0 +1,58 @@
+adobe.com
+adp.com
+airbnb.com
+akamai.com
+amazon.com
+apache.org
+apple.com
+bbc.co.uk
+bing.com
+ca.com
+cisco.com
+citrix.com
+cloudflare.com
+craigslist.org
+dell.com
+ebay.com
+facebook.com
+github.com
+gmail.com
+google.com
+hp.com
+huawei.com
+ibm.com
+ietf.org
+intuit.com
+linkedin.com
+medium.com
+microsoft.com
+mikestoolbox.org
+netflix.com
+openssl.org
+oracle.com
+chase.com
+bankofamerica.com
+citibank.com
+wellsfargo.com
+ebay.com
+paypal.com
+randombit.net
+reddit.com
+redhat.com
+salesforce.com
+sas.com
+siemens.com
+sony.com
+stripe.com
+symantec.com
+tls.mbed.org
+twitter.com
+uber.com
+vmware.com
+whatsapp.com
+wikipedia.org
+www.iso.org
+www.lg.com
+yahoo.com
+yandex.ru
+youtube.com
diff --git a/comm/third_party/botan/src/scripts/tls_suite_info.py b/comm/third_party/botan/src/scripts/tls_suite_info.py
new file mode 100755
index 0000000000..21dcd7fcdb
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/tls_suite_info.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python2
+
+"""
+Used to generate lib/tls/tls_suite_info.cpp from IANA params
+
+(C) 2011, 2012, 2013, 2014, 2015, 2016, 2017 Jack Lloyd
+
+Botan is released under the Simplified BSD License (see license.txt)
+"""
+
+import sys
+import re
+import datetime
+import hashlib
+import optparse
+
+def to_ciphersuite_info(code, name):
+
+ (sig_and_kex,cipher_and_mac) = name.split('_WITH_')
+
+ if sig_and_kex == 'RSA':
+ sig_algo = 'IMPLICIT'
+ kex_algo = 'RSA'
+ elif 'PSK' in sig_and_kex:
+ sig_algo = 'IMPLICIT'
+ kex_algo = sig_and_kex
+ elif 'SRP' in sig_and_kex:
+ srp_info = sig_and_kex.split('_')
+ if len(srp_info) == 2: # 'SRP_' + hash
+ kex_algo = sig_and_kex
+ sig_algo = 'IMPLICIT'
+ else:
+ kex_algo = '_'.join(srp_info[0:-1])
+ sig_algo = srp_info[-1]
+ else:
+ (kex_algo, sig_algo) = sig_and_kex.split('_')
+
+ cipher_and_mac = cipher_and_mac.split('_')
+
+ mac_algo = cipher_and_mac[-1]
+
+ cipher = cipher_and_mac[:-1]
+
+ if mac_algo == '8' and cipher[-1] == 'CCM':
+ cipher = cipher[:-1]
+ mac_algo = 'CCM_8'
+ elif cipher[-2] == 'CCM' and cipher[-1] == '8':
+ cipher = cipher[:-1]
+ mac_algo = 'CCM_8'
+
+ if mac_algo == 'CCM':
+ cipher += ['CCM']
+ mac_algo = 'SHA256'
+ elif mac_algo == 'CCM_8':
+ cipher += ['CCM(8)']
+ mac_algo = 'SHA256'
+
+ cipher_info = {
+ 'CHACHA20': ('ChaCha',32),
+ 'IDEA': ('IDEA',16),
+ 'DES': ('DES',8),
+ '3DES': ('3DES',24),
+ 'CAMELLIA': ('Camellia',None),
+ 'AES': ('AES',None),
+ 'SEED': ('SEED',16),
+ 'ARIA': ('ARIA',None),
+ }
+
+ tls_to_botan_names = {
+ 'IMPLICIT': 'IMPLICIT',
+
+ 'anon': 'ANONYMOUS',
+ 'MD5': 'MD5',
+ 'SHA': 'SHA-1',
+ 'SHA256': 'SHA-256',
+ 'SHA384': 'SHA-384',
+ 'SHA512': 'SHA-512',
+
+ 'CHACHA': 'ChaCha',
+ '3DES': 'TripleDES',
+
+ 'DSS': 'DSA',
+ 'ECDSA': 'ECDSA',
+ 'RSA': 'RSA',
+ 'SRP_SHA': 'SRP_SHA',
+ 'DHE': 'DH',
+ 'DH': 'DH',
+ 'ECDHE': 'ECDH',
+ 'ECDH': 'ECDH',
+ '': '',
+ 'PSK': 'PSK',
+ 'DHE_PSK': 'DHE_PSK',
+ 'PSK_DHE': 'DHE_PSK',
+ 'ECDHE_PSK': 'ECDHE_PSK',
+ 'CECPQ1': 'CECPQ1',
+ 'CECPQ1_PSK': 'CECPQ1_PSK',
+ }
+
+ mac_keylen = {
+ 'MD5': 16,
+ 'SHA-1': 20,
+ 'SHA-256': 32,
+ 'SHA-384': 48,
+ 'SHA-512': 64,
+ }
+
+ mac_algo = tls_to_botan_names[mac_algo]
+ sig_algo = tls_to_botan_names[sig_algo]
+ kex_algo = tls_to_botan_names[kex_algo]
+ if kex_algo == 'RSA':
+ kex_algo = 'STATIC_RSA'
+
+ (cipher_algo, cipher_keylen) = cipher_info[cipher[0]]
+
+ if cipher_keylen is None:
+ cipher_keylen = int(cipher[1]) / 8
+
+ if cipher_algo in ['AES', 'Camellia', 'ARIA']:
+ cipher_algo += '-%d' % (cipher_keylen*8)
+
+ mode = ''
+
+ if cipher[0] == 'CHACHA20' and cipher[1] == 'POLY1305':
+ return (name, code, sig_algo, kex_algo, "ChaCha20Poly1305", cipher_keylen, "AEAD", 0, mac_algo, 'AEAD_XOR_12')
+
+ mode = cipher[-1]
+ if mode not in ['CBC', 'GCM', 'CCM(8)', 'CCM', 'OCB']:
+ print "#warning Unknown mode '%s' for ciphersuite %s (0x%d)" % (' '.join(cipher), name, code)
+
+ if mode != 'CBC':
+ if mode == 'OCB':
+ cipher_algo += '/OCB(12)'
+ else:
+ cipher_algo += '/' + mode
+
+ if mode == 'CBC':
+ return (name, code, sig_algo, kex_algo, cipher_algo, cipher_keylen, mac_algo, mac_keylen[mac_algo], mac_algo, 'CBC_MODE')
+ elif mode == 'OCB':
+ return (name, code, sig_algo, kex_algo, cipher_algo, cipher_keylen, "AEAD", 0, mac_algo, 'AEAD_XOR_12')
+ else:
+ return (name, code, sig_algo, kex_algo, cipher_algo, cipher_keylen, "AEAD", 0, mac_algo, 'AEAD_IMPLICIT_4')
+
+def open_input(args):
+ iana_url = 'https://www.iana.org/assignments/tls-parameters/tls-parameters.txt'
+
+ if len(args) == 1:
+ try:
+ return open('tls-parameters.txt')
+ except OSError:
+ pass
+
+ import urllib2
+ return urllib2.urlopen(iana_url)
+ else:
+ return open(args[1])
+
+"""
+Handle command line options
+"""
+def process_command_line(args):
+
+ parser = optparse.OptionParser()
+
+ parser.add_option('--with-ocb', action='store_true', default=True,
+ help='enable OCB AEAD suites')
+ parser.add_option('--without-ocb', action='store_false', dest='with_ocb',
+ help='disable OCB AEAD suites')
+
+ parser.add_option('--with-aria-cbc', action='store_true', default=False,
+ help='enable ARIA CBC suites')
+ parser.add_option('--without-aria-cbc', action='store_false', dest='with_aria_cbc',
+ help='disable ARIA CBC suites')
+
+ parser.add_option('--with-cecpq1', action='store_true', default=True,
+ help='enable CECPQ1 suites')
+ parser.add_option('--without-cecpq1', action='store_false', dest='with_cecpq1',
+ help='disable CECPQ1 suites')
+
+ parser.add_option('--with-srp-aead', action='store_true', default=False,
+ help='add SRP AEAD suites')
+ parser.add_option('--without-srp-aead', action='store_false', dest='with_srp_aead',
+ help='disable SRP AEAD suites')
+
+ parser.add_option('--save-download', action='store_true', default=False,
+ help='save downloaded tls-parameters.txt to cwd')
+
+ parser.add_option('--output', '-o',
+ help='file to write output to (default %default)',
+ default='src/lib/tls/tls_suite_info.cpp')
+
+ return parser.parse_args(args)
+
+def main(args = None):
+ if args is None:
+ args = sys.argv
+
+ weak_crypto = ['EXPORT', 'RC2', 'IDEA', 'RC4', '_DES_', 'WITH_NULL', 'GOST']
+ static_dh = ['ECDH_ECDSA', 'ECDH_RSA', 'DH_DSS', 'DH_RSA'] # not supported
+ protocol_goop = ['SCSV', 'KRB5']
+ maybe_someday = ['RSA_PSK', 'ECCPWD']
+ not_supported = weak_crypto + static_dh + protocol_goop + maybe_someday
+
+ (options, args) = process_command_line(args)
+
+ if not options.with_aria_cbc:
+ not_supported += ['ARIA_128_CBC', 'ARIA_256_CBC']
+
+ ciphersuite_re = re.compile(' +0x([0-9a-fA-F][0-9a-fA-F]),0x([0-9a-fA-F][0-9a-fA-F]) + TLS_([A-Za-z_0-9]+) ')
+
+ suites = {}
+
+ contents = ''
+
+ for line in open_input(args):
+ contents += line
+ match = ciphersuite_re.match(line)
+ if match:
+ code = match.group(1) + match.group(2)
+ name = match.group(3)
+
+ should_use = True
+ for ns in not_supported:
+ if ns in name:
+ should_use = False
+
+ if should_use and name.find('_WITH_') > 0:
+ suites[code] = to_ciphersuite_info(code, name)
+
+ sha1 = hashlib.sha1()
+ sha1.update(contents)
+ contents_hash = sha1.hexdigest()
+
+ if options.save_download:
+ out = open('tls-parameters.txt', 'w')
+ out.write(contents)
+ out.close()
+
+ def define_custom_ciphersuite(name, code):
+ suites[code] = to_ciphersuite_info(code, name)
+
+ if options.with_cecpq1:
+ # CECPQ1 key exchange
+ define_custom_ciphersuite('CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256', '16B7')
+ define_custom_ciphersuite('CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256', '16B8')
+ define_custom_ciphersuite('CECPQ1_RSA_WITH_AES_256_GCM_SHA384', '16B9')
+ define_custom_ciphersuite('CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384', '16BA')
+
+ if options.with_ocb:
+ # OCB ciphersuites draft-zauner-tls-aes-ocb-04
+ define_custom_ciphersuite('DHE_RSA_WITH_AES_128_OCB_SHA256', 'FFC0')
+ define_custom_ciphersuite('DHE_RSA_WITH_AES_256_OCB_SHA256', 'FFC1')
+ define_custom_ciphersuite('ECDHE_RSA_WITH_AES_128_OCB_SHA256', 'FFC2')
+ define_custom_ciphersuite('ECDHE_RSA_WITH_AES_256_OCB_SHA256', 'FFC3')
+ define_custom_ciphersuite('ECDHE_ECDSA_WITH_AES_128_OCB_SHA256', 'FFC4')
+ define_custom_ciphersuite('ECDHE_ECDSA_WITH_AES_256_OCB_SHA256', 'FFC5')
+
+ define_custom_ciphersuite('PSK_WITH_AES_128_OCB_SHA256', 'FFC6')
+ define_custom_ciphersuite('PSK_WITH_AES_256_OCB_SHA256', 'FFC7')
+ define_custom_ciphersuite('DHE_PSK_WITH_AES_128_OCB_SHA256', 'FFC8')
+ define_custom_ciphersuite('DHE_PSK_WITH_AES_256_OCB_SHA256', 'FFC9')
+ define_custom_ciphersuite('ECDHE_PSK_WITH_AES_128_OCB_SHA256', 'FFCA')
+ define_custom_ciphersuite('ECDHE_PSK_WITH_AES_256_OCB_SHA256', 'FFCB')
+
+ if options.with_cecpq1 and options.with_ocb:
+ # CECPQ1 OCB ciphersuites - Botan extension
+ define_custom_ciphersuite('CECPQ1_RSA_WITH_AES_256_OCB_SHA256', 'FFCC')
+ define_custom_ciphersuite('CECPQ1_ECDSA_WITH_AES_256_OCB_SHA256', 'FFCD')
+ #define_custom_ciphersuite('CECPQ1_PSK_WITH_AES_256_OCB_SHA256', 'FFCE')
+
+ if options.with_srp_aead:
+ # SRP using GCM or OCB - Botan extension
+ define_custom_ciphersuite('SRP_SHA_WITH_AES_256_GCM_SHA384', 'FFA0')
+ define_custom_ciphersuite('SRP_SHA_RSA_WITH_AES_256_GCM_SHA384', 'FFA1')
+ define_custom_ciphersuite('SRP_SHA_DSS_WITH_AES_256_GCM_SHA384', 'FFA2')
+ define_custom_ciphersuite('SRP_SHA_ECDSA_WITH_AES_256_GCM_SHA384', 'FFA3')
+
+ if options.with_ocb:
+ define_custom_ciphersuite('SRP_SHA_WITH_AES_256_OCB_SHA256', 'FFA4')
+ define_custom_ciphersuite('SRP_SHA_RSA_WITH_AES_256_OCB_SHA256', 'FFA5')
+ define_custom_ciphersuite('SRP_SHA_DSS_WITH_AES_256_OCB_SHA256', 'FFA6')
+ define_custom_ciphersuite('SRP_SHA_ECDSA_WITH_AES_256_OCB_SHA256', 'FFA7')
+
+ suite_info = ''
+
+ def header():
+ return """/*
+* TLS cipher suite information
+*
+* This file was automatically generated from the IANA assignments
+* (tls-parameters.txt hash %s)
+* by %s on %s
+*
+* Botan is released under the Simplified BSD License (see license.txt)
+*/
+
+""" % (contents_hash, sys.argv[0], datetime.date.today().strftime("%Y-%m-%d"))
+
+ suite_info += header()
+
+ suite_info += """#include <botan/tls_ciphersuite.h>
+
+namespace Botan {
+
+namespace TLS {
+
+//static
+const std::vector<Ciphersuite>& Ciphersuite::all_known_ciphersuites()
+ {
+ // Note that this list of ciphersuites is ordered by id!
+ static const std::vector<Ciphersuite> g_ciphersuite_list = {
+"""
+
+ for code in sorted(suites.keys()):
+ info = suites[code]
+ assert len(info) == 10
+
+ suite_expr = 'Ciphersuite(0x%s, "%s", Auth_Method::%s, Kex_Algo::%s, "%s", %d, "%s", %d, KDF_Algo::%s, Nonce_Format::%s)' % (
+ code, info[0], info[2], info[3], info[4], info[5], info[6], info[7], info[8].replace('-','_'), info[9])
+
+ suite_info += " " + suite_expr + ",\n"
+
+ suite_info += """ };
+
+ return g_ciphersuite_list;
+ }
+
+}
+
+}
+"""
+
+ if options.output == '-':
+ print suite_info,
+ else:
+ out = open(options.output, 'w')
+ out.write(suite_info)
+ out.close()
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/comm/third_party/botan/src/scripts/website.py b/comm/third_party/botan/src/scripts/website.py
new file mode 100755
index 0000000000..e28909531f
--- /dev/null
+++ b/comm/third_party/botan/src/scripts/website.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+
+"""
+Generate the Botan website
+
+(C) 2017 Jack Lloyd
+"""
+
+import optparse # pylint: disable=deprecated-module
+import subprocess
+import sys
+import errno
+import shutil
+import tempfile
+import os
+
+def run_and_check(cmd_line, cwd=None):
+ print("Executing %s ..." % (' '.join(cmd_line)))
+
+ proc = subprocess.Popen(cmd_line,
+ cwd=cwd,
+ close_fds=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
+ (stdout, stderr) = proc.communicate()
+
+ if proc.returncode != 0:
+ print("Error running %s" % (' '.join(cmd_line)))
+ print(stdout)
+ print(stderr)
+ sys.exit(1)
+
+def rmtree_ignore_missing(path):
+ try:
+ shutil.rmtree(path)
+ except OSError:
+ # check errno?
+ pass
+
+def configure_build(botan_dir, build_dir):
+
+ run_and_check([os.path.join(botan_dir, 'configure.py'),
+ '--with-doxygen', '--with-sphinx',
+ '--with-build-dir=%s' % (build_dir)])
+
+def run_doxygen(tmp_dir, output_dir):
+ run_and_check(['doxygen', os.path.join(tmp_dir, 'build/botan.doxy')])
+ shutil.move(os.path.join(tmp_dir, 'build/docs/doxygen'), output_dir)
+
+def run_sphinx(botan_dir, tmp_dir, output_dir):
+
+ sphinx_config = os.path.join(botan_dir, 'src/configs/sphinx')
+ sphinx_dir = os.path.join(tmp_dir, 'sphinx')
+ os.mkdir(sphinx_dir)
+
+ shutil.copyfile(os.path.join(botan_dir, 'readme.rst'),
+ os.path.join(sphinx_dir, 'index.rst'))
+
+ for f in ['news.rst', os.path.join('doc', 'security.rst')]:
+ shutil.copy(os.path.join(botan_dir, f), sphinx_dir)
+
+ toc = """.. toctree::
+
+ index
+ news
+ security
+ User Guide <https://botan.randombit.net/handbook>
+ API Reference <https://botan.randombit.net/doxygen>
+"""
+
+ contents_rst = open(os.path.join(sphinx_dir, 'contents.rst'), 'w')
+ contents_rst.write(toc)
+ contents_rst.close()
+
+ sphinx_invoke = ['sphinx-build', '-t', 'website', '-c', sphinx_config, '-b', 'html']
+
+ handbook_dir = os.path.join(botan_dir, 'doc')
+
+ run_and_check(sphinx_invoke + [sphinx_dir, output_dir])
+ run_and_check(sphinx_invoke + [handbook_dir, os.path.join(output_dir, 'handbook')])
+
+ rmtree_ignore_missing(os.path.join(output_dir, '.doctrees'))
+ rmtree_ignore_missing(os.path.join(output_dir, 'handbook', '.doctrees'))
+ os.remove(os.path.join(output_dir, '.buildinfo'))
+ os.remove(os.path.join(output_dir, 'handbook', '.buildinfo'))
+
+ # share _static subdirs
+ shutil.rmtree(os.path.join(output_dir, 'handbook', '_static'))
+ os.symlink('../_static', os.path.join(output_dir, 'handbook', '_static'))
+
+ # Build PDF
+ latex_output = os.path.join(tmp_dir, 'latex')
+ run_and_check(['sphinx-build', '-c', sphinx_config, '-b', 'latex', handbook_dir, latex_output])
+
+ # Have to run twice because TeX
+ run_and_check(['pdflatex', 'botan.tex'], cwd=latex_output)
+ run_and_check(['pdflatex', 'botan.tex'], cwd=latex_output)
+
+ shutil.copy(os.path.join(latex_output, 'botan.pdf'),
+ os.path.join(output_dir, 'handbook'))
+
+
+def main(args):
+ parser = optparse.OptionParser()
+
+ parser.add_option('-o', '--output-dir', default=None,
+ help="Where to write output")
+
+ (options, args) = parser.parse_args(args)
+
+ output_dir = options.output_dir
+ tmp_dir = tempfile.mkdtemp(prefix='botan_website_')
+
+ # assumes we live in src/scripts
+ botan_dir = os.path.normpath(os.path.join(os.path.dirname(__file__),
+ "..", ".."))
+
+ if os.access(os.path.join(botan_dir, 'configure.py'), os.X_OK) is False:
+ print("Can't find configure.py in %s", botan_dir)
+ return 1
+
+ if output_dir is None:
+ cwd = os.getcwd()
+
+ if os.path.basename(cwd) == 'botan-website':
+ output_dir = '.'
+ else:
+ output_dir = os.path.join(cwd, 'botan-website')
+
+ try:
+ os.mkdir(output_dir)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ pass
+ else:
+ raise e
+
+ for subdir in ['_static', '_sources', 'doxygen', 'handbook']:
+ try:
+ shutil.rmtree(os.path.join(output_dir, subdir))
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ pass
+ else:
+ print("Error removing dir", e)
+ return 1
+
+ configure_build(botan_dir, tmp_dir)
+ run_doxygen(tmp_dir, output_dir)
+ run_sphinx(botan_dir, tmp_dir, output_dir)
+
+ for f in ['doc/pgpkey.txt', 'license.txt']:
+ shutil.copy(os.path.join(botan_dir, f), output_dir)
+
+ favicon = open(os.path.join(output_dir, 'favicon.ico'), 'w')
+ # Create an empty favicon.ico file so it gets cached by browsers
+ favicon.close()
+
+ shutil.rmtree(tmp_dir)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))