diff options
Diffstat (limited to 'test/tools')
-rwxr-xr-x | test/tools/dump_coveralls.py  |  66 |
-rwxr-xr-x | test/tools/gcov_coveralls.py  | 206 |
-rwxr-xr-x | test/tools/http_put.py        |  51 |
-rwxr-xr-x | test/tools/merge_coveralls.py | 207 |
4 files changed, 530 insertions, 0 deletions
# diff --git a/test/tools/dump_coveralls.py b/test/tools/dump_coveralls.py
# new file mode 100755 index 0000000..a96dc92 --- /dev/null +++ b/test/tools/dump_coveralls.py
# @@ -0,0 +1,66 @@
#!/usr/bin/env python3

# Small tool to dump JSON payload for coveralls.io API

import json
from operator import itemgetter
import os
import sys


def warn(*args, **kwargs):
    """Print a message to stderr (same signature as print())."""
    print(*args, file=sys.stderr, **kwargs)


def dump_file(json_file):
    """Dump a per-source-file coverage summary of a coveralls.io payload.

    Args:
        json_file: path to a JSON file holding a coveralls.io API payload.

    Returns:
        0 if successful, non-zero otherwise.
    """
    try:
        with open(json_file, encoding='utf8') as f:
            data = json.load(f)
    except OSError as err:
        warn(err)
        # was: return os.EX_DATAERR (65) -- not portable (POSIX-only) and
        # summing such values in main() could overflow the 0-255 exit range
        return 1
    except json.decoder.JSONDecodeError:
        warn("{}: json parsing error".format(json_file))
        return 1

    if 'source_files' not in data:
        warn("{}: no source_files, not a coveralls.io payload?".format(json_file))
        return 1

    print("{} ({} source files)".format(json_file, len(data['source_files'])))

    for src_file in sorted(data['source_files'], key=itemgetter('name')):
        covered_lines = not_skipped_lines = 0
        for cnt in src_file['coverage']:
            if cnt is None:
                # None marks a line that is not instrumented for coverage
                continue
            not_skipped_lines += 1
            if cnt > 0:
                covered_lines += 1
        if not_skipped_lines > 0:
            coverage = "{:.0%}".format(covered_lines / not_skipped_lines)
        else:
            coverage = 'N/A'

        print("\t{:>3} {}".format(coverage, src_file['name']))

    return 0


def main():
    if len(sys.argv) < 2:
        warn("usage: {} file.json ...".format(sys.argv[0]))
        return os.EX_USAGE

    # Report failure if any file failed; do not sum statuses (the sum could
    # exceed 255 and wrap around to a misleading exit code).
    exit_status = 0
    for f in sys.argv[1:]:
        if dump_file(f) != 0:
            exit_status = 1

    return exit_status


if __name__ == '__main__':
    sys.exit(main())
# diff --git a/test/tools/gcov_coveralls.py b/test/tools/gcov_coveralls.py
# new file mode 100755 index 0000000..dcbd93f --- /dev/null +++ b/test/tools/gcov_coveralls.py
# @@ -0,0 +1,206 @@
#!/usr/bin/env python3
"""
Script to save coverage info for C source files in JSON for coveralls.io

When C code is compiled with the --coverage flag, a *.gcno file is generated
for each object file; it contains information to reconstruct the basic block
graphs and assign source line numbers to blocks.

When the binary is executed, a *.gcda file is written on exit, with the same
base name as the corresponding *.gcno file. It contains summary information,
counters, etc.

The gcov(1) utility can be used to get information from a *.gcda file and
write text reports to a *.gcov file (one file for each source file from which
the object was compiled).

The script finds *.gcno files, uses gcov to generate *.gcov files, parses them
and accumulates statistics for all source files.

This script was written with quite a few assumptions:

    * Code was built using an absolute path to the source directory (and the
      absolute path is stored in object file debug symbols).

    * Current directory is writable and there are no useful *.gcov files in it
      (because they will be deleted).

    * Object file has the same base name as the *.gcno file (e.g. foo.c.gcno
      and foo.c.o). This is the case for cmake builds, but probably not for
      other build systems.

    * Source file names contain only ASCII characters.
"""

import argparse
from collections import defaultdict
from glob import glob
import hashlib
import json
import os
from os.path import isabs, join, normpath, relpath
import os.path
import subprocess
import sys


def warn(*args, **kwargs):
    """Print a message to stderr (same signature as print())."""
    print(*args, file=sys.stderr, **kwargs)


def parse_gcov_file(gcov_file):
    """Parses the content of .gcov file written by gcov -i

    Returns:
        str: Source file name (None if the file has no 'file:' record)
        dict: coverage info { line_number: hits }
    """
    src_file = None
    count = {}
    with open(gcov_file) as fh:
        for line in fh:
            if ':' not in line:
                continue
            # Split on the first ':' only -- the value itself may contain
            # colons (e.g. a C++ symbol in a 'function:' record); a plain
            # split(':') raised ValueError on such lines.
            tag, value = line.split(':', 1)
            if tag == 'file':
                src_file = value.rstrip()
            elif tag == 'lcount':
                line_num, exec_count = value.split(',')
                count[int(line_num)] = int(exec_count)

    return src_file, count


def run_gcov(filename, coverage, args):
    """ * run gcov on given file
        * parse generated .gcov files and update coverage structure
        * delete .gcov files
    """
    if args.verbose:
        warn("calling:", 'gcov', '-i', filename)
        stdout = None
    else:
        # gcov is noisy and has no quiet flag, so redirect stdout to /dev/null
        stdout = subprocess.DEVNULL

    subprocess.check_call(['gcov', '-i', filename], stdout=stdout)

    for gcov_file in glob('*.gcov'):
        if args.verbose:
            warn('parsing', gcov_file)
        src_file, count = parse_gcov_file(gcov_file)
        os.remove(gcov_file)

        if src_file is None:
            # previously src_file would be unbound and raise NameError here
            warn("{}: no 'file:' record, skipping".format(gcov_file))
            continue

        if src_file not in coverage:
            coverage[src_file] = defaultdict(int, count)
        else:
            # sum execution counts
            for line, exe_cnt in count.items():
                coverage[src_file][line] += exe_cnt


def main():
    parser = argparse.ArgumentParser(
        description='Save gcov coverage results in JSON file for coveralls.io.')
    parser.add_argument(
        '-v',
        '--verbose',
        action="store_true",
        help='Display additional information and gcov command output.')
    parser.add_argument(
        '-e',
        '--exclude',
        action='append',
        # default=[] is required: with the implicit default of None the
        # 'path in args.exclude' test below raised TypeError whenever the
        # option was not given
        default=[],
        metavar='DIR',
        help=("Don't look for .gcno/.gcda files in this directories (repeat option to skip several directories). "
              "Path is relative to the directory where script was started, e. g. '.git'"))
    parser.add_argument(
        '-p',
        '--prefix',
        action='append',
        help=("Strip this prefix from absolute path to source file. "
              "If this option is provided, then only files with given prefixes in absolute path "
              "will be added to coverage (option can be repeated)."))
    parser.add_argument(
        '--out',
        type=argparse.FileType('w'),
        required=True,
        metavar='FILE',
        help='Save JSON payload to this file')
    args = parser.parse_args()

    # ensure that there are no unrelated .gcov files in the current directory
    for gcov_file in glob('*.gcov'):
        os.remove(gcov_file)
        warn("Warning: {} deleted".format(gcov_file))

    # dict { src_file_name: {line1: exec_count1, line2: exec_count2, ...} }
    coverage = {}

    # find . -name '*.gcno' (respecting args.exclude)
    for root, dirs, files in os.walk('.'):
        # Prune excluded dirs first. Mutate dirs in place (slice assignment)
        # so os.walk does not descend into them; the original code removed
        # items from the list while iterating it, which skips entries.
        kept = []
        for subdir in dirs:
            # path relative to start dir
            path = normpath(join(root, subdir))
            if path in args.exclude:
                if args.verbose:
                    warn('directory "{}" excluded'.format(path))
            else:
                kept.append(subdir)
        dirs[:] = kept

        for f in files:
            # Usually gcov is called with a source file as an argument, but
            # this name is used only to find the .gcno and .gcda files. To
            # find the source file, information from debug symbols is used.
            # So we can call gcov on the .gcno file.
            if f.endswith('.gcno'):
                run_gcov(join(root, f), coverage, args)

    # prepare JSON payload for coveralls.io API
    # https://docs.coveralls.io/api-introduction
    coveralls_data = {'source_files': []}

    for src_file in coverage:
        # filter by prefix and save path with stripped prefix
        src_file_rel = src_file
        if args.prefix and isabs(src_file):
            for prefix in args.prefix:
                if src_file.startswith(prefix):
                    src_file_rel = relpath(src_file, start=prefix)
                    break
            else:
                # skip file outside given prefixes,
                # it can be e.g. a library include file
                if args.verbose:
                    warn('file "{}" is not matched by prefix, skipping'.format(src_file))
                continue

        try:
            with open(src_file, mode='rb') as fh:
                line_count = sum(1 for _ in fh)
                fh.seek(0)
                md5 = hashlib.md5(fh.read()).hexdigest()
        except OSError as err:
            # skip files for which source file is not available
            warn(err, 'not adding to coverage')
            continue

        coverage_array = [None] * line_count

        for line_num, exe_cnt in coverage[src_file].items():
            # item at index 0 represents the coverage for line 1 of the source
            assert 1 <= line_num <= line_count
            coverage_array[line_num - 1] = exe_cnt

        coveralls_data['source_files'].append({
            'name': src_file_rel,
            'coverage': coverage_array,
            'source_digest': md5
        })

    args.out.write(json.dumps(coveralls_data))

    if args.verbose:
        warn('Coverage for {} source files was written'.format(
            len(coveralls_data['source_files'])))


if __name__ == '__main__':
    main()
# diff --git a/test/tools/http_put.py b/test/tools/http_put.py
# new file mode 100755 index 0000000..8ede68e --- /dev/null +++ b/test/tools/http_put.py
# @@ -0,0 +1,51 @@
#!/usr/bin/env python3
"""
Small script to upload file using HTTP PUT
"""

import argparse
import os
import sys

import requests


def main():
    parser = argparse.ArgumentParser(
        description='Upload a file using HTTP PUT method',
        epilog=(
            "To use HTTP Auth set HTTP_PUT_AUTH environment variable to user:password\n"
            "Example: %(prog)s file1 file2 https://example.com/dir/"))
    parser.add_argument(
        "file", type=argparse.FileType('rb'), nargs='+', help="File to upload")
    parser.add_argument(
        "dir_url", help="Remote URL (path to a directory, must include a trailing /)")
    args = parser.parse_args()

    if not args.dir_url.endswith('/'):
        parser.error("URL must end with /")

    http_auth = os.getenv('HTTP_PUT_AUTH')
    if http_auth:
        # split on the first ':' only -- the password may itself contain
        # colons (a plain split(':') raised ValueError in that case)
        user, password = http_auth.split(':', 1)
        auth = (user, password)
    else:
        auth = None

exit_code = 0 + + for fh in args.file: + try: + r = requests.put(args.dir_url + fh.name, data=fh, auth=auth, timeout=(45, 90)) + r.raise_for_status() + print("{} uploaded to {}".format(fh.name, r.url)) + except (requests.exceptions.ConnectionError, + requests.exceptions.HTTPError) as err: + print(err, file=sys.stderr) + exit_code = 1 + + return exit_code + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/test/tools/merge_coveralls.py b/test/tools/merge_coveralls.py new file mode 100755 index 0000000..1d294cc --- /dev/null +++ b/test/tools/merge_coveralls.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python3 + +from __future__ import print_function + +import argparse +import json +import os +import sys +import codecs + +import requests + +# Python 2/3 compatibility +if sys.version_info.major > 2: + xrange = range + +# install path to repository mapping +# if path mapped to None, it means that the file should be ignored (i.e. test file/helper) +# first matched path counts. +# terminating slash should be added for directories +path_mapping = [ + ("${install-dir}/share/rspamd/lib/fun.lua", None), + ("${install-dir}/share/rspamd/lib/", "lualib/"), + ("${install-dir}/share/rspamd/rules/" , "rules/"), + ("${install-dir}/share/rspamd/lib/torch/" , None), + ("${build-dir}/CMakeFiles/", None), + ("${build-dir}/contrib/", None), + ("${build-dir}/test", None), + ("${project-root}/test/lua/", None), + ("${project-root}/test/", None), + ("${project-root}/clang-plugin/", None), + ("${project-root}/CMakeFiles/", None), + ("${project-root}/contrib/", None), + ("${project-root}/", ""), + ("contrib/", None), + ("CMakeFiles/", None), +] + +parser = argparse.ArgumentParser(description='') +parser.add_argument('--input', nargs='+', help='input files') +parser.add_argument('--output', help='output file)') +parser.add_argument('--root', default="/rspamd/src/github.com/rspamd/rspamd", help='repository root)') +parser.add_argument('--install-dir', default="/rspamd/install", 
help='install root)') +parser.add_argument('--build-dir', default="/rspamd/build", help='build root)') +parser.add_argument('--token', help='If present, the file will be uploaded to coveralls)') +parser.add_argument('--parallel', action='store_true', help='If present, this is a parallel build)') +parser.add_argument('--parallel-close', action='store_true', help='If present, close parallel build and exit)') + + +def merge_coverage_vectors(c1, c2): + assert(len(c1) == len(c2)) + + for i in range(0, len(c1)): + if c1[i] is None and c2[i] is None: + pass + elif type(c1[i]) is int and c2[i] is None: + pass + elif c1[i] is None and type(c2[i]) is int: + c1[i] = c2[i] + elif type(c1[i]) is int and type(c2[i]) is int: + c1[i] += c2[i] + else: + raise RuntimeError("bad element types at %d: %s, %s", i, type(c1[i]), type(c1[i])) + + return c1 + + +def normalize_name(name): + name = os.path.normpath(name) + if not os.path.isabs(name): + name = os.path.abspath(repository_root + "/" + name) + for k in path_mapping: + if name.startswith(k[0]): + if k[1] is None: + return None + else: + name = k[1] + name[len(k[0]):] + break + return name + +def merge(files, j1): + for sf in j1['source_files']: + name = normalize_name(sf['name']) + if name is None: + continue + if name in files: + files[name]['coverage'] = merge_coverage_vectors(files[name]['coverage'], sf['coverage']) + else: + sf['name'] = name + files[name] = sf + + return files + +def prepare_path_mapping(): + for i in range(0, len(path_mapping)): + new_key = path_mapping[i][0].replace("${install-dir}", install_dir) + new_key = new_key.replace("${project-root}", repository_root) + new_key = new_key.replace("${build-dir}", build_dir) + + path_mapping[i] = (new_key, path_mapping[i][1]) + +def close_parallel_build(): + j = {'payload':{'status': 'done'}} + j['payload']['build_num'] = os.getenv('DRONE_BUILD_NUMBER') + query_str = {'repo_token': args.token} + try: + r = requests.post('https://coveralls.io/webhook', params=query_str, 
json=j) + r.raise_for_status() + except requests.exceptions.RequestException as e: + print("Failed to send data to coveralls: %s" % e) + sys.exit() + + try: + response = r.json() + if 'url' in response: + print("[coveralls] URL %s" % response['url']) + if 'error' in response: + print("[coveralls] ERROR: %s" % response['error']) + except json.decoder.JSONDecodeError: + print("Bad response: '%s'" % r.text) + +if __name__ == '__main__': + args = parser.parse_args() + + if args.parallel_close: + close_parallel_build() + sys.exit(0) + + if not args.input: + print("error: the following arguments are required: --input") + sys.exit(1) + + repository_root = os.path.abspath(os.path.expanduser(args.root)) + install_dir = os.path.normpath(os.path.expanduser(args.install_dir)) + build_dir = os.path.normpath(os.path.expanduser(args.build_dir)) + + prepare_path_mapping() + + with codecs.open(args.input[0], 'r', encoding='utf-8') as fh: + j1 = json.load(fh) + + files = merge({}, j1) + for i in range(1, len(args.input)): + with codecs.open(args.input[i], 'r', encoding='utf-8') as fh: + j2 = json.load(fh) + + files = merge(files, j2) + + if 'git' not in j1 and 'git' in j2: + j1['git'] = j2['git'] + if 'service_name' not in j1 and 'service_name' in j2: + j1['service_name'] = j2['service_name'] + if 'service_job_id' not in j1 and 'service_job_id' in j2: + j1['service_job_id'] = j2['service_job_id'] + + if args.parallel: + j1['parallel'] = True + + if os.getenv('CIRCLECI'): + j1['service_name'] = 'circleci' + j1['service_job_id'] = os.getenv('CIRCLE_BUILD_NUM') + elif os.getenv('DRONE') == 'true': + j1['service_name'] = 'drone' + j1['service_branch'] = os.getenv('DRONE_COMMIT_BRANCH') + j1['service_build_url'] = os.getenv('DRONE_BUILD_LINK') + j1['service_number'] = os.getenv('DRONE_BUILD_NUMBER') + j1['commit_sha'] = os.getenv('DRONE_COMMIT_SHA') + if os.getenv('DRONE_BUILD_EVENT') == 'pull_request': + j1['service_pull_request'] = os.getenv('DRONE_PULL_REQUEST') + # git data can be 
filled by cpp-coveralls, but in our layout it can't find repo + # so we can override git info witout merging + j1['git'] = { + 'head': { + 'id': j1['commit_sha'], + 'author_email': os.getenv('DRONE_COMMIT_AUTHOR_EMAIL'), + 'message': os.getenv('DRONE_COMMIT_MESSAGE') + }, + 'branch': j1['service_branch'], + 'remotes': [{ + 'name': 'origin', + 'url': os.getenv('DRONE_GIT_HTTP_URL') + }] + } + + + j1['source_files'] = list(files.values()) + + if args.output: + with open(args.output, 'w') as f: + f.write(json.dumps(j1)) + + if args.token: + j1['repo_token'] = args.token + try: + r = requests.post('https://coveralls.io/api/v1/jobs', files={"json_file": json.dumps(j1)}) + r.raise_for_status() + except requests.exceptions.RequestException as e: + print("Failed to send data to coveralls: %s" % e) + sys.exit() + + try: + response = r.json() + print("[coveralls] %s" % response['message']) + if 'url' in response: + print("[coveralls] Uploaded to %s" % response['url']) + except json.decoder.JSONDecodeError: + print("Bad response: '%s'" % r.text) |