author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-29 04:41:38 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-29 04:41:38 +0000
commit    7b6e527f440cd7e6f8be2b07cee320ee6ca18786 (patch)
tree      4a2738d69fa2814659fdadddf5826282e73d81f4 /tools
parent    Initial commit. (diff)
Adding upstream version 1.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/ac_converter.py     453
-rwxr-xr-x  tools/boost_names.py      300
-rwxr-xr-x  tools/build_website.py     51
-rwxr-xr-x  tools/cmake2meson.py      330
-rw-r--r--  tools/copy_files.py        55
-rwxr-xr-x  tools/dircondenser.py      91
-rwxr-xr-x  tools/regenerate_docs.py  184
-rwxr-xr-x  tools/run_with_cov.py      53
8 files changed, 1517 insertions, 0 deletions
diff --git a/tools/ac_converter.py b/tools/ac_converter.py
new file mode 100755
index 0000000..f2a5599
--- /dev/null
+++ b/tools/ac_converter.py
@@ -0,0 +1,453 @@
+#!/usr/bin/env python3
+
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+help_message = """Usage: {} <config.h.meson>
+
+This script reads config.h.meson, looks for header
+checks and writes the corresponding meson declaration.
+
+Copy config.h.in to config.h.meson, replace #undef
+with #mesondefine and run this. We can't do this automatically
+because some configure scripts have #undef statements
+that are unrelated to configure checks.
+"""
+
+import sys
+
+
+# Add stuff here as it is encountered.
+function_data = \
+ {'HAVE_FEENABLEEXCEPT': ('feenableexcept', 'fenv.h'),
+ 'HAVE_FECLEAREXCEPT': ('feclearexcept', 'fenv.h'),
+ 'HAVE_FEDISABLEEXCEPT': ('fedisableexcept', 'fenv.h'),
+ 'HAVE_MMAP': ('mmap', 'sys/mman.h'),
+ 'HAVE_GETPAGESIZE': ('getpagesize', 'unistd.h'),
+ 'HAVE_GETISAX': ('getisax', 'sys/auxv.h'),
+ 'HAVE_GETTIMEOFDAY': ('gettimeofday', 'sys/time.h'),
+ 'HAVE_MPROTECT': ('mprotect', 'sys/mman.h'),
+ 'HAVE_POSIX_MEMALIGN': ('posix_memalign', 'stdlib.h'),
+ 'HAVE_SIGACTION': ('sigaction', 'signal.h'),
+ 'HAVE_ALARM': ('alarm', 'unistd.h'),
+ 'HAVE_CTIME_R': ('ctime_r', 'time.h'),
+ 'HAVE_DRAND48': ('drand48', 'stdlib.h'),
+ 'HAVE_FLOCKFILE': ('flockfile', 'stdio.h'),
+ 'HAVE_FORK': ('fork', 'unistd.h'),
+ 'HAVE_FUNLOCKFILE': ('funlockfile', 'stdio.h'),
+ 'HAVE_GETLINE': ('getline', 'stdio.h'),
+ 'HAVE_LINK': ('link', 'unistd.h'),
+ 'HAVE_RAISE': ('raise', 'signal.h'),
+ 'HAVE_STRNDUP': ('strndup', 'string.h'),
+ 'HAVE_SCHED_GETAFFINITY': ('sched_getaffinity', 'sched.h'),
+ 'HAVE_WAITPID': ('waitpid', 'sys/wait.h'),
+ 'HAVE_XRENDERCREATECONICALGRADIENT': ('XRenderCreateConicalGradient', 'xcb/render.h'),
+ 'HAVE_XRENDERCREATELINEARGRADIENT': ('XRenderCreateLinearGradient', 'xcb/render.h'),
+ 'HAVE_XRENDERCREATERADIALGRADIENT': ('XRenderCreateRadialGradient', 'xcb/render.h'),
+ 'HAVE_XRENDERCREATESOLIDFILL': ('XRenderCreateSolidFill', 'xcb/render.h'),
+ 'HAVE_DCGETTEXT': ('dcgettext', 'libintl.h'),
+ 'HAVE_ENDMNTENT': ('endmntent', 'mntent.h'),
+ 'HAVE_ENDSERVENT': ('endservent', 'netdb.h'),
+ 'HAVE_EVENTFD': ('eventfd', 'sys/eventfd.h'),
+ 'HAVE_FALLOCATE': ('fallocate', 'fcntl.h'),
+ 'HAVE_FCHMOD': ('fchmod', 'sys/stat.h'),
+ 'HAVE_FCHOWN': ('fchown', 'unistd.h'),
+ 'HAVE_FDWALK': ('fdwalk', 'stdlib.h'),
+ 'HAVE_FSYNC': ('fsync', 'unistd.h'),
+ 'HAVE_GETC_UNLOCKED': ('getc_unlocked', 'stdio.h'),
+ 'HAVE_GETFSSTAT': ('getfsstat', 'sys/mount.h'),
+ 'HAVE_GETMNTENT_R': ('getmntent_r', 'mntent.h'),
+ 'HAVE_GETPROTOBYNAME_R': ('getprotobyname_r', 'netdb.h'),
+ 'HAVE_GETRESUID': ('getresuid', 'unistd.h'),
+ 'HAVE_GETVFSSTAT': ('getvfsstat', 'sys/statvfs.h'),
+ 'HAVE_GMTIME_R': ('gmtime_r', 'time.h'),
+ 'HAVE_HASMNTOPT': ('hasmntopt', 'mntent.h'),
+ 'HAVE_IF_INDEXTONAME': ('if_indextoname', 'net/if.h'),
+ 'HAVE_IF_NAMETOINDEX': ('if_nametoindex', 'net/if.h'),
+ 'HAVE_INOTIFY_INIT1': ('inotify_init1', 'sys/inotify.h'),
+ 'HAVE_ISSETUGID': ('issetugid', 'unistd.h'),
+ 'HAVE_KEVENT': ('kevent', 'sys/event.h'),
+ 'HAVE_KQUEUE': ('kqueue', 'sys/event.h'),
+ 'HAVE_LCHMOD': ('lchmod', 'sys/stat.h'),
+ 'HAVE_LCHOWN': ('lchown', 'unistd.h'),
+ 'HAVE_LSTAT': ('lstat', 'sys/stat.h'),
+ 'HAVE_MEMCPY': ('memcpy', 'string.h'),
+ 'HAVE_MEMALIGN': ('memalign', 'stdlib.h'),
+ 'HAVE_MEMMEM': ('memmem', 'string.h'),
+ 'HAVE_NEWLOCALE': ('newlocale', 'locale.h'),
+ 'HAVE_PIPE2': ('pipe2', 'fcntl.h'),
+ 'HAVE_POLL': ('poll', 'poll.h'),
+ 'HAVE_PRLIMIT': ('prlimit', 'sys/resource.h'),
+ 'HAVE_PTHREAD_ATTR_SETSTACKSIZE': ('pthread_attr_setstacksize', 'pthread.h'),
+ 'HAVE_PTHREAD_CONDATTR_SETCLOCK': ('pthread_condattr_setclock', 'pthread.h'),
+ 'HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP': ('pthread_cond_timedwait_relative_np', 'pthread.h'),
+ 'HAVE_READLINK': ('readlink', 'unistd.h'),
+ 'HAVE_RES_INIT': ('res_init', 'resolv.h'),
+ 'HAVE_SENDMMSG': ('sendmmsg', 'sys/socket.h'),
+ 'HAVE_SOCKET': ('socket', 'sys/socket.h'),
+ 'HAVE_GETENV': ('getenv', 'stdlib.h'),
+ 'HAVE_SETENV': ('setenv', 'stdlib.h'),
+ 'HAVE_PUTENV': ('putenv', 'stdlib.h'),
+ 'HAVE_UNSETENV': ('unsetenv', 'stdlib.h'),
+ 'HAVE_SETMNTENT': ('setmntent', 'mntent.h'),
+ 'HAVE_SNPRINTF': ('snprintf', 'stdio.h'),
+ 'HAVE_SPLICE': ('splice', 'fcntl.h'),
+ 'HAVE_STATFS': ('statfs', 'mount.h'),
+ 'HAVE_STATVFS': ('statvfs', 'sys/statvfs.h'),
+ 'HAVE_STPCOPY': ('stpcopy', 'string.h'),
+ 'HAVE_STRCASECMP': ('strcasecmp', 'strings.h'),
+ 'HAVE_STRLCPY': ('strlcpy', 'string.h'),
+ 'HAVE_STRNCASECMP': ('strncasecmp', 'strings.h'),
+ 'HAVE_STRSIGNAL': ('strsignal', 'signal.h'),
+ 'HAVE_STRTOD_L': ('strtod_l', 'stdlib.h'),
+ 'HAVE_STRTOLL_L': ('strtoll_l', 'stdlib.h'),
+ 'HAVE_STRTOULL_L': ('strtoull_l', 'stdlib.h'),
+ 'HAVE_SYMLINK': ('symlink', 'unistd.h'),
+ 'HAVE_SYSCTLBYNAME': ('sysctlbyname', 'sys/sysctl.h'),
+ 'HAVE_TIMEGM': ('timegm', 'time.h'),
+ 'HAVE_USELOCALE': ('uselocale', 'xlocale.h'),
+ 'HAVE_UTIMES': ('utimes', 'sys/time.h'),
+ 'HAVE_VALLOC': ('valloc', 'stdlib.h'),
+ 'HAVE_VASPRINTF': ('vasprintf', 'stdio.h'),
+ 'HAVE_VSNPRINTF': ('vsnprintf', 'stdio.h'),
+ 'HAVE_BCOPY': ('bcopy', 'strings.h'),
+ 'HAVE_STRERROR': ('strerror', 'string.h'),
+ 'HAVE_MEMMOVE': ('memmove', 'string.h'),
+ 'HAVE_STRTOIMAX': ('strtoimax', 'inttypes.h'),
+ 'HAVE_STRTOLL': ('strtoll', 'stdlib.h'),
+ 'HAVE_STRTOQ': ('strtoq', 'stdlib.h'),
+ 'HAVE_ACCEPT4': ('accept4', 'sys/socket.h'),
+ 'HAVE_CHMOD': ('chmod', 'sys/stat.h'),
+ 'HAVE_CHOWN': ('chown', 'unistd.h'),
+ 'HAVE_FSTAT': ('fstat', 'sys/stat.h'),
+ 'HAVE_GETADDRINFO': ('getaddrinfo', 'netdb.h'),
+ 'HAVE_GETGRGID_R': ('getgrgid_r', 'grp.h'),
+ 'HAVE_GETGRNAM_R': ('getgrnam_r', 'grp.h'),
+ 'HAVE_GETGROUPS': ('getgroups', 'grp.h'),
+ 'HAVE_GETOPT_LONG': ('getopt_long', 'getopt.h'),
+ 'HAVE_GETPWNAM_R': ('getpwnam', 'pwd.h'),
+ 'HAVE_GETPWUID_R': ('getpwuid_r', 'pwd.h'),
+ 'HAVE_GETUID': ('getuid', 'unistd.h'),
+ 'HAVE_LRINTF': ('lrintf', 'math.h'),
+ 'HAVE_DECL_ISNAN': ('isnan', 'math.h'),
+ 'HAVE_DECL_ISINF': ('isinf', 'math.h'),
+ 'HAVE_ROUND': ('round', 'math.h'),
+ 'HAVE_NEARBYINT': ('nearbyint', 'math.h'),
+ 'HAVE_RINT': ('rint', 'math.h'),
+ 'HAVE_MKFIFO': ('mkfifo', 'sys/stat.h'),
+ 'HAVE_MLOCK': ('mlock', 'sys/mman.h'),
+ 'HAVE_NANOSLEEP': ('nanosleep', 'time.h'),
+ 'HAVE_PIPE': ('pipe', 'unistd.h'),
+ 'HAVE_PPOLL': ('ppoll', 'poll.h'),
+ 'HAVE_REGEXEC': ('regexec', 'regex.h'),
+ 'HAVE_SETEGID': ('setegid', 'unistd.h'),
+ 'HAVE_SETEUID': ('seteuid', 'unistd.h'),
+ 'HAVE_SETPGID': ('setpgid', 'unistd.h'),
+ 'HAVE_SETREGID': ('setregid', 'unistd.h'),
+ 'HAVE_SETRESGID': ('setresgid', 'unistd.h'),
+ 'HAVE_SETRESUID': ('setresuid', 'unistd.h'),
+ 'HAVE_SHM_OPEN': ('shm_open', 'fcntl.h'),
+ 'HAVE_SLEEP': ('sleep', 'unistd.h'),
+ 'HAVE_STRERROR_R': ('strerror_r', 'string.h'),
+ 'HAVE_STRTOF': ('strtof', 'stdlib.h'),
+ 'HAVE_SYSCONF': ('sysconf', 'unistd.h'),
+ 'HAVE_USLEEP': ('usleep', 'unistd.h'),
+ 'HAVE_VFORK': ('vfork', 'unistd.h'),
+ 'HAVE_MALLOC': ('malloc', 'stdlib.h'),
+ 'HAVE_CALLOC': ('calloc', 'stdlib.h'),
+ 'HAVE_REALLOC': ('realloc', 'stdlib.h'),
+ 'HAVE_FREE': ('free', 'stdlib.h'),
+ 'HAVE_ALLOCA': ('alloca', 'alloca.h'),
+ 'HAVE_QSORT': ('qsort', 'stdlib.h'),
+ 'HAVE_ABS': ('abs', 'stdlib.h'),
+ 'HAVE_MEMSET': ('memset', 'string.h'),
+ 'HAVE_MEMCMP': ('memcmp', 'string.h'),
+ 'HAVE_STRLEN': ('strlen', 'string.h'),
+ 'HAVE_STRLCAT': ('strlcat', 'string.h'),
+ 'HAVE_STRDUP': ('strdup', 'string.h'),
+ 'HAVE__STRREV': ('_strrev', 'string.h'),
+ 'HAVE__STRUPR': ('_strupr', 'string.h'),
+ 'HAVE__STRLWR': ('_strlwr', 'string.h'),
+ 'HAVE_INDEX': ('index', 'strings.h'),
+ 'HAVE_RINDEX': ('rindex', 'strings.h'),
+ 'HAVE_STRCHR': ('strchr', 'string.h'),
+ 'HAVE_STRRCHR': ('strrchr', 'string.h'),
+ 'HAVE_STRSTR': ('strstr', 'string.h'),
+ 'HAVE_STRTOL': ('strtol', 'stdlib.h'),
+ 'HAVE_STRTOUL': ('strtoul', 'stdlib.h'),
+ 'HAVE_STRTOULL': ('strtoull', 'stdlib.h'),
+ 'HAVE_STRTOD': ('strtod', 'stdlib.h'),
+ 'HAVE_ATOI': ('atoi', 'stdlib.h'),
+ 'HAVE_ATOF': ('atof', 'stdlib.h'),
+ 'HAVE_STRCMP': ('strcmp', 'string.h'),
+ 'HAVE_STRNCMP': ('strncmp', 'string.h'),
+ 'HAVE_VSSCANF': ('vsscanf', 'stdio.h'),
+ 'HAVE_CHROOT': ('chroot', 'unistd.h'),
+ 'HAVE_CLOCK': ('clock', 'time.h'),
+ 'HAVE_CLOCK_GETRES': ('clock_getres', 'time.h'),
+ 'HAVE_CLOCK_GETTIME': ('clock_gettime', 'time.h'),
+ 'HAVE_CLOCK_SETTIME': ('clock_settime', 'time.h'),
+ 'HAVE_CONFSTR': ('confstr', 'time.h'),
+ 'HAVE_CTERMID': ('ctermid', 'stdio.h'),
+ 'HAVE_DIRFD': ('dirfd', 'dirent.h'),
+ 'HAVE_DLOPEN': ('dlopen', 'dlfcn.h'),
+ 'HAVE_DUP2': ('dup2', 'unistd.h'),
+ 'HAVE_DUP3': ('dup3', 'unistd.h'),
+ 'HAVE_EPOLL_CREATE1': ('epoll_create1', 'sys/epoll.h'),
+ 'HAVE_ERF': ('erf', 'math.h'),
+ 'HAVE_ERFC': ('erfc', 'math.h'),
+ 'HAVE_EXECV': ('execv', 'unistd.h'),
+ 'HAVE_FACCESSAT': ('faccessat', 'unistd.h'),
+ 'HAVE_FCHDIR': ('fchdir', 'unistd.h'),
+ 'HAVE_FCHMODAT': ('fchmodat', 'sys/stat.h'),
+ 'HAVE_FDATASYNC': ('fdatasync', 'unistd.h'),
+ 'HAVE_FDOPENDIR': ('fdopendir', 'dirent.h'),
+ 'HAVE_FEXECVE': ('fexecve', 'unistd.h'),
+ 'HAVE_FLOCK': ('flock', 'sys/file.h'),
+ 'HAVE_FORKPTY': ('forkpty', 'pty.h'),
+ 'HAVE_FPATHCONF': ('fpathconf', 'unistd.h'),
+ 'HAVE_FSTATAT': ('fstatat', 'unistd.h'),
+ 'HAVE_FSTATVFS': ('fstatvfs', 'sys/statvfs.h'),
+ 'HAVE_FTELLO': ('ftello', 'stdio.h'),
+ 'HAVE_FTIME': ('ftime', 'sys/timeb.h'),
+ 'HAVE_FTRUNCATE': ('ftruncate', 'unistd.h'),
+ 'HAVE_FUTIMENS': ('futimens', 'sys/stat.h'),
+ 'HAVE_FUTIMES': ('futimes', 'sys/time.h'),
+ 'HAVE_GAI_STRERROR': ('gai_strerror', 'netdb.h'),
+ 'HAVE_GETGROUPLIST': ('getgrouplist', 'grp.h'),
+ 'HAVE_GETHOSTBYNAME': ('gethostbyname', 'netdb.h'),
+ 'HAVE_GETHOSTBYNAME_R': ('gethostbyname_r', 'netdb.h'),
+ 'HAVE_GETITIMER': ('getitimer', 'sys/time.h'),
+ 'HAVE_GETLOADAVG': ('getloadavg', 'stdlib.h'),
+ 'HAVE_GETLOGIN': ('getlogin', 'unistd.h'),
+ 'HAVE_GETNAMEINFO': ('getnameinfo', 'netdb.h'),
+ 'HAVE_GETPEERNAME': ('getpeername', 'sys/socket.h'),
+ 'HAVE_GETPGID': ('getpgid', 'unistd.h'),
+ 'HAVE_GETPGRP': ('getpgrp', 'unistd.h'),
+ 'HAVE_GETPID': ('getpid', 'unistd.h'),
+ 'HAVE_GETPRIORITY': ('getpriority', 'sys/resource.h'),
+ 'HAVE_GETPWENT': ('getpwent', 'pwd.h'),
+ 'HAVE_GETRANDOM': ('getrandom', 'linux/random.h'),
+ 'HAVE_GETRESGID': ('getresgid', 'unistd.h'),
+ 'HAVE_GETSID': ('getsid', 'unistd.h'),
+ 'HAVE_GETSPENT': ('getspent', 'shadow.h'),
+ 'HAVE_GETSPNAM': ('getspnam', 'shadow.h'),
+ 'HAVE_GETWD': ('getwd', 'unistd.h'),
+ 'HAVE_HSTRERROR': ('hstrerror', 'netdb.h'),
+ 'HAVE_HTOLE64': ('htole64', 'endian.h'),
+ 'HAVE_IF_NAMEINDEX': ('if_nameindex', 'net/if.h'),
+ 'HAVE_INET_ATON': ('inet_aton', 'arpa/inet.h'),
+ 'HAVE_INET_PTON': ('inet_pton', 'arpa/inet.h'),
+ 'HAVE_INITGROUPS': ('initgroups', 'grp.h'),
+ 'HAVE_KILL': ('kill', 'signal.h'),
+ 'HAVE_KILLPG': ('killpg', 'signal.h'),
+ 'HAVE_LINKAT': ('linkat', 'unistd.h'),
+ 'HAVE_LOCKF': ('lockf', 'unistd.h'),
+ 'HAVE_LUTIMES': ('lutimes', 'sys/time.h'),
+ 'HAVE_MAKEDEV': ('makedev', 'sys/sysmacros.h'),
+ 'HAVE_MBRTOWC': ('mbrtowc', 'wchar.h'),
+ 'HAVE_MEMRCHR': ('memrchr', 'string.h'),
+ 'HAVE_MKDIRAT': ('mkdirat', 'sys/stat.h'),
+ 'HAVE_MKFIFOAT': ('mkfifoat', 'sys/stat.h'),
+ 'HAVE_MKNOD': ('mknod', 'unistd.h'),
+ 'HAVE_MKNODAT': ('mknodat', 'unistd.h'),
+ 'HAVE_MKTIME': ('mktime', 'unistd.h'),
+ 'HAVE_MKREMAP': ('mkremap', 'sys/mman.h'),
+ 'HAVE_NICE': ('nice', 'unistd.h'),
+ 'HAVE_OPENAT': ('openat', 'fcntl.h'),
+ 'HAVE_OPENPTY': ('openpty', 'pty.h'),
+ 'HAVE_PATHCONF': ('pathconf', 'unistd.h'),
+ 'HAVE_PAUSE': ('pause', 'unistd.h'),
+ 'HAVE_PREAD': ('pread', 'unistd.h'),
+ 'HAVE_PTHREAD_KILL': ('pthread_kill', 'signal.h'),
+ 'HAVE_PTHREAD_SIGMASK': ('pthread_sigmask', 'signal.h'),
+ 'HAVE_PWRITE': ('pwrite', 'unistd.h'),
+ 'HAVE_READLINKAT': ('readlinkat', 'unistd.h'),
+ 'HAVE_READV': ('readv', 'sys/uio.h'),
+ 'HAVE_RENAMEAT': ('renameat', 'stdio.h'),
+ 'HAVE_SCHED_GET_PRIORITY_MAX': ('sched_get_priority_max', 'sched.h'),
+ 'HAVE_SCHED_RR_GET_INTERVAL': ('sched_rr_get_interval', 'sched.h'),
+ 'HAVE_SCHED_SETAFFINITY': ('sched_setaffinity', 'sched.h'),
+ 'HAVE_SCHED_SETPARAM': ('sched_setparam', 'sched.h'),
+ 'HAVE_SCHED_SETSCHEDULER': ('sched_setscheduler', 'sched.h'),
+ 'HAVE_SELECT': ('select', 'sys/select.h'),
+ 'HAVE_SEM_GETVALUE': ('sem_getvalue', 'semaphore.h'),
+ 'HAVE_SEM_OPEN': ('sem_open', 'semaphore.h'),
+ 'HAVE_SEM_TIMEDWAIT': ('sem_timedwait', 'semaphore.h'),
+ 'HAVE_SEM_UNLINK': ('sem_unlink', 'semaphore.h'),
+ 'HAVE_SENDFILE': ('sendfile', 'sys/sendfile.h'),
+ 'HAVE_SETGID': ('setgid', 'unistd.h'),
+ 'HAVE_SETGROUPS': ('setgroups', 'grp.h'),
+ 'HAVE_SETHOSTNAME': ('sethostname', 'unistd.h'),
+ 'HAVE_SETITIMER': ('setitimer', 'sys/time.h'),
+ 'HAVE_SETLOCALE': ('setlocale', 'locale.h'),
+ 'HAVE_SETPGRP': ('setpgrp', 'unistd.h'),
+ 'HAVE_SETPRIORITY': ('setpriority', 'sys/resource.h'),
+ 'HAVE_SETREUID': ('setreuid', 'unistd.h'),
+ 'HAVE_SETSID': ('setsid', 'unistd.h'),
+ 'HAVE_SETUID': ('setuid', 'unistd.h'),
+ 'HAVE_SETVBUF': ('setvbuf', 'unistd.h'),
+ 'HAVE_SIGALTSTACK': ('sigaltstack', 'signal.h'),
+ 'HAVE_SIGINTERRUPT': ('siginterrupt', 'signal.h'),
+ 'HAVE_SIGPENDING': ('sigpending', 'signal.h'),
+ 'HAVE_SIGRELSE': ('sigrelse', 'signal.h'),
+ 'HAVE_SIGTIMEDWAIT': ('sigtimedwait', 'signal.h'),
+ 'HAVE_SIGWAIT': ('sigwait', 'signal.h'),
+ 'HAVE_SIGWAITINFO': ('sigwaitinfo', 'signal.h'),
+ 'HAVE_SOCKETPAIR': ('socketpair', 'sys/socket.h'),
+ 'HAVE_STRFTIME': ('strftime', 'time.h'),
+ 'HAVE_SYMLINKAT': ('symlinkat', 'unistd.h'),
+ 'HAVE_SYNC': ('sync', 'unistd.h'),
+ 'HAVE_TCGETPGRP': ('tcgetpgrp', 'unistd.h'),
+ 'HAVE_TCSETPGRP': ('tcsetpgrp', 'unistd.h'),
+ 'HAVE_TEMPNAM': ('tempnam', 'stdio.h'),
+ 'HAVE_TIMES': ('times', 'sys/times.h'),
+ 'HAVE_TEMPFILE': ('tempfile', 'stdio.h'),
+ 'HAVE_TMPNAM': ('tmpnam', 'stdio.h'),
+ 'HAVE_TMPNAM_R': ('tmpnam_r', 'stdio.h'),
+ 'HAVE_TRUNCATE': ('truncate', 'unistd.h'),
+ 'HAVE_TZNAME': ('tzname', 'time.h'),
+ 'HAVE_UNAME': ('uname', 'sys/utsname.h'),
+ 'HAVE_UNLINKAT': ('unlinkat', 'unistd.h'),
+ 'HAVE_UTIMENSAT': ('utimensat', 'sys/stat.h'),
+ 'HAVE_WAIT3': ('wait3', 'sys/wait.h'),
+ 'HAVE_WAIT4': ('wait4', 'sys/wait.h'),
+ 'HAVE_WAITID': ('waitid', 'sys/wait.h'),
+ 'HAVE_WRITEV': ('writev', 'sys/uio.h'),
+ 'HAVE_WMEMCMP': ('wmemcmp', 'wchar.h'),
+ 'HAVE_ATAN': ('atan', 'math.h'),
+ 'HAVE_ATAN2': ('atan2', 'math.h'),
+ 'HAVE_ACOS': ('acos', 'math.h'),
+ 'HAVE_ACOSH': ('acosh', 'math.h'),
+ 'HAVE_ASIN': ('asin', 'math.h'),
+ 'HAVE_ASINH': ('asinh', 'math.h'),
+ 'HAVE_ATANH': ('atanh', 'math.h'),
+ 'HAVE_CEIL': ('ceil', 'math.h'),
+ 'HAVE_COPYSIGN': ('copysign', 'math.h'),
+ 'HAVE_COS': ('cos', 'math.h'),
+ 'HAVE_COSH': ('cosh', 'math.h'),
+ 'HAVE_COSF': ('cosf', 'math.h'),
+ 'HAVE_EXPM1': ('expm1', 'math.h'),
+ 'HAVE_FABS': ('fabs', 'math.h'),
+ 'HAVE_FINITE': ('finite', 'math.h'),
+ 'HAVE_FLOOR': ('floor', 'math.h'),
+ 'HAVE_GAMMA': ('gamma', 'math.h'),
+ 'HAVE_HYPOT': ('hypot', 'math.h'),
+ 'HAVE_ISINF': ('isinf', 'math.h'),
+ 'HAVE_LOG': ('log', 'math.h'),
+ 'HAVE_LOG1P': ('log1p', 'math.h'),
+ 'HAVE_LOG2': ('log2', 'math.h'),
+ 'HAVE_LGAMMA': ('lgamma', 'math.h'),
+ 'HAVE_POW': ('pow', 'math.h'),
+ 'HAVE_SCALBN': ('scalbn', 'math.h'),
+ 'HAVE_SIN': ('sin', 'math.h'),
+ 'HAVE_SINF': ('sinf', 'math.h'),
+ 'HAVE_SINH': ('sinh', 'math.h'),
+ 'HAVE_SQRT': ('sqrt', 'math.h'),
+ 'HAVE_TGAMMA': ('tgamma', 'math.h'),
+ 'HAVE_FSEEKO': ('fseeko', 'stdio.h'),
+ 'HAVE_FSEEKO64': ('fseeko64', 'stdio.h'),
+ 'HAVE_SETJMP': ('setjmp', 'setjmp.h'),
+ 'HAVE_PTHREAD_SETNAME_NP': ('pthread_setname_np', 'pthread.h'),
+ 'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
+ }
+
+headers = []
+functions = []
+sizes = []
+
+if len(sys.argv) != 2:
+ print(help_message.format(sys.argv[0]))
+ sys.exit(0)
+
+with open(sys.argv[1], encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ arr = line.split()
+
+ # Check for headers.
+ if line.startswith('#mesondefine') and line.endswith('_H'):
+ token = line.split()[1]
+ tarr = token.split('_')[1:-1]
+ tarr = [x.lower() for x in tarr]
+ hname = '/'.join(tarr) + '.h'
+ headers.append(hname)
+
+ # Check for functions.
+ try:
+ token = arr[1]
+ if token in function_data:
+ fdata = function_data[token]
+ functions.append([token, fdata[0], fdata[1]])
+ elif token.startswith('HAVE_') and not token.endswith('_H'):
+ functions.append([token])
+ except Exception:
+ pass
+
+ # Check for sizeof tests.
+ if len(arr) != 2:
+ continue
+ elem = arr[1]
+ if elem.startswith('SIZEOF_'):
+ typename = elem.split('_', 1)[1] \
+ .replace('_P', '*') \
+ .replace('_', ' ') \
+ .lower() \
+ .replace('size t', 'size_t')
+ sizes.append((elem, typename))
+
+print('''cc = meson.get_compiler('c')
+cdata = configuration_data()''')
+
+# Convert header checks.
+
+print('check_headers = [')
+for hname in headers:
+ print(f" '{hname}',")
+print(']\n')
+
+print('''foreach h : check_headers
+ if cc.has_header(h)
+ cdata.set('HAVE_' + h.underscorify().to_upper(), 1)
+ endif
+endforeach
+''')
+
+# Convert function checks.
+
+print('check_functions = [')
+for tok in functions:
+ if len(tok) == 3:
+ tokstr, fdata0, fdata1 = tok
+ print(f" ['{tokstr}', '{fdata0}', '#include<{fdata1}>'],")
+ else:
+ print('# check token', tok)
+print(']\n')
+
+print('''foreach f : check_functions
+ if cc.has_function(f.get(1), prefix : f.get(2))
+ cdata.set(f.get(0), 1)
+ endif
+endforeach
+''')
+
+# Convert sizeof checks.
+
+for elem, typename in sizes:
+ print(f"cdata.set('{elem}', cc.sizeof('{typename}'))")
+
+print('''
+configure_file(input : 'config.h.meson',
+ output : 'config.h',
+ configuration : cdata)''')
diff --git a/tools/boost_names.py b/tools/boost_names.py
new file mode 100755
index 0000000..b716ccb
--- /dev/null
+++ b/tools/boost_names.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python3
+
+# Copyright 2017 Niklas Claesson
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is two implementations for how to get module names from the boost
+sources. One relies on json metadata files in the sources, the other relies on
+the folder names.
+
+Run the tool in the boost directory and append the stdout to the misc.py:
+
+boost/$ path/to/meson/tools/boost_names.py >> path/to/meson/dependencies/misc.py
+"""
+
+import sys
+import json
+import re
+import textwrap
+import functools
+import typing as T
+from pathlib import Path
+
+lib_dir = Path('libs')
+jamroot = Path('Jamroot')
+
+not_modules = ['config', 'disjoint_sets', 'headers']
+
+export_modules = False
+
+
+@functools.total_ordering
+class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = sorted(set(shared))
+ self.static = sorted(set(static))
+ self.single = sorted(set(single))
+ self.multi = sorted(set(multi))
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, BoostLibrary):
+ return self.name < other.name
+ return NotImplemented
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, BoostLibrary):
+ return self.name == other.name
+ elif isinstance(other, str):
+ return self.name == other
+ return NotImplemented
+
+ def __hash__(self) -> int:
+ return hash(self.name)
+
+@functools.total_ordering
+class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[BoostLibrary]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, BoostModule):
+ return self.key < other.key
+ return NotImplemented
+
+
+def get_boost_version() -> T.Optional[str]:
+ raw = jamroot.read_text(encoding='utf-8')
+ m = re.search(r'BOOST_VERSION\s*:\s*([0-9\.]+)\s*;', raw)
+ if m:
+ return m.group(1)
+ return None
+
+
+def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
+ # Extract libraries from the boost Jamfiles. This includes:
+ # - library name
+ # - compiler flags
+
+ libs: T.List[BoostLibrary] = []
+ raw = jamfile.read_text(encoding='utf-8')
+ raw = re.sub(r'#.*\n', '\n', raw) # Remove comments
+ raw = re.sub(r'\s+', ' ', raw) # Force single space
+ raw = re.sub(r'}', ';', raw) # Cheat code blocks by converting } to ;
+
+ cmds = raw.split(';') # Commands always terminate with a ; (I hope)
+ cmds = [x.strip() for x in cmds] # Some cleanup
+
+ project_usage_requirements: T.List[str] = []
+
+ # "Parse" the relevant sections
+ for i in cmds:
+ parts = i.split(' ')
+ parts = [x for x in parts if x not in ['']]
+ if not parts:
+ continue
+
+ # Parse project
+ if parts[0] in ['project']:
+ attributes: T.Dict[str, T.List[str]] = {}
+ curr: T.Optional[str] = None
+
+ for j in parts:
+ if j == ':':
+ curr = None
+ elif curr is None:
+ curr = j
+ else:
+ if curr not in attributes:
+ attributes[curr] = []
+ attributes[curr] += [j]
+
+ if 'usage-requirements' in attributes:
+ project_usage_requirements = attributes['usage-requirements']
+
+ # Parse libraries
+ elif parts[0] in ['lib', 'boost-lib']:
+ assert len(parts) >= 2
+
+ # Get and check the library name
+ lname = parts[1]
+ if parts[0] == 'boost-lib':
+ lname = f'boost_{lname}'
+ if not lname.startswith('boost_'):
+ continue
+
+ # Count `:` to only select the 'usage-requirements'
+ # See https://boostorg.github.io/build/manual/master/index.html#bbv2.main-target-rule-syntax
+ colon_counter = 0
+ usage_requirements: T.List[str] = []
+ for j in parts:
+ if j == ':':
+ colon_counter += 1
+ elif colon_counter >= 4:
+ usage_requirements += [j]
+
+ # Get shared / static defines
+ shared: T.List[str] = []
+ static: T.List[str] = []
+ single: T.List[str] = []
+ multi: T.List[str] = []
+ for j in usage_requirements + project_usage_requirements:
+ m1 = re.match(r'<link>shared:<define>(.*)', j)
+ m2 = re.match(r'<link>static:<define>(.*)', j)
+ m3 = re.match(r'<threading>single:<define>(.*)', j)
+ m4 = re.match(r'<threading>multi:<define>(.*)', j)
+
+ if m1:
+ shared += [f'-D{m1.group(1)}']
+ if m2:
+ static += [f'-D{m2.group(1)}']
+ if m3:
+ single +=[f'-D{m3.group(1)}']
+ if m4:
+ multi += [f'-D{m4.group(1)}']
+
+ libs += [BoostLibrary(lname, shared, static, single, multi)]
+
+ return libs
+
+
+def process_lib_dir(ldir: Path) -> T.List[BoostModule]:
+ meta_file = ldir / 'meta' / 'libraries.json'
+ bjam_file = ldir / 'build' / 'Jamfile.v2'
+ if not meta_file.exists():
+ print(f'WARNING: Meta file {meta_file} does not exist')
+ return []
+
+ # Extract libs
+ libs: T.List[BoostLibrary] = []
+ if bjam_file.exists():
+ libs = get_libraries(bjam_file)
+
+ # Extract metadata
+ data = json.loads(meta_file.read_text(encoding='utf-8'))
+ if not isinstance(data, list):
+ data = [data]
+
+ modules: T.List[BoostModule] = []
+ for i in data:
+ modules += [BoostModule(i['name'], i['key'], i['description'], libs)]
+
+ return modules
+
+
+def get_modules() -> T.List[BoostModule]:
+ modules: T.List[BoostModule] = []
+ for i in lib_dir.iterdir():
+ if not i.is_dir() or i.name in not_modules:
+ continue
+
+ # numeric has sub libs
+ subdirs = i / 'sublibs'
+ metadir = i / 'meta'
+ if subdirs.exists() and not metadir.exists():
+ for j in i.iterdir():
+ if not j.is_dir():
+ continue
+ modules += process_lib_dir(j)
+ else:
+ modules += process_lib_dir(i)
+
+ return modules
+
+
+def main() -> int:
+ if not lib_dir.is_dir() or not jamroot.exists():
+ print("ERROR: script must be run in boost source directory")
+ return 1
+
+ vers = get_boost_version()
+ modules = get_modules()
+ modules = sorted(modules)
+ libraries = [x for y in modules for x in y.libs]
+ libraries = sorted(set(libraries))
+
+ print(textwrap.dedent(f'''\
+ #### ---- BEGIN GENERATED ---- ####
+ # #
+ # Generated with tools/boost_names.py:
+ # - boost version: {vers}
+ # - modules found: {len(modules)}
+ # - libraries found: {len(libraries)}
+ #
+
+ class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = shared
+ self.static = static
+ self.single = single
+ self.multi = multi
+
+ class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+
+ # dict of all known libraries with additional compile options
+ boost_libraries = {{\
+ '''))
+
+ for i in libraries:
+ print(textwrap.indent(textwrap.dedent(f"""\
+ '{i.name}': BoostLibrary(
+ name='{i.name}',
+ shared={i.shared},
+ static={i.static},
+ single={i.single},
+ multi={i.multi},
+ ),\
+ """), ' '))
+
+ if export_modules:
+ print(textwrap.dedent(f'''\
+ }}
+
+
+ # dict of all modules with metadata
+ boost_modules = {{\
+ '''))
+
+ for mod in modules:
+ desc_excaped = re.sub(r"'", "\\'", mod.desc)
+ print(textwrap.indent(textwrap.dedent(f"""\
+ '{mod.key}': BoostModule(
+ name='{mod.name}',
+ key='{mod.key}',
+ desc='{desc_excaped}',
+ libs={[x.name for x in mod.libs]},
+ ),\
+ """), ' '))
+
+ print(textwrap.dedent(f'''\
+ }}
+
+ # #
+ #### ---- END GENERATED ---- ####\
+ '''))
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/build_website.py b/tools/build_website.py
new file mode 100755
index 0000000..77a049c
--- /dev/null
+++ b/tools/build_website.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+
+import os, subprocess, shutil
+
+assert os.getcwd() == '/home/jpakkane'
+
+from glob import glob
+
+def purge(fname: str) -> None:
+ if not os.path.exists(fname):
+ return
+ if os.path.isdir(fname):
+ shutil.rmtree(fname)
+ os.unlink(fname)
+
+def update() -> None:
+ webdir = 'mesonweb'
+ repodir = 'mesonwebbuild'
+ docdir = os.path.join(repodir, 'docs')
+ builddir = os.path.join(docdir, 'builddir')
+ htmldir = os.path.join(builddir, 'Meson documentation-doc/html')
+# subprocess.check_call(['git', 'pull'], cwd=webdir)
+ subprocess.check_call(['git', 'fetch', '-a'], cwd=repodir)
+ subprocess.check_call(['git', 'reset', '--hard', 'origin/master'],
+ cwd=repodir)
+ if os.path.isdir(htmldir):
+ shutil.rmtree(htmldir)
+ if os.path.isdir(builddir):
+ shutil.rmtree(builddir)
+ env = os.environ.copy()
+ env['PATH'] = env['PATH'] + ':/home/jpakkane/.local/bin'
+ subprocess.check_call(['../meson.py', '.', 'builddir'], cwd=docdir, env=env)
+ subprocess.check_call(['ninja'], cwd=builddir)
+ old_files = glob(os.path.join(webdir, '*'))
+ for f in old_files:
+ base = f[len(webdir)+1:]
+ if base == 'CNAME' or base == 'favicon.png':
+ continue
+ subprocess.check_call(['git', 'rm', '-rf', base], cwd=webdir)
+ assert os.path.isdir(webdir)
+ new_entries = glob(os.path.join(htmldir, '*'))
+ for e in new_entries:
+ shutil.move(e, webdir)
+ subprocess.check_call('git add *', shell=True, cwd=webdir)
+ subprocess.check_call(['git', 'commit', '-a', '-m', 'Bleep. Bloop. I am a bot.'],
+ cwd=webdir)
+ subprocess.check_call(['git', 'push'], cwd=webdir)
+ shutil.rmtree(builddir)
+
+if __name__ == '__main__':
+ update()
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
new file mode 100755
index 0000000..a12d9cf
--- /dev/null
+++ b/tools/cmake2meson.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python3
+
+# Copyright 2014 Jussi Pakkanen
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+from pathlib import Path
+import sys
+import re
+import argparse
+
+
+class Token:
+ def __init__(self, tid: str, value: str):
+ self.tid = tid
+ self.value = value
+ self.lineno = 0
+ self.colno = 0
+
+class Statement:
+ def __init__(self, name: str, args: list):
+ self.name = name.lower()
+ self.args = args
+
+class Lexer:
+ def __init__(self) -> None:
+ self.token_specification = [
+ # Need to be sorted longest to shortest.
+ ('ignore', re.compile(r'[ \t]')),
+ ('string', re.compile(r'"([^\\]|(\\.))*?"', re.M)),
+ ('varexp', re.compile(r'\${[-_0-9a-z/A-Z.]+}')),
+ ('id', re.compile('''[,-><${}=+_0-9a-z/A-Z|@.*]+''')),
+ ('eol', re.compile(r'\n')),
+ ('comment', re.compile(r'#.*')),
+ ('lparen', re.compile(r'\(')),
+ ('rparen', re.compile(r'\)')),
+ ]
+
+ def lex(self, code: str) -> T.Iterator[Token]:
+ lineno = 1
+ line_start = 0
+ loc = 0
+ col = 0
+ while loc < len(code):
+ matched = False
+ for (tid, reg) in self.token_specification:
+ mo = reg.match(code, loc)
+ if mo:
+ col = mo.start() - line_start
+ matched = True
+ loc = mo.end()
+ match_text = mo.group()
+ if tid == 'ignore':
+ continue
+ if tid == 'comment':
+ yield(Token('comment', match_text))
+ elif tid == 'lparen':
+ yield(Token('lparen', '('))
+ elif tid == 'rparen':
+ yield(Token('rparen', ')'))
+ elif tid == 'string':
+ yield(Token('string', match_text[1:-1]))
+ elif tid == 'id':
+ yield(Token('id', match_text))
+ elif tid == 'eol':
+ # yield('eol')
+ lineno += 1
+ col = 1
+ line_start = mo.end()
+ elif tid == 'varexp':
+ yield(Token('varexp', match_text[2:-1]))
+ else:
+ raise ValueError(f'lex: unknown element {tid}')
+ break
+ if not matched:
+ raise ValueError('Lexer got confused line %d column %d' % (lineno, col))
+
+class Parser:
+ def __init__(self, code: str) -> None:
+ self.stream = Lexer().lex(code)
+ self.getsym()
+
+ def getsym(self) -> None:
+ try:
+ self.current = next(self.stream)
+ except StopIteration:
+ self.current = Token('eof', '')
+
+ def accept(self, s: str) -> bool:
+ if self.current.tid == s:
+ self.getsym()
+ return True
+ return False
+
+ def expect(self, s: str) -> bool:
+ if self.accept(s):
+ return True
+ raise ValueError(f'Expecting {s} got {self.current.tid}.', self.current.lineno, self.current.colno)
+
+ def statement(self) -> Statement:
+ cur = self.current
+ if self.accept('comment'):
+ return Statement('_', [cur.value])
+ self.accept('id')
+ self.expect('lparen')
+ args = self.arguments()
+ self.expect('rparen')
+ return Statement(cur.value, args)
+
+ def arguments(self) -> T.List[T.Union[Token, T.Any]]:
+ args = [] # type: T.List[T.Union[Token, T.Any]]
+ if self.accept('lparen'):
+ args.append(self.arguments())
+ self.expect('rparen')
+ arg = self.current
+ if self.accept('comment'):
+ rest = self.arguments()
+ args += rest
+ elif self.accept('string') \
+ or self.accept('varexp') \
+ or self.accept('id'):
+ args.append(arg)
+ rest = self.arguments()
+ args += rest
+ return args
+
+ def parse(self) -> T.Iterator[Statement]:
+ while not self.accept('eof'):
+ yield(self.statement())
+
+def token_or_group(arg: T.Union[Token, T.List[Token]]) -> str:
+ if isinstance(arg, Token):
+ return ' ' + arg.value
+ elif isinstance(arg, list):
+ line = ' ('
+ for a in arg:
+ line += ' ' + token_or_group(a)
+ line += ' )'
+ return line
+ raise RuntimeError('Conversion error in token_or_group')
+
+class Converter:
+ ignored_funcs = {'cmake_minimum_required': True,
+ 'enable_testing': True,
+ 'include': True}
+
+ def __init__(self, cmake_root: str):
+ self.cmake_root = Path(cmake_root).expanduser()
+ self.indent_unit = ' '
+ self.indent_level = 0
+ self.options = [] # type: T.List[tuple]
+
+ def convert_args(self, args: T.List[Token], as_array: bool = True) -> str:
+ res = []
+ if as_array:
+ start = '['
+ end = ']'
+ else:
+ start = ''
+ end = ''
+ for i in args:
+ if i.tid == 'id':
+ res.append("'%s'" % i.value)
+ elif i.tid == 'varexp':
+ res.append('%s' % i.value.lower())
+ elif i.tid == 'string':
+ res.append("'%s'" % i.value)
+ else:
+ raise ValueError(f'Unknown arg type {i.tid}')
+ if len(res) > 1:
+ return start + ', '.join(res) + end
+ if len(res) == 1:
+ return res[0]
+ return ''
+
+ def write_entry(self, outfile: T.TextIO, t: Statement) -> None:
+ if t.name in Converter.ignored_funcs:
+ return
+ preincrement = 0
+ postincrement = 0
+ if t.name == '_':
+ line = t.args[0]
+ elif t.name == 'add_subdirectory':
+ line = "subdir('" + t.args[0].value + "')"
+ elif t.name == 'pkg_search_module' or t.name == 'pkg_search_modules':
+ varname = t.args[0].value.lower()
+ mods = ["dependency('%s')" % i.value for i in t.args[1:]]
+ if len(mods) == 1:
+ line = '{} = {}'.format(varname, mods[0])
+ else:
+ line = '{} = [{}]'.format(varname, ', '.join(["'%s'" % i for i in mods]))
+ elif t.name == 'find_package':
+ line = "{}_dep = dependency('{}')".format(t.args[0].value, t.args[0].value)
+ elif t.name == 'find_library':
+ line = "{} = find_library('{}')".format(t.args[0].value.lower(), t.args[0].value)
+ elif t.name == 'add_executable':
+ line = '{}_exe = executable({})'.format(t.args[0].value, self.convert_args(t.args, False))
+ elif t.name == 'add_library':
+ if t.args[1].value == 'SHARED':
+ libcmd = 'shared_library'
+ args = [t.args[0]] + t.args[2:]
+ elif t.args[1].value == 'STATIC':
+ libcmd = 'static_library'
+ args = [t.args[0]] + t.args[2:]
+ else:
+ libcmd = 'library'
+ args = t.args
+ line = '{}_lib = {}({})'.format(t.args[0].value, libcmd, self.convert_args(args, False))
+ elif t.name == 'add_test':
+ line = 'test(%s)' % self.convert_args(t.args, False)
+ elif t.name == 'option':
+ optname = t.args[0].value
+ description = t.args[1].value
+ if len(t.args) > 2:
+ default = t.args[2].value
+ else:
+ default = None
+ self.options.append((optname, description, default))
+ return
+ elif t.name == 'project':
+ pname = t.args[0].value
+ args = [pname]
+ for l in t.args[1:]:
+ l = l.value.lower()
+ if l == 'cxx':
+ l = 'cpp'
+ args.append(l)
+ args = ["'%s'" % i for i in args]
+ line = 'project(' + ', '.join(args) + ", default_options : ['default_library=static'])"
+ elif t.name == 'set':
+ varname = t.args[0].value.lower()
+ line = '{} = {}\n'.format(varname, self.convert_args(t.args[1:]))
+ elif t.name == 'if':
+ postincrement = 1
+ try:
+ line = 'if %s' % self.convert_args(t.args, False)
+ except AttributeError: # complex if statements
+ line = t.name
+ for arg in t.args:
+ line += token_or_group(arg)
+ elif t.name == 'elseif':
+ preincrement = -1
+ postincrement = 1
+ try:
+ line = 'elif %s' % self.convert_args(t.args, False)
+ except AttributeError: # complex if statements
+ line = t.name
+ for arg in t.args:
+ line += token_or_group(arg)
+ elif t.name == 'else':
+ preincrement = -1
+ postincrement = 1
+ line = 'else'
+ elif t.name == 'endif':
+ preincrement = -1
+ line = 'endif'
+ else:
+ line = '''# {}({})'''.format(t.name, self.convert_args(t.args))
+ self.indent_level += preincrement
+ indent = self.indent_level * self.indent_unit
+ outfile.write(indent)
+ outfile.write(line)
+ if not(line.endswith('\n')):
+ outfile.write('\n')
+ self.indent_level += postincrement
+
+ def convert(self, subdir: Path = None) -> None:
+ if not subdir:
+ subdir = self.cmake_root
+ cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
+ try:
+ with cfile.open(encoding='utf-8') as f:
+ cmakecode = f.read()
+ except FileNotFoundError:
+ print('\nWarning: No CMakeLists.txt in', subdir, '\n', file=sys.stderr)
+ return
+ p = Parser(cmakecode)
+ with (subdir / 'meson.build').open('w', encoding='utf-8') as outfile:
+ for t in p.parse():
+ if t.name == 'add_subdirectory':
+ # print('\nRecursing to subdir',
+ # self.cmake_root / t.args[0].value,
+ # '\n')
+ self.convert(subdir / t.args[0].value)
+ # print('\nReturning to', self.cmake_root, '\n')
+ self.write_entry(outfile, t)
+ if subdir == self.cmake_root and len(self.options) > 0:
+ self.write_options()
+
+ def write_options(self) -> None:
+ filename = self.cmake_root / 'meson_options.txt'
+ with filename.open('w', encoding='utf-8') as optfile:
+ for o in self.options:
+ (optname, description, default) = o
+ if default is None:
+ typestr = ''
+ defaultstr = ''
+ else:
+ if default == 'OFF':
+ typestr = ' type : \'boolean\','
+ default = 'false'
+ elif default == 'ON':
+ default = 'true'
+ typestr = ' type : \'boolean\','
+ else:
+ typestr = ' type : \'string\','
+ defaultstr = ' value : %s,' % default
+ line = "option({!r},{}{} description : '{}')\n".format(optname,
+ typestr,
+ defaultstr,
+ description)
+ optfile.write(line)
+
+if __name__ == '__main__':
+ p = argparse.ArgumentParser(description='Convert CMakeLists.txt to meson.build and meson_options.txt')
+ p.add_argument('cmake_root', help='CMake project root (where top-level CMakeLists.txt is)')
+ P = p.parse_args()
+
+ Converter(P.cmake_root).convert()
diff --git a/tools/copy_files.py b/tools/copy_files.py
new file mode 100644
index 0000000..de25189
--- /dev/null
+++ b/tools/copy_files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Copy files
+'''
+
+import argparse
+import shutil
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def copy_files(files: T.List[str], input_dir: PathLike, output_dir: PathLike) -> None:
+ if not input_dir:
+ raise ValueError(f'Input directory value is not set')
+ if not output_dir:
+ raise ValueError(f'Output directory value is not set')
+
+ input_dir = Path(input_dir).resolve()
+ output_dir = Path(output_dir).resolve()
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ for f in files:
+ if (input_dir/f).is_dir():
+ shutil.copytree(input_dir/f, output_dir/f)
+ else:
+ shutil.copy2(input_dir/f, output_dir/f)
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Copy files')
+ parser.add_argument('files', metavar='FILE', nargs='*')
+ parser.add_argument('-C', dest='input_dir', required=True)
+ parser.add_argument('--output-dir', required=True)
+
+ args = parser.parse_args()
+
+ copy_files(files=args.files,
+ input_dir=args.input_dir,
+ output_dir=args.output_dir)
diff --git a/tools/dircondenser.py b/tools/dircondenser.py
new file mode 100755
index 0000000..fa299e9
--- /dev/null
+++ b/tools/dircondenser.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Renames test case directories using Git from this:
+
+1 something
+3 other
+3 foo
+3 bar
+
+to this:
+
+1 something
+2 other
+3 foo
+4 bar
+
+This script must be run from the source root as it touches run_unittests.py.
+'''
+
+import typing as T
+import os
+import sys
+import subprocess
+
+from glob import glob
+
+def get_entries() -> T.List[T.Tuple[int, str]]:
+ entries = []
+ for e in glob('*'):
+ if not os.path.isdir(e):
+ raise SystemExit('Current directory must not contain any files.')
+ (number, rest) = e.split(' ', 1)
+ try:
+ numstr = int(number)
+ except ValueError:
+ raise SystemExit(f'Dir name {e} does not start with a number.')
+ entries.append((numstr, rest))
+ entries.sort()
+ return entries
+
+def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]) -> None:
+ with open(sourcefile, encoding='utf-8') as f:
+ contents = f.read()
+ for old_name, new_name in replacements:
+ contents = contents.replace(old_name, new_name)
+ with open(sourcefile, 'w', encoding='utf-8') as f:
+ f.write(contents)
+
+def condense(dirname: str) -> None:
+ curdir = os.getcwd()
+ os.chdir(dirname)
+ entries = get_entries()
+ replacements = []
+ for _i, e in enumerate(entries):
+ i = _i + 1
+ if e[0] != i:
+ old_name = str(e[0]) + ' ' + e[1]
+ new_name = str(i) + ' ' + e[1]
+ #print('git mv "%s" "%s"' % (old_name, new_name))
+ subprocess.check_call(['git', 'mv', old_name, new_name])
+ replacements.append((old_name, new_name))
+ # update any appearances of old_name in expected stdout in test.json
+ json = os.path.join(new_name, 'test.json')
+ if os.path.isfile(json):
+ replace_source(json, [(old_name, new_name)])
+ os.chdir(curdir)
+ replace_source('run_unittests.py', replacements)
+ replace_source('run_project_tests.py', replacements)
+ for f in glob('unittests/*.py'):
+ replace_source(f, replacements)
+
+if __name__ == '__main__':
+ if len(sys.argv) != 1:
+ raise SystemExit('This script takes no arguments.')
+ for d in glob('test cases/*'):
+ condense(d)
diff --git a/tools/regenerate_docs.py b/tools/regenerate_docs.py
new file mode 100755
index 0000000..6e4d8f9
--- /dev/null
+++ b/tools/regenerate_docs.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Regenerate markdown docs by using `meson.py` from the root dir
+'''
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import json
+import typing as T
+from pathlib import Path
+from urllib.request import urlopen
+
+PathLike = T.Union[Path,str]
+
+def _get_meson_output(root_dir: Path, args: T.List) -> str:
+ env = os.environ.copy()
+ env['COLUMNS'] = '80'
+ return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()
+
+def get_commands(help_output: str) -> T.Set[str]:
+ # Python's argument parser might put the command list to its own line. Or it might not.
+ assert(help_output.startswith('usage: '))
+ lines = help_output.split('\n')
+ line1 = lines[0]
+ line2 = lines[1]
+ if '{' in line1:
+ cmndline = line1
+ else:
+ assert('{' in line2)
+ cmndline = line2
+ cmndstr = cmndline.split('{')[1]
+ assert('}' in cmndstr)
+ help_commands = set(cmndstr.split('}')[0].split(','))
+ assert(len(help_commands) > 0)
+ return {c.strip() for c in help_commands}
+
+def get_commands_data(root_dir: Path) -> T.Dict[str, T.Any]:
+ usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
+ positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+ options_start_pattern = re.compile(r'^(optional arguments|options):[\t ]*[\r\n]+', re.MULTILINE)
+ commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)
+
+ def get_next_start(iterators: T.Sequence[T.Any], end: T.Optional[int]) -> int:
+ return next((i.start() for i in iterators if i), end)
+
+ def normalize_text(text: str) -> str:
+ # clean up formatting
+ out = text
+ out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL
+ out = re.sub(r'^ +$', '', out, flags=re.MULTILINE) # remove trailing whitespace
+ out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
+ return out
+
+ def parse_cmd(cmd: str) -> T.Dict[str, str]:
+ cmd_len = len(cmd)
+ usage = usage_start_pattern.search(cmd)
+ positionals = positional_start_pattern.search(cmd)
+ options = options_start_pattern.search(cmd)
+ commands = commands_start_pattern.search(cmd)
+
+ arguments_start = get_next_start([positionals, options, commands], None)
+ assert arguments_start
+
+ # replace `usage:` with `$` and dedent
+ dedent_size = (usage.end() - usage.start()) - len('$ ')
+ usage_text = textwrap.dedent(f'{dedent_size * " "}$ {normalize_text(cmd[usage.end():arguments_start])}')
+
+ return {
+ 'usage': usage_text,
+ 'arguments': normalize_text(cmd[arguments_start:cmd_len]),
+ }
+
+ def clean_dir_arguments(text: str) -> str:
+ # Remove platform specific defaults
+ args = [
+ 'prefix',
+ 'bindir',
+ 'datadir',
+ 'includedir',
+ 'infodir',
+ 'libdir',
+ 'libexecdir',
+ 'localedir',
+ 'localstatedir',
+ 'mandir',
+ 'sbindir',
+ 'sharedstatedir',
+ 'sysconfdir'
+ ]
+ out = text
+ for a in args:
+ out = re.sub(r'(--' + a + r' .+?)\s+\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL)
+ return out
+
+ output = _get_meson_output(root_dir, ['--help'])
+ commands = get_commands(output)
+ commands.remove('help')
+
+ cmd_data = dict()
+
+ for cmd in commands:
+ cmd_output = _get_meson_output(root_dir, [cmd, '--help'])
+ cmd_data[cmd] = parse_cmd(cmd_output)
+ if cmd in ['setup', 'configure']:
+ cmd_data[cmd]['arguments'] = clean_dir_arguments(cmd_data[cmd]['arguments'])
+
+ return cmd_data
+
+def generate_hotdoc_includes(root_dir: Path, output_dir: Path) -> None:
+ cmd_data = get_commands_data(root_dir)
+
+ for cmd, parsed in cmd_data.items():
+ for typ in parsed.keys():
+ with open(output_dir / (cmd+'_'+typ+'.inc'), 'w', encoding='utf-8') as f:
+ f.write(parsed[typ])
+
+def generate_wrapdb_table(output_dir: Path) -> None:
+ url = urlopen('https://wrapdb.mesonbuild.com/v2/releases.json')
+ releases = json.loads(url.read().decode())
+ with open(output_dir / 'wrapdb-table.md', 'w', encoding='utf-8') as f:
+ f.write('| Project | Versions | Provided dependencies | Provided programs |\n')
+ f.write('| ------- | -------- | --------------------- | ----------------- |\n')
+ for name, info in releases.items():
+ versions = []
+ added_tags = set()
+ for v in info['versions']:
+ tag, build = v.rsplit('-', 1)
+ if tag not in added_tags:
+ added_tags.add(tag)
+ versions.append(f'[{v}](https://wrapdb.mesonbuild.com/v2/{name}_{v}/{name}.wrap)')
+ # Highlight latest version.
+ versions_str = f'<big>**{versions[0]}**</big><br/>' + ', '.join(versions[1:])
+ dependency_names = info.get('dependency_names', [])
+ dependency_names_str = ', '.join(dependency_names)
+ program_names = info.get('program_names', [])
+ program_names_str = ', '.join(program_names)
+ f.write(f'| {name} | {versions_str} | {dependency_names_str} | {program_names_str} |\n')
+
+def regenerate_docs(output_dir: PathLike,
+ dummy_output_file: T.Optional[PathLike]) -> None:
+ if not output_dir:
+ raise ValueError(f'Output directory value is not set')
+
+ output_dir = Path(output_dir).resolve()
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ root_dir = Path(__file__).resolve().parent.parent
+
+ generate_hotdoc_includes(root_dir, output_dir)
+ generate_wrapdb_table(output_dir)
+
+ if dummy_output_file:
+ with open(output_dir/dummy_output_file, 'w', encoding='utf-8') as f:
+ f.write('dummy file for custom_target output')
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Generate meson docs')
+ parser.add_argument('--output-dir', required=True)
+ parser.add_argument('--dummy-output-file', type=str)
+
+ args = parser.parse_args()
+
+ regenerate_docs(output_dir=args.output_dir,
+ dummy_output_file=args.dummy_output_file)
diff --git a/tools/run_with_cov.py b/tools/run_with_cov.py
new file mode 100755
index 0000000..3f78efc
--- /dev/null
+++ b/tools/run_with_cov.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import coverage
+import os
+import sys
+from pathlib import Path
+
+root_path = Path(__file__).parent.parent.absolute()
+
+# Python magic so we can import mesonlib
+sys.path.append(root_path.as_posix())
+from mesonbuild import mesonlib
+
+def generate_coveragerc() -> Path:
+ i_file = (root_path / 'data' / '.coveragerc.in')
+ o_file = (root_path / '.coveragerc')
+ raw = i_file.read_text(encoding='utf-8')
+ raw = raw.replace('@ROOT@', root_path.as_posix())
+ o_file.write_text(raw, encoding='utf-8')
+ return o_file
+
+def main() -> int:
+ # Remove old run data
+ out_dir = root_path / '.coverage'
+ mesonlib.windows_proof_rmtree(out_dir.as_posix())
+ out_dir.mkdir(parents=True, exist_ok=True)
+
+ # Setup coverage
+ python_path = (root_path / 'ci').as_posix()
+ os.environ['PYTHONPATH'] = os.pathsep.join([python_path, os.environ.get('PYTHONPATH', '')])
+ os.environ['COVERAGE_PROCESS_START'] = generate_coveragerc().as_posix()
+ coverage.process_startup()
+
+ # Run the actual command
+ cmd = mesonlib.python_command + sys.argv[1:]
+ return subprocess.run(cmd, env=os.environ.copy()).returncode
+
+if __name__ == '__main__':
+ raise SystemExit(main())