Diffstat (limited to 'scripts')
-rw-r--r--   scripts/.gitignore             1
-rwxr-xr-x   scripts/powerline-config      22
-rwxr-xr-x   scripts/powerline-daemon     495
-rwxr-xr-x   scripts/powerline-lint        13
-rwxr-xr-x   scripts/powerline-release.py 242
-rwxr-xr-x   scripts/powerline-render      31
6 files changed, 804 insertions, 0 deletions
diff --git a/scripts/.gitignore b/scripts/.gitignore
new file mode 100644
index 0000000..f2ffc12
--- /dev/null
+++ b/scripts/.gitignore
@@ -0,0 +1 @@
+powerline
diff --git a/scripts/powerline-config b/scripts/powerline-config
new file mode 100755
index 0000000..1fb2029
--- /dev/null
+++ b/scripts/powerline-config
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8:noet
+from __future__ import (unicode_literals, division, absolute_import, print_function)
+
+try:
+ from powerline.commands.config import get_argparser
+except ImportError:
+ import sys
+ import os
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
+ from powerline.commands.config import get_argparser
+
+import powerline.bindings.config as config
+
+
+if __name__ == '__main__':
+ parser = get_argparser()
+ args = parser.parse_args()
+
+ pl = config.create_powerline_logger(args)
+
+ args.function(pl, args)
diff --git a/scripts/powerline-daemon b/scripts/powerline-daemon
new file mode 100755
index 0000000..f15ac69
--- /dev/null
+++ b/scripts/powerline-daemon
@@ -0,0 +1,495 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8:noet
+from __future__ import (unicode_literals, division, absolute_import, print_function)
+
+import socket
+import os
+import errno
+import sys
+import fcntl
+import atexit
+import stat
+
+from argparse import ArgumentParser
+from select import select
+from signal import signal, SIGTERM
+from time import sleep
+from functools import partial
+from io import BytesIO
+from threading import Event
+from itertools import chain
+from logging import StreamHandler
+
+from powerline.shell import ShellPowerline
+from powerline.commands.main import finish_args, write_output
+from powerline.lib.monotonic import monotonic
+from powerline.lib.encoding import get_preferred_output_encoding, get_preferred_arguments_encoding, get_unicode_writer
+from powerline.bindings.wm import wm_threads
+
+from powerline.commands.main import get_argparser as get_main_argparser
+from powerline.commands.daemon import get_argparser as get_daemon_argparser
+
+
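+# Linux supports abstract-namespace UNIX sockets ('\0'-prefixed addresses), so
+# a socket file and pidfile on the filesystem are only needed on other platforms.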
+USE_FILESYSTEM = not sys.platform.lower().startswith('linux')
+
+
+class NonInteractiveArgParser(ArgumentParser):
+ def print_usage(self, file=None):
+ raise Exception(self.format_usage())
+
+ def print_help(self, file=None):
+ raise Exception(self.format_help())
+
+ def exit(self, status=0, message=None):
+ pass
+
+ def error(self, message):
+ raise Exception(self.format_usage())
+
+
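+# Every request ends with two NUL bytes; this particular payload tells the
+# daemon to shut down (see kill_daemon() and do_one()).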
+EOF = b'EOF\0\0'
+
+
+class State(object):
+ __slots__ = ('powerlines', 'logger', 'config_loader', 'started_wm_threads',
+ 'ts_shutdown_event')
+
+ def __init__(self, **kwargs):
+ self.logger = None
+ self.config_loader = None
+ self.started_wm_threads = {}
+ self.powerlines = {}
+ self.ts_shutdown_event = Event()
+
+
+HOME = os.path.expanduser('~')
+
+
+class NonDaemonShellPowerline(ShellPowerline):
+ def get_log_handler(self):
+ return StreamHandler()
+
+
+def start_wm(args, environ, cwd, is_daemon, state):
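+	# 'wm.<name>' extensions are served by a long-running window-manager thread
+	# rather than by a per-request render: start the thread once and remember it
+	# so it can be joined on shutdown.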
+ wm_name = args.ext[0][3:]
+ if wm_name in state.started_wm_threads:
+ return b''
+ thread_shutdown_event = Event()
+ thread = wm_threads[wm_name](
+ thread_shutdown_event=thread_shutdown_event,
+ pl_shutdown_event=state.ts_shutdown_event,
+ pl_config_loader=state.config_loader,
+ )
+ thread.start()
+ state.started_wm_threads[wm_name] = (thread, thread_shutdown_event)
+ return b''
+
+
+def render(args, environ, cwd, is_daemon, state):
+ segment_info = {
+ 'getcwd': lambda: cwd,
+ 'home': environ.get('HOME', HOME),
+ 'environ': environ,
+ 'args': args,
+ }
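+	# Cache one Powerline instance per unique combination of extension, renderer
+	# module and configuration overrides.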
+ key = (
+ args.ext[0],
+ args.renderer_module,
+ tuple(args.config_override) if args.config_override else None,
+ tuple(args.theme_override) if args.theme_override else None,
+ tuple(args.config_path) if args.config_path else None,
+ environ.get('POWERLINE_THEME_OVERRIDES', ''),
+ environ.get('POWERLINE_CONFIG_OVERRIDES', ''),
+ environ.get('POWERLINE_CONFIG_PATHS', ''),
+ )
+
+ PowerlineClass = ShellPowerline if is_daemon else NonDaemonShellPowerline
+ powerline = None
+ try:
+ powerline = state.powerlines[key]
+ except KeyError:
+ try:
+ powerline = state.powerlines[key] = PowerlineClass(
+ args,
+ logger=state.logger,
+ config_loader=state.config_loader,
+ run_once=False,
+ shutdown_event=state.ts_shutdown_event,
+ )
+ if state.logger is None:
+ state.logger = powerline.logger
+ if state.config_loader is None:
+ state.config_loader = powerline.config_loader
+ except SystemExit:
+			# Somebody thought raising SystemExit was a good idea; swallow it and return nothing.
+ return ''
+ except Exception as e:
+ if powerline:
+ powerline.pl.exception('Failed to render {0}: {1}', str(key), str(e))
+ else:
+ return 'Failed to render {0}: {1}'.format(str(key), str(e))
+ s = BytesIO()
+ write_output(args, powerline, segment_info, get_unicode_writer(stream=s))
+ s.seek(0)
+ return s.read()
+
+
+def eintr_retry_call(func, *args, **kwargs):
+ while True:
+ try:
+ return func(*args, **kwargs)
+ except EnvironmentError as e:
+ if getattr(e, 'errno', None) == errno.EINTR:
+ continue
+ raise
+
+
+def do_read(conn, timeout=2.0):
+ ''' Read data from the client. If the client fails to send data within
+ timeout seconds, abort. '''
+ read = []
+ end_time = monotonic() + timeout
+ while not read or not read[-1].endswith(b'\0\0'):
+ r, w, e = select((conn,), (), (conn,), timeout)
+ if e:
+ return
+ if monotonic() > end_time:
+ return
+ if not r:
+ continue
+ x = eintr_retry_call(conn.recv, 4096)
+ if x:
+ read.append(x)
+ else:
+ break
+ return b''.join(read)
+
+
+def do_write(conn, result):
+ try:
+ eintr_retry_call(conn.sendall, result)
+ except Exception:
+ pass
+
+
+def safe_bytes(o, encoding=get_preferred_output_encoding()):
+ '''Return bytes instance without ever throwing an exception.'''
+ try:
+ try:
+ # We are assuming that o is a unicode object
+ return o.encode(encoding, 'replace')
+ except Exception:
+			# The object may define __bytes__ (Python 3) or a __str__ method
+			# (Python 2).
+			# This also catches the problem with non_ascii_bytes.encode('utf-8')
+			# in Python 2: it first decodes using the ascii codec (which fails
+			# here) and only then encodes to the given encoding; the errors=
+			# argument is not applied in the first stage.
+ return bytes(o)
+ except Exception as e:
+ return safe_bytes(str(e), encoding)
+
+
+def parse_args(req, parser, encoding=get_preferred_arguments_encoding()):
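+	# Wire format: NUL-separated fields containing a hexadecimal argument count,
+	# the command-line arguments themselves, the client's working directory and
+	# finally its environment as KEY=VALUE entries.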
+ args = [x.decode(encoding) for x in req.split(b'\0') if x]
+ numargs = int(args[0], 16)
+ shell_args = parser.parse_args(args[1:numargs + 1])
+ cwd = args[numargs + 1]
+ environ = dict(((k, v) for k, v in (x.partition('=')[0::2] for x in args[numargs + 2:])))
+ cwd = cwd or environ.get('PWD', '/')
+ return shell_args, environ, cwd
+
+
+def get_answer(req, is_daemon, argparser, state):
+ try:
+ args, environ, cwd = parse_args(req, argparser)
+ finish_args(argparser, environ, args, is_daemon=True)
+ if args.ext[0].startswith('wm.'):
+ return safe_bytes(start_wm(args, environ, cwd, is_daemon, state))
+ else:
+ return safe_bytes(render(args, environ, cwd, is_daemon, state))
+ except Exception as e:
+ return safe_bytes(str(e))
+
+
+def do_one(sock, read_sockets, write_sockets, result_map, is_daemon, argparser,
+ state):
+ r, w, e = select(
+ tuple(read_sockets) + (sock,),
+ tuple(write_sockets),
+ tuple(read_sockets) + tuple(write_sockets) + (sock,),
+ 60.0
+ )
+
+ if sock in e:
+ # We cannot accept any more connections, so we exit
+ raise SystemExit(1)
+
+ for s in e:
+ # Discard all broken connections to clients
+ s.close()
+ read_sockets.discard(s)
+ write_sockets.discard(s)
+
+ for s in r:
+ if s == sock:
+ # A client wants to connect
+ conn, _ = eintr_retry_call(sock.accept)
+ read_sockets.add(conn)
+ else:
+ # A client has sent some data
+ read_sockets.discard(s)
+ req = do_read(s)
+ if req == EOF:
+ raise SystemExit(0)
+ elif req:
+ ans = get_answer(req, is_daemon, argparser, state)
+ result_map[s] = ans
+ write_sockets.add(s)
+ else:
+ s.close()
+
+ for s in w:
+ # A client is ready to receive the result
+ write_sockets.discard(s)
+ result = result_map.pop(s)
+ try:
+ do_write(s, result)
+ finally:
+ s.close()
+
+
+def shutdown(sock, read_sockets, write_sockets, state):
+ '''Perform operations necessary for nicely shutting down daemon
+
+ Specifically it
+
+ #. Closes all sockets.
+ #. Notifies segments based on
+ :py:class:`powerline.lib.threaded.ThreadedSegment` and WM-specific
+ threads that daemon is shutting down.
+	#. Waits for threads to finish, but no more than 2 seconds total.
+	#. Sleeps for whatever remains of those 2 seconds so that ThreadedSegments
+	   have time to finish.
+ '''
+ total_wait_time = 2
+ shutdown_start_time = monotonic()
+
+ for s in chain((sock,), read_sockets, write_sockets):
+ s.close()
+
+ # Notify ThreadedSegments
+ state.ts_shutdown_event.set()
+ for thread, shutdown_event in state.started_wm_threads.values():
+ shutdown_event.set()
+
+ for thread, shutdown_event in state.started_wm_threads.values():
+ wait_time = total_wait_time - (monotonic() - shutdown_start_time)
+ if wait_time > 0:
+ thread.join(wait_time)
+
+	wait_time = total_wait_time - (monotonic() - shutdown_start_time)
+	if wait_time > 0:
+		sleep(wait_time)
+
+
+def main_loop(sock, is_daemon):
+ sock.listen(128)
+ sock.setblocking(0)
+
+ read_sockets, write_sockets = set(), set()
+ result_map = {}
+ parser = get_main_argparser(NonInteractiveArgParser)
+ state = State()
+ try:
+ try:
+ while True:
+ do_one(
+ sock, read_sockets, write_sockets, result_map,
+ is_daemon=is_daemon,
+ argparser=parser,
+ state=state,
+ )
+ except KeyboardInterrupt:
+ raise SystemExit(0)
+ except SystemExit as e:
+ shutdown(sock, read_sockets, write_sockets, state)
+ raise e
+ return 0
+
+
+def daemonize(stdin=os.devnull, stdout=os.devnull, stderr=os.devnull):
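+	# Standard double fork: detach from the controlling terminal, make sure the
+	# daemon cannot reacquire one, then redirect the standard streams.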
+ try:
+ pid = os.fork()
+ if pid > 0:
+ # exit first parent
+ raise SystemExit(0)
+ except OSError as e:
+ sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
+ raise SystemExit(1)
+
+ # decouple from parent environment
+ os.chdir("/")
+ os.setsid()
+ os.umask(0)
+
+ # do second fork
+ try:
+ pid = os.fork()
+ if pid > 0:
+ # exit from second parent
+ raise SystemExit(0)
+ except OSError as e:
+ sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
+ raise SystemExit(1)
+
+ # Redirect standard file descriptors.
+ si = open(stdin, 'rb')
+ so = open(stdout, 'a+b')
+ se = open(stderr, 'a+b', 0)
+ os.dup2(si.fileno(), sys.stdin.fileno())
+ os.dup2(so.fileno(), sys.stdout.fileno())
+ os.dup2(se.fileno(), sys.stderr.fileno())
+ return True
+
+
+def check_existing(address):
+ if USE_FILESYSTEM:
+		# We cannot bind if the socket file already exists, so remove it; we
+		# already hold a lock on the pidfile, so this should be safe.
+ try:
+ os.unlink(address)
+ except EnvironmentError:
+ pass
+
+ sock = socket.socket(family=socket.AF_UNIX)
+ try:
+ sock.bind(address)
+ except socket.error as e:
+ if getattr(e, 'errno', None) == errno.EADDRINUSE:
+ return None
+ raise
+ return sock
+
+
+def kill_daemon(address):
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ try:
+ try:
+ eintr_retry_call(sock.connect, address)
+ except socket.error:
+ return False
+ else:
+ eintr_retry_call(sock.sendall, EOF)
+ finally:
+ sock.close()
+ return True
+
+
+def cleanup_lockfile(pidfile, fd, *args):
+ try:
+ # Remove the directory entry for the lock file
+ os.unlink(pidfile)
+ # Close the file descriptor
+ os.close(fd)
+ except EnvironmentError:
+ pass
+ if args:
+ # Called in signal handler
+ raise SystemExit(1)
+
+
+def lockpidfile(pidfile):
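+	# Take a non-blocking exclusive lock on the pidfile and write our PID into
+	# it; if the lock is already held, another daemon is running, so give up.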
+ fd = os.open(
+ pidfile,
+ os.O_WRONLY | os.O_CREAT,
+ stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+ )
+ try:
+ fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except EnvironmentError:
+ os.close(fd)
+ return None
+ os.lseek(fd, 0, os.SEEK_SET)
+ os.ftruncate(fd, 0)
+ os.write(fd, ('%d' % os.getpid()).encode('ascii'))
+ os.fsync(fd)
+ cleanup = partial(cleanup_lockfile, pidfile, fd)
+ signal(SIGTERM, cleanup)
+ atexit.register(cleanup)
+ return fd
+
+
+def main():
+ parser = get_daemon_argparser()
+ args = parser.parse_args()
+ is_daemon = False
+ address = None
+ pidfile = None
+
+ if args.socket:
+ address = args.socket
+ if not USE_FILESYSTEM:
+ address = '\0' + address
+ else:
+ if USE_FILESYSTEM:
+ address = '/tmp/powerline-ipc-%d'
+ else:
+			# Use the abstract namespace for sockets rather than the filesystem
+			# (available only on Linux)
+ address = '\0powerline-ipc-%d'
+
+ address = address % os.getuid()
+
+ if USE_FILESYSTEM:
+ pidfile = address + '.pid'
+
+ if args.kill:
+ if args.foreground or args.replace:
+ parser.error('--kill and --foreground/--replace cannot be used together')
+ if kill_daemon(address):
+ if not args.quiet:
+				print('Kill command sent to daemon; if it does not die within a couple of seconds, use kill(1) to terminate it')
+ raise SystemExit(0)
+ else:
+ if not args.quiet:
+				print('No running daemon found')
+ raise SystemExit(1)
+
+ if args.replace:
+ while kill_daemon(address):
+ if not args.quiet:
+				print('Kill command sent to daemon; waiting for it to exit (press Ctrl-C to stop waiting and exit)')
+ sleep(2)
+
+ if USE_FILESYSTEM and not args.foreground:
+		# We must daemonize before creating the locked pidfile; unfortunately,
+		# this means that any further print output is discarded.
+ is_daemon = daemonize()
+
+ if USE_FILESYSTEM:
+ # Create a locked pid file containing the daemon’s PID
+ if lockpidfile(pidfile) is None:
+ if not args.quiet:
+ sys.stderr.write(
+ 'The daemon is already running. Use %s -k to kill it.\n' % (
+ os.path.basename(sys.argv[0])))
+ raise SystemExit(1)
+
+ # Bind to address or bail if we cannot bind
+ sock = check_existing(address)
+ if sock is None:
+ if not args.quiet:
+ sys.stderr.write(
+ 'The daemon is already running. Use %s -k to kill it.\n' % (
+ os.path.basename(sys.argv[0])))
+ raise SystemExit(1)
+
+ if not USE_FILESYSTEM and not args.foreground:
+		# On Linux we daemonize only after the socket has been bound successfully
+ is_daemon = daemonize()
+
+ return main_loop(sock, is_daemon)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/powerline-lint b/scripts/powerline-lint
new file mode 100755
index 0000000..f665ba1
--- /dev/null
+++ b/scripts/powerline-lint
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8:noet
+from __future__ import (unicode_literals, division, absolute_import, print_function)
+
+import sys
+
+from powerline.lint import check
+from powerline.commands.lint import get_argparser
+
+
+if __name__ == '__main__':
+ args = get_argparser().parse_args()
+ sys.exit(check(args.config_path, args.debug))
diff --git a/scripts/powerline-release.py b/scripts/powerline-release.py
new file mode 100755
index 0000000..42381bd
--- /dev/null
+++ b/scripts/powerline-release.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8:noet
+from __future__ import (unicode_literals, division, absolute_import, print_function)
+
+import argparse
+import codecs
+import os
+import re
+
+from subprocess import check_output, check_call, CalledProcessError
+from getpass import getpass
+
+from github import Github
+
+
+OVERLAY_NAME = 'raiagent'
+OVERLAY = 'leycec/' + OVERLAY_NAME
+OVERLAY_BRANCH_FORMAT = 'powerline-release-{0}'
+
+
+def parse_version(s):
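+	# A version consisting only of '+' characters bumps the latest git tag: the
+	# number of '+' characters selects which component to increment (see the
+	# --version help below). Anything else is parsed as a dotted version string.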
+ if s == ('+' * len(s)):
+ try:
+ last_version = next(iter(sorted([
+ tuple(map(int, tag.split('.')))
+				for tag in check_output(['git', 'tag', '-l', '[0-9]*.*']).decode('utf-8').split('\n')[:-1]
+ ], reverse=True)))
+ except StopIteration:
+ raise ValueError('No existing versions found')
+
+ version = []
+ for i in range(len(s) - 1):
+ try:
+ version.append(last_version[i])
+ except IndexError:
+ version.append(0)
+
+ try:
+ version.append(last_version[len(s) - 1] + 1)
+ except IndexError:
+ version.append(1)
+
+ if len(version) == 1:
+ version.append(0)
+
+ return tuple(version)
+ else:
+ return tuple(map(int, s.split('.')))
+
+
+def setup_py_filter(filter_func):
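+	# Rewrite setup.py line by line through filter_func and swap the new file
+	# into place.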
+ with codecs.open('.setup.py.new', 'w', encoding='utf-8') as NS:
+ with codecs.open('setup.py', 'r', encoding='utf-8') as OS:
+ for line in OS:
+ line = filter_func(line)
+ NS.write(line)
+
+ os.unlink('setup.py')
+ os.rename('.setup.py.new', 'setup.py')
+
+
+def setup_py_develop_filter(line, version_string):
+ if line.startswith('\tbase_version = '):
+ line = '\tbase_version = \'' + version_string + '\'\n'
+ return line
+
+
+def setup_py_master_filter(line, version_string):
+ if line.startswith('\tversion='):
+ line = '\tversion=\'' + version_string + '\',\n'
+ elif 'Development Status' in line:
+ line = '\t\t\'Development Status :: 5 - Production/Stable\',\n'
+ return line
+
+
+def merge(version_string, rev, **kwargs):
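+	# Bump the base version on a temporary release-<version> branch off `rev`,
+	# merge it back, then merge the result into master with the final version
+	# number and tag the release.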
+ check_call(['git', 'checkout', rev])
+
+ temp_branch_name = 'release-' + version_string
+ check_call(['git', 'checkout', '-b', temp_branch_name])
+ setup_py_filter(lambda line: setup_py_develop_filter(line, version_string))
+ check_call(['git', 'add', 'setup.py'])
+ check_call(['git', 'commit', '-m', 'Update base version'])
+ check_call(['git', 'checkout', rev])
+ check_call(['git', 'merge', '--no-ff',
+ '--strategy', 'recursive',
+ '--strategy-option', 'theirs',
+ '--commit',
+ '-m', 'Merge branch \'{0}\' into {1}'.format(temp_branch_name, rev),
+ temp_branch_name])
+ check_call(['git', 'branch', '-d', temp_branch_name])
+
+ rev = check_output(['git', 'rev-parse', 'HEAD']).strip()
+
+ check_call(['git', 'checkout', 'master'])
+ try:
+ check_call(['git', 'merge', '--no-ff', '--no-commit', '--log', rev])
+ except CalledProcessError:
+ check_call(['git', 'mergetool', '--tool', 'vimdiff2'])
+
+ setup_py_filter(lambda line: setup_py_master_filter(line, version_string))
+ check_call(['git', 'add', 'setup.py'])
+
+ check_call(['git', 'commit'])
+ check_call(['git', 'tag', '-m', 'Release ' + version_string, '-a', version_string])
+
+
+def push(version_string, rev, **kwargs):
+ check_call(['git', 'push', 'upstream', 'master'])
+ check_call(['git', 'push', 'upstream', version_string])
+ check_call(['git', 'push', 'upstream', rev])
+
+
+def upload(**args):
+ check_call(['python', 'setup.py', 'sdist', 'upload'])
+ check_call(['python', 'setup.py', 'upload_docs'])
+
+
+gh = None
+
+
+def get_gh(user, password):
+ global gh
+ if not gh:
+ gh = Github(user, password)
+ return gh
+
+
+def create_ebuilds(version_string, overlay, user, **kwargs):
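+	# Clone or update the overlay, create a release branch and derive versioned
+	# ebuilds from the live (-9999) ebuilds for each powerline package, then push
+	# the branch to the user's fork of the overlay.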
+ overlay_url = 'git://github.com/' + OVERLAY
+ if not os.path.isdir(overlay):
+ check_call(['git', 'clone', overlay_url, overlay])
+ check_call(['git', 'checkout', 'master'], cwd=overlay)
+ check_call(['git', 'pull', '--ff-only', overlay_url, 'master'], cwd=overlay)
+ branch = OVERLAY_BRANCH_FORMAT.format(version_string)
+ check_call(['git', 'branch', branch], cwd=overlay)
+ check_call(['git', 'checkout', branch], cwd=overlay)
+ os.environ['DISTDIR'] = '/tmp/powerline-distfiles'
+ if not os.path.isdir(os.environ['DISTDIR']):
+ os.mkdir(os.environ['DISTDIR'])
+ new_files = []
+ for category, pn in (
+ ('app-misc', 'powerline'),
+ ('app-vim', 'powerline-vim'),
+ ):
+ pdir = os.path.join(overlay, category, pn)
+ live_ebuild = None
+ for ebuild in os.listdir(pdir):
+ if ebuild.endswith('.ebuild') and '9999' in ebuild:
+ live_ebuild_base = ebuild
+ live_ebuild = os.path.join(pdir, ebuild)
+ break
+ assert(live_ebuild)
+ vcur = os.path.join(pdir, '{0}-{1}.ebuild'.format(pn, version_string))
+ if pn == 'powerline-vim':
+ with open(live_ebuild) as LEF:
+ with open(vcur, 'w') as F:
+ dropnext = False
+ for line in LEF:
+ if line.startswith('EGIT'):
+ # Drop all EGIT_… and the next empty line
+ dropnext = True
+ next_re = re.compile('^$')
+ continue
+ if dropnext:
+ assert(next_re.match(line))
+ dropnext = False
+ continue
+ if line.startswith('# Note the lack of an assignment to ${S}'):
+ next_re = re.compile('^#')
+ dropnext = True
+ line = 'S="${WORKDIR}/${MY_P}"\n'
+ if line.startswith('inherit'):
+ line = line.replace(' git-r3', '')
+ line += '\n'
+ line += 'MY_PN="powerline-status"\n'
+ line += 'MY_P="${MY_PN}-${PV}"'
+ line += '\n'
+ elif line.startswith('HOMEPAGE'):
+ line += 'SRC_URI="mirror://pypi/p/${MY_PN}/${MY_P}.tar.gz"\n'
+ elif line.startswith('KEYWORDS'):
+ line = 'KEYWORDS="~amd64 ~ppc ~x86 ~x86-fbsd"\n'
+ F.write(line)
+ else:
+ os.symlink(live_ebuild_base, vcur)
+ new_files.append(vcur)
+ check_call(['ebuild', vcur, 'manifest'])
+ new_files.append(os.path.join(pdir, 'Manifest'))
+ check_call(['git', 'add', '--'] + new_files, cwd=overlay)
+ check_call(['git', 'commit'] + new_files + ['-m', 'powerline*: Release {0}'.format(version_string)],
+ cwd=overlay)
+ check_call(['git', 'push', '-f', 'git@github.com:{0}/{1}'.format(user, OVERLAY_NAME), branch], cwd=overlay)
+
+
+def update_overlay(version_string, user, password, **kwargs):
+ gh = get_gh(user, password)
+ overlay = gh.get_repo(OVERLAY)
+ overlay.create_pull(
+ title='New powerline version: ' + version_string,
+ body='Add ebuilds for new powerline version\n\n---\n\nCreated automatically by release script',
+ base='master',
+ head=user + ':' + OVERLAY_BRANCH_FORMAT.format(version_string),
+ )
+
+
+stages = (
+ ('merge', merge),
+ ('push', push),
+ ('upload', upload),
+ ('create_ebuilds', create_ebuilds),
+ ('update_overlay', update_overlay),
+)
+
+
+def create_release(version, user, password=None, run_stages=None, **kwargs):
+ version_string = '.'.join((str(v) for v in version))
+ if not password:
+ password = getpass('Password for {0}: '.format(user))
+ for stname, stfunc in stages:
+ if run_stages is None or stname in run_stages:
+ stfunc(version_string=version_string, user=user, password=password, **kwargs)
+
+
+p = argparse.ArgumentParser(description='Powerline release script')
+p.add_argument('-u', '--user', type=str, metavar='USER', help='Github username.', required=True)
+p.add_argument('-v', '--version', type=parse_version, metavar='VERSION', help='Use given version for the release. If version contains only `+\' signs then it will increase latest version number: one `+\' increases major version number (e.g. 1.2.3 -> 2.0), `++\' increases minor version number (e.g. 1.2.3 -> 1.3), `+++\' increases patch level (e.g. 1.2.3 -> 1.2.4). Defaults to `+++\'.', default='+++')
+p.add_argument('-r', '--rev', metavar='COMMIT', help='Use given revision for the release. Defaults to `develop\'.', default='develop')
+p.add_argument('-s', '--stages', action='append', metavar='STAGE', help='Only run one of the given stages (default to all).', choices=tuple((stname for stname, stfunc in stages)))
+p.add_argument('-p', '--password', type=str, metavar='PASS', help='Github password. You will be prompted if it is not supplied.')
+p.add_argument('-o', '--overlay', type=str, metavar='PATH', help='Location of the local clone of the {0} overlay. If provided directory does not exist it will be created by “git clone”. Defaults to /tmp/powerline-{0}.'.format(OVERLAY_NAME), default='/tmp/powerline-' + OVERLAY_NAME)
+
+
+if __name__ == '__main__':
+ args = p.parse_args()
+ create_release(
+ version=args.version,
+ rev=args.rev,
+ user=args.user,
+ password=args.password,
+ overlay=args.overlay,
+ run_stages=args.stages,
+ )
diff --git a/scripts/powerline-render b/scripts/powerline-render
new file mode 100755
index 0000000..8b71b00
--- /dev/null
+++ b/scripts/powerline-render
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8:noet
+
+from __future__ import (unicode_literals, division, absolute_import, print_function)
+
+import sys
+import os
+
+try:
+ from powerline.shell import ShellPowerline
+except ImportError:
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
+ from powerline.shell import ShellPowerline
+
+from powerline.commands.main import get_argparser, finish_args, write_output
+from powerline.lib.encoding import get_unicode_writer
+
+
+if sys.version_info < (3,):
+ write = sys.stdout.write
+else:
+ write = sys.stdout.buffer.write
+
+
+if __name__ == '__main__':
+ parser = get_argparser()
+ args = parser.parse_args()
+ finish_args(parser, os.environ, args)
+ powerline = ShellPowerline(args, run_once=True)
+ segment_info = {'args': args, 'environ': os.environ}
+ write_output(args, powerline, segment_info, get_unicode_writer())