From e44ff1f2a8eaf9dd90b8fff78721e3006c9474b1 Mon Sep 17 00:00:00 2001
From: Adar Nimrod <nimrod@shore.co.il>
Date: Thu, 29 Jul 2021 12:46:26 +0300
Subject: [PATCH] Move a bunch of stuff to the workbench image.

- Move the Bash completion scripts for tools into the image (it makes
  sense to keep them alongside the tools themselves; see the sketch
  below).
- Remove the bfg wrapper script and its download directory (moved to
  the image).
- Remove the binaries Makefile and almost all of the vendored files;
  merge everything back into a single Makefile.
- Delete the Python requirements.txt; everything in it is installed in
  the image anyway.
- Nicer output in the Git post-merge hook.
- Remove the rabbitmqadmin script (moved to the image).
- Update the README.
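
A rough sketch of how the completions are expected to keep working.
This assumes the workbench image drops the scripts into
/etc/bash_completion.d (a path used here only for illustration, not
dictated by this repository); an interactive shell inside the image
then just sources whatever is there:

    # Hypothetical illustration: the actual path and loading mechanism
    # are decided by the workbench image, not by this repository.
    for f in /etc/bash_completion.d/*; do
        [ -r "$f" ] && . "$f"
    done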
---
 .bash_completion.d/aws                    |    1 -
 .bash_completion.d/docker-compose         |  670 ------------
 .bash_completion.d/docker-machine.bash    |  416 --------
 .bash_completion.d/fabric-completion.bash |  124 ---
 .bash_completion.d/gcloud                 |   82 --
 .bash_completion.d/molecule               |  178 ----
 .bash_completion.d/packer                 |    1 -
 .bash_completion.d/terraform              |    1 -
 .bash_completion.d/vault                  |    1 -
 .githooks/post-merge                      |   18 +-
 .local/share/bfg/.gitkeep                 |    0
 Documents/bin/bfg                         |   13 -
 Documents/bin/rabbitmqadmin               | 1184 ---------------------
 Makefile                                  |   96 +-
 README.md                                 |   27 +-
 binaries.mk                               |  146 ---
 generated.mk                              |  153 ---
 requirements.txt                          |   49 -
 vendored.mk                               |   37 -
 19 files changed, 113 insertions(+), 3084 deletions(-)
 delete mode 100644 .bash_completion.d/aws
 delete mode 100644 .bash_completion.d/docker-compose
 delete mode 100644 .bash_completion.d/docker-machine.bash
 delete mode 100644 .bash_completion.d/fabric-completion.bash
 delete mode 100644 .bash_completion.d/gcloud
 delete mode 100644 .bash_completion.d/molecule
 delete mode 100644 .bash_completion.d/packer
 delete mode 100644 .bash_completion.d/terraform
 delete mode 100644 .bash_completion.d/vault
 delete mode 100644 .local/share/bfg/.gitkeep
 delete mode 100755 Documents/bin/bfg
 delete mode 100755 Documents/bin/rabbitmqadmin
 delete mode 100644 binaries.mk
 delete mode 100644 generated.mk
 delete mode 100644 requirements.txt
 delete mode 100644 vendored.mk

diff --git a/.bash_completion.d/aws b/.bash_completion.d/aws
deleted file mode 100644
index 44d825d..0000000
--- a/.bash_completion.d/aws
+++ /dev/null
@@ -1 +0,0 @@
-complete -C 'aws_completer' aws
diff --git a/.bash_completion.d/docker-compose b/.bash_completion.d/docker-compose
deleted file mode 100644
index 2add0c9..0000000
--- a/.bash_completion.d/docker-compose
+++ /dev/null
@@ -1,670 +0,0 @@
-#!/bin/bash
-#
-# bash completion for docker-compose
-#
-# This work is based on the completion for the docker command.
-#
-# This script provides completion of:
-#  - commands and their options
-#  - service names
-#  - filepaths
-#
-# To enable the completions either:
-#  - place this file in /etc/bash_completion.d
-#  or
-#  - copy this file to e.g. ~/.docker-compose-completion.sh and add the line
-#    below to your .bashrc after bash completion features are loaded
-#    . ~/.docker-compose-completion.sh
-
-__docker_compose_previous_extglob_setting=$(shopt -p extglob)
-shopt -s extglob
-
-__docker_compose_q() {
-	docker-compose 2>/dev/null "${top_level_options[@]}" "$@"
-}
-
-# Transforms a multiline list of strings into a single line string
-# with the words separated by "|".
-__docker_compose_to_alternatives() {
-	local parts=( $1 )
-	local IFS='|'
-	echo "${parts[*]}"
-}
-
-# Transforms a multiline list of options into an extglob pattern
-# suitable for use in case statements.
-__docker_compose_to_extglob() {
-	local extglob=$( __docker_compose_to_alternatives "$1" )
-	echo "@($extglob)"
-}
-
-# Determines whether the option passed as the first argument exist on
-# the commandline. The option may be a pattern, e.g. `--force|-f`.
-__docker_compose_has_option() {
-	local pattern="$1"
-	for (( i=2; i < $cword; ++i)); do
-		if [[ ${words[$i]} =~ ^($pattern)$ ]] ; then
-			return 0
-		fi
-	done
-	return 1
-}
-
-# Returns `key` if we are currently completing the value of a map option (`key=value`)
-# which matches the extglob passed in as an argument.
-# This function is needed for key-specific completions.
-__docker_compose_map_key_of_current_option() {
-        local glob="$1"
-
-        local key glob_pos
-        if [ "$cur" = "=" ] ; then        # key= case
-                key="$prev"
-                glob_pos=$((cword - 2))
-        elif [[ $cur == *=* ]] ; then     # key=value case (OSX)
-                key=${cur%=*}
-                glob_pos=$((cword - 1))
-        elif [ "$prev" = "=" ] ; then
-                key=${words[$cword - 2]}  # key=value case
-                glob_pos=$((cword - 3))
-        else
-                return
-        fi
-
-        [ "${words[$glob_pos]}" = "=" ] && ((glob_pos--))  # --option=key=value syntax
-
-        [[ ${words[$glob_pos]} == @($glob) ]] && echo "$key"
-}
-
-# suppress trailing whitespace
-__docker_compose_nospace() {
-	# compopt is not available in ancient bash versions
-	type compopt &>/dev/null && compopt -o nospace
-}
-
-
-# Outputs a list of all defined services, regardless of their running state.
-# Arguments for `docker-compose ps` may be passed in order to filter the service list,
-# e.g. `status=running`.
-__docker_compose_services() {
-	__docker_compose_q ps --services "$@"
-}
-
-# Applies completion of services based on the current value of `$cur`.
-# Arguments for `docker-compose ps` may be passed in order to filter the service list,
-# see `__docker_compose_services`.
-__docker_compose_complete_services() {
-	COMPREPLY=( $(compgen -W "$(__docker_compose_services "$@")" -- "$cur") )
-}
-
-# The services for which at least one running container exists
-__docker_compose_complete_running_services() {
-	local names=$(__docker_compose_services --filter status=running)
-	COMPREPLY=( $(compgen -W "$names" -- "$cur") )
-}
-
-
-_docker_compose_build() {
-	case "$prev" in
-		--build-arg)
-			COMPREPLY=( $( compgen -e -- "$cur" ) )
-			__docker_compose_nospace
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory --no-cache --pull --parallel" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services --filter source=build
-			;;
-	esac
-}
-
-
-_docker_compose_bundle() {
-	case "$prev" in
-		--output|-o)
-			_filedir
-			return
-			;;
-	esac
-
-	COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) )
-}
-
-
-_docker_compose_config() {
-	case "$prev" in
-		--hash)
-			if [[ $cur == \\* ]] ; then
-				COMPREPLY=( '\*' )
-			else
-				COMPREPLY=( $(compgen -W "$(__docker_compose_services) \\\* " -- "$cur") )
-			fi
-			return
-			;;
-	esac
-
-	COMPREPLY=( $( compgen -W "--hash --help --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
-}
-
-
-_docker_compose_create() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--build --force-recreate --help --no-build --no-recreate" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_docker_compose() {
-	case "$prev" in
-		--tlscacert|--tlscert|--tlskey)
-			_filedir
-			return
-			;;
-		--file|-f)
-			_filedir "y?(a)ml"
-			return
-			;;
-		--log-level)
-			COMPREPLY=( $( compgen -W "debug info warning error critical" -- "$cur" ) )
-			return
-			;;
-		--project-directory)
-			_filedir -d
-			return
-			;;
-		$(__docker_compose_to_extglob "$daemon_options_with_args") )
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "$daemon_boolean_options $daemon_options_with_args $top_level_options_with_args --help -h --no-ansi --verbose --version -v" -- "$cur" ) )
-			;;
-		*)
-			COMPREPLY=( $( compgen -W "${commands[*]}" -- "$cur" ) )
-			;;
-	esac
-}
-
-
-_docker_compose_down() {
-	case "$prev" in
-		--rmi)
-			COMPREPLY=( $( compgen -W "all local" -- "$cur" ) )
-			return
-			;;
-		--timeout|-t)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --rmi --timeout -t --volumes -v --remove-orphans" -- "$cur" ) )
-			;;
-	esac
-}
-
-
-_docker_compose_events() {
-	case "$prev" in
-		--json)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --json" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_exec() {
-	case "$prev" in
-		--index|--user|-u|--workdir|-w)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u --workdir -w" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_help() {
-	COMPREPLY=( $( compgen -W "${commands[*]}" -- "$cur" ) )
-}
-
-_docker_compose_images() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --quiet -q" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-_docker_compose_kill() {
-	case "$prev" in
-		-s)
-			COMPREPLY=( $( compgen -W "SIGHUP SIGINT SIGKILL SIGUSR1 SIGUSR2" -- "$(echo $cur | tr '[:lower:]' '[:upper:]')" ) )
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help -s" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_logs() {
-	case "$prev" in
-		--tail)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_pause() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_port() {
-	case "$prev" in
-		--protocol)
-			COMPREPLY=( $( compgen -W "tcp udp" -- "$cur" ) )
-			return;
-			;;
-		--index)
-			return;
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --index --protocol" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_ps() {
-	local key=$(__docker_compose_map_key_of_current_option '--filter')
-	case "$key" in
-		source)
-			COMPREPLY=( $( compgen -W "build image" -- "${cur##*=}" ) )
-			return
-			;;
-		status)
-			COMPREPLY=( $( compgen -W "paused restarting running stopped" -- "${cur##*=}" ) )
-			return
-			;;
-	esac
-
-	case "$prev" in
-		--filter)
-			COMPREPLY=( $( compgen -W "source status" -S "=" -- "$cur" ) )
-			__docker_compose_nospace
-			return;
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--all -a --filter --help --quiet -q --services" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_pull() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --no-parallel --quiet -q" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services --filter source=image
-			;;
-	esac
-}
-
-
-_docker_compose_push() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --ignore-push-failures" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_restart() {
-	case "$prev" in
-		--timeout|-t)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_rm() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--force -f --help --stop -s -v" -- "$cur" ) )
-			;;
-		*)
-			if __docker_compose_has_option "--stop|-s" ; then
-				__docker_compose_complete_services
-			else
-				__docker_compose_complete_services --filter status=stopped
-			fi
-			;;
-	esac
-}
-
-
-_docker_compose_run() {
-	case "$prev" in
-		-e)
-			COMPREPLY=( $( compgen -e -- "$cur" ) )
-			__docker_compose_nospace
-			return
-			;;
-		--entrypoint|--label|-l|--name|--user|-u|--volume|-v|--workdir|-w)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--detach -d --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --use-aliases --user -u --volume -v --workdir -w" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_scale() {
-	case "$prev" in
-		=)
-			COMPREPLY=("$cur")
-			return
-			;;
-		--timeout|-t)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
-			;;
-		*)
-			COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
-			__docker_compose_nospace
-			;;
-	esac
-}
-
-
-_docker_compose_start() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services --filter status=stopped
-			;;
-	esac
-}
-
-
-_docker_compose_stop() {
-	case "$prev" in
-		--timeout|-t)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_top() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_running_services
-			;;
-	esac
-}
-
-
-_docker_compose_unpause() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services --filter status=paused
-			;;
-	esac
-}
-
-
-_docker_compose_up() {
-	case "$prev" in
-		=)
-			COMPREPLY=("$cur")
-			return
-			;;
-		--exit-code-from)
-			__docker_compose_complete_services
-			return
-			;;
-		--scale)
-			COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
-			__docker_compose_nospace
-			return
-			;;
-		--timeout|-t)
-			return
-			;;
-	esac
-
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
-			;;
-		*)
-			__docker_compose_complete_services
-			;;
-	esac
-}
-
-
-_docker_compose_version() {
-	case "$cur" in
-		-*)
-			COMPREPLY=( $( compgen -W "--short" -- "$cur" ) )
-			;;
-	esac
-}
-
-
-_docker_compose() {
-	local previous_extglob_setting=$(shopt -p extglob)
-	shopt -s extglob
-
-	local commands=(
-		build
-		bundle
-		config
-		create
-		down
-		events
-		exec
-		help
-		images
-		kill
-		logs
-		pause
-		port
-		ps
-		pull
-		push
-		restart
-		rm
-		run
-		scale
-		start
-		stop
-		top
-		unpause
-		up
-		version
-	)
-
-	# Options for the docker daemon that have to be passed to secondary calls to
-	# docker-compose executed by this script.
-	local daemon_boolean_options="
-		--skip-hostname-check
-		--tls
-		--tlsverify
-	"
-	local daemon_options_with_args="
-		--file -f
-		--host -H
-		--project-directory
-		--project-name -p
-		--tlscacert
-		--tlscert
-		--tlskey
-	"
-
-	# These options are require special treatment when searching the command.
-	local top_level_options_with_args="
-		--log-level
-	"
-
-	COMPREPLY=()
-	local cur prev words cword
-	_get_comp_words_by_ref -n : cur prev words cword
-
-	# search subcommand and invoke its handler.
-	# special treatment of some top-level options
-	local command='docker_compose'
-	local top_level_options=()
-	local counter=1
-
-	while [ $counter -lt $cword ]; do
-		case "${words[$counter]}" in
-			$(__docker_compose_to_extglob "$daemon_boolean_options") )
-				local opt=${words[counter]}
-				top_level_options+=($opt)
-				;;
-			$(__docker_compose_to_extglob "$daemon_options_with_args") )
-				local opt=${words[counter]}
-				local arg=${words[++counter]}
-				top_level_options+=($opt $arg)
-				;;
-			$(__docker_compose_to_extglob "$top_level_options_with_args") )
-				(( counter++ ))
-				;;
-			-*)
-				;;
-			*)
-				command="${words[$counter]}"
-				break
-				;;
-		esac
-		(( counter++ ))
-	done
-
-	local completions_func=_docker_compose_${command//-/_}
-	declare -F $completions_func >/dev/null && $completions_func
-
-	eval "$previous_extglob_setting"
-	return 0
-}
-
-eval "$__docker_compose_previous_extglob_setting"
-unset __docker_compose_previous_extglob_setting
-
-complete -F _docker_compose docker-compose docker-compose.exe
diff --git a/.bash_completion.d/docker-machine.bash b/.bash_completion.d/docker-machine.bash
deleted file mode 100644
index 07d39d1..0000000
--- a/.bash_completion.d/docker-machine.bash
+++ /dev/null
@@ -1,416 +0,0 @@
-#
-# bash completion file for docker-machine commands
-#
-# This script provides completion of:
-#  - commands and their options
-#  - machine names
-#  - filepaths
-#
-# To enable the completions either:
-#  - place this file in /etc/bash_completion.d
-#  or
-#  - copy this file to e.g. ~/.docker-machine-completion.sh and add the line
-#    below to your .bashrc after bash completion features are loaded
-#    . ~/.docker-machine-completion.sh
-#
-
-# --- helper functions -------------------------------------------------------
-
-_docker_machine_q() {
-    docker-machine 2>/dev/null "$@"
-}
-
-# suppresses trailing whitespace
-_docker_machine_nospace() {
-    # compopt is not available in ancient bash versions (OSX)
-    # so only call it if it's available
-    type compopt &>/dev/null && compopt -o nospace
-}
-
-_docker_machine_machines() {
-    _docker_machine_q ls --format '{{.Name}}' "$@"
-}
-
-_docker_machine_drivers() {
-    local drivers=(
-        amazonec2
-        azure
-        digitalocean
-        exoscale
-        generic
-        google
-        hyperv
-        openstack
-        rackspace
-        softlayer
-        virtualbox
-        vmwarefusion
-        vmwarevcloudair
-        vmwarevsphere
-    )
-    echo "${drivers[@]}"
-}
-
-_docker_machine_value_of_option() {
-    local pattern="$1"
-    for (( i=2; i < ${cword}; ++i)); do
-        if [[ ${words[$i]} =~ ^($pattern)$ ]] ; then
-            echo ${words[$i + 1]}
-            break
-        fi
-    done
-}
-
-# Returns `key` if we are currently completing the value of a map option
-# (`key=value`) which matches the glob passed in as an argument.
-# This function is needed for key-specific argument completions.
-_docker_machine_map_key_of_current_option() {
-    local glob="$1"
-
-    local key glob_pos
-    if [ "$cur" = "=" ] ; then        # key= case
-            key="$prev"
-            glob_pos=$((cword - 2))
-    elif [[ $cur == *=* ]] ; then     # key=value case (OSX)
-            key=${cur%=*}
-            glob_pos=$((cword - 1))
-    elif [ "$prev" = "=" ] ; then
-            key=${words[$cword - 2]}  # key=value case
-            glob_pos=$((cword - 3))
-    else
-            return
-    fi
-
-    [ "${words[$glob_pos]}" = "=" ] && ((glob_pos--))  # --option=key=value syntax
-
-    [[ ${words[$glob_pos]} == $glob ]] && echo "$key"
-}
-
-# Finds the position of the first word that is neither option nor an option's argument.
-# If there are options that require arguments, you need to pass a glob describing
-# those options, e.g. "--option1|-o|--option2".
-# Use this function to restrict completions to exact positions after the options.
-_docker_machine_pos_first_nonflag() {
-    local argument_flags=$1
-
-    local counter=$((${subcommand_pos:-${command_pos}} + 1))
-    while [ "$counter" -le "$cword" ]; do
-       if [ -n "$argument_flags" ] && eval "case '${words[$counter]}' in $argument_flags) true ;; *) false ;; esac"; then
-          (( counter++ ))
-          # eat "=" in case of --option=arg syntax
-          [ "${words[$counter]}" = "=" ] && (( counter++ ))
-       else
-          case "${words[$counter]}" in
-             -*)
-                 ;;
-             *)
-                 break
-                 ;;
-          esac
-       fi
-
-       # Bash splits words at "=", retaining "=" as a word, examples:
-       # "--debug=false" => 3 words, "--log-opt syslog-facility=daemon" => 4 words
-       while [ "${words[$counter + 1]}" = "=" ] ; do
-               counter=$(( counter + 2))
-       done
-
-       (( counter++ ))
-    done
-
-    echo $counter
-}
-# --- completion functions ---------------------------------------------------
-
-_docker_machine_active() {
-    case "${prev}" in
-        --timeout|-t)
-            return
-            ;;
-    esac
-
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help --timeout -t" -- "${cur}"))
-    fi
-}
-
-_docker_machine_config() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help --swarm" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_create() {
-    case "${prev}" in
-        --driver|-d)
-            COMPREPLY=($(compgen -W "$(_docker_machine_drivers)" -- "${cur}"))
-            return
-            ;;
-    esac
-
-    # driver specific options are only included in help output if --driver is given,
-    # so we have to pass that option when calling docker-machine to harvest options.
-    local driver="$(_docker_machine_value_of_option '--driver|-d')"
-    local parsed_options="$(_docker_machine_q create ${driver:+--driver $driver} --help | grep '^   -' | sed 's/^   //; s/[^a-z0-9-].*$//')"
-    if [[ ${cur} == -* ]]; then
-        COMPREPLY=($(compgen -W "${parsed_options} -d --help" -- "${cur}"))
-    fi
-}
-
-_docker_machine_env() {
-    case "${prev}" in
-        --shell)
-            COMPREPLY=($(compgen -W "cmd emacs fish powershell tcsh" -- "${cur}"))
-            return
-            ;;
-    esac
-
-    if [[ "${cur}" == -* ]]; then
-	COMPREPLY=($(compgen -W "--help --no-proxy --shell --swarm --unset -u" -- "${cur}"))
-    else
-	COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-# See docker-machine-wrapper.bash for the use command
-_docker_machine_use() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help --swarm --unset" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_inspect() {
-    case "${prev}" in
-        --format|-f)
-            return
-            ;;
-    esac
-
-    if [[ "${cur}" == -* ]]; then
-	COMPREPLY=($(compgen -W "--format -f --help" -- "${cur}"))
-    else
-	COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_ip() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_kill() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_ls() {
-    local key=$(_docker_machine_map_key_of_current_option '--filter')
-    case "$key" in
-        driver)
-            COMPREPLY=($(compgen -W "$(_docker_machine_drivers)" -- "${cur##*=}"))
-            return
-            ;;
-        state)
-            COMPREPLY=($(compgen -W "Error Paused Running Saved Starting Stopped Stopping" -- "${cur##*=}"))
-            return
-            ;;
-    esac
-
-    case "${prev}" in
-        --filter)
-            COMPREPLY=($(compgen -W "driver label name state swarm" -S= -- "${cur}"))
-            _docker_machine_nospace
-            return
-            ;;
-        --format|-f|--timeout|-t)
-            return
-            ;;
-    esac
-
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--filter --format -f --help --quiet -q --timeout -t" -- "${cur}"))
-    fi
-}
-
-_docker_machine_mount() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help --unmount -u" -- "${cur}"))
-    else
-        local pos=$(_docker_machine_pos_first_nonflag)
-        if [ "$cword" -eq "$pos" ]; then
-            # We can't complete remote filesystems. All we can do here is to complete the machine.
-            COMPREPLY=($(compgen -W "$(_docker_machine_machines --filter state=Running)" -S: -- "${cur}"))
-            _docker_machine_nospace
-        elif [ "$cword" -eq "$((pos + 1))" ]; then
-           _filedir -d
-        fi
-    fi
-}
-
-_docker_machine_provision() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines --filter state=Running)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_regenerate_certs() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--client-certs --force -f --help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines --filter state=Running)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_restart() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_rm() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--force -f --help -y" -- "${cur}"))
-    else
-	COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_ssh() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_scp() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--delta -d --help --quiet -q --recursive -r" -- "${cur}"))
-    else
-        _filedir
-        # It would be really nice to ssh to the machine and ls to complete
-        # remote files.
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines | sed 's/$/:/')" -- "${cur}") "${COMPREPLY[@]}")
-    fi
-}
-
-_docker_machine_start() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines --filter state=Stopped)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_status() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_stop() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines --filter state=Running)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_upgrade() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_url() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_version() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "$(_docker_machine_machines)" -- "${cur}"))
-    fi
-}
-
-_docker_machine_help() {
-    if [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "--help" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "${commands[*]}" -- "${cur}"))
-    fi
-}
-
-_docker_machine_docker_machine() {
-    if [[ " ${wants_file[*]} " =~ " ${prev} " ]]; then
-        _filedir
-    elif [[ " ${wants_dir[*]} " =~ " ${prev} " ]]; then
-        _filedir -d
-    elif [[ "${cur}" == -* ]]; then
-        COMPREPLY=($(compgen -W "${flags[*]} ${wants_dir[*]} ${wants_file[*]}" -- "${cur}"))
-    else
-        COMPREPLY=($(compgen -W "${commands[*]}" -- "${cur}"))
-    fi
-}
-
-_docker_machine() {
-    COMPREPLY=()
-    local commands=(active config create env inspect ip kill ls mount provision regenerate-certs restart rm ssh scp start status stop upgrade url version help)
-
-    local flags=(--debug --native-ssh --github-api-token --bugsnag-api-token --help --version)
-    local wants_dir=(--storage-path)
-    local wants_file=(--tls-ca-cert --tls-ca-key --tls-client-cert --tls-client-key)
-
-    # Add the use subcommand, if we have an alias loaded
-    if [[ ${DOCKER_MACHINE_WRAPPED} = true ]]; then
-        commands=("${commands[@]}" use)
-    fi
-
-    local cur prev words cword
-    _get_comp_words_by_ref -n : cur prev words cword
-    local i
-    local command=docker-machine command_pos=0
-
-    for (( i=1; i < ${cword}; ++i)); do
-        local word=${words[i]}
-        if [[ " ${wants_file[*]} ${wants_dir[*]} " =~ " ${word} " ]]; then
-            # skip the next option
-            (( ++i ))
-        elif [[ " ${commands[*]} " =~ " ${word} " ]]; then
-            command=${word}
-            command_pos=$i
-        fi
-    done
-
-    local completion_func=_docker_machine_"${command//-/_}"
-    if declare -F "${completion_func}" > /dev/null; then
-        ${completion_func}
-    fi
-
-    return 0
-}
-
-complete -F _docker_machine docker-machine docker-machine.exe
diff --git a/.bash_completion.d/fabric-completion.bash b/.bash_completion.d/fabric-completion.bash
deleted file mode 100644
index 7a62fdf..0000000
--- a/.bash_completion.d/fabric-completion.bash
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin/env bash
-#
-# Bash completion support for Fabric (http://fabfile.org/)
-#
-#
-# Copyright (C) 2011 by Konstantin Bakulin
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-#
-# Thanks to:
-# - Adam Vandenberg,
-#   https://github.com/adamv/dotfiles/blob/master/completion_scripts/fab_completion.bash
-#
-# - Enrico Batista da Luz,
-#   https://github.com/ricobl/dotfiles/blob/master/bin/fab_bash_completion
-#
-
-
-# Use cache files for fab tasks or not.
-# If set to "false" command "fab --shortlist" will be executed every time.
-export FAB_COMPLETION_CACHE_TASKS=true
-
-# File name where tasks cache will be stored (in current dir).
-export FAB_COMPLETION_CACHED_TASKS_FILENAME=".fab_tasks~"
-
-
-# Set command to get time of last file modification as seconds since Epoch
-case $(uname) in
-    Darwin|FreeBSD)
-        __FAB_COMPLETION_MTIME_COMMAND="/usr/bin/stat -f '%m'"
-        ;;
-    *)
-        __FAB_COMPLETION_MTIME_COMMAND="stat -c '%Y'"
-        ;;
-esac
-
-
-#
-# Get time of last fab cache file modification as seconds since Epoch
-#
-function __fab_chache_mtime() {
-    $__FAB_COMPLETION_MTIME_COMMAND \
-        $FAB_COMPLETION_CACHED_TASKS_FILENAME | xargs -n 1 expr
-}
-
-
-#
-# Get time of last fabfile file/module modification as seconds since Epoch
-#
-function __fab_fabfile_mtime() {
-    local f="fabfile"
-    if [[ -e "${f}.py" ]]; then
-        $__FAB_COMPLETION_MTIME_COMMAND "${f}.py" | xargs -n 1 expr
-    else
-        # Suppose that it's a fabfile dir
-        find $f/*.py -exec $__FAB_COMPLETION_MTIME_COMMAND {} + \
-            | xargs -n 1 expr | sort -nr | head -1
-    fi
-}
-
-
-#
-# Completion for "fab" command
-#
-function __fab_completion() {
-    # Return if "fab" command doesn't exists
-    [[ -e $(which fab 2> /dev/null) ]] || return 0
-
-    # Variables to hold the current word and possible matches
-    local cur="${COMP_WORDS[COMP_CWORD]}"
-    local opts=()
-
-    # Generate possible matches and store them in variable "opts"
-    case "$cur" in
-        -*)
-            if [[ -z "$__FAB_COMPLETION_LONG_OPT" ]]; then
-                export __FAB_COMPLETION_LONG_OPT=$(
-                    fab --help | egrep -o "\-\-[A-Za-z_\-]+\=?" | sort -u)
-            fi
-            opts="$__FAB_COMPLETION_LONG_OPT"
-            ;;
-
-        *)
-            # If "fabfile.py" or "fabfile" dir with "__init__.py" file exists
-            local f="fabfile"
-            if [[ -e "${f}.py" || (-d "$f" && -e "$f/__init__.py") ]]; then
-                # Build a list of the available tasks
-                if $FAB_COMPLETION_CACHE_TASKS; then
-                    # If use cache
-                    if [[ ! -s $FAB_COMPLETION_CACHED_TASKS_FILENAME ||
-                          $(__fab_fabfile_mtime) -gt $(__fab_chache_mtime) ]]
-                    then
-                        fab --shortlist > $FAB_COMPLETION_CACHED_TASKS_FILENAME \
-                            2> /dev/null
-                    fi
-                    opts=$(cat $FAB_COMPLETION_CACHED_TASKS_FILENAME)
-                else
-                    # Without cache
-                    opts=$(fab --shortlist 2> /dev/null)
-                fi
-            fi
-            ;;
-    esac
-
-    # Set possible completions
-    COMPREPLY=($(compgen -W "$opts" -- $cur))
-}
-complete -o default -o nospace -F __fab_completion fab
diff --git a/.bash_completion.d/gcloud b/.bash_completion.d/gcloud
deleted file mode 100644
index bdf232a..0000000
--- a/.bash_completion.d/gcloud
+++ /dev/null
@@ -1,82 +0,0 @@
-_python_argcomplete() {
-    local IFS=''
-    local prefix=
-    typeset -i n
-    (( lastw=${#COMP_WORDS[@]} -1))
-    if [[ ${COMP_WORDS[lastw]} == --*=* ]]; then
-        # for bash version 3.2
-        flag=${COMP_WORDS[lastw]%%=*}
-        set -- "$1" "$2" '='
-    elif [[ $3 == '=' ]]; then
-      flag=${COMP_WORDS[-3]}
-    fi
-    if [[ $3 == ssh  && $2 == *@* ]] ;then
-        # handle ssh user@instance specially
-        prefix=${2%@*}@
-        COMP_LINE=${COMP_LINE%$2}"${2#*@}"
-    elif [[ $3 == '=' ]] ; then
-        # handle --flag=value
-        prefix=$flag=$2
-        line=${COMP_LINE%$prefix};
-        COMP_LINE=$line${prefix/=/ };
-        prefix=
-    fi
-    if [[ $2 == *,* ]]; then
-          # handle , separated list
-          prefix=${2%,*},
-          set -- "$1" "${2#$prefix}" "$3"
-          COMP_LINE==${COMP_LINE%$prefix*}$2
-    fi
-    # Treat --flag=<TAB> as --flag <TAB> to work around bash 4.x bug
-    if [[ ${COMP_LINE} == *=  && ${COMP_WORDS[-2]} == --* ]]; then
-        COMP_LINE=${COMP_LINE%=}' '
-    fi
-    COMPREPLY=( $(IFS="$IFS"                   COMP_LINE="$COMP_LINE"                   COMP_POINT="$COMP_POINT"                   _ARGCOMPLETE_COMP_WORDBREAKS="$COMP_WORDBREAKS"                   _ARGCOMPLETE=1                   "$1" 8>&1 9>&2 1>/dev/null 2>/dev/null) )
-    if [[ $? != 0 ]]; then
-        unset COMPREPLY
-        return
-    fi
-    if [[ $prefix != '' ]]; then
-        for ((n=0; n < ${#COMPREPLY[@]}; n++)); do
-            COMPREPLY[$n]=$prefix${COMPREPLY[$n]}
-        done
-    fi
-    for ((n=0; n < ${#COMPREPLY[@]}; n++)); do
-        match=${COMPREPLY[$n]%' '}
-        if [[ $match != '' ]]; then
-            COMPREPLY[$n]=${match//? /' '}' '
-        fi
-    done
-    # if flags argument has a single completion and ends in  '= ', delete ' '
-    if [[ ${#COMPREPLY[@]} == 1 && ${COMPREPLY[0]} == -* &&
-          ${COMPREPLY[0]} == *'= ' ]]; then
-        COMPREPLY[0]=${COMPREPLY[0]%' '}
-    fi
-}
-complete -o nospace -F _python_argcomplete "gcloud"
-
-_completer() {
-    command=$1
-    name=$2
-    eval '[[ "$'"${name}"'_COMMANDS" ]] || '"${name}"'_COMMANDS="$('"${command}"')"'
-    set -- $COMP_LINE
-    shift
-    while [[ $1 == -* ]]; do
-          shift
-    done
-    [[ $2 ]] && return
-    grep -q "${name}\s*$" <<< $COMP_LINE &&
-        eval 'COMPREPLY=($'"${name}"'_COMMANDS)' &&
-        return
-    [[ "$COMP_LINE" == *" " ]] && return
-    [[ $1 ]] &&
-        eval 'COMPREPLY=($(echo "$'"${name}"'_COMMANDS" | grep ^'"$1"'))'
-}
-
-unset bq_COMMANDS
-_bq_completer() {
-    _completer "CLOUDSDK_COMPONENT_MANAGER_DISABLE_UPDATE_CHECK=1 bq help | grep '^[^ ][^ ]*  ' | sed 's/ .*//'" bq
-}
-
-complete -F _bq_completer bq
-complete -o nospace -F _python_argcomplete gsutil
diff --git a/.bash_completion.d/molecule b/.bash_completion.d/molecule
deleted file mode 100644
index 4f32049..0000000
--- a/.bash_completion.d/molecule
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/bin/bash
-
-# Credits:
-# https://blog.heckel.xyz/2015/03/24/bash-completion-with-sub-commands-and-dynamic-options/
-# https://raw.githubusercontent.com/syncany/syncany/develop/gradle/bash/syncany.bash-completion
-
-shopt -s progcomp
-
-_platforms(){
-  molecule status --porcelain --platforms | cut -d' ' -f1 2>/dev/null
-}
-
-_providers(){
-  molecule status --porcelain --providers | cut -d' ' -f1 2>/dev/null
-}
-
-_hosts(){
-  molecule status --porcelain --hosts | cut -d' ' -f1 2>/dev/null
-}
-
-_molecule(){
-  local cur prev firstword lastword complete_words complete_options
-  cur=${COMP_WORDS[COMP_CWORD]}
-	prev=${COMP_WORDS[COMP_CWORD-1]}
-	firstword=$(_get_firstword)
-
-  GLOBAL_COMMANDS="syntax check create converge dependency destroy idempotence init list login status test verify"
-  GLOBAL_OPTIONS="-h -v"
-  SYNTAX_OPTIONS=""
-  CHECK_OPTIONS=""
-  CREATE_OPTIONS="--debug --platform --provider --tags"
-  CONVERGE_OPTIONS="--debug --platform --provider --tags"
-  DEPENDENCY_OPTIONS=""
-  DESTROY_OPTIONS="--debug --platform --provider --tags"
-  IDEMPOTENCE_OPTIONS="--debug --platform --provider --tags"
-  INIT_OPTIONS="--docker"
-  LIST_OPTIONS="--debug -m"
-  LOGIN_OPTIONS=""
-  STATUS_OPTIONS="--debug --hosts --platforms --porcelain --providers"
-  TEST_OPTIONS="--debug --platform --provider --tags --sudo"
-  VERIFY_OPTIONS="--debug --platform --provider --tags --sudo"
-
-  # Un-comment this for debug purposes:
-  # echo -e "\nprev = $prev, cur = $cur, firstword = $firstword.\n"
-
-  case "${firstword}" in
-    check)
-      complete_options="${CHECK_OPTIONS}"
-      ;;
-    create)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${CREATE_OPTIONS}"
-          ;;
-      esac
-      ;;
-    converge)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${CONVERGE_OPTIONS}"
-          ;;
-      esac
-      ;;
-    dependency)
-      complete_options="${DEPENDENCY_OPTIONS}"
-      ;;
-    destroy)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${DESTROY_OPTIONS}"
-          ;;
-      esac
-      ;;
-    idempotence)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${IDEMPOTENCE_OPTIONS}"
-          ;;
-      esac
-      ;;
-    init)
-      complete_options="${INIT_OPTIONS}"
-      ;;
-    list)
-      complete_options="${LIST_OPTIONS}"
-      ;;
-    login)
-      complete_options="${LOGIN_OPTIONS}"
-      complete_words=$(_hosts)
-      ;;
-    status)
-      complete_options="${STATUS_OPTIONS}"
-      ;;
-    syntax)
-      complete_options="${SYNTAX_OPTIONS}"
-      ;;
-    test)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${TEST_OPTIONS}"
-          ;;
-      esac
-      ;;
-    verify)
-      case "${prev}" in
-        --platform)
-          complete_words=$(_platforms)
-          ;;
-        --provider)
-          complete_words=$(_providers)
-          ;;
-        *)
-          complete_options="${VERIFY_OPTIONS}"
-          ;;
-      esac
-      ;;
-    *)
-  		complete_words="${GLOBAL_COMMANDS}"
-  		complete_options="${GLOBAL_OPTIONS}"
-  		;;
-  esac
-
-  # Either display words or options, depending on the user input
-	if [[ ${cur} == -* ]]; then
-		COMPREPLY=( $( compgen -W "${complete_options}" -- ${cur} ))
-	else
-		COMPREPLY=( $( compgen -W "${complete_words}" -- ${cur} ))
-	fi
-
-	return 0
-}
-
-# Determines the first non-option word of the command line. This is usually the command.
-_get_firstword() {
-	local firstword i
-
-	firstword=
-	for ((i = 1; i < ${#COMP_WORDS[@]}; ++i)); do
-		if [[ ${COMP_WORDS[i]} != -* ]]; then
-			firstword=${COMP_WORDS[i]}
-			break
-		fi
-	done
-
-	echo $firstword
-}
-
-complete -F _molecule molecule
diff --git a/.bash_completion.d/packer b/.bash_completion.d/packer
deleted file mode 100644
index bf0d9f4..0000000
--- a/.bash_completion.d/packer
+++ /dev/null
@@ -1 +0,0 @@
-complete -C packer packer
diff --git a/.bash_completion.d/terraform b/.bash_completion.d/terraform
deleted file mode 100644
index 64ea17f..0000000
--- a/.bash_completion.d/terraform
+++ /dev/null
@@ -1 +0,0 @@
-complete -C terraform terraform
diff --git a/.bash_completion.d/vault b/.bash_completion.d/vault
deleted file mode 100644
index 76b631f..0000000
--- a/.bash_completion.d/vault
+++ /dev/null
@@ -1 +0,0 @@
-complete -C vault vault
diff --git a/.githooks/post-merge b/.githooks/post-merge
index 395c0d3..c606042 100755
--- a/.githooks/post-merge
+++ b/.githooks/post-merge
@@ -1,18 +1,20 @@
 #!/bin/sh
 set -eux
+
+notify () { printf '\n\e[1;94m=== %s ===\e[0m\n\n' "$1" >&2; }
+
 cd "$(git rev-parse --show-toplevel)"
-echo Downloading binary and vendored files, creating generated files >&2
-git clean -fX .bash_completion.d/
-make generated vendored
-echo Installing Git hooks >&2
+notify 'Generating configuration and private files'
+make
+notify 'Installing Git hooks'
 Documents/bin/install-git-hooks
-echo Loading dconf config >&2
+notify 'Loading dconf config'
 Documents/bin/dconf-load
-echo Configuring Git repo >&2
+notify 'Configuring Git repo'
 git config --local status.showUntrackedFiles no
 if command -v vagrant > /dev/null
 then
-	echo Installing Vagrant plugins >&2
+	notify 'Installing Vagrant plugins'
     for plugin in landrush vagrant-gatling-rsync vagrant-disksize
     do
         vagrant plugin list | grep -qw "$plugin" || vagrant plugin install "$plugin" || true
@@ -20,7 +22,7 @@ then
 fi
 if command -v helm > /dev/null
 then
-    echo Installing Helm plugins >&2
+    notify 'Installing Helm plugins'
     eval "$(grep 'export HELM_HOME' .bashrc)"
     helm plugin list | { ! grep --silent '^diff'; } || helm plugin remove diff
     helm plugin install https://github.com/databus23/helm-diff --version master
diff --git a/.local/share/bfg/.gitkeep b/.local/share/bfg/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/Documents/bin/bfg b/Documents/bin/bfg
deleted file mode 100755
index cd65bed..0000000
--- a/Documents/bin/bfg
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-set -eu
-
-if [ -f /usr/local/share/bfg/bfg.jar ]
-then
-    exec java -jar /usr/local/share/bfg/bfg.jar "$@"
-elif [ -f "${XDG_DATA_HOME:-$HOME/.local/share}/bfg/bfg.jar" ]
-then
-    exec java "${XDG_DATA_HOME:-$HOME/.local/share}/bfg/bfg.jar" "$@"
-else
-    echo "Can't find the BFG jar, exiting." >&2
-    exit 1
-fi
diff --git a/Documents/bin/rabbitmqadmin b/Documents/bin/rabbitmqadmin
deleted file mode 100755
index c98a4f7..0000000
--- a/Documents/bin/rabbitmqadmin
+++ /dev/null
@@ -1,1184 +0,0 @@
-#!/usr/bin/env python3
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-#
-# Copyright (c) 2007-2021 VMware, Inc. or its affiliates.  All rights reserved.
-
-from __future__ import print_function
-
-from optparse import OptionParser, TitledHelpFormatter
-
-import base64
-import copy
-import json
-import os
-import socket
-import ssl
-import traceback
-
-try:
-    from signal import signal, SIGPIPE, SIG_DFL
-    signal(SIGPIPE, SIG_DFL)
-except ImportError:
-    pass
-
-import sys
-
-
-def eprint(*args, **kwargs):
-    print(*args, file=sys.stderr, **kwargs)
-
-
-if sys.version_info[0] < 2 or (sys.version_info[0] == 2 and sys.version_info[1] < 6):
-    eprint("Sorry, rabbitmqadmin requires at least Python 2.6 (2.7.9 when HTTPS is enabled).")
-    sys.exit(1)
-
-if sys.version_info[0] == 2:
-    from ConfigParser import ConfigParser, NoSectionError
-    import httplib
-    import urlparse
-    from urllib import quote_plus
-    from urllib import quote
-
-    def b64(s):
-        return base64.b64encode(s)
-else:
-    from configparser import ConfigParser, NoSectionError
-    import http.client as httplib
-    import urllib.parse as urlparse
-    from urllib.parse import quote_plus
-    from urllib.parse import quote
-
-    def b64(s):
-        return base64.b64encode(s.encode('utf-8')).decode('utf-8')
-
-if sys.version_info[0] == 2:
-    class ConnectionError(OSError):
-        pass
-
-    class ConnectionRefusedError(ConnectionError):
-        pass
-
-VERSION = '%%VSN%%'
-
-LISTABLE = {'connections': {'vhost': False, 'cols': ['name', 'user', 'channels']},
-            'channels':    {'vhost': False, 'cols': ['name', 'user']},
-            'consumers':   {'vhost': True},
-            'exchanges':   {'vhost': True,  'cols': ['name', 'type']},
-            'queues':      {'vhost': True,  'cols': ['name', 'messages']},
-            'bindings':    {'vhost': True,  'cols': ['source', 'destination',
-                                                     'routing_key']},
-            'users':       {'vhost': False},
-            'vhosts':      {'vhost': False, 'cols': ['name', 'messages']},
-            'permissions': {'vhost': False},
-            'nodes':       {'vhost': False, 'cols': ['name', 'type', 'mem_used']},
-            'parameters':  {'vhost': False, 'json': ['value']},
-            'policies':    {'vhost': False, 'json': ['definition']},
-            'operator_policies': {'vhost': False, 'json': ['definition']},
-            'vhost_limits': {'vhost': False, 'json': ['value']}}
-
-SHOWABLE = {'overview': {'vhost': False, 'cols': ['rabbitmq_version',
-                                                  'cluster_name',
-                                                  'queue_totals.messages',
-                                                  'object_totals.queues']}}
-
-PROMOTE_COLUMNS = ['vhost', 'name', 'type',
-                   'source', 'destination', 'destination_type', 'routing_key']
-
-URIS = {
-    'exchange':    '/exchanges/{vhost}/{name}',
-    'queue':       '/queues/{vhost}/{name}',
-    'binding':     '/bindings/{vhost}/e/{source}/{destination_char}/{destination}',
-    'binding_del': '/bindings/{vhost}/e/{source}/{destination_char}/{destination}/{properties_key}',
-    'vhost':       '/vhosts/{name}',
-    'user':        '/users/{name}',
-    'permission':  '/permissions/{vhost}/{user}',
-    'parameter':   '/parameters/{component}/{vhost}/{name}',
-    'policy':      '/policies/{vhost}/{name}',
-    'operator_policy': '/operator-policies/{vhost}/{name}',
-    'vhost_limit': '/vhost-limits/{vhost}/{name}'
-    }
-
-
-def queue_upload_fixup(upload):
-    # rabbitmq/rabbitmq-management#761
-    #
-    # In general, the fixup_upload argument can be used to fixup/change the
-    # upload dict after all argument parsing is complete.
-    #
-    # This simplifies setting the queue type for a new queue by allowing the
-    # user to use a queue_type=quorum argument rather than the somewhat confusing
-    # arguments='{"x-queue-type":"quorum"}' parameter
-    #
-    if 'queue_type' in upload:
-        queue_type = upload.get('queue_type')
-        arguments = upload.get('arguments', {})
-        arguments['x-queue-type'] = queue_type
-        upload['arguments'] = arguments
-
-
-DECLARABLE = {
-    'exchange':   {'mandatory': ['name', 'type'],
-                   'json':      ['arguments'],
-                   'optional':  {'auto_delete': 'false', 'durable': 'true',
-                                 'internal': 'false', 'arguments': {}}},
-    'queue':      {'mandatory': ['name'],
-                   'json':      ['arguments'],
-                   'optional':  {'auto_delete': 'false', 'durable': 'true',
-                                 'arguments': {}, 'node': None, 'queue_type': None},
-                   'fixup_upload': queue_upload_fixup},
-    'binding':    {'mandatory': ['source', 'destination'],
-                   'json':      ['arguments'],
-                   'optional':  {'destination_type': 'queue',
-                                 'routing_key': '', 'arguments': {}}},
-    'vhost':      {'mandatory': ['name'],
-                   'optional':  {'tracing': None}},
-    'user':       {'mandatory': ['name', ['password', 'password_hash'], 'tags'],
-                   'optional':  {'hashing_algorithm': None}},
-    'permission': {'mandatory': ['vhost', 'user', 'configure', 'write', 'read'],
-                   'optional':  {}},
-    'parameter':  {'mandatory': ['component', 'name', 'value'],
-                   'json':      ['value'],
-                   'optional':  {}},
-    # priority has to be converted to an integer
-    'policy':     {'mandatory': ['name', 'pattern', 'definition'],
-                   'json':      ['definition', 'priority'],
-                   'optional':  {'priority': 0, 'apply-to': None}},
-    'operator_policy': {'mandatory': ['name', 'pattern', 'definition'],
-                        'json':      ['definition', 'priority'],
-                        'optional':  {'priority': 0, 'apply-to': None}},
-    'vhost_limit': {'mandatory': ['vhost', 'name', 'value'],
-                    'json': ['value'],
-                    'optional': {}},
-    }
-
-DELETABLE = {
-    'exchange':   {'mandatory': ['name']},
-    'queue':      {'mandatory': ['name']},
-    'binding':    {'mandatory': ['source', 'destination_type', 'destination'],
-                   'optional': {'properties_key': '~'}},
-    'vhost':      {'mandatory': ['name']},
-    'user':       {'mandatory': ['name']},
-    'permission': {'mandatory': ['vhost', 'user']},
-    'parameter':  {'mandatory': ['component', 'name']},
-    'policy':     {'mandatory': ['name']},
-    'operator_policy': {'mandatory': ['name']},
-    'vhost_limit': {'mandatory': ['vhost', 'name']}
-    }
-
-CLOSABLE = {
-    'connection': {'mandatory': ['name'],
-                   'optional':  {},
-                   'uri':       '/connections/{name}'}
-    }
-
-PURGABLE = {
-    'queue': {'mandatory': ['name'],
-              'optional':  {},
-              'uri':       '/queues/{vhost}/{name}/contents'}
-    }
-
-EXTRA_VERBS = {
-    'publish': {'mandatory': ['routing_key'],
-                'optional':  {'payload': None,
-                              'properties': {},
-                              'exchange': 'amq.default',
-                              'payload_encoding': 'string'},
-                'json':      ['properties'],
-                'uri':       '/exchanges/{vhost}/{exchange}/publish'},
-    'get':     {'mandatory': ['queue'],
-                'optional':  {'count': '1', 'ackmode': 'ack_requeue_true',
-                              'payload_file': None, 'encoding': 'auto'},
-                'uri':       '/queues/{vhost}/{queue}/get'}
-}
-
-for k in DECLARABLE:
-    DECLARABLE[k]['uri'] = URIS[k]
-
-for k in DELETABLE:
-    DELETABLE[k]['uri'] = URIS[k]
-    DELETABLE[k]['optional'] = DELETABLE[k].get('optional', {})
-DELETABLE['binding']['uri'] = URIS['binding_del']
-
-
-def short_usage():
-    return "rabbitmqadmin [options] subcommand"
-
-
-def title(name):
-    return "\n%s\n%s\n\n" % (name, '=' * len(name))
-
-
-def subcommands_usage():
-    usage = """Usage
-=====
-  """ + short_usage() + """
-
-  where subcommand is one of:
-""" + title("Display")
-
-    for l in LISTABLE:
-        usage += "  list {0} [<column>...]\n".format(l)
-    for s in SHOWABLE:
-        usage += "  show {0} [<column>...]\n".format(s)
-    usage += title("Object Manipulation")
-    usage += fmt_usage_stanza(DECLARABLE,  'declare')
-    usage += fmt_usage_stanza(DELETABLE,   'delete')
-    usage += fmt_usage_stanza(CLOSABLE,    'close')
-    usage += fmt_usage_stanza(PURGABLE,    'purge')
-    usage += title("Broker Definitions")
-    usage += """  export <file>
-  import <file>
-"""
-    usage += title("Publishing and Consuming")
-    usage += fmt_usage_stanza(EXTRA_VERBS, '')
-    usage += """
-  * If payload is not specified on publish, standard input is used
-
-  * If payload_file is not specified on get, the payload will be shown on
-    standard output along with the message metadata
-
-  * If payload_file is specified on get, count must not be set
-"""
-    return usage
-
-
-def config_usage():
-    usage = "Usage\n=====\n" + short_usage()
-    usage += "\n" + title("Configuration File")
-    usage += """  It is possible to specify a configuration file from the command line.
-  Hosts can be configured easily in a configuration file and called
-  from the command line.
-"""
-    usage += title("Example")
-    usage += """  # rabbitmqadmin.conf.example START
-
-  [host_normal]
-  hostname = localhost
-  port = 15672
-  username = guest
-  password = guest
-  declare_vhost = / # Used as default for declare / delete only
-  vhost = /         # Used as default for declare / delete / list
-
-  [host_ssl]
-  hostname = otherhost
-  port = 15672
-  username = guest
-  password = guest
-  ssl = True
-  ssl_key_file = /path/to/key.pem
-  ssl_cert_file = /path/to/cert.pem
-
-  # rabbitmqadmin.conf.example END
-"""
-    usage += title("Use")
-    usage += """  rabbitmqadmin -c rabbitmqadmin.conf.example -N host_normal ..."""
-    return usage
-
-
-def more_help():
-    return """
-More Help
-=========
-
-For more help use the help subcommand:
-
-  rabbitmqadmin help subcommands  # For a list of available subcommands
-  rabbitmqadmin help config       # For help with the configuration file
-"""
-
-
-def fmt_required_flag(val):
-    # when one of the options is required, e.g.
-    # password vs. password_hash
-    if type(val) is list:
-        # flag1=... OR flag2=... OR flag3=...
-        return "=... OR ".join(val)
-    else:
-        return val
-
-
-def fmt_optional_flag(val):
-    return val
-
-
-def fmt_usage_stanza(root, verb):
-    def fmt_args(args):
-        res = " ".join(["{0}=...".format(fmt_required_flag(a)) for a in args['mandatory']])
-        opts = " ".join("{0}=...".format(fmt_optional_flag(o)) for o in args['optional'].keys())
-        if opts != "":
-            res += " [{0}]".format(opts)
-        return res
-
-    text = ""
-    if verb != "":
-        verb = " " + verb
-    for k in root.keys():
-        text += " {0} {1} {2}\n".format(verb, k, fmt_args(root[k]))
-    return text
-
-
-default_options = {"hostname": "localhost",
-                   "port": "15672",
-                   # default config file section name
-                   "node": "default",
-                   "path_prefix": "",
-                   "declare_vhost": "/",
-                   "username": "guest",
-                   "password": "guest",
-                   "ssl": False,
-                   "request_timeout": 120,
-                   "verbose": True,
-                   "format": "table",
-                   "depth": 1,
-                   "bash_completion": False}
-
-
-class MyFormatter(TitledHelpFormatter):
-    def format_epilog(self, epilog):
-        return epilog
-
-
-parser = OptionParser(usage=short_usage(),
-                      formatter=MyFormatter(),
-                      epilog=more_help())
-
-
-def make_parser():
-    def add(*args, **kwargs):
-        key = kwargs['dest']
-        if key in default_options:
-            default = " [default: %s]" % default_options[key]
-            kwargs['help'] = kwargs['help'] + default
-        parser.add_option(*args, **kwargs)
-
-    add("-c", "--config", dest="config",
-        help="configuration file [default: ~/.rabbitmqadmin.conf]",
-        metavar="CONFIG")
-    add("-N", "--node", dest="node",
-        help="node described in the configuration file [default: 'default' only if configuration file is specified]",
-        metavar="NODE")
-    add("-H", "--host", dest="hostname",
-        help="connect to host HOST",
-        metavar="HOST")
-    add("-P", "--port", dest="port",
-        help="connect to port PORT",
-        metavar="PORT")
-    add("--path-prefix", dest="path_prefix",
-        help="use specific URI path prefix for the RabbitMQ HTTP API. /api and operation path will be appended to it. (default: blank string)")
-    add("-V", "--vhost", dest="vhost",
-        help="connect to vhost VHOST [default: all vhosts for list, '/' for declare]",
-        metavar="VHOST")
-    add("-u", "--username", dest="username",
-        help="connect using username USERNAME",
-        metavar="USERNAME")
-    add("-p", "--password", dest="password",
-        help="connect using password PASSWORD",
-        metavar="PASSWORD")
-    add("-U", "--base-uri", dest="base_uri",
-        help="connect using a base HTTP API URI. /api and operation path will be appended to it. Path will be ignored. --vhost has to be provided separately.",
-        metavar="URI")
-    add("-q", "--quiet", action="store_false", dest="verbose",
-        help="suppress status messages")
-    add("-s", "--ssl", action="store_true", dest="ssl",
-        help="connect with ssl")
-    add("--ssl-key-file", dest="ssl_key_file",
-        help="PEM format key file for SSL")
-    add("--ssl-cert-file", dest="ssl_cert_file",
-        help="PEM format certificate file for SSL")
-    add("--ssl-ca-cert-file", dest="ssl_ca_cert_file",
-        help="PEM format CA certificate file for SSL")
-    add("--ssl-disable-hostname-verification", dest="ssl_disable_hostname_verification",
-        help="Disables peer hostname verification", default=False, action="store_true")
-    add("-k", "--ssl-insecure", dest="ssl_insecure",
-        help="Disables all SSL validations like curl's '-k' argument", default=False, action="store_true")
-    add("-t", "--request-timeout", dest="request_timeout",
-        help="HTTP request timeout in seconds", type="int")
-    add("-f", "--format", dest="format",
-        help="format for listing commands - one of [" + ", ".join(FORMATS.keys()) + "]")
-    add("-S", "--sort", dest="sort", help="sort key for listing queries")
-    add("-R", "--sort-reverse", action="store_true", dest="sort_reverse",
-        help="reverse the sort order")
-    add("-d", "--depth", dest="depth",
-        help="maximum depth to recurse for listing tables")
-    add("--bash-completion", action="store_true",
-        dest="bash_completion",
-        help="Print bash completion script")
-    add("--version", action="store_true",
-        dest="version",
-        help="Display version and exit")
-
-
-def default_config():
-    home = os.getenv('USERPROFILE') or os.getenv('HOME')
-    if home is not None:
-        config_file = home + os.sep + ".rabbitmqadmin.conf"
-        if os.path.isfile(config_file):
-            return config_file
-    return None
-
-
-def make_configuration():
-    make_parser()
-    (cli_options, args) = parser.parse_args()
-
-    if cli_options.version:
-        print_version()
-
-    setattr(cli_options, "declare_vhost", None)
-    final_options = copy.copy(cli_options)
-
-    # Resolve config file path
-    if cli_options.config is None:
-        config_file = default_config()
-        if config_file is not None:
-            setattr(final_options, "config", config_file)
-    else:
-        if not os.path.isfile(cli_options.config):
-            assert_usage(False, "Could not read config file '%s'" % cli_options.config)
-
-    final_options = merge_default_options(cli_options, final_options)
-    final_options = merge_config_file_options(cli_options, final_options)
-    final_options = expand_base_uri_options(cli_options, final_options)
-
-    return (final_options, args)
-
-def merge_default_options(cli_options, final_options):
-    for (key, default_val) in default_options.items():
-        if getattr(cli_options, key) is None:
-            setattr(final_options, key, default_val)
-    return final_options
-
-def merge_config_file_options(cli_options, final_options):
-    # Parse config file and load it, making sure that CLI flags
-    # take precedence
-    if final_options.config is not None:
-        config_parser = ConfigParser()
-        try:
-            config_parser.read(final_options.config)
-            section_settings = dict(config_parser.items(final_options.node))
-        except NoSectionError as error:
-            # Report if an explicitly provided section (node) does not exist in the file
-            if final_options.node == "default":
-                pass
-            else:
-                msg = "Could not read section '%s' in config file '%s':\n   %s" % (final_options.node, final_options.config, error)
-                assert_usage(False, msg)
-        else:
-            for key, section_val in section_settings.items():
-                # special case --ssl
-                if key == 'ssl':
-                    setattr(final_options, key, section_val == "True")
-                else:
-                    # if CLI options do not contain this key, set it from the config file
-                    if getattr(cli_options, key) is None:
-                        setattr(final_options, key, section_val)
-    return final_options
-
-def expand_base_uri_options(cli_options, final_options):
-    # if --base-uri is passed, set connection parameters from it
-    if final_options.base_uri is not None:
-        u = urlparse.urlparse(final_options.base_uri)
-        for key in ["hostname", "port", "username", "password"]:
-            if getattr(u, key) is not None:
-                setattr(final_options, key, getattr(u, key))
-
-        if u.path is not None and (u.path != "") and (u.path != "/"):
-            eprint("WARNING: path in --base-uri is ignored. Please specify --vhost and/or --path-prefix separately.\n")
-    return final_options
-
-def assert_usage(expr, error):
-    if not expr:
-        eprint("\nERROR: {0}\n".format(error))
-        eprint("{0} --help for help\n".format(os.path.basename(sys.argv[0])))
-        sys.exit(1)
-
-
-def print_version():
-    print("rabbitmqadmin {0}".format(VERSION))
-    sys.exit(0)
-
-
-def column_sort_key(col):
-    if col in PROMOTE_COLUMNS:
-        return (1, PROMOTE_COLUMNS.index(col))
-    else:
-        return (2, col)
-
-
-def main():
-    (options, args) = make_configuration()
-    if options.bash_completion:
-        print_bash_completion()
-        sys.exit(0)
-    assert_usage(len(args) > 0, 'Action not specified')
-    mgmt = Management(options, args[1:])
-    mode = "invoke_" + args[0]
-    assert_usage(hasattr(mgmt, mode),
-                 'Action {0} not understood'.format(args[0]))
-    method = getattr(mgmt, "invoke_%s" % args[0])
-    method()
-
-
-def die(s):
-    eprint("*** {0}\n".format(s))
-    sys.exit(1)
-
-
-def maybe_utf8(s):
-    if isinstance(s, int):
-        # s can be also an int for ex messages count
-        return str(s)
-    if isinstance(s, float):
-        # s can be also a float for message rate
-        return str(s)
-    if sys.version_info[0] == 3:
-        # It will have an encoding, which Python will respect
-        return s
-    else:
-        # It won't have an encoding, and Python will pick ASCII by default
-        return s.encode('utf-8')
-
-
-class Management:
-    def __init__(self, options, args):
-        self.options = options
-        self.args = args
-
-    def get(self, path):
-        return self.http("GET", "%s/api%s" % (self.options.path_prefix, path), "")
-
-    def put(self, path, body):
-        return self.http("PUT", "%s/api%s" % (self.options.path_prefix, path), body)
-
-    def post(self, path, body):
-        return self.http("POST", "%s/api%s" % (self.options.path_prefix, path), body)
-
-    def delete(self, path):
-        return self.http("DELETE", "%s/api%s" % (self.options.path_prefix, path), "")
-
-    def __initialize_connection(self, hostname, port):
-        if self.options.ssl:
-            return self.__initialize_https_connection(hostname, port)
-        else:
-            return httplib.HTTPConnection(hostname, port, timeout=self.options.request_timeout)
-
-    def __initialize_https_connection(self, hostname, port):
-        # Python 2.7.9+
-        if hasattr(ssl, 'create_default_context'):
-            return httplib.HTTPSConnection(hostname, port, context=self.__initialize_tls_context())
-        # Python < 2.7.8, note: those versions still have SSLv3 enabled
-        #                       and other limitations. See rabbitmq/rabbitmq-management#225
-        else:
-            eprint("WARNING: rabbitmqadmin requires Python 2.7.9+ when HTTPS is used.")
-            return httplib.HTTPSConnection(hostname, port,
-                                           cert_file=self.options.ssl_cert_file,
-                                           key_file=self.options.ssl_key_file)
-
-    def __initialize_tls_context(self):
-        # Python 2.7.9+ only
-        ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
-        ssl_ctx.options &= ~ssl.OP_NO_SSLv3
-
-        ssl_insecure = self.options.ssl_insecure
-        ssl_disable_hostname_verification = ssl_insecure or self.options.ssl_disable_hostname_verification
-        # Note: you must set check_hostname prior to verify_mode
-        if ssl_disable_hostname_verification:
-            ssl_ctx.check_hostname = False
-        if ssl_insecure:
-            ssl_ctx.verify_mode = ssl.CERT_NONE
-
-        if self.options.ssl_key_file:
-            ssl_ctx.load_cert_chain(self.options.ssl_cert_file,
-                                    self.options.ssl_key_file)
-        if self.options.ssl_ca_cert_file:
-            ssl_ctx.load_verify_locations(self.options.ssl_ca_cert_file)
-        return ssl_ctx
-
-    def http(self, method, path, body):
-        conn = self.__initialize_connection(self.options.hostname, self.options.port)
-        auth = (self.options.username + ":" + self.options.password)
-
-        headers = {"Authorization": "Basic " + b64(auth)}
-        if body != "":
-            headers["Content-Type"] = "application/json"
-        try:
-            conn.request(method, path, body, headers)
-        except ConnectionRefusedError as e:
-            die("Could not connect: {0}".format(e))
-        except socket.error as e:
-            traceback.print_exc()
-            die("Could not connect: {0}".format(e))
-        try:
-            resp = conn.getresponse()
-        except socket.timeout:
-            die("Timed out getting HTTP response (request timeout: {0} seconds)".format(
-                self.options.request_timeout))
-        except (KeyboardInterrupt, SystemExit):
-            raise
-        except (Exception):
-            e_fmt = traceback.format_exc()
-            die("Error getting HTTP response:\n\n{0}".format(e_fmt))
-        if resp.status == 400:
-            die(json.loads(resp.read())['reason'])
-        if resp.status == 401:
-            die("Access refused: {0}".format(path))
-        if resp.status == 404:
-            die("Not found: {0}".format(path))
-        if resp.status == 301:
-            url = urlparse.urlparse(resp.getheader('location'))
-            [host, port] = url.netloc.split(':')
-            self.options.hostname = host
-            self.options.port = int(port)
-            return self.http(method, url.path + '?' + url.query, body)
-        if resp.status > 400:
-            raise Exception("Received response %d %s for path %s\n%s"
-                            % (resp.status, resp.reason, path, resp.read()))
-        return resp.read().decode('utf-8')
-
-    def verbose(self, string):
-        if self.options.verbose:
-            print(string)
-
-    def get_arg(self):
-        assert_usage(len(self.args) == 1, 'Exactly one argument required')
-        return self.args[0]
-
-    def use_cols(self):
-        # Deliberately do not cast to int here; we only care about the
-        # default, not explicit setting.
-        return self.options.depth == 1 and ('json' not in self.options.format)
-
-    def invoke_help(self):
-        if len(self.args) == 0:
-            parser.print_help()
-        else:
-            help_cmd = self.get_arg()
-            if help_cmd == 'subcommands':
-                usage = subcommands_usage()
-            elif help_cmd == 'config':
-                usage = config_usage()
-            else:
-                assert_usage(False, """help topic must be one of:
-  subcommands
-  config""")
-            print(usage)
-        sys.exit(0)
-
-    def invoke_publish(self):
-        (uri, upload) = self.parse_args(self.args, EXTRA_VERBS['publish'])
-        if 'payload' not in upload:
-            data = sys.stdin.read()
-            upload['payload'] = b64(data)
-            upload['payload_encoding'] = 'base64'
-        resp = json.loads(self.post(uri, json.dumps(upload)))
-        if resp['routed']:
-            self.verbose("Message published")
-        else:
-            self.verbose("Message published but NOT routed")
-
-    def invoke_get(self):
-        (uri, upload) = self.parse_args(self.args, EXTRA_VERBS['get'])
-        payload_file = 'payload_file' in upload and upload['payload_file'] or None
-        assert_usage(not payload_file or upload['count'] == '1',
-                     'Cannot get multiple messages using payload_file')
-        result = self.post(uri, json.dumps(upload))
-        if payload_file:
-            write_payload_file(payload_file, result)
-            columns = ['routing_key', 'exchange', 'message_count',
-                       'payload_bytes', 'redelivered']
-            format_list(result, columns, {}, self.options)
-        else:
-            format_list(result, [], {}, self.options)
-
-    def invoke_export(self):
-        path = self.get_arg()
-        uri = "/definitions"
-        if self.options.vhost:
-            uri += "/%s" % quote_plus(self.options.vhost)
-        definitions = self.get(uri)
-        with open(path, 'wb') as f:
-            f.write(definitions.encode())
-        self.verbose("Exported definitions for %s to \"%s\""
-                     % (self.options.hostname, path))
-
-    def invoke_import(self):
-        path = self.get_arg()
-        with open(path, 'rb') as f:
-            definitions = f.read()
-        uri = "/definitions"
-        if self.options.vhost:
-            uri += "/%s" % quote_plus(self.options.vhost)
-        self.post(uri, definitions)
-        self.verbose("Uploaded definitions from \"%s\" to %s. The import process may take some time. Consult server logs to track progress."
-                     % (self.options.hostname, path))
-
-    def invoke_list(self):
-        (uri, obj_info, cols) = self.list_show_uri(LISTABLE, 'list')
-        format_list(self.get(uri), cols, obj_info, self.options)
-
-    def invoke_show(self):
-        (uri, obj_info, cols) = self.list_show_uri(SHOWABLE, 'show')
-        format_list('[{0}]'.format(self.get(uri)), cols, obj_info, self.options)
-
-    def _list_path_for_obj_type(self, obj_type):
-        # This returns a URL path for given object type, e.g.
-        # replaces underscores in command names with
-        # dashes that HTTP API endpoints use
-        return obj_type.replace("_", "-")
-
-    def list_show_uri(self, obj_types, verb):
-        obj_type = self.args[0]
-        assert_usage(obj_type in obj_types,
-                     "Don't know how to {0} {1}".format(verb, obj_type))
-        obj_info = obj_types[obj_type]
-        uri = "/%s" % self._list_path_for_obj_type(obj_type)
-        query = []
-        if obj_info['vhost'] and self.options.vhost:
-            uri += "/%s" % quote_plus(self.options.vhost)
-        cols = self.args[1:]
-        if cols == [] and 'cols' in obj_info and self.use_cols():
-            cols = obj_info['cols']
-        if cols != []:
-            query.append("columns=" + ",".join(cols))
-        sort = self.options.sort
-        if sort:
-            query.append("sort=" + sort)
-        if self.options.sort_reverse:
-            query.append("sort_reverse=true")
-        query = "&".join(query)
-        if query != "":
-            uri += "?" + query
-        return (uri, obj_info, cols)
-
-    def invoke_declare(self):
-        (obj_type, uri, upload) = self.declare_delete_parse(DECLARABLE)
-        if obj_type == 'binding':
-            self.post(uri, json.dumps(upload))
-        else:
-            self.put(uri, json.dumps(upload))
-        self.verbose("{0} declared".format(obj_type))
-
-    def invoke_delete(self):
-        (obj_type, uri, upload) = self.declare_delete_parse(DELETABLE)
-        self.delete(uri)
-        self.verbose("{0} deleted".format(obj_type))
-
-    def invoke_close(self):
-        (obj_type, uri, upload) = self.declare_delete_parse(CLOSABLE)
-        self.delete(uri)
-        self.verbose("{0} closed".format(obj_type))
-
-    def invoke_purge(self):
-        (obj_type, uri, upload) = self.declare_delete_parse(PURGABLE)
-        self.delete(uri)
-        self.verbose("{0} purged".format(obj_type))
-
-    def declare_delete_parse(self, root):
-        assert_usage(len(self.args) > 0, 'Type not specified')
-        obj_type = self.args[0]
-        assert_usage(obj_type in root,
-                     'Type {0} not recognised'.format(obj_type))
-        obj = root[obj_type]
-        (uri, upload) = self.parse_args(self.args[1:], obj)
-        return (obj_type, uri, upload)
-
-    def assert_mandatory_keys(self, mandatory, upload):
-        for m in mandatory:
-            if type(m) is list:
-                a_set = set(m)
-                b_set = set(upload.keys())
-                assert_usage((a_set & b_set),
-                             'one of mandatory arguments "{0}" is required'.format(m))
-            else:
-                assert_usage(m in upload.keys(),
-                             'mandatory argument "{0}" is required'.format(m))
-
-    def parse_args(self, args, obj):
-        mandatory = obj['mandatory']
-        optional = obj['optional']
-        uri_template = obj['uri']
-        upload = {}
-        for k in optional.keys():
-            if optional[k] is not None:
-                upload[k] = optional[k]
-        for arg in args:
-            assert_usage("=" in arg,
-                         'Argument "{0}" not in the name=value format'.format(arg))
-            (name, value) = arg.split("=", 1)
-            # flatten the list of mandatory keys
-            mandatory_keys = []
-            for key in mandatory:
-                if type(key) is list:
-                    for subkey in key:
-                        mandatory_keys.append(subkey)
-                else:
-                    mandatory_keys.append(key)
-
-            assert_usage(name in mandatory_keys or name in optional.keys(),
-                         'Argument "{0}" is not recognised'.format(name))
-
-            if 'json' in obj and name in obj['json']:
-                upload[name] = self.parse_json(value)
-            else:
-                upload[name] = value
-        self.assert_mandatory_keys(mandatory, upload)
-        if 'vhost' not in mandatory:
-            upload['vhost'] = self.options.vhost or self.options.declare_vhost
-        uri_args = {}
-        for k in upload:
-            v = upload[k]
-            if v and isinstance(v, (str, bytes)):
-                uri_args[k] = quote(v, '')
-                if k == 'destination_type':
-                    uri_args['destination_char'] = v[0]
-        uri = uri_template.format(**uri_args)
-        if 'fixup_upload' in obj:
-            fixup = obj['fixup_upload']
-            fixup(upload)
-        return (uri, upload)
-
-    def parse_json(self, text):
-        try:
-            return json.loads(text)
-        except ValueError:
-            eprint("ERROR: Could not parse JSON:\n  {0}".format(text))
-            sys.exit(1)
-
-
-def format_list(json_list, columns, args, options):
-    format = options.format
-    formatter = None
-    if format == "raw_json":
-        print(json_list)
-        return
-    elif format == "pretty_json":
-        json_list_parsed = json.loads(json_list)
-        print(json.dumps(json_list_parsed,
-            skipkeys=False, ensure_ascii=False, check_circular=True,
-            allow_nan=True, sort_keys=True, indent=2))
-        return
-    else:
-        formatter = FORMATS[format]
-    assert_usage(formatter is not None,
-                 "Format {0} not recognised".format(format))
-    formatter_instance = formatter(columns, args, options)
-    formatter_instance.display(json_list)
-
-
-class Lister:
-    def verbose(self, string):
-        if self.options.verbose:
-            print(string)
-
-    def display(self, json_list):
-        depth = sys.maxsize
-        if len(self.columns) == 0:
-            depth = int(self.options.depth)
-        (columns, table) = self.list_to_table(json.loads(json_list), depth)
-        if len(table) > 0:
-            self.display_list(columns, table)
-        else:
-            self.verbose("No items")
-
-    def list_to_table(self, items, max_depth):
-        columns = {}
-        column_ix = {}
-        row = None
-        table = []
-
-        def add(prefix, depth, item, fun):
-            for key in item:
-                column = prefix == '' and key or (prefix + '.' + key)
-                subitem = item[key]
-                if type(subitem) == dict:
-                    if 'json' in self.obj_info and key in self.obj_info['json']:
-                        fun(column, json.dumps(subitem))
-                    else:
-                        if depth < max_depth:
-                            add(column, depth + 1, subitem, fun)
-                elif type(subitem) == list:
-                    # The first branch has mirrors in queues in
-                    # mind (which come out looking decent); the second
-                    # one has applications in nodes (which look less
-                    # so, but what would look good?).
-                    if [x for x in subitem if type(x) != str] == []:
-                        serialised = " ".join(subitem)
-                    else:
-                        serialised = json.dumps(subitem)
-                    fun(column, serialised)
-                else:
-                    fun(column, subitem)
-
-        def add_to_columns(col, val):
-            columns[col] = True
-
-        def add_to_row(col, val):
-            if col in column_ix:
-                if val is not None:
-                    row[column_ix[col]] = maybe_utf8(val)
-                else:
-                    row[column_ix[col]] = None
-
-        if len(self.columns) == 0:
-            for item in items:
-                add('', 1, item, add_to_columns)
-            columns = list(columns.keys())
-            columns.sort(key=column_sort_key)
-        else:
-            columns = self.columns
-
-        for i in range(0, len(columns)):
-            column_ix[columns[i]] = i
-        for item in items:
-            row = len(columns) * ['']
-            add('', 1, item, add_to_row)
-            table.append(row)
-
-        return (columns, table)
-
-
-class TSVList(Lister):
-    def __init__(self, columns, obj_info, options):
-        self.columns = columns
-        self.obj_info = obj_info
-        self.options = options
-
-    def display_list(self, columns, table):
-        head = "\t".join(columns)
-        self.verbose(head)
-
-        for row in table:
-            line = "\t".join(row)
-            print(line)
-
-
-class LongList(Lister):
-    def __init__(self, columns, obj_info, options):
-        self.columns = columns
-        self.obj_info = obj_info
-        self.options = options
-
-    def display_list(self, columns, table):
-        sep = "\n" + "-" * 80 + "\n"
-        max_width = 0
-        for col in columns:
-            max_width = max(max_width, len(col))
-        fmt = "{0:>" + str(max_width) + "}: {1}"
-        print(sep)
-        for i in range(0, len(table)):
-            for j in range(0, len(columns)):
-                print(fmt.format(columns[j], table[i][j]))
-            print(sep)
-
-
-class TableList(Lister):
-    def __init__(self, columns, obj_info, options):
-        self.columns = columns
-        self.obj_info = obj_info
-        self.options = options
-
-    def display_list(self, columns, table):
-        total = [columns]
-        total.extend(table)
-        self.ascii_table(total)
-
-    def ascii_table(self, rows):
-        col_widths = [0] * len(rows[0])
-        for i in range(0, len(rows[0])):
-            for j in range(0, len(rows)):
-                col_widths[i] = max(col_widths[i], len(rows[j][i]))
-        self.ascii_bar(col_widths)
-        self.ascii_row(col_widths, rows[0], "^")
-        self.ascii_bar(col_widths)
-        for row in rows[1:]:
-            self.ascii_row(col_widths, row, "<")
-        self.ascii_bar(col_widths)
-
-    def ascii_row(self, col_widths, row, align):
-        txt = "|"
-        for i in range(0, len(col_widths)):
-            fmt = " {0:" + align + str(col_widths[i]) + "} "
-            txt += fmt.format(row[i]) + "|"
-        print(txt)
-
-    def ascii_bar(self, col_widths):
-        txt = "+"
-        for w in col_widths:
-            txt += ("-" * (w + 2)) + "+"
-        print(txt)
-
-
-class KeyValueList(Lister):
-    def __init__(self, columns, obj_info, options):
-        self.columns = columns
-        self.obj_info = obj_info
-        self.options = options
-
-    def display_list(self, columns, table):
-        for i in range(0, len(table)):
-            row = []
-            for j in range(0, len(columns)):
-                row.append("{0}=\"{1}\"".format(columns[j], table[i][j]))
-            print(" ".join(row))
-
-
-# TODO handle spaces etc in completable names
-class BashList(Lister):
-    def __init__(self, columns, obj_info, options):
-        self.columns = columns
-        self.obj_info = obj_info
-        self.options = options
-
-    def display_list(self, columns, table):
-        ix = None
-        for i in range(0, len(columns)):
-            if columns[i] == 'name':
-                ix = i
-        if ix is not None:
-            res = []
-            for row in table:
-                res.append(row[ix])
-            print(" ".join(res))
-
-
-FORMATS = {
-    # Special cased
-    'raw_json': None,
-    # Ditto
-    'pretty_json': None,
-    'tsv': TSVList,
-    'long': LongList,
-    'table': TableList,
-    'kvp': KeyValueList,
-    'bash': BashList
-}
-
-
-def write_payload_file(payload_file, json_list):
-    result = json.loads(json_list)[0]
-    payload = result['payload']
-    payload_encoding = result['payload_encoding']
-    with open(payload_file, 'wb') as f:
-        if payload_encoding == 'base64':
-            data = base64.b64decode(payload)
-        else:
-            data = payload
-        f.write(data.encode("utf-8"))
-
-
-def print_bash_completion():
-    script = """# This is a bash completion script for rabbitmqadmin.
-# Redirect it to a file, then source it or copy it to /etc/bash_completion.d
-# to get tab completion. rabbitmqadmin must be on your PATH for this to work.
-_rabbitmqadmin()
-{
-    local cur prev opts base
-    COMPREPLY=()
-    cur="${COMP_WORDS[COMP_CWORD]}"
-    prev="${COMP_WORDS[COMP_CWORD-1]}"
-
-    opts="list show declare delete close purge import export get publish help"
-    fargs="--help --host --port --vhost --username --password --format --depth --sort --sort-reverse"
-
-    case "${prev}" in
-        list)
-            COMPREPLY=( $(compgen -W '""" + " ".join(LISTABLE) + """' -- ${cur}) )
-            return 0
-            ;;
-        show)
-            COMPREPLY=( $(compgen -W '""" + " ".join(SHOWABLE) + """' -- ${cur}) )
-            return 0
-            ;;
-        declare)
-            COMPREPLY=( $(compgen -W '""" + " ".join(DECLARABLE.keys()) + """' -- ${cur}) )
-            return 0
-            ;;
-        delete)
-            COMPREPLY=( $(compgen -W '""" + " ".join(DELETABLE.keys()) + """' -- ${cur}) )
-            return 0
-            ;;
-        close)
-            COMPREPLY=( $(compgen -W '""" + " ".join(CLOSABLE.keys()) + """' -- ${cur}) )
-            return 0
-            ;;
-        purge)
-            COMPREPLY=( $(compgen -W '""" + " ".join(PURGABLE.keys()) + """' -- ${cur}) )
-            return 0
-            ;;
-        export)
-            COMPREPLY=( $(compgen -f ${cur}) )
-            return 0
-            ;;
-        import)
-            COMPREPLY=( $(compgen -f ${cur}) )
-            return 0
-            ;;
-        help)
-            opts="subcommands config"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-        -H)
-            COMPREPLY=( $(compgen -A hostname ${cur}) )
-            return 0
-            ;;
-        --host)
-            COMPREPLY=( $(compgen -A hostname ${cur}) )
-            return 0
-            ;;
-        -V)
-            opts="$(rabbitmqadmin -q -f bash list vhosts)"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-        --vhost)
-            opts="$(rabbitmqadmin -q -f bash list vhosts)"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-        -u)
-            opts="$(rabbitmqadmin -q -f bash list users)"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-        --username)
-            opts="$(rabbitmqadmin -q -f bash list users)"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-        -f)
-            COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
-            return 0
-            ;;
-        --format)
-            COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
-            return 0
-            ;;
-
-"""
-    for l in LISTABLE:
-        key = l[0:len(l) - 1]
-        script += "        " + key + """)
-            opts="$(rabbitmqadmin -q -f bash list """ + l + """)"
-            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
-            return 0
-            ;;
-"""
-    script += """        *)
-        ;;
-    esac
-
-   COMPREPLY=($(compgen -W "${opts} ${fargs}" -- ${cur}))
-   return 0
-}
-complete -F _rabbitmqadmin rabbitmqadmin
-"""
-    print(script)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/Makefile b/Makefile
index 1746075..538b023 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,106 @@
-.PHONY: all
-all:
+DESTDIR ?= .local
+ansible-local = ansible localhost -c local -i localhost, -e "ansible_python_interpreter=$$(which python3)"
+ssh_configs != find ".ssh/config.d/" -type f \! -name '.*' | sort
+# Used to fetch vendored files (see the .config/pythonrc.py target below).
+download = curl --location --silent --fail --output $@
 
-include binaries.mk
-include generated.mk
-include vendored.mk
+all: .config/pythonrc.py
+.config/pythonrc.py:
+	mkdir -p $$(dirname $@)
+	$(download) https://raw.githubusercontent.com/lonetwin/pythonrc/0.8.4/pythonrc.py
+
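+# Export API tokens read from the KeePass database (via passhole's ph) as
+# environment variables in a private, generated bashrc snippet.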
+all: .bashrc.private
+.bashrc.private: Documents/Database.kdbx
+	echo "export GITLAB_TOKEN='$$(ph show --field Password 'shore.co.il/GitLab token')'" > '$@'
+	echo 'export GITLAB_PRIVATE_TOKEN="$$GITLAB_TOKEN"' >> '$@'
+	echo "export GITLAB_REGISTRATION_TOKEN='$$(ph show --field Password 'shore.co.il/GitLab runner registration token')'" >> '$@'
+	echo "export GITHUB_TOKEN='$$(ph show --field 'CLI token' 'Web Sites/GitHub')'" >> '$@'
+
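+# Each SSH private key is stored in the Notes field of the matching
+# "SSH/<filename>" database entry and written out with owner-only permissions.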
+all: .ssh/github_ed25519
+.ssh/github_ed25519: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/gitlab_fdo
+.ssh/gitlab_fdo: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/gitlab_toptal
+.ssh/gitlab_toptal: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/shore_rsa
+.ssh/shore_rsa: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/gitlab_ed25519
+.ssh/gitlab_ed25519: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/shore_ecdsa
+.ssh/shore_ecdsa: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
+all: .ssh/shore_ed25519
+.ssh/shore_ed25519: Documents/Database.kdbx
+	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
+	chmod 600 '$@'
+
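+# Assemble a single .ssh/config by concatenating the fragments under
+# .ssh/config.d/ in sorted order.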
+all: .ssh/config
+.ssh/config: $(ssh_configs)
+	mkdir -p $$(dirname $@)
+	cat $(ssh_configs) > $@
+
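+# Generate a passphrase-less ECDSA key for SSH access to this machine; the
+# public key is authorized below for connections from 127.0.0.1 only.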
+all: .ssh/localhost
+.ssh/localhost:
+	-rm $@ $@.pub
+	ssh-keygen -t ecdsa -N '' -C localhost -f $@
+
+all: .ssh/localhost.pub
+.ssh/localhost.pub: .ssh/localhost
+	ssh-keygen -y -f $< > $@
+
+all: .ssh/authorized_keys
+.ssh/authorized_keys: .ssh/localhost.pub
+	-$(ansible-local) -m authorized_key -a "user=$$(whoami) key='$$(cat .ssh/localhost.pub)' key_options='from=\"127.0.0.1/8\"'"
+
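+# Generate the python-gitlab configuration for git.shore.co.il, embedding the
+# API token read from the database.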
+all: .config/python-gitlab.cfg
+.config/python-gitlab.cfg:
+	echo '[global]' > '$@'
+	echo 'default = shore.co.il' >> '$@'
+	echo 'ssl_verify = true' >> '$@'
+	echo '' >> '$@'
+	echo '[shore.co.il]' >> '$@'
+	echo 'url = https://git.shore.co.il/' >> '$@'
+	echo "private_token = $$(ph show --field Password 'shore.co.il/GitLab token')" >> '$@'
+	echo 'api_version = 4' >> '$@'
+
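+# Generate the RubyGems configuration, including an authenticated source for
+# the smile-io packages on GitHub.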
+all: .config/gem/gemrc
+.config/gem/gemrc:
+	echo '# vim: ft=yaml' > '$@'
+	echo '---' >> '$@'
+	echo ':backtrace: false' >> '$@'
+	echo ':bulk_threshold: 1000' >> '$@'
+	echo ':sources:' >> '$@'
+	echo '- https://rubygems.org/' >> '$@'
+	echo "- https://$$(ph show --field 'UserName' 'Web Sites/GitHub'):$$(ph show --field 'Smile gem token' 'Web Sites/GitHub')@rubygems.pkg.github.com/smile-io/" >> '$@'
+	echo ':update_sources: true' >> '$@'
+	echo ':verbose: true' >> '$@'
+	echo ':concurrent_downloads: 8' >> '$@'
diff --git a/README.md b/README.md
index 29b750b..7d8116d 100644
--- a/README.md
+++ b/README.md
@@ -2,12 +2,12 @@
 
 [![pipeline status](https://git.shore.co.il/nimrod/rcfiles/badges/master/pipeline.svg)](https://git.shore.co.il/nimrod/rcfiles/-/commits/master)
 
-A repository with my rc files. The purpose is for me to have revision
-control of my home directory, maybe send somebody a link to an example
-file and to be browsable for others (if anyone wants to copy some
-snippet). Because these are my actual files that I use right now, the
-repository is cloned directly to my home directory and is not meant for
-mass consumption as it is.
+A repository with my rc files and various scripts. The purpose is to keep my
+home directory under revision control, to let me send somebody a link to an
+example file, and to let others browse it (in case anyone wants to copy a
+snippet). Because these are the actual files I use right now, the repository is
+cloned directly to my home directory and is not meant for mass consumption as
+it is.
 
 ## Installation
 
@@ -27,15 +27,12 @@ Documents/bin/install-git-hooks
 
 ## Dependencies
 
-Dependencies that can be installed locally inside the home directory, are
-installed with the Git hook using the supplied `Makefile`. Dependencies that
-can't be installed locally, can be install with the `workstation.yml` Ansible
-playbook from the
-[ansible-desktop-playbook](https://git.shore.co.il/ansible/ansible-desktop-playbooks)
-repository, please consult the README from that repository. Care has been given
-to minimizing the dependencies and making the scripts as cross-platform as
-reasonably possible, so most script would run without any installing any tools
-not found on a Unix-like OS by default.
+Care has been taken to make the scripts as portable as possible, meaning that
+they should work out of the box on Debian, Alpine and OpenBSD. This is a best
+effort on my part, so mistakes are made; feel free to send patches. The
+counterpart to this repository is the
+[workbench](https://git.shore.co.il/shore/workbench) project, where I maintain
+a container image with all of the tools I use, but that is for Linux only.
 
 ## License
 
diff --git a/binaries.mk b/binaries.mk
deleted file mode 100644
index 224537a..0000000
--- a/binaries.mk
+++ /dev/null
@@ -1,146 +0,0 @@
-DESTDIR ?= .local
-tempdir != mktemp -d
-os != uname -s | awk '{print tolower($$0)}'
-arch != uname -m
-goos != go env GOOS
-goarch != go env GOARCH
-curl = curl --location --silent --fail
-download = $(curl) --output $@
-
-.PHONY: binaries
-all: binaries
-
-binaries: $(DESTDIR)/bin/hugo
-$(DESTDIR)/bin/hugo:
-	mkdir -p $$(dirname $@)
-	$(curl) https://github.com/gohugoio/hugo/releases/download/v0.83.1/hugo_0.83.1_Linux-64bit.tar.gz | tar -xzC "$$(dirname '$@')" "$$(basename '$@')"
-
-binaries: $(DESTDIR)/share/bfg/bfg.jar
-$(DESTDIR)/share/bfg/bfg.jar:
-	mkdir -p $$(dirname $@)
-	$(download) 'https://search.maven.org/remote_content?g=com.madgag&a=bfg&v=LATEST'
-
-binaries: $(DESTDIR)/bin/rke
-$(DESTDIR)/bin/rke:
-	mkdir -p $$(dirname $@)
-	-$(download) https://github.com/rancher/rke/releases/download/v1.2.8/rke_$(os)-$(goarch)
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/docker-machine
-$(DESTDIR)/bin/docker-machine:
-	mkdir -p $$(dirname $@)
-	-$(download) "https://github.com/docker/machine/releases/download/v0.16.2/docker-machine-$(os)-$(arch)"
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/packer
-$(DESTDIR)/bin/packer:
-	mkdir -p $$(dirname $@)
-	$(curl) https://releases.hashicorp.com/packer/1.7.2/packer_1.7.2_$(os)_$(goarch).zip --output $(tempdir)/packer.zip
-	unzip -d $(tempdir) $(tempdir)/packer.zip
-	install -m 755 $(tempdir)/packer $@
-	rm $(tempdir)/packer*
-
-binaries: $(DESTDIR)/bin/terraform
-$(DESTDIR)/bin/terraform:
-	mkdir -p $$(dirname $@)
-	$(curl) https://releases.hashicorp.com/terraform/0.15.3/terraform_0.15.3_$(os)_$(goarch).zip --output $(tempdir)/terraform.zip
-	unzip -d $(tempdir) $(tempdir)/terraform.zip
-	install -m 755 $(tempdir)/terraform $@
-	rm $(tempdir)/terraform*
-
-binaries: $(DESTDIR)/bin/terragrunt
-$(DESTDIR)/bin/terragrunt:
-	mkdir -p $$(dirname $@)
-	-$(download) https://github.com/gruntwork-io/terragrunt/releases/download/v0.22.5/terragrunt_$(goos)_$(goarch)
-	-chmod +x '$@'
-
-binaries: $(DESTDIR)/bin/vault
-$(DESTDIR)/bin/vault:
-	mkdir -p $$(dirname $@)
-	$(curl) https://releases.hashicorp.com/vault/1.7.1/vault_1.7.1_$(os)_$(goarch).zip --output $(tempdir)/vault.zip
-	unzip -d $(tempdir) $(tempdir)/vault.zip
-	install -m 755 $(tempdir)/vault $@
-	rm $(tempdir)/vault*
-
-binaries: $(DESTDIR)/bin/kubectl
-$(DESTDIR)/bin/kubectl:
-	mkdir -p $$(dirname $@)
-	-$(download) "https://storage.googleapis.com/kubernetes-release/release/v1.21.0/bin/$(os)/$(goarch)/kubectl"
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/kops
-$(DESTDIR)/bin/kops:
-	mkdir -p $$(dirname $@)
-	-$(download) "https://github.com/kubernetes/kops/releases/download/v1.20.0/kops-$(os)-$(goarch)"
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/kompose
-$(DESTDIR)/bin/kompose:
-	mkdir -p $$(dirname $@)
-	-$(download) https://github.com/kubernetes/kompose/releases/download/v1.22.0/kompose-$(os)-$(goarch)
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/minikube
-$(DESTDIR)/bin/minikube:
-	mkdir -p $$(dirname $@)
-	-$(download) https://storage.googleapis.com/minikube/releases/latest/minikube-$(os)-$(goarch)
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/kustomize
-$(DESTDIR)/bin/kustomize:
-	mkdir -p $$(dirname $@)
-	-$(curl) https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.1.2/kustomize_v4.1.2_$(os)_$(goarch).tar.gz | tar -zxC $(DESTDIR)/bin/
-
-binaries: $(DESTDIR)/bin/docker-machine-driver-kvm2
-$(DESTDIR)/bin/docker-machine-driver-kvm2:
-	mkdir -p $$(dirname $@)
-	-$(download) https://storage.googleapis.com/minikube/releases/latest/docker-machine-driver-kvm2
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/helm
-$(DESTDIR)/bin/helm:
-	mkdir -p $$(dirname $@)
-	mkdir -p $(tempdir)/helm
-	-$(curl) https://get.helm.sh/helm-v3.5.4-$(os)-$(goarch).tar.gz | tar -zx -C $(tempdir)/helm/
-	-install -m 755 $(tempdir)/helm/$(os)-$(goarch)/helm $@
-	rm -r $(tempdir)/helm
-
-binaries: $(DESTDIR)/bin/pack
-$(DESTDIR)/bin/pack:
-	mkdir -p $$(dirname $@)
-	-$(curl) https://github.com/buildpack/pack/releases/download/v0.18.1/pack-v0.18.1-$(os).tgz | tar -xzC $(DESTDIR)/bin/
-
-binaries: $(DESTDIR)/bin/skaffold
-$(DESTDIR)/bin/skaffold:
-	mkdir -p $$(dirname $@)
-	-$(download) https://storage.googleapis.com/skaffold/releases/v1.24.0/skaffold-$(os)-$(goarch)
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/minishift
-$(DESTDIR)/bin/minishift:
-	mkdir -p $$(dirname $@)
-	-$(curl) https://github.com/minishift/minishift/releases/download/v1.34.3/minishift-1.34.3-$(goos)-$(goarch).tgz | tar -xzC $(tempdir)
-	-install -m 755 $(tempdir)/minishift-*/minishift $@
-	-rm -r $(tempdir)/minishift-*
-
-binaries: $(DESTDIR)/bin/oc
-$(DESTDIR)/bin/oc:
-	mkdir -p $$(dirname $@)
-	-$(curl) https://github.com/openshift/okd/releases/download/4.7.0-0.okd-2021-04-24-103438/openshift-client-linux-4.7.0-0.okd-2021-04-24-103438.tar.gz | tar -xzC $(DESTDIR)/bin oc
-
-binaries: $(DESTDIR)/bin/docker-machine-driver-kvm
-$(DESTDIR)/bin/docker-machine-driver-kvm:
-	mkdir -p $$(dirname $@)
-	-$(download) https://github.com/dhiltgen/docker-machine-kvm/releases/download/v0.10.0/docker-machine-driver-kvm-ubuntu16.04
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/gomplate
-$(DESTDIR)/bin/gomplate:
-	mkdir -p $$(dirname $@)
-	-$(download) https://github.com/hairyhenderson/gomplate/releases/download/v3.9.0/gomplate_$(goos)-$(goarch)
-	-chmod +x $@
-
-binaries: $(DESTDIR)/bin/envconsul
-$(DESTDIR)/bin/envconsul:
-	mkdir -p $$(dirname $@)
-	-$(curl) https://releases.hashicorp.com/envconsul/0.11.0/envconsul_0.11.0_$(goos)_$(goarch).tgz | tar -xzC $$(dirname $@)
diff --git a/generated.mk b/generated.mk
deleted file mode 100644
index 642c1ec..0000000
--- a/generated.mk
+++ /dev/null
@@ -1,153 +0,0 @@
-DESTDIR ?= .local
-ansible-local = ansible localhost -c local -i localhost, -e "ansible_python_interpreter=$$(which python3)"
-ssh_configs != find ".ssh/config.d/" -type f \! -name '.*' | sort
-
-.PHONY: generated
-all: generated
-
-generated: .bash_completion.d/helm
-.bash_completion.d/helm: $(DESTDIR)/bin/helm
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/kompose
-.bash_completion.d/kompose: $(DESTDIR)/bin/kompose
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/kops
-.bash_completion.d/kops: $(DESTDIR)/bin/kops
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/kubectl
-.bash_completion.d/kubectl: $(DESTDIR)/bin/kubectl
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/minikube
-.bash_completion.d/minikube: $(DESTDIR)/bin/minikube
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/skaffold
-.bash_completion.d/skaffold: $(DESTDIR)/bin/skaffold
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/pipenv
-.bash_completion.d/pipenv:
-	mkdir -p $$(dirname $@)
-	-bash -c 'pipenv --completion > $@'
-
-generated: .bash_completion.d/pandoc
-.bash_completion.d/pandoc:
-	mkdir -p $$(dirname $@)
-	-pandoc --bash-completion > $@
-
-generated: .bash_completion.d/rabbitmqadmin
-.bash_completion.d/rabbitmqadmin: Documents/bin/rabbitmqadmin
-	mkdir -p $$(dirname $@)
-	-Documents/bin/rabbitmqadmin --bash-completion > $@
-
-generated: .bash_completion.d/minishift
-.bash_completion.d/minishift: $(DESTDIR)/bin/minishift
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/oc
-.bash_completion.d/oc: $(DESTDIR)/bin/oc
-	mkdir -p $$(dirname $@)
-	-$$(basename $@) completion bash > $@
-
-generated: .bash_completion.d/poetry
-.bash_completion.d/poetry:
-	-poetry completions bash > $@
-
-generated: .bashrc.private
-.bashrc.private: Documents/Database.kdbx
-	echo "export GITLAB_TOKEN='$$(ph show --field Password 'shore.co.il/GitLab token')'" > '$@'
-	echo 'export GITLAB_PRIVATE_TOKEN="$$GITLAB_TOKEN"' >> '$@'
-	echo "export GITLAB_REGISTRATION_TOKEN='$$(ph show --field Password 'shore.co.il/GitLab runner registration token')'" >> '$@'
-	echo "export GITHUB_TOKEN='$$(ph show --field 'CLI token' 'Web Sites/GitHub')'" >> '$@'
-
-generated: .ssh/github_ed25519
-.ssh/github_ed25519: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/gitlab_fdo
-.ssh/gitlab_fdo: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/gitlab_toptal
-.ssh/gitlab_toptal: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/shore_rsa
-.ssh/shore_rsa: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/gitlab_ed25519
-.ssh/gitlab_ed25519: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/shore_ecdsa
-.ssh/shore_ecdsa: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/shore_ed25519
-.ssh/shore_ed25519: Documents/Database.kdbx
-	ph show --field Notes "SSH/$$(basename '$@')" > '$@'
-	chmod 600 '$@'
-
-generated: .ssh/config
-.ssh/config: $(ssh_configs)
-	mkdir -p $$(dirname $@)
-	cat $(ssh_configs) > $@
-
-generated: .ssh/localhost
-.ssh/localhost:
-	-rm $@ $@.pub
-	ssh-keygen -t ecdsa -N '' -C localhost -f $@
-
-generated: .ssh/localhost.pub
-.ssh/localhost.pub: .ssh/localhost
-	ssh-keygen -y -f $< > $@
-
-generated: .ssh/authorized_keys
-.ssh/authorized_keys: .ssh/localhost.pub
-	-$(ansible-local) -m authorized_key -a "user=$$(whoami) key='$$(cat .ssh/localhost.pub)' key_options='from=\"127.0.0.1/8\"'"
-
-generated: .bash_completion.d/python-gitlab
-.bash_completion.d/python-gitlab:
-	-register-python-argcomplete gitlab > $@
-
-generated: .config/python-gitlab.cfg
-.config/python-gitlab.cfg:
-	echo '[global]' > '$@'
-	echo 'default = shore.co.il' >> '$@'
-	echo 'ssl_verify = true' >> '$@'
-	echo '' >> '$@'
-	echo '[shore.co.il]' >> '$@'
-	echo 'url = https://git.shore.co.il/' >> '$@'
-	echo "private_token = $$(ph show --field Password 'shore.co.il/GitLab token')" >> '$@'
-	echo 'api_version = 4' >> '$@'
-
-generated: .config/gem/gemrc
-.config/gem/gemrc:
-	echo '# vim: ft=yaml' > '$@'
-	echo '---' >> '$@'
-	echo ':backtrace: false' >> '$@'
-	echo ':bulk_threshold: 1000' >> '$@'
-	echo ':sources:' >> '$@'
-	echo '- https://rubygems.org/' >> '$@'
-	echo "- https://$$(ph show --field 'UserName' 'Web Sites/GitHub'):$$(ph show --field 'Smile gem token' 'Web Sites/GitHub')@rubygems.pkg.github.com/smile-io/" >> '$@'
-	echo ':update_sources: true' >> '$@'
-	echo ':verbose: true' >> '$@'
-	echo ':concurrent_downloads: 8' >> '$@'
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index f51cc0f..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,49 +0,0 @@
-ansible
-ansible-runner
-aws-sam-cli
-aws-shell
-awscli
-awslogs
-bcrypt
-black
-boto
-boto3
-cookiecutter
-cryptography
-dnspython
-docker
-docker-compose
-flit
-Glances
-gunicorn
-hashin
-httpbin
-httpie
-identify
-importlab
-invoke
-khal
-khard
-magic-wormhole
-mycli
-netaddr
-parse
-passhole
-passlib
-pgcli
-pipenv
-piprot
-poetry
-pre-commit
-psycopg2
-pur
-pygments
-pymongo
-PyMySQL
-pyopenssl
-remarshal
-requests
-sh
-template
-todoman
-tox
diff --git a/vendored.mk b/vendored.mk
deleted file mode 100644
index 7e5a93e..0000000
--- a/vendored.mk
+++ /dev/null
@@ -1,37 +0,0 @@
-curl = curl --location --silent --fail
-download = $(curl) --output $@
-
-.PHONY: vendored
-all: vendored
-
-vendored: .bash_completion.d/docker-compose
-.bash_completion.d/docker-compose:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/docker/compose/1.29.2/contrib/completion/bash/docker-compose
-
-vendored: .bash_completion.d/docker-machine.bash
-.bash_completion.d/docker-machine.bash:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/docker/machine/v0.16.2/contrib/completion/bash/docker-machine.bash
-
-vendored: .bash_completion.d/fabric-completion.bash
-.bash_completion.d/fabric-completion.bash:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/kbakulin/fabric-completion/master/fabric-completion.bash
-
-vendored: .config/pythonrc.py
-.config/pythonrc.py:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/lonetwin/pythonrc/0.8.4/pythonrc.py
-
-vendored: .bash_completion.d/molecule
-.bash_completion.d/molecule:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/ansible-community/molecule/1.25.1/asset/bash_completion/molecule.bash-completion.sh
-
-vendored: Documents/bin/rabbitmqadmin
-Documents/bin/rabbitmqadmin:
-	mkdir -p $$(dirname $@)
-	$(download) https://raw.githubusercontent.com/rabbitmq/rabbitmq-server/v3.8.16/deps/rabbitmq_management/bin/rabbitmqadmin
-	chmod +x $@
-
-- 
GitLab