Compare commits

..

No commits in common. "master" and "gpg" have entirely different histories.
master ... gpg

58 changed files with 14885 additions and 4416 deletions

270
.gitignore vendored
View File

@ -1,268 +1,4 @@
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,linux,python,vim
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos,linux,python,vim
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
### Vim ###
# Swap
[._]*.s[a-v][a-z]
!*.svg # comment out if you don't need vector files
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
Sessionx.vim
# Temporary
.netrwhist
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,linux,python,vim
# Custom rules (everything added below won't be overridden by 'Generate .gitignore File' if you use 'Update' option)
zsh.d/99-local.zsh
zsh.d/Darwin/99-local.zsh
*~
*.swp

5
.gitmodules vendored
View File

@ -1,6 +1,3 @@
[submodule "zsh.d/prompts/pure"]
path = zsh.d/prompts/pure
url = https://github.com/sindresorhus/pure.git
[submodule "zsh.d/plugins/zsh-z"]
path = zsh.d/plugins/zsh-z
url = https://github.com/agkozak/zsh-z
url = git@github.com:sindresorhus/pure.git

View File

@ -1,29 +1,6 @@
# zsh
My zsh configuration files
Works on Linux, *BSD and OSX, tested on Ubuntu, CentOS, Debian, FreeBSD and several versions of macOS.
Good for Linux and OSX, tested on Ubuntu, CentOS, Debian and FreeBSD
### Supported plugins
There is native support for:
- [homebrew](https://brew.sh/);
- [zoxide](https://github.com/ajeetdsouza/zoxide), if not installed falls back to zsh-z native plugin;
- kubectl;
- [krew](https://krew.sigs.k8s.io/);
- [pyenv](https://github.com/pyenv/pyenv);
- [thefuck](https://github.com/nvbn/thefuck)
These plugins must be installed in another way, I only support using them
There is also native support for [iTerm2](https://iterm2.com/) shell integration, if you are running on macOS
### Installation
- Clone recursively this repo in `/etc/zsh`: `sudo git clone --recurse <url> /etc/zsh`;
- Copy `/etc/zsh/zshrc` in `/etc`;
- Done
**NOTE**: on macOS with each update of the OS, the file `/etc/zshrc` is restored to the default, you will have to copy again the one in `/etc/zsh`: `sudo cp /etc/zsh/zshrc /etc/zshrc`
If you prefer to avoid touching `/etc/` you can clone the repo in `~/.zsh` and copy the `zshrc` into `~/.zshrc`: `cp ~/.zsh/zshrc ~/.zshrc`, but then you have to add your own customisations to `.zshrc` at the end of the file.
Last github PGP another

12
antibody/plugins.sh Normal file
View File

@ -0,0 +1,12 @@
source /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/git/git.plugin.zsh
fpath+=( /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/git )
source /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/pip/pip.plugin.zsh
fpath+=( /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/pip )
source /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/docker-compose/docker-compose.plugin.zsh
fpath+=( /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/docker-compose )
source /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/aws/aws.plugin.zsh
fpath+=( /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh/plugins/aws )
source /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-agnoster-SLASH-agnoster-zsh-theme/agnoster.zsh-theme
fpath+=( /home/andre/.cache/antibody/https-COLON--SLASH--SLASH-github.com-SLASH-agnoster-SLASH-agnoster-zsh-theme )

33
antibody/plugins.txt Normal file
View File

@ -0,0 +1,33 @@
# To update plugins, add them below, then run
# antibody bundle < ~/.zshdir/plugins.txt > ~/.zshdir/plugins.sh
# Bundles from the default repo (robbyrussell's oh-my-zsh).
# heroku
# lein
# command-not-found
# sudo
# fancy-ctrl-z
# zsh_reload
# vagrant
# zsh-users/zsh-completions src
# brew-cask
# brew
robbyrussell/oh-my-zsh path:plugins/git
# robbyrussell/oh-my-zsh path:plugins/git-prompt
robbyrussell/oh-my-zsh path:plugins/pip
# robbyrussell/oh-my-zsh path:plugins/svn
# robbyrussell/oh-my-zsh path:plugins/osx
robbyrussell/oh-my-zsh path:plugins/docker
robbyrussell/oh-my-zsh path:plugins/docker-compose
robbyrussell/oh-my-zsh path:plugins/aws
robbyrussell/oh-my-zsh path:plugins/httpie
# Syntax highlighting bundle.
# zsh-users/zsh-syntax-highlighting
# Load the theme.
agnoster/agnoster-zsh-theme
# vim: set ts=4 sw=4 tw=0 ft=sh :

10
antibody/zshrc.antibody Normal file
View File

@ -0,0 +1,10 @@
# Created by newuser for 5.7.1
#
export DEFAULT_USER=$(whoami)
export WORKON_HOME=$HOME/Src/virtualenvs
export PROJECT_HOME=$HOME/Src/projects
ZSH="$(antibody home)/https-COLON--SLASH--SLASH-github.com-SLASH-robbyrussell-SLASH-oh-my-zsh"
source ~/.zshdir/plugins.sh
cd

View File

@ -1,140 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/JanDeDobbeleer/oh-my-posh/main/themes/schema.json
console_title: true
console_title_style: template
# console_title_template: "{{if .Env.SSH_CONNECTION}} {{ .HostName }} :: {{end}}{{.Folder}}"
console_title_template: "{{if .Env.SSH_CONNECTION}}@{{ .HostName }}:{{end}}{{.Folder}}"
final_space: false
blocks:
- type: prompt
alignment: left
newline: false
segments:
- type: session
style: plain
foreground: yellow
properties:
prefix: ""
postfix: ""
template: '{{if .SSHSession}}|{{ .HostName }}| {{end}}'
- type: text
style: plain
foreground: lightBlue
properties:
prefix: ""
postfix: ""
text: "$([ $(jobs|wc -l|xargs) -gt 0 ] && echo '\u2622 ')"
- type: root
style: plain
foreground: yellow
properties:
prefix: ""
postfix: ""
root_icon: "\u26A1"
# root_icon: "# "
- type: path
style: plain
foreground: lightBlue
properties:
prefix: ""
# style: agnoster_short
# max_depth: 2
style: full
- type: git
style: plain
foreground: yellow
properties:
branch_ahead_icon: <#88C0D0>⇡</>
branch_behind_icon: <#88C0D0>⇣</>
branch_icon: ""
display_status: true
fetch_status: true
prefix: ""
template: '{{ .HEAD }}{{.BranchStatus}}{{ if .Working.Changed }}<#FF0000>* {{ .Working.String }}</>{{ end }}'
# template: '{{.BranchStatus}}{{ if .Working.Changed }}<#FF0000>* {{ .Working.String }}</>{{ end }}'
- type: executiontime
style: plain
foreground: yellow
properties:
always_enabled: false
postfix: ""
prefix: " "
style: austin
threshold: 30000
- type: prompt
alignment: right
segments:
- type: aws
style: plain
foreground: yellow
properties:
postfix: ""
# prefix: " "
prefix: " "
# prefix: " "
template: '{{if .Profile}}{{.Profile}}{{end}}'
- type: kubectl
style: plain
foreground: lightBlue
properties:
display_error: true
parse_kubeconfig: true
postfix: ""
prefix: ""
# template: '{{if eq "on" .Env.KUBE_PS1_ENABLED}} ﴱ {{.Context}}{{if .Namespace}}/{{.Namespace}}{{end}}{{end}}'
template: '{{if eq "on" .Env.KUBE_PS1_ENABLED}} ﴱ {{.Context}}/{{.Namespace}}{{end}}'
# template: '{{ .Context }}/{{ .Namespace }}'
- type: command
style: plain
foreground: green
properties:
shell: /bin/sh
command: "ifconfig utun2 > /dev/null 2>&1 && echo '\uF023' || echo '\0'"
- type: prompt
alignment: left
newline: true
segments:
- type: python
style: plain
foreground: darkGray
properties:
display_version: false
postfix: ""
prefix: ""
- type: exit
style: plain
foreground: lightYellow
foreground_templates:
- '{{ if gt .Code 0 }}#FF0000{{ end }}'
properties:
always_enabled: true
prefix: ""
# template:
template: "\u276F"
tooltips:
- type: kubectl
style: plain
foreground: lightBlue
postfix: ""
prefix: ""
tips:
- kubectl
- k
- stern
properties:
template: 'ﴱ {{.Context}}{{ if .Namespace }}/{{.Namespace}}{{end}}'

View File

@ -1,118 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/JanDeDobbeleer/oh-my-posh/main/themes/schema.json
console_title: true
console_title_style: template
console_title_template: '{{if .Env.SSH_CONNECTION}} {{ .Host }} {{end}}{{.Path}}'
final_space: false
blocks:
- type: prompt
alignment: left
newline: false
segments:
- type: session
style: plain
foreground: yellow
properties:
postfix: ""
prefix: ""
template: '{{if .SSHSession}}@{{ .ComputerName }} {{end}}'
- type: root
style: plain
foreground: yellow
properties:
postfix: ""
prefix: ""
root_icon: "\u26A1"
# root_icon: "# "
- type: path
style: plain
# style: agnoster_left
max_depth: 2
foreground: lightBlue
properties:
prefix: ""
style: full
- type: git
style: plain
foreground: yellow
properties:
branch_ahead_icon: <#88C0D0>⇡</>
branch_behind_icon: <#88C0D0>⇣</>
branch_icon: ""
display_status: true
fetch_status: true
prefix: ""
template: '{{ .HEAD }}{{.BranchStatus}}{{ if .Working.Changed }}<#FF0000>* {{ .Working.String }}</>{{ end }}'
# template: '{{.BranchStatus}}{{ if .Working.Changed }}<#FF0000>* {{ .Working.String }}</>{{ end }}'
- type: executiontime
style: plain
foreground: yellow
properties:
always_enabled: false
postfix: ""
prefix: " "
style: austin
threshold: 30000
- type: prompt
alignment: right
segments:
- type: aws
style: plain
foreground: "#ffA000"
properties:
postfix: ""
# prefix: " "
prefix: " "
template: '{{if .Profile}}{{.Profile}}{{end}}'
- type: kubectl
style: plain
foreground: lightBlue
properties:
parse_kubeconfig: true
postfix: ""
prefix: " "
template: '{{if eq "on" .Env.KUBE_PS1_ENABLED}}|{{.Context}}/{{if .Namespace}}{{.Namespace}}{{else}}default{{end}}|{{end}}'
# - type: command
# style: plain
# foreground: white
# properties:
# shell: /bin/sh
# # command: "ifconfig utun2 > /dev/null 2>&1 && echo '\uF023' || echo ''"
# # command: "ifconfig utun2 > /dev/null 2>&1 && echo '<#00FF00>\u2713</>' || echo '\u2717'"
# command: "ifconfig utun2 > /dev/null 2>&1 && echo '<#00FF00>\u2713</>' || echo ' '"
- type: prompt
alignment: left
newline: true
segments:
- type: python
style: plain
foreground: darkGray
properties:
display_version: false
postfix: ""
prefix: ""
- type: exit
style: plain
foreground: lightYellow
foreground_templates:
- '{{ if gt .Code 0 }}#FF0000{{ end }}'
properties:
always_enabled: true
prefix: ""
# template:
template: "\u276F"

View File

@ -1,876 +0,0 @@
"$schema" = 'https://starship.rs/config-schema.json'
# $all is shorthand for $username$hostname$shlvl$singularity$kubernetes$directory$vcsh$git_branch$git_commit$git_state$git_metrics$git_status$hg_branch$docker_context$package$cmake$cobol$dart$deno$dotnet$elixir$elm$erlang$golang$helm$java$julia$kotlin$lua$nim$nodejs$ocaml$perl$php$pulumi$purescript$python$rlang$red$ruby$rust$scala$swift$terraform$vlang$vagrant$zig$nix_shell$conda$memory_usage$aws$gcloud$openstack$azure$env_var$crystal$custom$sudo$cmd_duration$line_break$jobs$battery$time$status$shell$character
# format = '$all'
format = """
${custom.vpn}\
${custom.root}\
$hostname\
$jobs\
$directory\
$git_branch\
$git_commit\
$git_state\
$git_status\
$git_metrics\
$cmd_duration\
$fill\
$aws\
$kubernetes\
$line_break\
$python\
$character"""
# right_format = """
# ${custom.local_title}\
# ${custom.remote_title}"""
scan_timeout = 30
command_timeout = 500
add_newline = false
palette = 'extras'
[palettes.extras]
aws_fg = '#202020'
aws_bg = '#ff9900'
k8s_fg = '#d0d0d0'
k8s_bg = '#3970e4'
[aws]
# format = '[$symbol($profile )(\($region\) )(\[$duration\])]($style)'
# format = '[$symbol ($profile)(/($region)) ]($style)'
format = '[ $symbol($profile)(/($region)) ]($style)'
# symbol = '☁️ '
# symbol = ' '
symbol = ' '
style = 'bold aws_fg bg:aws_bg'
disabled = false
expiration_symbol = 'X'
[aws.region_aliases]
eu-west-1 = "ire1"
eu-central-1 = "fra1"
us-east-2 = "ohio1"
[azure]
format = 'on [$symbol($subscription)]($style) '
symbol = 'ﴃ '
style = 'blue bold'
disabled = true
[battery]
full_symbol = ' '
charging_symbol = ' '
discharging_symbol = ' '
unknown_symbol = ' '
empty_symbol = ' '
disabled = false
format = '[$symbol$percentage]($style) '
[[battery.display]]
threshold = 10
style = 'red bold'
[character]
format = '$symbol '
success_symbol = '[](bright-yellow)'
error_symbol = '[](bold red)'
vicmd_symbol = '[](bright-yellow)'
disabled = false
[cmake]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '△ '
style = 'bold blue'
disabled = false
detect_extensions = []
detect_files = [
'CMakeLists.txt',
'CMakeCache.txt',
]
detect_folders = []
[cmd_duration]
min_time = 30_000
format = '[$duration]($style) '
style = 'yellow'
show_milliseconds = false
disabled = false
show_notifications = false
[cobol]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⚙️ '
style = 'bold blue'
disabled = false
detect_extensions = [
'cbl',
'cob',
'CBL',
'COB',
]
detect_files = []
detect_folders = []
[conda]
truncation_length = 1
format = 'via [$symbol$environment]($style) '
symbol = '🅒 '
style = 'green bold'
ignore_base = true
disabled = false
[crystal]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🔮 '
style = 'bold red'
disabled = false
detect_extensions = ['cr']
detect_files = ['shard.yml']
detect_folders = []
[dart]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🎯 '
style = 'bold blue'
disabled = false
detect_extensions = ['dart']
detect_files = [
'pubspec.yaml',
'pubspec.yml',
'pubspec.lock',
]
detect_folders = ['.dart_tool']
[deno]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🦕 '
style = 'green bold'
disabled = false
detect_extensions = []
detect_files = [
'mod.ts',
'deps.ts',
'mod.js',
'deps.js',
]
detect_folders = []
[directory]
truncation_length = 5
truncate_to_repo = true
repo_root_style = "underline italic bold blue"
fish_style_pwd_dir_length = 0
use_logical_path = true
format = '[$path]($style)[$read_only]($read_only_style) '
# repo_root_format = '[$before_root_path]($style)[$repo_root]($repo_root_style)[$path]($style)[$read_only]($read_only_style) '
style = 'blue bold'
disabled = false
read_only = " \u2718"
read_only_style = 'bright-red bold'
truncation_symbol = '.../'
home_symbol = '~'
[directory.substitutions]
[docker_context]
symbol = '🐳 '
style = 'blue bold'
format = 'via [$symbol$context]($style) '
only_with_files = true
disabled = true
detect_extensions = []
detect_files = [
'docker-compose.yml',
'docker-compose.yaml',
'Dockerfile',
]
detect_folders = []
[dotnet]
format = 'via [$symbol($version )(🎯 $tfm )]($style)'
version_format = 'v${raw}'
symbol = '.NET '
style = 'blue bold'
heuristic = true
disabled = false
detect_extensions = [
'csproj',
'fsproj',
'xproj',
]
detect_files = [
'global.json',
'project.json',
'Directory.Build.props',
'Directory.Build.targets',
'Packages.props',
]
detect_folders = []
[elixir]
format = 'via [$symbol($version \(OTP $otp_version\) )]($style)'
version_format = 'v${raw}'
symbol = '💧 '
style = 'bold purple'
disabled = false
detect_extensions = []
detect_files = ['mix.exs']
detect_folders = []
[elm]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🌳 '
style = 'cyan bold'
disabled = false
detect_extensions = ['elm']
detect_files = [
'elm.json',
'elm-package.json',
'.elm-version',
]
detect_folders = ['elm-stuff']
[env_var]
[erlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = ' '
style = 'bold red'
disabled = false
detect_extensions = []
detect_files = [
'rebar.config',
'erlang.mk',
]
detect_folders = []
[fill]
style = 'bold black'
symbol = ' '
disabled = false
[gcloud]
format = 'on [$symbol$account(@$domain)(\($region\))]($style) '
symbol = '☁️ '
style = 'bold blue'
disabled = false
[gcloud.region_aliases]
[git_branch]
format = '[$symbol$branch]($style)(:[$remote]($style)) '
symbol = ' '
style = 'yellow'
truncation_length = 9223372036854775807
truncation_symbol = '…'
only_attached = false
always_show_remote = false
disabled = false
[git_commit]
commit_hash_length = 7
format = '[\($hash$tag\)]($style) '
style = 'green bold'
only_detached = true
disabled = false
tag_symbol = ' 🏷 '
tag_disabled = true
[git_metrics]
added_style = 'bold green'
deleted_style = 'bold red'
only_nonzero_diffs = true
format = '([+$added]($added_style) )([-$deleted]($deleted_style) )'
disabled = true
[git_state]
rebase = 'REBASING'
merge = 'MERGING'
revert = 'REVERTING'
cherry_pick = 'CHERRY-PICKING'
bisect = 'BISECTING'
am = 'AM'
am_or_rebase = 'AM/REBASE'
style = 'bold yellow'
format = '\([$state( $progress_current/$progress_total)]($style)\) '
disabled = false
[git_status]
format = '([\[$all_status$ahead_behind\]]($style) )'
style = 'red bold'
stashed = '\$'
ahead = '⇡'
behind = '⇣'
up_to_date = ''
diverged = '⇕'
conflicted = '='
deleted = '✘'
renamed = '»'
modified = '!'
staged = '+'
untracked = '?'
disabled = false
[golang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐹 '
style = 'bold cyan'
disabled = false
detect_extensions = ['go']
detect_files = [
'go.mod',
'go.sum',
'glide.yaml',
'Gopkg.yml',
'Gopkg.lock',
'.go-version',
]
detect_folders = ['Godeps']
[helm]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⎈ '
style = 'bold white'
disabled = false
detect_extensions = []
detect_files = [
'helmfile.yaml',
'Chart.yaml',
]
detect_folders = []
[hg_branch]
symbol = ' '
style = 'bold purple'
format = 'on [$symbol$branch]($style) '
truncation_length = 9223372036854775807
truncation_symbol = '…'
disabled = true
[hostname]
ssh_only = true
trim_at = '.'
format = '|[$hostname]($style)| '
style = 'white'
disabled = false
[java]
disabled = false
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
style = 'red dimmed'
symbol = '☕ '
detect_extensions = [
'java',
'class',
'jar',
'gradle',
'clj',
'cljc',
]
detect_files = [
'pom.xml',
'build.gradle.kts',
'build.sbt',
'.java-version',
'deps.edn',
'project.clj',
'build.boot',
]
detect_folders = []
[jobs]
threshold = 1
symbol_threshold = 1
number_threshold = 2
format = '[$symbol$number]($style) '
# symbol = '✦'
symbol = "\u2622"
style = 'bold bright-yellow'
disabled = false
[julia]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = 'ஃ '
style = 'bold purple'
disabled = false
detect_extensions = ['jl']
detect_files = [
'Project.toml',
'Manifest.toml',
]
detect_folders = []
[kotlin]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🅺 '
style = 'bold blue'
kotlin_binary = 'kotlin'
disabled = false
detect_extensions = [
'kt',
'kts',
]
detect_files = []
detect_folders = []
[kubernetes]
symbol = '☸ '
format = '[ $symbol$context::$namespace ]($style)'
style = 'bold k8s_fg bg:k8s_bg'
disabled = false
[[kubernetes.contexts]]
context_pattern = "prod01.jafx-ire1.lan"
style = 'bold bright-yellow bg:k8s_bg'
context_alias = "prod01"
[[kubernetes.contexts]]
context_pattern = "preprod01.jafx-ire1.lan"
context_alias = "preprod01"
[[kubernetes.contexts]]
context_pattern = "test01.jafx-ire1.lan"
style = 'bold green bg:k8s_bg'
context_alias = "test01"
[[kubernetes.contexts]]
context_pattern = "infra01.arr.lan"
context_alias = "infra01"
[[kubernetes.contexts]]
context_pattern = "k3s.ifson.lan"
context_alias = "k3s"
# [kubernetes.context_aliases]
# 'preprod01.jafx-ire1.lan' = 'preprod01'
# 'prod01.jafx-ire1.lan' = 'prod01'
# 'test01.jafx-ire1.lan' = 'test01'
# 'infra01.arr.lan' = 'infra01'
[line_break]
disabled = false
[lua]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🌙 '
style = 'bold blue'
lua_binary = 'lua'
disabled = false
detect_extensions = ['lua']
detect_files = ['.lua-version']
detect_folders = ['lua']
[memory_usage]
threshold = 75
format = 'via $symbol[$ram( | $swap)]($style) '
style = 'white bold dimmed'
symbol = '🐏 '
disabled = false
[nim]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '👑 '
style = 'yellow bold'
disabled = false
detect_extensions = [
'nim',
'nims',
'nimble',
]
detect_files = ['nim.cfg']
detect_folders = []
[nix_shell]
format = 'via [$symbol$state( \($name\))]($style) '
symbol = '❄️ '
style = 'bold blue'
impure_msg = 'impure'
pure_msg = 'pure'
disabled = false
[nodejs]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = ' '
style = 'bold green'
disabled = false
not_capable_style = 'bold red'
detect_extensions = [
'js',
'mjs',
'cjs',
'ts',
]
detect_files = [
'package.json',
'.node-version',
'.nvmrc',
]
detect_folders = ['node_modules']
[ocaml]
format = 'via [$symbol($version )(\($switch_indicator$switch_name\) )]($style)'
version_format = 'v${raw}'
global_switch_indicator = ''
local_switch_indicator = '*'
symbol = '🐫 '
style = 'bold yellow'
disabled = false
detect_extensions = [
'opam',
'ml',
'mli',
're',
'rei',
]
detect_files = [
'dune',
'dune-project',
'jbuild',
'jbuild-ignore',
'.merlin',
]
detect_folders = [
'_opam',
'esy.lock',
]
[openstack]
format = 'on [$symbol$cloud(\($project\))]($style) '
symbol = '☁️ '
style = 'bold yellow'
disabled = false
[package]
format = 'is [$symbol$version]($style) '
symbol = '📦 '
style = '208 bold'
display_private = false
disabled = false
version_format = 'v${raw}'
[perl]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐪 '
style = '149 bold'
disabled = false
detect_extensions = [
'pl',
'pm',
'pod',
]
detect_files = [
'Makefile.PL',
'Build.PL',
'cpanfile',
'cpanfile.snapshot',
'META.json',
'META.yml',
'.perl-version',
]
detect_folders = []
[php]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐘 '
style = '147 bold'
disabled = false
detect_extensions = ['php']
detect_files = [
'composer.json',
'.php-version',
]
detect_folders = []
[pulumi]
format = 'via [$symbol$stack]($style) '
version_format = 'v${raw}'
symbol = ' '
style = 'bold 5'
disabled = false
[purescript]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '<=> '
style = 'bold white'
disabled = false
detect_extensions = ['purs']
detect_files = ['spago.dhall']
detect_folders = []
[python]
pyenv_version_name = false
pyenv_prefix = 'pyenv '
python_binary = [
'python3',
'python'
]
# format = 'via [${symbol}${pyenv_prefix}(${version} )(\($virtualenv\) )]($style)'
format = '[($virtualenv:)${version}]($style)'
# version_format = '${raw}'
version_format = '${major}.${minor}'
style = 'bright-black'
# symbol = '🐍 '
# symbol = ''
# symbol = ''
disabled = false
detect_extensions = ['py']
detect_files = [
'requirements.txt',
'.python-version',
'pyproject.toml',
'Pipfile',
'tox.ini',
'setup.py',
'__init__.py',
]
detect_folders = []
[red]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🔺 '
style = 'red bold'
disabled = false
detect_extensions = [
'red',
'reds',
]
detect_files = []
detect_folders = []
[rlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
style = 'blue bold'
symbol = '📐 '
disabled = false
detect_extensions = [
'R',
'Rd',
'Rmd',
'Rproj',
'Rsx',
]
detect_files = ['.Rprofile']
detect_folders = ['.Rproj.user']
[ruby]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '💎 '
style = 'bold red'
disabled = false
detect_extensions = ['rb']
detect_files = [
'Gemfile',
'.ruby-version',
]
detect_folders = []
detect_variables = [
'RUBY_VERSION',
'RBENV_VERSION',
]
[rust]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🦀 '
style = 'bold red'
disabled = false
detect_extensions = ['rs']
detect_files = ['Cargo.toml']
detect_folders = []
[scala]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
disabled = false
style = 'red bold'
symbol = '🆂 '
detect_extensions = [
'sbt',
'scala',
]
detect_files = [
'.scalaenv',
'.sbtenv',
'build.sbt',
]
detect_folders = ['.metals']
[shell]
format = '[$indicator]($style) '
bash_indicator = 'bsh'
fish_indicator = 'fsh'
zsh_indicator = 'zsh'
powershell_indicator = 'psh'
ion_indicator = 'ion'
elvish_indicator = 'esh'
tcsh_indicator = 'tsh'
nu_indicator = 'nu'
xonsh_indicator = 'xsh'
unknown_indicator = ''
style = 'white bold'
disabled = true
[shlvl]
threshold = 2
format = '[$symbol$shlvl]($style) '
symbol = '↕️ '
repeat = false
style = 'bold yellow'
disabled = true
[singularity]
symbol = ''
format = '[$symbol\[$env\]]($style) '
style = 'blue bold dimmed'
disabled = false
[status]
format = '[$symbol$status]($style) '
symbol = '✖'
success_symbol = '✔️'
not_executable_symbol = '🚫'
not_found_symbol = '🔍'
sigint_symbol = '🧱'
signal_symbol = '⚡'
style = 'bold red'
map_symbol = true
recognize_signal_code = true
pipestatus = false
pipestatus_separator = '|'
pipestatus_format = '\[$pipestatus\] => [$symbol$common_meaning$signal_name$maybe_int]($style)'
disabled = true
[sudo]
format = '[as $symbol]($style)'
symbol = '🧙 '
style = 'bold blue'
allow_windows = false
disabled = true
[swift]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐦 '
style = 'bold 202'
disabled = false
detect_extensions = ['swift']
detect_files = ['Package.swift']
detect_folders = []
[terraform]
format = '[$symbol$version]($style)'
version_format = '${raw}'
symbol = '💠 '
style = 'bold 105'
disabled = false
detect_extensions = [
'tf',
'tfplan',
'tfstate',
]
detect_files = []
detect_folders = ['.terraform']
[time]
format = 'at [$time]($style) '
style = 'bold yellow'
use_12hr = false
disabled = true
utc_time_offset = 'local'
time_range = '-'
[username]
format = '[$user]($style)'
style_root = 'red bold'
style_user = 'yellow bold'
show_always = false
disabled = false
[vagrant]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⍱ '
style = 'cyan bold'
disabled = false
detect_extensions = []
detect_files = ['Vagrantfile']
detect_folders = []
[vcsh]
symbol = ''
style = 'bold yellow'
format = 'vcsh [$symbol$repo]($style) '
disabled = false
[vlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = 'V '
style = 'blue bold'
disabled = false
detect_extensions = ['v']
detect_files = [
'v.mod',
'vpkg.json',
'.vpkg-lock.json',
]
detect_folders = []
[zig]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '↯ '
style = 'bold yellow'
disabled = false
detect_extensions = ['zig']
detect_files = []
detect_folders = []
[custom]
[custom.vpn]
description = "vpn status"
command = "echo \uF023"
# when = "ifconfig utun4 > /dev/null 2>&1"
when = "pgrep openvpn"
# format = "[$symbol($output)]($style)" # Remove extra blank after output
# style = "green"
style = "bright-green"
[custom.root]
description = "show if user is root"
command = "echo \u26A1"
when = "[ $USER = 'root' ]"
style = "bright-yellow"
[custom.window_title]
description = "set window title"
command = '[ -z $SSH_CLIENT] && print -Pn "\e]1;%2~\a" || print -Pn "\e]1;(%m) %2~\a"'
# format = "[$symbol($output)]($style) " # Remove extra blank after output
format="($output)"
when = "true"

View File

@ -1,838 +0,0 @@
# Warning: This config does not include keys that have an unset value
# $all is shorthand for $username$hostname$shlvl$singularity$kubernetes$directory$vcsh$git_branch$git_commit$git_state$git_metrics$git_status$hg_branch$docker_context$package$cmake$cobol$dart$deno$dotnet$elixir$elm$erlang$golang$helm$java$julia$kotlin$lua$nim$nodejs$ocaml$perl$php$pulumi$purescript$python$rlang$red$ruby$rust$scala$swift$terraform$vlang$vagrant$zig$nix_shell$conda$memory_usage$aws$gcloud$openstack$azure$env_var$crystal$custom$sudo$cmd_duration$line_break$jobs$battery$time$status$shell$character
# format = '$all'
format = """
${custom.root}\
$hostname\
$jobs\
$directory\
$git_branch\
$git_state\
$git_status\
$cmd_duration\
$fill\
$aws\
$kubernetes\
${custom.vpn}\
$line_break\
$python\
$character"""
# right_format = """
# ${custom.local_title}\
# ${custom.remote_title}"""
scan_timeout = 30
command_timeout = 500
add_newline = false
[aws]
# format = '[$symbol($profile )(\($region\) )(\[$duration\])]($style)'
format = '[ $symbol($profile)(/($region)) ]($style)'
# symbol = '☁️ '
symbol = ' '
# symbol = ' '
style = 'inverted bright-yellow'
disabled = false
expiration_symbol = 'X'
[aws.region_aliases]
eu-west-1 = "ire1"
eu-central-1 = "fra1"
us-east-2 = "ohio1"
[azure]
format = 'on [$symbol($subscription)]($style) '
symbol = 'ﴃ '
style = 'blue bold'
disabled = true
[battery]
full_symbol = ' '
charging_symbol = ' '
discharging_symbol = ' '
unknown_symbol = ' '
empty_symbol = ' '
disabled = false
format = '[$symbol$percentage]($style) '
[[battery.display]]
threshold = 10
style = 'red bold'
[character]
format = '$symbol '
success_symbol = '[](bright-yellow)'
error_symbol = '[](bold red)'
vicmd_symbol = '[](bright-yellow)'
disabled = false
[cmake]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '△ '
style = 'bold blue'
disabled = false
detect_extensions = []
detect_files = [
'CMakeLists.txt',
'CMakeCache.txt',
]
detect_folders = []
[cmd_duration]
min_time = 30000
format = '[$duration]($style) '
style = 'yellow'
show_milliseconds = false
disabled = false
show_notifications = false
min_time_to_notify = 45000
[cobol]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⚙️ '
style = 'bold blue'
disabled = false
detect_extensions = [
'cbl',
'cob',
'CBL',
'COB',
]
detect_files = []
detect_folders = []
[conda]
truncation_length = 1
format = 'via [$symbol$environment]($style) '
symbol = '🅒 '
style = 'green bold'
ignore_base = true
disabled = false
[crystal]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🔮 '
style = 'bold red'
disabled = false
detect_extensions = ['cr']
detect_files = ['shard.yml']
detect_folders = []
[dart]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🎯 '
style = 'bold blue'
disabled = false
detect_extensions = ['dart']
detect_files = [
'pubspec.yaml',
'pubspec.yml',
'pubspec.lock',
]
detect_folders = ['.dart_tool']
[deno]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🦕 '
style = 'green bold'
disabled = false
detect_extensions = []
detect_files = [
'mod.ts',
'deps.ts',
'mod.js',
'deps.js',
]
detect_folders = []
[directory]
truncation_length = 5
truncate_to_repo = true
repo_root_style = "underline italic bold blue"
fish_style_pwd_dir_length = 0
use_logical_path = true
format = '[$path]($style)[$read_only]($read_only_style) '
# repo_root_format = '[$before_root_path]($style)[$repo_root]($repo_root_style)[$path]($style)[$read_only]($read_only_style) '
style = 'blue bold'
disabled = false
# read_only = '🔒'
read_only = " \u2718"
read_only_style = 'bright-red bold'
truncation_symbol = '.../'
home_symbol = '~'
[directory.substitutions]
[docker_context]
symbol = '🐳 '
style = 'blue bold'
format = 'via [$symbol$context]($style) '
only_with_files = true
disabled = true
detect_extensions = []
detect_files = [
'docker-compose.yml',
'docker-compose.yaml',
'Dockerfile',
]
detect_folders = []
[dotnet]
format = 'via [$symbol($version )(🎯 $tfm )]($style)'
version_format = 'v${raw}'
symbol = '.NET '
style = 'blue bold'
heuristic = true
disabled = false
detect_extensions = [
'csproj',
'fsproj',
'xproj',
]
detect_files = [
'global.json',
'project.json',
'Directory.Build.props',
'Directory.Build.targets',
'Packages.props',
]
detect_folders = []
[elixir]
format = 'via [$symbol($version \(OTP $otp_version\) )]($style)'
version_format = 'v${raw}'
symbol = '💧 '
style = 'bold purple'
disabled = false
detect_extensions = []
detect_files = ['mix.exs']
detect_folders = []
[elm]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🌳 '
style = 'cyan bold'
disabled = false
detect_extensions = ['elm']
detect_files = [
'elm.json',
'elm-package.json',
'.elm-version',
]
detect_folders = ['elm-stuff']
[env_var]
[erlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = ' '
style = 'bold red'
disabled = false
detect_extensions = []
detect_files = [
'rebar.config',
'erlang.mk',
]
detect_folders = []
[fill]
style = 'bold black'
symbol = ' '
disabled = false
[gcloud]
format = 'on [$symbol$account(@$domain)(\($region\))]($style) '
symbol = '☁️ '
style = 'bold blue'
disabled = false
[gcloud.region_aliases]
[git_branch]
format = '[$symbol$branch]($style)(:[$remote]($style)) '
symbol = ' '
style = 'yellow'
truncation_length = 9223372036854775807
truncation_symbol = '…'
only_attached = false
always_show_remote = false
disabled = false
[git_commit]
commit_hash_length = 7
format = '[\($hash$tag\)]($style) '
style = 'green bold'
only_detached = true
disabled = false
tag_symbol = ' 🏷 '
tag_disabled = true
[git_metrics]
added_style = 'bold green'
deleted_style = 'bold red'
only_nonzero_diffs = true
format = '([+$added]($added_style) )([-$deleted]($deleted_style) )'
disabled = true
[git_state]
rebase = 'REBASING'
merge = 'MERGING'
revert = 'REVERTING'
cherry_pick = 'CHERRY-PICKING'
bisect = 'BISECTING'
am = 'AM'
am_or_rebase = 'AM/REBASE'
style = 'bold yellow'
format = '\([$state( $progress_current/$progress_total)]($style)\) '
disabled = false
[git_status]
format = '([\[$all_status$ahead_behind\]]($style) )'
style = 'red bold'
stashed = '\$'
ahead = '⇡'
behind = '⇣'
up_to_date = ''
diverged = '⇕'
conflicted = '='
deleted = '✘'
renamed = '»'
modified = '!'
staged = '+'
untracked = '?'
disabled = false
[golang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐹 '
style = 'bold cyan'
disabled = false
detect_extensions = ['go']
detect_files = [
'go.mod',
'go.sum',
'glide.yaml',
'Gopkg.yml',
'Gopkg.lock',
'.go-version',
]
detect_folders = ['Godeps']
[helm]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⎈ '
style = 'bold white'
disabled = false
detect_extensions = []
detect_files = [
'helmfile.yaml',
'Chart.yaml',
]
detect_folders = []
[hg_branch]
symbol = ' '
style = 'bold purple'
format = 'on [$symbol$branch]($style) '
truncation_length = 9223372036854775807
truncation_symbol = '…'
disabled = true
[hostname]
ssh_only = true
trim_at = '.'
format = '|[$hostname]($style)| '
style = 'white'
disabled = false
[java]
disabled = false
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
style = 'red dimmed'
symbol = '☕ '
detect_extensions = [
'java',
'class',
'jar',
'gradle',
'clj',
'cljc',
]
detect_files = [
'pom.xml',
'build.gradle.kts',
'build.sbt',
'.java-version',
'deps.edn',
'project.clj',
'build.boot',
]
detect_folders = []
[jobs]
threshold = 1
symbol_threshold = 1
number_threshold = 2
format = '[$symbol$number]($style) '
# symbol = '✦'
symbol = "\u2622"
style = 'bold bright-blue'
disabled = false
[julia]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = 'ஃ '
style = 'bold purple'
disabled = false
detect_extensions = ['jl']
detect_files = [
'Project.toml',
'Manifest.toml',
]
detect_folders = []
[kotlin]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🅺 '
style = 'bold blue'
kotlin_binary = 'kotlin'
disabled = false
detect_extensions = [
'kt',
'kts',
]
detect_files = []
detect_folders = []
[kubernetes]
symbol = '☸ '
format = '[ $symbol$context/$namespace]($style) '
style = 'inverted bold bright-blue'
disabled = false
[kubernetes.context_aliases]
[line_break]
disabled = false
[lua]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🌙 '
style = 'bold blue'
lua_binary = 'lua'
disabled = false
detect_extensions = ['lua']
detect_files = ['.lua-version']
detect_folders = ['lua']
[memory_usage]
threshold = 75
format = 'via $symbol[$ram( | $swap)]($style) '
style = 'white bold dimmed'
symbol = '🐏 '
disabled = false
[nim]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '👑 '
style = 'yellow bold'
disabled = false
detect_extensions = [
'nim',
'nims',
'nimble',
]
detect_files = ['nim.cfg']
detect_folders = []
[nix_shell]
format = 'via [$symbol$state( \($name\))]($style) '
symbol = '❄️ '
style = 'bold blue'
impure_msg = 'impure'
pure_msg = 'pure'
disabled = false
[nodejs]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = ' '
style = 'bold green'
disabled = false
not_capable_style = 'bold red'
detect_extensions = [
'js',
'mjs',
'cjs',
'ts',
]
detect_files = [
'package.json',
'.node-version',
'.nvmrc',
]
detect_folders = ['node_modules']
[ocaml]
format = 'via [$symbol($version )(\($switch_indicator$switch_name\) )]($style)'
version_format = 'v${raw}'
global_switch_indicator = ''
local_switch_indicator = '*'
symbol = '🐫 '
style = 'bold yellow'
disabled = false
detect_extensions = [
'opam',
'ml',
'mli',
're',
'rei',
]
detect_files = [
'dune',
'dune-project',
'jbuild',
'jbuild-ignore',
'.merlin',
]
detect_folders = [
'_opam',
'esy.lock',
]
[openstack]
format = 'on [$symbol$cloud(\($project\))]($style) '
symbol = '☁️ '
style = 'bold yellow'
disabled = false
[package]
format = 'is [$symbol$version]($style) '
symbol = '📦 '
style = '208 bold'
display_private = false
disabled = false
version_format = 'v${raw}'
[perl]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐪 '
style = '149 bold'
disabled = false
detect_extensions = [
'pl',
'pm',
'pod',
]
detect_files = [
'Makefile.PL',
'Build.PL',
'cpanfile',
'cpanfile.snapshot',
'META.json',
'META.yml',
'.perl-version',
]
detect_folders = []
[php]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐘 '
style = '147 bold'
disabled = false
detect_extensions = ['php']
detect_files = [
'composer.json',
'.php-version',
]
detect_folders = []
[pulumi]
format = 'via [$symbol$stack]($style) '
version_format = 'v${raw}'
symbol = ' '
style = 'bold 5'
disabled = false
[purescript]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '<=> '
style = 'bold white'
disabled = false
detect_extensions = ['purs']
detect_files = ['spago.dhall']
detect_folders = []
[python]
pyenv_version_name = false
pyenv_prefix = 'pyenv '
python_binary = [
'python',
'python3',
'python2',
]
# format = 'via [${symbol}${pyenv_prefix}(${version} )(\($virtualenv\) )]($style)'
format = '[($virtualenv:)${version}]($style)'
version_format = '${raw}'
style = 'bright-black'
# symbol = '🐍 '
# symbol = ''
symbol = ''
disabled = false
detect_extensions = ['py']
detect_files = [
'requirements.txt',
'.python-version',
'pyproject.toml',
'Pipfile',
'tox.ini',
'setup.py',
'__init__.py',
]
detect_folders = []
[red]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🔺 '
style = 'red bold'
disabled = false
detect_extensions = [
'red',
'reds',
]
detect_files = []
detect_folders = []
[rlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
style = 'blue bold'
symbol = '📐 '
disabled = false
detect_extensions = [
'R',
'Rd',
'Rmd',
'Rproj',
'Rsx',
]
detect_files = ['.Rprofile']
detect_folders = ['.Rproj.user']
[ruby]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '💎 '
style = 'bold red'
disabled = false
detect_extensions = ['rb']
detect_files = [
'Gemfile',
'.ruby-version',
]
detect_folders = []
detect_variables = [
'RUBY_VERSION',
'RBENV_VERSION',
]
[rust]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🦀 '
style = 'bold red'
disabled = false
detect_extensions = ['rs']
detect_files = ['Cargo.toml']
detect_folders = []
[scala]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
disabled = false
style = 'red bold'
symbol = '🆂 '
detect_extensions = [
'sbt',
'scala',
]
detect_files = [
'.scalaenv',
'.sbtenv',
'build.sbt',
]
detect_folders = ['.metals']
[shell]
format = '[$indicator]($style) '
bash_indicator = 'bsh'
fish_indicator = 'fsh'
zsh_indicator = 'zsh'
powershell_indicator = 'psh'
ion_indicator = 'ion'
elvish_indicator = 'esh'
tcsh_indicator = 'tsh'
nu_indicator = 'nu'
xonsh_indicator = 'xsh'
unknown_indicator = ''
style = 'white bold'
disabled = true
[shlvl]
threshold = 2
format = '[$symbol$shlvl]($style) '
symbol = '↕️ '
repeat = false
style = 'bold yellow'
disabled = true
[singularity]
symbol = ''
format = '[$symbol\[$env\]]($style) '
style = 'blue bold dimmed'
disabled = false
[status]
format = '[$symbol$status]($style) '
symbol = '✖'
success_symbol = '✔️'
not_executable_symbol = '🚫'
not_found_symbol = '🔍'
sigint_symbol = '🧱'
signal_symbol = '⚡'
style = 'bold red'
map_symbol = false
recognize_signal_code = true
pipestatus = false
pipestatus_separator = '|'
pipestatus_format = '\[$pipestatus\] => [$symbol$common_meaning$signal_name$maybe_int]($style)'
disabled = true
[sudo]
format = '[as $symbol]($style)'
symbol = '🧙 '
style = 'bold blue'
allow_windows = false
disabled = true
[swift]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '🐦 '
style = 'bold 202'
disabled = false
detect_extensions = ['swift']
detect_files = ['Package.swift']
detect_folders = []
[terraform]
format = '[$symbol$version]($style)'
version_format = '${raw}'
symbol = '💠 '
style = 'bold 105'
disabled = false
detect_extensions = [
'tf',
'tfplan',
'tfstate',
]
detect_files = []
detect_folders = ['.terraform']
[time]
format = 'at [$time]($style) '
style = 'bold yellow'
use_12hr = false
disabled = true
utc_time_offset = 'local'
time_range = '-'
[username]
format = '[$user]($style)'
style_root = 'red bold'
style_user = 'yellow bold'
show_always = false
disabled = false
[vagrant]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '⍱ '
style = 'cyan bold'
disabled = false
detect_extensions = []
detect_files = ['Vagrantfile']
detect_folders = []
[vcsh]
symbol = ''
style = 'bold yellow'
format = 'vcsh [$symbol$repo]($style) '
disabled = false
[vlang]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = 'V '
style = 'blue bold'
disabled = false
detect_extensions = ['v']
detect_files = [
'v.mod',
'vpkg.json',
'.vpkg-lock.json',
]
detect_folders = []
[zig]
format = 'via [$symbol($version )]($style)'
version_format = 'v${raw}'
symbol = '↯ '
style = 'bold yellow'
disabled = false
detect_extensions = ['zig']
detect_files = []
detect_folders = []
[custom]
[custom.vpn]
description = "vpn status"
command = "echo \uF023"
when = "ifconfig utun3 > /dev/null 2>&1"
# format = "[$symbol($output)]($style)" # Remove extra blank after output
style = "green"
[custom.root]
description = "show if user is root"
command = "echo \u26A1"
when = "[ $USER = 'root' ]"
style = "bright-yellow"
[custom.window_title]
description = "set window title"
command = '[ -z "$SSH_CLIENT" ] && print -Pn "\e]1;%2~\a" || print -Pn "\e]1;(%m) %2~\a"'
# format = "[$symbol($output)]($style) " # Remove extra blank after output
format="($output)"
when = "true"

View File

@ -1,8 +1,11 @@
# setopt ALL_EXPORT
setopt ALWAYS_TO_END
setopt AUTONAMEDIRS
setopt AUTO_PARAM_SLASH
setopt AUTO_REMOVE_SLASH
setopt AUTO_RESUME
#setopt CDABLE_VARS
unsetopt CDABLE_VARS
setopt CORRECT
setopt FUNCTION_ARGZERO
@ -16,23 +19,21 @@ setopt HIST_NO_STORE
setopt HIST_REDUCE_BLANKS
setopt INC_APPEND_HISTORY
setopt NO_HIST_BEEP
# setopt SHARE_HISTORY
setopt LIST_TYPES
setopt LONG_LIST_JOBS
#setopt no_CLOBBER
setopt no_BEEP
setopt no_HUP
setopt NOTIFY
setopt PATH_DIRS
setopt SHORT_LOOPS
setopt PROMPT_SUBST
setopt prompt_subst
setopt AUTO_CD
setopt AUTO_MENU
setopt no_MENU_COMPLETE
setopt AUTO_LIST
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -2,7 +2,10 @@
# Variables
## Vars used always
# MIBS=all
PATH="/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:$HOME/bin:."
# LANG="it_IT.UTF-8"
# TZ='Europe/Rome'
MIBS=all
LOGD=/var/log
## If we are not interactive quit
@ -10,7 +13,7 @@ LOGD=/var/log
## Vars only for interactive sessions
SAVEHIST=10000000
HISTFILE=~/.zsh_history
HISTFILE=~/.history
HISTFILESIZE=10000000
HISTSIZE=10000000
@ -24,37 +27,10 @@ EDITOR=vim
VISUAL=vim
export QUOTING_STYLE=escape
#######################################################################################
# Setup LESS
## make less colourful
LESS_TERMCAP_mb=$'\E[01;34m' # begin blinking
LESS_TERMCAP_md=$'\E[01;36m' # begin bold
LESS_TERMCAP_me=$'\E[0m' # end mode
LESS_TERMCAP_so=$'\E[01;47;34m' # begin standout-mode - info box
LESS_TERMCAP_se=$'\E[0m' # end standout-mode
LESS_TERMCAP_us=$'\E[04;32m' # begin underline
LESS_TERMCAP_ue=$'\E[0m' # end underline
LESS="-c -x4 -R -MM -PMFile\:?f%f:STDIN. ?BSize\:?B%B:Unk.?B\:?pb%pb\%:Unk.?B\:%i/%m"
LESSCHARSET=utf-8
READNULLCMD=/usr/bin/less
PAGER=/usr/bin/less
export LESS
export PAGER
# Configure a less input preprocessor if lesspipe is available.
# NOTE(review): `local` is only valid inside a function in most shells; this
# runs at file scope, so a plain assignment is used instead. The `|| ...`
# guard keeps the assignment safe if `whence` finds nothing (or under set -e).
LESSOPEN_SCRIPT=$(whence lesspipe lesspipe.sh 2>/dev/null) || LESSOPEN_SCRIPT=""
# Quote the expansion and use an explicit -n test: unquoted [ $VAR ] misparses
# when the value is empty or contains spaces.
if [ -n "$LESSOPEN_SCRIPT" ]; then
    LESSOPEN="|${LESSOPEN_SCRIPT} %s"
    LESS_ADVANCED_PREPROCESSOR=1
    export LESSOPEN
    export LESS_ADVANCED_PREPROCESSOR
fi
#
# Hack for svn
export SVN_EDITOR=${VISUAL}
# Set umask to collaborative mode
umask 002
# vim: set ts=4 sw=4 tw=0 ft=zsh :
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,65 +1,31 @@
#######################################################################################
#
# By default we choose emacs mode
bindkey "-e"
# Backward-kill-word stop on dir delimiter
autoload -U select-word-style
select-word-style bash
# History search
autoload -Uz history-search-end
zle -N history-beginning-search-backward-end history-search-end
zle -N history-beginning-search-forward-end history-search-end
# Command line editing in $EDITOR
autoload -z edit-command-line
zle -N edit-command-line
bindkey "^X^E" edit-command-line
# Standard Linux
bindkey "^[[1;5C" forward-word
bindkey "^[[1;5D" backward-word
bindkey "^S" kill-line
bindkey "^U" backward-kill-line
bindkey "\e[3~" delete-char
# Useful under iTerm
bindkey "\e[H" beginning-of-line
bindkey "-e"
bindkey "\eOH" beginning-of-line
bindkey "\eOF" end-of-line
bindkey "\e[1~" beginning-of-line
bindkey "\e[F" end-of-line
bindkey "\e[4~" end-of-line
bindkey "\e[3~" delete-char
bindkey "\eOA" history-search-backward
bindkey "\eOB" history-search-forward
bindkey "\e[A" history-search-backward
bindkey "\e[B" history-search-forward
# Required by Terminus in Sublime Text
bindkey "\e[1;3C" forward-word
bindkey "\e[1;3D" backward-word
# VI mode history search
bindkey -M vicmd '^[[A' history-beginning-search-backward-end \
'^[OA' history-beginning-search-backward-end \
'^[[B' history-beginning-search-forward-end \
'^[OB' history-beginning-search-forward-end
bindkey -M viins '^[[A' history-beginning-search-backward-end \
'^[OA' history-beginning-search-backward-end \
'^[[B' history-beginning-search-forward-end \
'^[OB' history-beginning-search-forward-end
bindkey -M viins "\e[H" beginning-of-line \
"\e[1~" beginning-of-line \
"^A" beginning-of-line \
"\e[F" end-of-line \
"\e[4~" end-of-line \
"^E" end-of-line
bindkey -M vicmd "\e[H" beginning-of-line \
"\e[1~" beginning-of-line \
"^A" beginning-of-line \
"\e[F" end-of-line \
"\e[4~" end-of-line \
"^E" end-of-line
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,16 +1,15 @@
#######################################################################################
# Start autocomplete
autoload -Uz compinit bashcompinit
for dump in ~/.zcompdump(N.mh+24); do
compinit
touch ~/.zcompdump
done
compinit -C
bashcompinit
# autoload bashcompinit
autoload -U compinit && compinit -u
autoload bashcompinit && bashcompinit
# autoload -Uz compinit
#if [ $(date +'%j') != $(stat -f '%Sm' -t '%j' ~/.zcompdump) ]; then
# compinit -C
# else
# compinit -C
# fi
autoload -U compsys;
compctl -g "*(-/)" + -g ".*(-/)" cd
# allow approximate
zstyle ':completion:*' completer _complete _match _approximate
@ -24,4 +23,6 @@ zstyle ':completion:*:kill:*' force-list always
# # cd not select parent dir
zstyle ':completion:*:cd:*' ignore-parents parent pwd
# compctl -g "*(-/)" + -g ".*(-/)" cd
#
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,8 +1,8 @@
#######################################################################################
# Aliases
# alias ls='ls -F --color'
# alias dir='ls -l'
alias ls='ls -F --color'
alias dir='ls -l'
alias la='ls -A'
alias ll='ls -l'
alias lla='ls -Al'
@ -13,11 +13,11 @@ alias cls='clear'
alias dc=cd
alias les=less
alias fu='sudo $(fc -ln -1)'
# alias sudo='sudo '
alias sudo='sudo '
alias mkdir='mkdir -p'
alias zap='rm -rf'
# alias ftail='tail -f'
alias ftail='tail -f'
alias grep='grep --colour'
alias ns='host -t ns'
@ -25,26 +25,20 @@ alias mx='host -t mx'
alias soa='host -t soa'
alias ptr='host -t ptr'
# alias vi="vim "
alias vi="vim "
# alias du1="du -h --max-depth=1"
# alias psg="ps ax|grep "
alias myip="dig +short ANY @resolver1.opendns.com myip.opendns.com"
alias du1="du -h --max-depth=1"
alias psg="ps ax|grep "
alias myip="dig +short myip.opendns.com @resolver1.opendns.com"
# alias delkey="ssh-keygen -R "
alias k=kubectl
alias kk='kubectl konfig merge ~/.kube/configs/* > ~/.kube/config'
alias delkey="ssh-keygen -R "
# Global Aliases
#
alias -g G="|grep "
alias -g K="-o yaml | kubectl neat | less"
alias -g KK="-o yaml | less"
alias -g L="|less"
alias -g W="|wc -l"
alias -g NO="> /dev/null"
alias -g NE="2> /dev/null"
alias -g NA="> /dev/null 2>&1"
# vim: set ts=4 sw=4 tw=0 ft=zsh :
# vim: set ts=4 sw=4 tw=0 ft=zsh :

23
zsh.d/30-less.zsh Normal file
View File

@ -0,0 +1,23 @@
#######################################################################################
# Setup LESS
## make less colourful
LESS_TERMCAP_mb=$'\E[01;34m' # begin blinking
LESS_TERMCAP_md=$'\E[01;36m' # begin bold
LESS_TERMCAP_me=$'\E[0m' # end mode
LESS_TERMCAP_so=$'\E[01;47;34m' # begin standout-mode - info box
LESS_TERMCAP_se=$'\E[0m' # end standout-mode
LESS_TERMCAP_us=$'\E[04;32m' # begin underline
LESS_TERMCAP_ue=$'\E[0m' # end underline
LESS="-c -x4 -R -MM -PMFile\:?f%f:STDIN. ?BSize\:?B%B:Unk.?B\:?pb%pb\%:Unk.?B\:%i/%m"
LESSCHARSET=utf-8
READNULLCMD=/usr/bin/less
PAGER=/usr/bin/less
export LESS
export PAGER
LESSOPEN="|lesspipe.sh %s"; export LESSOPEN
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,18 +1,17 @@
fpath+=$0:h/prompts
fpath=($0:h/prompts $fpath)
fpath+=$0:h/prompts/mypure
# fpath+=$0:h/prompts/pure
fpath+=$0:h/prompts/pure
autoload -U promptinit;promptinit
zstyle :prompt:pure:user color white
zstyle :prompt:pure:host color white
zstyle :prompt:pure:prompt:success color yellow
PURE_CMD_MAX_EXEC_TIME=30
# Default prompt is pure
#
# prompt mypure
# prompt pure
prompt pure
# vim: set ts=4 sw=4 tw=0 ft=zsh :
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,41 +1,29 @@
#######################################################################################
# Functions
# cdb - Goes to folder by complete path
autoload -U cdb
# hist - Grep from history
autoload -U hist
# hdu - Human readable report of files and directories sizes
autoload -U hdu
# dust - list total size in . directory
autoload -U dust
# bak - remove backup files
autoload -U bak
#
#
#
#
# Load useful modules
# fancyTerm - returns true if we are on xterm/rxvt/screen, elsewhere false
autoload -U fancyTerm
zmodload -F zsh/stat b:zstat
zmodload zsh/datetime
#
# Autoload all functions in zshfunctions
#
autoload -U $LIBRARY/zshfunctions/*(.:t)
# # repo_char - returns a character based on the type of repo we are in
# autoload -U repo_char
# Add zsh-hook function
autoload -Uz add-zsh-hook
autoload -U x509
# Add async support
autoload -Uz async && async
autoload -U awslogin
# Async Git update
autoload -Uz vcs_info
# zsh-async callback: runs in the interactive shell when the background
# vcs_info job finishes.
# NOTE(review): zsh-async callbacks receive (job name, return code, stdout,
# exec time, stderr) — $3 is the job's stdout; confirm against the
# async_register_callback contract.
_vbe_vcs_info_done() {
# Publish the worker's computed message where the prompt expects it.
local stdout=$3
vcs_info_msg_0_=$stdout
# echo $(date +"%Y-%m-%d %H:%M:%S") $PWD info $vcs_info_msg_0_ $vcs_info_msg_1_ >> ~/vcs.log
# Redraw the prompt now that the VCS info is available.
zle reset-prompt
}
# precmd hook: before each prompt, request a fresh VCS status for the
# current directory from the async worker.
_vbe_vcs_precmd() {
# echo $(date +"%Y-%m-%d %H:%M:%S") $PWD pre >> ~/vcs.log
# Cancel any queued/still-running request so only the latest $PWD is computed.
async_flush_jobs vcs_info
# _vbe_vcs_info is defined elsewhere in this config — presumably it runs
# vcs_info in the given directory and prints the message; TODO confirm.
async_job vcs_info _vbe_vcs_info $PWD
}
async_init
async_start_worker vcs_info
async_register_callback vcs_info _vbe_vcs_info_done
add-zsh-hook precmd _vbe_vcs_precmd
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,28 +0,0 @@
# pyenv
if type pyenv > /dev/null; then
export PYENV_ROOT="$HOME/.pyenv"
# eval "$(pyenv init --path)"
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"
fi
# thefuck
type thefuck > /dev/null && eval $(thefuck --alias)
# z
# [ -f $LIBRARY/plugins/zsh-z/zsh-z.plugin.zsh ] && source $LIBRARY/plugins/zsh-z/zsh-z.plugin.zsh
if type zoxide > /dev/null; then ## zoxide is installed
eval "$(zoxide init zsh)"
export Z_COMMAND=zoxide
elif [ -f $LIBRARY/plugins/zsh-z/zsh-z.plugin.zsh ]; then ## zsh-z is installed
source $LIBRARY/plugins/zsh-z/zsh-z.plugin.zsh
export Z_COMMAND=zsh-z
else
echo "No zoxide or zsh-z found, z command disabled"
fi
# kubectl/krew
export PATH="${KREW_ROOT:-$HOME/.krew}/bin:$PATH"
# Setup iTerm shell integration
test -e $HOME/.iterm2_shell_integration.zsh && source $HOME/.iterm2_shell_integration.zsh && export ITERM_ENABLE_SHELL_INTEGRATION_WITH_TMUX=YES || echo "iTerm integration not installed"

View File

@ -0,0 +1,29 @@
# venv — lazily enable virtualenvwrapper and optionally activate an env.
#
# Usage:
#   venv          enable virtualenvwrapper and list available virtualenvs
#   venv <name>   activate virtualenv <name> (via workon)
#
# Requires WORKON_HOME and PROJECT_HOME to be set beforehand (e.g. in
# 99-local.zsh); VIRTUALENV_SCRIPT may override the default
# virtualenvwrapper.sh location.
function venv {
    # export WORKON_HOME=$HOME/Src/virtualenvs
    # export PROJECT_HOME=$HOME/Src/projects
    # Quote every expansion: unquoted [ -z $VAR ] misparses when the value
    # is empty or contains spaces.
    if [ -z "$WORKON_HOME" ] || [ -z "$PROJECT_HOME" ]; then
        echo "Please, set WORKON_HOME and PROJECT_HOME first."
        echo "Better place to set them is 99-local.zsh"
        return
    fi
    [ -z "$VIRTUALENV_SCRIPT" ] && VIRTUALENV_SCRIPT=/usr/local/bin/virtualenvwrapper.sh
    # Create the working directories on first use; `--` guards odd paths.
    [ -d "$WORKON_HOME" ] || mkdir -p -- "$WORKON_HOME"
    [ -d "$PROJECT_HOME" ] || mkdir -p -- "$PROJECT_HOME"
    source "$VIRTUALENV_SCRIPT"
    if [ -z "$1" ]; then
        echo "virtualenv enabled."
        echo "WORKON_HOME: $WORKON_HOME"
        echo "PROJECT_HOME: $PROJECT_HOME"
        echo
        echo "Virtualenvs:"
        lsvirtualenv -b
        echo
    else
        workon "$1"
    fi
}

View File

@ -1,22 +1,14 @@
# Darwin/OSX
# Environment and aliases for OSX
# Setup brew
# We need it here to fix the PATH
# so we can setup pyenv and other stuff
#
if type /opt/homebrew/bin/brew > /dev/null; then
eval "$(/opt/homebrew/bin/brew shellenv)"
FPATH="$(brew --prefix)/share/zsh/site-functions:${FPATH}"
fi
export LSCOLORS="ExgxcxdxCxegedabagacad"
alias ls='ls -F -G'
alias ldd='otool -L'
# alias skill=killall
alias ls='ls -F -G'
alias skill=killall
alias lsrebuild='/System/Library/Frameworks/CoreServices.framework/Versions/A/Frameworks/LaunchServices.framework/Versions/A/Support/lsregister -kill -r -domain local -domain system -domain user'
# alias vim='/Applications/MacVim.app/Contents/MacOS/Vim '
# alias vi='/Applications/MacVim.app/Contents/MacOS/Vim '
# alias mvim='mvim --remote-tab-silent '
# alias mvim='open -a MacVim '
# vim: set ts=4 sw=4 tw=0 ft=zsh :
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -0,0 +1,19 @@
# Core Utils
# Prefer GNU coreutils (Homebrew's "g"-prefixed tools) when installed.
# `command -v` replaces the non-portable `which ...; [ $? -eq 0 ]` dance
# and tests the result directly in the `if`.
if command -v gcp > /dev/null 2>&1; then
    # Load the dircolors database for GNU ls colours (system-wide file wins).
    if [ -f /etc/zsh/gdircolors ]; then
        eval "$(gdircolors /etc/zsh/gdircolors)"
    else
        eval "$(gdircolors)"
    fi
    #alias ls='gls --color -F' # OSX ls is better (supports extended attributes)
    alias ll='ls -l'
    alias la='ls -a'
    alias cp='gcp'
    alias mv='gmv'
    alias rm='grm'
    alias du='gdu'
    alias df='gdf -a'
fi
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -8,7 +8,7 @@ if which dircolors > /dev/null; then
fi
fi
alias ls='LC_ALL=C ls -F --color=auto --group-directories-first'
alias ls='LANG=C ls -F --color=auto --group-directories-first'
alias open='xdg-open '
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -4,4 +4,12 @@ Useful customizations that you can add are
prompt <promptname> - redefine prompt
To use virtualenvs you MUST define
export WORKON_HOME <path to virtualenvs>
export PROJECT_HOME <path to projects>
then you can explicitly call 'venv' or leave it as a function to invoke at the
right moment
/* vim: set ts=4 sw=4 tw=78 ft=text : */

417
zsh.d/completions/_adb Normal file
View File

@ -0,0 +1,417 @@
#compdef adb
# ------------------------------------------------------------------------------
# Copyright (c) 2011 Github zsh-users - http://github.com/zsh-users
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the zsh-users nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for adb (Android Debug Bridge) 1.0.26
# (http://developer.android.com/guide/developing/tools/adb.html).
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Julien Nicoulaud <julien.nicoulaud@gmail.com>
#
# ------------------------------------------------------------------------------
_adb() {
  # Top-level completion entry point: parse adb's global options, complete
  # the sub-command name, then dispatch on it for per-command arguments.
  typeset -A opt_args
  local context state line curcontext="$curcontext" adb_args
  local ret=1
  _arguments -C \
    '(-e -s)-d[directs command to the only connected USB device, returns an error if more than one USB device is present]' \
    '(-d -s)-e[directs command to the only running emulator, returns an error if more than one emulator is running]' \
    '(-d -e)-s[directs command to the USB device or emulator with the given serial number]: :_adb_serial_numbers' \
    '-p[simple product name or a relative/absolute path to a product out directory]: :_adb_products' \
    '1: :_adb_cmds' \
    '*::arg:->args' \
    && ret=0
  # Re-collect the global device-selection options so helper functions can
  # forward them when they shell out to adb.
  adb_args="${(fkv)opt_args[(I)-d|-e|-s|-p]}"
  case "$state" in
    (args)
      curcontext="${curcontext%:*:*}:adb-cmd-$words[1]:"
      case $words[1] in
        (help|version|devices|jdwp|bugreport|wait-for-device|start-server|kill-server|get-state|get-serialno|status-window|remount|reboot-bootloader|root|usb)
          _message 'no more arguments' && ret=0
          ;;
        (connect|disconnect)
          _arguments \
            '1: :_adb_host_colon_ports' \
            && ret=0
          ;;
        (push)
          _arguments \
            '1:local directory:_files -/' \
            '2: :_adb_remote_files -/' \
            && ret=0
          ;;
        (pull)
          _arguments \
            '1: :_adb_remote_files -/' \
            '2:local directory:_files -/' \
            && ret=0
          ;;
        (sync)
          _arguments \
            '-l[list but do not copy]' \
            '1: :_adb_sync_directories' \
            && ret=0
          ;;
        (shell|emu)
          _arguments -C \
            '1: :_adb_remote_commands' \
            '*::remote-command-arg:->remote-command-args' \
            && ret=0
          case "$state" in
            (remote-command-args)
              curcontext="${curcontext%:*:*}:adb-remote-cmd-$words[1]:"
              # BUG FIX: the guard used to test
              # $+functions[_adb_remote_command_$words[1]_args] — a name with a
              # stray "_args" suffix that is never defined — while the branch
              # calls _adb_remote_command_$words[1]. Test the function that is
              # actually invoked so dedicated handlers can ever fire.
              if (( $+functions[_adb_remote_command_$words[1]] )); then
                _adb_remote_command_$words[1] && ret=0
              # TODO Write handlers for following commands:
              #  * am (Activity Manager)
              #  * pm (Package Manager)
              # TODO Reuse existing compdefs for standard commands (ls, id, ifconfig, kill, etc) ?
              #      How do we tell them to use _remote_ files/pids/users/etc ?
              else
                _adb_remote_command_default && ret=0
              fi
              ;;
          esac
          ;;
        (logcat)
          # -r/-n are only meaningful after -f/-r respectively, so only offer
          # them once the prerequisite option is already on the line.
          local -a rotation_opts
          [[ -n ${(M)words:#"-f"} ]] && rotation_opts+=('-r[rotates the log file every kbytes of output. The default value is 16]:value (in kb)')
          [[ -n ${(M)words:#"-r"} ]] && rotation_opts+=('-n[sets the maximum number of rotated logs. The default value is 4]:count')
          _arguments \
            '-b[loads an alternate log buffer for viewing, such as event or radio. The main buffer is used by default]: :_adb_logcat_buffers' \
            '-c[clears (flushes) the entire log and exits]' \
            '-d[dumps the log to the screen and exits]' \
            '-f[writes log message output to file. The default is stdout]: :_files' \
            '-g[prints the size of the specified log buffer and exits]' \
            '-s[sets the default filter spec to silent]' \
            '-v[sets the output format for log messages]: :_adb_logcat_output_formats' \
            "${rotation_opts[@]}" \
            '*: :_adb_logcat_filter_specs' \
            && ret=0
          ;;
        (forward)
          _arguments \
            '1: :_adb_local_forward_specs' \
            '2: :_adb_remote_forward_specs' \
            && ret=0
          ;;
        (install)
          _arguments \
            '-l[forward-lock the app]' \
            '-r[reinstall the app, keeping its data]' \
            '-s[install on SD card instead of internal storage]' \
            '1: :_files' \
            && ret=0
          ;;
        (uninstall)
          _arguments \
            '-k[keep the data and cache directories]' \
            '1: :_adb_packages' \
            && ret=0
          ;;
        (reboot)
          _arguments \
            '1:program:((bootloader:reboot\ into\ the\ bootloader\ program recovery:reboot\ into\ the\ recovery\ program))' \
            && ret=0
          ;;
        (tcpip)
          _arguments \
            '1::port' \
            && ret=0
          ;;
        (ppp)
          # TODO Complete tty (See http://developer.android.com/guide/developing/tools/adb.html#commandsummary)
          # TODO Complete PPP parameters (See http://ppp.samba.org/pppd.html)
          _arguments \
            '1::tty' \
            '*::parameters' \
            && ret=0
          ;;
      esac
      ;;
  esac
  return ret
}
(( $+functions[_adb_cmds] )) ||
_adb_cmds() {
  # Offer every adb sub-command, grouped into the three categories used by
  # the adb documentation (general / device / scripting).
  local -a groups
  groups=(
    'general-commands:general command:_adb_general_cmds'
    'device-commands:device command:_adb_device_cmds'
    'scripting-commands:scripting command:_adb_scripting_cmds'
  )
  _alternative "${groups[@]}"
}
(( $+functions[_adb_general_cmds] )) ||
_adb_general_cmds() {
  # Commands that manage the adb connection itself rather than a device.
  local -a general_cmds
  general_cmds=(
    'help:show help message'
    'version:show version number'
    'devices:list all connected devices'
    'connect:connect to a device via TCP/IP'
    'disconnect:disconnect from a TCP/IP device'
  )
  _describe -t general-commands 'general command' general_cmds "$@"
}
(( $+functions[_adb_device_cmds] )) ||
_adb_device_cmds() {
  # Commands that operate on a connected device or emulator.
  # TYPO FIX: the install/uninstall descriptions read "padbage" — sed damage
  # from a pack->adb rename of "package".
  local commands; commands=(
    'push:copy file/dir to device'
    'pull:copy file/dir from device'
    'sync:copy host->device only if changed'
    'shell:run remote shell interactively or command'
    'emu:run emulator console command'
    'logcat:view device log'
    'forward:forward socket connections'
    'jdwp:list PIDs of processes hosting a JDWP transport'
    'install:push this package file to the device and install it'
    'uninstall:remove this app package from the device'
    'bugreport:return all information from the device'
  )
  _describe -t device-commands 'device command' commands "$@"
}
(( $+functions[_adb_scripting_cmds] )) ||
_adb_scripting_cmds() {
  # Commands intended for use from scripts (server lifecycle, state queries,
  # daemon restarts).
  local -a scripting_cmds
  scripting_cmds=(
    'wait-for-device:block until device is online'
    'start-server:ensure that there is a server running'
    'kill-server:kill the server if it is running'
    'get-state:prints\: offline | bootloader | device'
    'get-serialno:prints\: <serial-number>'
    'status-window:continuously print device status for a specified device'
    'remount:remounts the /system partition on the device read-write'
    'reboot:reboots the device, optionally into the bootloader or recovery program'
    'reboot-bootloader:reboots the device into the bootloader'
    'root:restarts the adbd daemon with root permissions'
    'usb:restarts the adbd daemon listening on USB'
    'tcpip:restarts the adbd daemon listening on TCP on the specified port'
    'ppp:run PPP over USB'
  )
  _describe -t scripting-commands 'scripting command' scripting_cmds "$@"
}
(( $+functions[_adb_products] )) ||
_adb_products() {
  # The -p argument accepts either a product name or an out-directory path.
  local -a sources
  sources=(
    'product-names:product name:_adb_product_names'
    'directories:directory:_files -/'
  )
  _alternative "${sources[@]}"
}
(( $+functions[_adb_product_names] )) ||
_adb_product_names() {
  # Complete product names; the only name we can know without a build tree
  # is the one exported in ANDROID_PRODUCT_OUT.
  local ret=1
  if [[ -z "$ANDROID_PRODUCT_OUT" ]]; then
    _message -e product-names 'product name' && ret=0
  else
    local -a product_names
    product_names=("$ANDROID_PRODUCT_OUT:default value set in ANDROID_PRODUCT_OUT environment variable")
    _describe -t product-names 'product name' product_names && ret=0
  fi
  return ret
}
(( $+functions[_adb_serial_numbers] )) ||
_adb_serial_numbers() {
  # Complete serial numbers: parse `adb devices` output (lines ending in
  # "device", keep the first whitespace-delimited field), plus the value of
  # ANDROID_SERIAL when it is set.
  # BUG FIX: `ret` was assigned without being declared local and never
  # returned; declare and propagate it like the sibling helpers do.
  local ret=1
  local serial_numbers; serial_numbers=(${${(M)${(f)"$(_call_program devices $service devices)"}:#*device}%%[[:space:]]*}":connected device")
  [[ -n "$ANDROID_SERIAL" ]] && serial_numbers+=("$ANDROID_SERIAL:default value set in ANDROID_SERIAL environment variable")
  _describe -t serial-numbers 'serial number' serial_numbers "$@" && ret=0
  return ret
}
(( $+functions[_adb_packages] )) ||
_adb_packages() {
  # Complete installed package names by listing /data/data on the device.
  # The expansion splits the remote output on CR/LF pairs and filters out
  # adb error lines of the form "* ... *"; _multi_parts then completes the
  # dotted package name one component at a time.
  local packages; packages=(${${(ps:\r\n:)"$(_call_program packages $service $adb_args shell 'ls /data/data 2>/dev/null')"}:#\**\*})
  _multi_parts . packages
}
(( $+functions[_adb_host_colon_ports] )) ||
_adb_host_colon_ports() {
  # Complete HOST:PORT for connect/disconnect: hosts before the colon,
  # a port hint after it.
  local ret=1
  if ! compset -P '*:'; then
    _wanted hosts expl 'host' _hosts -qS: && ret=0
  else
    _message -e ports 'port' && ret=0
  fi
  return ret
}
(( $+functions[_adb_remote_files] )) ||
_adb_remote_files() {
  # Complete file/directory paths on the device by running `ls` remotely.
  # When called with -/ only directories are offered; otherwise a second ls
  # is appended so plain files match too.
  local dirsonly command="ls -d ${(S)words[CURRENT]/\/*//}*/ 2>/dev/null"
  zparseopts -D -E '/=dirsonly'
  (( ! $#dirsonly )) && command+="; ls -d ${words[CURRENT]}* 2>/dev/null"
  # Split remote output on CR/LF and drop "* ... *" adb error lines.
  local files; files=(${${(ps:\r\n:)"$(_call_program files $service $adb_args shell "'$command'" 2>/dev/null)"}:#\**\*})
  _multi_parts "$@" / files
}
(( $+functions[_adb_remote_commands] )) ||
_adb_remote_commands() {
  # Complete commands available on the device by listing every directory in
  # the remote $PATH; "* ... *" adb error lines are filtered out.
  # BUG FIX: `ret` was assigned without being declared local and never
  # returned; declare and propagate it like the sibling helpers do.
  local ret=1
  local commands; commands=(${${(ps:\r\n:)"$(_call_program commands $service $adb_args shell "'IFS=:;for path_dir in \$PATH; do ls \$path_dir 2>/dev/null; done'" 2>/dev/null)"}:#\**\*})
  _describe -t remote-commands 'remote command' commands && ret=0
  return ret
}
(( $+functions[_adb_local_forward_specs] )) ||
_adb_local_forward_specs() {
  # Complete the local end of `adb forward` (MODE:VALUE): after the colon,
  # complete a value appropriate to the mode already typed; before it,
  # complete the mode names themselves.
  local ret=1
  if compset -P '*:'; then
    case ${IPREFIX%:} in
      (tcp)
        _message -e ports 'port' && ret=0
        ;;
      (localabstract|localreserved)
        _wanted sockets expl 'socket' _socket && ret=0
        ;;
      (localfilesystem)
        _wanted socket-files expl 'socket file' _files && ret=0
        ;;
      (dev)
        _wanted devices expl 'device' _files -g "/dev/**" && ret=0
        ;;
    esac
  else
    local modes; modes=(
      'tcp:TCP socket'
      'localabstract:local abstract socket'
      'localreserved:local reserved socket'
      'localfilesystem:local filesystem socket'
      'dev:device'
    )
    _describe -t forward-modes 'forward mode' modes -qS: && ret=0
  fi
  return ret
}
(( $+functions[_adb_remote_forward_specs] )) ||
_adb_remote_forward_specs() {
  # Complete the remote end of `adb forward`; mirrors
  # _adb_local_forward_specs but remote values can only be hinted, except
  # jdwp PIDs which we can query from the device.
  local ret=1
  if compset -P '*:'; then
    case ${IPREFIX%:} in
      (tcp)
        _message -e ports 'remote port' && ret=0
        ;;
      (localabstract|localreserved|localfilesystem)
        _message -e sockets 'remote socket' && ret=0
        ;;
      (dev)
        _message -e devices 'remote device' && ret=0
        ;;
      (jdwp)
        # `adb jdwp` prints one PID per line; drop "* ... *" error lines.
        local pids; pids=(${${(f)"$(_call_program pids $service $adb_args jdwp 2>/dev/null)"}:#\**\*})
        _describe -t remote-pids 'remote pid' pids && ret=0
        ;;
    esac
  else
    local modes; modes=(
      'tcp:TCP socket'
      'localabstract:local abstract socket'
      'localreserved:local reserved socket'
      'localfilesystem:local filesystem socket'
      'dev:device'
      'jdwp:Java Debug Wire Protocol'
    )
    _describe -t forward-modes 'forward mode' modes -qS: && ret=0
  fi
  return ret
}
(( $+functions[_adb_sync_directories] )) ||
_adb_sync_directories() {
  # `adb sync` accepts either a well-known partition name or a remote
  # directory path.
  local -a choices
  choices=(
    'partitions:partition:((system:the\ /system\ partition data:the\ /data\ partition))'
    'directories:directory:_adb_remote_files -/'
  )
  _alternative "${choices[@]}"
}
(( $+functions[_adb_logcat_filter_specs] )) ||
_adb_logcat_filter_specs() {
  # Complete logcat TAG:PRIORITY filter specs: priorities after the colon,
  # tags harvested from a log dump before it.
  local ret=1
  if compset -P '*:'; then
    local priorities; priorities=(
      'V:verbose (lowest priority)'
      'D:debug'
      'I:info'
      'W:warning'
      'E:error'
      'F:fatal'
      'S:silent (highest priority, on which nothing is ever printed)'
    )
    _describe -t log-priorities 'log priority' priorities "$@" && ret=0
  else
    # Extract unique tags from `logcat -d` output: strip everything from the
    # "(PID)" suffix on, strip the leading "PRIORITY/" prefix, and drop
    # "* ... *" error lines.
    local tags; tags=(${(u)${${${(f)"$(_call_program tags $service $adb_args logcat -d 2>/dev/null)"}%%[[:space:]]#\(*}##*\/}:#\**\*})
    _describe -t log-tags 'log tag' tags -qS: "$@" && ret=0
  fi
  return ret
}
(( $+functions[_adb_logcat_output_formats] )) ||
_adb_logcat_output_formats() {
  # Complete the argument of `logcat -v`.
  # BUG FIX: `ret` was assigned without a local declaration, so under zsh's
  # dynamic scoping it clobbered the caller's `ret`; declare it locally and
  # return it explicitly, matching the sibling helpers.
  local ret=1
  local formats; formats=(
    'brief:display priority/tag and PID of originating process (the default format)'
    'process:display PID only'
    'tag:display the priority/tag only'
    'thread:display process:thread and priority/tag only'
    'raw:display the raw log message, with no other metadata fields'
    'time:display the date, invocation time, priority/tag, and PID of the originating process'
    'long:display all metadata fields and separate messages with a blank lines'
  )
  _describe -t log-formats 'log format' formats "$@" && ret=0
  return ret
}
(( $+functions[_adb_logcat_buffers] )) ||
_adb_logcat_buffers() {
  # Complete the argument of `logcat -b`.
  # BUG FIX: `ret` was assigned without a local declaration, so under zsh's
  # dynamic scoping it clobbered the caller's `ret`; declare it locally and
  # return it explicitly, matching the sibling helpers.
  local ret=1
  local buffers; buffers=(
    'main:view the main log buffer (default)'
    'radio:view the buffer that contains radio/telephony related messages'
    'events:view the buffer containing events-related messages'
  )
  _describe -t log-buffers 'log buffer' buffers "$@" && ret=0
  return ret
}
(( $+functions[_adb_remote_command_default] )) ||
_adb_remote_command_default() {
  # Fallback argument completion for remote commands that have no dedicated
  # _adb_remote_command_<name> handler: offer remote file names.
  _wanted remote-files expl 'remote file' _adb_remote_files
}
# Invoke the entry point with the words being completed.
_adb "$@"
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et

View File

@ -1,237 +0,0 @@
#compdef beet
# zsh completion for beets music library manager and MusicBrainz tagger: https://beets.io/

# Default values for BEETS_LIBRARY & BEETS_CONFIG needed for the cache checking function.
# They will be updated under the assumption that the config file is in the same directory as the library.
local BEETS_LIBRARY=~/.config/beets/library.db
local BEETS_CONFIG=~/.config/beets/config.yaml

# Use separate caches for file locations, command completions, and query completions.
# This allows the use of different rules for when to update each one.
# _beet_check_cache (below) decides when each named cache is stale.
zstyle ":completion:${curcontext}:" cache-policy _beet_check_cache
_beet_check_cache () {
  # Cache-policy hook: given a cache file path in $1, return 0 when that
  # cache must be rebuilt and non-zero when it is still fresh.
  local cachefile="$(basename ${1})"
  if [[ ! -a "${1}" ]] || [[ "${1}" -ot =beet ]]; then
    # always update the cache if it doesn't exist, or if the beet executable changes
    return 0
  fi
  # BUG FIX: the original read `case cachefile; in`, dispatching on the
  # literal string "cachefile", so neither branch below could ever match and
  # per-cache staleness rules were dead code. Dispatch on the variable.
  case "${cachefile}" in
    (beetslibrary)
      # Stale when the library db is gone or the config is newer than the cache.
      if [[ ! -a "${~BEETS_LIBRARY}" ]] || [[ "${1}" -ot "${~BEETS_CONFIG}" ]]; then
        return 0
      fi
      ;;
    (beetscmds)
      _retrieve_cache beetslibrary
      if [[ "${1}" -ot "${~BEETS_CONFIG}" ]]; then
        return 0
      fi
      ;;
  esac
  return 1
}
# useful: argument to _regex_arguments for matching any word
local matchany=/$'[^\0]##\0'/
# arguments to _regex_arguments for completing files and directories
local -a files dirs
files=("$matchany" ':file:file:_files')
dirs=("$matchany" ':dir:directory:_dirs')

# Retrieve or update caches
# When the library-location cache is stale, re-read the db/config paths from
# `beet config`, keeping the defaults above as fallbacks.
if ! _retrieve_cache beetslibrary || _cache_invalid beetslibrary; then
local BEETS_LIBRARY="${$(beet config|grep library|cut -f 2 -d ' '):-${BEETS_LIBRARY}}"
local BEETS_CONFIG="${$(beet config -p):-${BEETS_CONFIG}}"
_store_cache beetslibrary BEETS_LIBRARY BEETS_CONFIG
fi
# Build (or reuse) the cached command/query completion machinery.
if ! _retrieve_cache beetscmds || _cache_invalid beetscmds; then
local -a subcommands fields beets_regex_words_subcmds beets_regex_words_help query modify
local subcmd cmddesc matchquery matchmodify field fieldargs queryelem modifyelem
# Useful function for joining grouped lines of output into single lines (taken from _completion_helpers)
_join_lines() {
awk -v SEP="$1" -v ARG2="$2" -v START="$3" -v END2="$4" 'BEGIN {if(START==""){f=1}{f=0};
if(ARG2 ~ "^[0-9]+"){LINE1 = "^[[:space:]]{,"ARG2"}[^[:space:]]"}else{LINE1 = ARG2}}
($0 ~ END2 && f>0 && END2!="") {exit}
($0 ~ START && f<1) {f=1; if(length(START)!=0){next}}
($0 ~ LINE1 && f>0) {if(f<2){f=2; printf("%s",$0)}else{printf("\n%s",$0)}; next}
(f>1) {gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); printf("%s%s",SEP, $0); next}
END {print ""}'
}
# Variables used for completing subcommands and queries
subcommands=(${${(f)"$(beet help | _join_lines ' ' 3 'Commands:')"}[@]})
fields=($(beet fields | grep -G '^ ' | sort -u | colrm 1 2))
# Build a _values spec fragment per field so queries can complete values.
for field in "${fields[@]}"
do
fieldargs="$fieldargs '$field:::{_beet_field_values $field}'"
done
queryelem="_values -S : 'query field (add an extra : to match by regexp)' '::' $fieldargs"
modifyelem="_values -S = 'modify field (replace = with ! to remove field)' $(echo "'${^fields[@]}:: '")"
# regexps for matching query and modify terms on the command line
matchquery=/"(${(j/|/)fields[@]})"$':[^\0]##\0'/
matchmodify=/"(${(j/|/)fields[@]})"$'(=[^\0]##|!)\0'/
# create completion function for queries
_regex_arguments _beet_query "$matchany" \# \( "$matchquery" ":query:query string:$queryelem" \) \#
# Capture the generated function's source so it can be cached and re-eval'd.
local "beets_query"="$(which _beet_query)"
# arguments for _regex_arguments for completing lists of queries and modifications
beets_query_args=( \( "$matchquery" ":query:query string:{_beet_query}" \) \# )
beets_modify_args=( \( "$matchmodify" ":modify:modify string:$modifyelem" \) \# )
# now build arguments for _beet and _beet_help completion functions
beets_regex_words_subcmds=('(')
for i in ${subcommands[@]}; do
subcmd="${i[(w)1]}"
# remove first word and parenthesised alias, replace : with -, [ with (, ] with ), and remove single quotes
cmddesc="${${${${${i[(w)2,-1]##\(*\) #}//:/-}//\[/(}//\]/)}//\'/}"
# update arguments needed for creating _beet
beets_regex_words_subcmds+=(/"${subcmd}"$'\0'/ ":subcmds:subcommands:((${subcmd}:${cmddesc// /\ }))")
beets_regex_words_subcmds+=(\( "${matchany}" ":option:option:{_beet_subcmd ${subcmd}}" \) \# \|)
# update arguments needed for creating _beet_help
beets_regex_words_help+=("${subcmd}:${cmddesc}")
done
beets_regex_words_subcmds[-1]=')'
_store_cache beetscmds beets_regex_words_subcmds beets_regex_words_help beets_query_args beets_modify_args beets_query
else
# Evaluate the variable containing the query completer function
eval "${beets_query}"
fi
# Function for getting unique values for field from database (you may need to change the path to the database).
_beet_field_values() {
# Complete the distinct values stored for field $1 in the beets sqlite db.
local -a output fieldvals
local sqlcmd="select distinct $1 from items;"
_retrieve_cache beetslibrary
case ${1}
in
lyrics)
# Deliberately offer nothing for lyrics (values are huge free text).
# NOTE(review): this is a scalar assignment, not `fieldvals=()`; it leaves
# a single empty element semantics — confirm it is intentional.
fieldvals=
;;
*)
# Fall back to flexible attributes when the column is not in `items`.
if [[ "$(sqlite3 ${~BEETS_LIBRARY} ${sqlcmd} 2>&1)" =~ "no such column" ]]; then
sqlcmd="select distinct value from item_attributes where key=='$1' and value!='';"
fi
output="$(sqlite3 ${~BEETS_LIBRARY} ${sqlcmd} 2>/dev/null | sed -rn '/^-+$/,${{/^[- ]+$/n};p}')"
fieldvals=("${(f)output[@]}")
;;
esac
# Quote each value (values may contain spaces) and match case-insensitively.
compadd -P \" -S \" -M 'm:{[:lower:][:upper:]}={[:upper:][:lower:]}' -Q -a fieldvals
}
# This function takes a beet subcommand as its first argument, and then uses _regex_words to set ${reply[@]}
# to an array containing arguments for the _regex_arguments function.
_beet_subcmd_options() {
  # Parse `beet help $1` option listings into _regex_words specs.
  # BUG FIX: declared `local shortopt` but assigned the undeclared `opt`
  # (leaking it into the caller); declare the name actually used.
  local opt optarg optdesc
  local matchany=/$'[^\0]##\0'/
  local -a regex_words
  regex_words=()
  for i in ${${(f)"$(beet help $1 | awk '/^ +-/{if(x)print x;x=$0;next}/^ *$/{if(x) exit}{if(x) x=x$0}END{print x}')"}[@]}
  do
    opt="${i[(w)1]/,/}"
    optarg="${${${i## #[-a-zA-Z]# }##[- ]##*}%%[, ]*}"
    optdesc="${${${${${i[(w)2,-1]/[A-Z, ]#--[-a-z]##[=A-Z]# #/}//:/-}//\[/(}//\]/)}//\'/}"
    # Choose a completion for the option's argument from its metavar name.
    case $optarg in
      ("")
        if [[ "$1" == "import" && "$opt" == "-L" ]]; then
          regex_words+=("$opt:$optdesc:\${beets_query_args[@]}")
        else
          regex_words+=("$opt:$optdesc")
        fi
        ;;
      (LOG)
        local -a files
        files=("$matchany" ':file:file:_files')
        regex_words+=("$opt:$optdesc:\$files")
        ;;
      (CONFIG)
        local -a configfile
        configfile=("$matchany" ':file:config file:{_files -g *.yaml}')
        regex_words+=("$opt:$optdesc:\$configfile")
        ;;
      (LIB|LIBRARY)
        local -a libfile
        libfile=("$matchany" ':file:database file:{_files -g *.db}')
        regex_words+=("$opt:$optdesc:\$libfile")
        ;;
      (DIR|DIRECTORY)
        local -a dirs
        dirs=("$matchany" ':dir:directory:_dirs')
        regex_words+=("$opt:$optdesc:\$dirs")
        ;;
      (SOURCE)
        # BUG FIX: the original used `[[ "${1}" -eq lastgenre ]]`; -eq forces
        # arithmetic evaluation of both strings (non-numeric strings evaluate
        # to 0), so the test was true for almost any subcommand. Use string
        # comparison.
        if [[ "${1}" == lastgenre ]]; then
          local -a lastgenresource
          lastgenresource=(/$'(artist|album|track)\0'/ ':source:genre source:(artist album track)')
          regex_words+=("$opt:$optdesc:\$lastgenresource")
        else
          regex_words+=("$opt:$optdesc:\$matchany")
        fi
        ;;
      (*)
        regex_words+=("$opt:$optdesc:\$matchany")
        ;;
    esac
  done
  # Leave the assembled specs in ${reply[@]} for the caller.
  _regex_words options "$1 options" "${regex_words[@]}"
}
## Function for completing subcommands. It calls another completion function which is first created if it doesn't already exist.
_beet_subcmd() {
# Generate (or restore from cache) and invoke _beet_<subcmd>.
local -a options
local subcmd="${1}"
if [[ ! $(type _beet_${subcmd} | grep function) =~ function ]]; then
if ! _retrieve_cache "beets${subcmd}" || _cache_invalid "beets${subcmd}"; then
local matchany=/$'[^\0]##\0'/
local -a files
files=("$matchany" ':file:file:_files')
# get arguments for completing subcommand options
_beet_subcmd_options "$subcmd"
options=("${reply[@]}" \#)
_retrieve_cache beetscmds
# Each arm defines _beet_<subcmd> with the argument shape that command takes.
case ${subcmd}; in
(import)
_regex_arguments _beet_import "${matchany}" /"${subcmd}"$'\0'/ "${options[@]}" "${files[@]}" \#
;;
(modify)
_regex_arguments _beet_modify "${matchany}" /"${subcmd}"$'\0'/ "${options[@]}" \
"${beets_query_args[@]}" "${beets_modify_args[@]}"
;;
(fields|migrate|version|config)
_regex_arguments _beet_${subcmd} "${matchany}" /"${subcmd}"$'\0'/ "${options[@]}"
;;
(help)
_regex_words subcmds "subcommands" "${beets_regex_words_help[@]}"
_regex_arguments _beet_help "${matchany}" /$'help\0'/ "${options[@]}" "${reply[@]}"
;;
(*) # Other commands have options followed by a query
_regex_arguments _beet_${subcmd} "${matchany}" /"${subcmd}"$'\0'/ "${options[@]}" "${beets_query_args[@]}"
;;
esac
# Store completion function in a cache file
local "beets_${subcmd}"="$(which _beet_${subcmd})"
_store_cache "beets${subcmd}" "beets_${subcmd}"
else
# Evaluate the function which is stored in $beets_${subcmd}
local var="beets_${subcmd}"
eval "${(P)var}"
fi
fi
_beet_${subcmd}
}
# Global options
local -a globalopts
_regex_words options "global options" '-c:path to configuration file:$files' '-v:print debugging information' \
'-l:library database file to use:$files' '-h:show this help message and exit' '-d:destination music directory:$dirs'
globalopts=("${reply[@]}")
# Create main completion function
# (global options may appear before the subcommand; subcommand specs were
# assembled into beets_regex_words_subcmds above)
_regex_arguments _beet "$matchany" \( "${globalopts[@]}" \# \) "${beets_regex_words_subcmds[@]}"
# Set tag-order so that options are completed separately from arguments
zstyle ":completion:${curcontext}:" tag-order '! options'
# Execute the completion function
_beet "$@"
# Local Variables:
# mode:shell-script
# End:

View File

@ -0,0 +1,65 @@
#compdef debuild
# ------------------------------------------------------------------------------
# Copyright (c) 2011 Github zsh-users - http://github.com/zsh-users
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the zsh-users nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for debuild 2.10.
#
# Status: incomplete.
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Julien Nicoulaud <julien.nicoulaud@gmail.com>
#
# ------------------------------------------------------------------------------
# FIXME --noconf is only allowed in first position
# Flat spec for debuild's own (devscripts) options; remaining words are
# passed through to dpkg-buildpackage / lintian.
# BUG FIX: the --set-envvar description was a copy-paste of the
# --preserve-envvar line ("preserve environment variable"); it sets one.
_arguments \
  '(- 1 *)'{-h,--help}'[show help]' \
  '(- 1 *)--version[show version and copyright information]' \
  {--no-conf,--noconf}'[don'\''t read devscripts config files]' \
  {-r-,--rootcmd=}'[command used to become root if debuild not setuid root (default: fakeroot)]: :_command_names' \
  '*'{-e-,--preserve-envvar=}'[preserve environment variable]: :_vars' \
  '(-e --preserve-envvar)--preserve-env[preserve all environment vars (except PATH)]' \
  '*'{-e-,--set-envvar=}'[set environment variable]: :_vars -qS=' \
  '--prepend-path=[prepend to the sanitised PATH]: :_files -/' \
  '(-D)-d[skip checking of build dependencies]' \
  '(-d)-D[force checking of build dependencies]' \
  '--check-dirname-level[how much to check directory names]:level:((0\:never 1\:only\ if\ program\ changes\ directory\ \(default\) 2\:always))' \
  '--check-dirname-regex[Perl regex defining matching directory names, the string PACKAGE will be replaced by the package name (default: '\''PACKAGE(-.+)?'\'')]:regex'
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et

13090
zsh.d/completions/_kubectl Normal file

File diff suppressed because it is too large Load Diff

125
zsh.d/completions/_pip Normal file
View File

@ -0,0 +1,125 @@
#compdef pip
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for pip (http://pypi.python.org/pypi/pip).
#
# Source: https://github.com/technolize/zsh-completion-funcs
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * technolize (https://github.com/technolize)
#
# ------------------------------------------------------------------------------
local ret=1 state
local -a common_ops
# Options accepted by every pip subcommand.
common_ops=(
  "--version[display version number]"
  {-h,--help}"[show help]"
  {-E,--environment=}"[virtualenv environment to run pip]:environment:_directories"
  {-s,--enable-site-packages}"[include site-packages in virtualenv]"
  {-v,--verbose}"[give more output]"
  {-q,--quiet}"[give less output]"
  "--log=[log file where a complete record will be kept]"
  "--proxy=[specify a proxy in the form user:passwd@proxy.server:port]:proxy"
  "--timeout=[set the socket timeout (default 15 seconds)]:second"
)
# Helper: complete directory paths for options taking a DIR argument.
# NOTE(review): newer zsh ships its own `_directories`; this local definition
# shadows it during pip completion — confirm that is intended.
_directories () {
  _wanted directories expl directory _path_files -/ "$@" -
}
typeset -A opt_args
# First word is the subcommand; later words are handled in the ->options state.
_arguments \
  ':subcommand:->subcommand' \
  $common_ops \
  '*::options:->options' && ret=0
# Dispatch on the _arguments state: complete the subcommand name, or the
# options belonging to the subcommand already on the line.
case $state in
  subcommand)
    local -a subcommands
    # TYPO FIX: "undividual"/"dividual" -> "individual" in the zip/unzip
    # descriptions.
    subcommands=(
      "bundle:create pybundle"
      "freeze:put all currently installed packages"
      "help:show available commands"
      "install:install packages"
      "search:search pypi"
      "uninstall:uninstall packages"
      "unzip:unzip individual packages"
      "zip:zip individual packages"
    )
    _describe -t subcommands 'pip subcommand' subcommands && ret=0
    ;;
  options)
    local -a args
    args=(
      $common_ops
    )
    local -a requirement
    requirement=(
      {-r,--requirement=}"[install all the packages listed in the given requirements file]:filename:_files"
    )
    local -a findlink
    findlink=(
      {-f,--find-links=}"[URL to look for packages at]:url"
    )
    case $words[1] in
      bundle | install)
        args+=(
          {-e,--editable=}"[install a package directly from a checkout]:VCS+REPOS_URL[@REV]#egg=PACKAGE"
          $requirement
          $findlink
          {-i,--index-url=,--pypi-url=}"[base URL of Python Package Index]:URL"
          "--extra-index-url=[extra URLs of package indexes to use]:URL"
          {-b,--build=,--build-dir=}"[unpack packages into DIR]:directory:_directories"
          {--src=,--source=}"[check out --editable packages into DIR]:directory:_directories"
          {-U,--upgrade}"[upgrade all packages to the newest available version]"
          {-I,--ignore-installed}"[ignore the installed packages]"
          # BUG FIX: pip's option is spelled --no-install; the old
          # "--noinstall" spec completed an option pip rejects.
          "--no-install[download and unpack all packages, but don't actually install them]"
          "--install-option=[extra arguments to be supplied to the setup.py install command]"
        )
        ;;
      freeze)
        args+=(
          $requirement
          $findlink
        )
        ;;
      unzip | zip)
        args+=(
          "--unzip[unzip a package]"
          "--no-pyc[do not include .pyc files in zip files]"
          {-l,--list}"[list the packages available, and their zip status]"
          "--sort-files[with --list, sort packages according to how many files they contain]"
          "--path=[restrict operation to the given paths]:paths"
          {-n,--simulate}"[do not actually perform the zip/unzip operation]"
        )
        ;;
    esac
    _arguments $args && ret=0
    ;;
esac
return ret
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et

117
zsh.d/completions/_salt Normal file
View File

@ -0,0 +1,117 @@
#compdef salt
# ------------------------------------------------------------------------------
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the zsh-users nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for salt (http://saltstack.com/).
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Massimiliano Torromeo <massimiliano.torromeo@gmail.com>
#
# ------------------------------------------------------------------------------
_minions() {
ls /etc/salt/pki/master/minions | while read minion; do
_wanted 'target' expl 'target' compadd $minion
done
}
_modules() {
  # Complete execution-module names reported by the local minion.
  # BUG FIX: the original had `2&>/dev/null`, which passes a literal "2" as
  # an argument and redirects BOTH stdout and stderr to /dev/null — so the
  # module list was always discarded; `2>/dev/null` silences stderr only.
  local module
  for module in $(salt-call sys.list_modules 2>/dev/null --output=key | tail -n+2); do
    _wanted 'module' expl 'module' compadd $module
  done
}
_functions() {
  # Complete fully-qualified function names (module.function) from the minion.
  # BUG FIX: same `2&>/dev/null` defect as _modules — it silenced stdout too,
  # so no functions were ever offered; use `2>/dev/null`.
  local fn
  for fn in $(salt-call sys.list_functions 2>/dev/null --output=key | tail -n+2); do
    _wanted 'function' expl 'function' compadd $fn
  done
}
_salt() {
  # Complete the salt CLI: options, then target (minion) and function.
  local _loglevels
  _loglevels=(all garbage trace debug info warning error quiet)
  # TYPO FIX: the eauth spec read `--extended--auth=` (double dash); the
  # exclusion group on the same line names --extended-auth, proving intent.
  _arguments -s \
    "--version[show program's version number and exit]" \
    "--versions-report[show program's dependencies version number and exit]" \
    "(-h --help)"{-h,--help}"[show help message and exit]" \
    "(-c --config-dir)"{-c+,--config-dir=}"[Pass in an alternative configuration directory.]:configuration directory:_files -/" \
    "(-t --timeout)"{-t+,--timeout=}"[Change the timeout, if applicable, for the running command]:timeout" \
    "(-s --static)"{-s,--static}"[Return the data from minions as a group after they all return]" \
    "--async[Run the salt command but don't wait for a reply]" \
    "(--state-output --state_output)"{--state-output=,--state_output=}"[Override the configured state_output value for minion output]:state output" \
    "--subset=[Execute the routine on a random subset of the targeted minions]:subset" \
    "(-v --verbose)"{-v,--verbose}"[Turn on command verbosity, display jid and active job queries]" \
    "--show-timeout[Display minions that timeout]" \
    "(-b --batch --batch-size)"{-b+,--batch=,--batch-size=}"[Execute the salt job in batch mode]:batch" \
    "(-a --auth --eauth --extended-auth)"{-a+,--auth=,--eauth=,--extended-auth=}"[Specify an extended authentication system to use]:eauth" \
    "(-T --make-token)"{-T,--make-token}"[Generate and save an authentication token for re-use]" \
    "--return=[Set an alternative return method]:returner" \
    "(-d --doc --documentation)"{-d,--doc,--documentation}"[Return the documentation for the specified module or for all modules]::function:_functions" \
    "--args-separator=[Set the special argument used as a delimiter between command arguments of compound commands]:args separator" \
    \
    "(-l --log-level)"{-l+,--log-level=}"[Console logging log level]:level:($_loglevels[@])" \
    "--log-file=[Log file path]:log file:_files" \
    "--log-file-level=[Logfile logging log level]:level:($_loglevels[@])" \
    \
    "(-E --pcre)"{-E,--pcre}"[Target servers using pcre regular expressions]" \
    "(-L --list)"{-L,--list}"[Target servers using a comma or space delimited list of servers]" \
    "(-G --grain)"{-G,--grain}"[Target servers using a grain value]" \
    "--grain-pcre[Target servers using a grain value matched by a pcre regular expression]" \
    "(-N --nodegroup)"{-N,--nodegroup}"[Target servers using a predefined nodegroup]" \
    "(-R --range)"{-R,--range}"[Target servers using range expression]" \
    "(-C --compound)"{-C,--compound}"[Target servers using compound selectors]" \
    "(-X --exsel)"{-X,--exsel}"[Target servers using the return code of a function]" \
    "(-I --pillar)"{-I,--pillar}"[Target servers using a pillar value]" \
    "(-S --ipcidr)"{-S,--ipcidr}"[Match servers based on subnet (CIDR or IPv4 address)]" \
    \
    "(--out --output)"{--out=,--output=}"[Print the output using the specified outputter]:outputter:(no_return grains yaml overstatestage json pprint nested raw highstate quiet key txt virt_query)" \
    "(--out-indent --output-indent)"{--out-indent=,--output-indent=}"[Print the output indented by the provided value in spaces]:nr spaces" \
    "(--out-file --output-file)"{--out-file=,--output-file=}"[Write the output to the specified file]:output:_files" \
    "(--no-color --no-colour)"{--no-color,--no-colour}"[Disable all colored output]" \
    "(--force-color --force-colour)"{--force-color,--force-colour}"[Force colored output]" \
    \
    '1:target:_minions' \
    '2:function:_functions' \
    '*::arguments'
}
# Dispatch on the command name being completed.
case "$service" in
  salt)
    # BUG FIX: the original called `_salt "@"`, passing the literal string
    # "@" instead of forwarding the completion arguments.
    _salt "$@"
    ;;
esac
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et

708
zsh.d/completions/_setup.py Normal file
View File

@ -0,0 +1,708 @@
#compdef setup.py
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for setup.py (http://docs.python.org/distutils/).
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Hideo Hattori (https://github.com/hhatto)
#
# ------------------------------------------------------------------------------
# Top-level completion entry point for `setup.py`.
# Offers the global distutils/setuptools options; the trailing
# `*::...:_setuppy_command` spec delegates everything after the first
# non-option word to per-subcommand completion functions.
_setup.py() {
typeset -A opt_args
local context state line

# -s: allow single-letter option stacking; -S: stop at `--`.
_arguments -s -S \
"--verbose[run verbosely (default)]" \
"-v[run verbosely (default)]" \
"--quiet[run quietly (turns verbosity off)]" \
"-q[run quietly (turns verbosity off)]" \
"--dry-run[don't actually do anything]" \
"-n[don't actually do anything]" \
"--help[show detailed help message]" \
"-h[show detailed help message]" \
"--no-user-cfg[ignore pydistutils.cfg in your home directory]" \
"--command-packages=[list of packages that provide distutils commands]" \
"--help-commands[list all available commands]" \
"--name[print package name]" \
"--version[print package version]" \
"-V[print package version]" \
"--fullname[print <package name>-<version>]" \
"--author[print the author's name]" \
"--author-email[print the author's email address]" \
"--maintainer[print the maintainer's name]" \
"--maintainer-email[print the maintainer's email address]" \
"--contact[print the maintainer's name if known, else the author's]" \
"--contact-email[print the maintainer's email address if known, else the author's]" \
"--url[print the URL for this package]" \
"--license[print the license of the package]" \
"--licence[alias for --license]" \
"--description[print the package description]" \
"--long-description[print the long package description]" \
"--platforms[print the list of platforms]" \
"--classifiers[print the list of classifiers]" \
"--keywords[print the list of keywords]" \
"--provides[print the list of packages/modules provided]" \
"--requires[print the list of packages/modules required]" \
"--obsoletes[print the list of packages/modules made obsolete]" \
"*::setup.py commands:_setuppy_command"
}
# Complete the setup.py subcommand word, or hand off to the matching
# _setuppy_<cmd> function once a subcommand has been typed.
(( $+functions[_setuppy_command] )) ||
_setuppy_command() {
local cmd ret=1
# BUG FIX: the guard used to test $+setuppy_cmds (no underscore) while
# assigning _setuppy_cmds, so the memoization never took effect and the
# table was rebuilt on every completion attempt. Test the same name.
(( $+_setuppy_cmds )) || _setuppy_cmds=(
"build:build everything needed to install" \
"build_py:\"build\" pure Python modules (copy to build directory)" \
"build_ext:build C/C++ extensions (compile/link to build directory)" \
"build_clib:build C/C++ libraries used by Python extensions" \
"build_scripts:\"build\" scripts (copy and fixup #! line)" \
"clean:clean up temporary files from 'build' command" \
"install:install everything from build directory" \
"install_lib:install all Python modules (extensions and pure Python)" \
"install_headers:install C/C++ header files" \
"install_scripts:install scripts (Python or otherwise)" \
"install_data:install data files" \
"sdist:create a source distribution (tarball, zip file, etc.)" \
"register:register the distribution with the Python package index" \
"bdist:create a built (binary) distribution" \
"bdist_dumb:create a \"dumb\" built distribution" \
"bdist_rpm:create an RPM distribution" \
"bdist_wininst:create an executable installer for MS Windows" \
"upload:upload binary package to PyPI" \
"check:perform some checks on the package" \
"alias:define a shortcut to invoke one or more commands" \
"bdist_egg:create an \"egg\" distribution" \
"develop:install package in 'development mode'" \
"easy_install:Find/get/install Python packages" \
"egg_info:create a distribution's .egg-info directory" \
"rotate:delete older distributions, keeping N newest files" \
"saveopts:save supplied options to setup.cfg or other config file" \
"setopt:set an option in setup.cfg or another config file" \
"test:run unit tests after in-place build" \
"install_egg_info:Install an .egg-info directory for the package" \
"upload_docs:Upload documentation to PyPI" \
)
if (( CURRENT == 1 )); then
# First word after setup.py: offer the subcommand table.
# NOTE(review): _setuppy_syns is referenced but never defined in this
# file, so the compadd fallback expands to nothing — confirm upstream.
_describe -t commands 'setup.py subcommand' _setuppy_cmds || compadd "$@" - ${(s.:.)${(j.:.)_setuppy_syns}}
else
local curcontext="$curcontext"
# Resolve the typed word to a known command (or a synonym).
cmd="${${_setuppy_cmds[(r)$words[1]:*]%%:*}:-${(k)_setuppy_syns[(r)(*:|)$words[1](:*|)]}}"
if (( $#cmd )); then
curcontext="${curcontext%:*:*}:setuppy-${cmd}:"
_call_function ret _setuppy_$cmd || _message 'no more arguments'
else
_message "unknown setup.py command: $words[1]"
fi
return ret
fi
}
# Options for `setup.py build` (build everything into the build tree).
(( $+functions[_setuppy_build] )) ||
_setuppy_build() {
_arguments -s \
"--build-base=[base directory for build library]" \
"-b[base directory for build library]" \
"--build-purelib=[build directory for platform-neutral distributions]" \
"--build-platlib=[build directory for platform-specific distributions]" \
"--build-lib=[build directory for all distribution (defaults to either build-purelib or build-platlib]" \
"--build-scripts=[build directory for scripts]" \
"--build-temp=[temporary build directory]" \
"-t[temporary build directory]" \
"--plat-name=[platform name to build for, if supported (default: linux-i686)]" \
"-p[platform name to build for, if supported (default: linux-i686)]" \
"--compiler=[specify the compiler type]" \
"-c[specify the compiler type]" \
"--debug[compile extensions and libraries with debugging information]" \
"-g[compile extensions and libraries with debugging information]" \
"--force[forcibly build everything (ignore file timestamps)]" \
"-f[forcibly build everything (ignore file timestamps)]" \
"--executable=[specify final destination interpreter path (build.py)]" \
"-e[specify final destination interpreter path (build.py)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py build_py` (copy/compile pure Python modules).
(( $+functions[_setuppy_build_py] )) ||
_setuppy_build_py() {
_arguments -s \
"--build-lib=[directory to \"build\" (copy) to]" \
"-d[directory to \"build\" (copy) to]" \
"--compile[compile .py to .pyc]" \
"-c[compile .py to .pyc]" \
"--no-compile[don't compile .py files \[default\]]" \
"--optimize=[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"-O[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"--force[forcibly build everything (ignore file timestamps)]" \
"-f[forcibly build everything (ignore file timestamps)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py build_ext` (compile/link C/C++ extension modules).
(( $+functions[_setuppy_build_ext] )) ||
_setuppy_build_ext() {
_arguments -s \
"--build-lib=[directory for compiled extension modules]" \
"-b[directory for compiled extension modules]" \
"--build-temp=[directory for temporary files (build by-products)]" \
"-t[directory for temporary files (build by-products)]" \
"--plat-name=[platform name to cross-compile for, if supported (default: linux-i686)]" \
"-p[platform name to cross-compile for, if supported (default: linux-i686)]" \
"--inplace[ignore build-lib and put compiled extensions into the source directory alongside your pure Python modules]" \
"-i[ignore build-lib and put compiled extensions into the source directory alongside your pure Python modules]" \
"--include-dirs=[list of directories to search for header files (separated by ':')]" \
"-I[list of directories to search for header files (separated by ':')]" \
"--define=[C preprocessor macros to define]" \
"-D[C preprocessor macros to define]" \
"--undef=[C preprocessor macros to undefine]" \
"-U[C preprocessor macros to undefine]" \
"--libraries=[external C libraries to link with]" \
"-l[external C libraries to link with]" \
"--library-dirs=[directories to search for external C libraries (separated by ':')]" \
"-L[directories to search for external C libraries (separated by ':')]" \
"--rpath=[directories to search for shared C libraries at runtime]" \
"-R[directories to search for shared C libraries at runtime]" \
"--link-objects=[extra explicit link objects to include in the link]" \
"-O[extra explicit link objects to include in the link]" \
"--debug[compile/link with debugging information]" \
"-g[compile/link with debugging information]" \
"--force[forcibly build everything (ignore file timestamps)]" \
"-f[forcibly build everything (ignore file timestamps)]" \
"--compiler=[specify the compiler type]" \
"-c[specify the compiler type]" \
"--swig-cpp[make SWIG create C++ files (default is C)]" \
"--swig-opts=[list of SWIG command line options]" \
"--swig=[path to the SWIG executable]" \
"--user[add user include, library and rpath]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py build_clib` (build helper C/C++ static libraries).
(( $+functions[_setuppy_build_clib] )) ||
_setuppy_build_clib() {
_arguments -s \
"--build-clib=[directory to build C/C++ libraries to]" \
"-b[directory to build C/C++ libraries to]" \
"--build-temp=[directory to put temporary build by-products]" \
"-t[directory to put temporary build by-products]" \
"--debug[compile with debugging information]" \
"-g[compile with debugging information]" \
"--force[forcibly build everything (ignore file timestamps)]" \
"-f[forcibly build everything (ignore file timestamps)]" \
"--compiler=[specify the compiler type]" \
"-c[specify the compiler type]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py build_scripts` (copy scripts, fix up #! lines).
(( $+functions[_setuppy_build_scripts] )) ||
_setuppy_build_scripts() {
_arguments -s \
"--build-dir=[directory to \"build\" (copy) to]" \
"-d[directory to \"build\" (copy) to]" \
"--force[forcibly build everything (ignore file timestamps]" \
"-f[forcibly build everything (ignore file timestamps]" \
"--executable=[specify final destination interpreter path]" \
"-e[specify final destination interpreter path]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py clean` (remove build by-products).
(( $+functions[_setuppy_clean] )) ||
_setuppy_clean() {
_arguments -s \
"--build-base=[base build directory (default: 'build.build-base')]" \
"-b[base build directory (default: 'build.build-base')]" \
"--build-lib=[build directory for all modules (default: 'build.build-lib')]" \
"--build-temp=[temporary build directory (default: 'build.build-temp')]" \
"-t[temporary build directory (default: 'build.build-temp')]" \
"--build-scripts=[build directory for scripts (default: 'build.build-scripts')]" \
"--bdist-base=[temporary directory for built distributions]" \
"--all[remove all build output, not just temporary by-products]" \
"-a[remove all build output, not just temporary by-products]" \
"*::setup.py commands:_setuppy"
}
# Options for `setup.py install` (install everything from the build tree).
# NOTE(review): the --user description hard-codes the path of the machine
# this file was generated on ('/home/hattori/...') — cosmetic only.
(( $+functions[_setuppy_install] )) ||
_setuppy_install() {
_arguments -s \
"--prefix=[installation prefix]" \
"--exec-prefix=[(Unix only) prefix for platform-specific files]" \
"--home=[(Unix only) home directory to install under]" \
"--user[install in user site-package '/home/hattori/.local/lib/python2.7/site-packages']" \
"--install-base=[base installation directory (instead of --prefix or --home)]" \
"--install-platbase=[base installation directory for platform-specific files (instead of --exec-prefix or --home)]" \
"--root=[install everything relative to this alternate root directory]" \
"--install-purelib=[installation directory for pure Python module distributions]" \
"--install-platlib=[installation directory for non-pure module distributions]" \
"--install-lib=[installation directory for all module distributions (overrides --install-purelib and --install-platlib)]" \
"--install-headers=[installation directory for C/C++ headers]" \
"--install-scripts=[installation directory for Python scripts]" \
"--install-data=[installation directory for data files]" \
"--compile[compile .py to .pyc \[default\]]" \
"-c[compile .py to .pyc \[default\]]" \
"--no-compile[don't compile .py files]" \
"--optimize=[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"-O[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"--force[force installation (overwrite any existing files)]" \
"-f[force installation (overwrite any existing files)]" \
"--skip-build[skip rebuilding everything (for testing/debugging)]" \
"--record=[filename in which to record list of installed files]" \
"--install-layout=[installation layout to choose (known values: deb, unix)]" \
"--old-and-unmanageable[Try not to use this!]" \
"--single-version-externally-managed[used by system package builders to create 'flat' eggs]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py install_lib` (install built modules).
(( $+functions[_setuppy_install_lib] )) ||
_setuppy_install_lib() {
_arguments -s \
"--install-dir=[directory to install to]" \
"-d[directory to install to]" \
"--build-dir=[build directory (where to install from)]" \
"-b[build directory (where to install from)]" \
"--force[force installation (overwrite existing files)]" \
"-f[force installation (overwrite existing files)]" \
"--compile[compile .py to .pyc \[default\]]" \
"-c[compile .py to .pyc \[default\]]" \
"--no-compile[don't compile .py files]" \
"--optimize=[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"-O[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"--skip-build[skip the build steps]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py install_headers` (install C/C++ headers).
(( $+functions[_setuppy_install_headers] )) ||
_setuppy_install_headers() {
_arguments -s \
"--install-dir=[directory to install header files to]" \
"-d[directory to install header files to]" \
"--force[force installation (overwrite existing files)]" \
"-f[force installation (overwrite existing files)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py install_scripts` (install executable scripts).
(( $+functions[_setuppy_install_scripts] )) ||
_setuppy_install_scripts() {
_arguments -s \
"--install-dir=[directory to install scripts to]" \
"-d[directory to install scripts to]" \
"--build-dir=[build directory (where to install from)]" \
"-b[build directory (where to install from)]" \
"--force[force installation (overwrite existing files)]" \
"-f[force installation (overwrite existing files)]" \
"--skip-build[skip the build steps]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py install_data` (install data files).
(( $+functions[_setuppy_install_data] )) ||
_setuppy_install_data() {
_arguments -s \
"--install-dir=[base directory for installing data files (default: installation base dir)]" \
"-d[base directory for installing data files (default: installation base dir)]" \
"--root=[install everything relative to this alternate root directory]" \
"--force[force installation (overwrite existing files)]" \
"-f[force installation (overwrite existing files)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py sdist` (create a source distribution archive).
(( $+functions[_setuppy_sdist] )) ||
_setuppy_sdist() {
_arguments -s \
"--formats=[formats for source distribution (comma-separated list)]" \
"--keep-temp[keep the distribution tree around after creating archive file(s)]" \
"-k[keep the distribution tree around after creating archive file(s)]" \
"--dist-dir=[directory to put the source distribution archive(s) in \[default: dist\]]" \
"-d[directory to put the source distribution archive(s) in \[default: dist\]]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py register` (register metadata with a package index).
(( $+functions[_setuppy_register] )) ||
_setuppy_register() {
_arguments -s \
"--repository=[url of repository \[default: http://pypi.python.org/pypi\]]" \
"-r[url of repository \[default: http://pypi.python.org/pypi\]]" \
"--show-response[display full response text from server]" \
"--list-classifiers[list the valid Trove classifiers]" \
"--strict[Will stop the registering if the meta-data are not fully compliant]" \
"*::setup.py commands:_setuppy"
}
# Options for `setup.py bdist` (create a built/binary distribution).
(( $+functions[_setuppy_bdist] )) ||
_setuppy_bdist() {
_arguments -s \
"--bdist-base=[temporary directory for creating built distributions]" \
"-b[temporary directory for creating built distributions]" \
"--plat-name=[platform name to embed in generated filenames (default: linux-i686)]" \
"-p[platform name to embed in generated filenames (default: linux-i686)]" \
"--formats=[formats for distribution (comma-separated list)]" \
"--dist-dir=[directory to put final built distributions in \[default: dist\]]" \
"-d[directory to put final built distributions in \[default: dist\]]" \
"--skip-build[skip rebuilding everything (for testing/debugging)]" \
"--owner=[Owner name used when creating a tar file \[default: current user\]]" \
"-u[Owner name used when creating a tar file \[default: current user\]]" \
"--group=[Group name used when creating a tar file \[default: current group\]]" \
"-g[Group name used when creating a tar file \[default: current group\]]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py bdist_dumb` (create a "dumb" built distribution).
# NOTE(review): -d is listed twice here (for --bdist-dir and --dist-dir),
# mirroring distutils' own help output — confirm against upstream before
# deduplicating.
(( $+functions[_setuppy_bdist_dumb] )) ||
_setuppy_bdist_dumb() {
_arguments -s \
"--bdist-dir=[temporary directory for creating the distribution]" \
"-d[temporary directory for creating the distribution]" \
"--plat-name=[platform name to embed in generated filenames (default: linux-i686)]" \
"-p[platform name to embed in generated filenames (default: linux-i686)]" \
"--format=[archive format to create (tar, ztar, gztar, zip)]" \
"-f[archive format to create (tar, ztar, gztar, zip)]" \
"--keep-temp[keep the pseudo-installation tree around after creating the distribution archive]" \
"-k[keep the pseudo-installation tree around after creating the distribution archive]" \
"--dist-dir=[directory to put final built distributions in]" \
"-d[directory to put final built distributions in]" \
"--skip-build[skip rebuilding everything (for testing/debugging)]" \
"--relative[build the archive using relative paths(default: false)]" \
"--owner=[Owner name used when creating a tar file \[default: current user\]]" \
"-u[Owner name used when creating a tar file \[default: current user\]]" \
"--group=[Group name used when creating a tar file \[default: current group\]]" \
"-g[Group name used when creating a tar file \[default: current group\]]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py bdist_rpm` (create an RPM distribution).
(( $+functions[_setuppy_bdist_rpm] )) ||
_setuppy_bdist_rpm() {
_arguments -s \
"--bdist-base=[base directory for creating built distributions]" \
"--rpm-base=[base directory for creating RPMs (defaults to \"rpm\" under --bdist-base; must be specified for RPM 2)]" \
"--dist-dir=[directory to put final RPM files in (and .spec files if --spec-only)]" \
"-d[directory to put final RPM files in (and .spec files if --spec-only)]" \
"--python=[path to Python interpreter to hard-code in the .spec file (default: \"python\")]" \
"--fix-python[hard-code the exact path to the current Python interpreter in the .spec file]" \
"--spec-only[only regenerate spec file]" \
"--source-only[only generate source RPM]" \
"--binary-only[only generate binary RPM]" \
"--use-bzip2[use bzip2 instead of gzip to create source distribution]" \
"--distribution-name=[name of the (Linux) distribution to which this RPM applies (*not* the name of the module distribution!)]" \
"--group=[package classification \[default: \"Development/Libraries\"\]]" \
"--release=[RPM release number]" \
"--serial=[RPM serial number]" \
"--vendor=[RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") \[default: maintainer or author from setup script\]]" \
"--packager=[RPM packager (eg. \"Jane Doe <jane@example.net>\")\[default: vendor\]]" \
"--doc-files=[list of documentation files (space or comma-separated)]" \
"--changelog=[RPM changelog]" \
"--icon=[name of icon file]" \
"--provides=[capabilities provided by this package]" \
"--requires=[capabilities required by this package]" \
"--conflicts=[capabilities which conflict with this package]" \
"--build-requires=[capabilities required to build this package]" \
"--obsoletes=[capabilities made obsolete by this package]" \
"--no-autoreq[do not automatically calculate dependencies]" \
"--keep-temp[don't clean up RPM build directory]" \
"-k[don't clean up RPM build directory]" \
"--no-keep-temp[clean up RPM build directory \[default\]]" \
"--use-rpm-opt-flags[compile with RPM_OPT_FLAGS when building from source RPM]" \
"--no-rpm-opt-flags[do not pass any RPM CFLAGS to compiler]" \
"--rpm3-mode[RPM 3 compatibility mode (default)]" \
"--rpm2-mode[RPM 2 compatibility mode]" \
"--prep-script=[Specify a script for the PREP phase of RPM building]" \
"--build-script=[Specify a script for the BUILD phase of RPM building]" \
"--pre-install=[Specify a script for the pre-INSTALL phase of RPM building]" \
"--install-script=[Specify a script for the INSTALL phase of RPM building]" \
"--post-install=[Specify a script for the post-INSTALL phase of RPM building]" \
"--pre-uninstall=[Specify a script for the pre-UNINSTALL phase of RPM building]" \
"--post-uninstall=[Specify a script for the post-UNINSTALL phase of RPM building]" \
"--clean-script=[Specify a script for the CLEAN phase of RPM building]" \
"--verify-script=[Specify a script for the VERIFY phase of the RPM build]" \
"--force-arch=[Force an architecture onto the RPM build process]" \
"--quiet[Run the INSTALL phase of RPM building in quiet mode]" \
"-q[Run the INSTALL phase of RPM building in quiet mode]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py bdist_wininst` (create an MS Windows installer).
(( $+functions[_setuppy_bdist_wininst] )) ||
_setuppy_bdist_wininst() {
_arguments -s \
"--bdist-dir=[temporary directory for creating the distribution]" \
"--plat-name=[platform name to embed in generated filenames (default: linux-i686)]" \
"-p[platform name to embed in generated filenames (default: linux-i686)]" \
"--keep-temp[keep the pseudo-installation tree around after creating the distribution archive]" \
"-k[keep the pseudo-installation tree around after creating the distribution archive]" \
"--target-version=[require a specific python version on the target system]" \
"--no-target-compile[do not compile .py to .pyc on the target system]" \
"-c[do not compile .py to .pyc on the target system]" \
"--no-target-optimize[do not compile .py to .pyo (optimized)on the target system]" \
"-o[do not compile .py to .pyo (optimized)on the target system]" \
"--dist-dir=[directory to put final built distributions in]" \
"-d[directory to put final built distributions in]" \
"--bitmap=[bitmap to use for the installer instead of python-powered logo]" \
"-b[bitmap to use for the installer instead of python-powered logo]" \
"--title=[title to display on the installer background instead of default]" \
"-t[title to display on the installer background instead of default]" \
"--skip-build[skip rebuilding everything (for testing/debugging)]" \
"--install-script=[basename of installation script to be run afterinstallation or before deinstallation]" \
"--pre-install-script=[Fully qualified filename of a script to be run before any files are installed. This script need not be in the distribution]" \
"--user-access-control=[specify Vista's UAC handling - 'none'/default=no handling, 'auto'=use UAC if target Python installed for all users, 'force'=always use UAC]" \
"*::setup.py commands:_setuppy"
}
# Options for `setup.py upload` (upload built packages to an index).
(( $+functions[_setuppy_upload] )) ||
_setuppy_upload() {
_arguments -s \
"--repository=[url of repository \[default: http://pypi.python.org/pypi\]]" \
"-r[url of repository \[default: http://pypi.python.org/pypi\]]" \
"--show-response[display full response text from server]" \
"--sign[sign files to upload using gpg]" \
"-s[sign files to upload using gpg]" \
"--identity=[GPG identity used to sign files]" \
"-i[GPG identity used to sign files]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py check` (validate package metadata).
(( $+functions[_setuppy_check] )) ||
_setuppy_check() {
_arguments -s \
"--metadata[Verify meta-data]" \
"-m[Verify meta-data]" \
"--restructuredtext[Checks if long string meta-data syntax are reStructuredText-compliant]" \
"-r[Checks if long string meta-data syntax are reStructuredText-compliant]" \
"--strict[Will exit with an error if a check fails]" \
"-s[Will exit with an error if a check fails]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py alias` (define shortcuts for command sequences).
(( $+functions[_setuppy_alias] )) ||
_setuppy_alias() {
_arguments -s \
"--remove[remove (unset) the alias]" \
"-r[remove (unset) the alias]" \
"--global-config[save options to the site-wide distutils.cfg file]" \
"-g[save options to the site-wide distutils.cfg file]" \
"--user-config[save options to the current user's pydistutils.cfg file]" \
"-u[save options to the current user's pydistutils.cfg file]" \
"--filename=[configuration file to use (default=setup.cfg)]" \
"-f[configuration file to use (default=setup.cfg)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py bdist_egg` (create an "egg" distribution).
(( $+functions[_setuppy_bdist_egg] )) ||
_setuppy_bdist_egg() {
_arguments -s \
"--bdist-dir=[temporary directory for creating the distribution]" \
"-b[temporary directory for creating the distribution]" \
"--plat-name=[platform name to embed in generated filenames (default: linux-i686)]" \
"-p[platform name to embed in generated filenames (default: linux-i686)]" \
"--exclude-source-files[remove all .py files from the generated egg]" \
"--keep-temp[keep the pseudo-installation tree around after creating the distribution archive]" \
"-k[keep the pseudo-installation tree around after creating the distribution archive]" \
"--dist-dir=[directory to put final built distributions in]" \
"-d[directory to put final built distributions in]" \
"--skip-build[skip rebuilding everything (for testing/debugging)]" \
"*::setup.py commands:_setuppy"
}
# Options for `setup.py develop` (install in "development mode").
# Shares most flags with easy_install below, plus --uninstall/--egg-path.
(( $+functions[_setuppy_develop] )) ||
_setuppy_develop() {
_arguments -s \
"--prefix=[installation prefix]" \
"--zip-ok[install package as a zipfile]" \
"-z[install package as a zipfile]" \
"--multi-version[make apps have to require() a version]" \
"-m[make apps have to require() a version]" \
"--upgrade[force upgrade (searches PyPI for latest versions)]" \
"-U[force upgrade (searches PyPI for latest versions)]" \
"--install-dir=[install package to DIR]" \
"-d[install package to DIR]" \
"--script-dir=[install scripts to DIR]" \
"-s[install scripts to DIR]" \
"--exclude-scripts[Don't install scripts]" \
"-x[Don't install scripts]" \
"--always-copy[Copy all needed packages to install dir]" \
"-a[Copy all needed packages to install dir]" \
"--index-url=[base URL of Python Package Index]" \
"-i[base URL of Python Package Index]" \
"--find-links=[additional URL(s) to search for packages]" \
"-f[additional URL(s) to search for packages]" \
"--delete-conflicting[no longer needed; don't use this]" \
"-D[no longer needed; don't use this]" \
"--ignore-conflicts-at-my-risk[no longer needed; don't use this]" \
"--build-directory=[download/extract/build in DIR; keep the results]" \
"-b[download/extract/build in DIR; keep the results]" \
"--optimize=[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"-O[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"--record=[filename in which to record list of installed files]" \
"--always-unzip[don't install as a zipfile, no matter what]" \
"-Z[don't install as a zipfile, no matter what]" \
"--site-dirs=[list of directories where .pth files work]" \
"-S[list of directories where .pth files work]" \
"--editable[Install specified packages in editable form]" \
"-e[Install specified packages in editable form]" \
"--no-deps[don't install dependencies]" \
"-N[don't install dependencies]" \
"--allow-hosts=[pattern(s) that hostnames must match]" \
"-H[pattern(s) that hostnames must match]" \
"--local-snapshots-ok[allow building eggs from local checkouts]" \
"-l[allow building eggs from local checkouts]" \
"--version[print version information and exit]" \
"--install-layout=[installation layout to choose (known values: deb)]" \
"--force-installation-into-system-dir[force installation into /usr]" \
"-0[force installation into /usr]" \
"--no-find-links[Don't load find-links defined in packages being installed]" \
"--user[install in user site-package '/home/hattori/.local/lib/python2.7/site-packages']" \
"--uninstall[Uninstall this source package]" \
"-u[Uninstall this source package]" \
"--egg-path=[Set the path to be used in the .egg-link file]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py easy_install` (find/get/install Python packages).
(( $+functions[_setuppy_easy_install] )) ||
_setuppy_easy_install() {
_arguments -s \
"--prefix=[installation prefix]" \
"--zip-ok[install package as a zipfile]" \
"-z[install package as a zipfile]" \
"--multi-version[make apps have to require() a version]" \
"-m[make apps have to require() a version]" \
"--upgrade[force upgrade (searches PyPI for latest versions)]" \
"-U[force upgrade (searches PyPI for latest versions)]" \
"--install-dir=[install package to DIR]" \
"-d[install package to DIR]" \
"--script-dir=[install scripts to DIR]" \
"-s[install scripts to DIR]" \
"--exclude-scripts[Don't install scripts]" \
"-x[Don't install scripts]" \
"--always-copy[Copy all needed packages to install dir]" \
"-a[Copy all needed packages to install dir]" \
"--index-url=[base URL of Python Package Index]" \
"-i[base URL of Python Package Index]" \
"--find-links=[additional URL(s) to search for packages]" \
"-f[additional URL(s) to search for packages]" \
"--delete-conflicting[no longer needed; don't use this]" \
"-D[no longer needed; don't use this]" \
"--ignore-conflicts-at-my-risk[no longer needed; don't use this]" \
"--build-directory=[download/extract/build in DIR; keep the results]" \
"-b[download/extract/build in DIR; keep the results]" \
"--optimize=[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"-O[also compile with optimization: -O1 for \"python -O\", -O2 for \"python -OO\", and -O0 to disable \[default: -O0\]]" \
"--record=[filename in which to record list of installed files]" \
"--always-unzip[don't install as a zipfile, no matter what]" \
"-Z[don't install as a zipfile, no matter what]" \
"--site-dirs=[list of directories where .pth files work]" \
"-S[list of directories where .pth files work]" \
"--editable[Install specified packages in editable form]" \
"-e[Install specified packages in editable form]" \
"--no-deps[don't install dependencies]" \
"-N[don't install dependencies]" \
"--allow-hosts=[pattern(s) that hostnames must match]" \
"-H[pattern(s) that hostnames must match]" \
"--local-snapshots-ok[allow building eggs from local checkouts]" \
"-l[allow building eggs from local checkouts]" \
"--version[print version information and exit]" \
"--install-layout=[installation layout to choose (known values: deb)]" \
"--force-installation-into-system-dir[force installation into /usr]" \
"-0[force installation into /usr]" \
"--no-find-links[Don't load find-links defined in packages being installed]" \
"--user[install in user site-package '/home/hattori/.local/lib/python2.7/site-packages']" \
"*::setup.py commands:_setuppy"
}
# Options for `setup.py egg_info` (create the .egg-info directory).
(( $+functions[_setuppy_egg_info] )) ||
_setuppy_egg_info() {
_arguments -s \
"--egg-base=[directory containing .egg-info directories (default: top of the source tree)]" \
"-e[directory containing .egg-info directories (default: top of the source tree)]" \
"--tag-svn-revision[Add subversion revision ID to version number]" \
"-r[Add subversion revision ID to version number]" \
"--tag-date[Add date stamp (e.g. 20050528) to version number]" \
"-d[Add date stamp (e.g. 20050528) to version number]" \
"--tag-build=[Specify explicit tag to add to version number]" \
"-b[Specify explicit tag to add to version number]" \
"--no-svn-revision[Don't add subversion revision ID \[default\]]" \
"-R[Don't add subversion revision ID \[default\]]" \
"--no-date[Don't include date stamp \[default\]]" \
"-D[Don't include date stamp \[default\]]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py rotate` (prune old distributions, keep N newest).
(( $+functions[_setuppy_rotate] )) ||
_setuppy_rotate() {
_arguments -s \
"--match=[patterns to match (required)]" \
"-m[patterns to match (required)]" \
"--dist-dir=[directory where the distributions are]" \
"-d[directory where the distributions are]" \
"--keep=[number of matching distributions to keep]" \
"-k[number of matching distributions to keep]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py saveopts` (persist supplied options to a config file).
(( $+functions[_setuppy_saveopts] )) ||
_setuppy_saveopts() {
_arguments -s \
"--global-config[save options to the site-wide distutils.cfg file]" \
"-g[save options to the site-wide distutils.cfg file]" \
"--user-config[save options to the current user's pydistutils.cfg file]" \
"-u[save options to the current user's pydistutils.cfg file]" \
"--filename=[configuration file to use (default=setup.cfg)]" \
"-f[configuration file to use (default=setup.cfg)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py setopt` (set one option in a config file).
(( $+functions[_setuppy_setopt] )) ||
_setuppy_setopt() {
_arguments -s \
"--command=[command to set an option for]" \
"-c[command to set an option for]" \
"--option=[option to set]" \
"-o[option to set]" \
"--set-value=[value of the option]" \
"-s[value of the option]" \
"--remove[remove (unset) the value]" \
"-r[remove (unset) the value]" \
"--global-config[save options to the site-wide distutils.cfg file]" \
"-g[save options to the site-wide distutils.cfg file]" \
"--user-config[save options to the current user's pydistutils.cfg file]" \
"-u[save options to the current user's pydistutils.cfg file]" \
"--filename=[configuration file to use (default=setup.cfg)]" \
"-f[configuration file to use (default=setup.cfg)]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py test` (run the test suite after an in-place build).
(( $+functions[_setuppy_test] )) ||
_setuppy_test() {
_arguments -s \
"--test-module=[Run 'test_suite' in specified module]" \
"-m[Run 'test_suite' in specified module]" \
"--test-suite=[Test suite to run (e.g. 'some_module.test_suite')]" \
"-s[Test suite to run (e.g. 'some_module.test_suite')]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py install_egg_info` (install the .egg-info directory).
(( $+functions[_setuppy_install_egg_info] )) ||
_setuppy_install_egg_info() {
_arguments -s \
"--install-dir=[directory to install to]" \
"-d[directory to install to]" \
"*::setup.py commands:_setuppy"
}

# Options for `setup.py upload_docs` (upload documentation to PyPI).
(( $+functions[_setuppy_upload_docs] )) ||
_setuppy_upload_docs() {
_arguments -s \
"--repository=[url of repository \[default: http://pypi.python.org/pypi\]]" \
"-r[url of repository \[default: http://pypi.python.org/pypi\]]" \
"--show-response[display full response text from server]" \
"--upload-dir=[directory to upload]" \
"*::setup.py commands:_setuppy"
}

# Invoke the entry point with the words the completion system passed in.
_setup.py "$@"
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et

View File

@ -1,178 +0,0 @@
#compdef _stern stern
# zsh completion for stern -*- shell-script -*-
__stern_debug() {
    # Append all arguments (joined by a single space) to the file named by
    # $BASH_COMP_DEBUG_FILE; do nothing when that variable is empty/unset.
    local debug_file="$BASH_COMP_DEBUG_FILE"
    if [[ -n "${debug_file}" ]]; then
        echo "$*" >> "${debug_file}"
    fi
}
# Main completion entry point for stern, generated in the style of cobra's
# GenZshCompletion: it shells out to `stern __complete <words>` and converts
# the returned completions plus a trailing ":<directive>" bitmask line into
# zsh completion calls (_describe / _files / compadd).
_stern()
{
    local shellCompDirectiveError=1
    local shellCompDirectiveNoSpace=2
    local shellCompDirectiveNoFileComp=4
    local shellCompDirectiveFilterFileExt=8
    local shellCompDirectiveFilterDirs=16
    local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace
    local -a completions
    __stern_debug "\n========= starting completion logic =========="
    __stern_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}"
    # The user could have moved the cursor backwards on the command-line.
    # We need to trigger completion from the $CURRENT location, so we need
    # to truncate the command-line ($words) up to the $CURRENT location.
    # (We cannot use $CURSOR as its value does not work when a command is an alias.)
    words=("${=words[1,CURRENT]}")
    __stern_debug "Truncated words[*]: ${words[*]},"
    lastParam=${words[-1]}
    lastChar=${lastParam[-1]}
    __stern_debug "lastParam: ${lastParam}, lastChar: ${lastChar}"
    # For zsh, when completing a flag with an = (e.g., stern -n=<TAB>)
    # completions must be prefixed with the flag
    setopt local_options BASH_REMATCH
    if [[ "${lastParam}" =~ '-.*=' ]]; then
        # We are dealing with a flag with an =
        flagPrefix="-P ${BASH_REMATCH}"
    fi
    # Prepare the command to obtain completions
    requestComp="${words[1]} __complete ${words[2,-1]}"
    if [ "${lastChar}" = "" ]; then
        # If the last parameter is complete (there is a space following it)
        # We add an extra empty parameter so we can indicate this to the go completion code.
        __stern_debug "Adding extra empty parameter"
        requestComp="${requestComp} \"\""
    fi
    __stern_debug "About to call: eval ${requestComp}"
    # Use eval to handle any environment variables and such
    out=$(eval ${requestComp} 2>/dev/null)
    __stern_debug "completion output: ${out}"
    # Extract the directive integer following a : from the last line
    local lastLine
    while IFS='\n' read -r line; do
        lastLine=${line}
    done < <(printf "%s\n" "${out[@]}")
    __stern_debug "last line: ${lastLine}"
    if [ "${lastLine[1]}" = : ]; then
        directive=${lastLine[2,-1]}
        # Remove the directive including the : and the newline
        local suffix
        (( suffix=${#lastLine}+2))
        out=${out[1,-$suffix]}
    else
        # There is no directive specified. Leave $out as is.
        __stern_debug "No directive found. Setting do default"
        directive=0
    fi
    __stern_debug "directive: ${directive}"
    __stern_debug "completions: ${out}"
    __stern_debug "flagPrefix: ${flagPrefix}"
    if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
        __stern_debug "Completion received error. Ignoring completions."
        return
    fi
    while IFS='\n' read -r comp; do
        if [ -n "$comp" ]; then
            # If requested, completions are returned with a description.
            # The description is preceded by a TAB character.
            # For zsh's _describe, we need to use a : instead of a TAB.
            # We first need to escape any : as part of the completion itself.
            comp=${comp//:/\\:}
            local tab=$(printf '\t')
            comp=${comp//$tab/:}
            __stern_debug "Adding completion: ${comp}"
            completions+=${comp}
            lastComp=$comp
        fi
    done < <(printf "%s\n" "${out[@]}")
    if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
        __stern_debug "Activating nospace."
        noSpace="-S ''"
    fi
    if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
        # File extension filtering
        local filteringCmd
        filteringCmd='_files'
        for filter in ${completions[@]}; do
            if [ ${filter[1]} != '*' ]; then
                # zsh requires a glob pattern to do file filtering
                filter="\*.$filter"
            fi
            filteringCmd+=" -g $filter"
        done
        filteringCmd+=" ${flagPrefix}"
        __stern_debug "File filtering command: $filteringCmd"
        _arguments '*:filename:'"$filteringCmd"
    elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
        # File completion for directories only
        # BUGFIX: this was declared as "local subDir" while every use below
        # refers to $subdir (zsh variables are case-sensitive), so "subdir"
        # silently leaked into the global scope. Declare the name actually used.
        local subdir
        subdir="${completions[1]}"
        if [ -n "$subdir" ]; then
            __stern_debug "Listing directories in $subdir"
            pushd "${subdir}" >/dev/null 2>&1
        else
            __stern_debug "Listing directories in ."
        fi
        local result
        _arguments '*:dirname:_files -/'" ${flagPrefix}"
        result=$?
        if [ -n "$subdir" ]; then
            popd >/dev/null 2>&1
        fi
        return $result
    else
        __stern_debug "Calling _describe"
        if eval _describe "completions" completions $flagPrefix $noSpace; then
            __stern_debug "_describe found some completions"
            # Return the success of having called _describe
            return 0
        else
            __stern_debug "_describe did not find completions."
            __stern_debug "Checking if we should do file completion."
            if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
                __stern_debug "deactivating file completion"
                # We must return an error code here to let zsh know that there were no
                # completions found by _describe; this is what will trigger other
                # matching algorithms to attempt to find completions.
                # For example zsh can match letters in the middle of words.
                return 1
            else
                # Perform file completion
                __stern_debug "Activating file completion"
                # We must return the result of this command, so it must be the
                # last command, or else we must store its result to return it.
                _arguments '*:filename:_files'" ${flagPrefix}"
            fi
        fi
    fi
}
# don't run the completion function when being source-ed or eval-ed
# ($funcstack is zsh's function call stack; its first element equals
# "_stern" only when the completion system invoked this file as the
# autoloaded _stern function).
if [ "$funcstack[1]" = "_stern" ]; then
    _stern
fi
compdef _stern stern

View File

@ -1,213 +0,0 @@
#compdef talosctl
compdef _talosctl talosctl
# zsh completion for talosctl -*- shell-script -*-
__talosctl_debug() {
    # Append all arguments (joined by a single space) to the file named by
    # $BASH_COMP_DEBUG_FILE; do nothing when that variable is empty/unset.
    local debug_file="$BASH_COMP_DEBUG_FILE"
    if [[ -n "${debug_file}" ]]; then
        echo "$*" >> "${debug_file}"
    fi
}
# Main completion entry point for talosctl, generated by cobra's
# GenZshCompletion (newer generator than the stern one above: it also
# understands the KeepOrder directive and "_activeHelp_" messages).
# It runs `talosctl __complete <words>`, parses the trailing ":<directive>"
# bitmask line, and maps it onto zsh completion primitives.
_talosctl()
{
    # Directive bit values as defined by the cobra shell-completion protocol.
    local shellCompDirectiveError=1
    local shellCompDirectiveNoSpace=2
    local shellCompDirectiveNoFileComp=4
    local shellCompDirectiveFilterFileExt=8
    local shellCompDirectiveFilterDirs=16
    local shellCompDirectiveKeepOrder=32
    local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace keepOrder
    local -a completions
    __talosctl_debug "\n========= starting completion logic =========="
    __talosctl_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}"
    # The user could have moved the cursor backwards on the command-line.
    # We need to trigger completion from the $CURRENT location, so we need
    # to truncate the command-line ($words) up to the $CURRENT location.
    # (We cannot use $CURSOR as its value does not work when a command is an alias.)
    words=("${=words[1,CURRENT]}")
    __talosctl_debug "Truncated words[*]: ${words[*]},"
    lastParam=${words[-1]}
    lastChar=${lastParam[-1]}
    __talosctl_debug "lastParam: ${lastParam}, lastChar: ${lastChar}"
    # For zsh, when completing a flag with an = (e.g., talosctl -n=<TAB>)
    # completions must be prefixed with the flag
    setopt local_options BASH_REMATCH
    if [[ "${lastParam}" =~ '-.*=' ]]; then
        # We are dealing with a flag with an =
        flagPrefix="-P ${BASH_REMATCH}"
    fi
    # Prepare the command to obtain completions
    requestComp="${words[1]} __complete ${words[2,-1]}"
    if [ "${lastChar}" = "" ]; then
        # If the last parameter is complete (there is a space following it)
        # We add an extra empty parameter so we can indicate this to the go completion code.
        __talosctl_debug "Adding extra empty parameter"
        requestComp="${requestComp} \"\""
    fi
    __talosctl_debug "About to call: eval ${requestComp}"
    # Use eval to handle any environment variables and such
    out=$(eval ${requestComp} 2>/dev/null)
    __talosctl_debug "completion output: ${out}"
    # Extract the directive integer following a : from the last line
    local lastLine
    while IFS='\n' read -r line; do
        lastLine=${line}
    done < <(printf "%s\n" "${out[@]}")
    __talosctl_debug "last line: ${lastLine}"
    if [ "${lastLine[1]}" = : ]; then
        directive=${lastLine[2,-1]}
        # Remove the directive including the : and the newline
        local suffix
        (( suffix=${#lastLine}+2))
        out=${out[1,-$suffix]}
    else
        # There is no directive specified. Leave $out as is.
        __talosctl_debug "No directive found. Setting do default"
        directive=0
    fi
    __talosctl_debug "directive: ${directive}"
    __talosctl_debug "completions: ${out}"
    __talosctl_debug "flagPrefix: ${flagPrefix}"
    if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
        __talosctl_debug "Completion received error. Ignoring completions."
        return
    fi
    local activeHelpMarker="_activeHelp_ "
    local endIndex=${#activeHelpMarker}
    local startIndex=$((${#activeHelpMarker}+1))
    local hasActiveHelp=0
    while IFS='\n' read -r comp; do
        # Check if this is an activeHelp statement (i.e., prefixed with $activeHelpMarker)
        if [ "${comp[1,$endIndex]}" = "$activeHelpMarker" ];then
            __talosctl_debug "ActiveHelp found: $comp"
            comp="${comp[$startIndex,-1]}"
            if [ -n "$comp" ]; then
                # -x displays the help text as a message, not a completion match.
                compadd -x "${comp}"
                __talosctl_debug "ActiveHelp will need delimiter"
                hasActiveHelp=1
            fi
            continue
        fi
        if [ -n "$comp" ]; then
            # If requested, completions are returned with a description.
            # The description is preceded by a TAB character.
            # For zsh's _describe, we need to use a : instead of a TAB.
            # We first need to escape any : as part of the completion itself.
            comp=${comp//:/\\:}
            local tab="$(printf '\t')"
            comp=${comp//$tab/:}
            __talosctl_debug "Adding completion: ${comp}"
            completions+=${comp}
            lastComp=$comp
        fi
    done < <(printf "%s\n" "${out[@]}")
    # Add a delimiter after the activeHelp statements, but only if:
    # - there are completions following the activeHelp statements, or
    # - file completion will be performed (so there will be choices after the activeHelp)
    if [ $hasActiveHelp -eq 1 ]; then
        if [ ${#completions} -ne 0 ] || [ $((directive & shellCompDirectiveNoFileComp)) -eq 0 ]; then
            __talosctl_debug "Adding activeHelp delimiter"
            compadd -x "--"
            hasActiveHelp=0
        fi
    fi
    if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
        __talosctl_debug "Activating nospace."
        noSpace="-S ''"
    fi
    if [ $((directive & shellCompDirectiveKeepOrder)) -ne 0 ]; then
        __talosctl_debug "Activating keep order."
        keepOrder="-V"
    fi
    if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
        # File extension filtering
        local filteringCmd
        filteringCmd='_files'
        for filter in ${completions[@]}; do
            if [ ${filter[1]} != '*' ]; then
                # zsh requires a glob pattern to do file filtering
                filter="\*.$filter"
            fi
            filteringCmd+=" -g $filter"
        done
        filteringCmd+=" ${flagPrefix}"
        __talosctl_debug "File filtering command: $filteringCmd"
        _arguments '*:filename:'"$filteringCmd"
    elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
        # File completion for directories only
        local subdir
        subdir="${completions[1]}"
        if [ -n "$subdir" ]; then
            __talosctl_debug "Listing directories in $subdir"
            pushd "${subdir}" >/dev/null 2>&1
        else
            __talosctl_debug "Listing directories in ."
        fi
        local result
        _arguments '*:dirname:_files -/'" ${flagPrefix}"
        result=$?
        if [ -n "$subdir" ]; then
            popd >/dev/null 2>&1
        fi
        return $result
    else
        __talosctl_debug "Calling _describe"
        if eval _describe $keepOrder "completions" completions $flagPrefix $noSpace; then
            __talosctl_debug "_describe found some completions"
            # Return the success of having called _describe
            return 0
        else
            __talosctl_debug "_describe did not find completions."
            __talosctl_debug "Checking if we should do file completion."
            if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
                __talosctl_debug "deactivating file completion"
                # We must return an error code here to let zsh know that there were no
                # completions found by _describe; this is what will trigger other
                # matching algorithms to attempt to find completions.
                # For example zsh can match letters in the middle of words.
                return 1
            else
                # Perform file completion
                __talosctl_debug "Activating file completion"
                # We must return the result of this command, so it must be the
                # last command, or else we must store its result to return it.
                _arguments '*:filename:_files'" ${flagPrefix}"
            fi
        fi
    fi
}
# don't run the completion function when being source-ed or eval-ed
# ($funcstack is zsh's function call stack; its first element equals
# "_talosctl" only when the completion system invoked this file as the
# autoloaded _talosctl function).
if [ "$funcstack[1]" = "_talosctl" ]; then
    _talosctl
fi
compdef _talosctl talosctl

View File

@ -1,388 +0,0 @@
#compdef terraform

# Completion for the Terraform CLI.
# _terraform_cmds maps each top-level subcommand to the description shown
# by _describe; opt_args is declared here so it is local to this completion
# run — _arguments fills it and __statelist reads $opt_args[-state] from it.
local -a _terraform_cmds opt_args
_terraform_cmds=(
  'apply:Builds or changes infrastructure'
  'console:Interactive console for Terraform interpolations'
  'destroy:Destroy Terraform-managed infrastructure'
  'debug:This command has subcommands for debug output management'
  'env:Workspace management'
  'fmt:Rewrites config files to canonical format'
  'force-unlock:Manually unlock the state for the defined configuration.'
  'get:Download and install modules for the configuration'
  'graph:Create a visual graph of Terraform resources'
  'import:Import existing infrastructure into Terraform'
  'init:Initialize a Terraform working directory'
  'login:Obtain and save credentials for a remote host'
  'logout:Remove locally-stored credentials for a remote host'
  'output:Read an output from a state file'
  'plan:Generate and show an execution plan'
  'providers:Prints a tree of the providers used in the configuration'
  'push:Upload this Terraform module to Atlas to run'
  'refresh:Update local state file against real resources'
  'show:Inspect Terraform state or plan'
  'state:Advanced state management'
  'taint:Manually mark a resource for recreation'
  'untaint:Manually unmark a resource as tainted'
  'validate:Validates the Terraform files'
  'version:Prints the Terraform version'
  'workspace:Workspace management'
)
__apply() {
  # Flags for `terraform apply`; -target offers addresses from the current
  # state via __statelist.
  _arguments \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-auto-approve[Skip interactive approval of plan before applying.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-input=[(true) Ask for input for variables if not directly set.]' \
    '-no-color[If specified, output wil be colorless.]' \
    '-parallelism=[(10) Limit the number of parallel resource operations.]' \
    '-refresh=[(true) Update state prior to checking for differences. This has no effect if a plan file is given to apply.]' \
    '-state=[(terraform.tfstate) Path to read and save state (unless state-out is specified).]' \
    '-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
    '-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]:target:__statelist' \
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
    '-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
}
__console() {
  # Flags for `terraform console`.
  local -a console_specs
  console_specs=(
    '-state=[(terraform.tfstate) Path to read state.]'
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]'
    '-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
  )
  _arguments "${console_specs[@]}"
}
__env() {
  # Subcommands of `terraform env` (the legacy alias of `workspace`).
  local -a env_subcommands
  env_subcommands=(
    'delete:Delete a workspace'
    'list:List Workspaces'
    'new:Create a new workspace'
    'select:Select a workspace'
    'show:Show the name of the current workspace'
  )
  _describe -t env "env commands" env_subcommands
}
__destroy() {
  # Flags for `terraform destroy`; -target offers addresses from the
  # current state via __statelist.
  _arguments \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-auto-approve[Skip interactive approval before destroying.]' \
    '-force[Deprecated: same as auto-approve.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-no-color[If specified, output will contain no color.]' \
    '-parallelism=[(10) Limit the number of concurrent operations.]' \
    '-refresh=[(true) Update state prior to checking for differences. This has no effect if a plan file is given to apply.]' \
    '-state=[(terraform.tfstate) Path to read and save state (unless state-out is specified).]' \
    '-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
    '-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]:target:__statelist' \
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
    '-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
}
__fmt() {
  # Flags for `terraform fmt`.
  local -a fmt_specs
  fmt_specs=(
    '-list=[(true) List files whose formatting differs (always false if using STDIN)]'
    '-write=[(true) Write result to source file instead of STDOUT (always false if using STDIN or -check)]'
    '-diff=[(false) Display diffs of formatting changes]'
    '-check=[(false) Check if the input is formatted. Exit status will be 0 if all input is properly formatted and non-zero otherwise.]'
    '-recursive=[(false) Also process files in subdirectories. By default, only the given directory (or current directory) is processed.]'
  )
  _arguments "${fmt_specs[@]}"
}
# `terraform force-unlock` takes no completable flags; calling _arguments
# with no specs keeps the dispatcher structure uniform.
__force_unlock() {
  _arguments
}
__get() {
  # Flags for `terraform get`.
  local -a get_specs
  get_specs=(
    '-update=[(false) If true, modules already downloaded will be checked for updates and updated if necessary.]'
    '-no-color[If specified, output will contain no color.]'
  )
  _arguments "${get_specs[@]}"
}
__graph() {
  # Flags for `terraform graph`.
  local -a graph_specs
  graph_specs=(
    '-draw-cycles[Highlight any cycles in the graph with colored edges. This helps when diagnosing cycle errors.]'
    '-no-color[If specified, output will contain no color.]'
    '-type=[(plan) Type of graph to output. Can be: plan, plan-destroy, apply, validate, input, refresh.]'
  )
  _arguments "${graph_specs[@]}"
}
__import() {
  # Flags for `terraform import`.
  _arguments \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-config=[(path) Path to a directory of Terraform configuration files to use to configure the provider. Defaults to pwd. If no config files are present, they must be provided via the input prompts or env vars.]' \
    '-allow-missing-config[Allow import when no resource configuration block exists.]' \
    '-input=[(true) Ask for input for variables if not directly set.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-no-color[If specified, output will contain no color.]' \
    '-provider=[(provider) Specific provider to use for import. This is used for specifying aliases, such as "aws.eu". Defaults to the normal provider prefix of the resource being imported.]' \
    '-state=[(PATH) Path to the source state file. Defaults to the configured backend, or "terraform.tfstate"]' \
    '-state-out=[(PATH) Path to the destination state file to write to. If this is not specified, the source state file will be used. This can be a new or existing path.]' \
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times. This is only useful with the "-config" flag.]' \
    '-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
}
__init() {
  # Flags for `terraform init`.
  # BUGFIX: the -backend-config description used nested single quotes
  # ('…a 'key=value' format…'), which the shell parses as string
  # concatenation and silently drops the inner quotes from the displayed
  # help text. The spec is now double-quoted so the quotes survive intact.
  _arguments \
    '-backend=[(true) Configure the backend for this configuration.]' \
    "-backend-config=[This can be either a path to an HCL file with key/value assignments (same format as terraform.tfvars) or a 'key=value' format. This is merged with what is in the configuration file. This can be specified multiple times. The backend type must be in the configuration itself.]" \
    '-force-copy[Suppress prompts about copying state data. This is equivalent to providing a "yes" to all confirmation prompts.]' \
    '-from-module=[Copy the contents of the given module into the target directory before initialization.]' \
    '-get=[(true) Download any modules for this configuration.]' \
    '-get-plugins=[(true) Download any missing plugins for this configuration.]' \
    '-input=[(true) Ask for input if necessary. If false, will error if input was required.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-no-color[If specified, output will contain no color.]' \
    '-plugin-dir[Directory containing plugin binaries. This overrides all default search paths for plugins, and prevents the automatic installation of plugins. This flag can be used multiple times.]' \
    '-reconfigure[Reconfigure the backend, ignoring any saved configuration.]' \
    '-upgrade=[(false) If installing modules (-get) or plugins (-get-plugins), ignore previously-downloaded objects and install the latest version allowed within configured constraints.]' \
    '-verify-plugins=[(true) Verify the authenticity and integrity of automatically downloaded plugins.]'
}
# `terraform login` takes no completable flags.
__login() {
  _arguments
}
# `terraform logout` takes no completable flags.
__logout() {
  _arguments
}
__output() {
  # Flags for `terraform output`.
  local -a output_specs
  output_specs=(
    '-state=[(path) Path to the state file to read. Defaults to "terraform.tfstate".]'
    '-no-color[ If specified, output will contain no color.]'
    '-module=[(name) If specified, returns the outputs for a specific module]'
    '-json[If specified, machine readable output will be printed in JSON format]'
  )
  _arguments "${output_specs[@]}"
}
__plan() {
  # Flags for `terraform plan`; -target offers addresses from the current
  # state via __statelist.
  # BUGFIX: the last spec line ended with a trailing backslash, which
  # continued the _arguments command onto the closing "}". zsh then treated
  # "}" as an argument instead of closing the function body, corrupting the
  # definitions that follow. The stray backslash is removed.
  _arguments \
    '-destroy[() If set, a plan will be generated to destroy all resources managed by the given configuration and state.]' \
    '-detailed-exitcode[() Return detailed exit codes when the command exits. This will change the meaning of exit codes to: 0 - Succeeded, diff is empty (no changes); 1 - Errored, 2 - Succeeded; there is a diff]' \
    '-input=[(true) Ask for input for variables if not directly set.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-module-depth=[(n) Specifies the depth of modules to show in the output. This does not affect the plan itself, only the output shown. By default, this is -1, which will expand all.]' \
    '-no-color[() If specified, output will contain no color.]' \
    '-out=[(path) Write a plan file to the given path. This can be used as input to the "apply" command.]' \
    '-parallelism=[(10) Limit the number of concurrent operations.]' \
    '-refresh=[(true) Update state prior to checking for differences.]' \
    '-state=[(statefile) Path to a Terraform state file to use to look up Terraform-managed resources. By default it will use the state "terraform.tfstate" if it exists.]' \
    '-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]:target:__statelist' \
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
    '-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
}
# `terraform providers` takes no completable flags.
__providers() {
  _arguments
}
__push() {
  # Flags for the (legacy Atlas) `terraform push` command.
  _arguments \
    '-atlas-address=[(url) An alternate address to an Atlas instance. Defaults to https://atlas.hashicorp.com.]' \
    '-upload-modules=[(true) If true (default), then the modules being used are all locked at their current checkout and uploaded completely to Atlas. This prevents Atlas from running terraform get for you.]' \
    '-name=[(name) Name of the infrastructure configuration in Atlas. The format of this is: "username/name" so that you can upload configurations not just to your account but to other accounts and organizations. This setting can also be set in the configuration in the Atlas section.]' \
    '-no-color[Disables output with coloring]' \
    '-overwrite=[(foo) Marks a specific variable to be updated on Atlas. Normally, if a variable is already set in Atlas, Terraform will not send the local value (even if it is different). This forces it to send the local value to Atlas. This flag can be repeated multiple times.]' \
    '-token=[(token) Atlas API token to use to authorize the upload. If blank or unspecified, the ATLAS_TOKEN environmental variable will be used.]' \
    '-var=[("foo=bar") Set the value of a variable for the Terraform configuration.]' \
    '-var-file=[(foo) Set the value of variables using a variable file.]' \
    '-vcs=[(true) If true (default), then Terraform will detect if a VCS is in use, such as Git, and will only upload files that are committed to version control. If no version control system is detected, Terraform will upload all files in path (parameter to the command).]'
}
__refresh() {
  # Flags for `terraform refresh`; -target offers addresses from the
  # current state via __statelist.
  _arguments \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-input=[(true) Ask for input for variables if not directly set.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-no-color[If specified, output will not contain any color.]' \
    '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
    '-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
    '-target=[(resource) A Resource Address to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]:target:__statelist' \
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
    '-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
}
__show() {
  # Flags for `terraform show`.
  local -a show_specs
  show_specs=(
    '-module-depth=[(n) The maximum depth to expand modules. By default this is zero, which will not expand modules at all.]'
    '-no-color[If specified, output will not contain any color.]'
  )
  _arguments "${show_specs[@]}"
}
__state() {
  # Subcommands of `terraform state`.
  local -a state_subcommands
  state_subcommands=(
    'list:List resources in the state'
    'mv:Move an item in the state'
    'pull:Pull current state and output to stdout'
    'push:Update remote state from a local state file'
    'rm:Remove instances from the state'
    'show:Show a resource in the state'
  )
  _describe -t state "state commands" state_subcommands
}
__state_list() {
  # Flags for `terraform state list`; positional arguments complete as
  # resource addresses from the current state.
  local -a state_list_specs
  state_list_specs=(
    '-state=[(path) Path to a Terraform state file to use to look up Terraform-managed resources. By default it will use the state "terraform.tfstate" if it exists.]'
    '-id=[(id) Filters the results to include only instances whose resource types have an attribute named id whose value equals the given id string.]'
    "*:address:__statelist"
  )
  _arguments "${state_list_specs[@]}"
}
__state_mv() {
  # Flags for `terraform state mv`: first positional arg is a source
  # address from the state, second is the free-form destination.
  _arguments \
    "-dry-run[If set, prints out what would've been moved but doesn't actually move anything.]" \
    "-backup=[(path) Path where Terraform should write the backup for the original state. This can't be disabled. If not set, Terraform will write it to the same path as the statefile with a \".backup\" extension.]:file:_files" \
    "-backup-out=[(path) Path where Terraform should write the backup for the destination state. This can't be disabled. If not set, Terraform will write it to the same path as the destination state file with a backup extension. This only needs to be specified if -state-out is set to a different path than -state.]:file:_files" \
    "-lock=[(true|false) Lock the state files when locking is supported.]:lock:(true false)" \
    "-lock-timeout=[(seconds) Duration to retry a state lock.]" \
    '-state=[(path) Path to the source state file. Defaults to the configured backend, or "terraform.tfstate"]:file:_files' \
    "-state-out=[(path) Path to the destination state file to write to. If this isn't specified, the source state file will be used. This can be a new or existing path.]:file:_files" \
    "::" \
    ":source:__statelist" \
    ":destination: "
}
__state_push() {
  # Flags for `terraform state push`; the positional argument is the
  # destination state file.
  local -a state_push_specs
  state_push_specs=(
    "-force[Write the state even if lineages don't match or the remote serial is higher.]"
    '-lock=[(true|false) Lock the state file when locking is supported.]:lock:(true false)'
    "-lock-timeout=[(seconds) Duration to retry a state lock.]"
    "::"
    ":destination:_files"
  )
  _arguments "${state_push_specs[@]}"
}
__state_rm() {
  # Flags for `terraform state rm`; positional arguments complete as
  # resource addresses from the current state.
  _arguments \
    "-dry-run[If set, prints out what would've been removed but doesn't actually remove anything.]" \
    "-backup=[(path) Path where Terraform should write the backup for the original state.]:file:_files" \
    "-lock=[(true|false) Lock the state files when locking is supported.]:lock:(true false)" \
    "-lock-timeout=[(seconds) Duration to retry a state lock.]" \
    '-state=[(path) Path to the state file to update. Defaults to the current workspace state.]:file:_files' \
    "*:address:__statelist"
}
__state_show() {
  # Flags for `terraform state show`; positional arguments complete as
  # resource addresses from the current state.
  local -a state_show_specs
  state_show_specs=(
    '-state=[(path) Path to a Terraform state file to use to look up Terraform-managed resources. By default it will use the state "terraform.tfstate" if it exists.]'
    "*:address:__statelist"
  )
  _arguments "${state_show_specs[@]}"
}
__statelist() {
  # Offer resource addresses from `terraform state list` as completion
  # candidates, honoring an explicit -state=... flag already typed on the
  # command line ($opt_args is populated by the _arguments specs above).
  # NOTE(review): the deliberately unquoted expansions word-split the
  # command output into one candidate per address; this assumes state
  # addresses and the -state path contain no whitespace — confirm.
  compadd $(terraform state list $opt_args[-state])
}
__taint() {
  # Flags for `terraform taint`; positional arguments complete as
  # resource addresses from the current state.
  _arguments \
    '-allow-missing[If specified, the command will succeed (exit code 0) even if the resource is missing.]' \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-module=[(path) The module path where the resource lives. By default this will be root. Child modules can be specified by names. Ex. "consul" or "consul.vpc" (nested modules).]' \
    '-no-color[If specified, output will not contain any color.]' \
    '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
    '-state-out=[(path) Path to write updated state file. By default, the "-state" path will be used.]' \
    "*:address:__statelist"
}
__untaint() {
  # Flags for `terraform untaint`. Unlike __taint, no positional
  # address completion is offered here.
  _arguments \
    '-allow-missing[If specified, the command will succeed (exit code 0) even if the resource is missing.]' \
    '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
    '-lock=[(true) Lock the state file when locking is supported.]' \
    '-lock-timeout=[(0s) Duration to retry a state lock.]' \
    '-module=[(path) The module path where the resource lives. By default this will be root. Child modules can be specified by names. Ex. "consul" or "consul.vpc" (nested modules).]' \
    '-no-color[If specified, output will not contain any color.]' \
    '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
    '-state-out=[(path) Path to write updated state file. By default, the "-state" path will be used.]'
}
__validate() {
  # Flags for `terraform validate`.
  local -a validate_specs
  validate_specs=(
    '-check-variables=[(true) If set to true (default), the command will check whether all required variables have been specified.]'
    '-no-color[If specified, output will not contain any color.]'
    '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]'
    '-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
  )
  _arguments "${validate_specs[@]}"
}
__workspace() {
  # Subcommands of `terraform workspace`.
  local -a workspace_subcommands
  workspace_subcommands=(
    'delete:Delete a workspace'
    'list:List Workspaces'
    'new:Create a new workspace'
    'select:Select a workspace'
    'show:Show the name of the current workspace'
  )
  _describe -t workspace "workspace commands" workspace_subcommands
}
# Top-level dispatch: when completing the first word, offer the subcommand
# list; otherwise route to the per-subcommand flag completer.
_arguments '*:: :->command'

if (( CURRENT == 1 )); then
  _describe -t commands "terraform command" _terraform_cmds
  return
fi

local -a _command_args
case "$words[1]" in
  apply)
    __apply ;;
  console)
    __console;;
  destroy)
    __destroy ;;
  debug)
    # BUGFIX: __debug is not defined anywhere in this file, so completing
    # `terraform debug <TAB>` produced a "command not found" error. Only
    # call it when some other loaded file actually provides it.
    (( $+functions[__debug] )) && __debug ;;
  env)
    __env ;;
  fmt)
    __fmt ;;
  force-unlock)
    __force_unlock ;;
  get)
    __get ;;
  graph)
    __graph ;;
  import)
    __import;;
  init)
    __init ;;
  login)
    __login ;;
  logout)
    __logout ;;
  output)
    __output ;;
  plan)
    __plan ;;
  providers)
    __providers ;;
  push)
    __push ;;
  refresh)
    __refresh ;;
  show)
    __show ;;
  state)
    # Offer the state subcommands first, then the chosen subcommand's flags.
    test $CURRENT -lt 3 && __state
    [[ $words[2] = "list" ]] && __state_list
    [[ $words[2] = "mv" ]] && __state_mv
    [[ $words[2] = "push" ]] && __state_push
    [[ $words[2] = "rm" ]] && __state_rm
    [[ $words[2] = "show" ]] && __state_show
    ;;
  taint)
    __taint ;;
  untaint)
    __untaint ;;
  validate)
    __validate ;;
  workspace)
    test $CURRENT -lt 3 && __workspace ;;
esac

@ -1 +0,0 @@
Subproject commit afaf2965b41fdc6ca66066e09382726aa0b6aa04

View File

@ -135,7 +135,7 @@ prompt_mypure_set_aws() {
if [[ "$AWS_PROFILE" =~ "$_aws_prod_profile" ]]; then
prompt_mypure_aws_prod=1
fi
prompt_mypure_aws="$AWS_PROFILE"
prompt_mypure_aws="/$AWS_PROFILE/"
fi
}
@ -540,8 +540,7 @@ prompt_mypure_async_callback() {
if (( code == 0 )); then
unset prompt_mypure_git_dirty
else
# typeset -g prompt_mypure_git_dirty="*"
typeset -g prompt_mypure_git_dirty=" ∆"
typeset -g prompt_mypure_git_dirty="*"
fi
[[ $prev_dirty != $prompt_mypure_git_dirty ]] && do_render=1

View File

@ -1,5 +1,17 @@
# "minimal" prompt theme: terse `%. %#` prompt; on capable terminals it also
# sets the xterm window/icon titles via escapes embedded in PS1.
function prompt_minimal_setup {
    # Setup PROMPT
    # Call fancyTerm directly: wrapping it in backticks would execute its
    # *output* as a command (ShellCheck SC2092). Assumes fancyTerm is silent
    # and signals via exit status — behavior is then identical.
    if fancyTerm; then
        # We are on xterminal
        W="%n@%m:%."    # window title
        I="%m:%."       # icon/tab title
        L="%B%. %#%b "  # visible prompt
        # NOTE(review): the %{]2;...%} / %{]1;...%} spans look like OSC
        # title escapes whose ESC byte is not visible here — confirm in repo.
        PS1="%{]2;$W%}%{]1;$I%}$L"
        unset W I L
    else
        # We are not on xterminal
        PS1="%B%. %#%b "
    fi
}
prompt_minimal_setup

View File

@ -0,0 +1,18 @@
# "niceOld" prompt theme: blue/white two-line prompt on fancy terminals
# (with xterm window/icon titles), simpler single-line prompt otherwise.
function prompt_niceOld_setup {
    if `fancyTerm`; then
        W="::: %n at %m in %. :::"  # window title
        I="%m::%."                  # icon/tab title
        # Two-line prompt: host::dir on a colored first line, % on the second.
        L="%B%K{blue}%F{white}@%m::%2~ %f%k
%F{yellow}%#%f%b "
        # NOTE(review): the %{]2;...%} spans appear to be OSC title escapes
        # whose ESC byte is not visible in this view — confirm against repo.
        PS1="%{]2;$W%}%{]1;$I%}$L"
        unset W I L
    else
        # We are not on xterminal
        PS1="%B%K{blue}%F{white}[%l] %n@%m:%~ %f%k%F{yellow}%#%f%b "
    fi
}
prompt_niceOld_setup
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -0,0 +1,20 @@
# "nice" prompt theme: styled two-line prompt using the $FX/$FG/$BG color
# arrays plus $(repo_char) for VCS status (both defined elsewhere).
function prompt_nice_setup {
    if `fancyTerm`; then
        title="::: %n at %m in %. :::"  # window title
        icon="%m::%."                   # icon/tab title
        # Single quotes keep $FX/$(repo_char) unexpanded so they are
        # evaluated at prompt-render time — presumably PROMPT_SUBST is
        # enabled elsewhere in this setup; confirm.
        prompt_='$FX[bold]$FX[italic]$BG[062]@%m::%2~$FX[reset]
$FX[bold]$(repo_char)$FG[220]%#$FX[reset] '
        # NOTE(review): the %{]2;...%} spans look like OSC title escapes
        # whose ESC byte is not visible here — confirm against repo.
        PS1="%{]2;$title%}%{]1;$icon%}$prompt_"
        unset title icon prompt_
    else
        # We are not on xterminal
        PS1='%B%K{blue}%F{white}[%l] $(repo_char)%n@%m:%~ %f%k%F{yellow}%#%f%b '
    fi
}
prompt_nice_setup
# vim: set ts=4 sw=4 tw=0 ft=zsh :

View File

@ -1,5 +1,17 @@
# "plain" prompt theme: `[tty] path %` prompt; on capable terminals it also
# sets the xterm window/icon titles via escapes embedded in PS1.
function prompt_plain_setup {
    # Setup PROMPT
    # Call fancyTerm directly: wrapping it in backticks would execute its
    # *output* as a command (ShellCheck SC2092). Assumes fancyTerm is silent
    # and signals via exit status — behavior is then identical.
    if fancyTerm; then
        # We are on xterminal
        W="%n@%m:%."        # window title
        I="%m:%."           # icon/tab title
        L="%B[%l] %~ %#%b " # visible prompt
        # NOTE(review): the %{]2;...%} spans look like OSC title escapes
        # whose ESC byte is not visible here — confirm against repo.
        PS1="%{]2;$W%}%{]1;$I%}$L"
        unset W I L
    else
        # We are not on xterminal
        PS1="%B[%l] %n@%m:%~ %#%b "
    fi
}
prompt_plain_setup

View File

@ -1,5 +1,17 @@
# "server" prompt theme: time-prefixed prompt with a truncated path,
# designed for remote hosts; sets xterm titles on fancy terminals.
function prompt_server_setup {
    # Setup PROMPT
    if `fancyTerm`; then
        # We are on xterminal
        W="%n@%m:%."  # window title
        I="%m:%."     # icon/tab title
        # %4(~:...:)%3~ shows "..." plus the last 3 path components when deep.
        L="%T %B@%m %4(~:...:)%3~ %#%b "
        # NOTE(review): the %{]2;...%} spans look like OSC title escapes
        # whose ESC byte is not visible here — confirm against repo.
        PS1="%{]2;$W%}%{]1;$I%}$L"
        unset W I L
    else
        # We are not on xterminal
        PS1="%T %B%n@%m:%~ %#%b "
    fi
}
prompt_server_setup

View File

@ -0,0 +1,19 @@
# "minimal" prompt theme (newer variant): truncated-path prompt; sets the
# xterm window/icon titles on fancy terminals.
function prompt_minimal_setup {
    # Setup PROMPT
    if `fancyTerm`; then
        # We are on xterminal
        W="%n@%m:%."  # window title
        I="%m:%."     # icon/tab title
        # %3(~:...:)%2~ shows "..." plus the last 2 path components when deep.
        L="%B%3(~:...:)%2~ %#%b "
        # NOTE(review): the %{]2;...%} spans look like OSC title escapes
        # whose ESC byte is not visible here — confirm against repo.
        PS1="%{]2;$W%}%{]1;$I%}$L"
        unset W I L
    else
        # We are not on xterminal
        PS1="%B%. %#%b "
    fi
}
prompt_minimal_setup
# vim: set ts=4 sw=4 tw=0 ft=zsh :

@ -1 +1 @@
Subproject commit 92b8e9057988566b37ff695e70e2e9bbeb7196c8
Subproject commit b122bbfca59867f3f0d47b553e656b436ae03a9d

View File

@ -0,0 +1 @@
https://github.com/sindresorhus/pure.git

View File

@ -1,672 +0,0 @@
#!/usr/bin/env zsh
#
# zsh-async
#
# version: v1.8.5
# author: Mathias Fredriksson
# url: https://github.com/mafredri/zsh-async
#
typeset -g ASYNC_VERSION=1.8.5
# Produce debug output from zsh-async when set to 1.
typeset -g ASYNC_DEBUG=${ASYNC_DEBUG:-0}
# Execute commands that can manipulate the environment inside the async worker. Return output via callback.
_async_eval() {
	# Local so the ASYNC_JOB_NAME override below doesn't leak to later jobs.
	local ASYNC_JOB_NAME
	# Rename job to _async_eval and redirect all eval output to cat running
	# in _async_job. Here, stdout and stderr are not separated for
	# simplicity, this could be improved in the future.
	{
		eval "$@"
	} &> >(ASYNC_JOB_NAME=[async/eval] _async_job 'command -p cat')
}
# Wrapper for jobs executed by the async worker, gives output in parseable format with execution time
_async_job() {
	# Disable xtrace as it would mangle the output.
	setopt localoptions noxtrace
	# Store start time for job.
	float -F duration=$EPOCHREALTIME
	# Run the command and capture both stdout (`eval`) and stderr (`cat`) in
	# separate subshells. When the command is complete, we grab write lock
	# (mutex token) and output everything except stderr inside the command
	# block, after the command block has completed, the stdin for `cat` is
	# closed, causing stderr to be appended with a $'\0' at the end to mark the
	# end of output from this job.
	local jobname=${ASYNC_JOB_NAME:-$1} out
	out="$(
		local stdout stderr ret tok
		{
			stdout=$(eval "$@")
			ret=$?
			duration=$(( EPOCHREALTIME - duration )) # Calculate duration.
			# Payload: <NUL><jobname> <ret> <stdout> <duration>, (q)-quoted.
			print -r -n - $'\0'${(q)jobname} $ret ${(q)stdout} $duration
		} 2> >(stderr=$(command -p cat) && print -r -n - " "${(q)stderr}$'\0')
	)"
	if [[ $out != $'\0'*$'\0' ]]; then
		# Corrupted output (aborted job?), skipping.
		return
	fi
	# Grab mutex lock, stalls until token is available.
	read -r -k 1 -p tok || return 1
	# Return output (<job_name> <return_code> <stdout> <duration> <stderr>).
	print -r -n - "$out"
	# Unlock mutex by inserting a token.
	print -n -p $tok
}
# The background worker manages all tasks and runs them without interfering with other processes
_async_worker() {
	# Reset all options to defaults inside async worker.
	emulate -R zsh
	# Make sure monitor is unset to avoid printing the
	# pids of child processes.
	unsetopt monitor
	# Redirect stderr to `/dev/null` in case unforeseen errors produced by the
	# worker. For example: `fork failed: resource temporarily unavailable`.
	# Some older versions of zsh might also print malloc errors (known to happen
	# on at least zsh 5.0.2 and 5.0.8) likely due to kill signals.
	exec 2>/dev/null
	# When a zpty is deleted (using -d) all the zpty instances created before
	# the one being deleted receive a SIGHUP, unless we catch it, the async
	# worker would simply exit (stop working) even though visible in the list
	# of zpty's (zpty -L). This has been fixed around the time of Zsh 5.4
	# (not released).
	if ! is-at-least 5.4.1; then
		TRAPHUP() {
			return 0 # Return 0, indicating signal was handled.
		}
	fi
	# Worker state: job-name -> pid map plus flags parsed from argv below.
	local -A storage
	local unique=0
	local notify_parent=0
	local parent_pid=0
	local coproc_pid=0
	local processing=0
	local -a zsh_hooks zsh_hook_functions
	zsh_hooks=(chpwd periodic precmd preexec zshexit zshaddhistory)
	zsh_hook_functions=(${^zsh_hooks}_functions)
	unfunction $zsh_hooks &>/dev/null # Deactivate all zsh hooks inside the worker.
	unset $zsh_hook_functions # And hooks with registered functions.
	unset zsh_hooks zsh_hook_functions # Cleanup.
	close_idle_coproc() {
		local -a pids
		pids=(${${(v)jobstates##*:*:}%\=*})
		# If coproc (cat) is the only child running, we close it to avoid
		# leaving it running indefinitely and cluttering the process tree.
		if (( ! processing )) && [[ $#pids = 1 ]] && [[ $coproc_pid = $pids[1] ]]; then
			coproc :
			coproc_pid=0
		fi
	}
	child_exit() {
		close_idle_coproc
		# On older version of zsh (pre 5.2) we notify the parent through a
		# SIGWINCH signal because `zpty` did not return a file descriptor (fd)
		# prior to that.
		if (( notify_parent )); then
			# We use SIGWINCH for compatibility with older versions of zsh
			# (pre 5.1.1) where other signals (INFO, ALRM, USR1, etc.) could
			# cause a deadlock in the shell under certain circumstances.
			kill -WINCH $parent_pid
		fi
	}
	# Register a SIGCHLD trap to handle the completion of child processes.
	trap child_exit CHLD
	# Process option parameters passed to worker.
	while getopts "np:uz" opt; do
		case $opt in
			n) notify_parent=1;;
			p) parent_pid=$OPTARG;;
			u) unique=1;;
			z) notify_parent=0;; # Uses ZLE watcher instead.
		esac
	done
	# Terminate all running jobs, note that this function does not
	# reinstall the child trap.
	terminate_jobs() {
		trap - CHLD # Ignore child exits during kill.
		coproc : # Quit coproc.
		coproc_pid=0 # Reset pid.
		if is-at-least 5.4.1; then
			trap '' HUP # Catch the HUP sent to this process.
			kill -HUP -$$ # Send to entire process group.
			trap - HUP # Disable HUP trap.
		else
			# We already handle HUP for Zsh < 5.4.1.
			kill -HUP -$$ # Send to entire process group.
		fi
	}
	killjobs() {
		local tok
		local -a pids
		pids=(${${(v)jobstates##*:*:}%\=*})
		# No need to send SIGHUP if no jobs are running.
		(( $#pids == 0 )) && continue
		(( $#pids == 1 )) && [[ $coproc_pid = $pids[1] ]] && continue
		# Grab lock to prevent half-written output in case a child
		# process is in the middle of writing to stdin during kill.
		(( coproc_pid )) && read -r -k 1 -p tok
		terminate_jobs
		trap child_exit CHLD # Reinstall child trap.
	}
	# Main loop: read NUL-terminated requests from the zpty and run them.
	local request do_eval=0
	local -a cmd
	while :; do
		# Wait for jobs sent by async_job.
		read -r -d $'\0' request || {
			# Unknown error occurred while reading from stdin, the zpty
			# worker is likely in a broken state, so we shut down.
			terminate_jobs
			# Stdin is broken and in case this was an unintended
			# crash, we try to report it as a last hurrah.
			print -r -n $'\0'"'[async]'" $(( 127 + 3 )) "''" 0 "'$0:$LINENO: zpty fd died, exiting'"$'\0'
			# We use `return` to abort here because using `exit` may
			# result in an infinite loop that never exits and, as a
			# result, high CPU utilization.
			return $(( 127 + 1 ))
		}
		# We need to clean the input here because sometimes when a zpty
		# has died and been respawned, messages will be prefixed with a
		# carriage return (\r, or \C-M).
		request=${request#$'\C-M'}
		# Check for non-job commands sent to worker
		case $request in
			_killjobs) killjobs; continue;;
			_async_eval*) do_eval=1;;
		esac
		# Parse the request using shell parsing (z) to allow commands
		# to be parsed from single strings and multi-args alike.
		cmd=("${(z)request}")
		# Name of the job (first argument).
		local job=$cmd[1]
		# Check if a worker should perform unique jobs, unless
		# this is an eval since they run synchronously.
		if (( !do_eval )) && (( unique )); then
			# Check if a previous job is still running, if yes,
			# skip this job and let the previous one finish.
			for pid in ${${(v)jobstates##*:*:}%\=*}; do
				if [[ ${storage[$job]} == $pid ]]; then
					continue 2
				fi
			done
		fi
		# Guard against closing coproc from trap before command has started.
		processing=1
		# Because we close the coproc after the last job has completed, we must
		# recreate it when there are no other jobs running.
		if (( ! coproc_pid )); then
			# Use coproc as a mutex for synchronized output between children.
			coproc command -p cat
			coproc_pid="$!"
			# Insert token into coproc
			print -n -p "t"
		fi
		if (( do_eval )); then
			shift cmd # Strip _async_eval from cmd.
			_async_eval $cmd
		else
			# Run job in background, completed jobs are printed to stdout.
			_async_job $cmd &
			# Store pid because zsh job manager is extremely inflexible (show jobname as non-unique '$job')...
			storage[$job]="$!"
		fi
		processing=0 # Disable guard.
		if (( do_eval )); then
			do_eval=0
			# When there are no active jobs we can't rely on the CHLD trap to
			# manage the coproc lifetime.
			close_idle_coproc
		fi
	done
}
#
# Get results from finished jobs and pass it to the to callback function. This is the only way to reliably return the
# job name, return code, output and execution time and with minimal effort.
#
# If the async process buffer becomes corrupt, the callback will be invoked with the first argument being `[async]` (job
# name), non-zero return code and fifth argument describing the error (stderr).
#
# usage:
# async_process_results <worker_name> <callback_function>
#
# callback_function is called with the following parameters:
# $1 = job name, e.g. the function passed to async_job
# $2 = return code
# $3 = resulting stdout from execution
# $4 = execution time, floating point e.g. 2.05 seconds
# $5 = resulting stderr from execution
# $6 = has next result in buffer (0 = buffer empty, 1 = yes)
#
async_process_results() {
	setopt localoptions unset noshwordsplit noksharrays noposixidentifiers noposixstrings
	local worker=$1
	local callback=$2
	local caller=$3   # "trap"/"watcher" suppress the no-results return code.
	local -a items
	local null=$'\0' data
	integer -l len pos num_processed has_next
	typeset -gA ASYNC_PROCESS_BUFFER
	# Read output from zpty and parse it if available.
	while zpty -r -t $worker data 2>/dev/null; do
		ASYNC_PROCESS_BUFFER[$worker]+=$data
		len=${#ASYNC_PROCESS_BUFFER[$worker]}
		pos=${ASYNC_PROCESS_BUFFER[$worker][(i)$null]} # Get index of NULL-character (delimiter).
		# Keep going until we find a NULL-character.
		if (( ! len )) || (( pos > len )); then
			continue
		fi
		# Drain every complete (NUL-terminated) record in the buffer.
		while (( pos <= len )); do
			# Take the content from the beginning, until the NULL-character and
			# perform shell parsing (z) and unquoting (Q) as an array (@).
			items=("${(@Q)${(z)ASYNC_PROCESS_BUFFER[$worker][1,$pos-1]}}")
			# Remove the extracted items from the buffer.
			ASYNC_PROCESS_BUFFER[$worker]=${ASYNC_PROCESS_BUFFER[$worker][$pos+1,$len]}
			len=${#ASYNC_PROCESS_BUFFER[$worker]}
			if (( len > 1 )); then
				pos=${ASYNC_PROCESS_BUFFER[$worker][(i)$null]} # Get index of NULL-character (delimiter).
			fi
			has_next=$(( len != 0 ))
			if (( $#items == 5 )); then
				items+=($has_next)
				$callback "${(@)items}" # Send all parsed items to the callback.
				(( num_processed++ ))
			elif [[ -z $items ]]; then
				# Empty items occur between results due to double-null ($'\0\0')
				# caused by commands being both pre and suffixed with null.
			else
				# In case of corrupt data, invoke callback with *async* as job
				# name, non-zero exit status and an error message on stderr.
				$callback "[async]" 1 "" 0 "$0:$LINENO: error: bad format, got ${#items} items (${(q)items})" $has_next
			fi
		done
	done
	(( num_processed )) && return 0
	# Avoid printing exit value when `setopt printexitvalue` is active.
	[[ $caller = trap || $caller = watcher ]] && return 0
	# No results were processed
	return 1
}
# Watch worker for output
# ZLE file-descriptor handler: $1 is the fd that became readable (mapped to a
# worker via ASYNC_PTYS), $2 is an optional error condition from zle -F.
_async_zle_watcher() {
	setopt localoptions noshwordsplit
	typeset -gA ASYNC_PTYS ASYNC_CALLBACKS
	local worker=$ASYNC_PTYS[$1]
	local callback=$ASYNC_CALLBACKS[$worker]
	if [[ -n $2 ]]; then
		# from man zshzle(1):
		# `hup' for a disconnect, `nval' for a closed or otherwise
		# invalid descriptor, or `err' for any other condition.
		# Systems that support only the `select' system call always use
		# `err'.
		# this has the side effect to unregister the broken file descriptor
		async_stop_worker $worker
		if [[ -n $callback ]]; then
			$callback '[async]' 2 "" 0 "$0:$LINENO: error: fd for $worker failed: zle -F $1 returned error $2" 0
		fi
		return
	fi;
	if [[ -n $callback ]]; then
		async_process_results $worker $callback watcher
	fi
}
# Write a NUL-terminated job request to a worker's zpty. $1 is the caller name
# (used in error messages), $2 the worker; the rest is the request payload.
_async_send_job() {
	setopt localoptions noshwordsplit noksharrays noposixidentifiers noposixstrings
	local caller=$1
	local worker=$2
	shift 2
	# Bail out (via callback if registered, else stderr) when the worker
	# zpty does not exist.
	zpty -t $worker &>/dev/null || {
		typeset -gA ASYNC_CALLBACKS
		local callback=$ASYNC_CALLBACKS[$worker]
		if [[ -n $callback ]]; then
			$callback '[async]' 3 "" 0 "$0:$LINENO: error: no such worker: $worker" 0
		else
			print -u2 "$caller: no such async worker: $worker"
		fi
		return 1
	}
	zpty -w $worker "$@"$'\0'
}
#
# Start a new asynchronous job on specified worker, assumes the worker is running.
#
# Note if you are using a function for the job, it must have been defined before the worker was
# started or you will get a `command not found` error.
#
# usage:
# async_job <worker_name> <my_function> [<function_params>]
#
# Queue a job on the given worker. The first remaining argument doubles as
# the job name reported to the callback; multi-argument commands are quoted
# so they survive the round-trip through the worker's shell parsing.
async_job() {
	setopt localoptions noshwordsplit noksharrays noposixidentifiers noposixstrings
	local worker=$1; shift
	local -a job_cmd
	job_cmd=("$@")
	# More than one argument: (q)-quote each word so special characters
	# are preserved when the worker re-parses the request string.
	(( ${#job_cmd} > 1 )) && job_cmd=(${(q)job_cmd})
	_async_send_job $0 $worker "$job_cmd"
}
#
# Evaluate a command (like async_job) inside the async worker, then worker environment can be manipulated. For example,
# issuing a cd command will change the PWD of the worker which will then be inherited by all future async jobs.
#
# Output will be returned via callback, job name will be [async/eval].
#
# usage:
# async_worker_eval <worker_name> <my_function> [<function_params>]
#
async_worker_eval() {
	setopt localoptions noshwordsplit noksharrays noposixidentifiers noposixstrings
	local worker=$1; shift
	local -a cmd
	cmd=("$@")
	if (( $#cmd > 1 )); then
		cmd=(${(q)cmd}) # Quote special characters in multi argument commands.
	fi
	# Quote the cmd in case RC_EXPAND_PARAM is set.
	# The _async_eval prefix makes the worker eval the command in its own
	# environment instead of running it as a background job.
	_async_send_job $0 $worker "_async_eval $cmd"
}
# This function traps notification signals and calls all registered callbacks
# SIGWINCH trap handler: drain results for every worker that has a
# registered callback (used when no ZLE watcher is available).
_async_notify_trap() {
	setopt localoptions noshwordsplit
	local worker
	for worker in ${(k)ASYNC_CALLBACKS}; do
		async_process_results $worker ${ASYNC_CALLBACKS[$worker]} trap
	done
}
#
# Register a callback for completed jobs. As soon as a job is finished, async_process_results will be called with the
# specified callback function. This requires that a worker is initialized with the -n (notify) option.
#
# usage:
# async_register_callback <worker_name> <callback_function>
#
async_register_callback() {
	setopt localoptions noshwordsplit nolocaltraps
	typeset -gA ASYNC_PTYS ASYNC_CALLBACKS
	local worker=$1; shift
	# Remaining arguments form the callback command line.
	ASYNC_CALLBACKS[$worker]="$*"
	# Enable trap when the ZLE watcher is unavailable, allows
	# workers to notify (via -n) when a job is done.
	if [[ ! -o interactive ]] || [[ ! -o zle ]]; then
		trap '_async_notify_trap' WINCH
	elif [[ -o interactive ]] && [[ -o zle ]]; then
		# Find the fd mapped to this worker and attach the ZLE watcher to it.
		local fd w
		for fd w in ${(@kv)ASYNC_PTYS}; do
			if [[ $w == $worker ]]; then
				zle -F $fd _async_zle_watcher # Register the ZLE handler.
				break
			fi
		done
	fi
}
#
# Unregister the callback for a specific worker.
#
# usage:
# async_unregister_callback <worker_name>
#
# Drop the completion callback registered for worker $1 (no-op if absent).
async_unregister_callback() {
	typeset -gA ASYNC_CALLBACKS
	local worker=$1
	unset "ASYNC_CALLBACKS[$worker]"
}
#
# Flush all current jobs running on a worker. This will terminate any and all running processes under the worker, use
# with caution.
#
# usage:
# async_flush_jobs <worker_name>
#
async_flush_jobs() {
	setopt localoptions noshwordsplit
	local worker=$1; shift
	# Check if the worker exists
	zpty -t $worker &>/dev/null || return 1
	# Send kill command to worker
	async_job $worker "_killjobs"
	# Clear the zpty buffer.
	local junk
	if zpty -r -t $worker junk '*'; then
		(( ASYNC_DEBUG )) && print -n "async_flush_jobs $worker: ${(V)junk}"
		while zpty -r -t $worker junk '*'; do
			(( ASYNC_DEBUG )) && print -n "${(V)junk}"
		done
		(( ASYNC_DEBUG )) && print
	fi
	# Finally, clear the process buffer in case of partially parsed responses.
	typeset -gA ASYNC_PROCESS_BUFFER
	unset "ASYNC_PROCESS_BUFFER[$worker]"
}
#
# Start a new async worker with optional parameters, a worker can be told to only run unique tasks and to notify a
# process when tasks are complete.
#
# usage:
# async_start_worker <worker_name> [-u] [-n] [-p <pid>]
#
# opts:
# -u unique (only unique job names can run)
# -n notify through SIGWINCH signal
# -p pid to notify (defaults to current pid)
#
async_start_worker() {
	setopt localoptions noshwordsplit noclobber
	local worker=$1; shift
	local -a args
	args=("$@")
	# Already running: nothing to do.
	zpty -t $worker &>/dev/null && return
	typeset -gA ASYNC_PTYS
	typeset -h REPLY
	typeset has_xtrace=0
	if [[ -o interactive ]] && [[ -o zle ]]; then
		# Inform the worker to ignore the notify flag and that we're
		# using a ZLE watcher instead.
		args+=(-z)
		if (( ! ASYNC_ZPTY_RETURNS_FD )); then
			# When zpty doesn't return a file descriptor (on older versions of zsh)
			# we try to guess it anyway.
			integer -l zptyfd
			exec {zptyfd}>&1  # Open a new file descriptor (above 10).
			exec {zptyfd}>&-  # Close it so it's free to be used by zpty.
		fi
	fi
	# Workaround for stderr in the main shell sometimes (incorrectly) being
	# reassigned to /dev/null by the reassignment done inside the async
	# worker.
	# See https://github.com/mafredri/zsh-async/issues/35.
	integer errfd=-1
	# Redirect of errfd is broken on zsh 5.0.2.
	if is-at-least 5.0.8; then
		exec {errfd}>&2
	fi
	# Make sure async worker is started without xtrace
	# (the trace output interferes with the worker).
	[[ -o xtrace ]] && {
		has_xtrace=1
		unsetopt xtrace
	}
	if (( errfd != -1 )); then
		zpty -b $worker _async_worker -p $$ $args 2>&$errfd
	else
		zpty -b $worker _async_worker -p $$ $args
	fi
	local ret=$?
	# Re-enable it if it was enabled, for debugging.
	(( has_xtrace )) && setopt xtrace
	(( errfd != -1 )) && exec {errfd}>& -
	if (( ret )); then
		async_stop_worker $worker
		return 1
	fi
	if ! is-at-least 5.0.8; then
		# For ZSH versions older than 5.0.8 we delay a bit to give
		# time for the worker to start before issuing commands,
		# otherwise it will not be ready to receive them.
		sleep 0.001
	fi
	if [[ -o interactive ]] && [[ -o zle ]]; then
		if (( ! ASYNC_ZPTY_RETURNS_FD )); then
			REPLY=$zptyfd # Use the guessed value for the file descriptor.
		fi
		ASYNC_PTYS[$REPLY]=$worker # Map the file descriptor to the worker.
	fi
}
#
# Stop one or multiple workers that are running, all unfetched and incomplete work will be lost.
#
# usage:
# async_stop_worker <worker_name_1> [<worker_name_2>]
#
async_stop_worker() {
	setopt localoptions noshwordsplit
	local ret=0 worker k v
	for worker in $@; do
		# Find and unregister the zle handler for the worker
		for k v in ${(@kv)ASYNC_PTYS}; do
			if [[ $v == $worker ]]; then
				zle -F $k
				unset "ASYNC_PTYS[$k]"
			fi
		done
		async_unregister_callback $worker
		# Remember the first failure but keep stopping remaining workers.
		zpty -d $worker 2>/dev/null || ret=$?
		# Clear any partial buffers.
		typeset -gA ASYNC_PROCESS_BUFFER
		unset "ASYNC_PROCESS_BUFFER[$worker]"
	done
	return $ret
}
#
# Initialize the required modules for zsh-async. To be called before using the zsh-async library.
#
# usage:
# async_init
#
async_init() {
	# Idempotent: only run the module/feature detection once per shell.
	(( ASYNC_INIT_DONE )) && return
	typeset -g ASYNC_INIT_DONE=1
	zmodload zsh/zpty
	zmodload zsh/datetime
	# Load is-at-least for reliable version check.
	autoload -Uz is-at-least
	# Check if zsh/zpty returns a file descriptor or not,
	# shell must also be interactive with zle enabled.
	typeset -g ASYNC_ZPTY_RETURNS_FD=0
	[[ -o interactive ]] && [[ -o zle ]] && {
		typeset -h REPLY
		zpty _async_test :
		(( REPLY )) && ASYNC_ZPTY_RETURNS_FD=1
		zpty -d _async_test
	}
}
# Entry point when this file is autoloaded as the `async` function:
# simply performs (idempotent) initialization.
async() {
	async_init
}
async "$@"

View File

@ -0,0 +1,11 @@
# Set or clear the active AWS profile environment variables.
# No argument: unset all profile vars and report. With $1: export it as the
# default/CLI/Elastic-Beanstalk profile.
# (Uses `return`, so this is meant to be sourced/autoloaded, not executed.)
if [[ -z "$1" ]]
then
    unset AWS_DEFAULT_PROFILE AWS_PROFILE AWS_EB_PROFILE
    echo AWS profile cleared.
    return
fi
# saml2aws login kept for reference; currently disabled.
#saml2aws login -a $1 -p $1
#eval $(saml2aws script -a $1 -p $1 --skip-prompt)
export AWS_DEFAULT_PROFILE=$1
export AWS_PROFILE=$1
export AWS_EB_PROFILE=$1

Binary file not shown.

View File

@ -1,68 +0,0 @@
# awsprofile — select, clear or list AWS CLI profiles from ~/.aws/config.
#   <pattern> [region|alias]  activate the matching profile (exact name first,
#                             then substring/regex match; ambiguous -> list)
#   (no args, profile set)    clear the current profile
#   (no args, none set)       list available profiles
profile=$1
[ $# -gt 0 ] && shift
region=${1:-""}
# Short datacenter aliases -> real AWS region names.
declare -A symRegions
symRegions[ire1]='eu-west-1'
symRegions[fra1]='eu-central-1'
symRegions[ohio1]='us-east-2'
lookup_region=${symRegions[$region]}
[ -z $lookup_region ] || region=${lookup_region}
if [ ! -f ~/.aws/config ]; then
  echo "no aws config file found, bailing out..."
  return 1
fi
if [ $profile ]; then # We got a profile pattern, look for a match
  # look for an exact match
  grep -qE "\[profile $profile\]" ~/.aws/config
  if [ $? -eq 0 ]; then # We have an exact match
    match=$profile
  else # Look for regex match
    match=$(grep -E "\[profile .*$profile.*" ~/.aws/config | sed -E 's/\[profile (.+)\]/\1/') # Array of matching profiles
    match_no=$(echo $match|wc -l) # Number of profiles matching
    if [ $match_no -gt 1 ]; then # more than one match
      print -P "$FX[bold]multiple profile match:"
      print -P "$FG[003]"
      echo $match
      print -P "$FX[reset]"
      return
    fi
  fi
  if [ $match ]; then # Single match, setting profile
    # export AWS_CLI_AUTO_PROMPT=off
    # Set default profile and profile
    export AWS_DEFAULT_PROFILE=$match
    export AWS_PROFILE=${AWS_DEFAULT_PROFILE}
    # Get default region and set region to argument or default region
    export AWS_DEFAULT_REGION=$(aws configure get region)
    export AWS_REGION=${region:-$AWS_DEFAULT_REGION}
    print -P "$FX[bold]activating profile $FG[075]$match$FX[reset]$FX[bold] on region $FG[075]$AWS_REGION$FX[reset]"
    # export AWS_CLI_AUTO_PROMPT=on
    return
  else
    print -P "$FX[bold]$FG[009]no match for $profile$FX[reset]"
    return
  fi
elif [ $AWS_PROFILE ]; then # no profile passed, clean up current one, logout
  unset AWS_DEFAULT_PROFILE AWS_PROFILE AWS_DEFAULT_REGION AWS_REGION
  print -P "$FX[bold]profile cleared$FX[reset]."
  return
fi
# Fall-through: nothing to set or clear, list what's configured.
print -P "$FX[bold]available profiles"
print -P "$FG[075]"
grep profile ~/.aws/config | sed -E 's/\[profile (.+)\]/\1/'
print -P "$FX[reset]"
# vim: set ts=2 sw=2 tw=0 ft=sh :

BIN
zsh.d/zshfunctions/bak.zwc Normal file

Binary file not shown.

BIN
zsh.d/zshfunctions/cdb.zwc Normal file

Binary file not shown.

View File

@ -1 +0,0 @@
# dkh — delete a known_hosts entry. Accepts "host" or "user@host" (the part
# after the first '@' is used; with no '@', the whole argument).
# Fix: quote both expansions so hostnames never word-split or glob (SC2086).
ssh-keygen -R "$(echo "$1" | cut -d@ -f2)"

1
zsh.d/zshfunctions/dust Normal file
View File

@ -0,0 +1 @@
# dust — disk-usage summary of everything in the current directory.
# NOTE(review): `hdu` is presumably a human-readable du wrapper defined
# elsewhere in this setup — confirm.
hdu -s *

BIN
zsh.d/zshfunctions/dust.zwc Normal file

Binary file not shown.

View File

@ -1,72 +0,0 @@
# gitfetch — background git fetch helper (intended to run inside an async
# worker): cd into $1 and fetch the current branch's upstream if the last
# fetch is older than $GIT_FETCH_INTERVAL (default 30s).
# Returns: 0 ok/skipped, 97 no upstream, 98 password prompt killed, 99 fetch failed.
local _gitdir
local last_fetch
local diff
# exec 3>&1 4>&2
# trap 'exec 2>&4 1>&3' 0 1 2 3
# exec 1>>~/tmp/gitfetch.log 2>&1
cd $1
gitdir=$(git rev-parse --git-dir 2> /dev/null) || return 0
# Age of FETCH_HEAD (or 0 if it doesn't exist yet) in seconds.
last_fetch=$(zstat +mtime $gitdir/FETCH_HEAD 2> /dev/null || echo 0)
let "diff = $(strftime %s) - $last_fetch"
if [ $diff -gt ${GIT_FETCH_INTERVAL:-30} ]; then
  setopt localoptions noshwordsplit
  # Sets `GIT_TERMINAL_PROMPT=0` to disable authentication prompt for Git fetch (Git 2.3+).
  export GIT_TERMINAL_PROMPT=0
  # Set SSH `BatchMode` to disable all interactive SSH password prompting.
  export GIT_SSH_COMMAND="${GIT_SSH_COMMAND:-"ssh"} -o BatchMode=yes"
  local ref
  ref=$(command git symbolic-ref -q HEAD)
  local -a remote
  remote=($(command git for-each-ref --format='%(upstream:remotename) %(refname)' $ref))
  if [[ -z $remote[1] ]]; then
    # No remote specified for this branch, skip fetch.
    return 97
  fi
  # Default return code, which indicates Git fetch failure.
  local fail_code=99
  # Guard against all forms of password prompts. By setting the shell into
  # MONITOR mode we can notice when a child process prompts for user input
  # because it will be suspended. Since we are inside an async worker, we
  # have no way of transmitting the password and the only option is to
  # kill it. If we don't do it this way, the process will corrupt with the
  # async worker.
  setopt localtraps monitor
  # Make sure local HUP trap is unset to allow for signal propagation when
  # the async worker is flushed.
  trap - HUP
  trap '
    # Unset trap to prevent infinite loop
    trap - CHLD
    if [[ $jobstates = suspended* ]]; then
      # Set fail code to password prompt and kill the fetch.
      fail_code=98
      kill %%
    fi
  ' CHLD
  # Only fetch information for the current branch and avoid
  # fetching tags or submodules to speed up the process.
  command git -c gc.auto=0 fetch \
    --quiet \
    --no-tags \
    --recurse-submodules=yes \
    $remote &>/dev/null &
  wait $! || return $fail_code
  unsetopt monitor
fi
return 0
# vim: set ts=2 sw=2 tw=0 ft=sh :

View File

@ -1,58 +0,0 @@
# info — print a summary of the current shell environment: kernel, CPU,
# library path, Homebrew and iTerm2 status, and availability of common tools.
# Relies on $KERNEL/$LIBRARY and the $FX/$FG color arrays set up elsewhere.
local cpu_type
echo
if [ $KERNEL = "Darwin" ]; then
  cpu_type=$(sysctl -n machdep.cpu.brand_string)
elif [ $KERNEL = "Linux" ]; then
  cpu_type=$(cat /proc/cpuinfo|grep "model name"|cut -d: -f2|uniq)
fi
print -P "$FX[bold]Kernel:$FX[reset] $KERNEL"
print -P "$FX[bold]CPU type: $FX[reset]$cpu_type"
print -P "$FX[bold]Library:$FX[reset] $LIBRARY"
echo
if [ $HOMEBREW_PREFIX ]; then
  print -P "$FX[bold]Homebrew enabled: $FG[002]yes$FX[reset]"
  print -P "$FX[bold]Homebrew prefix: $FG[002]$HOMEBREW_PREFIX$FX[reset]"
else
  print -P "$FX[bold]Homebrew enabled: $FG[001]no$FX[reset]"
fi
print -nP "\n$FX[bold]iTerm2 integration: "
if [ -e $HOME/.iterm2_shell_integration.zsh ]; then
  print -P "$FG[002]present$FX[reset]"
else
  print -P "$FG[001]absent$FX[reset]"
fi
echo
# Probe for commonly-used external tools on $PATH.
for plugin in thefuck pyenv kubectl aws; do
  print -nP "$FX[bold]$plugin: "
  if type $plugin > /dev/null; then
    print -P "$FG[002]installed$FX[reset]"
  else
    print -P "$FG[001]absent$FX[reset]"
  fi
done
print -nP "$FX[bold]zsh z: "
if [ -e $LIBRARY/plugins/zsh-z/zsh-z.plugin.zsh ]; then
  print -P "$FG[002]present$FX[reset]"
else
  print -P "$FG[001]absent$FX[reset]"
fi
echo
print -nP "$FX[bold]oh-my-posh: "
if type oh-my-posh > /dev/null; then
  print -P "$FG[002]installed$FX[reset] $FX[bold]- theme: $FG[002]$POSH_THEME$FX[reset]"
else
  print -P "$FG[001]absent$FX[reset]"
fi
# vim: set ts=2 sw=2 tw=0 ft=sh :

View File

@ -1,43 +1,5 @@
usage() {
echo "x509 <certfile>"
echo "x509 -r <hostname:port>"
}
if [ $# -eq 0 ]; then
usage
return 0
fi
help=0
remote=0
while getopts ":hr:" arg; do
case $arg in
r)
remote=1
host=$OPTARG
hostname=$(echo $host|cut -d: -f1)
;;
h|*) help=1
;;
esac
done
if [ $help -eq 1 ]; then
usage
return 0
fi
if [ $remote -eq 0 ]; then
if [ -f $* ]; then
openssl x509 -in $* -noout -text
else
echo "$* not found"
usage
return 0
fi
if [ -r "$1" ]; then
openssl x509 -in $1 -noout -text | less
else
openssl s_client -connect $host -servername $hostname < /dev/null | openssl x509 -noout -text
echo "x509 <certfile>"
fi
# vim: set ts=2 sw=2 tw=0 ft=sh :

BIN
zsh.d/zshfunctions/x509.zwc Normal file

Binary file not shown.

53
zshrc
View File

@ -6,36 +6,43 @@
#
# Global Order: zshenv, zprofile, zshrc, zlogin
#
# Determine zsh conf file position. On Debian/Ubuntu is /etc/zsh, on other should be /etc
#
# Load zprof if we need to profile startup time
# at the bottom of ~/.zshrc add `zprof >! zsh_profile` to save
# profiling data at startup
# zmodload zsh/zprof
# Default PATH
PATH="/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:$HOME/bin"
# Determine zsh conf file position.
local BASE="/etc"
# On Debian/Ubuntu is /etc/zsh, on other should be /etc
[ -d /etc/zsh ] && BASE="/etc/zsh"
[ -d /etc/zsh ] && BASE="/etc/zsh" || BASE="/etc"
# If there is a .zsh directory into user's home, use that one instead (local install)
[ -d ~/.zsh ] && BASE=~/.zsh
# Load one config tree ($1): prepend its zshfunctions/ and completions/
# directories to fpath, then source every numbered *.zsh snippet in order.
function loadRC {
    if [ -d $1/zshfunctions ]; then
        fpath=($1/zshfunctions $fpath)
    fi
    if [ -d $1/completions ]; then
        fpath=($1/completions $fpath)
    fi
    # # Add local customization file
    # if [ -w $1 ]; then
    # [ -f $1/99-local.zsh ] || echo "# Local customizations" > $1/99-local.zsh
    # fi
    # Process all .zsh files
    # Leading number prefix (e.g. 10-foo.zsh) fixes the load order.
    for zshFile in $1/[0-9]*.zsh; do
        . $zshFile
    done
}
local KERNEL=`uname -s`
local LIBRARY=${BASE}/zsh.d
local KERNLIB=${LIBRARY}/$(uname -s)
local KERNLIB=${LIBRARY}/${KERNEL}
for lib in $KERNLIB $LIBRARY; do
[ -d $lib/zshfunctions ] && fpath=($lib/zshfunctions $fpath)
[ -d $lib/completions ] && fpath=($lib/completions $fpath)
loadRC ${LIBRARY}
loadRC ${KERNLIB}
for zshFile in $lib/[0-9]*.zsh; do
# echo $zshFile
source $zshFile
done
done
# Add local customization file
if [ -w $LIBRARY ]; then
[ -f $LIBRARY/99-local.zsh ] || echo "# Local customizations" > $LIBRARY/99-local.zsh
fi