v3.0.0 "The Great Overhaul"

=====================================================================

Notice the major version change which comes with breaking changes to
2.x! Reconstructs "library" functions for both python and zsh scwrypts,
with changes to virtualenv naming conventions (you'll need to refresh
all virtualenvs with the appropriate scwrypt).

--- Changes ------------------------------

- changed a naming convention across zsh scripts, particularly
  removing underscores where there is no need to avoid naming clash
  (e.g. 'zsh/lib/utils/io.zsh' renames '__STATUS' to 'STATUS')

- moved clients reliant on py.lib.http to the py.lib.http module

- python scripts now rely on py.lib.scwrypts.execute

- updated package.json in zx scripts to include `"type": "module"`

- 'scwrypts --list' commandline argument now includes additional
  relevant data for each scwrypt

- environment variables no longer add themselves to be staged in the
  '.env.template'

--- New Features -------------------------

- new 'use' syntax for disjoint import within zsh scripts; took me
  a very long time to convince myself this would be necessary

- introduced scwrypt "groups" to allow portable module creation;
  (i.e. the ability to add your own scripts from another repo!)

- py.lib.scwrypts.io provides a combined IO stream for quick, hybrid
  use of input/output files and stdin/stdout

- py.lib.fzf provides a wrapper to provide similar functionality to
  zsh/utils/io.zsh including fzf_(head|tail)

- improved efficiency of various scwrypts; notably reducing runtime
  of scwrypts/environment sync

- improved scwrypts CLI by adding new options for exact scwrypt
  matching, better filtering, and prettier/more-detailed interfaces

--- New Scripts --------------------------

- py/twilio )
    basic SMS integration with twilio
     - send-sms

- py/directus )
    interactive directus GET query
     - get-items

- py/discord )
    post message to discord channel or webhook
     - post-message
This commit is contained in:
Wryn (yage) Wagner 2023-02-21 18:44:27 -07:00
parent 7617c938b1
commit 76a746a53e
196 changed files with 3487 additions and 2097 deletions

View File

@ -5,6 +5,11 @@ export AWS_REGION=
export AWS__EFS__LOCAL_MOUNT_POINT= export AWS__EFS__LOCAL_MOUNT_POINT=
export AWS__S3__MEDIA_BUCKET= export AWS__S3__MEDIA_BUCKET=
export AWS__S3__MEDIA_TARGETS= export AWS__S3__MEDIA_TARGETS=
export DIRECTUS__API_TOKEN=
export DIRECTUS__BASE_URL=
export DISCORD__BOT_TOKEN=
export DISCORD__DEFAULT_AVATAR_URL=
export DISCORD__DEFAULT_CHANNEL_ID=
export I3__BORDER_PIXEL_SIZE= export I3__BORDER_PIXEL_SIZE=
export I3__DMENU_FONT_SIZE= export I3__DMENU_FONT_SIZE=
export I3__GLOBAL_FONT_SIZE= export I3__GLOBAL_FONT_SIZE=
@ -13,3 +18,8 @@ export LINEAR__API_TOKEN=
export REDIS_AUTH= export REDIS_AUTH=
export REDIS_HOST= export REDIS_HOST=
export REDIS_PORT= export REDIS_PORT=
export TWILIO__ACCOUNT_SID=
export TWILIO__API_KEY=
export TWILIO__API_SECRET=
export TWILIO__DEFAULT_PHONE_FROM=
export TWILIO__DEFAULT_PHONE_TO=

View File

@ -7,6 +7,13 @@ AWS__EFS__LOCAL_MOUNT_POINT | fully-qualified path to mount the EFS drive
AWS__S3__MEDIA_BUCKET | s3 bucket name and filesystem targets for media backups AWS__S3__MEDIA_BUCKET | s3 bucket name and filesystem targets for media backups
AWS__S3__MEDIA_TARGETS | AWS__S3__MEDIA_TARGETS |
DIRECTUS__API_TOKEN | details for a directus instance
DIRECTUS__BASE_URL |
DISCORD__BOT_TOKEN | details for discord bot
DISCORD__DEFAULT_AVATAR_URL |
DISCORD__DEFAULT_CHANNEL_ID |
I3__BORDER_PIXEL_SIZE | custom i3 configuration settings I3__BORDER_PIXEL_SIZE | custom i3 configuration settings
I3__DMENU_FONT_SIZE | I3__DMENU_FONT_SIZE |
I3__GLOBAL_FONT_SIZE | I3__GLOBAL_FONT_SIZE |
@ -17,3 +24,9 @@ LINEAR__API_TOKEN | linear.app project management configuration
REDIS_AUTH | redis connection credentials REDIS_AUTH | redis connection credentials
REDIS_HOST | REDIS_HOST |
REDIS_PORT | REDIS_PORT |
TWILIO__ACCOUNT_SID | twilio account / credentials
TWILIO__API_KEY |
TWILIO__API_SECRET |
TWILIO__DEFAULT_PHONE_FROM |
TWILIO__DEFAULT_PHONE_TO |

View File

@ -1,42 +0,0 @@
#####################################################################
[ ! $SCWRYPTS_ROOT ] && SCWRYPTS_ROOT="$(dirname ${0:a:h})"
source "${0:a:h}/config.zsh"
#####################################################################
__SCWRYPT=1 # arbitrary; indicates scwrypts exists
__PREFERRED_PYTHON_VERSIONS=(3.10 3.9)
__NODE_VERSION=18.0.0
__ENV_TEMPLATE=$SCWRYPTS_ROOT/.env.template
#####################################################################
# resolve a CLI argument to an absolute path: arguments beginning with
# '.' are resolved relative to EXECUTION_DIR via readlink -f; anything
# else is passed through unchanged
__GET_PATH_TO_RELATIVE_ARGUMENT() {
	[[ $1 =~ ^[.] ]] \
		&& echo $(readlink -f "$EXECUTION_DIR/$1") \
		|| echo "$1" \
		;
	true  # always report success, even when the ||-branch was taken
}
#####################################################################
# run another scwrypt as a nested "subscwrypt": increments the
# SUBSCWRYPT depth counter, logs BEGIN/END markers (indented by depth)
# to stderr, and propagates the child's exit code
__RUN_SCWRYPT() {
	((SUBSCWRYPT+=1))
	{ printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
	echo " BEGIN SUBSCWRYPT : $@" >&2
	SUBSCWRYPT=$SUBSCWRYPT SCWRYPTS_ENV=$ENV_NAME \
		"$SCWRYPTS_ROOT/scwrypts" $@
	EXIT_CODE=$?  # capture before the logging below clobbers $?
	{ printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
	echo " END SUBSCWRYPT : $1" >&2
	((SUBSCWRYPT-=1))
	return $EXIT_CODE
}

View File

@ -1,44 +0,0 @@
#####################################################################
# scwrypts configuration: user-tunable paths and keyboard shortcuts
SCWRYPTS_CONFIG_PATH="$HOME/.config/scwrypts"
SCWRYPTS_DATA_PATH="$HOME/.local/share/scwrypts"
SCWRYPTS_SHORTCUT='' # CTRL + SPACE
SCWRYPTS_ENV_SHORTCUT='' # CTRL + /
#####################################################################
# derived working paths
SCWRYPTS_ENV_PATH="$SCWRYPTS_CONFIG_PATH/env"
SCWRYPTS_LOG_PATH="$SCWRYPTS_DATA_PATH/logs"
SCWRYPTS_OUTPUT_PATH="$SCWRYPTS_DATA_PATH/output"
SCWRYPTS_VIRTUALENV_PATH="$SCWRYPTS_DATA_PATH/virtualenv"
#####################################################################
# user overrides, if a config file exists (paths quoted defensively)
[ -f "$SCWRYPTS_CONFIG_PATH/config" ] && source "$SCWRYPTS_CONFIG_PATH/config"
#####################################################################
# ensure all working directories exist
[ ! -d "$SCWRYPTS_CONFIG_PATH" ] && mkdir -p "$SCWRYPTS_CONFIG_PATH"
[ ! -d "$SCWRYPTS_DATA_PATH" ] && mkdir -p "$SCWRYPTS_DATA_PATH"
[ ! -d "$SCWRYPTS_ENV_PATH" ] && mkdir -p "$SCWRYPTS_ENV_PATH"
[ ! -d "$SCWRYPTS_LOG_PATH" ] && mkdir -p "$SCWRYPTS_LOG_PATH"
[ ! -d "$SCWRYPTS_OUTPUT_PATH" ] && mkdir -p "$SCWRYPTS_OUTPUT_PATH"
[ ! -d "$SCWRYPTS_VIRTUALENV_PATH" ] && mkdir -p "$SCWRYPTS_VIRTUALENV_PATH"
export \
	SCWRYPTS_CONFIG_PATH \
	SCWRYPTS_DATA_PATH \
	SCWRYPTS_SHORTCUT \
	SCWRYPTS_ENV_SHORTCUT \
	SCWRYPTS_ENV_PATH \
	SCWRYPTS_LOG_PATH \
	SCWRYPTS_OUTPUT_PATH \
	SCWRYPTS_VIRTUALENV_PATH \
	;
# fix: original exported the misspelled 'SCWRYPS_SHORTCUT', leaving
# SCWRYPTS_SHORTCUT unexported
#####################################################################
true

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts csv into json') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'csv', input_type = 'csv',
output_file = args.output_file, output_stream = stream.output,
output_type = 'json', output_type = 'json',
) )
#####################################################################
execute(main,
description = 'convert csv into json',
parse_args = [],
)

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts csv into yaml') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'csv', input_type = 'csv',
output_file = args.output_file, output_stream = stream.output,
output_type = 'yaml', output_type = 'yaml',
) )
#####################################################################
execute(main,
description = 'convert csv into yaml',
parse_args = [],
)

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts csv into json') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'json', input_type = 'json',
output_file = args.output_file, output_stream = stream.output,
output_type = 'csv', output_type = 'csv',
) )
#####################################################################
execute(main,
description = 'convert json into csv',
parse_args = [],
)

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts json into yaml') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'json', input_type = 'json',
output_file = args.output_file, output_stream = stream.output,
output_type = 'yaml', output_type = 'yaml',
) )
#####################################################################
execute(main,
description = 'convert json into yaml',
parse_args = [],
)

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts yaml into csv') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'yaml', input_type = 'yaml',
output_file = args.output_file, output_stream = stream.output,
output_type = 'csv', output_type = 'csv',
) )
#####################################################################
execute(main,
description = 'convert yaml into csv',
parse_args = [],
)

View File

@ -1,21 +1,24 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser
from py.lib.data.io import add_io_arguments
from py.lib.data.converter import convert from py.lib.data.converter import convert
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'converts yaml into json') def main(_args, stream):
add_io_arguments(parser) return convert(
input_stream = stream.input,
args = parser.parse_args()
convert(
input_file = args.input_file,
input_type = 'yaml', input_type = 'yaml',
output_file = args.output_file, output_stream = stream.output,
output_type = 'json', output_type = 'json',
) )
#####################################################################
execute(main,
description = 'convert yaml into json',
parse_args = [],
)

145
py/directus/get-items.py Executable file
View File

@ -0,0 +1,145 @@
#!/usr/bin/env python
from json import dumps
from py.lib.fzf import fzf, fzf_tail
from py.lib.http import directus
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__':
raise ImportedExecutableError()
#####################################################################
def main(args, stream):
    # no query flags at all -> fall back to fully interactive mode
    if {None} == { args.collection, args.filters, args.fields }:
        args.interactive = True

    # -p implies both interactive sub-prompts
    if args.interactive:
        args.generate_filters_prompt = True
        args.generate_fields_prompt = True

    collection = _get_or_select_collection(args)
    filters = _get_or_select_filters(args, collection)
    fields = _get_or_select_fields(args, collection)

    # keep only the non-empty query parameters
    query = '&'.join([
        param for param in [
            fields,
            filters,
        ]
        if param
    ])

    endpoint = f'items/{collection}?{query}'
    response = directus.request('GET', endpoint)

    # echo the API response plus metadata showing how to repeat this
    # exact query non-interactively
    stream.writeline(dumps({
        **response.json(),
        'scwrypts_metadata': {
            'endpoint': endpoint,
            'repeat_with': f'scwrypts -n py/directus/get-items -- -c {collection} -f \'{query}\'',
        },
    }))
def _get_or_select_collection(args):
    '''return the collection from args, or prompt the user to pick one'''
    selected = args.collection

    if selected is None:
        selected = fzf(
            prompt = 'select a collection',
            choices = directus.get_collections(),
        )

    # an empty selection (e.g. prompt cancelled) is not usable
    if not selected:
        raise ValueError('collection required for query')

    return selected
def _get_or_select_filters(args, collection):
    filters = args.filters or ''

    # interactively build `filter[<field>][<operator>]=<value>` query
    # parameters; cancelling the field picker (C^c) skips filtering
    if filters == '' and args.generate_filters_prompt:
        filters = '&'.join([
            f'filter[{filter}][' + (
                # walrus: keep the chosen operator in scope so it can be
                # echoed in the free-text value prompt below
                operator := fzf(
                    prompt = f'select operator for {filter}',
                    choices = directus.FILTER_OPERATORS,
                )
            ) + ']=' + fzf_tail(prompt = f'filter[{filter}][{operator}]')
            for filter in fzf(
                prompt = 'select filter(s) [C^c to skip]',
                fzf_options = '--multi',
                force_list = True,
                choices = directus.get_fields(collection),
            )
        ])

    return filters
def _get_or_select_fields(args, collection):
    '''return the fields[] query parameter, prompting when requested'''
    selected = args.fields or ''

    if not selected and args.generate_fields_prompt:
        selected = ','.join(fzf(
            prompt = 'select return field(s) [C^c to get all]',
            fzf_options = '--multi',
            choices = directus.get_fields(collection),
            force_list = True,
        ))

    # empty selection means "all fields": return '' so the parameter is
    # dropped from the query entirely
    return f'fields[]={selected}' if selected else selected
#####################################################################
# commandline interface; every flag is optional — with no flags at all,
# main() switches to fully interactive mode
execute(main,
    description = 'interactive CLI to get data from directus',
    parse_args = [
        ( ['-c', '--collection'], {
            "dest"     : 'collection',
            "default"  : None,
            "help"     : 'the name of the collection',
            "required" : False,
        }),
        ( ['-f', '--filters'], {
            "dest"     : 'filters',
            "default"  : None,
            "help"     : 'as a URL-suffix, filters for the query',
            "required" : False,
        }),
        ( ['-d', '--fields'], {
            "dest"     : 'fields',
            "default"  : None,
            "help"     : 'comma-separated list of fields to include',
            "required" : False,
        }),
        ( ['-p', '--interactive-prompt'], {
            "action"   : 'store_true',
            "dest"     : 'interactive',
            "default"  : False,
            "help"     : 'interactively generate filter prompts; implied if no flags are provided',
            "required" : False,
        }),
        ( ['--prompt-filters'], {
            "action"   : 'store_true',
            "dest"     : 'generate_filters_prompt',
            "default"  : False,
            # fix: "superceded" -> "superseded"
            "help"     : '(superseded by -p) only generate filters interactively',
            "required" : False,
        }),
        ( ['--prompt-fields'], {
            "action"   : 'store_true',
            "dest"     : 'generate_fields_prompt',
            "default"  : False,
            # fix: help text previously said "filters" (copy-paste bug)
            "help"     : '(superseded by -p) only generate fields interactively',
            "required" : False,
        }),
    ]
)

0
py/discord/__init__.py Normal file
View File

61
py/discord/post-message.py Executable file
View File

@ -0,0 +1,61 @@
#!/usr/bin/env python
from json import dumps
from sys import stderr
from py.lib.http import discord
from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__':
raise ImportedExecutableError()
#####################################################################
def main(args, stream):
    # no -b flag: read the message body from the input stream instead
    if args.body is None:
        print(f'reading input from {stream.input.name}', file=stderr)
        args.body = ''.join(stream.readlines()).strip()

    # never send an empty message
    if len(args.body) == 0:
        args.body = 'PING'

    response = discord.send_message(
        content = args.body,
        channel_id = args.channel_id,
        webhook = args.webhook,
        avatar_url = args.avatar_url,
    )

    # some successful posts return an empty body; substitute a stub so
    # the JSON report below is always valid
    stream.writeline(dumps({
        **(response.json() if response.text != '' else {'message': 'OK'}),
        'scwrypts_metadata': {},
    }))
#####################################################################
# commandline interface: every flag is optional; a missing body falls
# back to the input stream (see main)
ARGUMENT_SPEC = [
    ( ['-b', '--body'], {
        'dest'     : 'body',
        'help'     : 'message body',
        'required' : False,
    }),
    ( ['-c', '--channel-id'], {
        'dest'     : 'channel_id',
        'help'     : 'target channel id',
        'required' : False,
    }),
    ( ['-w', '--webhook'], {
        'dest'     : 'webhook',
        'help'     : 'target webhook (takes precedence over -c)',
        'required' : False,
    }),
    ( ['--avatar-url'], {
        'dest'     : 'avatar_url',
        'help'     : 'replace default avatar_url',
        'required' : False,
    }),
]

execute(
    main,
    description = 'post a message to the indicated discord channel',
    parse_args = ARGUMENT_SPEC,
)

View File

@ -1,19 +1,27 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser from py.lib.scwrypts import execute
from py.lib.scwrypts.exceptions import ImportedExecutableError
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'a simple "Hello, World!" program') def main(args, stream):
parser.add_argument( stream.writeline(args.message)
'-m', '--message',
dest = 'message',
default = 'HELLO WORLD', #####################################################################
help = 'message to print to stdout', execute(main,
required = False, description = 'a simple "Hello, World!" program',
parse_args = [
( ['-m', '--message'], {
'dest' : 'message',
'default' : 'HELLO WORLD',
'help' : 'message to print',
'required' : False,
}),
],
) )
args = parser.parse_args()
print(args.message)

View File

@ -0,0 +1,6 @@
import py.lib.data
import py.lib.fzf
import py.lib.http
import py.lib.redis
import py.lib.scwrypts
import py.lib.twilio

View File

@ -0,0 +1 @@
import py.lib.data.converter

View File

@ -2,18 +2,13 @@ import csv
import json import json
import yaml import yaml
from py.lib.data.io import get_stream
def convert(input_stream, input_type, output_stream, output_type):
def convert(input_file, input_type, output_file, output_type):
if input_type == output_type: if input_type == output_type:
raise ValueError('input type and output type are the same') raise ValueError('input type and output type are the same')
with get_stream(input_file) as input_stream:
data = convert_input(input_stream, input_type) data = convert_input(input_stream, input_type)
write_output(output_stream, output_type, data)
with get_stream(output_file, 'w+') as output_stream:
_write_output(output_stream, output_type, data)
def convert_input(stream, input_type): def convert_input(stream, input_type):
@ -28,7 +23,8 @@ def convert_input(stream, input_type):
'yaml': _read_yaml, 'yaml': _read_yaml,
}[input_type](stream) }[input_type](stream)
def _write_output(stream, output_type, data):
def write_output(stream, output_type, data):
supported_output_types = {'csv', 'json', 'yaml'} supported_output_types = {'csv', 'json', 'yaml'}
if output_type not in supported_output_types: if output_type not in supported_output_types:
@ -40,6 +36,7 @@ def _write_output(stream, output_type, data):
'yaml': _write_yaml, 'yaml': _write_yaml,
}[output_type](stream, data) }[output_type](stream, data)
##################################################################### #####################################################################
def _read_csv(stream): def _read_csv(stream):

1
py/lib/fzf/__init__.py Normal file
View File

@ -0,0 +1 @@
from py.lib.fzf.client import fzf, fzf_tail, fzf_head

61
py/lib/fzf/client.py Normal file
View File

@ -0,0 +1,61 @@
from pyfzf.pyfzf import FzfPrompt
FZF_PROMPT = None
def fzf(  # pylint: disable=too-many-arguments
        choices=None,
        prompt=None,
        fzf_options='',
        delimiter='\n',
        return_type=str,
        force_list=False,
):
    '''
    prompt the user to pick from `choices` via fzf

    @param choices      iterable of candidate values (default: empty)
    @param prompt       prompt label shown in fzf (optional)
    @param fzf_options  extra raw fzf CLI options
    @param delimiter    delimiter passed through to pyfzf
    @param return_type  type applied to each selection (must be a type)
    @param force_list   always return a list, even for 0/1 selections
    @return             None / single value / list (see force_list)
    '''
    global FZF_PROMPT  # pylint: disable=global-statement

    if choices is None:
        choices = []

    if not isinstance(return_type, type):
        raise ValueError(f'return_type must be a valid python type; "{return_type}" is not a type')

    if FZF_PROMPT is None:
        FZF_PROMPT = FzfPrompt()

    # fix: build options from an ordered list rather than a set — a set
    # dedupes and randomizes CLI argument order between runs; also drop
    # empty entries instead of joining them as stray spaces
    options = ' '.join(option for option in [
        '-i',
        '--layout=reverse',
        '--ansi',
        '--height=30%',
        f'--prompt "{prompt} : "' if prompt is not None else '',
        fzf_options,
    ] if option)

    selections = [
        return_type(selection)
        for selection in FZF_PROMPT.prompt(choices, options, delimiter)
    ]

    if not force_list:
        if len(selections) == 0:
            return None
        if len(selections) == 1:
            return selections[0]

    return selections
def _fzf_print(*args, fzf_options='', **kwargs):
    '''run fzf with --print-query enabled, always returning a list'''
    combined_options = f'--print-query {fzf_options}'
    return fzf(
        *args,
        **kwargs,
        fzf_options = combined_options,
        force_list = True,
    )


def fzf_tail(*args, **kwargs):
    '''fzf variant returning the last element of the printed output'''
    return _fzf_print(*args, **kwargs)[-1]


def fzf_head(*args, **kwargs):
    '''fzf variant returning the first element of the printed output'''
    return _fzf_print(*args, **kwargs)[0]

View File

@ -1 +1,5 @@
from py.lib.http.client import get_request_client from py.lib.http.client import get_request_client
import py.lib.http.directus
import py.lib.http.discord
import py.lib.http.linear

View File

@ -0,0 +1,2 @@
from py.lib.http.directus.client import *
from py.lib.http.directus.constant import *

View File

@ -0,0 +1,56 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv
REQUEST = None
COLLECTIONS = None
FIELDS = {}
def request(method, endpoint, **kwargs):
    '''
    issue an authorized request against the configured directus API

    the request client is created lazily on first call (reading
    DIRECTUS__BASE_URL / DIRECTUS__API_TOKEN from the environment)
    and cached in the module-level REQUEST
    '''
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        REQUEST = get_request_client(
            base_url = getenv("DIRECTUS__BASE_URL"),
            headers = {
                'Authorization': f'bearer {getenv("DIRECTUS__API_TOKEN")}',
            }
        )

    return REQUEST(method, endpoint, **kwargs)
def graphql(query, system=False):
    '''
    POST a GraphQL query to directus

    @param query   GraphQL query string
    @param system  when True, target the system scope at graphql/system
    @return        API response
    '''
    # fix: the condition was inverted — directus serves system-collection
    # queries at /graphql/system and regular item queries at /graphql
    return request(
        'POST',
        'graphql/system' if system is True else 'graphql',
        json={'query': query},
    )
def get_collections():
    '''list all collection names, cached after the first request'''
    global COLLECTIONS  # pylint: disable=global-statement

    if COLLECTIONS is None:
        response = request(
            'GET',
            'collections?limit=-1&fields[]=collection',
        )
        COLLECTIONS = [
            entry['collection']
            for entry in response.json()['data']
        ]

    return COLLECTIONS
def get_fields(collection):
    '''list field names for a collection, cached per collection'''
    if FIELDS.get(collection) is None:
        response = request(
            'GET',
            f'fields/{collection}?limit=-1&fields[]=field',
        )
        FIELDS[collection] = [
            entry['field']
            for entry in response.json()['data']
        ]

    return FIELDS[collection]

View File

@ -0,0 +1,25 @@
# directus filter rule operators, usable as the <op> in
# filter[<field>][<op>]=<value> query parameters (consumed by the
# interactive get-items prompt)
FILTER_OPERATORS = {
    '_eq',
    '_neq',
    '_lt',
    '_lte',
    '_gt',
    '_gte',
    '_in',
    '_nin',
    '_null',
    '_nnull',
    '_contains',
    '_ncontains',
    '_starts_with',
    '_ends_with',
    '_nends_with',
    '_between',
    '_nbetween',
    '_empty',
    '_nempty',
    '_intersects',
    '_nintersects',
    '_intersects_bbox',
    '_nintersects_bbox',
}

View File

@ -0,0 +1,2 @@
from py.lib.http.discord.client import *
from py.lib.http.discord.send_message import *

View File

@ -0,0 +1,20 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv
REQUEST = None
def request(method, endpoint, **kwargs):
    '''
    issue a request against the discord API

    the client is created lazily and cached in REQUEST; an
    Authorization header is attached only when DISCORD__BOT_TOKEN is
    configured (token-less use is allowed)
    '''
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        token = getenv("DISCORD__BOT_TOKEN", required = False)
        headers = {} if token is None else {'Authorization': f'Bot {token}'}

        REQUEST = get_request_client(
            base_url = 'https://discord.com/api',
            headers = headers,
        )

    return REQUEST(method, endpoint, **kwargs)

View File

@ -0,0 +1,34 @@
from py.lib.scwrypts import getenv
from py.lib.http.discord import request
def send_message(content, channel_id=None, webhook=None, avatar_url=None, **kwargs):
    '''
    post a message to discord through either a webhook or a channel

    @param content     message text
    @param channel_id  target channel (falls back to DISCORD__DEFAULT_CHANNEL_ID)
    @param webhook     target webhook; takes precedence over channel_id
    @param avatar_url  avatar override (falls back to DISCORD__DEFAULT_AVATAR_URL)
    @param kwargs      extra fields merged into the JSON payload
    @return            response from the discord API
    @raises ValueError when neither a webhook nor a channel can be resolved
    '''
    if channel_id is None:
        channel_id = getenv('DISCORD__DEFAULT_CHANNEL_ID', required=False)

    if avatar_url is None:
        avatar_url = getenv('DISCORD__DEFAULT_AVATAR_URL', required=False)

    endpoint = None

    # webhook targets take precedence over channel targets
    if webhook is not None:
        endpoint = f'webhooks/{webhook}'

    elif channel_id is not None:
        endpoint = f'channels/{channel_id}/messages'

    else:
        raise ValueError('must provide target channel_id or webhook')

    return request(
        method = 'POST',
        endpoint = endpoint,
        json = {
            # drop unset values so discord applies its own defaults
            # NOTE(review): 'username' is hard-coded to 'wrobot' —
            # confirm it should not be configurable
            key: value
            for key, value in {
                'content': content,
                'username': 'wrobot',
                'avatar_url': avatar_url,
                **kwargs,
            }.items()
            if value is not None
        },
    )

View File

@ -0,0 +1 @@
from py.lib.http.linear.client import *

View File

@ -0,0 +1,20 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv
REQUEST = None
def request(method, endpoint, **kwargs):
    '''
    issue an authorized request against the linear.app API

    the client is created lazily on first call (reading
    LINEAR__API_TOKEN from the environment) and cached in REQUEST
    '''
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        REQUEST = get_request_client(
            base_url = 'https://api.linear.app',
            headers = {
                'Authorization': f'bearer {getenv("LINEAR__API_TOKEN")}',
            }
        )

    return REQUEST(method, endpoint, **kwargs)


def graphql(query):
    '''POST a GraphQL query to linear.app'''
    return request('POST', 'graphql', json={'query': query})

View File

@ -1 +0,0 @@
from py.lib.linear.client import request, graphql

View File

@ -1,13 +0,0 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv
request = get_request_client(
base_url = 'https://api.linear.app',
headers = {
'Authorization': f'bearer {getenv("LINEAR__API_TOKEN")}',
}
)
def graphql(query):
return request('POST', 'graphql', json={'query': query})

View File

@ -1 +1 @@
from py.lib.redis.client import get_client

View File

@ -2,14 +2,18 @@ from redis import StrictRedis
from py.lib.scwrypts import getenv from py.lib.scwrypts import getenv
CLIENT = None
class RedisClient(StrictRedis): def get_client():
def __init__(self): global CLIENT # pylint: disable=global-statement
super().__init__(
if CLIENT is None:
print('getting redis client')
CLIENT = StrictRedis(
host = getenv('REDIS_HOST'), host = getenv('REDIS_HOST'),
port = getenv('REDIS_PORT'), port = getenv('REDIS_PORT'),
password = getenv('REDIS_AUTH', required=False), password = getenv('REDIS_AUTH', required=False),
decode_responses = True, decode_responses = True,
) )
Client = RedisClient() return CLIENT

View File

@ -1,3 +1,6 @@
from py.lib.scwrypts.execute import execute
from py.lib.scwrypts.getenv import getenv from py.lib.scwrypts.getenv import getenv
from py.lib.scwrypts.interactive import interactive from py.lib.scwrypts.interactive import interactive
from py.lib.scwrypts.run import run from py.lib.scwrypts.run import run
import py.lib.scwrypts.io

View File

@ -1,3 +1,16 @@
class MissingVariableError(Exception): from argparse import ArgumentError
class MissingVariableError(EnvironmentError):
def init(self, name): def init(self, name):
super().__init__(f'Missing required environment variable "{name}"') super().__init__(f'Missing required environment variable "{name}"')
class ImportedExecutableError(ImportError):
def __init__(self):
super().__init__('executable only; must run through scwrypts')
class MissingFlagAndEnvironmentVariableError(EnvironmentError, ArgumentError):
def __init__(self, flags, env_var):
super().__init__(f'must provide at least one of : {{ flags: {flags} OR {env_var} }}')

View File

@ -0,0 +1,23 @@
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from py.lib.scwrypts.io import get_combined_stream, add_io_arguments
def execute(main, description=None, parse_args=None, toggle_input=True, toggle_output=True):
    '''
    standard entrypoint for python scwrypts: parse CLI arguments, open
    the combined input/output stream, then invoke `main(args, stream)`
    and return its result
    '''
    argument_spec = [] if parse_args is None else parse_args

    parser = ArgumentParser(
        description = description,
        formatter_class = ArgumentDefaultsHelpFormatter,
    )
    add_io_arguments(parser, toggle_input, toggle_output)

    for flags, options in argument_spec:
        parser.add_argument(*flags, **options)

    args = parser.parse_args()

    with get_combined_stream(args.input_file, args.output_file) as stream:
        return main(args, stream)

View File

@ -1,16 +1,15 @@
from os import getenv as os_getenv from os import getenv as os_getenv
from py.lib.scwrypts.exceptions import MissingVariableError from py.lib.scwrypts.exceptions import MissingVariableError
from py.lib.scwrypts.run import run
def getenv(name, required=True): def getenv(name, required=True):
value = os_getenv(name, None) value = os_getenv(name, None)
if value == None:
run('zsh/scwrypts/environment/stage-variables', name)
if required and not value: if required and not value:
raise MissingVariableError(name) raise MissingVariableError(name)
if value == '':
value = None
return value return value

View File

@ -1,11 +1,22 @@
from bpython import embed from bpython import embed
def interactive(function): def interactive(variable_descriptions):
def main(*args, **kwargs): def outer(function):
print('preparing interactive environment...')
def inner(*args, **kwargs):
print('\npreparing interactive environment...\n')
local_vars = function(*args, **kwargs) local_vars = function(*args, **kwargs)
print('environment ready; user, GO! :)')
print('\n\n'.join([
f'>>> {x}' for x in variable_descriptions
]))
print('\nenvironment ready; user, GO! :)\n')
embed(local_vars) embed(local_vars)
return main return inner
return outer

View File

@ -30,6 +30,9 @@ def get_stream(filename=None, mode='r', encoding='utf-8', verbose=False, **kwarg
yield stdin if is_read else stdout yield stdin if is_read else stdout
if not is_read:
stdout.flush()
def add_io_arguments(parser, toggle_input=True, toggle_output=True): def add_io_arguments(parser, toggle_input=True, toggle_output=True):
if toggle_input: if toggle_input:
@ -49,3 +52,35 @@ def add_io_arguments(parser, toggle_input=True, toggle_output=True):
help = 'path to output file; omit for stdout', help = 'path to output file; omit for stdout',
required = False, required = False,
) )
@contextmanager
def get_combined_stream(input_file=None, output_file=None):
    # open both streams (stdin/stdout when a filename is omitted) and
    # expose them through a single CombinedStream facade
    with get_stream(input_file, 'r') as input_stream, get_stream(output_file, 'w+') as output_stream:
        yield CombinedStream(input_stream, output_stream)
class CombinedStream:
    '''pairs an input stream with an output stream behind one file-like API'''

    def __init__(self, input_stream, output_stream):
        self.input = input_stream
        self.output = output_stream

    # --- read-side delegates ---------------------------------------
    def read(self, *args, **kwargs):
        return self.input.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        return self.input.readline(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        return self.input.readlines(*args, **kwargs)

    # --- write-side delegates --------------------------------------
    def write(self, *args, **kwargs):
        return self.output.write(*args, **kwargs)

    def writeline(self, line):
        '''write a line with trailing newline and flush immediately'''
        written = self.output.write(f'{line}\n')
        self.output.flush()
        return written

    def writelines(self, *args, **kwargs):
        return self.output.writelines(*args, **kwargs)

View File

@ -7,8 +7,9 @@ def run(scwrypt_name, *args):
DEPTH = int(getenv('SUBSCWRYPT', '0')) DEPTH = int(getenv('SUBSCWRYPT', '0'))
DEPTH += 1 DEPTH += 1
SCWRYPTS_EXE = Path(__file__).parents[2] / 'scwrypts' SCWRYPTS_EXE = Path(__file__).parents[3] / 'scwrypts'
ARGS = ' '.join([str(x) for x in args]) ARGS = ' '.join([str(x) for x in args])
print(f'SUBSCWRYPT={DEPTH} {SCWRYPTS_EXE} {scwrypt_name} -- {ARGS}')
print(f'\n {"--"*DEPTH} ({DEPTH}) BEGIN SUBSCWRYPT : {Path(scwrypt_name).name}') print(f'\n {"--"*DEPTH} ({DEPTH}) BEGIN SUBSCWRYPT : {Path(scwrypt_name).name}')
subprocess_run( subprocess_run(

View File

@ -0,0 +1,2 @@
from py.lib.twilio.client import get_client
from py.lib.twilio.send_sms import send_sms

18
py/lib/twilio/client.py Normal file
View File

@ -0,0 +1,18 @@
from twilio.rest import Client
from py.lib.scwrypts import getenv
CLIENT = None

def get_client():
    '''
    lazily construct and cache the twilio REST Client

    credentials are read from the scwrypts environment on first use:
      TWILIO__API_KEY, TWILIO__API_SECRET, TWILIO__ACCOUNT_SID

    @return the module-level cached twilio.rest.Client
    '''
    global CLIENT  # pylint: disable=global-statement
    if CLIENT is None:
        # status message belongs on stderr so stdout stays clean for
        # script output (matches the convention used by the scwrypts
        # executables, which report progress via stderr)
        from sys import stderr
        print('loading client', file=stderr)
        CLIENT = Client(
            username    = getenv('TWILIO__API_KEY'),
            password    = getenv('TWILIO__API_SECRET'),
            account_sid = getenv('TWILIO__ACCOUNT_SID'),
        )
    return CLIENT

57
py/lib/twilio/send_sms.py Normal file
View File

@ -0,0 +1,57 @@
from json import dumps
from time import sleep
from py.lib.twilio.client import get_client
def send_sms(to, from_, body, max_char_count=300, stream=None):
    '''
    abstraction for twilio.client.messages.create which will break
    messages into multi-part SMS rather than throwing an error or
    requiring the use of MMS data

    @param to               messages.create parameter
    @param from_            messages.create parameter
    @param body             messages.create parameter
    @param max_char_count   per-part character limit, clamped to
                            1 <= N <= 1500 (default 300)
    @param stream           used to report success/failure (optional);
                            must provide a writeline(str) method

    @return a list of twilio MessageInstance objects
    '''
    client = get_client()
    messages = []

    max_char_count = max(1, min(max_char_count, 1500))

    # ceiling division; the previous '1 + len // max' form overcounted
    # by one whenever len(body) was an exact multiple of max_char_count
    # (e.g. a 300-char body at the default limit was labeled "1/2")
    total_sms_parts = max(1, (len(body) + max_char_count - 1) // max_char_count)
    contains_multiple_parts = total_sms_parts > 1

    for i in range(0, len(body), max_char_count):
        msg_body = body[i:i+max_char_count]
        current_part = 1 + i // max_char_count

        if contains_multiple_parts:
            msg_body = f'{current_part}/{total_sms_parts}\n{msg_body}'

        message = client.messages.create(
            to    = to,
            from_ = from_,
            body  = msg_body,
        )
        messages.append(message)

        if stream is not None:
            stream.writeline(
                dumps({
                    'sid': message.sid,
                    'to': to,
                    'from': from_,
                    'body': msg_body,
                })
            )

        # throttle between parts to help ordered delivery; no need to
        # sleep after the final (or only) part
        if contains_multiple_parts and current_part < total_sms_parts:
            sleep(2 if max_char_count <= 500 else 5)

    return messages

View File

@ -1,47 +1,45 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser from py.lib.http.linear import graphql
from py.lib.scwrypts import execute
from py.lib.data.io import get_stream, add_io_arguments from py.lib.scwrypts.exceptions import ImportedExecutableError
from py.lib.linear import graphql
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'comment on an issue in linear.app') def get_query(args):
body = f'"""from wrobot:\n```\n{args.message}\n```\n"""'
parser.add_argument( return f'''
'-i', '--issue',
dest = 'issue_id',
help = 'issue short-code (e.g. CLOUD-319)',
required = True,
)
parser.add_argument(
'-m', '--message',
dest = 'message',
help = 'comment to post to the target issue',
required = True,
)
add_io_arguments(parser, toggle_input=False)
args = parser.parse_args()
query = f'''
mutation CommentCreate {{ mutation CommentCreate {{
commentCreate( commentCreate(
input: {{ input: {{
issueId: "{args.issue_id}" issueId: "{args.issue_id}"
body: """from wrobot: body: {body}
```
{args.message.strip()}
```"""
}} }}
) {{ success }} ) {{ success }}
}} }}'''
'''
response = graphql(query) def main(args, stream):
with get_stream(args.output_file, 'w+') as output: response = graphql(get_query(args))
output.write(response.text) stream.writeline(response)
#####################################################################
execute(main,
description = 'comment on an inssue in linear.app',
parse_args = [
( ['-d', '--issue-id'], {
'dest' : 'issue_id',
'help' : 'issue short-code (e.g. CLOUD-319)',
'required' : True,
}),
( ['-m', '--message'], {
'dest' : 'message',
'help' : 'comment to post to the target issue',
'required' : True,
}),
]
)

View File

@ -1,25 +1,26 @@
#!/usr/bin/env python #!/usr/bin/env python
from argparse import ArgumentParser from py.lib.redis import get_client
from py.lib.scwrypts import execute, interactive, getenv
from py.lib.redis.client import Client from py.lib.scwrypts.exceptions import ImportedExecutableError
from py.lib.scwrypts import interactive, getenv
if __name__ != '__main__': if __name__ != '__main__':
raise Exception('executable only; must run through scwrypts') raise ImportedExecutableError()
#####################################################################
parser = ArgumentParser(description = 'establishes a redis client in an interactive python shell') @interactive([
args = parser.parse_args() f'r = StrictRedis(\'{getenv("REDIS_HOST")}:{getenv("REDIS_PORT")}\')',
])
@interactive def main(_args, _stream):
def main():
# pylint: disable=possibly-unused-variable # pylint: disable=possibly-unused-variable
r = Client r = get_client()
print(f'''
>>> r = StrictRedis({getenv("REDIS_HOST")}:{getenv("REDIS_PORT")})
''')
return locals() return locals()
main()
#####################################################################
execute(main,
description = 'establishes a redis client in an interactive python shell',
parse_args = [],
)

View File

@ -1,3 +1,5 @@
redis
bpython bpython
pyfzf
pyyaml pyyaml
redis
twilio

0
py/twilio/__init__.py Normal file
View File

65
py/twilio/send-sms.py Executable file
View File

@ -0,0 +1,65 @@
#!/usr/bin/env python
from sys import stderr
from py.lib.scwrypts import execute, getenv
from py.lib.twilio import send_sms
from py.lib.scwrypts.exceptions import ImportedExecutableError, MissingFlagAndEnvironmentVariableError
if __name__ != '__main__':
raise ImportedExecutableError()
#####################################################################
def main(args, stream):
    '''
    send a simple SMS through twilio

    when --body is omitted, the message body is read from the combined
    input stream (file or stdin); an empty body falls back to 'PING';
    missing --from/--to (with no environment default) raise
    MissingFlagAndEnvironmentVariableError
    '''
    if args.body is None:
        print(f'reading input from {stream.input.name}', file=stderr)
        args.body = ''.join(stream.readlines()).strip() or 'PING'

    if args.from_ is None:
        raise MissingFlagAndEnvironmentVariableError(['-f', '--from'], 'TWILIO__DEFAULT_PHONE_FROM')

    if args.to is None:
        raise MissingFlagAndEnvironmentVariableError(['-t', '--to'], 'TWILIO__DEFAULT_PHONE_TO')

    send_sms(
        to             = args.to,
        from_          = args.from_,
        body           = args.body,
        max_char_count = args.max_char_count,
        stream         = stream,
    )
#####################################################################
execute(main,
    description = 'send a simple SMS through twilio',
    parse_args = [
        # -t/--to and -f/--from fall back to the TWILIO__DEFAULT_PHONE_*
        # environment variables when the flags are omitted
        ( ['-t', '--to'], {
            'dest'     : 'to',
            'help'     : 'phone number of the recipient',
            'required' : False,
            'default'  : getenv('TWILIO__DEFAULT_PHONE_TO', required=False),
        }),
        ( ['-f', '--from'], {
            'dest'     : 'from_',
            # was a copy-paste of the --to help text
            'help'     : 'phone number of the sender',
            'required' : False,
            'default'  : getenv('TWILIO__DEFAULT_PHONE_FROM', required=False),
        }),
        ( ['-b', '--body'], {
            'dest'     : 'body',
            'help'     : 'message body',
            'required' : False,
        }),
        ( ['--max-char-count'], {
            'dest'     : 'max_char_count',
            'help'     : 'separate message into parts by character count (1 < N <= 1500)',
            'required' : False,
            'default'  : 300,
            # CLI-supplied values arrive as strings; send_sms compares
            # against integers, so coerce here
            'type'     : int,
        }),
    ]
)

304
run
View File

@ -1,8 +1,7 @@
#!/bin/zsh #!/bin/zsh
export EXECUTION_DIR=$(pwd) export EXECUTION_DIR=$(pwd)
source "${0:a:h}/zsh/lib/import.driver.zsh" || exit 42
SCWRYPTS_ROOT="${0:a:h}"
source "$SCWRYPTS_ROOT/zsh/common.zsh" || exit 42
##################################################################### #####################################################################
__RUN() { __RUN() {
@ -10,10 +9,18 @@ __RUN() {
usage: scwrypts [OPTIONS ...] SCRIPT -- [SCRIPT OPTIONS ...] usage: scwrypts [OPTIONS ...] SCRIPT -- [SCRIPT OPTIONS ...]
OPTIONS OPTIONS
-e, --env <env-name> set environment; overwrites SCWRYPTS_ENV -g, --group <group-name> only use scripts from the indicated group
-n, --no-log skip logging (useful when calling scwrypts as an api) -t, --type <type-name> only use scripts of the indicated type
-l, --list print out command list and exit -m, --name <scwrypt-name> only run the script if there is an exact match
(requires type and group)
-e, --env <env-name> set environment; overwrites SCWRYPTS_ENV
-n, --no-log skip logging and run in quiet mode
--update update scwrypts library to latest version
-v, --version print out scwrypts version and exit
-l, --list print out command list and exit
-h, --help display this message and exit -h, --help display this message and exit
' '
cd "$SCWRYPTS_ROOT" cd "$SCWRYPTS_ROOT"
@ -21,13 +28,35 @@ __RUN() {
local ENV_NAME="$SCWRYPTS_ENV" local ENV_NAME="$SCWRYPTS_ENV"
local SEARCH_PATTERNS=() local SEARCH_PATTERNS=()
local VARSPLIT SEARCH_GROUP SEARCH_TYPE SEARCH_NAME
local ERROR=0 local ERROR=0
while [[ $# -gt 0 ]] while [[ $# -gt 0 ]]
do do
case $1 in case $1 in
-t | --type )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_TYPE=$2
shift 2
;;
-g | --group )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_GROUP=$2
shift 2
;;
-m | --name )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_NAME=$2
shift 2
;;
-[a-z][a-z]* )
VARSPLIT=$(echo "$1 " | sed 's/^\(-.\)\(.*\) /\1 -\2/')
set -- $(echo " $VARSPLIT ") ${@:2}
;;
-h | --help ) -h | --help )
__USAGE USAGE
return 0 return 0
;; ;;
-n | --no-log ) -n | --no-log )
@ -35,55 +64,168 @@ __RUN() {
shift 1 shift 1
;; ;;
-e | --env ) -e | --env )
[ $ENV_NAME ] && __WARNING 'overwriting session environment' [ ! $2 ] && ERROR "missing value for argument $1" && break
[ ! $SUBSCWRYPTS ] \
&& [ $ENV_NAME ] \
&& WARNING 'overwriting session environment' \
;
ENV_NAME="$2" ENV_NAME="$2"
__STATUS "using CLI environment '$ENV_NAME'" STATUS "using CLI environment '$ENV_NAME'"
shift 2 shift 2
;; ;;
-l | --list ) -l | --list )
__OUTPUT_COMMAND_LIST SCWRYPTS__GET_AVAILABLE_SCWRYPTS
return 0
;;
-v | --version )
echo scwrypts $(cd "$SCWRYPTS__ROOT__scwrypts"; git describe --tags)
return 0
;;
--update )
cd "$SCWRYPTS__ROOT__scwrypts"
git fetch --quiet origin main
local SYNC_STATUS=$?
git diff --exit-code origin/main -- . >&2
local DIFF_STATUS=$?
[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
SUCCESS 'already up-to-date with origin/main'
} || {
git rebase --autostash origin/main \
&& SUCCESS 'up-to-date with origin/main' \
|| {
git rebase --abort
ERROR 'unable to update scwrypts; please try manual upgrade'
REMINDER "installation in '$(pwd)'"
}
}
return 0 return 0
;; ;;
-- ) -- )
shift 1 shift 1
break # pass arguments after '--' to the scwrypt break # pass arguments after '--' to the scwrypt
;; ;;
-* ) --* )
__ERROR "unrecognized argument '$1'" ERROR "unrecognized argument '$1'"
shift 1 shift 1
;; ;;
* ) * )
SEARCH_PATTERNS+=$1 SEARCH_PATTERNS+=($1)
shift 1 shift 1
;; ;;
esac esac
done done
__ERROR_CHECK [ $SEARCH_NAME ] && {
[ ! $SEARCH_TYPE ] && ERROR '--name requires --type argument'
[ ! $SEARCH_GROUP ] && ERROR '--name requires --group argument'
}
CHECK_ERRORS
########################################## ##########################################
local SCRIPT=$(__SELECT_SCRIPT $SEARCH_PATTERNS) local SCWRYPTS_AVAILABLE
[ ! $SCRIPT ] && exit 2 local POTENTIAL_ERROR="no such scwrypt exists:"
export SCWRYPT_NAME=$SCRIPT
SCWRYPTS_AVAILABLE=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS)
[ $SEARCH_NAME ] && {
POTENTIAL_ERROR+="\n NAME : '$SEARCH_NAME'"
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
SCWRYPTS_AVAILABLE=$({
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | sed -e 's/\x1b\[[0-9;]*m//g' | grep "^$SEARCH_NAME *$SEARCH_TYPE *$SEARCH_GROUP\$"
})
}
[ ! $SEARCH_NAME ] && {
[ $SEARCH_TYPE ] && {
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
SCWRYPTS_AVAILABLE=$(\
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep ' [^/]*'$SEARCH_TYPE'[^/]* '
} \
| awk '{$2=""; print $0;}' \
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
| column -ts '^'
)
}
[ $SEARCH_GROUP ] && {
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
SCWRYPTS_AVAILABLE=$(
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep "$SEARCH_GROUP"'[^/]*$'
} \
| awk '{$NF=""; print $0;}' \
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
| column -ts '^'
)
}
[[ ${#SEARCH_PATTERNS[@]} -gt 0 ]] && {
POTENTIAL_ERROR+="\n PATTERNS : $SEARCH_PATTERNS"
local P
for P in ${SEARCH_PATTERNS[@]}
do
SCWRYPTS_AVAILABLE=$(
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep $P
}
)
done
}
}
[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -lt 2 ]] && ERROR "$POTENTIAL_ERROR"
CHECK_ERRORS
##########################################
local NAME="$SEARCH_NAME"
local TYPE="$SEARCH_TYPE"
local GROUP="$SEARCH_GROUP"
[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -eq 2 ]] \
&& SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | tail -n1) \
|| SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | FZF "select a script to run" --header-lines 1)
[ $SCWRYPT_SELECTION ] || exit 2
SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION
export SCWRYPT_NAME=$NAME
export SCWRYPT_TYPE=$TYPE
export SCWRYPT_GROUP=$GROUP
##########################################
local ENV_REQUIRED=$(__CHECK_ENV_REQUIRED && echo 1 || echo 0) local ENV_REQUIRED=$(__CHECK_ENV_REQUIRED && echo 1 || echo 0)
[[ $ENV_REQUIRED -eq 1 ]] && { [[ $ENV_REQUIRED -eq 1 ]] && {
[ ! $ENV_NAME ] && ENV_NAME=$(__SELECT_ENV) [ ! $ENV_NAME ] && ENV_NAME=$(SCWRYPTS__SELECT_ENV)
local ENV_FILE=$(__GET_ENV_FILE $ENV_NAME) local ENV_FILE=$(SCWRYPTS__GET_ENV_FILE "$ENV_NAME")
source "$ENV_FILE" || FAIL 5 "missing or invalid environment '$ENV_NAME'"
[ -f "$ENV_FILE" ] && source "$ENV_FILE" \
|| __FAIL 5 "missing or invalid environment '$ENV_NAME'"
export ENV_NAME export ENV_NAME
} }
##########################################
[ ! $SUBSCWRYPT ] \ [ ! $SUBSCWRYPT ] \
&& [[ $ENV_NAME =~ prod ]] \ && [[ $ENV_NAME =~ prod ]] \
&& { __VALIDATE_UPSTREAM_TIMELINE || __ABORT; } && { __VALIDATE_UPSTREAM_TIMELINE || ABORT; }
local RUN_STRING=$(__GET_RUN_STRING $SCRIPT $ENV_NAME) ##########################################
local RUN_STRING=$(SCWRYPTS__GET_RUNSTRING $SCWRYPT_NAME $SCWRYPT_TYPE $SCWRYPT_GROUP)
[ ! $RUN_STRING ] && exit 3 [ ! $RUN_STRING ] && exit 3
########################################## ##########################################
@ -93,7 +235,7 @@ __RUN() {
local HEADER=$( local HEADER=$(
[ $SUBSCWRYPT ] && return 0 [ $SUBSCWRYPT ] && return 0
echo '=====================================================================' echo '====================================================================='
echo "script : $SCRIPT" echo "script : $SCWRYPT_GROUP $SCWRYPT_TYPE $SCWRYPT_NAME"
echo "run at : $(date)" echo "run at : $(date)"
echo "config : $ENV_NAME" echo "config : $ENV_NAME"
[ ! $LOGFILE ] && echo '\033[1;33m------------------------------------------\033[0m' [ ! $LOGFILE ] && echo '\033[1;33m------------------------------------------\033[0m'
@ -130,111 +272,17 @@ __RUN() {
##################################################################### #####################################################################
__OUTPUT_COMMAND_LIST() {
local LAST_TYPE LAST_SUBSET
for SCRIPT in $(__GET_AVAILABLE_SCRIPTS)
do
TYPE=$(echo $SCRIPT | sed 's/\/.*//')
SUBSET=$(echo $SCRIPT | sed 's/.*\/\(.*\)\/[^\/]*$/\1/')
[[ ! $LAST_TYPE =~ $TYPE ]] && {
echo >&2
echo "\\033[1;32m$TYPE scwrypts\\033[0m" >&2
LAST_SUBSET=''
}
[ $LAST_SUBSET ] && [[ ! $LAST_SUBSET =~ $SUBSET ]] && {
echo >&2
}
printf ' - ' >&2
echo $SCRIPT
LAST_TYPE=$TYPE
LAST_SUBSET=$SUBSET
done
}
#####################################################################
__SELECT_SCRIPT() {
local SCRIPT
local SCRIPTS=$(__GET_AVAILABLE_SCRIPTS)
local SEARCH=($@)
[[ ${#SEARCH[@]} -eq 0 ]] && {
SCRIPT=$(echo $SCRIPTS | __FZF 'select a script')
}
[[ ${#SEARCH[@]} -eq 1 ]] && [ -f ./$SEARCH ] && {
SCRIPT=$SEARCH
}
[ ! $SCRIPT ] && [[ ${#SEARCH[@]} -gt 0 ]] && {
SCRIPT=$SCRIPTS
for PATTERN in $SEARCH
do
SCRIPT=$(echo $SCRIPT | grep $PATTERN)
done
[ ! $SCRIPT ] && __FAIL 2 "no script found by name '$@'"
[[ $(echo $SCRIPT | wc -l) -gt 1 ]] && {
__STATUS "more than one script matched '$@'"
SCRIPT=$(echo $SCRIPT | __FZF 'select a script')
}
}
echo $SCRIPT
}
__GET_RUN_STRING() {
local SCRIPT="$1"
local ENV_NAME="$2"
local TYPE=$(echo $SCRIPT | sed 's/\/.*$//')
local RUN_STRING
local _VIRTUALENV="$SCWRYPTS_VIRTUALENV_PATH/$TYPE/bin/activate"
[ -f $_VIRTUALENV ] && source $_VIRTUALENV
case $TYPE in
py ) __CHECK_DEPENDENCY python || return 1
RUN_STRING="python -m $(echo $SCRIPT | sed 's/\//./g; s/\.py$//; s/\.\.//')"
CURRENT_PYTHON_VERSION=$(python --version | sed 's/^[^0-9]*\(3\.[^.]*\).*$/\1/')
echo $__PREFERRED_PYTHON_VERSIONS | grep -q $CURRENT_PYTHON_VERSION || {
__WARNING "only tested on the following python versions: $(printf ', %s.x' ${__PREFERRED_PYTHON_VERSIONS[@]} | sed 's/^, //')"
__WARNING 'compatibility may vary'
}
;;
zsh ) __CHECK_DEPENDENCY zsh || return 1
RUN_STRING="noglob ./$SCRIPT"
;;
zx ) __CHECK_DEPENDENCY zx || return 1
RUN_STRING="FORCE_COLOR=3 ./$SCRIPT.mjs"
;;
* ) __ERROR "unsupported script type '$SCRIPT_TYPE'"
return 2
;;
esac
RUN_STRING="SCWRYPTS_ENV='$ENV_NAME' $RUN_STRING"
[ -f $_VIRTUALENV ] && RUN_STRING="source '$_VIRTUALENV'; $RUN_STRING"
echo $RUN_STRING
}
__CHECK_ENV_REQUIRED() { __CHECK_ENV_REQUIRED() {
[ $CI ] && return 1 [ $CI ] && return 1
echo $SCRIPT | grep -q 'zsh/scwrypts/logs' && return 1 echo $SCWRYPT_NAME | grep -q 'scwrypts/logs/' && return 1
echo $SCWRYPT_NAME | grep -q 'scwrypts/environment/' && return 1
return 0 return 0
} }
__VALIDATE_UPSTREAM_TIMELINE() { __VALIDATE_UPSTREAM_TIMELINE() {
__STATUS "on '$ENV_NAME'; checking diff against origin/main" STATUS "on '$ENV_NAME'; checking diff against origin/main"
git fetch --quiet origin main git fetch --quiet origin main
local SYNC_STATUS=$? local SYNC_STATUS=$?
@ -243,14 +291,14 @@ __VALIDATE_UPSTREAM_TIMELINE() {
local DIFF_STATUS=$? local DIFF_STATUS=$?
[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && { [[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
__SUCCESS 'up-to-date with origin/main' SUCCESS 'up-to-date with origin/main'
} || { } || {
__WARNING WARNING
[[ $SYNC_STATUS -ne 0 ]] && __WARNING 'unable to synchronize with origin/main' [[ $SYNC_STATUS -ne 0 ]] && WARNING 'unable to synchronize with origin/main'
[[ $DIFF_STATUS -ne 0 ]] && __WARNING 'your branch differs from origin/main (diff listed above)' [[ $DIFF_STATUS -ne 0 ]] && WARNING 'your branch differs from origin/main (diff listed above)'
__WARNING WARNING
__yN 'continue?' || return 1 yN 'continue?' || return 1
} }
} }
@ -262,7 +310,7 @@ __GET_LOGFILE() {
|| [[ $SCRIPT =~ interactive ]] \ || [[ $SCRIPT =~ interactive ]] \
&& return 0 && return 0
echo "$SCWRYPTS_LOG_PATH/$(echo $SCRIPT | sed 's/^\.\///; s/\//\%/g').log" echo "$SCWRYPTS_LOG_PATH/$(echo $GROUP/$TYPE/$NAME | sed 's/^\.\///; s/\//\%/g').log"
} }
##################################################################### #####################################################################

View File

@ -1,26 +1,32 @@
DONT_EXIT=1 source ${0:a:h}/zsh/common.zsh NO_EXPORT_CONFIG=1 source "${0:a:h}/zsh/lib/import.driver.zsh" || return 42
##################################################################### #####################################################################
__SCWRYPTS() { SCWRYPTS__ZSH_PLUGIN() {
local SCRIPT=$(__GET_AVAILABLE_SCRIPTS | __FZF 'select a script') local SCWRYPT_SELECTION=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS | FZF 'select a script' --header-lines 1)
local NAME
local TYPE
local GROUP
zle clear-command-line zle clear-command-line
[ ! $SCRIPT ] && { zle accept-line; return 0; } [ ! $SCWRYPT_SELECTION ] && { zle accept-line; return 0; }
SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION
which scwrypts >/dev/null 2>&1\ which scwrypts >/dev/null 2>&1\
&& RBUFFER="scwrypts" || RBUFFER="$SCWRYPTS_ROOT/scwrypts" && RBUFFER="scwrypts" || RBUFFER="$SCWRYPTS_ROOT/scwrypts"
RBUFFER+=" $SCRIPT" RBUFFER+=" --name $NAME --group $GROUP --type $TYPE"
zle accept-line zle accept-line
} }
zle -N scwrypts __SCWRYPTS zle -N scwrypts SCWRYPTS__ZSH_PLUGIN
bindkey $SCWRYPTS_SHORTCUT scwrypts bindkey $SCWRYPTS_SHORTCUT scwrypts
##################################################################### #####################################################################
__SCWRYPTS_ENV() { SCWRYPTS__ZSH_PLUGIN_ENV() {
local RESET='reset' local RESET='reset'
local SELECTED=$(\ local SELECTED=$(\
{ [ $SCWRYPTS_ENV ] && echo $RESET; __GET_ENV_NAMES; } \ { [ $SCWRYPTS_ENV ] && echo $RESET; SCWRYPTS__GET_ENV_NAMES; } \
| __FZF 'select an environment' \ | FZF 'select an environment' \
) )
zle clear-command-line zle clear-command-line
@ -32,5 +38,5 @@ __SCWRYPTS_ENV() {
zle accept-line zle accept-line
} }
zle -N scwrypts-setenv __SCWRYPTS_ENV zle -N scwrypts-setenv SCWRYPTS__ZSH_PLUGIN_ENV
bindkey $SCWRYPTS_ENV_SHORTCUT scwrypts-setenv bindkey $SCWRYPTS_ENV_SHORTCUT scwrypts-setenv

View File

@ -1,13 +0,0 @@
_DEPENDENCIES+=(
aws
jq
)
_REQUIRED_ENV+=(
AWS_ACCOUNT
AWS_PROFILE
AWS_REGION
)
source ${0:a:h}/../common.zsh
#####################################################################
_AWS() { aws --profile $AWS_PROFILE --region $AWS_REGION --output json $@; }

View File

@ -1,6 +0,0 @@
_DEPENDENCIES+=(
docker
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,14 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__STATUS "performing AWS ECR docker login"
_AWS ecr get-login-password | docker login \
--username AWS \
--password-stdin \
"$AWS_ACCOUNT.dkr.ecr.$AWS_REGION.amazonaws.com" \
&& __SUCCESS "logged in to 'AWS:$AWS_ACCOUNT:$AWS_REGION'" \
|| __FAIL 1 "unable to login to '$AWS_ACCOUNT' in '$AWS_REGION'"

View File

@ -1,6 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=(
AWS__EFS__LOCAL_MOUNT_POINT
)
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,34 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
_EFS_DISCONNECT() {
[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
__STATUS 'no efs currently mounted'
exit 0
}
local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
[ ! $MOUNTED ] && {
__STATUS 'no efs currently mounted'
exit 0
}
__GETSUDO || exit 1
local SELECTED=$(echo $MOUNTED | __FZF 'select a file system to unmount')
[ ! $SELECTED ] && __ABORT
local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
__STATUS "unmounting '$SELECTED'"
sudo umount $EFS >/dev/null 2>&1
sudo rmdir $EFS \
&& __SUCCESS "done" \
|| __FAIL 2 "failed to unmount '$EFS'"
}
#####################################################################
_EFS_DISCONNECT

View File

@ -1,6 +0,0 @@
_DEPENDENCIES+=(
kubectl
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,19 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__STATUS "performing AWS ECR docker login"
CLUSTER_NAME=$(\
_AWS eks list-clusters \
| jq -r '.[] | .[]' \
| __FZF 'select a cluster'
)
[ ! $CLUSTER_NAME ] && __ABORT
__STATUS "updating kubeconfig for '$CLUSTER_NAME'"
_AWS eks update-kubeconfig --name $CLUSTER_NAME \
&& __SUCCESS "kubeconfig updated with '$CLUSTER_NAME'" \
|| __ERROR "failed to update kubeconfig; do you have permissions to access '$CLUSTER_NAME'?"

View File

@ -1,6 +0,0 @@
_DEPENDENCIES+=(
cli53
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,4 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,30 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=(
AWS__S3__MEDIA_TARGETS
AWS__S3__MEDIA_BUCKET
)
source ${0:a:h}/../common.zsh
#####################################################################
AWS__S3__MEDIA_TARGETS=($(echo $AWS__S3__MEDIA_TARGETS | sed 's/,/\n/g'))
__SYNC_MEDIA() {
local ACTION="$1"
local REMOTE_TARGET="s3://$AWS__S3__MEDIA_BUCKET/$2"
local LOCAL_TARGET="$HOME/$2"
local A B
case $ACTION in
push ) A="$LOCAL_TARGET"; B="$REMOTE_TARGET" ;;
pull ) A="$REMOTE_TARGET"; B="$LOCAL_TARGET" ;;
* ) __ERROR "unknown action '$1'"; return 1 ;;
esac
local FLAGS=(${@:3})
__STATUS "${ACTION}ing $2"
_AWS s3 sync $A $B $FLAGS \
&& __SUCCESS "$2 up-to-date" \
|| { __ERROR "unable to sync $2 (see above)"; return 1; }
}

View File

@ -1,27 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__PULL_ALL_MEDIA() {
local FLAGS=($@)
local FAILED_COUNT=0
__STATUS 'starting media download from s3'
local TARGET
for TARGET in $AWS__S3__MEDIA_TARGETS
do
__SYNC_MEDIA pull $TARGET $FLAGS || ((FAILED_COUNT+=1))
done
[[ $FAILED_COUNT -eq 0 ]] \
&& __SUCCESS 'local media files now up-to-date' \
|| __FAIL $FAILED_COUNT 'unable to download one or more targets' \
;
}
#####################################################################
__PULL_ALL_MEDIA $@

View File

@ -1,27 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__PUSH_ALL_MEDIA() {
local FLAGS=($@)
local FAILED_COUNT=0
__STATUS 'starting media upload to s3'
local TARGET
for TARGET in $AWS__S3__MEDIA_TARGETS
do
__SYNC_MEDIA push $TARGET $FLAGS || ((FAILED_COUNT+=1))
done
[[ $FAILED_COUNT -eq 0 ]] \
&& __SUCCESS 's3 media files now up-to-date' \
|| __FAIL $FAILED_COUNT 'unable to upload one or more targets' \
;
}
#####################################################################
__PUSH_ALL_MEDIA $@

10
zsh/cloud/aws/ecr/login Executable file
View File

@ -0,0 +1,10 @@
#!/bin/zsh
#
# interactive docker login to AWS ECR
#
DEPENDENCIES+=()   # no direct CLI dependencies; the ecr module declares its own
REQUIRED_ENV+=()   # environment requirements come from 'use cloud/aws/ecr'
use cloud/aws/ecr  # NOTE(review): presumably defines ECR_LOGIN — confirm in module
CHECK_ENVIRONMENT
#####################################################################
ECR_LOGIN $@

View File

@ -1,37 +1,40 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=(jq)
_REQUIRED_ENV+=() REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)
source ${0:a:h}/common.zsh
use cloud/aws/cli
CHECK_ENVIRONMENT
##################################################################### #####################################################################
_EFS_CONNECT() { EFS_CONNECT() {
__GETSUDO || exit 1 GETSUDO || exit 1
[ ! -d $AWS__EFS__LOCAL_MOUNT_POINT ] && { [ ! -d $AWS__EFS__LOCAL_MOUNT_POINT ] && {
sudo mkdir $AWS__EFS__LOCAL_MOUNT_POINT \ sudo mkdir $AWS__EFS__LOCAL_MOUNT_POINT \
&& __STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'" && STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'"
} }
local FS_ID=$(\ local FS_ID=$(\
_AWS efs describe-file-systems \ AWS efs describe-file-systems \
| jq -r '.[] | .[] | .FileSystemId' \ | jq -r '.[] | .[] | .FileSystemId' \
| __FZF 'select a filesystem to mount' \ | FZF 'select a filesystem to mount' \
) )
[ ! $FS_ID ] && __ABORT [ ! $FS_ID ] && ABORT
local MOUNT_POINT="$AWS__EFS__LOCAL_MOUNT_POINT/$FS_ID" local MOUNT_POINT="$AWS__EFS__LOCAL_MOUNT_POINT/$FS_ID"
[ -d "$MOUNT_POINT" ] && sudo rmdir "$MOUNT_POINT" >/dev/null 2>&1 [ -d "$MOUNT_POINT" ] && sudo rmdir "$MOUNT_POINT" >/dev/null 2>&1
[ -d "$MOUNT_POINT" ] && { [ -d "$MOUNT_POINT" ] && {
__STATUS "$FS_ID is already mounted" STATUS "$FS_ID is already mounted"
exit 0 exit 0
} }
local MOUNT_TARGETS=$(_AWS efs describe-mount-targets --file-system-id $FS_ID) local MOUNT_TARGETS=$(AWS efs describe-mount-targets --file-system-id $FS_ID)
local ZONE=$(\ local ZONE=$(\
echo $MOUNT_TARGETS \ echo $MOUNT_TARGETS \
| jq -r '.[] | .[] | .AvailabilityZoneName' \ | jq -r '.[] | .[] | .AvailabilityZoneName' \
| sort -u | __FZF 'select availability zone'\ | sort -u | FZF 'select availability zone'\
) )
[ ! $ZONE ] && __ABORT [ ! $ZONE ] && ABORT
local MOUNT_IP=$(\ local MOUNT_IP=$(\
echo $MOUNT_TARGETS \ echo $MOUNT_TARGETS \
@ -39,15 +42,15 @@ _EFS_CONNECT() {
| head -n1 \ | head -n1 \
) )
__SUCCESS 'ready to mount!' SUCCESS 'ready to mount!'
__REMINDER 'your device must be connected to the appropriate VPN' REMINDER 'for private file-systems, you must be connected to the appropriate VPN'
__STATUS "file system id : $FS_ID" STATUS "file system id : $FS_ID"
__STATUS "availability zone : $ZONE" STATUS "availability zone : $ZONE"
__STATUS "file system ip : $MOUNT_IP" STATUS "file system ip : $MOUNT_IP"
__STATUS "local mount point : $MOUNT_POINT" STATUS "local mount point : $MOUNT_POINT"
__Yn 'proceed?' || __ABORT Yn 'proceed?' || ABORT
sudo mkdir $MOUNT_POINT \ sudo mkdir $MOUNT_POINT \
&& sudo mount \ && sudo mount \
@ -55,12 +58,12 @@ _EFS_CONNECT() {
-o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport \ -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport \
$MOUNT_IP:/ \ $MOUNT_IP:/ \
"$MOUNT_POINT" \ "$MOUNT_POINT" \
&& __SUCCESS "mounted at '$MOUNT_POINT'" \ && SUCCESS "mounted at '$MOUNT_POINT'" \
|| { || {
sudo rmdir $MOUNT_POINT >/dev/null 2>&1 sudo rmdir $MOUNT_POINT >/dev/null 2>&1
__FAIL 2 "unable to mount '$FS_ID'" FAIL 2 "unable to mount '$FS_ID'"
} }
} }
##################################################################### #####################################################################
_EFS_CONNECT EFS_CONNECT $@

37
zsh/cloud/aws/efs/unmount Executable file
View File

@ -0,0 +1,37 @@
#!/bin/zsh
#
# interactively select and unmount an EFS filesystem from the local
# mount root (AWS__EFS__LOCAL_MOUNT_POINT)
#
DEPENDENCIES+=(jq)
REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)
use cloud/aws/cli
CHECK_ENVIRONMENT
#####################################################################
EFS_DISCONNECT() {
# nothing to do when the mount root doesn't exist or is empty
[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
STATUS 'no efs currently mounted'
exit 0
}
local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
[ ! $MOUNTED ] && {
STATUS 'no efs currently mounted'
exit 0
}
# unmounting requires elevated privileges
GETSUDO || exit 1
local SELECTED=$(echo $MOUNTED | FZF 'select a file system to unmount')
[ ! $SELECTED ] && ABORT
local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
STATUS "unmounting '$SELECTED'"
# umount output is suppressed; the rmdir result is the success signal
sudo umount $EFS >/dev/null 2>&1
sudo rmdir $EFS \
&& SUCCESS "done" \
|| FAIL 2 "failed to unmount '$EFS'"
}
#####################################################################
EFS_DISCONNECT $@

10
zsh/cloud/aws/eks/login Executable file
View File

@ -0,0 +1,10 @@
#!/bin/zsh
#
# interactive kubeconfig login for an AWS EKS cluster
#
DEPENDENCIES+=()   # no direct CLI dependencies; the eks module declares its own
REQUIRED_ENV+=()   # environment requirements come from 'use cloud/aws/eks'
use cloud/aws/eks  # NOTE(review): presumably defines EKS_CLUSTER_LOGIN — confirm in module
CHECK_ENVIRONMENT
#####################################################################
EKS_CLUSTER_LOGIN $@

View File

@ -1,14 +1,18 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=()
_REQUIRED_ENV+=() REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
use cloud/aws/rds
use db/postgres
CHECK_ENVIRONMENT
##################################################################### #####################################################################
RDS_INTERACTIVE_LOGIN() { CREATE_BACKUP() {
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
GET_DATABASE_CREDENTIALS $@ || return 1 RDS__GET_DATABASE_CREDENTIALS $@ || return 1
__RUN_SCWRYPT 'zsh/db/postgres/pg_dump' -- \ PG_DUMP \
--host $DB_HOST \ --host $DB_HOST \
--port $DB_PORT \ --port $DB_PORT \
--name $DB_NAME \ --name $DB_NAME \
@ -17,6 +21,5 @@ RDS_INTERACTIVE_LOGIN() {
; ;
} }
##################################################################### #####################################################################
RDS_INTERACTIVE_LOGIN $@ CREATE_BACKUP $@

View File

@ -1,14 +1,18 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=()
_REQUIRED_ENV+=() REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
use cloud/aws/rds
use db/postgres
CHECK_ENVIRONMENT
##################################################################### #####################################################################
RDS_INTERACTIVE_LOGIN() { RDS_INTERACTIVE_LOGIN() {
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
GET_DATABASE_CREDENTIALS $@ || return 1 RDS__GET_DATABASE_CREDENTIALS $@ || return 1
__RUN_SCWRYPT 'zsh/db/interactive/postgres' -- \ POSTGRES__LOGIN_INTERACTIVE \
--host $DB_HOST \ --host $DB_HOST \
--port $DB_PORT \ --port $DB_PORT \
--name $DB_NAME \ --name $DB_NAME \

View File

@ -1,14 +1,18 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=()
_REQUIRED_ENV+=() REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
use cloud/aws/rds
use db/postgres
CHECK_ENVIRONMENT
##################################################################### #####################################################################
RDS_INTERACTIVE_LOGIN() { LOAD_BACKUP() {
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
GET_DATABASE_CREDENTIALS $@ || return 1 RDS__GET_DATABASE_CREDENTIALS $@ || return 1
__RUN_SCWRYPT 'zsh/db/postgres/pg_restore' -- \ PG_RESTORE \
--host $DB_HOST \ --host $DB_HOST \
--port $DB_PORT \ --port $DB_PORT \
--name $DB_NAME \ --name $DB_NAME \
@ -19,4 +23,4 @@ RDS_INTERACTIVE_LOGIN() {
##################################################################### #####################################################################
RDS_INTERACTIVE_LOGIN $@ LOAD_BACKUP $@

View File

@ -1,21 +1,22 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=(cli53)
_REQUIRED_ENV+=() REQUIRED_ENV+=(AWS_PROFILE)
source ${0:a:h}/common.zsh
CHECK_ENVIRONMENT
##################################################################### #####################################################################
_ROUTE53_BACKUP() { ROUTE53_BACKUP() {
local BACKUP_PATH="$SCWRYPTS_OUTPUT_PATH/$ENV_NAME/aws-dns-backup/$(date '+%Y-%m-%d')" local BACKUP_PATH="$SCWRYPTS_OUTPUT_PATH/$ENV_NAME/aws-dns-backup/$(date '+%Y-%m-%d')"
mkdir -p $BACKUP_PATH >/dev/null 2>&1 mkdir -p $BACKUP_PATH >/dev/null 2>&1
local DOMAIN local DOMAIN
local JOBS=() local JOBS=()
for DOMAIN in $(_ROUTE53_GET_DOMAINS) for DOMAIN in $(ROUTE53_GET_DOMAINS)
do do
( __STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \ ( STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \
&& cli53 export --profile $AWS_PROFILE $DOMAIN > "$BACKUP_PATH/$DOMAIN.txt" \ && cli53 export --profile $AWS_PROFILE $DOMAIN > "$BACKUP_PATH/$DOMAIN.txt" \
&& __SUCCESS "backed up '$DOMAIN'" \ && SUCCESS "backed up '$DOMAIN'" \
|| __ERROR "failed to back up '$DOMAIN'" \ || ERROR "failed to back up '$DOMAIN'" \
) & ) &
JOBS+=$! JOBS+=$!
done done
@ -24,7 +25,7 @@ _ROUTE53_BACKUP() {
for P in ${JOBS[@]}; do wait $P >/dev/null 2>&1; done for P in ${JOBS[@]}; do wait $P >/dev/null 2>&1; done
} }
_ROUTE53_GET_DOMAINS() { ROUTE53_GET_DOMAINS() {
cli53 list --profile $AWS_PROFILE \ cli53 list --profile $AWS_PROFILE \
| awk '{print $2;}' \ | awk '{print $2;}' \
| sed '1d; s/\.$//'\ | sed '1d; s/\.$//'\
@ -32,4 +33,4 @@ _ROUTE53_GET_DOMAINS() {
} }
##################################################################### #####################################################################
_ROUTE53_BACKUP ROUTE53_BACKUP

10
zsh/cloud/media-sync/pull Executable file
View File

@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

# load MEDIA_SYNC__PULL from the cloud/media-sync module group
use cloud/media-sync

CHECK_ENVIRONMENT
#####################################################################

# forwards all CLI arguments to the module helper
# (presumably syncs remote media down to the local library —
#  confirm in the cloud/media-sync module)
MEDIA_SYNC__PULL $@

10
zsh/cloud/media-sync/push Executable file
View File

@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

# load MEDIA_SYNC__PUSH from the cloud/media-sync module group
use cloud/media-sync

CHECK_ENVIRONMENT
#####################################################################

# forwards all CLI arguments to the module helper
# (presumably syncs local media up to the remote library —
#  confirm in the cloud/media-sync module)
MEDIA_SYNC__PUSH $@

View File

@ -1,31 +0,0 @@
#####################################################################
source ${0:a:h}/../global/common.zsh
source ${0:a:h}/utils/utils.module.zsh \
	|| { [ $DONT_EXIT ] && return 1 || exit 1; }
#####################################################################

# lists saved environment files (basenames under the config env dir),
# reverse-sorted
__GET_ENV_FILES() { ls $SCWRYPTS_CONFIG_PATH/env | sort -r }

# bootstrap: if no environments exist yet, seed dev/local/prod
# copies from the environment template
[ ! "$(__GET_ENV_FILES)" ] && {
	cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/dev"
	cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/local"
	cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/prod"
}

# environment names are the file basenames
__GET_ENV_NAMES() { __GET_ENV_FILES | sed 's/.*\///'; }
__GET_ENV_FILE() { echo "$SCWRYPTS_CONFIG_PATH/env/$1"; }

# fzf selectors; the _TAIL variant presumably allows entering a
# brand-new name (confirm in utils) while the plain one only selects
__SELECT_OR_CREATE_ENV() { __GET_ENV_NAMES | __FZF_TAIL 'select/create an environment'; }
__SELECT_ENV() { __GET_ENV_NAMES | __FZF 'select an environment'; }
#####################################################################

# lists every executable scwrypt path relative to the repo root,
# with the leading './' and the file extension stripped
__GET_AVAILABLE_SCRIPTS() {
	cd $SCWRYPTS_ROOT;
	find . -mindepth 2 -type f -executable \
		| grep -v '\.git' \
		| grep -v 'node_modules' \
		| sed 's/^\.\///; s/\.[^.]*$//' \
		;
}

View File

@ -1,6 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# opens the user settings file in the configured editor
# (CONFIG__USER_SETTINGS is presumably set by the sourced common.zsh —
#  confirm there)
__EDIT "$CONFIG__USER_SETTINGS"

View File

@ -1,10 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__STATUS 'updating all config files and links'
__RUN_SCWRYPT zsh/config/symlinks || exit 1
__RUN_SCWRYPT zsh/config/terminfo || exit 2
__SUCCESS 'finished updating config files and links'

View File

@ -1,24 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

# parses postgres connection flags from the argument list into the
# caller-scoped variables _HOST, _NAME, _PASS, _PORT, _USER
# (callers declare these 'local' before invoking);
# unrecognized arguments are silently skipped
GET_POSTGRES_LOGIN_ARGS() {
	while [[ $# -gt 0 ]]
	do
		case $1 in
			--host | -h ) _HOST="$2"; shift 2 ;;
			--name | -d ) _NAME="$2"; shift 2 ;;
			--pass | -w ) _PASS="$2"; shift 2 ;;
			--port | -p ) _PORT="$2"; shift 2 ;;
			--user | -U ) _USER="$2"; shift 2 ;;
			* ) shift 1 ;;
		esac
	done

	# fall back to standard local-postgres defaults for anything unset;
	# note: no default for _PASS — an empty password passes through
	[ ! $_HOST ] && _HOST=127.0.0.1
	[ ! $_NAME ] && _NAME=postgres
	[ ! $_PORT ] && _PORT=5432
	[ ! $_USER ] && _USER=postgres
}

View File

@ -1,4 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,29 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=(
	pgcli
)
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# opens an interactive pgcli session; connection details come from
# CLI flags parsed by GET_POSTGRES_LOGIN_ARGS (with local defaults)
_LOGIN_POSTGRES() {
	local _HOST _NAME _PASS _PORT _USER
	GET_POSTGRES_LOGIN_ARGS $@

	# run from a per-host data directory so session artifacts are
	# grouped by host
	local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST"
	[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
	cd $DATA_DIR

	__STATUS "performing login : $_USER@$_HOST:$_PORT/$_NAME"
	__STATUS "working directory : $DATA_DIR"

	# password via environment variable keeps it out of argv / ps output
	PGPASSWORD="$_PASS" pgcli \
		--host $_HOST \
		--port $_PORT \
		--user $_USER \
		--dbname $_NAME \
		;
}

#####################################################################
_LOGIN_POSTGRES $@

View File

@ -1,4 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -0,0 +1,9 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

# load POSTGRES__LOGIN_INTERACTIVE from the db/postgres module
use db/postgres

CHECK_ENVIRONMENT
#####################################################################

# forwards all CLI arguments to the module helper
POSTGRES__LOGIN_INTERACTIVE $@

View File

@ -1,44 +1,9 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=( DEPENDENCIES+=()
pg_dump REQUIRED_ENV+=()
)
_REQUIRED_ENV+=() use db/postgres
source ${0:a:h}/common.zsh
CHECK_ENVIRONMENT
##################################################################### #####################################################################
PG_DUMP $@
BACKUP_POSTGRES() {
local _HOST _NAME _PASS _PORT _USER
GET_POSTGRES_LOGIN_ARGS $@
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_dump"
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
cd $DATA_DIR
local OUTPUT_FILE="$DATA_DIR/$_NAME.dump"
[ -f $OUTPUT_FILE ] && {
local BACKUP_COUNT=$(ls "$DATA_DIR/$_NAME."*".dump" | wc -l)
ls "$DATA_DIR/$_NAME."*".dump"
__INFO "discovered previous dump for '$_HOST/$_NAME'"
__INFO "backing up previous dump to '$_NAME.$BACKUP_COUNT.dump'"
mv "$OUTPUT_FILE" "$DATA_DIR/$_NAME.$BACKUP_COUNT.dump"
}
__STATUS "making backup of : $_USER@$_HOST:$_PORT/$_NAME"
__STATUS "output file : $OUTPUT_FILE"
PGPASSWORD="$_PASS" pg_dump \
--verbose \
--format custom \
--host "$_HOST" \
--port "$_PORT" \
--username "$_USER" \
--dbname "$_NAME" \
--file "$OUTPUT_FILE" \
&& { __SUCCESS "finished backup of '$_HOST/$_NAME'"; __SUCCESS "saved to '$OUTPUT_FILE'"; } \
|| { __ERROR "error creating backup for '$_HOST/$_NAME' (see above)"; return 1; }
}
#####################################################################
BACKUP_POSTGRES $@

View File

@ -1,55 +1,9 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=( DEPENDENCIES+=()
pg_dump REQUIRED_ENV+=()
)
_REQUIRED_ENV+=() use db/postgres
source ${0:a:h}/common.zsh
CHECK_ENVIRONMENT
##################################################################### #####################################################################
PG_RESTORE $@
BACKUP_POSTGRES() {
local _HOST _NAME _PASS _PORT _USER
GET_POSTGRES_LOGIN_ARGS $@
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_restore"
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
cd $DATA_DIR
local INPUT_FILE="$DATA_DIR/$_NAME.dump"
[ ! -f $INPUT_FILE ] && {
local DUMP="$(dirname $DATA_DIR)/pg_dump/$_NAME.dump"
__STATUS $DUMP
ls $DUMP
[ -f "$DUMP" ] && {
__SUCCESS "discovered previous scwrypts dump"
__SUCCESS "$DUMP"
__Yn 'restore from this backup?' && INPUT_FILE="$DUMP"
}
[ ! -f "$INPUT_FILE" ] && {
__STATUS 'place backup in the following location:'
__STATUS "$INPUT_FILE"
}
while [ ! -f $INPUT_FILE ]; do sleep 1; done
}
__STATUS "backup file : $DATA_DIR"
__STATUS "database : $_USER@$_HOST:$_PORT/$_NAME"
PGPASSWORD="$_PASS" pg_restore \
--verbose \
--single-transaction \
--format custom \
--host "$_HOST" \
--port "$_PORT" \
--username "$_USER" \
--dbname "$_NAME" \
"$INPUT_FILE" \
&& { __SUCCESS "finished restoring backup for '$_HOST/$_NAME'"; } \
|| { __ERROR "error restoring backup for '$_HOST/$_NAME' (see above)"; return 1; }
}
#####################################################################
BACKUP_POSTGRES $@

51
zsh/db/postgres/run-sql Executable file
View File

@ -0,0 +1,51 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use db/postgres

CHECK_ENVIRONMENT
#####################################################################

# interactively selects a '.sql' file from the scwrypts data path and
# runs it against a postgres database; connection arguments are parsed
# by POSTGRES__SET_LOGIN_ARGS from the db/postgres module
RUN_SQL_POSTGRES() {
	local _PASS _ARGS=()
	POSTGRES__SET_LOGIN_ARGS $@

	# NOTE(review): FILENAME is presumably populated by
	# POSTGRES__SET_LOGIN_ARGS (e.g. from a --file flag) — confirm in
	# the db/postgres module
	local INPUT_FILE="$FILENAME"

	local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
	[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR
	cd $SQL_DIR

	# bugfix: the previous check `ls "*.sql" 2>&1 | wc -l` could never
	# report an empty directory — the quoted glob is matched literally
	# by ls, and the 2>&1 redirect turned the resulting error message
	# into one line of output, so the count was always >= 1
	[[ $(find . -mindepth 1 -maxdepth 1 -name '*.sql' | wc -l) -eq 0 ]] && {
		ERROR "you haven't made any SQL commands yet"
		REMINDER "add '.sql' files here: '$SQL_DIR/'"
		return 1
	}

	# no file given up front: pick one interactively
	[ ! $INPUT_FILE ] && INPUT_FILE=$(FZF 'select a sql file to run')
	[ ! $INPUT_FILE ] && ABORT
	[ ! -f "$INPUT_FILE" ] && FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"

	# preview the SQL and require explicit confirmation before running
	STATUS "loading '$INPUT_FILE' preview..."
	LESS "$INPUT_FILE"

	STATUS "login : $_USER@$_HOST:$_PORT/$_NAME"
	STATUS "command : '$INPUT_FILE'"
	yN 'run this command?' || ABORT

	STATUS "running '$INPUT_FILE'"
	PSQL < $INPUT_FILE \
		&& SUCCESS "finished running '$INPUT_FILE'" \
		|| FAIL 3 "something went wrong running '$INPUT_FILE' (see above)"
}

#####################################################################
WARNING
WARNING 'this function is in a beta state'
WARNING
RUN_SQL_POSTGRES $@

View File

@ -1,4 +0,0 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,72 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=(
	psql
)
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# interactively selects a '.sql' file from the scwrypts data path and
# runs it against a postgres database via psql
_RUN_SQL_POSTGRES() {
	local _HOST _NAME _PASS _PORT _USER INPUT_FILE

	# parse connection flags; unrecognized arguments are skipped
	while [[ $# -gt 0 ]]
	do
		case $1 in
			--host | -h ) _HOST="$2"; shift 2 ;;
			--name | -d ) _NAME="$2"; shift 2 ;;
			--pass | -w ) _PASS="$2"; shift 2 ;;
			--port | -p ) _PORT="$2"; shift 2 ;;
			--user | -U ) _USER="$2"; shift 2 ;;
			--file | -i ) INPUT_FILE="$2"; shift 2 ;;
			* ) shift 1 ;;
		esac
	done

	# local-postgres defaults for anything unset
	[ ! $_HOST ] && _HOST=127.0.0.1
	[ ! $_NAME ] && _NAME=postgres
	[ ! $_PORT ] && _PORT=5432
	[ ! $_USER ] && _USER=postgres

	local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
	[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR
	cd $SQL_DIR

	# NOTE(review): the quoted glob "*.sql" is matched literally by ls,
	# and the 2>&1 redirect makes the error message count as one line of
	# output — this empty-directory guard looks unable to ever trigger;
	# verify
	[[ $(ls "*.sql" 2>&1 | wc -l) -eq 0 ]] && {
		__ERROR "you haven't made any SQL commands yet"
		__REMINDER "add '.sql' files here: '$SQL_DIR/'"
		exit 1
	}

	# no --file given: pick one interactively
	[ ! $INPUT_FILE ] && INPUT_FILE=$(\
		__FZF 'select a sql file to run'
	)
	[ ! $INPUT_FILE ] && __ABORT
	[ ! -f $INPUT_FILE ] && {
		__FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"
	}

	# preview the SQL and require explicit confirmation before running
	__STATUS "loading $INPUT_FILE preview..."
	_LESS $INPUT_FILE
	__STATUS "login : $_USER@$_HOST:$_PORT/$_NAME"
	__STATUS "command : ./$INPUT_FILE"
	__yN 'run this command?' || __ABORT

	__STATUS "running './$INPUT_FILE'"
	# password via environment variable keeps it out of argv / ps output
	PGPASSWORD="$_PASS" psql \
		-h $_HOST \
		-p $_PORT \
		-U $_USER \
		-d $_NAME \
		< $INPUT_FILE \
		&& __SUCCESS "finished running './$INPUT_FILE'" \
		|| __FAIL 3 "something went wrong running './$INPUT_FILE' (see above)"
}

#####################################################################
__WARNING
__WARNING 'this function is in a beta state'
__WARNING
_RUN_SQL_POSTGRES $@

19
zsh/docker/cleanup Executable file
View File

@ -0,0 +1,19 @@
#!/bin/zsh
DEPENDENCIES+=(docker)
REQUIRED_ENV+=()
CHECK_ENVIRONMENT
#####################################################################

# interactively prunes ALL unused docker containers, images, and
# volumes on the current machine; destructive, so it requires an
# explicit confirmation before touching anything
DOCKER_CLEAN() {
	WARNING 'this will prune all docker resources from the current machine'
	WARNING 'pruned resources are PERMANENTLY DELETED'
	yN 'continue?' || return 1

	local PRUNE_RESULT

	# each prune reports only its final summary line
	PRUNE_RESULT=$(docker container prune -f 2>/dev/null | tail -n 1)
	SUCCESS "CONTAINER : $PRUNE_RESULT"

	PRUNE_RESULT=$(docker image prune -f 2>/dev/null | tail -n 1)
	SUCCESS "IMAGE : $PRUNE_RESULT"

	PRUNE_RESULT=$(docker volume prune -f 2>/dev/null | tail -n 1)
	SUCCESS "VOLUME : $PRUNE_RESULT"
}

#####################################################################
DOCKER_CLEAN $@

View File

@ -1,6 +0,0 @@
_DEPENDENCIES+=(
git
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

View File

@ -1,6 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# thin wrapper: run the package install scwrypt in build-only mode,
# forwarding any extra CLI arguments
__RUN_SCWRYPT zsh/git/package/install -- --only-build $@

View File

@ -1,6 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# thin wrapper: run the package install scwrypt in pull-only mode,
# forwarding any extra CLI arguments
__RUN_SCWRYPT zsh/git/package/install -- --only-pull $@

View File

@ -1,6 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# thin wrapper: run the package install scwrypt in update mode,
# forwarding any extra CLI arguments
__RUN_SCWRYPT zsh/git/package/install -- --update $@

View File

@ -1,7 +1,8 @@
#!/bin/zsh #!/bin/zsh
_DEPENDENCIES+=() DEPENDENCIES+=()
_REQUIRED_ENV+=() REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
CHECK_ENVIRONMENT
##################################################################### #####################################################################
__SUCCESS 'hello world!' SUCCESS 'hello world!'

View File

@ -1,14 +0,0 @@
_DEPENDENCIES+=(
	i3
	i3-msg
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

# default to the primary display when launched outside a session
[ ! $DISPLAY ] && export DISPLAY=:0

# sends a desktop notification tagged with the current scwrypt name;
# silently no-ops when notify-send is unavailable
_NOTIFY() {
	__CHECK_DEPENDENCY notify-send || return 0
	notify-send "SCWRYPTS $SCWRYPT_NAME" $@
}

View File

@ -1,31 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=(
	pdflatex
	rg
)
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# compiles the main latex document associated with the given file;
# runs pdflatex twice (standard practice so cross-references resolve)
PDFLATEX() {
	[ ! $1 ] && __FAIL 1 'must provide filename'
	local FILENAME=$(GET_MAIN_LATEX_FILENAME "$1")

	# nonstopmode: don't drop into latex's interactive error prompt
	local ARGS=(-interaction=nonstopmode)
	ARGS+=("$FILENAME")

	cd "$(dirname $FILENAME)"

	__STATUS 'running compile (1/2)'
	pdflatex ${ARGS[@]} \
		|| __FAIL 2 'first compile failed (see above)'

	# second pass output is suppressed; the first pass already showed
	# any interesting errors
	__STATUS 'running compile (2/2)'
	pdflatex ${ARGS[@]} >/dev/null 2>&1 \
		|| __FAIL 3 'second compile failed :c'

	__SUCCESS "created '$(echo $FILENAME | sed 's/\.[^.]*$/.pdf/')'"
}

#####################################################################
PDFLATEX $@

View File

@ -1,34 +0,0 @@
_DEPENDENCIES+=(
	rg
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

# resolves the "main" latex file (the one containing 'documentclass')
# for the given file, searching up to three parent-directory levels;
# if none is found, warns and echoes the original argument unchanged
GET_MAIN_LATEX_FILENAME() {
	local FILENAME=$(__GET_PATH_TO_RELATIVE_ARGUMENT "$1")
	local DIRNAME="$FILENAME"

	for _ in {1..3}
	do
		# echoes $FILENAME and returns when the candidate matches
		CHECK_IS_MAIN_LATEX_FILE && return 0

		DIRNAME="$(dirname "$FILENAME")"
		__STATUS "checking '$DIRNAME'"

		# never search at or above $HOME
		[[ $DIRNAME =~ ^$HOME$ ]] && break

		# next candidate: first .tex file in the parent directory that
		# mentions 'documentclass'
		FILENAME=$(
			rg -l --max-depth 1 'documentclass' "$DIRNAME/" \
				| grep '\.tex$' \
				| head -n1 \
			)
		__STATUS "here is '$FILENAME'"
	done

	__WARNING 'unable to find documentclass; pdflatex will probably fail'
	echo "$1"
}

# succeeds (and echoes $FILENAME) when the current FILENAME candidate
# exists and contains a 'documentclass' declaration
CHECK_IS_MAIN_LATEX_FILE() {
	[ ! $FILENAME ] && return 1
	grep -q 'documentclass' $FILENAME 2>/dev/null && echo $FILENAME || return 3
}

View File

@ -1,15 +0,0 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

# prints the path of the compiled pdf for the given latex file,
# failing if it has not been built yet
GET_PDF() {
	# swap the main file's extension for '.pdf'
	local FILENAME=$(GET_MAIN_LATEX_FILENAME "$1" | sed 's/\.[^.]*$/.pdf/')
	[ $FILENAME ] && [ -f $FILENAME ] || __FAIL 1 "no compiled pdf found for '$1'; have you run 'build-pdf'?"
	__SUCCESS 'found main pdf'
	echo $FILENAME
}

#####################################################################
GET_PDF $@

View File

@ -0,0 +1,21 @@
#####################################################################
DEPENDENCIES+=(
	aws
)
REQUIRED_ENV+=(
	AWS_ACCOUNT
	AWS_PROFILE
	AWS_REGION
)
#####################################################################

# thin wrapper around the aws CLI which pins the profile, region, and
# JSON output format from the current scwrypts environment; all other
# arguments pass straight through
AWS() {
	local AWS_CLI_ARGS=(
		--profile $AWS_PROFILE
		--region $AWS_REGION
		--output json
	)
	aws ${AWS_CLI_ARGS[@]} $@
}

View File

@ -0,0 +1,28 @@
#####################################################################
DEPENDENCIES+=(
	docker
)
REQUIRED_ENV+=(
	AWS_ACCOUNT
	AWS_REGION
)

use cloud/aws/cli
#####################################################################

# authenticates the local docker daemon against the AWS ECR registry
# for the current account and region; the token is piped over stdin so
# it never appears in argv
ECR_LOGIN() {
	local ECR_REGISTRY="$AWS_ACCOUNT.dkr.ecr.$AWS_REGION.amazonaws.com"

	STATUS "performing AWS ECR docker login"

	if AWS ecr get-login-password \
		| docker login --username AWS --password-stdin "$ECR_REGISTRY"
	then
		SUCCESS "authenticated docker for '$AWS_ACCOUNT' in '$AWS_REGION'"
	else
		ERROR "unable to authenticate docker for '$AWS_ACCOUNT' in '$AWS_REGION'"
		return 1
	fi
}

View File

@ -0,0 +1,60 @@
#####################################################################
DEPENDENCIES+=(
	kubectl
)
REQUIRED_ENV+=(
	AWS_ACCOUNT
	AWS_REGION
)

use cloud/aws/cli
#####################################################################

# interactively sets the default kubeconfig to match a selected EKS
# cluster, creating the kubeconfig entry if it does not already exist
EKS_CLUSTER_LOGIN() {
	local USAGE="
		usage: [...options...]
		options
		-c, --cluster-name <string> (optional) login a specific cluster
		Interactively sets the default kubeconfig to match the selected
		cluster in EKS. Also creates the kubeconfig entry if it does not
		already exist.
	"

	local CLUSTER_NAME
	while [[ $# -gt 0 ]]
	do
		case $1 in
			-c | --cluster-name ) CLUSTER_NAME="$2"; shift 1 ;;

			# bugfix: previously captured the first positional argument
			# into an unused, undeclared 'APPLICATION' variable
			# (copy-paste residue); no positional arguments are
			# documented or used, so all of them are now errors
			* ) ERROR "extra positional argument '$1'" ;;
		esac
		shift 1
	done

	# no -c flag: choose a cluster interactively from the account
	[ ! $CLUSTER_NAME ] && CLUSTER_NAME=$(\
		AWS eks list-clusters \
			| jq -r '.[] | .[]' \
			| FZF 'select a cluster'
	)

	[ ! $CLUSTER_NAME ] && ERROR 'must select a valid cluster or use -c flag'
	CHECK_ERRORS

	##########################################

	STATUS 'creating / updating kubeconfig for EKS cluster'
	STATUS "updating kubeconfig for '$CLUSTER_NAME'"
	AWS eks update-kubeconfig --name $CLUSTER_NAME \
		&& SUCCESS "kubeconfig updated with '$CLUSTER_NAME'" \
		|| ERROR "failed to update kubeconfig; do you have permissions to access '$CLUSTER_NAME'?"
}

View File

@ -1,9 +1,48 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
##################################################################### #####################################################################
GET_DATABASE_CREDENTIALS() { DEPENDENCIES+=(
docker
)
REQUIRED_ENV+=(
AWS_ACCOUNT
AWS_REGION
)
use cloud/aws/cli
#####################################################################
RDS__SELECT_DATABASE() {
local DATABASES=$(_RDS__GET_AVAILABLE_DATABASES)
[ ! $DATABASES ] && FAIL 1 'no databases available'
local ID=$(\
echo $DATABASES | jq -r '.instance + " @ " + .cluster' \
| FZF 'select a database (instance@cluster)' \
)
[ ! $ID ] && ABORT
local INSTANCE=$(echo $ID | sed 's/ @ .*$//')
local CLUSTER=$(echo $ID | sed 's/^.* @ //')
echo $DATABASES | jq "select (.instance == \"$INSTANCE\" and .cluster == \"$CLUSTER\")"
}
_RDS__GET_AVAILABLE_DATABASES() {
AWS rds describe-db-instances \
| jq -r '.[] | .[] | {
instance: .DBInstanceIdentifier,
cluster: .DBClusterIdentifier,
type: .Engine,
host: .Endpoint.Address,
port: .Endpoint.Port,
user: .MasterUsername,
database: .DBName
}'
}
RDS__GET_DATABASE_CREDENTIALS() {
local PRINT_PASSWORD=0 local PRINT_PASSWORD=0
local ERRORS=0 local ERRORS=0
@ -12,22 +51,22 @@ GET_DATABASE_CREDENTIALS() {
case $1 in case $1 in
--print-password ) PRINT_PASSWORD=1 ;; --print-password ) PRINT_PASSWORD=1 ;;
* ) * )
__WARNING "unrecognized argument $1" WARNING "unrecognized argument $1"
ERRORS+=1 ERRORS+=1
;; ;;
esac esac
shift 1 shift 1
done done
__ERROR_CHECK CHECK_ERRORS
########################################## ##########################################
local DATABASE=$(SELECT_DATABASE) local DATABASE=$(RDS__SELECT_DATABASE)
[ ! $DATABASE ] && __ABORT [ ! $DATABASE ] && ABORT
DB_HOST="$(echo $DATABASE | jq -r '.host')" DB_HOST="$(echo $DATABASE | jq -r '.host')"
[ ! $DB_HOST ] && { __ERROR 'unable to find host'; return 2; } [ ! $DB_HOST ] && { ERROR 'unable to find host'; return 2; }
DB_PORT="$(echo $DATABASE | jq -r '.port')" DB_PORT="$(echo $DATABASE | jq -r '.port')"
[ ! $DB_PORT ] && DB_PORT=5432 [ ! $DB_PORT ] && DB_PORT=5432
@ -37,37 +76,37 @@ GET_DATABASE_CREDENTIALS() {
local AUTH_METHOD=$(\ local AUTH_METHOD=$(\
echo "iam\nsecretsmanager\nuser-input" \ echo "iam\nsecretsmanager\nuser-input" \
| __FZF 'select an authentication method' \ | FZF 'select an authentication method' \
) )
[ ! $AUTH_METHOD ] && __ABORT [ ! $AUTH_METHOD ] && ABORT
case $AUTH_METHOD in case $AUTH_METHOD in
iam ) GET_AUTH__IAM ;; iam ) _RDS_AUTH__iam ;;
secretsmanager ) GET_AUTH__SECRETSMANAGER ;; secretsmanager ) _RDS_AUTH__secretsmanager ;;
user-input ) GET_AUTH__USER_INPUT ;; user-input ) _RDS_AUTH__userinput ;;
esac esac
__STATUS STATUS
__STATUS "host : $DB_HOST" STATUS "host : $DB_HOST"
__STATUS "type : $DB_TYPE" STATUS "type : $DB_TYPE"
__STATUS "port : $DB_PORT" STATUS "port : $DB_PORT"
__STATUS "database : $DB_NAME" STATUS "database : $DB_NAME"
__STATUS "username : $DB_USER" STATUS "username : $DB_USER"
[[ $PRINT_PASSWORD -eq 1 ]] && __STATUS "password : $DB_PASS" [[ $PRINT_PASSWORD -eq 1 ]] && STATUS "password : $DB_PASS"
__STATUS STATUS
} }
GET_AUTH__IAM() { _RDS_AUTH__iam() {
DB_PASS=$(\ DB_PASS=$(\
_AWS rds generate-db-auth-token \ AWS rds generate-db-auth-token \
--hostname $DB_HOST \ --hostname $DB_HOST \
--port $DB_PORT \ --port $DB_PORT \
--username $DB_USER \ --username $DB_USER \
) )
} }
GET_AUTH__SECRETSMANAGER() { _RDS_AUTH__secretsmanager() {
local CREDENTIALS=$(GET_SECRETSMANAGER_CREDENTIALS) local CREDENTIALS=$(_RDS__GET_SECRETSMANAGER_CREDENTIALS)
echo $CREDENTIALS | jq -e '.pass' >/dev/null 2>&1 \ echo $CREDENTIALS | jq -e '.pass' >/dev/null 2>&1 \
&& DB_PASS="'$(echo $CREDENTIALS | jq -r '.pass' | sed "s/'/'\"'\"'/g")'" && DB_PASS="'$(echo $CREDENTIALS | jq -r '.pass' | sed "s/'/'\"'\"'/g")'"
@ -87,44 +126,15 @@ GET_AUTH__SECRETSMANAGER() {
&& DB_NAME=$(echo $CREDENTIALS | jq -r '.dbname') && DB_NAME=$(echo $CREDENTIALS | jq -r '.dbname')
} }
GET_SECRETSMANAGER_CREDENTIALS() { _RDS__GET_SECRETSMANAGER_CREDENTIALS() {
local ID=$(\ local ID=$(\
_AWS secretsmanager list-secrets \ AWS secretsmanager list-secrets \
| jq -r '.[] | .[] | .Name' \ | jq -r '.[] | .[] | .Name' \
| __FZF 'select a secret' \ | FZF 'select a secret' \
) )
[ ! $ID ] && return 1 [ ! $ID ] && return 1
_AWS secretsmanager get-secret-value --secret-id "$ID" \ AWS secretsmanager get-secret-value --secret-id "$ID" \
| jq -r '.SecretString' | jq | jq -r '.SecretString' | jq
} }
SELECT_DATABASE() {
local DATABASES=$(GET_AVAILABLE_DATABASES)
[ ! $DATABASES ] && __FAIL 1 'no databases available'
local ID=$(\
echo $DATABASES | jq -r '.instance + " @ " + .cluster' \
| __FZF 'select a database (instance@cluster)' \
)
[ ! $ID ] && __ABORT
local INSTANCE=$(echo $ID | sed 's/ @ .*$//')
local CLUSTER=$(echo $ID | sed 's/^.* @ //')
echo $DATABASES | jq "select (.instance == \"$INSTANCE\" and .cluster == \"$CLUSTER\")"
}
GET_AVAILABLE_DATABASES() {
_AWS rds describe-db-instances \
| jq -r '.[] | .[] | {
instance: .DBInstanceIdentifier,
cluster: .DBClusterIdentifier,
type: .Engine,
host: .Endpoint.Address,
port: .Endpoint.Port,
user: .MasterUsername,
database: .DBName
}'
}

Some files were not shown because too many files have changed in this diff Show More