Compare commits
1 Commits
Author | SHA1 | Date
---|---|---
| 187a467092 |
@@ -3,23 +3,12 @@ export AWS_ACCOUNT=
export AWS_PROFILE=
export AWS_REGION=
export AWS__EFS__LOCAL_MOUNT_POINT=
export DIRECTUS__API_TOKEN=
export DIRECTUS__BASE_URL=
export DISCORD__BOT_TOKEN=
export DISCORD__DEFAULT_AVATAR_URL=
export DISCORD__DEFAULT_CHANNEL_ID=
export AWS__S3__MEDIA_BUCKET=
export AWS__S3__MEDIA_TARGETS=
export I3__BORDER_PIXEL_SIZE=
export I3__DMENU_FONT_SIZE=
export I3__GLOBAL_FONT_SIZE=
export I3__MODEL_CONFIG=
export LINEAR__API_TOKEN=
export MEDIA_SYNC__S3_BUCKET
export MEDIA_SYNC__TARGETS
export REDIS_AUTH=
export REDIS_HOST=
export REDIS_PORT=
export TWILIO__ACCOUNT_SID=
export TWILIO__API_KEY=
export TWILIO__API_SECRET=
export TWILIO__DEFAULT_PHONE_FROM=
export TWILIO__DEFAULT_PHONE_TO=

@@ -4,29 +4,14 @@ AWS_REGION |
AWS__EFS__LOCAL_MOUNT_POINT | fully-qualified path to mount the EFS drive
DIRECTUS__API_TOKEN | details for a directus instance
DIRECTUS__BASE_URL |
DISCORD__BOT_TOKEN | details for discord bot
DISCORD__DEFAULT_AVATAR_URL |
DISCORD__DEFAULT_CHANNEL_ID |
AWS__S3__MEDIA_BUCKET | s3 bucket name and filesystem targets for media backups
AWS__S3__MEDIA_TARGETS |
I3__BORDER_PIXEL_SIZE | custom i3 configuration settings
I3__DMENU_FONT_SIZE |
I3__GLOBAL_FONT_SIZE |
I3__MODEL_CONFIG |
LINEAR__API_TOKEN | linear.app project management configuration
MEDIA_SYNC__S3_BUCKET | s3 bucket name and filesystem targets for media backups
MEDIA_SYNC__TARGETS |
REDIS_AUTH | redis connection credentials
REDIS_HOST |
REDIS_PORT |
TWILIO__ACCOUNT_SID | twilio account / credentials
TWILIO__API_KEY |
TWILIO__API_SECRET |
TWILIO__DEFAULT_PHONE_FROM |
TWILIO__DEFAULT_PHONE_TO |

42 global/common.zsh Normal file
@@ -0,0 +1,42 @@
#####################################################################
[ ! $SCWRYPTS_ROOT ] && SCWRYPTS_ROOT="$(dirname ${0:a:h})"

source "${0:a:h}/config.zsh"

#####################################################################
__SCWRYPT=1 # arbitrary; indicates scwrypts exists

__PREFERRED_PYTHON_VERSIONS=(3.10 3.9)
__NODE_VERSION=18.0.0

__ENV_TEMPLATE=$SCWRYPTS_ROOT/.env.template

#####################################################################
__GET_PATH_TO_RELATIVE_ARGUMENT() {
    [[ $1 =~ ^[.] ]] \
        && echo $(readlink -f "$EXECUTION_DIR/$1") \
        || echo "$1" \
        ;
    true
}

#####################################################################
__RUN_SCWRYPT() {
    ((SUBSCWRYPT+=1))
    { printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
    echo " BEGIN SUBSCWRYPT : $@" >&2

    SUBSCWRYPT=$SUBSCWRYPT SCWRYPTS_ENV=$ENV_NAME \
        "$SCWRYPTS_ROOT/scwrypts" $@
    EXIT_CODE=$?

    { printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
    echo " END SUBSCWRYPT : $1" >&2
    ((SUBSCWRYPT-=1))

    return $EXIT_CODE
}

BIN global/config.zsh Normal file
Binary file not shown.

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'csv',
        output_stream = stream.output,
        output_type = 'json',
    )

#####################################################################
execute(main,
    description = 'convert csv into json',
    parse_args = [],
)

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'csv',
        output_stream = stream.output,
        output_type = 'yaml',
    )

#####################################################################
execute(main,
    description = 'convert csv into yaml',
    parse_args = [],
)

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'json',
        output_stream = stream.output,
        output_type = 'csv',
    )

#####################################################################
execute(main,
    description = 'convert json into csv',
    parse_args = [],
)

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'json',
        output_stream = stream.output,
        output_type = 'yaml',
    )

#####################################################################
execute(main,
    description = 'convert json into yaml',
    parse_args = [],
)

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'yaml',
        output_stream = stream.output,
        output_type = 'csv',
    )

#####################################################################
execute(main,
    description = 'convert yaml into csv',
    parse_args = [],
)

@@ -1,24 +0,0 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'yaml',
        output_stream = stream.output,
        output_type = 'json',
    )

#####################################################################
execute(main,
    description = 'convert yaml into json',
    parse_args = [],
)

@@ -1,145 +0,0 @@
#!/usr/bin/env python
from json import dumps

from py.lib.fzf import fzf, fzf_tail
from py.lib.http import directus
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(args, stream):
    if {None} == { args.collection, args.filters, args.fields }:
        args.interactive = True

    if args.interactive:
        args.generate_filters_prompt = True
        args.generate_fields_prompt = True

    collection = _get_or_select_collection(args)
    filters = _get_or_select_filters(args, collection)
    fields = _get_or_select_fields(args, collection)

    query = '&'.join([
        param for param in [
            fields,
            filters,
        ]
        if param
    ])

    endpoint = f'items/{collection}?{query}'

    response = directus.request('GET', endpoint)

    stream.writeline(dumps({
        **response.json(),
        'scwrypts_metadata': {
            'endpoint': endpoint,
            'repeat_with': f'scwrypts -n py/directus/get-items -- -c {collection} -f \'{query}\'',
        },
    }))


def _get_or_select_collection(args):
    collection = args.collection

    if collection is None:
        collection = fzf(
            prompt = 'select a collection',
            choices = directus.get_collections(),
        )

    if not collection:
        raise ValueError('collection required for query')

    return collection


def _get_or_select_filters(args, collection):
    filters = args.filters or ''

    if filters == '' and args.generate_filters_prompt:
        filters = '&'.join([
            f'filter[{filter}][' + (
                operator := fzf(
                    prompt = f'select operator for {filter}',
                    choices = directus.FILTER_OPERATORS,
                )
            ) + ']=' + fzf_tail(prompt = f'filter[{filter}][{operator}]')

            for filter in fzf(
                prompt = 'select filter(s) [C^c to skip]',
                fzf_options = '--multi',
                force_list = True,
                choices = directus.get_fields(collection),
            )
        ])

    return filters


def _get_or_select_fields(args, collection):
    fields = args.fields or ''

    if fields == '' and args.generate_fields_prompt:
        fields = ','.join(fzf(
            prompt = 'select return field(s) [C^c to get all]',
            fzf_options = '--multi',
            choices = directus.get_fields(collection),
            force_list = True,
        ))

    if fields:
        fields = f'fields[]={fields}'

    return fields


#####################################################################
execute(main,
    description = 'interactive CLI to get data from directus',
    parse_args = [
        ( ['-c', '--collection'], {
            "dest" : 'collection',
            "default" : None,
            "help" : 'the name of the collection',
            "required" : False,
        }),
        ( ['-f', '--filters'], {
            "dest" : 'filters',
            "default" : None,
            "help" : 'as a URL-suffix, filters for the query',
            "required" : False,
        }),
        ( ['-d', '--fields'], {
            "dest" : 'fields',
            "default" : None,
            "help" : 'comma-separated list of fields to include',
            "required" : False,
        }),
        ( ['-p', '--interactive-prompt'], {
            "action" : 'store_true',
            "dest" : 'interactive',
            "default" : False,
            "help" : 'interactively generate filter prompts; implied if no flags are provided',
            "required" : False,
        }),
        ( ['--prompt-filters'], {
            "action" : 'store_true',
            "dest" : 'generate_filters_prompt',
            "default" : False,
            "help" : '(superceded by -p) only generate filters interactively',
            "required" : False,
        }),
        ( ['--prompt-fields'], {
            "action" : 'store_true',
            "dest" : 'generate_fields_prompt',
            "default" : False,
            "help" : '(superceded by -p) only generate filters interactively',
            "required" : False,
        }),
    ]
)

@@ -1,61 +0,0 @@
#!/usr/bin/env python
from json import dumps
from sys import stderr

from py.lib.http import discord
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(args, stream):
    if args.body is None:
        print(f'reading input from {stream.input.name}', file=stderr)
        args.body = ''.join(stream.readlines()).strip()

    if len(args.body) == 0:
        args.body = 'PING'

    response = discord.send_message(
        content = args.body,
        channel_id = args.channel_id,
        webhook = args.webhook,
        avatar_url = args.avatar_url,
    )

    stream.writeline(dumps({
        **(response.json() if response.text != '' else {'message': 'OK'}),
        'scwrypts_metadata': {},
    }))


#####################################################################
execute(main,
    description = 'post a message to the indicated discord channel',
    parse_args = [
        ( ['-b', '--body'], {
            'dest' : 'body',
            'help' : 'message body',
            'required' : False,
        }),
        ( ['-c', '--channel-id'], {
            'dest' : 'channel_id',
            'help' : 'target channel id',
            'required' : False,
        }),
        ( ['-w', '--webhook'], {
            'dest' : 'webhook',
            'help' : 'target webhook (takes precedence over -c)',
            'required' : False,
        }),
        ( ['--avatar-url'], {
            'dest' : 'avatar_url',
            'help' : 'replace default avatar_url',
            'required' : False,
        }),
    ]
)

@@ -1,27 +0,0 @@
#!/usr/bin/env python
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(args, stream):
    stream.writeline(args.message)


#####################################################################
execute(main,
    description = 'a simple "Hello, World!" program',
    parse_args = [
        ( ['-m', '--message'], {
            'dest' : 'message',
            'default' : 'HELLO WORLD',
            'help' : 'message to print',
            'required' : False,
        }),
    ],
)

7 py/hello_world.py Executable file
@@ -0,0 +1,7 @@
#!/usr/bin/env python

def main():
    print('HELLO WORLD')

if __name__ == '__main__':
    main()

@@ -1,6 +0,0 @@
import py.lib.data
import py.lib.fzf
import py.lib.http
import py.lib.redis
import py.lib.scwrypts
import py.lib.twilio

@@ -1 +0,0 @@
import py.lib.data.converter

@@ -1,73 +0,0 @@
import csv
import json
import yaml


def convert(input_stream, input_type, output_stream, output_type):
    if input_type == output_type:
        raise ValueError('input type and output type are the same')

    data = convert_input(input_stream, input_type)
    write_output(output_stream, output_type, data)


def convert_input(stream, input_type):
    supported_input_types = {'csv', 'json', 'yaml'}

    if input_type not in supported_input_types:
        raise ValueError(f'input_type "{input_type}" not supported; must be one of {supported_input_types}')

    return {
        'csv': _read_csv,
        'json': _read_json,
        'yaml': _read_yaml,
    }[input_type](stream)


def write_output(stream, output_type, data):
    supported_output_types = {'csv', 'json', 'yaml'}

    if output_type not in supported_output_types:
        raise ValueError(f'output_type "{output_type}" not supported; must be one of {supported_output_types}')

    return {
        'csv': _write_csv,
        'json': _write_json,
        'yaml': _write_yaml,
    }[output_type](stream, data)


#####################################################################

def _read_csv(stream):
    return [dict(line) for line in csv.DictReader(stream)]

def _write_csv(stream, data):
    writer = csv.DictWriter(stream, fieldnames=list({
        key
        for dictionary in data
        for key in dictionary.keys()
    }))

    writer.writeheader()

    for value in data:
        writer.writerow(value)

#####################################################################

def _read_json(stream):
    data = json.loads(stream.read())
    return data if isinstance(data, list) else [data]

def _write_json(stream, data):
    stream.write(json.dumps(data))

#####################################################################

def _read_yaml(stream):
    data = yaml.safe_load(stream)
    return data if isinstance(data, list) else [data]

def _write_yaml(stream, data):
    yaml.dump(data, stream, default_flow_style=False)

@@ -1 +0,0 @@
from py.lib.fzf.client import fzf, fzf_tail, fzf_head

@@ -1,61 +0,0 @@
from pyfzf.pyfzf import FzfPrompt

FZF_PROMPT = None


def fzf(  # pylint: disable=too-many-arguments
        choices=None,
        prompt=None,
        fzf_options='',
        delimiter='\n',
        return_type=str,
        force_list=False,
    ):
    global FZF_PROMPT  # pylint: disable=global-statement

    if choices is None:
        choices = []

    if not isinstance(return_type, type):
        raise ValueError(f'return_type must be a valid python type; "{return_type}" is not a type')

    if FZF_PROMPT is None:
        FZF_PROMPT = FzfPrompt()

    options = ' '.join({
        '-i',
        '--layout=reverse',
        '--ansi',
        '--height=30%',
        f'--prompt "{prompt} : "' if prompt is not None else '',
        fzf_options,
    })

    selections = [
        return_type(selection)
        for selection in FZF_PROMPT.prompt(choices, options, delimiter)
    ]

    if not force_list:
        if len(selections) == 0:
            return None

        if len(selections) == 1:
            return selections[0]

    return selections


def fzf_tail(*args, **kwargs):
    return _fzf_print(*args, **kwargs)[-1]

def fzf_head(*args, **kwargs):
    return _fzf_print(*args, **kwargs)[0]

def _fzf_print(*args, fzf_options='', **kwargs):
    return fzf(
        *args,
        **kwargs,
        fzf_options = f'--print-query {fzf_options}',
        force_list = True,
    )

@@ -1,5 +0,0 @@
from py.lib.http.client import get_request_client

import py.lib.http.directus
import py.lib.http.discord
import py.lib.http.linear

@@ -1,20 +0,0 @@
from requests import request


def get_request_client(base_url, headers=None):
    if headers is None:
        headers = {}

    return lambda method, endpoint, **kwargs: request(
        method = method,
        url = f'{base_url}/{endpoint}',
        headers = {
            **headers,
            **kwargs.get('headers', {}),
        },
        **{
            key: value
            for key, value in kwargs.items()
            if key != 'headers'
        },
    )

@@ -1,2 +0,0 @@
from py.lib.http.directus.client import *
from py.lib.http.directus.constant import *

@@ -1,56 +0,0 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv


REQUEST = None
COLLECTIONS = None
FIELDS = {}


def request(method, endpoint, **kwargs):
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        REQUEST = get_request_client(
            base_url = getenv("DIRECTUS__BASE_URL"),
            headers = {
                'Authorization': f'bearer {getenv("DIRECTUS__API_TOKEN")}',
            }
        )

    return REQUEST(method, endpoint, **kwargs)

def graphql(query, system=False):
    return request(
        'POST',
        'graphql' if system is True else 'graphql/system',
        json={'query': query},
    )


def get_collections():
    global COLLECTIONS  # pylint: disable=global-statement

    if COLLECTIONS is None:
        COLLECTIONS = [
            item['collection']
            for item in request(
                'GET',
                'collections?limit=-1&fields[]=collection',
            ).json()['data']
        ]

    return COLLECTIONS


def get_fields(collection):
    if FIELDS.get(collection) is None:
        FIELDS[collection] = [
            item['field']
            for item in request(
                'GET',
                f'fields/{collection}?limit=-1&fields[]=field',
            ).json()['data']
        ]

    return FIELDS[collection]

@@ -1,25 +0,0 @@
FILTER_OPERATORS = {
    '_eq',
    '_neq',
    '_lt',
    '_lte',
    '_gt',
    '_gte',
    '_in',
    '_nin',
    '_null',
    '_nnull',
    '_contains',
    '_ncontains',
    '_starts_with',
    '_ends_with',
    '_nends_with',
    '_between',
    '_nbetween',
    '_empty',
    '_nempty',
    '_intersects',
    '_nintersects',
    '_intersects_bbox',
    '_nintersects_bbox',
}

@@ -1,2 +0,0 @@
from py.lib.http.discord.client import *
from py.lib.http.discord.send_message import *

@@ -1,20 +0,0 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv

REQUEST = None

def request(method, endpoint, **kwargs):
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        headers = {}

        if (token := getenv("DISCORD__BOT_TOKEN", required = False)) is not None:
            headers['Authorization'] = f'Bot {token}'

        REQUEST = get_request_client(
            base_url = 'https://discord.com/api',
            headers = headers,
        )

    return REQUEST(method, endpoint, **kwargs)

@@ -1,34 +0,0 @@
from py.lib.scwrypts import getenv
from py.lib.http.discord import request

def send_message(content, channel_id=None, webhook=None, avatar_url=None, **kwargs):
    if channel_id is None:
        channel_id = getenv('DISCORD__DEFAULT_CHANNEL_ID', required=False)

    if avatar_url is None:
        avatar_url = getenv('DISCORD__DEFAULT_AVATAR_URL', required=False)

    endpoint = None

    if webhook is not None:
        endpoint = f'webhooks/{webhook}'
    elif channel_id is not None:
        endpoint = f'channels/{channel_id}/messages'
    else:
        raise ValueError('must provide target channel_id or webhook')


    return request(
        method = 'POST',
        endpoint = endpoint,
        json = {
            key: value
            for key, value in {
                'content': content,
                'username': 'wrobot',
                'avatar_url': avatar_url,
                **kwargs,
            }.items()
            if value is not None
        },
    )

@@ -1 +0,0 @@
from py.lib.http.linear.client import *

@@ -1,20 +0,0 @@
from py.lib.http import get_request_client
from py.lib.scwrypts import getenv

REQUEST = None

def request(method, endpoint, **kwargs):
    global REQUEST  # pylint: disable=global-statement

    if REQUEST is None:
        REQUEST = get_request_client(
            base_url = 'https://api.linear.app',
            headers = {
                'Authorization': f'bearer {getenv("LINEAR__API_TOKEN")}',
            }
        )

    return REQUEST(method, endpoint, **kwargs)

def graphql(query):
    return request('POST', 'graphql', json={'query': query})

@@ -1 +0,0 @@
from py.lib.redis.client import get_client

@@ -1,6 +0,0 @@
from py.lib.scwrypts.execute import execute
from py.lib.scwrypts.getenv import getenv
from py.lib.scwrypts.interactive import interactive
from py.lib.scwrypts.run import run

import py.lib.scwrypts.io

@@ -1,16 +0,0 @@
from argparse import ArgumentError


class MissingVariableError(EnvironmentError):
    def init(self, name):
        super().__init__(f'Missing required environment variable "{name}"')


class ImportedExecutableError(ImportError):
    def __init__(self):
        super().__init__('executable only; must run through scwrypts')


class MissingFlagAndEnvironmentVariableError(EnvironmentError, ArgumentError):
    def __init__(self, flags, env_var):
        super().__init__(f'must provide at least one of : {{ flags: {flags} OR {env_var} }}')

@@ -1,23 +0,0 @@
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

from py.lib.scwrypts.io import get_combined_stream, add_io_arguments


def execute(main, description=None, parse_args=None, toggle_input=True, toggle_output=True):
    if parse_args is None:
        parse_args = []

    parser = ArgumentParser(
        description = description,
        formatter_class = ArgumentDefaultsHelpFormatter,
    )

    add_io_arguments(parser, toggle_input, toggle_output)

    for a in parse_args:
        parser.add_argument(*a[0], **a[1])

    args = parser.parse_args()

    with get_combined_stream(args.input_file, args.output_file) as stream:
        return main(args, stream)

@@ -1,22 +0,0 @@
from bpython import embed


def interactive(variable_descriptions):
    def outer(function):

        def inner(*args, **kwargs):

            print('\npreparing interactive environment...\n')

            local_vars = function(*args, **kwargs)

            print('\n\n'.join([
                f'>>> {x}' for x in variable_descriptions
            ]))
            print('\nenvironment ready; user, GO! :)\n')

            embed(local_vars)

        return inner

    return outer

@@ -1,86 +0,0 @@
from contextlib import contextmanager
from pathlib import Path
from sys import stdin, stdout, stderr

from py.lib.scwrypts.getenv import getenv


@contextmanager
def get_stream(filename=None, mode='r', encoding='utf-8', verbose=False, **kwargs):
    allowed_modes = {'r', 'w', 'w+'}

    if mode not in allowed_modes:
        raise ValueError(f'mode "{mode}" not supported modes (must be one of {allowed_modes})')

    is_read = mode == 'r'

    if filename is not None:

        if verbose:
            print(f'opening file {filename} for {"read" if is_read else "write"}', file=stderr)

        if filename[0] not in {'/', '~'}:
            filename = Path(f'{getenv("EXECUTION_DIR")}/{filename}').resolve()
        with open(filename, mode=mode, encoding=encoding, **kwargs) as stream:
            yield stream

    else:
        if verbose:
            print('using stdin for read' if is_read else 'using stdout for write', file=stderr)

        yield stdin if is_read else stdout

        if not is_read:
            stdout.flush()


def add_io_arguments(parser, toggle_input=True, toggle_output=True):
    if toggle_input:
        parser.add_argument(
            '-i', '--input-file',
            dest = 'input_file',
            default = None,
            help = 'path to input file; omit for stdin',
            required = False,
        )

    if toggle_output:
        parser.add_argument(
            '-o', '--output-file',
            dest = 'output_file',
            default = None,
            help = 'path to output file; omit for stdout',
            required = False,
        )


@contextmanager
def get_combined_stream(input_file=None, output_file=None):
    with get_stream(input_file, 'r') as input_stream, get_stream(output_file, 'w+') as output_stream:
        yield CombinedStream(input_stream, output_stream)


class CombinedStream:
    def __init__(self, input_stream, output_stream):
        self.input = input_stream
        self.output = output_stream

    def read(self, *args, **kwargs):
        return self.input.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        return self.input.readline(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        return self.input.readlines(*args, **kwargs)

    def write(self, *args, **kwargs):
        return self.output.write(*args, **kwargs)

    def writeline(self, line):
        x = self.output.write(f'{line}\n')
        self.output.flush()
        return x

    def writelines(self, *args, **kwargs):
        return self.output.writelines(*args, **kwargs)

@@ -1,2 +0,0 @@
from py.lib.twilio.client import get_client
from py.lib.twilio.send_sms import send_sms

@@ -1,18 +0,0 @@
from twilio.rest import Client

from py.lib.scwrypts import getenv

CLIENT = None

def get_client():
    global CLIENT  # pylint: disable=global-statement

    if CLIENT is None:
        print('loading client')
        CLIENT = Client(
            username = getenv('TWILIO__API_KEY'),
            password = getenv('TWILIO__API_SECRET'),
            account_sid = getenv('TWILIO__ACCOUNT_SID'),
        )

    return CLIENT

@@ -1,57 +0,0 @@
from json import dumps
from time import sleep

from py.lib.twilio.client import get_client


def send_sms(to, from_, body, max_char_count=300, stream=None):
    '''
    abstraction for twilio.client.messages.create which will break
    messages into multi-part SMS rather than throwing an error or
    requiring the use of MMS data

    @param to              messages.create parameter
    @param from_           messages.create parameter
    @param body            messages.create parameter
    @param max_char_count  1 ≤ N ≤ 1500 (default 300)
    @param stream          used to report success/failure (optional)

    @return a list of twilio MessageInstance objects
    '''
    client = get_client()
    messages = []

    max_char_count = max(1, min(max_char_count, 1500))

    total_sms_parts = 1 + len(body) // max_char_count
    contains_multiple_parts = total_sms_parts > 1

    for i in range(0, len(body), max_char_count):
        msg_body = body[i:i+max_char_count]
        current_part = 1 + i // max_char_count

        if contains_multiple_parts:
            msg_body = f'{current_part}/{total_sms_parts}\n{msg_body}'

        message = client.messages.create(
            to = to,
            from_ = from_,
            body = msg_body,
        )

        messages.append(message)

        if stream is not None:
            stream.writeline(
                dumps({
                    'sid': message.sid,
                    'to': to,
                    'from': from_,
                    'body': msg_body,
                })
            )

        if contains_multiple_parts:
            sleep(2 if max_char_count <= 500 else 5)

    return messages

@@ -1,45 +0,0 @@
#!/usr/bin/env python
from py.lib.http.linear import graphql
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def get_query(args):
    body = f'"""from wrobot:\n```\n{args.message}\n```\n"""'
    return f'''
        mutation CommentCreate {{
            commentCreate(
                input: {{
                    issueId: "{args.issue_id}"
                    body: {body}
                }}
            ) {{ success }}
        }}'''

def main(args, stream):
    response = graphql(get_query(args))
    stream.writeline(response)


#####################################################################
execute(main,
    description = 'comment on an inssue in linear.app',
    parse_args = [
        ( ['-d', '--issue-id'], {
            'dest' : 'issue_id',
            'help' : 'issue short-code (e.g. CLOUD-319)',
            'required' : True,
        }),
        ( ['-m', '--message'], {
            'dest' : 'message',
            'help' : 'comment to post to the target issue',
            'required' : True,
        }),
    ]
)

@@ -1,19 +1,15 @@
from redis import StrictRedis

from py.lib.scwrypts import getenv
from py.scwrypts import getenv

CLIENT = None

def get_client():
    global CLIENT  # pylint: disable=global-statement

    if CLIENT is None:
        print('getting redis client')
        CLIENT = StrictRedis(
class RedisClient(StrictRedis):
    def __init__(self):
        super().__init__(
            host = getenv('REDIS_HOST'),
            port = getenv('REDIS_PORT'),
            password = getenv('REDIS_AUTH', required=False),
            decode_responses = True,
        )

    return CLIENT

Client = RedisClient()

@@ -1,26 +1,19 @@
#!/usr/bin/env python
from py.lib.redis import get_client
from py.lib.scwrypts import execute, interactive, getenv

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################
from py.redis.client import Client
from py.scwrypts import interactive, getenv


@interactive([
    f'r = StrictRedis(\'{getenv("REDIS_HOST")}:{getenv("REDIS_PORT")}\')',
])
def main(_args, _stream):
    # pylint: disable=possibly-unused-variable
    r = get_client()
@interactive
def main():
    r = Client

    print(f'''
        >>> r = StrictRedis({getenv("REDIS_HOST")}:{getenv("REDIS_PORT")})
    ''')

    return locals()


#####################################################################
execute(main,
    description = 'establishes a redis client in an interactive python shell',
    parse_args = [],
)
if __name__ == '__main__':
    main()

@@ -1,5 +1,2 @@
bpython
pyfzf
pyyaml
redis
twilio
bpython

3 py/scwrypts/__init__.py Normal file
@@ -0,0 +1,3 @@
from py.scwrypts.getenv import getenv
from py.scwrypts.interactive import interactive
from py.scwrypts.run import run

3 py/scwrypts/exceptions.py Normal file
@@ -0,0 +1,3 @@
class MissingVariableError(Exception):
    def init(self, name):
        super().__init__(f'Missing required environment variable "{name}"')

@@ -1,15 +1,16 @@
from os import getenv as os_getenv

from py.lib.scwrypts.exceptions import MissingVariableError
from py.scwrypts.exceptions import MissingVariableError
from py.scwrypts.run import run


def getenv(name, required=True):
    value = os_getenv(name, None)

    if value == None:
        run('zsh/scwrypts/environment/stage-variables', name)

    if required and not value:
        raise MissingVariableError(name)

    if value == '':
        value = None

    return value

11 py/scwrypts/interactive.py Normal file
@@ -0,0 +1,11 @@
from bpython import embed


def interactive(function):
    def main(*args, **kwargs):
        print('preparing interactive environment...')
        local_vars = function(*args, **kwargs)
        print('environment ready; user, GO! :)')
        embed(local_vars)

    return main

@@ -7,16 +7,11 @@ def run(scwrypt_name, *args):
    DEPTH = int(getenv('SUBSCWRYPT', '0'))
    DEPTH += 1

    SCWRYPTS_EXE = Path(__file__).parents[3] / 'scwrypts'
    ARGS = ' '.join([str(x) for x in args])
    print(f'SUBSCWRYPT={DEPTH} {SCWRYPTS_EXE} {scwrypt_name} -- {ARGS}')

    print(f'\n {"--"*DEPTH} ({DEPTH}) BEGIN SUBSCWRYPT : {Path(scwrypt_name).name}')
    subprocess_run(
        f'SUBSCWRYPT={DEPTH} {SCWRYPTS_EXE} {scwrypt_name} -- {ARGS}',
        f'SUBSCWRYPT={DEPTH} {Path(__file__).parents[2] / "scwrypts"} {scwrypt_name} -- {" ".join([str(x) for x in args])}',
        shell=True,
        executable='/bin/zsh',
        check=False,
    )

    print(f' {"--"*DEPTH} ({DEPTH}) END SUBSCWRYPT : {Path(scwrypt_name).name}\n')

@@ -1,65 +0,0 @@
#!/usr/bin/env python
from sys import stderr

from py.lib.scwrypts import execute, getenv
from py.lib.twilio import send_sms

from py.lib.scwrypts.exceptions import ImportedExecutableError, MissingFlagAndEnvironmentVariableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(args, stream):
    if args.body is None:
        print(f'reading input from {stream.input.name}', file=stderr)
        args.body = ''.join(stream.readlines()).strip()

    if len(args.body) == 0:
        args.body = 'PING'

    if args.from_ is None:
        raise MissingFlagAndEnvironmentVariableError(['-f', '--from'], 'TWILIO__DEFAULT_PHONE_FROM')

    if args.to is None:
        raise MissingFlagAndEnvironmentVariableError(['-t', '--to'], 'TWILIO__DEFAULT_PHONE_TO')

    send_sms(
        to = args.to,
        from_ = args.from_,
        body = args.body,
        max_char_count = args.max_char_count,
        stream = stream,
    )


#####################################################################
execute(main,
    description = 'send a simple SMS through twilio',
    parse_args = [
        ( ['-t', '--to'], {
            'dest' : 'to',
            'help' : 'phone number of the receipient',
            'required' : False,
            'default' : getenv('TWILIO__DEFAULT_PHONE_TO', required=False),
        }),
        ( ['-f', '--from'], {
            'dest' : 'from_',
            'help' : 'phone number of the receipient',
            'required' : False,
            'default' : getenv('TWILIO__DEFAULT_PHONE_FROM', required=False),
        }),
        ( ['-b', '--body'], {
            'dest' : 'body',
            'help' : 'message body',
            'required' : False,
        }),
        ( ['--max-char-count'], {
            'dest' : 'max_char_count',
            'help' : 'separate message into parts by character count (1 < N <= 1500)',
            'required' : False,
            'default' : 300,
        }),
    ]
)

315 run
@@ -1,7 +1,8 @@
#!/bin/zsh
export EXECUTION_DIR=$(pwd)
source "${0:a:h}/zsh/lib/import.driver.zsh" || exit 42

SCWRYPTS_ROOT="${0:a:h}"
source "$SCWRYPTS_ROOT/zsh/common.zsh" || exit 42
#####################################################################

__RUN() {
@@ -9,54 +10,24 @@ __RUN() {
usage: scwrypts [OPTIONS ...] SCRIPT -- [SCRIPT OPTIONS ...]

OPTIONS
-g, --group <group-name> only use scripts from the indicated group
-t, --type <type-name> only use scripts of the indicated type
-m, --name <scwrypt-name> only run the script if there is an exact match
(requires type and group)

-e, --env <env-name> set environment; overwrites SCWRYPTS_ENV
-n, --no-log skip logging and run in quiet mode
-n, --no-log skip logging (useful when calling scwrypts as an api)
-l, --list print out command list and exit

--update update scwrypts library to latest version

-v, --version print out scwrypts version and exit
-l, --list print out command list and exit
-h, --help display this message and exit
-h, --help display this message and exit
'
cd "$SCWRYPTS_ROOT"

local ENV_NAME="$SCWRYPTS_ENV"
local SEARCH_PATTERNS=()

local VARSPLIT SEARCH_GROUP SEARCH_TYPE SEARCH_NAME

local ERROR=0

while [[ $# -gt 0 ]]
do
case $1 in
-t | --type )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_TYPE=$2
shift 2
;;
-g | --group )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_GROUP=$2
shift 2
;;
-m | --name )
[ ! $2 ] && ERROR "missing value for argument $1" && break
SEARCH_NAME=$2
shift 2
;;

-[a-z][a-z]* )
VARSPLIT=$(echo "$1 " | sed 's/^\(-.\)\(.*\) /\1 -\2/')
set -- $(echo " $VARSPLIT ") ${@:2}
;;
-h | --help )
USAGE
__USAGE
return 0
;;
-n | --no-log )
@@ -64,173 +35,55 @@ __RUN() {
shift 1
;;
-e | --env )
[ ! $2 ] && ERROR "missing value for argument $1" && break
[ ! $SUBSCWRYPTS ] \
&& [ $ENV_NAME ] \
&& WARNING 'overwriting session environment' \
;

[ $ENV_NAME ] && __WARNING 'overwriting session environment'
ENV_NAME="$2"
STATUS "using CLI environment '$ENV_NAME'"
__STATUS "using CLI environment '$ENV_NAME'"
shift 2
;;
-l | --list )
SCWRYPTS__GET_AVAILABLE_SCWRYPTS
return 0
;;
-v | --version )
echo scwrypts $(cd "$SCWRYPTS__ROOT__scwrypts"; git describe --tags)
return 0
;;
--update )
cd "$SCWRYPTS__ROOT__scwrypts"
git fetch --quiet origin main
local SYNC_STATUS=$?

git diff --exit-code origin/main -- . >&2
local DIFF_STATUS=$?

[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
SUCCESS 'already up-to-date with origin/main'
} || {
git rebase --autostash origin/main \
&& SUCCESS 'up-to-date with origin/main' \
|| {
git rebase --abort
ERROR 'unable to update scwrypts; please try manual upgrade'
REMINDER "installation in '$(pwd)'"
}
}
__OUTPUT_COMMAND_LIST
return 0
;;
-- )
shift 1
break # pass arguments after '--' to the scwrypt
;;
--* )
ERROR "unrecognized argument '$1'"
-* )
__ERROR "unrecognized argument '$1'"
shift 1
;;
* )
SEARCH_PATTERNS+=($1)
SEARCH_PATTERNS+=$1
shift 1
;;
esac
done

[ $SEARCH_NAME ] && {
[ ! $SEARCH_TYPE ] && ERROR '--name requires --type argument'
[ ! $SEARCH_GROUP ] && ERROR '--name requires --group argument'
}

CHECK_ERRORS
__ERROR_CHECK

##########################################

local SCWRYPTS_AVAILABLE
local POTENTIAL_ERROR="no such scwrypt exists:"

SCWRYPTS_AVAILABLE=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS)

[ $SEARCH_NAME ] && {
POTENTIAL_ERROR+="\n NAME : '$SEARCH_NAME'"
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
SCWRYPTS_AVAILABLE=$({
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | sed -e 's/\x1b\[[0-9;]*m//g' | grep "^$SEARCH_NAME *$SEARCH_TYPE *$SEARCH_GROUP\$"
})
}

[ ! $SEARCH_NAME ] && {
[ $SEARCH_TYPE ] && {
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
SCWRYPTS_AVAILABLE=$(\
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep ' [^/]*'$SEARCH_TYPE'[^/]* '
} \
| awk '{$2=""; print $0;}' \
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
| column -ts '^'
)
}

[ $SEARCH_GROUP ] && {
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
SCWRYPTS_AVAILABLE=$(
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep "$SEARCH_GROUP"'[^/]*$'
} \
| awk '{$NF=""; print $0;}' \
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
| column -ts '^'
)
}

[[ ${#SEARCH_PATTERNS[@]} -gt 0 ]] && {
POTENTIAL_ERROR+="\n PATTERNS : $SEARCH_PATTERNS"
local P
for P in ${SEARCH_PATTERNS[@]}
do
SCWRYPTS_AVAILABLE=$(
{
echo $SCWRYPTS_AVAILABLE | head -n1
echo $SCWRYPTS_AVAILABLE | grep $P
}
)
done
}
}

[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -lt 2 ]] && ERROR "$POTENTIAL_ERROR"

CHECK_ERRORS

##########################################

local NAME="$SEARCH_NAME"
local TYPE="$SEARCH_TYPE"
local GROUP="$SEARCH_GROUP"

[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -eq 2 ]] \
&& SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | tail -n1) \
|| SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | FZF "select a script to run" --header-lines 1)
[ $SCWRYPT_SELECTION ] || exit 2

SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION

export SCWRYPT_NAME=$NAME
export SCWRYPT_TYPE=$TYPE
export SCWRYPT_GROUP=$GROUP

##########################################
local SCRIPT=$(__SELECT_SCRIPT $SEARCH_PATTERNS)
[ ! $SCRIPT ] && exit 2
export SCWRYPT_NAME=$SCRIPT

local ENV_REQUIRED=$(__CHECK_ENV_REQUIRED && echo 1 || echo 0)

[[ $ENV_REQUIRED -eq 1 ]] && {
[ ! $ENV_NAME ] && ENV_NAME=$(SCWRYPTS__SELECT_ENV)
local ENV_FILE=$(SCWRYPTS__GET_ENV_FILE "$ENV_NAME")
source "$ENV_FILE" || FAIL 5 "missing or invalid environment '$ENV_NAME'"
[ ! $ENV_NAME ] && ENV_NAME=$(__SELECT_ENV)
local ENV_FILE=$(__GET_ENV_FILE $ENV_NAME)

[ -f "$ENV_FILE" ] && source "$ENV_FILE" \
|| __FAIL 5 "missing or invalid environment '$ENV_NAME'"

export ENV_NAME
}

for f in $(eval 'echo $SCWRYPTS_STATIC_CONFIG__'$SCWRYPT_GROUP)
do
source "$f" || FAIL 5 "invalid static config '$f'"
done

##########################################

[ ! $SUBSCWRYPT ] \
&& [[ $ENV_NAME =~ prod ]] \
&& { __VALIDATE_UPSTREAM_TIMELINE || ABORT; }
&& { __VALIDATE_UPSTREAM_TIMELINE || __ABORT; }

##########################################

local RUN_STRING=$(SCWRYPTS__GET_RUNSTRING $SCWRYPT_NAME $SCWRYPT_TYPE $SCWRYPT_GROUP)
local RUN_STRING=$(__GET_RUN_STRING $SCRIPT $ENV_NAME)
[ ! $RUN_STRING ] && exit 3

##########################################
@@ -240,7 +93,7 @@ __RUN() {
local HEADER=$(
[ $SUBSCWRYPT ] && return 0
echo '====================================================================='
echo "script : $SCWRYPT_GROUP $SCWRYPT_TYPE $SCWRYPT_NAME"
echo "script : $SCRIPT"
echo "run at : $(date)"
echo "config : $ENV_NAME"
[ ! $LOGFILE ] && echo '\033[1;33m------------------------------------------\033[0m'
@@ -249,10 +102,10 @@ __RUN() {
[ ! $LOGFILE ] && {
[ $HEADER ] && echo $HEADER
[ $SUBSCWRYPT ] && {
eval "$RUN_STRING $(printf "%q " "$@")"
eval $RUN_STRING $@
exit $?
} || {
eval "$RUN_STRING $(printf "%q " "$@")" </dev/tty >/dev/tty 2>&1
eval $RUN_STRING $@ </dev/tty >/dev/tty 2>&1
exit $?
}
}
@@ -260,7 +113,7 @@ __RUN() {
{
[ $HEADER ] && echo $HEADER
echo '\033[1;33m--- BEGIN OUTPUT -------------------------\033[0m'
eval "$RUN_STRING $(printf "%q " "$@")"
eval $RUN_STRING $@
EXIT_CODE=$?
echo '\033[1;33m--- END OUTPUT ---------------------------\033[0m'
@@ -277,17 +130,111 @@

#####################################################################

__OUTPUT_COMMAND_LIST() {
local LAST_TYPE LAST_SUBSET
for SCRIPT in $(__GET_AVAILABLE_SCRIPTS)
do
TYPE=$(echo $SCRIPT | sed 's/\/.*//')
SUBSET=$(echo $SCRIPT | sed 's/.*\/\(.*\)\/[^\/]*$/\1/')
[[ ! $LAST_TYPE =~ $TYPE ]] && {
echo >&2
echo "\\033[1;32m$TYPE scwrypts\\033[0m" >&2
LAST_SUBSET=''
}
[ $LAST_SUBSET ] && [[ ! $LAST_SUBSET =~ $SUBSET ]] && {
echo >&2
}
printf ' - ' >&2
echo $SCRIPT
LAST_TYPE=$TYPE
LAST_SUBSET=$SUBSET
done
}

#####################################################################

__SELECT_SCRIPT() {
local SCRIPT
local SCRIPTS=$(__GET_AVAILABLE_SCRIPTS)
local SEARCH=($@)

[[ ${#SEARCH[@]} -eq 0 ]] && {
SCRIPT=$(echo $SCRIPTS | __FZF 'select a script')
}

[[ ${#SEARCH[@]} -eq 1 ]] && [ -f ./$SEARCH ] && {
SCRIPT=$SEARCH
}

[ ! $SCRIPT ] && [[ ${#SEARCH[@]} -gt 0 ]] && {
SCRIPT=$SCRIPTS
for PATTERN in $SEARCH
do
SCRIPT=$(echo $SCRIPT | grep $PATTERN)
done

[ ! $SCRIPT ] && __FAIL 2 "no script found by name '$@'"

[[ $(echo $SCRIPT | wc -l) -gt 1 ]] && {
__STATUS "more than one script matched '$@'"
SCRIPT=$(echo $SCRIPT | __FZF 'select a script')
}
}

echo $SCRIPT
}

__GET_RUN_STRING() {
local SCRIPT="$1"
local ENV_NAME="$2"
local TYPE=$(echo $SCRIPT | sed 's/\/.*$//')

local RUN_STRING

local _VIRTUALENV="$SCWRYPTS_VIRTUALENV_PATH/$TYPE/bin/activate"
[ -f $_VIRTUALENV ] && source $_VIRTUALENV

case $TYPE in
py ) __CHECK_DEPENDENCY python || return 1
RUN_STRING="python -m $(echo $SCRIPT | sed 's/\//./g; s/\.py$//; s/\.\.//')"

CURRENT_PYTHON_VERSION=$(python --version | sed 's/^[^0-9]*\(3\.[^.]*\).*$/\1/')

echo $__PREFERRED_PYTHON_VERSIONS | grep -q $CURRENT_PYTHON_VERSION || {
__WARNING "only tested on the following python versions: $(printf ', %s.x' ${__PREFERRED_PYTHON_VERSIONS[@]} | sed 's/^, //')"
__WARNING 'compatibility may vary'
}
;;

zsh ) __CHECK_DEPENDENCY zsh || return 1
RUN_STRING="noglob ./$SCRIPT"
;;

zx ) __CHECK_DEPENDENCY zx || return 1
RUN_STRING="FORCE_COLOR=3 ./$SCRIPT.mjs"
;;

* ) __ERROR "unsupported script type '$SCRIPT_TYPE'"
return 2
;;
esac

RUN_STRING="SCWRYPTS_ENV='$ENV_NAME' $RUN_STRING"
[ -f $_VIRTUALENV ] && RUN_STRING="source '$_VIRTUALENV'; $RUN_STRING"

echo $RUN_STRING
}

__CHECK_ENV_REQUIRED() {
[ $CI ] && return 1

echo $SCWRYPT_NAME | grep -q 'scwrypts/logs/' && return 1
echo $SCWRYPT_NAME | grep -q 'scwrypts/environment/' && return 1
echo $SCRIPT | grep -q 'zsh/scwrypts/logs' && return 1

return 0
}

__VALIDATE_UPSTREAM_TIMELINE() {
STATUS "on '$ENV_NAME'; checking diff against origin/main"
__STATUS "on '$ENV_NAME'; checking diff against origin/main"

git fetch --quiet origin main
local SYNC_STATUS=$?
@@ -296,14 +243,14 @@ __VALIDATE_UPSTREAM_TIMELINE() {
local DIFF_STATUS=$?

[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
SUCCESS 'up-to-date with origin/main'
__SUCCESS 'up-to-date with origin/main'
} || {
WARNING
[[ $SYNC_STATUS -ne 0 ]] && WARNING 'unable to synchronize with origin/main'
[[ $DIFF_STATUS -ne 0 ]] && WARNING 'your branch differs from origin/main (diff listed above)'
WARNING
__WARNING
[[ $SYNC_STATUS -ne 0 ]] && __WARNING 'unable to synchronize with origin/main'
[[ $DIFF_STATUS -ne 0 ]] && __WARNING 'your branch differs from origin/main (diff listed above)'
__WARNING

yN 'continue?' || return 1
__yN 'continue?' || return 1
}
}
@@ -315,7 +262,7 @@ __GET_LOGFILE() {
|| [[ $SCRIPT =~ interactive ]] \
&& return 0

echo "$SCWRYPTS_LOG_PATH/$(echo $GROUP/$TYPE/$NAME | sed 's/^\.\///; s/\//\%/g').log"
echo "$SCWRYPTS_LOG_PATH/$(echo $SCRIPT | sed 's/^\.\///; s/\//\%/g').log"
}

#####################################################################

@@ -1,32 +1,26 @@
NO_EXPORT_CONFIG=1 source "${0:a:h}/zsh/lib/import.driver.zsh" || return 42

DONT_EXIT=1 source ${0:a:h}/zsh/common.zsh
#####################################################################
SCWRYPTS__ZSH_PLUGIN() {
local SCWRYPT_SELECTION=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS | FZF 'select a script' --header-lines 1)
local NAME
local TYPE
local GROUP
__SCWRYPTS() {
local SCRIPT=$(__GET_AVAILABLE_SCRIPTS | __FZF 'select a script')
zle clear-command-line
[ ! $SCWRYPT_SELECTION ] && { zle accept-line; return 0; }

SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION
[ ! $SCRIPT ] && { zle accept-line; return 0; }

which scwrypts >/dev/null 2>&1\
&& RBUFFER="scwrypts" || RBUFFER="$SCWRYPTS_ROOT/scwrypts"

RBUFFER+=" --name $NAME --group $GROUP --type $TYPE"
RBUFFER+=" $SCRIPT"
zle accept-line
}

zle -N scwrypts SCWRYPTS__ZSH_PLUGIN
zle -N scwrypts __SCWRYPTS
bindkey $SCWRYPTS_SHORTCUT scwrypts

#####################################################################
SCWRYPTS__ZSH_PLUGIN_ENV() {
__SCWRYPTS_ENV() {
local RESET='reset'
local SELECTED=$(\
{ [ $SCWRYPTS_ENV ] && echo $RESET; SCWRYPTS__GET_ENV_NAMES; } \
| FZF 'select an environment' \
{ [ $SCWRYPTS_ENV ] && echo $RESET; __GET_ENV_NAMES; } \
| __FZF 'select an environment' \
)

zle clear-command-line
@@ -38,5 +32,5 @@ SCWRYPTS__ZSH_PLUGIN_ENV() {
zle accept-line
}

zle -N scwrypts-setenv SCWRYPTS__ZSH_PLUGIN_ENV
zle -N scwrypts-setenv __SCWRYPTS_ENV
bindkey $SCWRYPTS_ENV_SHORTCUT scwrypts-setenv

13 zsh/aws/common.zsh Normal file
@@ -0,0 +1,13 @@
_DEPENDENCIES+=(
    aws
    jq
)
_REQUIRED_ENV+=(
    AWS_ACCOUNT
    AWS_PROFILE
    AWS_REGION
)
source ${0:a:h}/../common.zsh
#####################################################################

_AWS() { aws --profile $AWS_PROFILE --region $AWS_REGION --output json $@; }

6 zsh/aws/ecr/common.zsh Normal file
@@ -0,0 +1,6 @@
_DEPENDENCIES+=(
    docker
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################

14 zsh/aws/ecr/login Executable file
@@ -0,0 +1,14 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

__STATUS "performing AWS ECR docker login"

_AWS ecr get-login-password | docker login \
    --username AWS \
    --password-stdin \
    "$AWS_ACCOUNT.dkr.ecr.$AWS_REGION.amazonaws.com" \
    && __SUCCESS "logged in to 'AWS:$AWS_ACCOUNT:$AWS_REGION'" \
    || __FAIL 1 "unable to login to '$AWS_ACCOUNT' in '$AWS_REGION'"

6 zsh/aws/efs/common.zsh Normal file
@@ -0,0 +1,6 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=(
    AWS__EFS__LOCAL_MOUNT_POINT
)
source ${0:a:h}/../common.zsh
#####################################################################

@ -1,40 +1,37 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=(jq)
|
||||
REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)
|
||||
|
||||
use cloud/aws/cli
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
EFS_CONNECT() {
|
||||
GETSUDO || exit 1
|
||||
_EFS_CONNECT() {
|
||||
__GETSUDO || exit 1
|
||||
[ ! -d $AWS__EFS__LOCAL_MOUNT_POINT ] && {
|
||||
sudo mkdir $AWS__EFS__LOCAL_MOUNT_POINT \
|
||||
&& STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'"
|
||||
&& __STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'"
|
||||
}
|
||||
|
||||
local FS_ID=$(\
|
||||
AWS efs describe-file-systems \
|
||||
_AWS efs describe-file-systems \
|
||||
| jq -r '.[] | .[] | .FileSystemId' \
|
||||
| FZF 'select a filesystem to mount' \
|
||||
| __FZF 'select a filesystem to mount' \
|
||||
)
|
||||
[ ! $FS_ID ] && ABORT
|
||||
[ ! $FS_ID ] && __ABORT
|
||||
|
||||
local MOUNT_POINT="$AWS__EFS__LOCAL_MOUNT_POINT/$FS_ID"
|
||||
[ -d "$MOUNT_POINT" ] && sudo rmdir "$MOUNT_POINT" >/dev/null 2>&1
|
||||
[ -d "$MOUNT_POINT" ] && {
|
||||
STATUS "$FS_ID is already mounted"
|
||||
__STATUS "$FS_ID is already mounted"
|
||||
exit 0
|
||||
}
|
||||
|
||||
local MOUNT_TARGETS=$(AWS efs describe-mount-targets --file-system-id $FS_ID)
|
||||
local MOUNT_TARGETS=$(_AWS efs describe-mount-targets --file-system-id $FS_ID)
|
||||
local ZONE=$(\
|
||||
echo $MOUNT_TARGETS \
|
||||
| jq -r '.[] | .[] | .AvailabilityZoneName' \
|
||||
| sort -u | FZF 'select availability zone'\
|
||||
| sort -u | __FZF 'select availability zone'\
|
||||
)
|
||||
[ ! $ZONE ] && ABORT
|
||||
[ ! $ZONE ] && __ABORT
|
||||
|
||||
local MOUNT_IP=$(\
|
||||
echo $MOUNT_TARGETS \
|
||||
@ -42,15 +39,15 @@ EFS_CONNECT() {
|
||||
| head -n1 \
|
||||
)
|
||||
|
||||
SUCCESS 'ready to mount!'
|
||||
REMINDER 'for private file-systems, you must be connected to the appropriate VPN'
|
||||
__SUCCESS 'ready to mount!'
|
||||
__REMINDER 'your device must be connected to the appropriate VPN'
|
||||
|
||||
STATUS "file system id : $FS_ID"
|
||||
STATUS "availability zone : $ZONE"
|
||||
STATUS "file system ip : $MOUNT_IP"
|
||||
STATUS "local mount point : $MOUNT_POINT"
|
||||
__STATUS "file system id : $FS_ID"
|
||||
__STATUS "availability zone : $ZONE"
|
||||
__STATUS "file system ip : $MOUNT_IP"
|
||||
__STATUS "local mount point : $MOUNT_POINT"
|
||||
|
||||
Yn 'proceed?' || ABORT
|
||||
__Yn 'proceed?' || __ABORT
|
||||
|
||||
sudo mkdir $MOUNT_POINT \
|
||||
&& sudo mount \
|
||||
@ -58,12 +55,12 @@ EFS_CONNECT() {
|
||||
-o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport \
|
||||
$MOUNT_IP:/ \
|
||||
"$MOUNT_POINT" \
|
||||
&& SUCCESS "mounted at '$MOUNT_POINT'" \
|
||||
&& __SUCCESS "mounted at '$MOUNT_POINT'" \
|
||||
|| {
|
||||
sudo rmdir $MOUNT_POINT >/dev/null 2>&1
|
||||
FAIL 2 "unable to mount '$FS_ID'"
|
||||
__FAIL 2 "unable to mount '$FS_ID'"
|
||||
}
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
EFS_CONNECT $@
|
||||
_EFS_CONNECT
|
34
zsh/aws/efs/unmount
Executable file
34
zsh/aws/efs/unmount
Executable file
@ -0,0 +1,34 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_EFS_DISCONNECT() {
|
||||
[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
|
||||
__STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
|
||||
[ ! $MOUNTED ] && {
|
||||
__STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
__GETSUDO || exit 1
|
||||
|
||||
|
||||
local SELECTED=$(echo $MOUNTED | __FZF 'select a file system to unmount')
|
||||
[ ! $SELECTED ] && __ABORT
|
||||
|
||||
local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
|
||||
__STATUS "unmounting '$SELECTED'"
|
||||
sudo umount $EFS >/dev/null 2>&1
|
||||
sudo rmdir $EFS \
|
||||
&& __SUCCESS "done" \
|
||||
|| __FAIL 2 "failed to unmount '$EFS'"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_EFS_DISCONNECT
|
6	zsh/aws/eks/common.zsh	Normal file
@ -0,0 +1,6 @@
_DEPENDENCIES+=(
	kubectl
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
19	zsh/aws/eks/login	Executable file
@ -0,0 +1,19 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

__STATUS "performing AWS EKS cluster login"

CLUSTER_NAME=$(\
	_AWS eks list-clusters \
		| jq -r '.[] | .[]' \
		| __FZF 'select a cluster'
)
[ ! $CLUSTER_NAME ] && __ABORT

__STATUS "updating kubeconfig for '$CLUSTER_NAME'"
_AWS eks update-kubeconfig --name $CLUSTER_NAME \
	&& __SUCCESS "kubeconfig updated with '$CLUSTER_NAME'" \
	|| __ERROR "failed to update kubeconfig; do you have permissions to access '$CLUSTER_NAME'?"
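Once update-kubeconfig succeeds, the selected cluster becomes the active kubectl context. A quick verification sketch using standard kubectl commands (not part of this diff):

kubectl config current-context   # shows the ARN of the cluster just selected
kubectl get nodes                # confirms the new credentials actually work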
@ -1,48 +1,9 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
DEPENDENCIES+=(
|
||||
docker
|
||||
)
|
||||
|
||||
REQUIRED_ENV+=(
|
||||
AWS_ACCOUNT
|
||||
AWS_REGION
|
||||
)
|
||||
|
||||
use cloud/aws/cli
|
||||
|
||||
#####################################################################
|
||||
|
||||
RDS__SELECT_DATABASE() {
|
||||
local DATABASES=$(_RDS__GET_AVAILABLE_DATABASES)
|
||||
[ ! $DATABASES ] && FAIL 1 'no databases available'
|
||||
|
||||
local ID=$(\
|
||||
echo $DATABASES | jq -r '.instance + " @ " + .cluster' \
|
||||
| FZF 'select a database (instance@cluster)' \
|
||||
)
|
||||
[ ! $ID ] && ABORT
|
||||
|
||||
local INSTANCE=$(echo $ID | sed 's/ @ .*$//')
|
||||
local CLUSTER=$(echo $ID | sed 's/^.* @ //')
|
||||
|
||||
echo $DATABASES | jq "select (.instance == \"$INSTANCE\" and .cluster == \"$CLUSTER\")"
|
||||
}
|
||||
|
||||
_RDS__GET_AVAILABLE_DATABASES() {
|
||||
AWS rds describe-db-instances \
|
||||
| jq -r '.[] | .[] | {
|
||||
instance: .DBInstanceIdentifier,
|
||||
cluster: .DBClusterIdentifier,
|
||||
type: .Engine,
|
||||
host: .Endpoint.Address,
|
||||
port: .Endpoint.Port,
|
||||
user: .MasterUsername,
|
||||
database: .DBName
|
||||
}'
|
||||
}
|
||||
|
||||
RDS__GET_DATABASE_CREDENTIALS() {
|
||||
GET_DATABASE_CREDENTIALS() {
|
||||
local PRINT_PASSWORD=0
|
||||
local ERRORS=0
|
||||
|
||||
@ -51,22 +12,22 @@ RDS__GET_DATABASE_CREDENTIALS() {
|
||||
case $1 in
|
||||
--print-password ) PRINT_PASSWORD=1 ;;
|
||||
* )
|
||||
WARNING "unrecognized argument $1"
|
||||
__WARNING "unrecognized argument $1"
|
||||
ERRORS+=1
|
||||
;;
|
||||
esac
|
||||
shift 1
|
||||
done
|
||||
|
||||
CHECK_ERRORS
|
||||
__ERROR_CHECK
|
||||
|
||||
##########################################
|
||||
|
||||
local DATABASE=$(RDS__SELECT_DATABASE)
|
||||
[ ! $DATABASE ] && ABORT
|
||||
local DATABASE=$(SELECT_DATABASE)
|
||||
[ ! $DATABASE ] && __ABORT
|
||||
|
||||
DB_HOST="$(echo $DATABASE | jq -r '.host')"
|
||||
[ ! $DB_HOST ] && { ERROR 'unable to find host'; return 2; }
|
||||
[ ! $DB_HOST ] && { __ERROR 'unable to find host'; return 2; }
|
||||
|
||||
DB_PORT="$(echo $DATABASE | jq -r '.port')"
|
||||
[ ! $DB_PORT ] && DB_PORT=5432
|
||||
@ -76,37 +37,37 @@ RDS__GET_DATABASE_CREDENTIALS() {
|
||||
|
||||
local AUTH_METHOD=$(\
|
||||
echo "iam\nsecretsmanager\nuser-input" \
|
||||
| FZF 'select an authentication method' \
|
||||
| __FZF 'select an authentication method' \
|
||||
)
|
||||
[ ! $AUTH_METHOD ] && ABORT
|
||||
[ ! $AUTH_METHOD ] && __ABORT
|
||||
|
||||
case $AUTH_METHOD in
|
||||
iam ) _RDS_AUTH__iam ;;
|
||||
secretsmanager ) _RDS_AUTH__secretsmanager ;;
|
||||
user-input ) _RDS_AUTH__userinput ;;
|
||||
iam ) GET_AUTH__IAM ;;
|
||||
secretsmanager ) GET_AUTH__SECRETSMANAGER ;;
|
||||
user-input ) GET_AUTH__USER_INPUT ;;
|
||||
esac
|
||||
|
||||
STATUS
|
||||
STATUS "host : $DB_HOST"
|
||||
STATUS "type : $DB_TYPE"
|
||||
STATUS "port : $DB_PORT"
|
||||
STATUS "database : $DB_NAME"
|
||||
STATUS "username : $DB_USER"
|
||||
[[ $PRINT_PASSWORD -eq 1 ]] && STATUS "password : $DB_PASS"
|
||||
STATUS
|
||||
__STATUS
|
||||
__STATUS "host : $DB_HOST"
|
||||
__STATUS "type : $DB_TYPE"
|
||||
__STATUS "port : $DB_PORT"
|
||||
__STATUS "database : $DB_NAME"
|
||||
__STATUS "username : $DB_USER"
|
||||
[[ $PRINT_PASSWORD -eq 1 ]] && __STATUS "password : $DB_PASS"
|
||||
__STATUS
|
||||
}
|
||||
|
||||
_RDS_AUTH__iam() {
|
||||
GET_AUTH__IAM() {
|
||||
DB_PASS=$(\
|
||||
AWS rds generate-db-auth-token \
|
||||
_AWS rds generate-db-auth-token \
|
||||
--hostname $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--username $DB_USER \
|
||||
)
|
||||
}
|
||||
|
||||
_RDS_AUTH__secretsmanager() {
|
||||
local CREDENTIALS=$(_RDS__GET_SECRETSMANAGER_CREDENTIALS)
|
||||
GET_AUTH__SECRETSMANAGER() {
|
||||
local CREDENTIALS=$(GET_SECRETSMANAGER_CREDENTIALS)
|
||||
echo $CREDENTIALS | jq -e '.pass' >/dev/null 2>&1 \
|
||||
&& DB_PASS="'$(echo $CREDENTIALS | jq -r '.pass' | sed "s/'/'\"'\"'/g")'"
|
||||
|
||||
@ -126,15 +87,44 @@ _RDS_AUTH__secretsmanager() {
|
||||
&& DB_NAME=$(echo $CREDENTIALS | jq -r '.dbname')
|
||||
}
|
||||
|
||||
_RDS__GET_SECRETSMANAGER_CREDENTIALS() {
|
||||
GET_SECRETSMANAGER_CREDENTIALS() {
|
||||
local ID=$(\
|
||||
AWS secretsmanager list-secrets \
|
||||
_AWS secretsmanager list-secrets \
|
||||
| jq -r '.[] | .[] | .Name' \
|
||||
| FZF 'select a secret' \
|
||||
| __FZF 'select a secret' \
|
||||
)
|
||||
[ ! $ID ] && return 1
|
||||
|
||||
AWS secretsmanager get-secret-value --secret-id "$ID" \
|
||||
_AWS secretsmanager get-secret-value --secret-id "$ID" \
|
||||
| jq -r '.SecretString' | jq
|
||||
}
|
||||
|
||||
SELECT_DATABASE() {
|
||||
local DATABASES=$(GET_AVAILABLE_DATABASES)
|
||||
[ ! $DATABASES ] && __FAIL 1 'no databases available'
|
||||
|
||||
local ID=$(\
|
||||
echo $DATABASES | jq -r '.instance + " @ " + .cluster' \
|
||||
| __FZF 'select a database (instance@cluster)' \
|
||||
)
|
||||
[ ! $ID ] && __ABORT
|
||||
|
||||
local INSTANCE=$(echo $ID | sed 's/ @ .*$//')
|
||||
local CLUSTER=$(echo $ID | sed 's/^.* @ //')
|
||||
|
||||
echo $DATABASES | jq "select (.instance == \"$INSTANCE\" and .cluster == \"$CLUSTER\")"
|
||||
}
|
||||
|
||||
GET_AVAILABLE_DATABASES() {
|
||||
_AWS rds describe-db-instances \
|
||||
| jq -r '.[] | .[] | {
|
||||
instance: .DBInstanceIdentifier,
|
||||
cluster: .DBClusterIdentifier,
|
||||
type: .Engine,
|
||||
host: .Endpoint.Address,
|
||||
port: .Endpoint.Port,
|
||||
user: .MasterUsername,
|
||||
database: .DBName
|
||||
}'
|
||||
}
|
||||
|
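The credential helpers above intentionally do not declare DB_HOST, DB_PORT, DB_NAME, DB_USER, or DB_PASS as local, so the caller owns those variables after GET_DATABASE_CREDENTIALS returns. A minimal calling sketch, assuming this common.zsh has already been sourced (the caller name is hypothetical):

RDS_CALLER_EXAMPLE() {   # hypothetical caller, mirrors the pattern used by the scripts below
	local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
	GET_DATABASE_CREDENTIALS --print-password || return 1
	__STATUS "connecting to $DB_USER@$DB_HOST:$DB_PORT/$DB_NAME"
}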
@ -1,18 +1,14 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
CREATE_BACKUP() {
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
PG_DUMP \
|
||||
__RUN_SCWRYPT 'zsh/db/postgres/pg_dump' -- \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
||||
@ -21,5 +17,6 @@ CREATE_BACKUP() {
|
||||
;
|
||||
}
|
||||
|
||||
|
||||
#####################################################################
|
||||
CREATE_BACKUP $@
|
||||
RDS_INTERACTIVE_LOGIN $@
|
@ -1,18 +1,14 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
POSTGRES__LOGIN_INTERACTIVE \
|
||||
__RUN_SCWRYPT 'zsh/db/interactive/postgres' -- \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
@ -1,18 +1,14 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
LOAD_BACKUP() {
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
PG_RESTORE \
|
||||
__RUN_SCWRYPT 'zsh/db/postgres/pg_restore' -- \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
||||
@ -23,4 +19,4 @@ LOAD_BACKUP() {
|
||||
|
||||
|
||||
#####################################################################
|
||||
LOAD_BACKUP $@
|
||||
RDS_INTERACTIVE_LOGIN $@
|
@ -1,22 +1,21 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=(cli53)
|
||||
REQUIRED_ENV+=(AWS_PROFILE)
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
ROUTE53_BACKUP() {
|
||||
_ROUTE53_BACKUP() {
|
||||
local BACKUP_PATH="$SCWRYPTS_OUTPUT_PATH/$ENV_NAME/aws-dns-backup/$(date '+%Y-%m-%d')"
|
||||
mkdir -p $BACKUP_PATH >/dev/null 2>&1
|
||||
|
||||
local DOMAIN
|
||||
local JOBS=()
|
||||
for DOMAIN in $(ROUTE53_GET_DOMAINS)
|
||||
for DOMAIN in $(_ROUTE53_GET_DOMAINS)
|
||||
do
|
||||
( STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \
|
||||
( __STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \
|
||||
&& cli53 export --profile $AWS_PROFILE $DOMAIN > "$BACKUP_PATH/$DOMAIN.txt" \
|
||||
&& SUCCESS "backed up '$DOMAIN'" \
|
||||
|| ERROR "failed to back up '$DOMAIN'" \
|
||||
&& __SUCCESS "backed up '$DOMAIN'" \
|
||||
|| __ERROR "failed to back up '$DOMAIN'" \
|
||||
) &
|
||||
JOBS+=$!
|
||||
done
|
||||
@ -25,7 +24,7 @@ ROUTE53_BACKUP() {
|
||||
for P in ${JOBS[@]}; do wait $P >/dev/null 2>&1; done
|
||||
}
|
||||
|
||||
ROUTE53_GET_DOMAINS() {
|
||||
_ROUTE53_GET_DOMAINS() {
|
||||
cli53 list --profile $AWS_PROFILE \
|
||||
| awk '{print $2;}' \
|
||||
| sed '1d; s/\.$//'\
|
||||
@ -33,4 +32,4 @@ ROUTE53_GET_DOMAINS() {
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
ROUTE53_BACKUP
|
||||
_ROUTE53_BACKUP
|
6	zsh/aws/route53/common.zsh	Normal file
@ -0,0 +1,6 @@
_DEPENDENCIES+=(
	cli53
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
4	zsh/aws/s3/common.zsh	Normal file
@ -0,0 +1,4 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
30	zsh/aws/s3/media-sync/common.zsh	Normal file
@ -0,0 +1,30 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=(
	AWS__S3__MEDIA_TARGETS
	AWS__S3__MEDIA_BUCKET
)
source ${0:a:h}/../common.zsh
#####################################################################

AWS__S3__MEDIA_TARGETS=($(echo $AWS__S3__MEDIA_TARGETS | sed 's/,/\n/g'))

__SYNC_MEDIA() {
	local ACTION="$1"
	local REMOTE_TARGET="s3://$AWS__S3__MEDIA_BUCKET/$2"
	local LOCAL_TARGET="$HOME/$2"

	local A B
	case $ACTION in
		push ) A="$LOCAL_TARGET"; B="$REMOTE_TARGET" ;;
		pull ) A="$REMOTE_TARGET"; B="$LOCAL_TARGET" ;;

		* ) __ERROR "unknown action '$1'"; return 1 ;;
	esac

	local FLAGS=(${@:3})

	__STATUS "${ACTION}ing $2"
	_AWS s3 sync $A $B $FLAGS \
		&& __SUCCESS "$2 up-to-date" \
		|| { __ERROR "unable to sync $2 (see above)"; return 1; }
}
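__SYNC_MEDIA takes an action, a target path that is resolved relative to both $HOME and the bucket root, and any extra flags, which are forwarded verbatim to aws s3 sync. A hedged usage sketch ('Pictures' is an example target, not a project default):

__SYNC_MEDIA push Pictures --dryrun   # preview what would upload from ~/Pictures (--dryrun is a standard aws s3 sync flag)
__SYNC_MEDIA pull Pictures            # download s3://$AWS__S3__MEDIA_BUCKET/Pictures into ~/Pictures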
27
zsh/aws/s3/media-sync/pull
Executable file
27
zsh/aws/s3/media-sync/pull
Executable file
@ -0,0 +1,27 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__PULL_ALL_MEDIA() {
|
||||
local FLAGS=($@)
|
||||
local FAILED_COUNT=0
|
||||
|
||||
__STATUS 'starting media download from s3'
|
||||
|
||||
local TARGET
|
||||
for TARGET in $AWS__S3__MEDIA_TARGETS
|
||||
do
|
||||
__SYNC_MEDIA pull $TARGET $FLAGS || ((FAILED_COUNT+=1))
|
||||
done
|
||||
|
||||
[[ $FAILED_COUNT -eq 0 ]] \
|
||||
&& __SUCCESS 'local media files now up-to-date' \
|
||||
|| __FAIL $FAILED_COUNT 'unable to download one or more targets' \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
|
||||
__PULL_ALL_MEDIA $@
|
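Any arguments passed to this script are forwarded through __SYNC_MEDIA to aws s3 sync for every configured target. A sketch of invoking it directly by its repo-relative path, assuming the required environment is already exported:

./zsh/aws/s3/media-sync/pull --dryrun   # dry-run the download for every target in AWS__S3__MEDIA_TARGETS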
27
zsh/aws/s3/media-sync/push
Executable file
27
zsh/aws/s3/media-sync/push
Executable file
@ -0,0 +1,27 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__PUSH_ALL_MEDIA() {
|
||||
local FLAGS=($@)
|
||||
local FAILED_COUNT=0
|
||||
|
||||
__STATUS 'starting media upload to s3'
|
||||
|
||||
local TARGET
|
||||
for TARGET in $AWS__S3__MEDIA_TARGETS
|
||||
do
|
||||
__SYNC_MEDIA push $TARGET $FLAGS || ((FAILED_COUNT+=1))
|
||||
done
|
||||
|
||||
[[ $FAILED_COUNT -eq 0 ]] \
|
||||
&& __SUCCESS 's3 media files now up-to-date' \
|
||||
|| __FAIL $FAILED_COUNT 'unable to upload one or more targets' \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
|
||||
__PUSH_ALL_MEDIA $@
|
@ -1,10 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/ecr
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
ECR_LOGIN $@
|
@ -1,37 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=(jq)
|
||||
REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)
|
||||
|
||||
use cloud/aws/cli
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
EFS_DISCONNECT() {
|
||||
[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
|
||||
STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
|
||||
[ ! $MOUNTED ] && {
|
||||
STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
GETSUDO || exit 1
|
||||
|
||||
|
||||
local SELECTED=$(echo $MOUNTED | FZF 'select a file system to unmount')
|
||||
[ ! $SELECTED ] && ABORT
|
||||
|
||||
local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
|
||||
STATUS "unmounting '$SELECTED'"
|
||||
sudo umount $EFS >/dev/null 2>&1
|
||||
sudo rmdir $EFS \
|
||||
&& SUCCESS "done" \
|
||||
|| FAIL 2 "failed to unmount '$EFS'"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
EFS_DISCONNECT $@
|
@ -1,10 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/eks
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
EKS_CLUSTER_LOGIN $@
|
@ -1,10 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/media-sync
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
MEDIA_SYNC__PULL $@
|
@ -1,10 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/media-sync
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
MEDIA_SYNC__PUSH $@
|
31
zsh/common.zsh
Normal file
31
zsh/common.zsh
Normal file
@ -0,0 +1,31 @@
|
||||
#####################################################################
|
||||
|
||||
source ${0:a:h}/../global/common.zsh
|
||||
source ${0:a:h}/utils/utils.module.zsh \
|
||||
|| { [ $DONT_EXIT ] && return 1 || exit 1; }
|
||||
|
||||
#####################################################################
|
||||
|
||||
__GET_ENV_FILES() { ls $SCWRYPTS_CONFIG_PATH/env | sort -r }
|
||||
[ ! "$(__GET_ENV_FILES)" ] && {
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/dev"
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/local"
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/prod"
|
||||
}
|
||||
|
||||
__GET_ENV_NAMES() { __GET_ENV_FILES | sed 's/.*\///'; }
|
||||
__GET_ENV_FILE() { echo "$SCWRYPTS_CONFIG_PATH/env/$1"; }
|
||||
|
||||
__SELECT_OR_CREATE_ENV() { __GET_ENV_NAMES | __FZF_TAIL 'select/create an environment'; }
|
||||
__SELECT_ENV() { __GET_ENV_NAMES | __FZF 'select an environment'; }
|
||||
|
||||
#####################################################################
|
||||
|
||||
__GET_AVAILABLE_SCRIPTS() {
|
||||
cd $SCWRYPTS_ROOT;
|
||||
find . -mindepth 2 -type f -executable \
|
||||
| grep -v '\.git' \
|
||||
| grep -v 'node_modules' \
|
||||
| sed 's/^\.\///; s/\.[^.]*$//' \
|
||||
;
|
||||
}
|
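Taken together, the helpers above are enough to pick and load an environment file interactively. A small sketch of how they compose inside a sourced zsh context (names come from the functions defined above):

ENV_NAME=$(__SELECT_ENV)
[ ! $ENV_NAME ] && return 1
source "$(__GET_ENV_FILE $ENV_NAME)"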
@ -1,18 +1,14 @@
|
||||
#####################################################################
|
||||
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
#####################################################################
|
||||
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
DEFAULT_CONFIG="${0:a:h}/default.conf.zsh"
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
SAFE_SYMLINKS=1
|
||||
|
||||
# in case dotfiles.zsh is sourced; allows users to provide initial config
|
||||
# in case dotfiles.zsh is sourced... allow user to provide initial config ;)
|
||||
[ ! $CONFIG__USER_SETTINGS ] \
|
||||
&& CONFIG__USER_SETTINGS="$SCWRYPTS_CONFIG_PATH/dotfiles.zsh"
|
||||
|
||||
[ ! -f "$CONFIG__USER_SETTINGS" ] && cp "$DEFAULT_CONFIG" "$CONFIG__USER_SETTINGS"
|
||||
|
||||
source "$CONFIG__USER_SETTINGS"
|
||||
source $CONFIG__USER_SETTINGS
|
6	zsh/config/settings	Executable file
@ -0,0 +1,6 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__EDIT "$CONFIG__USER_SETTINGS"
@ -1,10 +1,7 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use system/config
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
SETUP_SYMLINKS() {
|
||||
@ -15,10 +12,10 @@ SETUP_SYMLINKS() {
|
||||
}
|
||||
|
||||
SETUP_SYMLINK() {
|
||||
[ ! $2 ] && FAIL 1 'must provide SOURCE_CONFIG and TARGET_CONFIG'
|
||||
[ ! $2 ] && __FAIL 1 'must provide SOURCE_CONFIG and TARGET_CONFIG'
|
||||
|
||||
local SOURCE_CONFIG="$1"
|
||||
[ ! -f "$SOURCE_CONFIG" ] && [ ! -d "$SOURCE_CONFIG" ] && FAIL 2 "no such file or directory '$SOURCE_CONFIG'"
|
||||
[ ! -f "$SOURCE_CONFIG" ] && [ ! -d "$SOURCE_CONFIG" ] && __FAIL 2 "no such file or directory '$SOURCE_CONFIG'"
|
||||
|
||||
local TARGET_CONFIG="$HOME/.config/$2"
|
||||
|
||||
@ -30,8 +27,8 @@ SETUP_SYMLINK() {
|
||||
rm "$TARGET_CONFIG" >/dev/null 2>&1
|
||||
|
||||
ln -s "$SOURCE_CONFIG" "$TARGET_CONFIG" \
|
||||
&& SUCCESS "successfully linked '$(basename $(dirname $TARGET_CONFIG))/$(basename $TARGET_CONFIG)'" \
|
||||
|| FAIL 3 "failed to create link '$TARGET_CONFIG'" \
|
||||
&& __SUCCESS "successfully linked '$(basename $(dirname $TARGET_CONFIG))/$(basename $TARGET_CONFIG)'" \
|
||||
|| __FAIL 3 "failed to create link '$TARGET_CONFIG'" \
|
||||
;
|
||||
}
|
||||
|
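SETUP_SYMLINK expects a source path that exists on disk and a target path relative to ~/.config. A hedged example call (the i3 paths are illustrative only, not an assertion about this repository's layout):

SETUP_SYMLINK "$SCWRYPTS_ROOT/config/i3" "i3/config"   # would link ~/.config/i3/config to the repo copy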
@ -1,26 +1,25 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=(tic)
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use system/config
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=(
|
||||
tic
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
SETUP_TERMINFO() {
|
||||
[ ! $TERMINFO_PATH ] && return 0
|
||||
[ ! -d $TERMINFO_PATH ] && FAIL 1 "TERMINFO_PATH='$TERMINFO_PATH' does not exist"
|
||||
[ ! -d $TERMINFO_PATH ] && __FAIL 1 "TERMINFO_PATH='$TERMINFO_PATH' does not exist"
|
||||
|
||||
local ERRORS=0
|
||||
for TERMINFO in $(find $TERMINFO_PATH -type f)
|
||||
do
|
||||
tic -x $TERMINFO >/dev/null 2>&1 \
|
||||
&& SUCCESS "added '$(basename $TERMINFO)'" \
|
||||
|| ERROR "failed to add '$(basename $TERMINFO)'" \
|
||||
&& __SUCCESS "added '$(basename $TERMINFO)'" \
|
||||
|| __ERROR "failed to add '$(basename $TERMINFO)'" \
|
||||
;
|
||||
done
|
||||
|
||||
CHECK_ERRORS
|
||||
__ERROR_CHECK
|
||||
}
|
||||
|
||||
#####################################################################
|
10	zsh/config/update	Executable file
@ -0,0 +1,10 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################

__STATUS 'updating all config files and links'
__RUN_SCWRYPT zsh/config/symlinks || exit 1
__RUN_SCWRYPT zsh/config/terminfo || exit 2
__SUCCESS 'finished updating config files and links'
24	zsh/db/common.zsh	Normal file
@ -0,0 +1,24 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################


GET_POSTGRES_LOGIN_ARGS() {
	while [[ $# -gt 0 ]]
	do
		case $1 in
			--host | -h ) _HOST="$2"; shift 2 ;;
			--name | -d ) _NAME="$2"; shift 2 ;;
			--pass | -w ) _PASS="$2"; shift 2 ;;
			--port | -p ) _PORT="$2"; shift 2 ;;
			--user | -U ) _USER="$2"; shift 2 ;;
			* ) shift 1 ;;
		esac
	done

	[ ! $_HOST ] && _HOST=127.0.0.1
	[ ! $_NAME ] && _NAME=postgres
	[ ! $_PORT ] && _PORT=5432
	[ ! $_USER ] && _USER=postgres
}
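Unrecognized flags are silently skipped and anything not supplied falls back to a local-postgres default. A sketch of the resulting values (the host and user values are placeholders):

# inside a calling function:
local _HOST _NAME _PASS _PORT _USER
GET_POSTGRES_LOGIN_ARGS --host db.example.com --user admin
# => _HOST=db.example.com  _USER=admin  _NAME=postgres  _PORT=5432  (_PASS left unset)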
4	zsh/db/interactive/common.zsh	Normal file
@ -0,0 +1,4 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
29
zsh/db/interactive/postgres
Executable file
29
zsh/db/interactive/postgres
Executable file
@ -0,0 +1,29 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
pgcli
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_LOGIN_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
__STATUS "performing login : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "working directory : $DATA_DIR"
|
||||
|
||||
PGPASSWORD="$_PASS" pgcli \
|
||||
--host $_HOST \
|
||||
--port $_PORT \
|
||||
--user $_USER \
|
||||
--dbname $_NAME \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_LOGIN_POSTGRES $@
|
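The flags accepted here are exactly those parsed by GET_POSTGRES_LOGIN_ARGS above. A hedged example of launching the pgcli session directly by its repo-relative path (all values are placeholders):

./zsh/db/interactive/postgres --host 127.0.0.1 --port 5432 --user postgres --name postgres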
4	zsh/db/postgres/common.zsh	Normal file
@ -0,0 +1,4 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
@ -1,9 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
POSTGRES__LOGIN_INTERACTIVE $@
|
@ -1,9 +1,44 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=(
|
||||
pg_dump
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
PG_DUMP $@
|
||||
|
||||
BACKUP_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_dump"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
local OUTPUT_FILE="$DATA_DIR/$_NAME.dump"
|
||||
[ -f $OUTPUT_FILE ] && {
|
||||
local BACKUP_COUNT=$(ls "$DATA_DIR/$_NAME."*".dump" | wc -l)
|
||||
ls "$DATA_DIR/$_NAME."*".dump"
|
||||
|
||||
__INFO "discovered previous dump for '$_HOST/$_NAME'"
|
||||
__INFO "backing up previous dump to '$_NAME.$BACKUP_COUNT.dump'"
|
||||
|
||||
mv "$OUTPUT_FILE" "$DATA_DIR/$_NAME.$BACKUP_COUNT.dump"
|
||||
}
|
||||
|
||||
__STATUS "making backup of : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "output file : $OUTPUT_FILE"
|
||||
|
||||
PGPASSWORD="$_PASS" pg_dump \
|
||||
--verbose \
|
||||
--format custom \
|
||||
--host "$_HOST" \
|
||||
--port "$_PORT" \
|
||||
--username "$_USER" \
|
||||
--dbname "$_NAME" \
|
||||
--file "$OUTPUT_FILE" \
|
||||
&& { __SUCCESS "finished backup of '$_HOST/$_NAME'"; __SUCCESS "saved to '$OUTPUT_FILE'"; } \
|
||||
|| { __ERROR "error creating backup for '$_HOST/$_NAME' (see above)"; return 1; }
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
BACKUP_POSTGRES $@
|
||||
|
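Each run writes $_NAME.dump under $SCWRYPTS_DATA_PATH/db/<host>/<name>/pg_dump and rotates any previous dump aside instead of overwriting it. A hedged example invocation by repo-relative path (host, database, and password values are placeholders):

./zsh/db/postgres/pg_dump --host 127.0.0.1 --name myapp --user postgres --pass 'example-password'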
@ -1,9 +1,55 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
_DEPENDENCIES+=(
|
||||
pg_dump
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
PG_RESTORE $@
|
||||
|
||||
BACKUP_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_restore"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
local INPUT_FILE="$DATA_DIR/$_NAME.dump"
|
||||
|
||||
[ ! -f $INPUT_FILE ] && {
|
||||
local DUMP="$(dirname $DATA_DIR)/pg_dump/$_NAME.dump"
|
||||
__STATUS $DUMP
|
||||
ls $DUMP
|
||||
|
||||
[ -f "$DUMP" ] && {
|
||||
__SUCCESS "discovered previous scwrypts dump"
|
||||
__SUCCESS "$DUMP"
|
||||
__Yn 'restore from this backup?' && INPUT_FILE="$DUMP"
|
||||
}
|
||||
|
||||
[ ! -f "$INPUT_FILE" ] && {
|
||||
__STATUS 'place backup in the following location:'
|
||||
__STATUS "$INPUT_FILE"
|
||||
}
|
||||
|
||||
while [ ! -f $INPUT_FILE ]; do sleep 1; done
|
||||
}
|
||||
|
||||
__STATUS "backup file : $DATA_DIR"
|
||||
__STATUS "database : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
|
||||
PGPASSWORD="$_PASS" pg_restore \
|
||||
--verbose \
|
||||
--single-transaction \
|
||||
--format custom \
|
||||
--host "$_HOST" \
|
||||
--port "$_PORT" \
|
||||
--username "$_USER" \
|
||||
--dbname "$_NAME" \
|
||||
"$INPUT_FILE" \
|
||||
&& { __SUCCESS "finished restoring backup for '$_HOST/$_NAME'"; } \
|
||||
|| { __ERROR "error restoring backup for '$_HOST/$_NAME' (see above)"; return 1; }
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
BACKUP_POSTGRES $@
|
||||
|
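Restore looks for <name>.dump under the matching pg_restore data directory, offers to reuse an existing pg_dump backup if one is found, and otherwise waits for the file to appear before running pg_restore. Example invocation sketch (placeholder values):

./zsh/db/postgres/pg_restore --host 127.0.0.1 --name myapp --user postgres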
@ -1,51 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
RUN_SQL_POSTGRES() {
|
||||
local _PASS _ARGS=()
|
||||
POSTGRES__SET_LOGIN_ARGS $@
|
||||
|
||||
local INPUT_FILE="$FILENAME"
|
||||
|
||||
local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
|
||||
[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR
|
||||
|
||||
cd $SQL_DIR
|
||||
|
||||
[[ $(ls "*.sql" 2>&1 | wc -l) -eq 0 ]] && {
|
||||
ERROR "you haven't made any SQL commands yet"
|
||||
REMINDER "add '.sql' files here: '$SQL_DIR/'"
|
||||
return 1
|
||||
}
|
||||
|
||||
[ ! $INPUT_FILE ] && INPUT_FILE=$(FZF 'select a sql file to run')
|
||||
[ ! $INPUT_FILE ] && ABORT
|
||||
|
||||
[ ! -f "$INPUT_FILE" ] && FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"
|
||||
|
||||
STATUS "loading '$INPUT_FILE' preview..."
|
||||
LESS "$INPUT_FILE"
|
||||
|
||||
STATUS "login : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
STATUS "command : '$INPUT_FILE'"
|
||||
|
||||
yN 'run this command?' || ABORT
|
||||
|
||||
STATUS "running '$INPUT_FILE'"
|
||||
|
||||
PSQL < $INPUT_FILE \
|
||||
&& SUCCESS "finished running '$INPUT_FILE'" \
|
||||
|| FAIL 3 "something went wrong running '$INPUT_FILE' (see above)"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
WARNING
|
||||
WARNING 'this function is in a beta state'
|
||||
WARNING
|
||||
RUN_SQL_POSTGRES $@
|
4	zsh/db/run-sql/common.zsh	Normal file
@ -0,0 +1,4 @@
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
72
zsh/db/run-sql/postgres
Executable file
72
zsh/db/run-sql/postgres
Executable file
@ -0,0 +1,72 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
psql
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_RUN_SQL_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER INPUT_FILE
|
||||
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case $1 in
|
||||
--host | -h ) _HOST="$2"; shift 2 ;;
|
||||
--name | -d ) _NAME="$2"; shift 2 ;;
|
||||
--pass | -w ) _PASS="$2"; shift 2 ;;
|
||||
--port | -p ) _PORT="$2"; shift 2 ;;
|
||||
--user | -U ) _USER="$2"; shift 2 ;;
|
||||
--file | -i ) INPUT_FILE="$2"; shift 2 ;;
|
||||
* ) shift 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
[ ! $_HOST ] && _HOST=127.0.0.1
|
||||
[ ! $_NAME ] && _NAME=postgres
|
||||
[ ! $_PORT ] && _PORT=5432
|
||||
[ ! $_USER ] && _USER=postgres
|
||||
|
||||
local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
|
||||
[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR
|
||||
cd $SQL_DIR
|
||||
|
||||
[[ $(ls "*.sql" 2>&1 | wc -l) -eq 0 ]] && {
|
||||
__ERROR "you haven't made any SQL commands yet"
|
||||
__REMINDER "add '.sql' files here: '$SQL_DIR/'"
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ ! $INPUT_FILE ] && INPUT_FILE=$(\
|
||||
__FZF 'select a sql file to run'
|
||||
)
|
||||
[ ! $INPUT_FILE ] && __ABORT
|
||||
|
||||
[ ! -f $INPUT_FILE ] && {
|
||||
__FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"
|
||||
}
|
||||
|
||||
__STATUS "loading $INPUT_FILE preview..."
|
||||
_LESS $INPUT_FILE
|
||||
|
||||
__STATUS "login : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "command : ./$INPUT_FILE"
|
||||
|
||||
__yN 'run this command?' || __ABORT
|
||||
|
||||
__STATUS "running './$INPUT_FILE'"
|
||||
PGPASSWORD="$_PASS" psql \
|
||||
-h $_HOST \
|
||||
-p $_PORT \
|
||||
-U $_USER \
|
||||
-d $_NAME \
|
||||
< $INPUT_FILE \
|
||||
&& __SUCCESS "finished running './$INPUT_FILE'" \
|
||||
|| __FAIL 3 "something went wrong running './$INPUT_FILE' (see above)"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
__WARNING
|
||||
__WARNING 'this function is in a beta state'
|
||||
__WARNING
|
||||
_RUN_SQL_POSTGRES $@
|
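The script only considers *.sql files inside $SCWRYPTS_DATA_PATH/sql, previews the selected file, and asks for confirmation before piping it to psql. Example invocation sketch (the file name and connection values are placeholders):

./zsh/db/run-sql/postgres --host 127.0.0.1 --name myapp --file cleanup-sessions.sql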
@ -1,19 +0,0 @@
|
||||
#!/bin/zsh
|
||||
DEPENDENCIES+=(docker)
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
DOCKER_CLEAN() {
|
||||
WARNING 'this will prune all docker resources from the current machine'
|
||||
WARNING 'pruned resources are PERMANENTLY DELETED'
|
||||
yN 'continue?' || return 1
|
||||
|
||||
SUCCESS "CONTAINER : $(docker container prune -f 2>/dev/null | tail -n 1)"
|
||||
SUCCESS "IMAGE : $(docker image prune -f 2>/dev/null | tail -n 1)"
|
||||
SUCCESS "VOLUME : $(docker volume prune -f 2>/dev/null | tail -n 1)"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
DOCKER_CLEAN $@
|
6	zsh/git/common.zsh	Normal file
@ -0,0 +1,6 @@
_DEPENDENCIES+=(
	git
)
_REQUIRED_ENV+=()
source ${0:a:h}/../common.zsh
#####################################################################
6	zsh/git/package/build	Executable file
@ -0,0 +1,6 @@
#!/bin/zsh
_DEPENDENCIES+=()
_REQUIRED_ENV+=()
source ${0:a:h}/common.zsh
#####################################################################
__RUN_SCWRYPT zsh/git/package/install -- --only-build $@
Some files were not shown because too many files have changed in this diff.