Compare commits
15 Commits

e0cbf58b3c
09c214f939
e2c6007a65
620d07f1a8
4baacd9c32
6c546ebb6f
9783119a7d
a94d6bc197
76a746a53e
7617c938b1
a1256bb0af
73e26a2ecb
20b7cc32eb
22dd6f8112
710d42e248
29
.config/env.template
Normal file
@@ -0,0 +1,29 @@
#!/bin/zsh
export AWS_ACCOUNT=
export AWS_PROFILE=
export AWS_REGION=
export AWS__EFS__LOCAL_MOUNT_POINT=
export DIRECTUS__API_TOKEN=
export DIRECTUS__BASE_URL=
export DISCORD__BOT_TOKEN=
export DISCORD__CONTENT_FOOTER=
export DISCORD__CONTENT_HEADER=
export DISCORD__DEFAULT_AVATAR_URL=
export DISCORD__DEFAULT_CHANNEL_ID=
export DISCORD__DEFAULT_USERNAME=
export DISCORD__DEFAULT_WEBHOOK=
export I3__BORDER_PIXEL_SIZE=
export I3__DMENU_FONT_SIZE=
export I3__GLOBAL_FONT_SIZE=
export I3__MODEL_CONFIG=
export LINEAR__API_TOKEN=
export MEDIA_SYNC__S3_BUCKET
export MEDIA_SYNC__TARGETS
export REDIS_AUTH=
export REDIS_HOST=
export REDIS_PORT=
export TWILIO__ACCOUNT_SID=
export TWILIO__API_KEY=
export TWILIO__API_SECRET=
export TWILIO__DEFAULT_PHONE_FROM=
export TWILIO__DEFAULT_PHONE_TO=
36
.config/env.template.descriptions
Normal file
@@ -0,0 +1,36 @@
AWS_ACCOUNT | standard AWS environment variables used by awscli and other tools
AWS_PROFILE |
AWS_REGION |

AWS__EFS__LOCAL_MOUNT_POINT | fully-qualified path to mount the EFS drive

DIRECTUS__API_TOKEN | details for a directus instance
DIRECTUS__BASE_URL |

DISCORD__BOT_TOKEN | details for discord bot
DISCORD__CONTENT_HEADER |
DISCORD__CONTENT_FOOTER |
DISCORD__DEFAULT_AVATAR_URL |
DISCORD__DEFAULT_CHANNEL_ID |
DISCORD__DEFAULT_USERNAME |
DISCORD__DEFAULT_WEBHOOK |

I3__BORDER_PIXEL_SIZE | custom i3 configuration settings
I3__DMENU_FONT_SIZE |
I3__GLOBAL_FONT_SIZE |
I3__MODEL_CONFIG |

LINEAR__API_TOKEN | linear.app project management configuration

MEDIA_SYNC__S3_BUCKET | s3 bucket name and filesystem targets for media backups
MEDIA_SYNC__TARGETS |

REDIS_AUTH | redis connection credentials
REDIS_HOST |
REDIS_PORT |

TWILIO__ACCOUNT_SID | twilio account / credentials
TWILIO__API_KEY |
TWILIO__API_SECRET |
TWILIO__DEFAULT_PHONE_FROM |
TWILIO__DEFAULT_PHONE_TO |
@@ -1,10 +0,0 @@
#!/bin/zsh
export AWS_ACCOUNT=
export AWS_PROFILE=
export AWS_REGION=
export AWS__EFS__LOCAL_MOUNT_POINT=
export AWS__S3__MEDIA_BUCKET=
export AWS__S3__MEDIA_TARGETS=
export REDIS_AUTH=
export REDIS_HOST=
export REDIS_PORT=
@@ -1,12 +0,0 @@
AWS_ACCOUNT | standard AWS environment variables used by awscli and other tools
AWS_PROFILE |
AWS_REGION |

AWS__EFS__LOCAL_MOUNT_POINT | fully-qualified path to mount the EFS drive

AWS__S3__MEDIA_BUCKET | s3 bucket name and filesystem targets for media backups
AWS__S3__MEDIA_TARGETS |

REDIS_AUTH | redis connection credentials
REDIS_HOST |
REDIS_PORT |
@@ -1,42 +0,0 @@
#####################################################################

[ ! $SCWRYPTS_ROOT ] && SCWRYPTS_ROOT="$(dirname ${0:a:h})"

source "${0:a:h}/config.zsh"

#####################################################################

__SCWRYPT=1 # arbitrary; indicates scwrypts exists

__PREFERRED_PYTHON_VERSIONS=(3.10 3.9)
__NODE_VERSION=18.0.0

__ENV_TEMPLATE=$SCWRYPTS_ROOT/.env.template

#####################################################################

__GET_PATH_TO_RELATIVE_ARGUMENT() {
    [[ $1 =~ ^[.] ]] \
        && echo $(readlink -f "$EXECUTION_DIR/$1") \
        || echo "$1" \
        ;
    true
}

#####################################################################

__RUN_SCWRYPT() {
    ((SUBSCWRYPT+=1))
    { printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
    echo " BEGIN SUBSCWRYPT : $@" >&2

    SUBSCWRYPT=$SUBSCWRYPT SCWRYPTS_ENV=$ENV_NAME \
        "$SCWRYPTS_ROOT/scwrypts" $@
    EXIT_CODE=$?

    { printf ' '; printf '--%.0s' {1..$SUBSCWRYPT}; printf " ($SUBSCWRYPT) "; } >&2
    echo " END SUBSCWRYPT : $1" >&2
    ((SUBSCWRYPT-=1))

    return $EXIT_CODE
}
Binary file not shown.
0
py/data/convert/__init__.py
Normal file
24
py/data/convert/csv-to-json.py
Executable file
@@ -0,0 +1,24 @@
#!/usr/bin/env python
from py.lib.data.converter import convert
from py.lib.scwrypts import execute

from py.lib.scwrypts.exceptions import ImportedExecutableError

if __name__ != '__main__':
    raise ImportedExecutableError()

#####################################################################

def main(_args, stream):
    return convert(
        input_stream = stream.input,
        input_type = 'csv',
        output_stream = stream.output,
        output_type = 'json',
    )

#####################################################################
execute(main,
    description = 'convert csv into json',
    parse_args = [],
)
24
py/data/convert/csv-to-yaml.py
Executable file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.data.converter import convert
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(_args, stream):
|
||||
return convert(
|
||||
input_stream = stream.input,
|
||||
input_type = 'csv',
|
||||
output_stream = stream.output,
|
||||
output_type = 'yaml',
|
||||
)
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'convert csv into yaml',
|
||||
parse_args = [],
|
||||
)
|
24
py/data/convert/json-to-csv.py
Executable file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.data.converter import convert
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(_args, stream):
|
||||
return convert(
|
||||
input_stream = stream.input,
|
||||
input_type = 'json',
|
||||
output_stream = stream.output,
|
||||
output_type = 'csv',
|
||||
)
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'convert json into csv',
|
||||
parse_args = [],
|
||||
)
|
24
py/data/convert/json-to-yaml.py
Executable file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.data.converter import convert
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(_args, stream):
|
||||
return convert(
|
||||
input_stream = stream.input,
|
||||
input_type = 'json',
|
||||
output_stream = stream.output,
|
||||
output_type = 'yaml',
|
||||
)
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'convert json into yaml',
|
||||
parse_args = [],
|
||||
)
|
24
py/data/convert/yaml-to-csv.py
Executable file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.data.converter import convert
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(_args, stream):
|
||||
return convert(
|
||||
input_stream = stream.input,
|
||||
input_type = 'yaml',
|
||||
output_stream = stream.output,
|
||||
output_type = 'csv',
|
||||
)
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'convert yaml into csv',
|
||||
parse_args = [],
|
||||
)
|
24
py/data/convert/yaml-to-json.py
Executable file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.data.converter import convert
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(_args, stream):
|
||||
return convert(
|
||||
input_stream = stream.input,
|
||||
input_type = 'yaml',
|
||||
output_stream = stream.output,
|
||||
output_type = 'json',
|
||||
)
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'convert yaml into json',
|
||||
parse_args = [],
|
||||
)
|
0
py/directus/__init__.py
Normal file
145
py/directus/get-items.py
Executable file
@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python
|
||||
from json import dumps
|
||||
|
||||
from py.lib.fzf import fzf, fzf_tail
|
||||
from py.lib.http import directus
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(args, stream):
|
||||
if {None} == { args.collection, args.filters, args.fields }:
|
||||
args.interactive = True
|
||||
|
||||
if args.interactive:
|
||||
args.generate_filters_prompt = True
|
||||
args.generate_fields_prompt = True
|
||||
|
||||
collection = _get_or_select_collection(args)
|
||||
filters = _get_or_select_filters(args, collection)
|
||||
fields = _get_or_select_fields(args, collection)
|
||||
|
||||
query = '&'.join([
|
||||
param for param in [
|
||||
fields,
|
||||
filters,
|
||||
]
|
||||
if param
|
||||
])
|
||||
|
||||
endpoint = f'items/{collection}?{query}'
|
||||
|
||||
response = directus.request('GET', endpoint)
|
||||
|
||||
stream.writeline(dumps({
|
||||
**response.json(),
|
||||
'scwrypts_metadata': {
|
||||
'endpoint': endpoint,
|
||||
'repeat_with': f'scwrypts -n py/directus/get-items -- -c {collection} -f \'{query}\'',
|
||||
},
|
||||
}))
|
||||
|
||||
def _get_or_select_collection(args):
|
||||
collection = args.collection
|
||||
|
||||
if collection is None:
|
||||
collection = fzf(
|
||||
prompt = 'select a collection',
|
||||
choices = directus.get_collections(),
|
||||
)
|
||||
|
||||
if not collection:
|
||||
raise ValueError('collection required for query')
|
||||
|
||||
return collection
|
||||
|
||||
def _get_or_select_filters(args, collection):
|
||||
filters = args.filters or ''
|
||||
|
||||
if filters == '' and args.generate_filters_prompt:
|
||||
filters = '&'.join([
|
||||
f'filter[{filter}][' + (
|
||||
operator := fzf(
|
||||
prompt = f'select operator for {filter}',
|
||||
choices = directus.FILTER_OPERATORS,
|
||||
)
|
||||
) + ']=' + fzf_tail(prompt = f'filter[{filter}][{operator}]')
|
||||
|
||||
for filter in fzf(
|
||||
prompt = 'select filter(s) [C^c to skip]',
|
||||
fzf_options = '--multi',
|
||||
force_list = True,
|
||||
choices = directus.get_fields(collection),
|
||||
)
|
||||
])
|
||||
|
||||
return filters
|
||||
|
||||
def _get_or_select_fields(args, collection):
|
||||
fields = args.fields or ''
|
||||
|
||||
if fields == '' and args.generate_fields_prompt:
|
||||
fields = ','.join(fzf(
|
||||
prompt = 'select return field(s) [C^c to get all]',
|
||||
fzf_options = '--multi',
|
||||
choices = directus.get_fields(collection),
|
||||
force_list = True,
|
||||
))
|
||||
|
||||
if fields:
|
||||
fields = f'fields[]={fields}'
|
||||
|
||||
return fields
|
||||
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'interactive CLI to get data from directus',
|
||||
parse_args = [
|
||||
( ['-c', '--collection'], {
|
||||
"dest" : 'collection',
|
||||
"default" : None,
|
||||
"help" : 'the name of the collection',
|
||||
"required" : False,
|
||||
}),
|
||||
( ['-f', '--filters'], {
|
||||
"dest" : 'filters',
|
||||
"default" : None,
|
||||
"help" : 'as a URL-suffix, filters for the query',
|
||||
"required" : False,
|
||||
}),
|
||||
( ['-d', '--fields'], {
|
||||
"dest" : 'fields',
|
||||
"default" : None,
|
||||
"help" : 'comma-separated list of fields to include',
|
||||
"required" : False,
|
||||
}),
|
||||
( ['-p', '--interactive-prompt'], {
|
||||
"action" : 'store_true',
|
||||
"dest" : 'interactive',
|
||||
"default" : False,
|
||||
"help" : 'interactively generate filter prompts; implied if no flags are provided',
|
||||
"required" : False,
|
||||
}),
|
||||
( ['--prompt-filters'], {
|
||||
"action" : 'store_true',
|
||||
"dest" : 'generate_filters_prompt',
|
||||
"default" : False,
|
||||
"help" : '(superceded by -p) only generate filters interactively',
|
||||
"required" : False,
|
||||
}),
|
||||
( ['--prompt-fields'], {
|
||||
"action" : 'store_true',
|
||||
"dest" : 'generate_fields_prompt',
|
||||
"default" : False,
|
||||
"help" : '(superceded by -p) only generate filters interactively',
|
||||
"required" : False,
|
||||
}),
|
||||
]
|
||||
|
||||
)
|
0
py/discord/__init__.py
Normal file
61
py/discord/post-message.py
Executable file
@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env python
|
||||
from json import dumps
|
||||
from sys import stderr
|
||||
|
||||
from py.lib.http import discord
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(args, stream):
|
||||
if args.content is None:
|
||||
print(f'reading input from {stream.input.name}', file=stderr)
|
||||
args.content = ''.join(stream.readlines()).strip()
|
||||
|
||||
if len(args.content) == 0:
|
||||
args.content = 'PING'
|
||||
|
||||
response = discord.send_message(**vars(args))
|
||||
|
||||
stream.writeline(dumps({
|
||||
**(response.json() if response.text != '' else {'message': 'OK'}),
|
||||
'scwrypts_metadata': {},
|
||||
}))
|
||||
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'post a message to the indicated discord channel',
|
||||
parse_args = [
|
||||
( ['-b', '--body'], {
|
||||
'dest' : 'content',
|
||||
'help' : 'message body',
|
||||
'required' : False,
|
||||
}),
|
||||
( ['-c', '--channel-id'], {
|
||||
'dest' : 'channel_id',
|
||||
'help' : 'override default target channel id',
|
||||
'required' : False,
|
||||
}),
|
||||
( ['-w', '--webhook'], {
|
||||
'dest' : 'webhook',
|
||||
'help' : 'override default target webhook (takes precedence over -c)',
|
||||
'required' : False,
|
||||
}),
|
||||
( ['--avatar-url'], {
|
||||
'dest' : 'avatar_url',
|
||||
'help' : 'override default avatar_url',
|
||||
'required' : False,
|
||||
}),
|
||||
( ['--username'], {
|
||||
'dest' : 'username',
|
||||
'help' : 'override default username',
|
||||
'required' : False,
|
||||
}),
|
||||
]
|
||||
)
|
27
py/hello-world.py
Executable file
@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
|
||||
def main(args, stream):
|
||||
stream.writeline(args.message)
|
||||
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'a simple "Hello, World!" program',
|
||||
parse_args = [
|
||||
( ['-m', '--message'], {
|
||||
'dest' : 'message',
|
||||
'default' : 'HELLO WORLD',
|
||||
'help' : 'message to print',
|
||||
'required' : False,
|
||||
}),
|
||||
],
|
||||
)
|
@ -1,7 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
def main():
|
||||
print('HELLO WORLD')
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
6
py/lib/__init__.py
Normal file
@@ -0,0 +1,6 @@
import py.lib.data
import py.lib.fzf
import py.lib.http
import py.lib.redis
import py.lib.scwrypts
import py.lib.twilio
1
py/lib/data/__init__.py
Normal file
@@ -0,0 +1 @@
import py.lib.data.converter
73
py/lib/data/converter.py
Normal file
@@ -0,0 +1,73 @@
import csv
import json
import yaml


def convert(input_stream, input_type, output_stream, output_type):
    if input_type == output_type:
        raise ValueError('input type and output type are the same')

    data = convert_input(input_stream, input_type)
    write_output(output_stream, output_type, data)


def convert_input(stream, input_type):
    supported_input_types = {'csv', 'json', 'yaml'}

    if input_type not in supported_input_types:
        raise ValueError(f'input_type "{input_type}" not supported; must be one of {supported_input_types}')

    return {
        'csv': _read_csv,
        'json': _read_json,
        'yaml': _read_yaml,
    }[input_type](stream)


def write_output(stream, output_type, data):
    supported_output_types = {'csv', 'json', 'yaml'}

    if output_type not in supported_output_types:
        raise ValueError(f'output_type "{output_type}" not supported; must be one of {supported_output_types}')

    return {
        'csv': _write_csv,
        'json': _write_json,
        'yaml': _write_yaml,
    }[output_type](stream, data)


#####################################################################

def _read_csv(stream):
    return [dict(line) for line in csv.DictReader(stream)]

def _write_csv(stream, data):
    writer = csv.DictWriter(stream, fieldnames=list({
        key
        for dictionary in data
        for key in dictionary.keys()
    }))

    writer.writeheader()

    for value in data:
        writer.writerow(value)

#####################################################################

def _read_json(stream):
    data = json.loads(stream.read())
    return data if isinstance(data, list) else [data]

def _write_json(stream, data):
    stream.write(json.dumps(data))

#####################################################################

def _read_yaml(stream):
    data = yaml.safe_load(stream)
    return data if isinstance(data, list) else [data]

def _write_yaml(stream, data):
    yaml.dump(data, stream, default_flow_style=False)
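Usage note (not part of the diff): a minimal sketch of driving py.lib.data.converter.convert directly; the file names here are illustrative.

from py.lib.data.converter import convert

# convert a CSV file into a JSON file (hypothetical paths)
with open('people.csv', 'r', encoding='utf-8') as input_stream, \
        open('people.json', 'w', encoding='utf-8') as output_stream:
    convert(
        input_stream = input_stream,
        input_type = 'csv',
        output_stream = output_stream,
        output_type = 'json',
    )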
1
py/lib/fzf/__init__.py
Normal file
@@ -0,0 +1 @@
from py.lib.fzf.client import fzf, fzf_tail, fzf_head
61
py/lib/fzf/client.py
Normal file
@ -0,0 +1,61 @@
|
||||
from pyfzf.pyfzf import FzfPrompt
|
||||
|
||||
FZF_PROMPT = None
|
||||
|
||||
|
||||
def fzf( # pylint: disable=too-many-arguments
|
||||
choices=None,
|
||||
prompt=None,
|
||||
fzf_options='',
|
||||
delimiter='\n',
|
||||
return_type=str,
|
||||
force_list=False,
|
||||
):
|
||||
global FZF_PROMPT # pylint: disable=global-statement
|
||||
|
||||
if choices is None:
|
||||
choices = []
|
||||
|
||||
if not isinstance(return_type, type):
|
||||
raise ValueError(f'return_type must be a valid python type; "{return_type}" is not a type')
|
||||
|
||||
if FZF_PROMPT is None:
|
||||
FZF_PROMPT = FzfPrompt()
|
||||
|
||||
options = ' '.join({
|
||||
'-i',
|
||||
'--layout=reverse',
|
||||
'--ansi',
|
||||
'--height=30%',
|
||||
f'--prompt "{prompt} : "' if prompt is not None else '',
|
||||
fzf_options,
|
||||
})
|
||||
|
||||
selections = [
|
||||
return_type(selection)
|
||||
for selection in FZF_PROMPT.prompt(choices, options, delimiter)
|
||||
]
|
||||
|
||||
if not force_list:
|
||||
if len(selections) == 0:
|
||||
return None
|
||||
|
||||
if len(selections) == 1:
|
||||
return selections[0]
|
||||
|
||||
return selections
|
||||
|
||||
|
||||
def fzf_tail(*args, **kwargs):
|
||||
return _fzf_print(*args, **kwargs)[-1]
|
||||
|
||||
def fzf_head(*args, **kwargs):
|
||||
return _fzf_print(*args, **kwargs)[0]
|
||||
|
||||
def _fzf_print(*args, fzf_options='', **kwargs):
|
||||
return fzf(
|
||||
*args,
|
||||
**kwargs,
|
||||
fzf_options = f'--print-query {fzf_options}',
|
||||
force_list = True,
|
||||
)
|
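Usage note (not part of the diff): a rough sketch of the fzf helpers defined above; the prompt text and choices are illustrative.

from py.lib.fzf import fzf, fzf_tail

# single selection from a fixed list
color = fzf(prompt = 'select a color', choices = ['red', 'green', 'blue'])

# fzf_tail adds --print-query, so free-form text typed at the prompt is returned
note = fzf_tail(prompt = 'enter a note')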
5
py/lib/http/__init__.py
Normal file
@@ -0,0 +1,5 @@
from py.lib.http.client import get_request_client

import py.lib.http.directus
import py.lib.http.discord
import py.lib.http.linear
20
py/lib/http/client.py
Normal file
@@ -0,0 +1,20 @@
from requests import request


def get_request_client(base_url, headers=None):
    if headers is None:
        headers = {}

    return lambda method, endpoint, **kwargs: request(
        method = method,
        url = f'{base_url}/{endpoint}',
        headers = {
            **headers,
            **kwargs.get('headers', {}),
        },
        **{
            key: value
            for key, value in kwargs.items()
            if key != 'headers'
        },
    )
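Usage note (not part of the diff): a minimal sketch of get_request_client; the base URL, token, and endpoint are illustrative.

from py.lib.http import get_request_client

request = get_request_client(
    base_url = 'https://api.example.com',
    headers = {'Authorization': 'bearer EXAMPLE_TOKEN'},
)

# per-call headers are merged on top of the defaults configured above
response = request('GET', 'items?limit=10', headers = {'Accept': 'application/json'})
print(response.status_code, response.json())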
2
py/lib/http/directus/__init__.py
Normal file
@@ -0,0 +1,2 @@
from py.lib.http.directus.client import *
from py.lib.http.directus.constant import *
56
py/lib/http/directus/client.py
Normal file
@ -0,0 +1,56 @@
|
||||
from py.lib.http import get_request_client
|
||||
from py.lib.scwrypts import getenv
|
||||
|
||||
|
||||
REQUEST = None
|
||||
COLLECTIONS = None
|
||||
FIELDS = {}
|
||||
|
||||
|
||||
def request(method, endpoint, **kwargs):
|
||||
global REQUEST # pylint: disable=global-statement
|
||||
|
||||
if REQUEST is None:
|
||||
REQUEST = get_request_client(
|
||||
base_url = getenv("DIRECTUS__BASE_URL"),
|
||||
headers = {
|
||||
'Authorization': f'bearer {getenv("DIRECTUS__API_TOKEN")}',
|
||||
}
|
||||
)
|
||||
|
||||
return REQUEST(method, endpoint, **kwargs)
|
||||
|
||||
def graphql(query, system=False):
|
||||
return request(
|
||||
'POST',
|
||||
'graphql' if system is True else 'graphql/system',
|
||||
json={'query': query},
|
||||
)
|
||||
|
||||
|
||||
def get_collections():
|
||||
global COLLECTIONS # pylint: disable=global-statement
|
||||
|
||||
if COLLECTIONS is None:
|
||||
COLLECTIONS = [
|
||||
item['collection']
|
||||
for item in request(
|
||||
'GET',
|
||||
'collections?limit=-1&fields[]=collection',
|
||||
).json()['data']
|
||||
]
|
||||
|
||||
return COLLECTIONS
|
||||
|
||||
|
||||
def get_fields(collection):
|
||||
if FIELDS.get(collection) is None:
|
||||
FIELDS[collection] = [
|
||||
item['field']
|
||||
for item in request(
|
||||
'GET',
|
||||
f'fields/{collection}?limit=-1&fields[]=field',
|
||||
).json()['data']
|
||||
]
|
||||
|
||||
return FIELDS[collection]
|
25
py/lib/http/directus/constant.py
Normal file
@ -0,0 +1,25 @@
|
||||
FILTER_OPERATORS = {
|
||||
'_eq',
|
||||
'_neq',
|
||||
'_lt',
|
||||
'_lte',
|
||||
'_gt',
|
||||
'_gte',
|
||||
'_in',
|
||||
'_nin',
|
||||
'_null',
|
||||
'_nnull',
|
||||
'_contains',
|
||||
'_ncontains',
|
||||
'_starts_with',
|
||||
'_ends_with',
|
||||
'_nends_with',
|
||||
'_between',
|
||||
'_nbetween',
|
||||
'_empty',
|
||||
'_nempty',
|
||||
'_intersects',
|
||||
'_nintersects',
|
||||
'_intersects_bbox',
|
||||
'_nintersects_bbox',
|
||||
}
|
2
py/lib/http/discord/__init__.py
Normal file
@@ -0,0 +1,2 @@
from py.lib.http.discord.client import *
from py.lib.http.discord.send_message import *
20
py/lib/http/discord/client.py
Normal file
@ -0,0 +1,20 @@
|
||||
from py.lib.http import get_request_client
|
||||
from py.lib.scwrypts import getenv
|
||||
|
||||
REQUEST = None
|
||||
|
||||
def request(method, endpoint, **kwargs):
|
||||
global REQUEST # pylint: disable=global-statement
|
||||
|
||||
if REQUEST is None:
|
||||
headers = {}
|
||||
|
||||
if (token := getenv("DISCORD__BOT_TOKEN", required = False)) is not None:
|
||||
headers['Authorization'] = f'Bot {token}'
|
||||
|
||||
REQUEST = get_request_client(
|
||||
base_url = 'https://discord.com/api',
|
||||
headers = headers,
|
||||
)
|
||||
|
||||
return REQUEST(method, endpoint, **kwargs)
|
48
py/lib/http/discord/send_message.py
Normal file
@ -0,0 +1,48 @@
|
||||
from py.lib.scwrypts import getenv
|
||||
from py.lib.http.discord import request
|
||||
|
||||
def send_message(content, channel_id=None, webhook=None, username=None, avatar_url=None, **kwargs):
|
||||
if username is None:
|
||||
username = getenv('DISCORD__DEFAULT_USERNAME', required=False)
|
||||
|
||||
if avatar_url is None:
|
||||
avatar_url = getenv('DISCORD__DEFAULT_AVATAR_URL', required=False)
|
||||
|
||||
endpoint = None
|
||||
|
||||
if webhook is not None:
|
||||
endpoint = f'webhooks/{webhook}'
|
||||
|
||||
elif channel_id is not None:
|
||||
endpoint = f'channels/{channel_id}/messages'
|
||||
|
||||
elif (webhook := getenv('DISCORD__DEFAULT_WEBHOOK', required=False)) is not None:
|
||||
endpoint = f'webhooks/{webhook}'
|
||||
|
||||
elif (channel_id := getenv('DISCORD__DEFAULT_CHANNEL_ID', required=False)) is not None:
|
||||
endpoint = f'channels/{channel_id}/messages'
|
||||
|
||||
else:
|
||||
raise ValueError('must provide target channel_id or webhook')
|
||||
|
||||
if (header := getenv('DISCORD__CONTENT_HEADER', required=False)) is not None:
|
||||
content = f'{header}{content}'
|
||||
|
||||
if (footer := getenv('DISCORD__CONTENT_FOOTER', required=False)) is not None:
|
||||
content = f'{content}{footer}'
|
||||
|
||||
|
||||
return request(
|
||||
method = 'POST',
|
||||
endpoint = endpoint,
|
||||
json = {
|
||||
key: value
|
||||
for key, value in {
|
||||
'content': content,
|
||||
'username': username,
|
||||
'avatar_url': avatar_url,
|
||||
**kwargs,
|
||||
}.items()
|
||||
if value is not None
|
||||
},
|
||||
)
|
1
py/lib/http/linear/__init__.py
Normal file
@@ -0,0 +1 @@
from py.lib.http.linear.client import *
20
py/lib/http/linear/client.py
Normal file
@ -0,0 +1,20 @@
|
||||
from py.lib.http import get_request_client
|
||||
from py.lib.scwrypts import getenv
|
||||
|
||||
REQUEST = None
|
||||
|
||||
def request(method, endpoint, **kwargs):
|
||||
global REQUEST # pylint: disable=global-statement
|
||||
|
||||
if REQUEST is None:
|
||||
REQUEST = get_request_client(
|
||||
base_url = 'https://api.linear.app',
|
||||
headers = {
|
||||
'Authorization': f'bearer {getenv("LINEAR__API_TOKEN")}',
|
||||
}
|
||||
)
|
||||
|
||||
return REQUEST(method, endpoint, **kwargs)
|
||||
|
||||
def graphql(query):
|
||||
return request('POST', 'graphql', json={'query': query})
|
1
py/lib/redis/__init__.py
Normal file
@@ -0,0 +1 @@
from py.lib.redis.client import get_client
@@ -1,15 +1,19 @@
 from redis import StrictRedis
 
-from py.scwrypts import getenv
+from py.lib.scwrypts import getenv
 
+CLIENT = None
 
-class RedisClient(StrictRedis):
-    def __init__(self):
-        super().__init__(
+def get_client():
+    global CLIENT  # pylint: disable=global-statement
+
+    if CLIENT is None:
+        print('getting redis client')
+        CLIENT = StrictRedis(
             host = getenv('REDIS_HOST'),
             port = getenv('REDIS_PORT'),
             password = getenv('REDIS_AUTH', required=False),
             decode_responses = True,
         )
 
-Client = RedisClient()
+    return CLIENT
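Usage note (not part of the diff): a quick sketch of the new get_client helper; the key name is illustrative.

from py.lib.redis import get_client

r = get_client()  # lazily builds a StrictRedis client from REDIS_HOST / REDIS_PORT / REDIS_AUTH
r.set('scwrypts:example', 'hello')
print(r.get('scwrypts:example'))  # decode_responses=True, so values come back as str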
6
py/lib/scwrypts/__init__.py
Normal file
@@ -0,0 +1,6 @@
from py.lib.scwrypts.execute import execute
from py.lib.scwrypts.getenv import getenv
from py.lib.scwrypts.interactive import interactive
from py.lib.scwrypts.run import run

import py.lib.scwrypts.io
16
py/lib/scwrypts/exceptions.py
Normal file
@@ -0,0 +1,16 @@
from argparse import ArgumentError


class MissingVariableError(EnvironmentError):
    def __init__(self, name):
        super().__init__(f'Missing required environment variable "{name}"')


class ImportedExecutableError(ImportError):
    def __init__(self):
        super().__init__('executable only; must run through scwrypts')


class MissingFlagAndEnvironmentVariableError(EnvironmentError, ArgumentError):
    def __init__(self, flags, env_var):
        super().__init__(f'must provide at least one of : {{ flags: {flags} OR {env_var} }}')
23
py/lib/scwrypts/execute.py
Normal file
@@ -0,0 +1,23 @@
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

from py.lib.scwrypts.io import get_combined_stream, add_io_arguments


def execute(main, description=None, parse_args=None, toggle_input=True, toggle_output=True):
    if parse_args is None:
        parse_args = []

    parser = ArgumentParser(
        description = description,
        formatter_class = ArgumentDefaultsHelpFormatter,
    )

    add_io_arguments(parser, toggle_input, toggle_output)

    for a in parse_args:
        parser.add_argument(*a[0], **a[1])

    args = parser.parse_args()

    with get_combined_stream(args.input_file, args.output_file) as stream:
        return main(args, stream)
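Usage note (not part of the diff): a minimal sketch of a scwrypt built on execute(); the --shout flag is illustrative.

#!/usr/bin/env python
from py.lib.scwrypts import execute

def main(args, stream):
    line = stream.readline().strip()
    stream.writeline(line.upper() if args.shout else line)

execute(main,
    description = 'echo one line from input, optionally upper-cased',
    parse_args = [
        ( ['--shout'], {
            'action' : 'store_true',
            'dest' : 'shout',
            'default' : False,
            'help' : 'upper-case the echoed line',
        }),
    ],
)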
@@ -1,16 +1,15 @@
 from os import getenv as os_getenv
 
-from py.scwrypts.exceptions import MissingVariableError
-from py.scwrypts.run import run
+from py.lib.scwrypts.exceptions import MissingVariableError
 
 
 def getenv(name, required=True):
     value = os_getenv(name, None)
 
-    if value == None:
-        run('zsh/scwrypts/environment/stage-variables', name)
-
     if required and not value:
         raise MissingVariableError(name)
 
+    if value == '':
+        value = None
+
     return value
22
py/lib/scwrypts/interactive.py
Normal file
@ -0,0 +1,22 @@
|
||||
from bpython import embed
|
||||
|
||||
|
||||
def interactive(variable_descriptions):
|
||||
def outer(function):
|
||||
|
||||
def inner(*args, **kwargs):
|
||||
|
||||
print('\npreparing interactive environment...\n')
|
||||
|
||||
local_vars = function(*args, **kwargs)
|
||||
|
||||
print('\n\n'.join([
|
||||
f'>>> {x}' for x in variable_descriptions
|
||||
]))
|
||||
print('\nenvironment ready; user, GO! :)\n')
|
||||
|
||||
embed(local_vars)
|
||||
|
||||
return inner
|
||||
|
||||
return outer
|
86
py/lib/scwrypts/io.py
Normal file
@ -0,0 +1,86 @@
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from sys import stdin, stdout, stderr
|
||||
|
||||
from py.lib.scwrypts.getenv import getenv
|
||||
|
||||
|
||||
@contextmanager
|
||||
def get_stream(filename=None, mode='r', encoding='utf-8', verbose=False, **kwargs):
|
||||
allowed_modes = {'r', 'w', 'w+'}
|
||||
|
||||
if mode not in allowed_modes:
|
||||
raise ValueError(f'mode "{mode}" not supported modes (must be one of {allowed_modes})')
|
||||
|
||||
is_read = mode == 'r'
|
||||
|
||||
if filename is not None:
|
||||
|
||||
if verbose:
|
||||
print(f'opening file {filename} for {"read" if is_read else "write"}', file=stderr)
|
||||
|
||||
if filename[0] not in {'/', '~'}:
|
||||
filename = Path(f'{getenv("EXECUTION_DIR")}/{filename}').resolve()
|
||||
with open(filename, mode=mode, encoding=encoding, **kwargs) as stream:
|
||||
yield stream
|
||||
|
||||
else:
|
||||
if verbose:
|
||||
print('using stdin for read' if is_read else 'using stdout for write', file=stderr)
|
||||
|
||||
yield stdin if is_read else stdout
|
||||
|
||||
if not is_read:
|
||||
stdout.flush()
|
||||
|
||||
|
||||
def add_io_arguments(parser, toggle_input=True, toggle_output=True):
|
||||
if toggle_input:
|
||||
parser.add_argument(
|
||||
'-i', '--input-file',
|
||||
dest = 'input_file',
|
||||
default = None,
|
||||
help = 'path to input file; omit for stdin',
|
||||
required = False,
|
||||
)
|
||||
|
||||
if toggle_output:
|
||||
parser.add_argument(
|
||||
'-o', '--output-file',
|
||||
dest = 'output_file',
|
||||
default = None,
|
||||
help = 'path to output file; omit for stdout',
|
||||
required = False,
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def get_combined_stream(input_file=None, output_file=None):
|
||||
with get_stream(input_file, 'r') as input_stream, get_stream(output_file, 'w+') as output_stream:
|
||||
yield CombinedStream(input_stream, output_stream)
|
||||
|
||||
|
||||
class CombinedStream:
|
||||
def __init__(self, input_stream, output_stream):
|
||||
self.input = input_stream
|
||||
self.output = output_stream
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
return self.input.read(*args, **kwargs)
|
||||
|
||||
def readline(self, *args, **kwargs):
|
||||
return self.input.readline(*args, **kwargs)
|
||||
|
||||
def readlines(self, *args, **kwargs):
|
||||
return self.input.readlines(*args, **kwargs)
|
||||
|
||||
def write(self, *args, **kwargs):
|
||||
return self.output.write(*args, **kwargs)
|
||||
|
||||
def writeline(self, line):
|
||||
x = self.output.write(f'{line}\n')
|
||||
self.output.flush()
|
||||
return x
|
||||
|
||||
def writelines(self, *args, **kwargs):
|
||||
return self.output.writelines(*args, **kwargs)
|
@@ -7,11 +7,16 @@ def run(scwrypt_name, *args):
    DEPTH = int(getenv('SUBSCWRYPT', '0'))
    DEPTH += 1

    SCWRYPTS_EXE = Path(__file__).parents[3] / 'scwrypts'
    ARGS = ' '.join([str(x) for x in args])
    print(f'SUBSCWRYPT={DEPTH} {SCWRYPTS_EXE} {scwrypt_name} -- {ARGS}')

    print(f'\n {"--"*DEPTH} ({DEPTH}) BEGIN SUBSCWRYPT : {Path(scwrypt_name).name}')
    subprocess_run(
        f'SUBSCWRYPT={DEPTH} {Path(__file__).parents[2] / "scwrypts"} {scwrypt_name} -- {" ".join([str(x) for x in args])}',
        f'SUBSCWRYPT={DEPTH} {SCWRYPTS_EXE} {scwrypt_name} -- {ARGS}',
        shell=True,
        executable='/bin/zsh',
        check=False,
    )

    print(f' {"--"*DEPTH} ({DEPTH}) END SUBSCWRYPT : {Path(scwrypt_name).name}\n')
2
py/lib/twilio/__init__.py
Normal file
@@ -0,0 +1,2 @@
from py.lib.twilio.client import get_client
from py.lib.twilio.send_sms import send_sms
18
py/lib/twilio/client.py
Normal file
@ -0,0 +1,18 @@
|
||||
from twilio.rest import Client
|
||||
|
||||
from py.lib.scwrypts import getenv
|
||||
|
||||
CLIENT = None
|
||||
|
||||
def get_client():
|
||||
global CLIENT # pylint: disable=global-statement
|
||||
|
||||
if CLIENT is None:
|
||||
print('loading client')
|
||||
CLIENT = Client(
|
||||
username = getenv('TWILIO__API_KEY'),
|
||||
password = getenv('TWILIO__API_SECRET'),
|
||||
account_sid = getenv('TWILIO__ACCOUNT_SID'),
|
||||
)
|
||||
|
||||
return CLIENT
|
57
py/lib/twilio/send_sms.py
Normal file
@ -0,0 +1,57 @@
|
||||
from json import dumps
|
||||
from time import sleep
|
||||
|
||||
from py.lib.twilio.client import get_client
|
||||
|
||||
|
||||
def send_sms(to, from_, body, max_char_count=300, stream=None):
|
||||
'''
|
||||
abstraction for twilio.client.messages.create which will break
|
||||
messages into multi-part SMS rather than throwing an error or
|
||||
requiring the use of MMS data
|
||||
|
||||
@param to messages.create parameter
|
||||
@param from_ messages.create parameter
|
||||
@param body messages.create parameter
|
||||
@param max_char_count 1 ≤ N ≤ 1500 (default 300)
|
||||
@param stream used to report success/failure (optional)
|
||||
|
||||
@return a list of twilio MessageInstance objects
|
||||
'''
|
||||
client = get_client()
|
||||
messages = []
|
||||
|
||||
max_char_count = max(1, min(max_char_count, 1500))
|
||||
|
||||
total_sms_parts = 1 + len(body) // max_char_count
|
||||
contains_multiple_parts = total_sms_parts > 1
|
||||
|
||||
for i in range(0, len(body), max_char_count):
|
||||
msg_body = body[i:i+max_char_count]
|
||||
current_part = 1 + i // max_char_count
|
||||
|
||||
if contains_multiple_parts:
|
||||
msg_body = f'{current_part}/{total_sms_parts}\n{msg_body}'
|
||||
|
||||
message = client.messages.create(
|
||||
to = to,
|
||||
from_ = from_,
|
||||
body = msg_body,
|
||||
)
|
||||
|
||||
messages.append(message)
|
||||
|
||||
if stream is not None:
|
||||
stream.writeline(
|
||||
dumps({
|
||||
'sid': message.sid,
|
||||
'to': to,
|
||||
'from': from_,
|
||||
'body': msg_body,
|
||||
})
|
||||
)
|
||||
|
||||
if contains_multiple_parts:
|
||||
sleep(2 if max_char_count <= 500 else 5)
|
||||
|
||||
return messages
|
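Usage note (not part of the diff): a brief sketch of py.lib.twilio.send_sms; the phone numbers are illustrative.

from py.lib.twilio import send_sms

# long bodies are split into numbered multi-part SMS messages
messages = send_sms(
    to = '+15550000001',
    from_ = '+15550000002',
    body = 'hello from scwrypts',
    max_char_count = 300,
)
print(len(messages), 'message(s) sent')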
0
py/linear/__init__.py
Normal file
45
py/linear/comment.py
Executable file
@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.http.linear import graphql
|
||||
from py.lib.scwrypts import execute
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
|
||||
def get_query(args):
|
||||
body = f'"""from wrobot:\n```\n{args.message}\n```\n"""'
|
||||
return f'''
|
||||
mutation CommentCreate {{
|
||||
commentCreate(
|
||||
input: {{
|
||||
issueId: "{args.issue_id}"
|
||||
body: {body}
|
||||
}}
|
||||
) {{ success }}
|
||||
}}'''
|
||||
|
||||
def main(args, stream):
|
||||
response = graphql(get_query(args))
|
||||
stream.writeline(response)
|
||||
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'comment on an issue in linear.app',
|
||||
parse_args = [
|
||||
( ['-d', '--issue-id'], {
|
||||
'dest' : 'issue_id',
|
||||
'help' : 'issue short-code (e.g. CLOUD-319)',
|
||||
'required' : True,
|
||||
}),
|
||||
( ['-m', '--message'], {
|
||||
'dest' : 'message',
|
||||
'help' : 'comment to post to the target issue',
|
||||
'required' : True,
|
||||
}),
|
||||
]
|
||||
)
|
@ -1,19 +1,26 @@
|
||||
#!/usr/bin/env python
|
||||
from py.lib.redis import get_client
|
||||
from py.lib.scwrypts import execute, interactive, getenv
|
||||
|
||||
from py.redis.client import Client
|
||||
from py.scwrypts import interactive, getenv
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
|
||||
@interactive
|
||||
def main():
|
||||
r = Client
|
||||
|
||||
print(f'''
|
||||
>>> r = StrictRedis({getenv("REDIS_HOST")}:{getenv("REDIS_PORT")})
|
||||
''')
|
||||
|
||||
@interactive([
|
||||
f'r = StrictRedis(\'{getenv("REDIS_HOST")}:{getenv("REDIS_PORT")}\')',
|
||||
])
|
||||
def main(_args, _stream):
|
||||
# pylint: disable=possibly-unused-variable
|
||||
r = get_client()
|
||||
return locals()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'establishes a redis client in an interactive python shell',
|
||||
parse_args = [],
|
||||
)
|
||||
|
@@ -1,2 +1,5 @@
-redis
 bpython
+pyfzf
+pyyaml
+redis
+twilio
@@ -1,3 +0,0 @@
from py.scwrypts.getenv import getenv
from py.scwrypts.interactive import interactive
from py.scwrypts.run import run
@@ -1,3 +0,0 @@
class MissingVariableError(Exception):
    def init(self, name):
        super().__init__(f'Missing required environment variable "{name}"')
@@ -1,11 +0,0 @@
from bpython import embed


def interactive(function):
    def main(*args, **kwargs):
        print('preparing interactive environment...')
        local_vars = function(*args, **kwargs)
        print('environment ready; user, GO! :)')
        embed(local_vars)

    return main
0
py/twilio/__init__.py
Normal file
65
py/twilio/send-sms.py
Executable file
@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env python
|
||||
from sys import stderr
|
||||
|
||||
from py.lib.scwrypts import execute, getenv
|
||||
from py.lib.twilio import send_sms
|
||||
|
||||
from py.lib.scwrypts.exceptions import ImportedExecutableError, MissingFlagAndEnvironmentVariableError
|
||||
|
||||
if __name__ != '__main__':
|
||||
raise ImportedExecutableError()
|
||||
|
||||
#####################################################################
|
||||
|
||||
def main(args, stream):
|
||||
if args.body is None:
|
||||
print(f'reading input from {stream.input.name}', file=stderr)
|
||||
args.body = ''.join(stream.readlines()).strip()
|
||||
|
||||
if len(args.body) == 0:
|
||||
args.body = 'PING'
|
||||
|
||||
if args.from_ is None:
|
||||
raise MissingFlagAndEnvironmentVariableError(['-f', '--from'], 'TWILIO__DEFAULT_PHONE_FROM')
|
||||
|
||||
if args.to is None:
|
||||
raise MissingFlagAndEnvironmentVariableError(['-t', '--to'], 'TWILIO__DEFAULT_PHONE_TO')
|
||||
|
||||
send_sms(
|
||||
to = args.to,
|
||||
from_ = args.from_,
|
||||
body = args.body,
|
||||
max_char_count = args.max_char_count,
|
||||
stream = stream,
|
||||
)
|
||||
|
||||
|
||||
#####################################################################
|
||||
execute(main,
|
||||
description = 'send a simple SMS through twilio',
|
||||
parse_args = [
|
||||
( ['-t', '--to'], {
|
||||
'dest' : 'to',
|
||||
'help' : 'phone number of the recipient',
|
||||
'required' : False,
|
||||
'default' : getenv('TWILIO__DEFAULT_PHONE_TO', required=False),
|
||||
}),
|
||||
( ['-f', '--from'], {
|
||||
'dest' : 'from_',
|
||||
'help' : 'phone number of the sender',
|
||||
'required' : False,
|
||||
'default' : getenv('TWILIO__DEFAULT_PHONE_FROM', required=False),
|
||||
}),
|
||||
( ['-b', '--body'], {
|
||||
'dest' : 'body',
|
||||
'help' : 'message body',
|
||||
'required' : False,
|
||||
}),
|
||||
( ['--max-char-count'], {
|
||||
'dest' : 'max_char_count',
|
||||
'help' : 'separate message into parts by character count (1 < N <= 1500)',
|
||||
'required' : False,
|
||||
'default' : 300,
|
||||
}),
|
||||
]
|
||||
)
|
328
run
@ -1,8 +1,7 @@
|
||||
#!/bin/zsh
|
||||
export EXECUTION_DIR=$(pwd)
|
||||
source "${0:a:h}/zsh/lib/import.driver.zsh" || exit 42
|
||||
|
||||
SCWRYPTS_ROOT="${0:a:h}"
|
||||
source "$SCWRYPTS_ROOT/zsh/common.zsh" || exit 42
|
||||
#####################################################################
|
||||
|
||||
__RUN() {
|
||||
@ -10,24 +9,54 @@ __RUN() {
|
||||
usage: scwrypts [OPTIONS ...] SCRIPT -- [SCRIPT OPTIONS ...]
|
||||
|
||||
OPTIONS
|
||||
-e, --env <env-name> set environment; overwrites SCWRYPTS_ENV
|
||||
-n, --no-log skip logging (useful when calling scwrypts as an api)
|
||||
-l, --list print out command list and exit
|
||||
-g, --group <group-name> only use scripts from the indicated group
|
||||
-t, --type <type-name> only use scripts of the indicated type
|
||||
-m, --name <scwrypt-name> only run the script if there is an exact match
|
||||
(requires type and group)
|
||||
|
||||
-h, --help display this message and exit
|
||||
-e, --env <env-name> set environment; overwrites SCWRYPTS_ENV
|
||||
-n, --no-log skip logging and run in quiet mode
|
||||
|
||||
--update update scwrypts library to latest version
|
||||
|
||||
-v, --version print out scwrypts version and exit
|
||||
-l, --list print out command list and exit
|
||||
-h, --help display this message and exit
|
||||
'
|
||||
cd "$SCWRYPTS_ROOT"
|
||||
|
||||
local ENV_NAME="$SCWRYPTS_ENV"
|
||||
local SEARCH_PATTERNS=()
|
||||
|
||||
local VARSPLIT SEARCH_GROUP SEARCH_TYPE SEARCH_NAME
|
||||
|
||||
local ERROR=0
|
||||
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case $1 in
|
||||
-t | --type )
|
||||
[ ! $2 ] && ERROR "missing value for argument $1" && break
|
||||
SEARCH_TYPE=$2
|
||||
shift 2
|
||||
;;
|
||||
-g | --group )
|
||||
[ ! $2 ] && ERROR "missing value for argument $1" && break
|
||||
SEARCH_GROUP=$2
|
||||
shift 2
|
||||
;;
|
||||
-m | --name )
|
||||
[ ! $2 ] && ERROR "missing value for argument $1" && break
|
||||
SEARCH_NAME=$2
|
||||
shift 2
|
||||
;;
|
||||
|
||||
-[a-z][a-z]* )
|
||||
VARSPLIT=$(echo "$1 " | sed 's/^\(-.\)\(.*\) /\1 -\2/')
|
||||
set -- $(echo " $VARSPLIT ") ${@:2}
|
||||
;;
|
||||
-h | --help )
|
||||
__USAGE
|
||||
USAGE
|
||||
return 0
|
||||
;;
|
||||
-n | --no-log )
|
||||
@ -35,65 +64,186 @@ __RUN() {
|
||||
shift 1
|
||||
;;
|
||||
-e | --env )
|
||||
[ $ENV_NAME ] && __WARNING 'overwriting session environment'
|
||||
[ ! $2 ] && ERROR "missing value for argument $1" && break
|
||||
[ ! $SUBSCWRYPTS ] \
|
||||
&& [ $ENV_NAME ] \
|
||||
&& WARNING 'overwriting session environment' \
|
||||
;
|
||||
|
||||
ENV_NAME="$2"
|
||||
__STATUS "using CLI environment '$ENV_NAME'"
|
||||
STATUS "using CLI environment '$ENV_NAME'"
|
||||
shift 2
|
||||
;;
|
||||
-l | --list )
|
||||
__OUTPUT_COMMAND_LIST
|
||||
SCWRYPTS__GET_AVAILABLE_SCWRYPTS
|
||||
return 0
|
||||
;;
|
||||
-v | --version )
|
||||
echo scwrypts $(cd "$SCWRYPTS__ROOT__scwrypts"; git describe --tags)
|
||||
return 0
|
||||
;;
|
||||
--update )
|
||||
cd "$SCWRYPTS__ROOT__scwrypts"
|
||||
git fetch --quiet origin main
|
||||
local SYNC_STATUS=$?
|
||||
|
||||
git diff --exit-code origin/main -- . >&2
|
||||
local DIFF_STATUS=$?
|
||||
|
||||
[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
|
||||
SUCCESS 'already up-to-date with origin/main'
|
||||
} || {
|
||||
git rebase --autostash origin/main \
|
||||
&& SUCCESS 'up-to-date with origin/main' \
|
||||
|| {
|
||||
git rebase --abort
|
||||
ERROR 'unable to update scwrypts; please try manual upgrade'
|
||||
REMINDER "installation in '$(pwd)'"
|
||||
}
|
||||
}
|
||||
return 0
|
||||
;;
|
||||
-- )
|
||||
shift 1
|
||||
break # pass arguments after '--' to the scwrypt
|
||||
;;
|
||||
-* )
|
||||
__ERROR "unrecognized argument '$1'"
|
||||
--* )
|
||||
ERROR "unrecognized argument '$1'"
|
||||
shift 1
|
||||
;;
|
||||
* )
|
||||
SEARCH_PATTERNS+=$1
|
||||
SEARCH_PATTERNS+=($1)
|
||||
shift 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
__ERROR_CHECK
|
||||
[ $SEARCH_NAME ] && {
|
||||
[ ! $SEARCH_TYPE ] && ERROR '--name requires --type argument'
|
||||
[ ! $SEARCH_GROUP ] && ERROR '--name requires --group argument'
|
||||
}
|
||||
|
||||
CHECK_ERRORS
|
||||
|
||||
##########################################
|
||||
|
||||
local SCRIPT=$(__SELECT_SCRIPT $SEARCH_PATTERNS)
|
||||
[ ! $SCRIPT ] && exit 2
|
||||
export SCWRYPT_NAME=$SCRIPT
|
||||
local SCWRYPTS_AVAILABLE
|
||||
local POTENTIAL_ERROR="no such scwrypt exists:"
|
||||
|
||||
SCWRYPTS_AVAILABLE=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS)
|
||||
|
||||
[ $SEARCH_NAME ] && {
|
||||
POTENTIAL_ERROR+="\n NAME : '$SEARCH_NAME'"
|
||||
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
|
||||
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
|
||||
SCWRYPTS_AVAILABLE=$({
|
||||
echo $SCWRYPTS_AVAILABLE | head -n1
|
||||
echo $SCWRYPTS_AVAILABLE | sed -e 's/\x1b\[[0-9;]*m//g' | grep "^$SEARCH_NAME *$SEARCH_TYPE *$SEARCH_GROUP\$"
|
||||
})
|
||||
}
|
||||
|
||||
[ ! $SEARCH_NAME ] && {
|
||||
[ $SEARCH_TYPE ] && {
|
||||
POTENTIAL_ERROR+="\n TYPE : '$SEARCH_TYPE'"
|
||||
SCWRYPTS_AVAILABLE=$(\
|
||||
{
|
||||
echo $SCWRYPTS_AVAILABLE | head -n1
|
||||
echo $SCWRYPTS_AVAILABLE | grep ' [^/]*'$SEARCH_TYPE'[^/]* '
|
||||
} \
|
||||
| awk '{$2=""; print $0;}' \
|
||||
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
|
||||
| column -ts '^'
|
||||
)
|
||||
}
|
||||
|
||||
[ $SEARCH_GROUP ] && {
|
||||
POTENTIAL_ERROR+="\n GROUP : '$SEARCH_GROUP'"
|
||||
SCWRYPTS_AVAILABLE=$(
|
||||
{
|
||||
echo $SCWRYPTS_AVAILABLE | head -n1
|
||||
echo $SCWRYPTS_AVAILABLE | grep "$SEARCH_GROUP"'[^/]*$'
|
||||
} \
|
||||
| awk '{$NF=""; print $0;}' \
|
||||
| sed 's/ \+$/'$(printf $__COLOR_RESET)'/; s/ \+/^/g' \
|
||||
| column -ts '^'
|
||||
)
|
||||
}
|
||||
|
||||
[[ ${#SEARCH_PATTERNS[@]} -gt 0 ]] && {
|
||||
POTENTIAL_ERROR+="\n PATTERNS : $SEARCH_PATTERNS"
|
||||
local P
|
||||
for P in ${SEARCH_PATTERNS[@]}
|
||||
do
|
||||
SCWRYPTS_AVAILABLE=$(
|
||||
{
|
||||
echo $SCWRYPTS_AVAILABLE | head -n1
|
||||
echo $SCWRYPTS_AVAILABLE | grep $P
|
||||
}
|
||||
)
|
||||
done
|
||||
}
|
||||
}
|
||||
|
||||
[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -lt 2 ]] && ERROR "$POTENTIAL_ERROR"
|
||||
|
||||
CHECK_ERRORS
|
||||
|
||||
##########################################
|
||||
|
||||
local NAME="$SEARCH_NAME"
|
||||
local TYPE="$SEARCH_TYPE"
|
||||
local GROUP="$SEARCH_GROUP"
|
||||
|
||||
[[ $(echo $SCWRYPTS_AVAILABLE | wc -l) -eq 2 ]] \
|
||||
&& SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | tail -n1) \
|
||||
|| SCWRYPT_SELECTION=$(echo $SCWRYPTS_AVAILABLE | FZF "select a script to run" --header-lines 1)
|
||||
[ $SCWRYPT_SELECTION ] || exit 2
|
||||
|
||||
SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION
|
||||
|
||||
export SCWRYPT_NAME=$NAME
|
||||
export SCWRYPT_TYPE=$TYPE
|
||||
export SCWRYPT_GROUP=$GROUP
|
||||
|
||||
##########################################
|
||||
|
||||
local ENV_REQUIRED=$(__CHECK_ENV_REQUIRED && echo 1 || echo 0)
|
||||
|
||||
[[ $ENV_REQUIRED -eq 1 ]] && {
|
||||
[ ! $ENV_NAME ] && ENV_NAME=$(__SELECT_ENV)
|
||||
local ENV_FILE=$(__GET_ENV_FILE $ENV_NAME)
|
||||
|
||||
[ -f "$ENV_FILE" ] && source "$ENV_FILE" \
|
||||
|| __FAIL 5 "missing or invalid environment '$ENV_NAME'"
|
||||
[ ! $ENV_NAME ] && ENV_NAME=$(SCWRYPTS__SELECT_ENV)
|
||||
for GROUP in ${SCWRYPTS_GROUPS[@]}
|
||||
do
|
||||
local ENV_FILE=$(SCWRYPTS__GET_ENV_FILE "$ENV_NAME" "$GROUP")
|
||||
source "$ENV_FILE" || FAIL 5 "missing or invalid environment '$GROUP/$ENV_NAME'"
|
||||
done
|
||||
|
||||
export ENV_NAME
|
||||
}
|
||||
|
||||
for f in $(eval 'echo $SCWRYPTS_STATIC_CONFIG__'$SCWRYPT_GROUP)
|
||||
do
|
||||
source "$f" || FAIL 5 "invalid static config '$f'"
|
||||
done
|
||||
|
||||
##########################################
|
||||
|
||||
[ ! $SUBSCWRYPT ] \
|
||||
&& [[ $ENV_NAME =~ prod ]] \
|
||||
&& { __VALIDATE_UPSTREAM_TIMELINE || __ABORT; }
|
||||
&& { __VALIDATE_UPSTREAM_TIMELINE || ABORT; }
|
||||
|
||||
local RUN_STRING=$(__GET_RUN_STRING $SCRIPT $ENV_NAME)
|
||||
##########################################
|
||||
|
||||
local RUN_STRING=$(SCWRYPTS__GET_RUNSTRING $SCWRYPT_NAME $SCWRYPT_TYPE $SCWRYPT_GROUP)
|
||||
[ ! $RUN_STRING ] && exit 3
|
||||
|
||||
##########################################
|
||||
|
||||
local LOGFILE=$(__GET_LOGFILE $SCRIPT)
|
||||
local LOGFILE=$(__GET_LOGFILE)
|
||||
|
||||
local HEADER=$(
|
||||
[ $SUBSCWRYPT ] && return 0
|
||||
echo '====================================================================='
|
||||
echo "script : $SCRIPT"
|
||||
echo "script : $SCWRYPT_GROUP $SCWRYPT_TYPE $SCWRYPT_NAME"
|
||||
echo "run at : $(date)"
|
||||
echo "config : $ENV_NAME"
|
||||
[ ! $LOGFILE ] && echo '\033[1;33m------------------------------------------\033[0m'
|
||||
@ -102,10 +252,10 @@ __RUN() {
|
||||
[ ! $LOGFILE ] && {
|
||||
[ $HEADER ] && echo $HEADER
|
||||
[ $SUBSCWRYPT ] && {
|
||||
eval $RUN_STRING $@
|
||||
eval "$RUN_STRING $(printf "%q " "$@")"
|
||||
exit $?
|
||||
} || {
|
||||
eval $RUN_STRING $@ </dev/tty >/dev/tty 2>&1
|
||||
eval "$RUN_STRING $(printf "%q " "$@")" </dev/tty >/dev/tty 2>&1
|
||||
exit $?
|
||||
}
|
||||
}
|
||||
@ -113,7 +263,7 @@ __RUN() {
|
||||
{
|
||||
[ $HEADER ] && echo $HEADER
|
||||
echo '\033[1;33m--- BEGIN OUTPUT -------------------------\033[0m'
|
||||
eval $RUN_STRING $@
|
||||
eval "$RUN_STRING $(printf "%q " "$@")"
|
||||
EXIT_CODE=$?
|
||||
echo '\033[1;33m--- END OUTPUT ---------------------------\033[0m'
|
||||
|
||||
@@ -130,111 +280,17 @@ __RUN() {

#####################################################################

__OUTPUT_COMMAND_LIST() {
	local LAST_TYPE LAST_SUBSET
	for SCRIPT in $(__GET_AVAILABLE_SCRIPTS)
	do
		TYPE=$(echo $SCRIPT | sed 's/\/.*//')
		SUBSET=$(echo $SCRIPT | sed 's/.*\/\(.*\)\/[^\/]*$/\1/')
		[[ ! $LAST_TYPE =~ $TYPE ]] && {
			echo >&2
			echo "\\033[1;32m$TYPE scwrypts\\033[0m" >&2
			LAST_SUBSET=''
		}
		[ $LAST_SUBSET ] && [[ ! $LAST_SUBSET =~ $SUBSET ]] && {
			echo >&2
		}
		printf ' - ' >&2
		echo $SCRIPT
		LAST_TYPE=$TYPE
		LAST_SUBSET=$SUBSET
	done
}

#####################################################################

__SELECT_SCRIPT() {
	local SCRIPT
	local SCRIPTS=$(__GET_AVAILABLE_SCRIPTS)
	local SEARCH=($@)

	[[ ${#SEARCH[@]} -eq 0 ]] && {
		SCRIPT=$(echo $SCRIPTS | __FZF 'select a script')
	}

	[[ ${#SEARCH[@]} -eq 1 ]] && [ -f ./$SEARCH ] && {
		SCRIPT=$SEARCH
	}

	[ ! $SCRIPT ] && [[ ${#SEARCH[@]} -gt 0 ]] && {
		SCRIPT=$SCRIPTS
		for PATTERN in $SEARCH
		do
			SCRIPT=$(echo $SCRIPT | grep $PATTERN)
		done

		[ ! $SCRIPT ] && __FAIL 2 "no script found by name '$@'"

		[[ $(echo $SCRIPT | wc -l) -gt 1 ]] && {
			__STATUS "more than one script matched '$@'"
			SCRIPT=$(echo $SCRIPT | __FZF 'select a script')
		}
	}

	echo $SCRIPT
}

__GET_RUN_STRING() {
	local SCRIPT="$1"
	local ENV_NAME="$2"
	local TYPE=$(echo $SCRIPT | sed 's/\/.*$//')

	local RUN_STRING

	local _VIRTUALENV="$SCWRYPTS_VIRTUALENV_PATH/$TYPE/bin/activate"
	[ -f $_VIRTUALENV ] && source $_VIRTUALENV

	case $TYPE in
		py ) __CHECK_DEPENDENCY python || return 1
			RUN_STRING="python -m $(echo $SCRIPT | sed 's/\//./g; s/\.py$//; s/\.\.//')"

			CURRENT_PYTHON_VERSION=$(python --version | sed 's/^[^0-9]*\(3\.[^.]*\).*$/\1/')

			echo $__PREFERRED_PYTHON_VERSIONS | grep -q $CURRENT_PYTHON_VERSION || {
				__WARNING "only tested on the following python versions: $(printf ', %s.x' ${__PREFERRED_PYTHON_VERSIONS[@]} | sed 's/^, //')"
				__WARNING 'compatibility may vary'
			}
			;;

		zsh ) __CHECK_DEPENDENCY zsh || return 1
			RUN_STRING="noglob ./$SCRIPT"
			;;

		zx ) __CHECK_DEPENDENCY zx || return 1
			RUN_STRING="FORCE_COLOR=3 ./$SCRIPT.mjs"
			;;

		* ) __ERROR "unsupported script type '$SCRIPT_TYPE'"
			return 2
			;;
	esac

	RUN_STRING="SCWRYPTS_ENV='$ENV_NAME' $RUN_STRING"
	[ -f $_VIRTUALENV ] && RUN_STRING="source '$_VIRTUALENV'; $RUN_STRING"

	echo $RUN_STRING
}

__CHECK_ENV_REQUIRED() {
	[ $CI ] && return 1

	echo $SCRIPT | grep -q 'zsh/scwrypts/logs' && return 1
	echo $SCWRYPT_NAME | grep -q 'scwrypts/logs/' && return 1
	echo $SCWRYPT_NAME | grep -q 'scwrypts/environment/' && return 1

	return 0
}

__VALIDATE_UPSTREAM_TIMELINE() {
	__STATUS "on '$ENV_NAME'; checking diff against origin/main"
	STATUS "on '$ENV_NAME'; checking diff against origin/main"

	git fetch --quiet origin main
	local SYNC_STATUS=$?
@@ -243,26 +299,24 @@ __VALIDATE_UPSTREAM_TIMELINE() {
	local DIFF_STATUS=$?

	[[ $SYNC_STATUS -eq 0 ]] && [[ $DIFF_STATUS -eq 0 ]] && {
		__SUCCESS 'up-to-date with origin/main'
		SUCCESS 'up-to-date with origin/main'
	} || {
		__WARNING
		[[ $SYNC_STATUS -ne 0 ]] && __WARNING 'unable to synchronize with origin/main'
		[[ $DIFF_STATUS -ne 0 ]] && __WARNING 'your branch differs from origin/main (diff listed above)'
		__WARNING
		WARNING
		[[ $SYNC_STATUS -ne 0 ]] && WARNING 'unable to synchronize with origin/main'
		[[ $DIFF_STATUS -ne 0 ]] && WARNING 'your branch differs from origin/main (diff listed above)'
		WARNING

		__yN 'continue?' || return 1
		yN 'continue?' || return 1
	}
}

__GET_LOGFILE() {
	local SCRIPT="$1"

	[ $SUBSCWRYPT ] \
		|| [[ $SCRIPT =~ scwrypts/logs ]] \
		|| [[ $SCRIPT =~ interactive ]] \
		|| [[ $SCWRYPT_NAME =~ scwrypts/logs ]] \
		|| [[ $SCWRYPT_NAME =~ interactive ]] \
		&& return 0

	echo "$SCWRYPTS_LOG_PATH/$(echo $SCRIPT | sed 's/^\.\///; s/\//\%/g').log"
	echo "$SCWRYPTS_LOG_PATH/$(echo $GROUP/$TYPE/$NAME | sed 's/^\.\///; s/\//\%/g').log"
}

#####################################################################
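For readers following the dispatch logic: __GET_RUN_STRING turns a scwrypt path into a shell command keyed on its type. A small standalone illustration of the python case, using a hypothetical scwrypt path and environment name:

#!/bin/zsh
# illustration only (hypothetical script path and environment name):
# how a python scwrypt path becomes a run string
SCRIPT='py/aws/ec2/list'
ENV_NAME='dev'

MODULE=$(echo $SCRIPT | sed 's/\//./g; s/\.py$//; s/\.\.//')
RUN_STRING="SCWRYPTS_ENV='$ENV_NAME' python -m $MODULE"

echo $RUN_STRING   # -> SCWRYPTS_ENV='dev' python -m py.aws.ec2.list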
@@ -1,26 +1,32 @@
DONT_EXIT=1 source ${0:a:h}/zsh/common.zsh
NO_EXPORT_CONFIG=1 source "${0:a:h}/zsh/lib/import.driver.zsh" || return 42

#####################################################################
__SCWRYPTS() {
	local SCRIPT=$(__GET_AVAILABLE_SCRIPTS | __FZF 'select a script')
SCWRYPTS__ZSH_PLUGIN() {
	local SCWRYPT_SELECTION=$(SCWRYPTS__GET_AVAILABLE_SCWRYPTS | FZF 'select a script' --header-lines 1)
	local NAME
	local TYPE
	local GROUP
	zle clear-command-line
	[ ! $SCRIPT ] && { zle accept-line; return 0; }
	[ ! $SCWRYPT_SELECTION ] && { zle accept-line; return 0; }

	SCWRYPTS__SEPARATE_SCWRYPT_SELECTION $SCWRYPT_SELECTION

	which scwrypts >/dev/null 2>&1 \
		&& RBUFFER="scwrypts" || RBUFFER="$SCWRYPTS_ROOT/scwrypts"

	RBUFFER+=" $SCRIPT"
	RBUFFER+=" --name $NAME --group $GROUP --type $TYPE"
	zle accept-line
}

zle -N scwrypts __SCWRYPTS
zle -N scwrypts SCWRYPTS__ZSH_PLUGIN
bindkey $SCWRYPTS_SHORTCUT scwrypts

#####################################################################
__SCWRYPTS_ENV() {
SCWRYPTS__ZSH_PLUGIN_ENV() {
	local RESET='reset'
	local SELECTED=$(\
		{ [ $SCWRYPTS_ENV ] && echo $RESET; __GET_ENV_NAMES; } \
		| __FZF 'select an environment' \
		{ [ $SCWRYPTS_ENV ] && echo $RESET; SCWRYPTS__GET_ENV_NAMES; } \
		| FZF 'select an environment' \
	)

	zle clear-command-line
@@ -32,5 +38,5 @@ __SCWRYPTS_ENV() {
	zle accept-line
}

zle -N scwrypts-setenv __SCWRYPTS_ENV
zle -N scwrypts-setenv SCWRYPTS__ZSH_PLUGIN_ENV
bindkey $SCWRYPTS_ENV_SHORTCUT scwrypts-setenv
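The plugin above relies on the standard ZLE widget pattern: register a function with zle -N, bind it to a shortcut, and have it rewrite the command-line buffer before accepting it. A minimal self-contained sketch of the same pattern, with a hypothetical widget name and key binding:

#!/bin/zsh
# minimal zle widget sketch (hypothetical name and binding); pressing
# ctrl+x ctrl+d replaces the current command line and runs it
demo-widget() {
	BUFFER='echo hello from a zle widget'   # overwrite whatever was typed
	zle accept-line                          # run it as if enter was pressed
}
zle -N demo-widget
bindkey '^X^D' demo-widget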
@ -1,13 +0,0 @@
|
||||
_DEPENDENCIES+=(
|
||||
aws
|
||||
jq
|
||||
)
|
||||
_REQUIRED_ENV+=(
|
||||
AWS_ACCOUNT
|
||||
AWS_PROFILE
|
||||
AWS_REGION
|
||||
)
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
_AWS() { aws --profile $AWS_PROFILE --region $AWS_REGION --output json $@; }
|
@ -1,6 +0,0 @@
|
||||
_DEPENDENCIES+=(
|
||||
docker
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,14 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__STATUS "performing AWS ECR docker login"
|
||||
|
||||
_AWS ecr get-login-password | docker login \
|
||||
--username AWS \
|
||||
--password-stdin \
|
||||
"$AWS_ACCOUNT.dkr.ecr.$AWS_REGION.amazonaws.com" \
|
||||
&& __SUCCESS "logged in to 'AWS:$AWS_ACCOUNT:$AWS_REGION'" \
|
||||
|| __FAIL 1 "unable to login to '$AWS_ACCOUNT' in '$AWS_REGION'"
|
@ -1,6 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=(
|
||||
AWS__EFS__LOCAL_MOUNT_POINT
|
||||
)
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,34 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_EFS_DISCONNECT() {
|
||||
[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
|
||||
__STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
|
||||
[ ! $MOUNTED ] && {
|
||||
__STATUS 'no efs currently mounted'
|
||||
exit 0
|
||||
}
|
||||
|
||||
__GETSUDO || exit 1
|
||||
|
||||
|
||||
local SELECTED=$(echo $MOUNTED | __FZF 'select a file system to unmount')
|
||||
[ ! $SELECTED ] && __ABORT
|
||||
|
||||
local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
|
||||
__STATUS "unmounting '$SELECTED'"
|
||||
sudo umount $EFS >/dev/null 2>&1
|
||||
sudo rmdir $EFS \
|
||||
&& __SUCCESS "done" \
|
||||
|| __FAIL 2 "failed to unmount '$EFS'"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_EFS_DISCONNECT
|
@ -1,6 +0,0 @@
|
||||
_DEPENDENCIES+=(
|
||||
kubectl
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,19 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__STATUS "performing AWS ECR docker login"
|
||||
|
||||
CLUSTER_NAME=$(\
|
||||
_AWS eks list-clusters \
|
||||
| jq -r '.[] | .[]' \
|
||||
| __FZF 'select a cluster'
|
||||
)
|
||||
[ ! $CLUSTER_NAME ] && __ABORT
|
||||
|
||||
__STATUS "updating kubeconfig for '$CLUSTER_NAME'"
|
||||
_AWS eks update-kubeconfig --name $CLUSTER_NAME \
|
||||
&& __SUCCESS "kubeconfig updated with '$CLUSTER_NAME'" \
|
||||
|| __ERROR "failed to update kubeconfig; do you have permissions to access '$CLUSTER_NAME'?"
|
@ -1,6 +0,0 @@
|
||||
_DEPENDENCIES+=(
|
||||
cli53
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,4 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,30 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=(
|
||||
AWS__S3__MEDIA_TARGETS
|
||||
AWS__S3__MEDIA_BUCKET
|
||||
)
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
AWS__S3__MEDIA_TARGETS=($(echo $AWS__S3__MEDIA_TARGETS | sed 's/,/\n/g'))
|
||||
|
||||
__SYNC_MEDIA() {
|
||||
local ACTION="$1"
|
||||
local REMOTE_TARGET="s3://$AWS__S3__MEDIA_BUCKET/$2"
|
||||
local LOCAL_TARGET="$HOME/$2"
|
||||
|
||||
local A B
|
||||
case $ACTION in
|
||||
push ) A="$LOCAL_TARGET"; B="$REMOTE_TARGET" ;;
|
||||
pull ) A="$REMOTE_TARGET"; B="$LOCAL_TARGET" ;;
|
||||
|
||||
* ) __ERROR "unknown action '$1'"; return 1 ;;
|
||||
esac
|
||||
|
||||
local FLAGS=(${@:3})
|
||||
|
||||
__STATUS "${ACTION}ing $2"
|
||||
_AWS s3 sync $REMOTE_TARGET $LOCAL_TARGET $FLAGS \
|
||||
&& __SUCCESS "$2 up-to-date" \
|
||||
|| { __ERROR "unable to sync $2 (see above)"; return 1; }
|
||||
}
|
@ -1,27 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__PULL_ALL_MEDIA() {
|
||||
local FLAGS=($@)
|
||||
local FAILED_COUNT=0
|
||||
|
||||
__STATUS 'starting media download from s3'
|
||||
|
||||
local TARGET
|
||||
for TARGET in $AWS__S3__MEDIA_TARGETS
|
||||
do
|
||||
__SYNC_MEDIA pull $TARGET $FLAGS || ((FAILED_COUNT+=1))
|
||||
done
|
||||
|
||||
[[ $FAILED_COUNT -eq 0 ]] \
|
||||
&& __SUCCESS 'local media files now up-to-date' \
|
||||
|| __FAIL $FAILED_COUNT 'unable to download one or more targets' \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
|
||||
__PULL_ALL_MEDIA $@
|
@ -1,27 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__PUSH_ALL_MEDIA() {
|
||||
local FLAGS=($@)
|
||||
local FAILED_COUNT=0
|
||||
|
||||
__STATUS 'starting media upload to s3'
|
||||
|
||||
local TARGET
|
||||
for TARGET in $AWS__S3__MEDIA_TARGETS
|
||||
do
|
||||
__SYNC_MEDIA push $TARGET $FLAGS || ((FAILED_COUNT+=1))
|
||||
done
|
||||
|
||||
[[ $FAILED_COUNT -eq 0 ]] \
|
||||
&& __SUCCESS 's3 media files now up-to-date' \
|
||||
|| __FAIL $FAILED_COUNT 'unable to upload one or more targets' \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
|
||||
__PUSH_ALL_MEDIA $@
|
10  zsh/cloud/aws/ecr/login  Executable file
@@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use cloud/aws/ecr

CHECK_ENVIRONMENT
#####################################################################

ECR_LOGIN $@
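The new entry point defers to an ECR_LOGIN helper imported via `use cloud/aws/ecr`, which is not shown in this diff. A hedged sketch of what that helper presumably does, modeled on the removed standalone ecr login script earlier in this diff (the helper name and the SUCCESS/FAIL utilities are assumptions):

#!/bin/zsh
# hedged sketch of the imported ECR_LOGIN helper (names are assumptions)
ECR_LOGIN() {
	aws --profile "$AWS_PROFILE" --region "$AWS_REGION" \
		ecr get-login-password \
		| docker login \
			--username AWS \
			--password-stdin \
			"$AWS_ACCOUNT.dkr.ecr.$AWS_REGION.amazonaws.com" \
		&& SUCCESS "logged in to 'AWS:$AWS_ACCOUNT:$AWS_REGION'" \
		|| FAIL 1 "unable to login to '$AWS_ACCOUNT' in '$AWS_REGION'"
}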
@ -1,37 +1,40 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=(jq)
|
||||
REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)
|
||||
|
||||
use cloud/aws/cli
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
_EFS_CONNECT() {
|
||||
__GETSUDO || exit 1
|
||||
EFS_CONNECT() {
|
||||
GETSUDO || exit 1
|
||||
[ ! -d $AWS__EFS__LOCAL_MOUNT_POINT ] && {
|
||||
sudo mkdir $AWS__EFS__LOCAL_MOUNT_POINT \
|
||||
&& __STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'"
|
||||
&& STATUS "created local mount point '$AWS__EFS__LOCAL_MOUNT_POINT'"
|
||||
}
|
||||
|
||||
local FS_ID=$(\
|
||||
_AWS efs describe-file-systems \
|
||||
AWS efs describe-file-systems \
|
||||
| jq -r '.[] | .[] | .FileSystemId' \
|
||||
| __FZF 'select a filesystem to mount' \
|
||||
| FZF 'select a filesystem to mount' \
|
||||
)
|
||||
[ ! $FS_ID ] && __ABORT
|
||||
[ ! $FS_ID ] && ABORT
|
||||
|
||||
local MOUNT_POINT="$AWS__EFS__LOCAL_MOUNT_POINT/$FS_ID"
|
||||
[ -d "$MOUNT_POINT" ] && sudo rmdir "$MOUNT_POINT" >/dev/null 2>&1
|
||||
[ -d "$MOUNT_POINT" ] && {
|
||||
__STATUS "$FS_ID is already mounted"
|
||||
STATUS "$FS_ID is already mounted"
|
||||
exit 0
|
||||
}
|
||||
|
||||
local MOUNT_TARGETS=$(_AWS efs describe-mount-targets --file-system-id $FS_ID)
|
||||
local MOUNT_TARGETS=$(AWS efs describe-mount-targets --file-system-id $FS_ID)
|
||||
local ZONE=$(\
|
||||
echo $MOUNT_TARGETS \
|
||||
| jq -r '.[] | .[] | .AvailabilityZoneName' \
|
||||
| sort -u | __FZF 'select availability zone'\
|
||||
| sort -u | FZF 'select availability zone'\
|
||||
)
|
||||
[ ! $ZONE ] && __ABORT
|
||||
[ ! $ZONE ] && ABORT
|
||||
|
||||
local MOUNT_IP=$(\
|
||||
echo $MOUNT_TARGETS \
|
||||
@ -39,15 +42,15 @@ _EFS_CONNECT() {
|
||||
| head -n1 \
|
||||
)
|
||||
|
||||
__SUCCESS 'ready to mount!'
|
||||
__REMINDER 'your device must be connected to the appropriate VPN'
|
||||
SUCCESS 'ready to mount!'
|
||||
REMINDER 'for private file-systems, you must be connected to the appropriate VPN'
|
||||
|
||||
__STATUS "file system id : $FS_ID"
|
||||
__STATUS "availability zone : $ZONE"
|
||||
__STATUS "file system ip : $MOUNT_IP"
|
||||
__STATUS "local mount point : $MOUNT_POINT"
|
||||
STATUS "file system id : $FS_ID"
|
||||
STATUS "availability zone : $ZONE"
|
||||
STATUS "file system ip : $MOUNT_IP"
|
||||
STATUS "local mount point : $MOUNT_POINT"
|
||||
|
||||
__Yn 'proceed?' || __ABORT
|
||||
Yn 'proceed?' || ABORT
|
||||
|
||||
sudo mkdir $MOUNT_POINT \
|
||||
&& sudo mount \
|
||||
@ -55,12 +58,12 @@ _EFS_CONNECT() {
|
||||
-o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport \
|
||||
$MOUNT_IP:/ \
|
||||
"$MOUNT_POINT" \
|
||||
&& __SUCCESS "mounted at '$MOUNT_POINT'" \
|
||||
&& SUCCESS "mounted at '$MOUNT_POINT'" \
|
||||
|| {
|
||||
sudo rmdir $MOUNT_POINT >/dev/null 2>&1
|
||||
__FAIL 2 "unable to mount '$FS_ID'"
|
||||
FAIL 2 "unable to mount '$FS_ID'"
|
||||
}
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_EFS_CONNECT
|
||||
EFS_CONNECT $@
|
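The mount options shown in the hunk above are the usual NFS settings for EFS. A standalone equivalent of just the mount step, with hypothetical values; the `-t nfs` flag is an assumption since the filesystem type is not visible in this hunk, and the mount-target IP normally comes from `aws efs describe-mount-targets`:

#!/bin/zsh
# standalone EFS mount sketch (hypothetical IP and mount point)
MOUNT_IP=10.0.1.23
MOUNT_POINT=/mnt/efs/fs-0123456789abcdef0

sudo mkdir -p "$MOUNT_POINT"
sudo mount -t nfs \
	-o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport \
	"$MOUNT_IP:/" "$MOUNT_POINT"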
37  zsh/cloud/aws/efs/unmount  Executable file
@@ -0,0 +1,37 @@
#!/bin/zsh
DEPENDENCIES+=(jq)
REQUIRED_ENV+=(AWS__EFS__LOCAL_MOUNT_POINT)

use cloud/aws/cli

CHECK_ENVIRONMENT
#####################################################################

EFS_DISCONNECT() {
	[ ! -d "$AWS__EFS__LOCAL_MOUNT_POINT" ] && {
		STATUS 'no efs currently mounted'
		exit 0
	}

	local MOUNTED=$(ls "$AWS__EFS__LOCAL_MOUNT_POINT")
	[ ! $MOUNTED ] && {
		STATUS 'no efs currently mounted'
		exit 0
	}

	GETSUDO || exit 1


	local SELECTED=$(echo $MOUNTED | FZF 'select a file system to unmount')
	[ ! $SELECTED ] && ABORT

	local EFS="$AWS__EFS__LOCAL_MOUNT_POINT/$SELECTED"
	STATUS "unmounting '$SELECTED'"
	sudo umount $EFS >/dev/null 2>&1
	sudo rmdir $EFS \
		&& SUCCESS "done" \
		|| FAIL 2 "failed to unmount '$EFS'"
}

#####################################################################
EFS_DISCONNECT $@
10  zsh/cloud/aws/eks/login  Executable file
@@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use cloud/aws/eks

CHECK_ENVIRONMENT
#####################################################################

EKS_CLUSTER_LOGIN $@
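EKS_CLUSTER_LOGIN comes from the `use cloud/aws/eks` import and is not shown here. A hedged sketch modeled on the removed standalone eks login script earlier in this diff (the helper names and STATUS/SUCCESS/ERROR/ABORT utilities are assumptions):

#!/bin/zsh
# hedged sketch of the imported EKS_CLUSTER_LOGIN helper (names are assumptions)
EKS_CLUSTER_LOGIN() {
	local CLUSTER_NAME=$(
		aws --profile "$AWS_PROFILE" --region "$AWS_REGION" --output json \
			eks list-clusters \
			| jq -r '.[] | .[]' \
			| FZF 'select a cluster'
	)
	[ ! $CLUSTER_NAME ] && ABORT

	STATUS "updating kubeconfig for '$CLUSTER_NAME'"
	aws --profile "$AWS_PROFILE" --region "$AWS_REGION" \
		eks update-kubeconfig --name $CLUSTER_NAME \
		&& SUCCESS "kubeconfig updated with '$CLUSTER_NAME'" \
		|| ERROR "failed to update kubeconfig; do you have permissions to access '$CLUSTER_NAME'?"
}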
@ -1,14 +1,18 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
CREATE_BACKUP() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
__RUN_SCWRYPT 'zsh/db/postgres/pg_dump' -- \
|
||||
PG_DUMP \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
||||
@ -17,6 +21,5 @@ RDS_INTERACTIVE_LOGIN() {
|
||||
;
|
||||
}
|
||||
|
||||
|
||||
#####################################################################
|
||||
RDS_INTERACTIVE_LOGIN $@
|
||||
CREATE_BACKUP $@
|
@ -1,14 +1,18 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
__RUN_SCWRYPT 'zsh/db/interactive/postgres' -- \
|
||||
POSTGRES__LOGIN_INTERACTIVE \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
@ -1,14 +1,18 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use cloud/aws/rds
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
RDS_INTERACTIVE_LOGIN() {
|
||||
LOAD_BACKUP() {
|
||||
local DB_HOST DB_PORT DB_NAME DB_USER DB_PASS
|
||||
GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
RDS__GET_DATABASE_CREDENTIALS $@ || return 1
|
||||
|
||||
__RUN_SCWRYPT 'zsh/db/postgres/pg_restore' -- \
|
||||
PG_RESTORE \
|
||||
--host $DB_HOST \
|
||||
--port $DB_PORT \
|
||||
--name $DB_NAME \
|
||||
@ -19,4 +23,4 @@ RDS_INTERACTIVE_LOGIN() {
|
||||
|
||||
|
||||
#####################################################################
|
||||
RDS_INTERACTIVE_LOGIN $@
|
||||
LOAD_BACKUP $@
|
@ -1,21 +1,22 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=(cli53)
|
||||
REQUIRED_ENV+=(AWS_PROFILE)
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
_ROUTE53_BACKUP() {
|
||||
ROUTE53_BACKUP() {
|
||||
local BACKUP_PATH="$SCWRYPTS_OUTPUT_PATH/$ENV_NAME/aws-dns-backup/$(date '+%Y-%m-%d')"
|
||||
mkdir -p $BACKUP_PATH >/dev/null 2>&1
|
||||
|
||||
local DOMAIN
|
||||
local JOBS=()
|
||||
for DOMAIN in $(_ROUTE53_GET_DOMAINS)
|
||||
for DOMAIN in $(ROUTE53_GET_DOMAINS)
|
||||
do
|
||||
( __STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \
|
||||
( STATUS "creating '$BACKUP_PATH/$DOMAIN.txt'" \
|
||||
&& cli53 export --profile $AWS_PROFILE $DOMAIN > "$BACKUP_PATH/$DOMAIN.txt" \
|
||||
&& __SUCCESS "backed up '$DOMAIN'" \
|
||||
|| __ERROR "failed to back up '$DOMAIN'" \
|
||||
&& SUCCESS "backed up '$DOMAIN'" \
|
||||
|| ERROR "failed to back up '$DOMAIN'" \
|
||||
) &
|
||||
JOBS+=$!
|
||||
done
|
||||
@ -24,7 +25,7 @@ _ROUTE53_BACKUP() {
|
||||
for P in ${JOBS[@]}; do wait $P >/dev/null 2>&1; done
|
||||
}
|
||||
|
||||
_ROUTE53_GET_DOMAINS() {
|
||||
ROUTE53_GET_DOMAINS() {
|
||||
cli53 list --profile $AWS_PROFILE \
|
||||
| awk '{print $2;}' \
|
||||
| sed '1d; s/\.$//'\
|
||||
@ -32,4 +33,4 @@ _ROUTE53_GET_DOMAINS() {
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_ROUTE53_BACKUP
|
||||
ROUTE53_BACKUP
|
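The route53 backup above iterates the domains returned by `cli53 list` and exports each hosted zone to a dated file. A standalone equivalent of the per-domain export, with a hypothetical output path:

#!/bin/zsh
# standalone equivalent of the per-domain route53 export (hypothetical path);
# requires cli53 and a configured AWS_PROFILE
BACKUP_PATH=./aws-dns-backup/$(date '+%Y-%m-%d')
mkdir -p $BACKUP_PATH

for DOMAIN in $(cli53 list --profile $AWS_PROFILE | awk '{print $2;}' | sed '1d; s/\.$//')
do
	cli53 export --profile $AWS_PROFILE $DOMAIN > "$BACKUP_PATH/$DOMAIN.txt"
done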
10  zsh/cloud/media-sync/pull  Executable file
@@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use cloud/media-sync

CHECK_ENVIRONMENT
#####################################################################

MEDIA_SYNC__PULL $@
10  zsh/cloud/media-sync/push  Executable file
@@ -0,0 +1,10 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use cloud/media-sync

CHECK_ENVIRONMENT
#####################################################################

MEDIA_SYNC__PUSH $@
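MEDIA_SYNC__PULL and MEDIA_SYNC__PUSH come from the `use cloud/media-sync` import, which is not shown in this diff. A hedged sketch of how they likely work, modeled on the removed zsh/aws/s3 media scripts earlier in this diff; the function names and the MEDIA_SYNC__S3_BUCKET / MEDIA_SYNC__TARGETS variables are assumptions:

#!/bin/zsh
# hedged sketch of the media-sync helpers (names and env variables are assumptions)
MEDIA_SYNC__SYNC_ONE() {
	local ACTION="$1" TARGET="$2"
	local REMOTE="s3://$MEDIA_SYNC__S3_BUCKET/$TARGET"
	local LOCAL="$HOME/$TARGET"

	local A B
	case $ACTION in
		push ) A="$LOCAL";  B="$REMOTE" ;;
		pull ) A="$REMOTE"; B="$LOCAL"  ;;
		*    ) ERROR "unknown action '$ACTION'"; return 1 ;;
	esac

	STATUS "${ACTION}ing $TARGET"
	aws s3 sync "$A" "$B" \
		&& SUCCESS "$TARGET up-to-date" \
		|| { ERROR "unable to sync $TARGET (see above)"; return 1; }
}

MEDIA_SYNC__PULL() {
	local TARGET
	for TARGET in $(echo $MEDIA_SYNC__TARGETS | sed 's/,/\n/g')
	do MEDIA_SYNC__SYNC_ONE pull $TARGET
	done
}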
@ -1,31 +0,0 @@
|
||||
#####################################################################
|
||||
|
||||
source ${0:a:h}/../global/common.zsh
|
||||
source ${0:a:h}/utils/utils.module.zsh \
|
||||
|| { [ $DONT_EXIT ] && return 1 || exit 1; }
|
||||
|
||||
#####################################################################
|
||||
|
||||
__GET_ENV_FILES() { find $SCWRYPTS_CONFIG_PATH/env -maxdepth 1 -type f | sort -r }
|
||||
[ ! "$(__GET_ENV_FILES)" ] && {
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/dev"
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/local"
|
||||
cp $__ENV_TEMPLATE "$SCWRYPTS_CONFIG_PATH/env/prod"
|
||||
}
|
||||
|
||||
__GET_ENV_NAMES() { __GET_ENV_FILES | sed 's/.*\///'; }
|
||||
__GET_ENV_FILE() { echo "$SCWRYPTS_CONFIG_PATH/env/$1"; }
|
||||
|
||||
__SELECT_OR_CREATE_ENV() { __GET_ENV_NAMES | __FZF_TAIL 'select/create an environment'; }
|
||||
__SELECT_ENV() { __GET_ENV_NAMES | __FZF 'select an environment'; }
|
||||
|
||||
#####################################################################
|
||||
|
||||
__GET_AVAILABLE_SCRIPTS() {
|
||||
cd $SCWRYPTS_ROOT;
|
||||
find . -mindepth 2 -type f -executable \
|
||||
| grep -v '\.git' \
|
||||
| grep -v 'node_modules' \
|
||||
| sed 's/^\.\///; s/\.[^.]*$//' \
|
||||
;
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
DEFAULT_CONFIG="${0:a:h}/default.conf.zsh"
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
SAFE_SYMLINKS=1
|
||||
|
||||
# in case config.dotfile.zsh is sourced... allow user to provide initial config ;)
|
||||
[ ! $CONFIG__USER_SETTINGS ] \
|
||||
&& CONFIG__USER_SETTINGS="$SCWRYPTS_CONFIG_PATH/config.dotfile.zsh"
|
||||
[ ! -f "$CONFIG__USER_SETTINGS" ] && cp "$DEFAULT_CONFIG" "$CONFIG__USER_SETTINGS"
|
||||
source $CONFIG__USER_SETTINGS
|
@ -1,6 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
__EDIT "$CONFIG__USER_SETTINGS"
|
@ -1,36 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
SETUP_SYMLINKS() {
|
||||
while read SYMLINK
|
||||
do
|
||||
SETUP_SYMLINK $(echo $SYMLINK | awk '{print $1;}') $(echo $SYMLINK | awk '{print $2}')
|
||||
done < <(echo $SYMLINKS | sed -n '/^[^#]/p')
|
||||
}
|
||||
|
||||
SETUP_SYMLINK() {
|
||||
[ ! $2 ] && __FAIL 1 'must provide SOURCE_CONFIG and TARGET_CONFIG'
|
||||
|
||||
local SOURCE_CONFIG="$1"
|
||||
[ ! -f "$SOURCE_CONFIG" ] && __FAIL 2 "no such file '$SOURCE_CONFIG'"
|
||||
|
||||
local TARGET_CONFIG="$HOME/.config/$2"
|
||||
|
||||
[ ! -d $(dirname "$TARGET_CONFIG") ] && mkdir -p $(dirname "$TARGET_CONFIG")
|
||||
|
||||
[ -f "$TARGET_CONFIG" ] && {
|
||||
[[ $SAFE_SYMLINKS -eq 1 ]] && mv "$TARGET_CONFIG" "$TARGET_CONFIG.bak"
|
||||
[[ $SAFE_SYMLINKS -eq 0 ]] && rm "$TARGET_CONFIG"
|
||||
}
|
||||
|
||||
ln -s "$SOURCE_CONFIG" "$TARGET_CONFIG" \
|
||||
&& __SUCCESS "successfully linked '$(basename $(dirname $TARGET_CONFIG))/$(basename $TARGET_CONFIG)'" \
|
||||
|| __FAIL 3 "failed to create link '$TARGET_CONFIG'" \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
SETUP_SYMLINKS $@
|
@ -1,10 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
__STATUS 'updating all config files and links'
|
||||
__RUN_SCWRYPT zsh/config/symlinks || exit 1
|
||||
__RUN_SCWRYPT zsh/config/terminfo || exit 2
|
||||
__SUCCESS 'finished updating config files and links'
|
@ -1,24 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
||||
|
||||
|
||||
GET_POSTGRES_LOGIN_ARGS() {
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case $1 in
|
||||
--host | -h ) _HOST="$2"; shift 2 ;;
|
||||
--name | -d ) _NAME="$2"; shift 2 ;;
|
||||
--pass | -w ) _PASS="$2"; shift 2 ;;
|
||||
--port | -p ) _PORT="$2"; shift 2 ;;
|
||||
--user | -U ) _USER="$2"; shift 2 ;;
|
||||
* ) shift 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
[ ! $_HOST ] && _HOST=127.0.0.1
|
||||
[ ! $_NAME ] && _NAME=postgres
|
||||
[ ! $_PORT ] && _PORT=5432
|
||||
[ ! $_USER ] && _USER=postgres
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,29 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
pgcli
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_LOGIN_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
__STATUS "performing login : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "working directory : $DATA_DIR"
|
||||
|
||||
PGPASSWORD="$_PASS" pgcli \
|
||||
--host $_HOST \
|
||||
--port $_PORT \
|
||||
--user $_USER \
|
||||
--dbname $_NAME \
|
||||
;
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
_LOGIN_POSTGRES $@
|
@ -1,4 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
9  zsh/db/postgres/interactive-pgcli  Executable file
@@ -0,0 +1,9 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use db/postgres

CHECK_ENVIRONMENT
#####################################################################
POSTGRES__LOGIN_INTERACTIVE $@
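POSTGRES__LOGIN_INTERACTIVE is imported via `use db/postgres` and is not shown here. A hedged sketch modeled on the removed interactive postgres script earlier in this diff (the login-argument parsing is elided, and the helper names are assumptions):

#!/bin/zsh
# hedged sketch of POSTGRES__LOGIN_INTERACTIVE (names are assumptions)
POSTGRES__LOGIN_INTERACTIVE() {
	local _HOST=127.0.0.1 _PORT=5432 _NAME=postgres _USER=postgres _PASS
	# ... --host/--port/--name/--user/--pass argument parsing omitted ...

	local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST"
	[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
	cd $DATA_DIR

	STATUS "performing login : $_USER@$_HOST:$_PORT/$_NAME"
	PGPASSWORD="$_PASS" pgcli \
		--host $_HOST \
		--port $_PORT \
		--user $_USER \
		--dbname $_NAME \
		;
}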
@ -1,44 +1,9 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
pg_dump
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
BACKUP_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_dump"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
local OUTPUT_FILE="$DATA_DIR/$_NAME.dump"
|
||||
[ -f $OUTPUT_FILE ] && {
|
||||
local BACKUP_COUNT=$(ls "$DATA_DIR/$_NAME."*".dump" | wc -l)
|
||||
ls "$DATA_DIR/$_NAME."*".dump"
|
||||
|
||||
__INFO "discovered previous dump for '$_HOST/$_NAME'"
|
||||
__INFO "backing up previous dump to '$_NAME.$BACKUP_COUNT.dump'"
|
||||
|
||||
mv "$OUTPUT_FILE" "$DATA_DIR/$_NAME.$BACKUP_COUNT.dump"
|
||||
}
|
||||
|
||||
__STATUS "making backup of : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "output file : $OUTPUT_FILE"
|
||||
|
||||
PGPASSWORD="$_PASS" pg_dump \
|
||||
--verbose \
|
||||
--format custom \
|
||||
--host "$_HOST" \
|
||||
--port "$_PORT" \
|
||||
--username "$_USER" \
|
||||
--dbname "$_NAME" \
|
||||
--file "$OUTPUT_FILE" \
|
||||
&& { __SUCCESS "finished backup of '$_HOST/$_NAME'"; __SUCCESS "saved to '$OUTPUT_FILE'"; } \
|
||||
|| { __ERROR "error creating backup for '$_HOST/$_NAME' (see above)"; return 1; }
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
BACKUP_POSTGRES $@
|
||||
PG_DUMP $@
|
||||
|
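The rewritten script above delegates to a PG_DUMP helper from `use db/postgres`; the removed body in the same hunk shows the underlying invocation. A standalone equivalent with hypothetical connection values, producing a custom-format dump suitable for pg_restore:

#!/bin/zsh
# standalone pg_dump equivalent of the backup above (hypothetical credentials)
_HOST=127.0.0.1 _PORT=5432 _NAME=postgres _USER=postgres _PASS=hunter2
OUTPUT_FILE="./$_NAME.dump"

PGPASSWORD="$_PASS" pg_dump \
	--verbose \
	--format custom \
	--host "$_HOST" \
	--port "$_PORT" \
	--username "$_USER" \
	--dbname "$_NAME" \
	--file "$OUTPUT_FILE"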
@ -1,55 +1,9 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
pg_dump
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
DEPENDENCIES+=()
|
||||
REQUIRED_ENV+=()
|
||||
|
||||
use db/postgres
|
||||
|
||||
CHECK_ENVIRONMENT
|
||||
#####################################################################
|
||||
|
||||
BACKUP_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER
|
||||
GET_POSTGRES_LOGIN_ARGS $@
|
||||
|
||||
local DATA_DIR="$SCWRYPTS_DATA_PATH/db/$_HOST/$_NAME/pg_restore"
|
||||
[ ! -d $DATA_DIR ] && mkdir -p $DATA_DIR
|
||||
cd $DATA_DIR
|
||||
|
||||
local INPUT_FILE="$DATA_DIR/$_NAME.dump"
|
||||
|
||||
[ ! -f $INPUT_FILE ] && {
|
||||
local DUMP="$(dirname $DATA_DIR)/pg_dump/$_NAME.dump"
|
||||
__STATUS $DUMP
|
||||
ls $DUMP
|
||||
|
||||
[ -f "$DUMP" ] && {
|
||||
__SUCCESS "discovered previous scwrypts dump"
|
||||
__SUCCESS "$DUMP"
|
||||
__Yn 'restore from this backup?' && INPUT_FILE="$DUMP"
|
||||
}
|
||||
|
||||
[ ! -f "$INPUT_FILE" ] && {
|
||||
__STATUS 'place backup in the following location:'
|
||||
__STATUS "$INPUT_FILE"
|
||||
}
|
||||
|
||||
while [ ! -f $INPUT_FILE ]; do sleep 1; done
|
||||
}
|
||||
|
||||
__STATUS "backup file : $DATA_DIR"
|
||||
__STATUS "database : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
|
||||
PGPASSWORD="$_PASS" pg_restore \
|
||||
--verbose \
|
||||
--single-transaction \
|
||||
--format custom \
|
||||
--host "$_HOST" \
|
||||
--port "$_PORT" \
|
||||
--username "$_USER" \
|
||||
--dbname "$_NAME" \
|
||||
"$INPUT_FILE" \
|
||||
&& { __SUCCESS "finished restoring backup for '$_HOST/$_NAME'"; } \
|
||||
|| { __ERROR "error restoring backup for '$_HOST/$_NAME' (see above)"; return 1; }
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
BACKUP_POSTGRES $@
|
||||
PG_RESTORE $@
|
||||
|
51  zsh/db/postgres/run-sql  Executable file
@@ -0,0 +1,51 @@
#!/bin/zsh
DEPENDENCIES+=()
REQUIRED_ENV+=()

use db/postgres

CHECK_ENVIRONMENT
#####################################################################

RUN_SQL_POSTGRES() {
	local _PASS _ARGS=()
	POSTGRES__SET_LOGIN_ARGS $@

	local INPUT_FILE="$FILENAME"

	local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
	[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR

	cd $SQL_DIR

	[[ $(ls "*.sql" 2>&1 | wc -l) -eq 0 ]] && {
		ERROR "you haven't made any SQL commands yet"
		REMINDER "add '.sql' files here: '$SQL_DIR/'"
		return 1
	}

	[ ! $INPUT_FILE ] && INPUT_FILE=$(FZF 'select a sql file to run')
	[ ! $INPUT_FILE ] && ABORT

	[ ! -f "$INPUT_FILE" ] && FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"

	STATUS "loading '$INPUT_FILE' preview..."
	LESS "$INPUT_FILE"

	STATUS "login   : $_USER@$_HOST:$_PORT/$_NAME"
	STATUS "command : '$INPUT_FILE'"

	yN 'run this command?' || ABORT

	STATUS "running '$INPUT_FILE'"

	PSQL < $INPUT_FILE \
		&& SUCCESS "finished running '$INPUT_FILE'" \
		|| FAIL 3 "something went wrong running '$INPUT_FILE' (see above)"
}

#####################################################################
WARNING
WARNING 'this function is in a beta state'
WARNING
RUN_SQL_POSTGRES $@
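A brief usage illustration for the run-sql scwrypt above: the script looks for .sql files in the scwrypts data directory and offers them through the fzf picker, so preparing a file is the only setup step (the file name and query here are hypothetical):

#!/bin/zsh
# usage illustration (hypothetical file and query): prepare a .sql file that
# run-sql will then list in its 'select a sql file to run' menu
mkdir -p "$SCWRYPTS_DATA_PATH/sql"
cat > "$SCWRYPTS_DATA_PATH/sql/count-users.sql" <<'EOF'
SELECT count(*) FROM users;
EOF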
@ -1,4 +0,0 @@
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,72 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=(
|
||||
psql
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
|
||||
_RUN_SQL_POSTGRES() {
|
||||
local _HOST _NAME _PASS _PORT _USER INPUT_FILE
|
||||
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case $1 in
|
||||
--host | -h ) _HOST="$2"; shift 2 ;;
|
||||
--name | -d ) _NAME="$2"; shift 2 ;;
|
||||
--pass | -w ) _PASS="$2"; shift 2 ;;
|
||||
--port | -p ) _PORT="$2"; shift 2 ;;
|
||||
--user | -U ) _USER="$2"; shift 2 ;;
|
||||
--file | -i ) INPUT_FILE="$2"; shift 2 ;;
|
||||
* ) shift 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
[ ! $_HOST ] && _HOST=127.0.0.1
|
||||
[ ! $_NAME ] && _NAME=postgres
|
||||
[ ! $_PORT ] && _PORT=5432
|
||||
[ ! $_USER ] && _USER=postgres
|
||||
|
||||
local SQL_DIR="$SCWRYPTS_DATA_PATH/sql"
|
||||
[ ! -d $SQL_DIR ] && mkdir -p $SQL_DIR
|
||||
cd $SQL_DIR
|
||||
|
||||
[[ $(ls "*.sql" 2>&1 | wc -l) -eq 0 ]] && {
|
||||
__ERROR "you haven't made any SQL commands yet"
|
||||
__REMINDER "add '.sql' files here: '$SQL_DIR/'"
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ ! $INPUT_FILE ] && INPUT_FILE=$(\
|
||||
__FZF 'select a sql file to run'
|
||||
)
|
||||
[ ! $INPUT_FILE ] && __ABORT
|
||||
|
||||
[ ! -f $INPUT_FILE ] && {
|
||||
__FAIL 2 "no such sql file '$SQL_DIR/$INPUT_FILE'"
|
||||
}
|
||||
|
||||
__STATUS "loading $INPUT_FILE preview..."
|
||||
_LESS $INPUT_FILE
|
||||
|
||||
__STATUS "login : $_USER@$_HOST:$_PORT/$_NAME"
|
||||
__STATUS "command : ./$INPUT_FILE"
|
||||
|
||||
__yN 'run this command?' || __ABORT
|
||||
|
||||
__STATUS "running './$INPUT_FILE'"
|
||||
PGPASSWORD="$_PASS" psql \
|
||||
-h $_HOST \
|
||||
-p $_PORT \
|
||||
-U $_USER \
|
||||
-d $_NAME \
|
||||
< $INPUT_FILE \
|
||||
&& __SUCCESS "finished running './$INPUT_FILE'" \
|
||||
|| __FAIL 3 "something went wrong running './$INPUT_FILE' (see above)"
|
||||
}
|
||||
|
||||
#####################################################################
|
||||
__WARNING
|
||||
__WARNING 'this function is in a beta state'
|
||||
__WARNING
|
||||
_RUN_SQL_POSTGRES $@
|
19  zsh/docker/cleanup  Executable file
@@ -0,0 +1,19 @@
#!/bin/zsh
DEPENDENCIES+=(docker)
REQUIRED_ENV+=()

CHECK_ENVIRONMENT
#####################################################################

DOCKER_CLEAN() {
	WARNING 'this will prune all docker resources from the current machine'
	WARNING 'pruned resources are PERMANENTLY DELETED'
	yN 'continue?' || return 1

	SUCCESS "CONTAINER : $(docker container prune -f 2>/dev/null | tail -n 1)"
	SUCCESS "IMAGE     : $(docker image prune -f 2>/dev/null | tail -n 1)"
	SUCCESS "VOLUME    : $(docker volume prune -f 2>/dev/null | tail -n 1)"
}

#####################################################################
DOCKER_CLEAN $@
@ -1,6 +0,0 @@
|
||||
_DEPENDENCIES+=(
|
||||
git
|
||||
)
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/../common.zsh
|
||||
#####################################################################
|
@ -1,6 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
__RUN_SCWRYPT zsh/git/package/install -- --only-build $@
|
@ -1,6 +0,0 @@
|
||||
#!/bin/zsh
|
||||
_DEPENDENCIES+=()
|
||||
_REQUIRED_ENV+=()
|
||||
source ${0:a:h}/common.zsh
|
||||
#####################################################################
|
||||
__RUN_SCWRYPT zsh/git/package/install -- --only-pull $@
|
Some files were not shown because too many files have changed in this diff.