2016-09-06 14:43:49 +03:00
|
|
|
from fabric.api import env, task
|
|
|
|
|
# # from jinja2 import Environment
|
|
|
|
|
import os
|
2018-10-17 23:08:51 +03:00
|
|
|
from .utils import upload_template as utils_upload_template
|
|
|
|
|
from .utils import loggify, print_console, booleanize
|
2016-09-06 18:53:13 +03:00
|
|
|
# from utils import prompt_continue
|
2016-09-06 14:43:49 +03:00
|
|
|
from getpass import getpass
|
2016-09-06 18:53:13 +03:00
|
|
|
import fabric.operations as fabric_ops
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
NOTE = """
|
|
|
|
|
\n\n\n
|
|
|
|
|
NOTE: you MUST deactivate the gunicorn or supervisor service if you want
|
|
|
|
|
to make changes to the database, otherwise, the changes will not run and
|
|
|
|
|
you will NOT BE ABLE TO SYNC. SO TURN THE FUCKING THING OFF\n\n\n"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def generate_sql(script_name):
    """Render a database SQL template and upload it to the remote build dir.

    script_name - attribute name under configuration.templates.database
                  (e.g. 'init', 're_init', 'drop_db', 'drop_all')

    In debug mode (env.debug) the upload helper is called with debug=True,
    which returns the command it would have executed instead of running it;
    the command is then logged rather than executed.
    """
    configuration = env.config

    if env.debug:
        logger = loggify('database', 'generate_sql')

    # destination: <remote template path>/build/<dst filename>
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)

    # source template lives under <local template path>/files
    template_file = getattr(configuration.templates.database, script_name).src
    template_dir = os.path.join(
        configuration.templates.database.path.local,
        'files')

    context = dict()
    context['db_name'] = configuration.server.database.name
    context['db_user'] = configuration.server.database.user
    context['db_password'] = configuration.server.database.password

    if env.debug:
        logger.debug("context = %s" % context)
        logger.debug("build_path = %s" % build_path)
        logger.debug(
            "db_name : %s " % configuration.server.database.name)
        logger.debug(
            "db_user : %s " % configuration.server.database.user)
        logger.debug(
            "db_password : %s " % configuration.server.database.password)

    # Shared kwargs for both the dry-run and the real upload; previously
    # these two call sites duplicated every argument and differed only by
    # debug=True.
    upload_kwargs = dict(
        filename=template_file,
        destination=build_path,
        context=context,
        use_jinja=True,
        use_sudo=False,
        backup=False,
        template_dir=template_dir)

    if env.debug:
        # with debug=True, this returns a string with the command as it
        # would have been executed
        upload_msg = utils_upload_template(debug=True, **upload_kwargs)
        logger.debug(upload_msg)
    else:
        utils_upload_template(**upload_kwargs)

    print(NOTE)
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def execute_sql(script_name, add_dbname=True, is_admin=False):
    """Run the named generated SQL script against the remote database.

    script_name - attribute name under configuration.templates.database
    add_dbname  - when True connect to the project database, otherwise to
                  the "postgres" maintenance database (used when the project
                  database may not exist yet)
    is_admin    - run as the postgres admin user instead of the app user

    In debug mode (env.debug) the psql command is logged, not executed.
    """
    if env.debug:
        logger = loggify('database', 'execute_sql')

    cfg = env.config
    db = cfg.server.database

    # path of the previously rendered script on the remote host
    script = getattr(cfg.templates.database, script_name)
    sql_path = os.path.join(
        cfg.templates.database.path.remote,
        'build',
        script.dst)

    db_name = db.name if add_dbname is True else "postgres"
    user = db.admin.user if is_admin else db.user

    # note: the double space between the user and -f is kept as-is
    psql_command = (
        "psql -h {host} -p {port} -U {user} "
        " -f {sqlfile} {db_name}".format(
            db_name=db_name,
            host=db.host,
            port=db.port,
            user=user,
            sqlfile=sql_path,))

    if env.debug:
        logger.debug("db_name = %s" % db_name)
        logger.debug("run( %s ) " % psql_command)
        logger.debug("configuration.server.database.host: %s" %
                     db.host)
    else:
        run_database_command(psql_command, user)

    print(NOTE)
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def generate():
    """Upload every database SQL script (init, re_init, drop_db, drop_all)."""
    for script in ('init', 're_init', 'drop_db', 'drop_all'):
        generate_sql(script)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def clear_scripts():
    """Placeholder task -- not implemented yet."""
    print("this does nothing, the code isn't here")
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def init():
    """Run the database initialization script.

    Connects without naming a database (add_dbname=False) because the init
    script is what creates the database -- it does not exist yet -- and
    runs as the admin user.
    """
    execute_sql('init', add_dbname=False, is_admin=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def re_init():
    """Re-initialize the database.

    Drops the database and recreates it; the original user is untouched.
    """
    execute_sql('re_init', add_dbname=False, is_admin=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def drop_all():
    """Drop both the database and the database user."""
    execute_sql('drop_all', add_dbname=False, is_admin=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def drop_db():
    """Drop only the database; the user is left in place."""
    execute_sql('drop_db', add_dbname=False, is_admin=True)
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def commandline(dbuser='default'):
    """
    logs into command line of the postgres database of the branch

    dbuser - set to "default" but can also be set to "admin"

    the admin user is what it says, I have both of these in case I need to
    switch between the master user for the entire postgres install or the
    owner of the particular database

    Raises ValueError if dbuser is neither 'admin' nor 'default'.
    """
    configuration = env.config

    if env.debug:
        logger = loggify('database', 'commandline')

    db_name = configuration.server.database.name
    host = configuration.server.database.host
    port = configuration.server.database.port

    print("debug -- dbuser: %s" % dbuser)

    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        # bugfix: an unrecognized value previously fell through both
        # branches and raised a confusing NameError on the first use of
        # "user"; fail fast with a clear message instead
        raise ValueError(
            "dbuser must be 'admin' or 'default', got %r" % dbuser)

    if env.debug:
        logger.debug("branch: %s" % configuration.project.branch)
        logger.debug("host : %s" % host)
        logger.debug("port : %s" % port)
        logger.debug("user : %s" % user)
        logger.debug("name : %s" % db_name)

    cmd_commandline = "psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db_name,
        host=host,
        port=port,
        user=user
    )

    print("debug -- cmd_commandline: %s" % cmd_commandline)

    run_database_command(cmd_commandline, user)
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def backup(dbuser='default', backup_file=None, branch=None,
           datadump_only=False, hide_output=False):
    """
    creates a dump of the database for backup and storage

    dbuser - set to "default" but can also be set to "admin"
    backup_file - specify name of the backup_file otherwise use default value
    branch - what branch to apply this function to (default env.current)
    datadump_only - only output the results of pg_dump
    hide_output - currently ignored; see the NOTE(review) below

    the admin user is what it says, I have both of these in case I need to
    switch between the master user for the entire postgres install or the
    owner of the particular database

    Raises ValueError if dbuser is neither 'admin' nor 'default'.

    NOTE:
    this works because the function takes the branch and pulls out the
    configuration file based on the branch, and then uses THAT information
    to pass the host, user and port info into the pg_dump command
    """
    configuration = env.config

    if branch:
        import initialize
        configuration = initialize.environment(branch)

    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host

    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        # bugfix: an unrecognized value previously fell through and raised
        # a NameError on the first use of "user"
        raise ValueError(
            "dbuser must be 'admin' or 'default', got %r" % dbuser)

    import time
    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))

    if not backup_file:
        backup_file = "backup.sql"

    # the dated filename is informational only for now (printed, not used)
    timefilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file_time)
    print("debug timefilename -- %s" % timefilename)

    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file)

    # kwargs arrive as strings from the fab command line
    datadump_only = booleanize(datadump_only)

    print("dumpfilename: %s" % dumpfilename)

    if not datadump_only:
        # NOTE
        # the "tee" command takes input and pipes it to a file and to
        # standard output
        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
            " {db_name} | tee {dumpfilename}".format(
                db_name=db_name,
                host=host,
                port=port,
                user=user,
                dumpfilename=dumpfilename
            )
    else:
        # this only spits output, it does not save a file
        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
            " {db_name}".format(
                db_name=db_name,
                host=host,
                port=port,
                user=user,
            )

    # NOTE(review): this unconditionally clobbers the hide_output
    # parameter -- looks like a debugging leftover; kept to preserve
    # current behavior, but remove it to honor the caller's value
    hide_output = False

    print("cmd_pg_dump: %s" % cmd_pg_dump)

    output = run_database_command(cmd_pg_dump, user, hide=hide_output)

    return output
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def restore(dbuser='default', backup_file=None, reinitialize=True):
    """
    restores the database from a previously created dump file

    (docstring fixed: it used to say "creates a dump", copy-pasted from
    backup())

    dbuser - set to "default" but can also be set to "admin"
    backup_file - alternate name for the backup_file to restore from
    reinitialize - if supervisor needs to be restarted, do it

    the admin user is what it says, I have both of these in case I need to
    switch between the master user for the entire postgres install or the
    owner of the particular database

    Raises ValueError if dbuser is neither 'admin' nor 'default'.
    """
    configuration = env.config

    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host

    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        # bugfix: an unrecognized value previously fell through and raised
        # a NameError on the first use of "user"
        raise ValueError(
            "dbuser must be 'admin' or 'default', got %r" % dbuser)

    print_console("dbuser = %s" % dbuser)

    import time
    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))

    if not backup_file:
        backup_file = "backup.sql"

    # the dated filename is informational only for now (printed, not used)
    timefilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file_time)
    print("debug -- timefilename: %s" % timefilename)

    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file)

    # bugfix: the original string concatenation produced
    # "-d <db_name><dumpfilename" with no space before the shell redirect;
    # the shell tolerated it, but make the command unambiguous
    cmd_pg_restore = "psql -h {host} -p {port} -U {user} -d {db_name}" \
        " < {dumpfilename}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            dumpfilename=dumpfilename
        )

    import supervisor
    has_supervisor = supervisor.status()

    if reinitialize:
        # stop the app server (if running) and rebuild the empty database
        # before loading the dump
        if has_supervisor:
            supervisor.stop()
        re_init()

    run_database_command(cmd_pg_restore, user)

    if has_supervisor and reinitialize:
        supervisor.start()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def sync(src):
    """
    sync database from source branch to current branch

    src - the database source branch

    Sequence: dump the source branch, safety-dump the destination branch,
    push the source dump onto the destination host, then re-init and
    restore the destination database from it.
    """
    configuration = env.config
    import initialize

    # load the configuration of the branch we are pulling data FROM;
    # the currently active env.config is the destination
    branch_src = src
    configuration_src = initialize.environment(branch_src)
    configuration_dst = configuration

    print("branch_src: %s" % configuration_src.project.branch)
    print("branch_dst: %s" % configuration_dst.project.branch)

    # backup all files with names that wont interfere
    output_name_dst = "output_sync_dst.sql"
    backup_name_dst = "backup_sync_dst.sql"
    backup_name_src = "backup_sync_src.sql"

    print("output_src = backup(")
    print("\tbackup_file=%s," % backup_name_src)
    print("\tbranch=%s)" % configuration_src.project.branch)

    # dump the source database to our {destination}/backups/database
    print("backup up src: %s" % configuration_src.project.branch)

    # backup() returns the pg_dump output as a string (tee'd to stdout)
    output_src = backup(
        backup_file=backup_name_src,
        branch=configuration_src.project.branch)

    print("backing up dst: %s" % configuration_dst.project.branch)

    # safety copy of the destination database before it is overwritten
    backup(
        backup_file=backup_name_dst,
        branch=configuration_dst.project.branch)

    #
    # create a file with the source database on the destination branch

    output_path_dst = os.path.join(
        configuration_dst.paths.server.backups.database,
        output_name_dst)

    # NOTE(review): StringIO here is the Python 2 module (Fabric 1 era);
    # on Python 3 this import would need to come from io -- confirm runtime
    from StringIO import StringIO
    output_iostring = StringIO(output_src)
    fabric_ops.put(output_iostring, output_path_dst)

    #
    # stop the server, reinit the database and restore with the new
    # information

    print_console("in order to do this properly you must stop the django.run"
                  " or supervisor server first")

    restore(backup_file=output_name_dst, reinitialize=True)
|
2016-09-06 14:43:49 +03:00
|
|
|
|
2016-09-06 18:53:13 +03:00
|
|
|
|
|
|
|
|
def run_database_command(cmd_string, username, hide=False):
    """
    runs a bash shell command, but prefaces it by getting the
    database password

    cmd_string - the command to be executed
    username - database user the password prompt is shown for
    hide - hides the fabric side output default False

    Returns the fabric run() output, or None in debug mode.

    Note: don't use "hide" with commmandline because it will hide all
    output and make commandline useless
    """
    configuration = env.config

    msg_pass = 'Enter your database password for {user}@{branch}:'.format(
        user=username, branch=configuration.project.branch)

    psqlpass = getpass(msg_pass)

    # NOTE(security): embedding the password as PGPASSWORD=... puts it on
    # the remote command line, where it can show up in shell history and
    # process listings; consider a ~/.pgpass file instead
    cmd_full = "PGPASSWORD={psqlpass} {cmd_string}".format(
        psqlpass=psqlpass,
        cmd_string=cmd_string,
    )

    if env.debug:
        # print out the full command and return without executing
        logger = loggify('database', 'run_database_command')
        logger.debug("run_database_command = %s" % cmd_full)
        return

    # NOTE
    # DO NOT USE "with hide"
    # one of the commands is to access to psql commandline
    # with hide('stdout') blocks all messages that get returned
    # from there so leave it alone

    if hide:
        # bugfix: import under an alias so we do not shadow the boolean
        # "hide" parameter (the original import rebound the name)
        from fabric.context_managers import hide as fabric_hide
        with fabric_hide('running', 'stdout', 'stderr'):
            output = fabric_ops.run(cmd_full)
    else:
        output = fabric_ops.run(cmd_full)

    return output
|
2016-09-06 14:43:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_template_path(script_name, script_type):
    """Return the remote path of a database script.

    script_name - attribute name under configuration.templates.database
    script_type - 'build' for the rendered file (under build/), or
                  'template' for the source template (under files/)

    Exits the process with an error message for any other script_type.
    """
    cfg = env.config
    entry = getattr(cfg.templates.database, script_name)
    remote_root = cfg.templates.database.path.remote

    if script_type == 'build':
        path = os.path.join(remote_root, 'build', entry.dst)
    elif script_type == 'template':
        path = os.path.join(remote_root, 'files', entry.src)
    else:
        print("Error, you passed the variable %s, must pass"
              "either 'build' or 'template'" % script_type)
        import sys
        sys.exit()

    return path
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def edit(param='help'):
    """Open the built/template database conf files in the editor.

    param - one of 'build.<script>' or 'template.<script>' for the scripts
            init, re_init, drop_db and drop_all; anything else prints help.
    """
    from .maintenance import edit as maintenance_edit

    # same entries as the old hand-written dict: for each script expose
    # both the rendered file ('build') and its source ('template')
    locations = {}
    for script in ('init', 're_init', 'drop_db', 'drop_all'):
        locations['build.%s' % script] = {
            'path': get_template_path(script, 'build'),
            'desc': 'remote %s conf file' % script,
        }
        locations['template.%s' % script] = {
            'path': get_template_path(script, 'template'),
            'desc': 'remote version of %s conf template' % script,
        }

    if param in locations.keys():
        remote_path = locations[param]['path']
        maintenance_edit(remote_path=remote_path)
    else:
        print("""
"fab database.edit" automates editing files important to django whether
locally or remotely

to use this you must pass one of the editable locations in as a
parameter

currently editable locations are:
""")
        for k_loc in locations.keys():
            print("\t{0: <20} - {1}".format(
                k_loc, locations[k_loc]['desc']))

    return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@task
def test():
    """Open an interactive psql session to check database connectivity."""
    db = env.config.server.database

    cmd_string = "psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db.name,
        host=db.host,
        port=db.port,
        user=db.user
    )

    run_database_command(cmd_string, db.user)
|