# fabric/modules/database.py
# Last modified: 2016-11-08 19:33:49 +02:00
from fabric.api import env, task
# # from jinja2 import Environment
import os
from utils import upload_template as utils_upload_template
from utils import loggify, print_console, booleanize
# from utils import prompt_continue
from getpass import getpass
import fabric.operations as fabric_ops
# Warning banner printed after every generate/execute step: schema changes
# silently fail to apply (and sync breaks) while gunicorn/supervisor still
# hold connections to the database, so the operator must stop them first.
NOTE = """
\n\n\n
NOTE: you MUST deactivate the gunicorn or supervisor service if you want
to make changes to the database, otherwise, the changes will not run and
you will NOT BE ABLE TO SYNC. SO TURN THE FUCKING THING OFF\n\n\n"""
def generate_sql(script_name):
    """
    Render the SQL template named *script_name* and upload the result
    to the remote build directory.

    script_name - attribute name under configuration.templates.database
                  (e.g. 'init', 're_init', 'drop_db', 'drop_all')

    In debug mode nothing is uploaded; the command that would have been
    executed is logged instead.
    """
    configuration = env.config
    if env.debug:
        logger = loggify('database', 'generate_sql')
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    template_file = getattr(configuration.templates.database, script_name).src
    template_dir = os.path.join(
        configuration.templates.database.path.local,
        'files')
    # template context: the database credentials for this branch
    context = {
        'db_name': configuration.server.database.name,
        'db_user': configuration.server.database.user,
        'db_password': configuration.server.database.password,
    }
    if env.debug:
        logger.debug("context = %s" % context)
        logger.debug("build_path = %s" % build_path)
        logger.debug(
            "db_name : %s " % configuration.server.database.name)
        logger.debug(
            "db_user : %s " % configuration.server.database.user)
        logger.debug(
            "db_password : %s " % configuration.server.database.password)
        # with debug=True, the helper returns a string with the command
        # as it would have been executed instead of running it
        upload_msg = utils_upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir,
            debug=True)
        logger.debug(upload_msg)
    else:
        utils_upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir)
    print(NOTE)
def execute_sql(script_name, add_dbname=True, is_admin=False):
    """
    Run a previously generated SQL script on the server with psql.

    script_name - attribute name under configuration.templates.database
    add_dbname - when truthy, connect to the branch database; otherwise
                 connect to the stock "postgres" database (needed when
                 the branch database does not exist yet)
    is_admin - connect as the postgres admin user instead of the
               database owner

    In debug mode the psql command is only logged, not executed.
    """
    if env.debug:
        logger = loggify('database', 'execute_sql')
    configuration = env.config
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    # truthiness instead of "is True" so string arguments (as fab passes
    # them from the command line) behave as expected
    if add_dbname:
        db_name = configuration.server.database.name
    else:
        db_name = "postgres"
    port = configuration.server.database.port
    host = configuration.server.database.host
    if is_admin:
        user = configuration.server.database.admin.user
    else:
        user = configuration.server.database.user
    psql_command = "psql -h {host} -p {port} -U {user} " \
        " -f {sqlfile} {db_name}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            sqlfile=build_path)
    if env.debug:
        logger.debug("db_name = %s" % db_name)
        logger.debug("run( %s ) " % psql_command)
    else:
        run_database_command(psql_command, user)
    print(NOTE)
@task
def generate():
    """
    Render and upload every database script template in one go.
    """
    for script in ('init', 're_init', 'drop_db', 'drop_all'):
        generate_sql(script)
@task
def clear_scripts():
    """
    Placeholder task; not implemented yet.
    """
    print("this does nothing, the code isn't here")
@task
def init():
    """
    Run the database initialization script.

    Connects as the admin user and deliberately without naming a target
    database: the database being created does not exist yet.
    """
    execute_sql('init', add_dbname=False, is_admin=True)
@task
def re_init():
    """
    Re-initialize the database: drop it and recreate it, leaving the
    original database user untouched.
    """
    execute_sql('re_init', add_dbname=False, is_admin=True)
@task
def drop_all():
    """
    Drop both the database and its user.
    """
    execute_sql('drop_all', add_dbname=False, is_admin=True)
@task
def drop_db():
    """
    Drop only the database; the user is left intact.
    """
    execute_sql('drop_db', add_dbname=False, is_admin=True)
@task
def commandline(dbuser='default'):
    """
    Open a psql command line against the branch database.

    dbuser - 'default' (the owner of this branch's database) or 'admin'
             (the master user for the whole postgres install); any other
             value raises ValueError (previously it crashed later with a
             NameError because `user` was never assigned).
    """
    configuration = env.config
    if env.debug:
        logger = loggify('database', 'commandline')
    db_name = configuration.server.database.name
    host = configuration.server.database.host
    port = configuration.server.database.port
    print("debug -- dbuser: %s" % dbuser)
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
        db_name = configuration.server.database.admin.db_name
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        raise ValueError(
            "dbuser must be 'default' or 'admin', got %r" % dbuser)
    if env.debug:
        logger.debug("branch: %s" % configuration.project.branch)
        logger.debug("host : %s" % host)
        logger.debug("port : %s" % port)
        logger.debug("user : %s" % user)
        logger.debug("name : %s" % db_name)
    cmd_commandline = "psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db_name,
        host=host,
        port=port,
        user=user
    )
    print("debug -- cmd_commandline: %s" % cmd_commandline)
    run_database_command(cmd_commandline, user)
@task
def backup(dbuser='default', backup_file=None, branch=None,
           datadump_only=False):
    """
    Create a pg_dump of the branch database.

    dbuser - 'default' (database owner) or 'admin' (postgres master
             user); any other value raises ValueError (previously it
             crashed later with a NameError)
    backup_file - name of the dump file (default "backup.sql")
    branch - branch whose configuration (host/port/user/paths) to use;
             defaults to the current environment
    datadump_only - truthy: only emit the dump, do not tee it to a file

    Returns the pg_dump output so callers (e.g. sync) can reuse it.
    """
    configuration = env.config
    if branch:
        # pull the configuration for another branch so host, user and
        # port point at that branch's database
        import initialize
        configuration = initialize.environment(branch)
    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        raise ValueError(
            "dbuser must be 'default' or 'admin', got %r" % dbuser)
    import time
    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))
    if not backup_file:
        backup_file = "backup.sql"
    timefilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file_time)
    print("debug timefilename -- %s" % timefilename)
    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file)
    # fab passes task arguments as strings, so normalize to a real bool
    datadump_only = booleanize(datadump_only)
    print("dumpfilename: %s" % dumpfilename)
    if not datadump_only:
        # "tee" writes the dump both to the file and to standard output,
        # so it is stored AND captured as the return value
        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
            " {db_name} | tee {dumpfilename}".format(
                db_name=db_name,
                host=host,
                port=port,
                user=user,
                dumpfilename=dumpfilename
            )
    else:
        # output only; nothing is written to disk
        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
            " {db_name}".format(
                db_name=db_name,
                host=host,
                port=port,
                user=user,
            )
    print("cmd_pg_dump: %s" % cmd_pg_dump)
    output = run_database_command(cmd_pg_dump, user)
    return output
@task
def restore(dbuser='default', backup_file=None, reinitialize=True):
    """
    Restore the branch database from a dump file.

    dbuser - 'default' (database owner) or 'admin' (postgres master
             user); any other value raises ValueError (previously it
             crashed later with a NameError)
    backup_file - alternate name of the dump to restore from
                  (default "backup.sql")
    reinitialize - stop supervisor (if running), re-init the database
                   before restoring, and start supervisor again after
    """
    configuration = env.config
    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    else:
        raise ValueError(
            "dbuser must be 'default' or 'admin', got %r" % dbuser)
    print_console("dbuser = %s" % dbuser)
    import time
    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))
    if not backup_file:
        backup_file = "backup.sql"
    timefilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file_time)
    print("debug -- timefilename: %s" % timefilename)
    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        backup_file)
    # note the space before "<": the original glued the redirect onto the
    # database name ("mydb< file"), which the shell happened to accept but
    # was easy to misread
    cmd_pg_restore = "psql -h {host} -p {port} -U {user} -d {db_name}" \
        " < {dumpfilename}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            dumpfilename=dumpfilename
        )
    import supervisor
    has_supervisor = supervisor.status()
    if reinitialize:
        if has_supervisor:
            supervisor.stop()
        re_init()
    run_database_command(cmd_pg_restore, user)
    if has_supervisor and reinitialize:
        supervisor.start()
@task
def sync(dst):
    """
    Sync the current branch's database into the *dst* branch.

    dst - the destination branch name

    Both databases are backed up under sync-specific file names first;
    the captured source dump is then uploaded to the destination's
    backup directory and restored there.
    """
    configuration = env.config
    import initialize
    branch_dst = dst
    configuration_src = configuration
    configuration_dst = initialize.environment(branch_dst)
    print("branch_src: %s" % configuration_src.project.branch)
    print("branch_dst: %s" % configuration_dst.project.branch)
    # backup all files with names that wont interfere
    output_name_dst = "output_sync_dst.sql"
    backup_name_dst = "backup_sync_dst.sql"
    backup_name_src = "backup_sync_src.sql"
    print("output_src = backup(")
    print("\tbackup_file=%s," % backup_name_src)
    print("\tbranch=%s)" % configuration_src.project.branch)
    # dump the source database (written under the SOURCE branch's
    # backups/database path) and capture the dump text
    print("backup up src: %s" % configuration_src.project.branch)
    output_src = backup(
        backup_file=backup_name_src,
        branch=configuration_src.project.branch)
    print("backing up dst: %s" % configuration_dst.project.branch)
    backup(
        backup_file=backup_name_dst,
        branch=configuration_dst.project.branch)
    # write the captured source dump next to the destination's backups
    output_path_dst = os.path.join(
        configuration_dst.paths.server.backups.database,
        output_name_dst)
    # StringIO moved to io in Python 3; support both interpreters
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    output_iostring = StringIO(output_src)
    fabric_ops.put(output_iostring, output_path_dst)
    # stop the server, reinit the database and restore with the new dump
    print_console("in order to do this properly you must stop the django.run"
                  " or supervisor server first")
    restore(backup_file=output_name_dst, reinitialize=True)
def run_database_command(cmd_string, username, hide=False):
    """
    Run a shell command on the server, prompting locally for the
    database password first and injecting it via PGPASSWORD.

    cmd_string - the command to execute
    username - database user the password prompt refers to
    hide - suppress fabric's running/stdout/stderr output (default
           False). Do NOT use hide with commandline(): it swallows all
           psql output and makes the session useless.

    Returns the command output from fabric's run().

    SECURITY NOTE: the password is embedded in the remote command line
    (PGPASSWORD=...), so it can appear in fabric's "running" echo and in
    the remote process listing. A ~/.pgpass file would avoid this.
    """
    configuration = env.config
    msg_pass = 'Enter your database password for {user}@{branch}:'.format(
        user=username, branch=configuration.project.branch)
    psqlpass = getpass(msg_pass)
    cmd_full = "PGPASSWORD={psqlpass} {cmd_string}".format(
        psqlpass=psqlpass,
        cmd_string=cmd_string,
    )
    if hide:
        # alias the import so it does not shadow the boolean `hide`
        # parameter of this function
        from fabric.context_managers import hide as hide_ctx
        with hide_ctx('running', 'stdout', 'stderr'):
            output = fabric_ops.run(cmd_full)
    else:
        output = fabric_ops.run(cmd_full)
    return output
def get_template_path(script_name, script_type):
    """
    Return the remote path of a database SQL script.

    script_name - attribute name under configuration.templates.database
    script_type - 'build' for the rendered file under build/, or
                  'template' for the source template under files/

    Raises ValueError for any other script_type. (The original printed
    a garbled message -- "must passeither", missing a space -- and
    called sys.exit(), which is hostile for a library helper.)
    """
    # validate before touching the environment so bad input fails fast
    if script_type not in ('build', 'template'):
        raise ValueError(
            "script_type must be either 'build' or 'template', got %r"
            % script_type)
    configuration = env.config
    entry = getattr(configuration.templates.database, script_name)
    if script_type == 'build':
        return os.path.join(
            configuration.templates.database.path.remote,
            'build',
            entry.dst)
    return os.path.join(
        configuration.templates.database.path.remote,
        'files',
        entry.src)
@task
def edit(param='help'):
    """
    Call up mvim (via maintenance.edit) on one of the database conf
    files.

    param - one of the editable location keys ('build.init',
            'template.init', 'build.re_init', ...); any other value
            prints a help listing of the available keys.
    """
    from maintenance import edit as maintenance_edit
    # build the key -> {path, desc} table from one list of script names
    # instead of eight hand-written entries; the resulting keys and
    # descriptions are identical to the originals
    locations = {}
    for script in ('init', 're_init', 'drop_db', 'drop_all'):
        locations['build.%s' % script] = {
            'path': get_template_path(script, 'build'),
            'desc': 'remote %s conf file' % script,
        }
        locations['template.%s' % script] = {
            'path': get_template_path(script, 'template'),
            'desc': 'remote version of %s conf template' % script,
        }
    if param in locations:
        remote_path = locations[param]['path']
        maintenance_edit(remote_path=remote_path)
    else:
        print("""
    "fab database.edit" automates editing files important to django whether
    locally or remotely
    to use this you must pass one of the editable locations in as a
    parameter
    currently editable locations are:
    """)
        # sorted for a deterministic listing
        for k_loc in sorted(locations):
            print("\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc']))
        return
@task
def test():
    """
    Sanity check: open a psql session against the branch database as
    the standard (non-admin) user.
    """
    cfg = env.config
    db = cfg.server.database
    cmd_string = "psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db.name,
        host=db.host,
        port=db.port,
        user=db.user
    )
    run_database_command(cmd_string, db.user)