From cf2cc845bcacd0da841155cd053b491b7bda540d Mon Sep 17 00:00:00 2001 From: Ronny Abraham Date: Tue, 6 Sep 2016 18:53:13 +0300 Subject: [PATCH] changes to fabric modified: modules/database.py modified: modules/django.py modified: modules/initialize.py modified: modules/utils.py --- modules/database.py | 274 ++++++++++++++++++++++++++++++++---------- modules/django.py | 3 + modules/initialize.py | 110 +++++++++++------ modules/utils.py | 8 ++ 4 files changed, 295 insertions(+), 100 deletions(-) diff --git a/modules/database.py b/modules/database.py index 86fdb32..780cc4b 100644 --- a/modules/database.py +++ b/modules/database.py @@ -1,12 +1,11 @@ from fabric.api import env, task # # from jinja2 import Environment import os -# from utils import upload_template as utils_upload_template -# from utils import loggify, print_console -import utils - +from utils import upload_template as utils_upload_template +from utils import loggify, print_console +# from utils import prompt_continue from getpass import getpass -from fabric.operations import run +import fabric.operations as fabric_ops NOTE = """ @@ -17,14 +16,10 @@ NOTE = """ def generate_sql(script_name): - """ - generates the sql files and puts them in - the build directory for this branch - """ configuration = env.config if env.debug: - logger = utils.loggify('database', 'generate_sql') + logger = loggify('database', 'generate_sql') build_file = getattr(configuration.templates.database, script_name).dst build_path = os.path.join( @@ -59,7 +54,7 @@ def generate_sql(script_name): # when we set debug=True, this function returns a string with the # command as it would have been executed - upload_msg = utils.upload_template( + upload_msg = utils_upload_template( filename=template_file, destination=build_path, context=context, @@ -72,7 +67,7 @@ def generate_sql(script_name): logger.debug(upload_msg) else: - utils.upload_template( + utils_upload_template( filename=template_file, destination=build_path, context=context, @@ -89,7 +84,7 @@ def generate_sql(script_name): def execute_sql(script_name, add_dbname=True, is_admin=False): if env.debug: - logger = utils.loggify('database', 'execute_sql') + logger = loggify('database', 'execute_sql') configuration = env.config @@ -124,7 +119,7 @@ def execute_sql(script_name, add_dbname=True, is_admin=False): logger.debug("db_name = %s" % db_name) logger.debug("run( %s ) " % psql_command) else: - run_database_command(psql_command) + run_database_command(psql_command, user) print NOTE @@ -143,38 +138,7 @@ def generate(): @task def clear_scripts(): - """ - clears all the sql scripts from scripts/conf/postgres/build/*.sql - - does this on the remote branch and not local. Because conf files for each - branch are specifically tied to the remote site. - - the "local" directory refers to my computer, technically from wherever - fabric is being run, but that is always development. 
-    """
-
-    configuration = env.config
-
-    _template = getattr(configuration.templates, 'database')
-
-    if env.debug:
-        cmd_lsdir = "ls %s" % \
-            os.path.join(_template.path.remote, 'build', '*.sql')
-
-        utils.printvar('cmd_lsdir', cmd_lsdir)
-        output = run(cmd_lsdir)
-
-        outputlist = output.split('\r\n')
-
-        for line in outputlist:
-            print line
-
-    else:
-        cmd_rmfiles = "rm %s" % \
-            os.path.join(_template.path.remote, 'build', '*.sql')
-        output = run(cmd_rmfiles)
-
-        print output
+    print "this does nothing, the code isn't here"
 
 
 @task
@@ -212,7 +176,7 @@ def drop_db():
     """
     drop only the database but ignore the user
     """
-    execute_sql('drop_db')
+    execute_sql('drop_db', add_dbname=False, is_admin=True)
 
 
 @task
@@ -230,14 +194,17 @@ def commandline(dbuser='default'):
     configuration = env.config
 
     if env.debug:
-        logger = utils.loggify('database', 'commandline')
+        logger = loggify('database', 'commandline')
 
     db_name = configuration.server.database.name
     host = configuration.server.database.host
     port = configuration.server.database.port
 
+    print "debug -- dbuser: %s" % dbuser
+
     if dbuser == 'admin':
         user = configuration.server.database.admin.user
+        db_name = configuration.server.database.admin.db_name
 
     elif dbuser == 'default':
         user = configuration.server.database.user
@@ -248,20 +215,104 @@ def commandline(dbuser='default'):
         logger.debug("user : %s" % user)
         logger.debug("name : %s" % db_name)
 
-    run_database_command("psql -h {host} -p {port} -U {user} {db_name}".format(
+    cmd_commandline = "psql -h {host} -p {port} -U {user} {db_name}".format(
         db_name=db_name,
         host=host,
         port=port,
         user=user
-    ))
+    )
+
+    print "debug -- cmd_commandline: %s" % cmd_commandline
+
+    run_database_command(cmd_commandline, user)
 
 
 @task
-def datadump(dbuser='default'):
+def backup(dbuser='default', backup_file=None, branch=None,
+           filedump_only=False):
     """
     creates a dump of the database for backup and storage
 
     dbuser - set to "default" but can also be set to "admin"
+    backup_file - specify name of the backup_file otherwise use default value
+    branch - what branch to apply this function to (default env.current)
+    filedump_only - only output the results of pg_dump
+
+    the admin user is what it says, I have both of these in case I need to
+    switch between the master user for the entire postgres install or the owner
+    of the particular database
+
+    NOTE:
+    this works because the function takes the branch and pulls out the
+    configuration file based on the branch, and then uses THAT information
+    to pass the host, user and port info into the pg_dump command
+    """
+    configuration = env.config
+
+    if branch:
+        import initialize
+        configuration = initialize.environment(branch)
+
+    db_name = configuration.server.database.name
+    port = configuration.server.database.port
+    host = configuration.server.database.host
+
+    if dbuser == 'admin':
+        user = configuration.server.database.admin.user
+    elif dbuser == 'default':
+        user = configuration.server.database.user
+
+    import time
+    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))
+    if not backup_file:
+        backup_file = "backup.sql"
+
+    timefilename = os.path.join(
+        configuration.paths.server.backups.database,
+        backup_file_time)
+    print "debug timefilename -- %s" % timefilename
+
+    dumpfilename = os.path.join(
+        configuration.paths.server.backups.database,
+        backup_file)
+
+    if not filedump_only:
+
+        # NOTE
+        # the "tee" command takes input and pipes it to a file and to standard
+        # output
+        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
+            " {db_name} | tee {dumpfilename}".format(
+                db_name=db_name,
+                host=host,
+                port=port,
+                user=user,
+                dumpfilename=dumpfilename
+            )
+        output = run_database_command(cmd_pg_dump, user, True)
+    else:
+        # this only spits output, it does not save a file
+
+        cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user}" \
+            " {db_name}".format(
+                db_name=db_name,
+                host=host,
+                port=port,
+                user=user,
+            )
+        output = run_database_command(cmd_pg_dump, user, True)
+
+    return output
+
+
+@task
+def restore(dbuser='default', backup_file=None, reinitialize=True):
+    """
+    restores the database from a backup file
+
+    dbuser - set to "default" but can also be set to "admin"
+    backup_file - alternate name for the backup_file to restore from
+    reinitialize - if supervisor needs to be restarted, do it
 
     the admin user is what it says, I have both of these in case I need to
     switch between the master user for the entire postgres install or the owner
@@ -278,14 +329,25 @@
     elif dbuser == 'default':
         user = configuration.server.database.user
 
-    utils.print_console("dbuser = %s" % dbuser)
+    print_console("dbuser = %s" % dbuser)
+
+    import time
+    backup_file_time = "backup_%s.sql" % (time.strftime("%Y_%m_%d"))
+
+    if not backup_file:
+        backup_file = "backup.sql"
+
+    timefilename = os.path.join(
+        configuration.paths.server.backups.database,
+        backup_file_time)
+    print "debug -- timefilename: %s" % timefilename
 
     dumpfilename = os.path.join(
         configuration.paths.server.backups.database,
-        "test.sql")
+        backup_file)
 
-    cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user} {db_name} " \
-        "-f {dumpfilename}".format(
+    cmd_pg_restore = "psql -h {host} -p {port} -U {user} -d {db_name}" \
+        " < {dumpfilename}".format(
            db_name=db_name,
            host=host,
            port=port,
@@ -293,18 +355,108 @@
            user=user,
            dumpfilename=dumpfilename
        )
 
-    run_database_command(cmd_pg_dump)
-    # utils.print_console("cmd_pg_dump : %s" % cmd_pg_dump)
+    import supervisor
+    has_supervisor = supervisor.status()
+    if reinitialize:
+        if has_supervisor:
+            supervisor.stop()
+        re_init()
+
+    run_database_command(cmd_pg_restore, user)
+
+    if has_supervisor and reinitialize:
+        supervisor.start()
 
 
-def run_database_command(cmd_string):
-    psqlpass = getpass('Enter your database password:')
+@task
+def sync(src):
+    """
+    sync from the source database to the calling branch
+    src - the source branch whose database will be copied
+    """
+    configuration = env.config
+
+    branch_src = src
+    import initialize
+
+    configuration_src = initialize.environment(branch_src)
+    configuration_dst = configuration
+
+    print "branch_src: %s" % configuration_src.project.branch
+    print "branch_dst: %s" % configuration_dst.project.branch
+
+    # backup all files with names that won't interfere
+    output_name_dst = "output_sync_dst.sql"
+    backup_name_dst = "backup_sync_dst.sql"
+    backup_name_src = "backup_sync_src.sql"
+
+    # dump the source database to our {destination}/backups/database
+    print "backing up src: %s" % configuration_src.project.branch
+    output_src = backup(
+        backup_file=backup_name_src,
+        branch=configuration_src.project.branch)
+
+    print "backing up dst: %s" % configuration_dst.project.branch
+    backup(
+        backup_file=backup_name_dst,
+        branch=configuration_dst.project.branch)
+
+    #
+    # create a file with the source database on the destination branch
+
+    output_path_dst = os.path.join(
+        configuration_dst.paths.server.backups.database,
+        output_name_dst)
+
+    from StringIO import StringIO
+    output_iostring = StringIO(output_src)
+    fabric_ops.put(output_iostring, output_path_dst)
+
+    #
+    # stop the server, reinit the database and restore with the new information
+
+    print_console("in order to do this properly you must stop the django.run"
+                  " or supervisor server first")
+
+    restore(backup_file=output_name_dst, reinitialize=True)
+
+
+def run_database_command(cmd_string, username, hide=False):
+    """
+    runs a bash shell command, but prefaces it by getting the
+    database password
+
+    cmd_string - the command to be executed
+    username - the database user, shown in the password prompt
+    hide - hides the fabric-side output (default False)
+
+    Note: don't use "hide" with commandline because it will hide all
+    output and make commandline useless
+    """
+    configuration = env.config
+
+    msg_pass = 'Enter your database password for {user}@{branch}:'.format(
+        user=username, branch=configuration.project.branch)
+
+    psqlpass = getpass(msg_pass)
     cmd_full = "PGPASSWORD={psqlpass} {cmd_string}".format(
         psqlpass=psqlpass,
         cmd_string=cmd_string,
     )
-    run(cmd_full)
+    # NOTE
+    # DO NOT USE "with hide"
+    # one of the commands is to access the psql commandline
+    # with hide('stdout') blocks all messages that get returned
+    # from there so leave it alone
+
+    if hide:
+        from fabric.context_managers import hide
+        with hide('running', 'stdout', 'stderr'):
+            output = fabric_ops.run(cmd_full)
+    else:
+        output = fabric_ops.run(cmd_full)
+
+    return output
 
 
 def get_template_path(script_name, script_type):
@@ -421,4 +573,4 @@ def test():
         user=user
     )
 
-    run_database_command(cmd_string)
+    run_database_command(cmd_string, user)
diff --git a/modules/django.py b/modules/django.py
index 5acd364..2c300cc 100644
--- a/modules/django.py
+++ b/modules/django.py
@@ -44,6 +44,9 @@ def manage(args=None):
     # changes the working directory to the djangoroot
     from fabric.context_managers import cd
 
+    if not args:
+        args = "help"
+
     with virtualenv():
         with cd(configuration.paths.django.root):
             output = fabric_ops.run(
diff --git a/modules/initialize.py b/modules/initialize.py
index d95a03e..de35524 100644
--- a/modules/initialize.py
+++ b/modules/initialize.py
@@ -3,6 +3,7 @@ import os
 import fabric.utils
 import maintenance
 from fabric.api import env, task
+from utils import check_debug
 
 # from utils import loggify
 
@@ -36,6 +37,7 @@ def extend_object(envobject, dictname):
 
 
 def environment(branchname):
+
     env.config = get_config(branchname)
     env.branch = branchname
 
@@ -45,6 +47,7 @@ def environment(branchname):
     env.host_string = env.hosts[0]
 
     # set database host to the docker_ip
+
     host = env.config.server.database.host
 
     from docker import docker_ip
 
@@ -122,53 +125,45 @@ def add_template(dataobject, layout, config, section, template_name="conf"):
     if not hasattr(dataobject.templates, section):
         dataobject.templates.addbranch(section)
 
+    # shortcut to working on the branch we just created above
     _template = getattr(dataobject.templates, section)
 
+    # create a "path" branch to the _templates
     if not hasattr(_template, "path"):
         _template.addbranch("path")
 
-    # NOTE
-    # project.root is NOT the same as project.local
-    # "local" refers to the filesystem from which fabric is being run
-    # while "root" refers to the file system on which fabric is ACTING
-    # this can be very confusing, so I have to make these notes now and then
+    # first get the default template values from layout
+    var_section_path = os.path.join(
+        layout['paths']['templates']['conf'],
+        layout['templates'][section]['path']
+    )
 
-    # NOTE
-    # the reason I am using _template.path.local is because if I am want to
-    # make corrections to a template conf file, I don't want to have to do
-    # it on the remote branch, I'd rather have it right there on the
development branch. Because ultimately, I'm rsyncing everything to the - # remote branch anyway to do the reverse, I'd have to constantly merge - # from the remote to local and from local to remote. Simpler and more - # efficient to assume that everything is located on local. + var_section_source = layout['templates'][section][template_name]['source'] + var_section_output = layout['templates'][section][template_name]['output'] + if 'templates' in config and section in config['templates']: + _config = config['templates'][section] + var_section_path = _config['path'] + var_section_source = _config[template_name]['source'] + var_section_output = _config[template_name]['output'] + + # define the local, and dest paths _template.path.local = os.path.join( - dataobject.paths.conf.local, - layout['templates'][section]['path']) + dataobject.paths.project.local, + var_section_path) _template.path.remote = os.path.join( - dataobject.paths.conf.remote, - layout['templates'][section]['path']) - - # DEBUG REMOVE - if env.debug: - import utils - utils.printvar("branch", dataobject.project.branch) - utils.printvar("section", section) - utils.printvar("_template.path.local", _template.path.local) - utils.printvar("_template.path.remote", _template.path.remote) - # utils.prompt_continue() + dataobject.paths.project.root, + var_section_path) if not hasattr(_template, template_name): _template.addbranch(template_name) conf_template = getattr(_template, template_name) - conf_template.src = \ - layout['templates'][section][template_name]['source'] + conf_template.src = var_section_source - conf_template.dst = \ - layout['templates'][section][template_name]['output'] + conf_template.dst = var_section_output def get_config(branchname): @@ -246,7 +241,7 @@ def get_config(branchname): project_home_dir) # DEBUG REMOVE - if env.debug: + if check_debug(env): import utils utils.printvar('project.branch', dataobject.project.branch) utils.printvar('project.root', dataobject.paths.project.root) @@ -308,6 +303,10 @@ def get_config(branchname): dataobject.paths.project.root, 'templates') + dataobject.paths.django.fixtures = os.path.join( + dataobject.paths.project.root, + layout['paths']['extras']['fixtures']) + dataobject.paths.django.addbranch('settings') dataobject.paths.django.settings.root = os.path.join( @@ -821,24 +820,58 @@ def _init_docker(configuration, layout, config): if 'docker' in config: configuration.addbranch("docker") + # + # compose project name configuration option + configuration.docker.name = "{project_name}_{project_branch}".format( + project_name=configuration.project.name, + project_branch=configuration.project.branch) + + if 'name' in config['docker'] and config['docker']['name']: + configuration.docker.name = config['docker']['name'] + + # + # host information + if 'host' in config['docker']: configuration.docker.host = config['docker']['host'] + # + # configuration info for docker database + if 'database' in config: configuration.docker.addbranch("database") configuration.docker.database.host = \ config['docker']['database']['host'] - configuration.docker.database.container_name = \ - "{project_name}_{project_extension}_db".format( - project_name=configuration.project.name, - project_extension=configuration.project.extension) + container_extension = config['docker']['database'].get( + 'extension', "") + if container_extension: + container_extension = "_" + container_extension - configuration.docker.database.service_name = \ - "{project_name}_{project_branch}_database".format( + # + # set default 
container name value + # + # if the container name config option is set + # then use the name given in the configuration file + + configuration.docker.database.container_name = \ + "{project_name}_{project_extension}{container_ext}".format( project_name=configuration.project.name, - project_branch=configuration.project.branch) + project_extension=configuration.project.extension, + container_ext=container_extension + ) + + if 'name' in config['docker']['database'] and \ + config['docker']['database']['name']: + configuration.docker.database.container_name = \ + config['docker']['database']['name'] + + # + # not sure what "service name" is for + configuration.docker.database.service_name = \ + "{docker_name}_database".format( + docker_name=configuration.docker.name) configuration.docker.database.port = \ config['docker']['database']['port'] @@ -908,7 +941,6 @@ def _init_overrides(configuration, layout, config): import sys sys.path.append(configuration.paths.overrides.modules) - import override override.initialize(configuration, config, layout, env) diff --git a/modules/utils.py b/modules/utils.py index adeb953..a27eb0c 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -385,3 +385,11 @@ def prompt_continue(message="Do you want to continue? Y/n", default="Y"): else: if not booleanize(prompt_val): sys.exit() + + +def check_debug(env): + + if hasattr(env, 'debug'): + return env.debug + else: + return False
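
Editor's note (not part of the patch): the new database tasks above are meant to be driven from a fabfile that imports these modules. The sketch below is one way they might be wired up, assuming the modules are importable as initialize and database, that a branch named "develop" exists in the project configuration, and that the Fabric 1.x task objects remain directly callable. The file name, branch names, and wrapper task names are illustrative only, not taken from the repository.

# fabfile_sketch.py -- illustrative only, not part of this patch
from fabric.api import task

import initialize
import database


@task
def backup_develop():
    # load the per-branch configuration into env.config (assumed branch name)
    initialize.environment('develop')
    # pg_dump the branch database to backups/backup.sql;
    # run_database_command prompts for the database password
    database.backup(dbuser='admin')


@task
def sync_from_production():
    # copy the "production" branch database into the "develop" branch
    initialize.environment('develop')
    database.sync(src='production')

Invocation would then look like "fab backup_develop" or "fab sync_from_production".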