fabric/modules/database.py

from getpass import getpass
import os

from fabric.api import env, task
from fabric.operations import run

import utils
NOTE = """
\n\n\n
NOTE: you MUST deactivate the gunicorn or supervisor service if you want
to make changes to the database, otherwise, the changes will not run and
you will NOT BE ABLE TO SYNC. SO TURN THE FUCKING THING OFF\n\n\n"""
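
# Typical invocations of the tasks below, assuming the fabfile exposes this
# module under the "database" namespace (the "fab database.edit" help text
# further down points that way); a non-authoritative sketch:
#
#     fab database.generate                  # render all sql scripts into the build dir
#     fab database.init                      # create the database and its user
#     fab database.commandline:dbuser=admin  # open psql as the admin user
#     fab database.datadump                  # pg_dump this branch's database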


def generate_sql(script_name):
    """
    generates the sql file for ``script_name`` and puts it in
    the build directory for this branch
    """
    configuration = env.config
    if env.debug:
        logger = utils.loggify('database', 'generate_sql')
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    template_file = getattr(configuration.templates.database, script_name).src
    template_dir = os.path.join(
        configuration.templates.database.path.local,
        'files')
    context = dict()
    context['db_name'] = configuration.server.database.name
    context['db_user'] = configuration.server.database.user
    context['db_password'] = configuration.server.database.password
    if env.debug:
        logger.debug("context = %s" % context)
        logger.debug("build_path = %s" % build_path)
        logger.debug(
            "db_name : %s" % configuration.server.database.name)
        logger.debug(
            "db_user : %s" % configuration.server.database.user)
        logger.debug(
            "db_password : %s" % configuration.server.database.password)
        # when debug=True is passed, upload_template returns a string with
        # the command as it would have been executed
        upload_msg = utils.upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir,
            debug=True)
        logger.debug(upload_msg)
    else:
        utils.upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir)
    print NOTE
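
# The exact layout of conf/fabric.yml is not shown in this module; the sketch
# below only illustrates the attributes generate_sql() and execute_sql() read
# (templates.database.<script>.src / .dst and templates.database.path.local /
# .remote), with src names taken from templates/conf/database/files/ and the
# remote path purely hypothetical:
#
#     templates:
#       database:
#         path:
#           local: templates/conf/database
#           remote: /srv/<branch>/conf/database    # hypothetical
#         init:
#           src: db.init.sql.jinja2
#           dst: db.init.sql
#         re_init:
#           src: db.re_init.sql.jinja2
#           dst: db.re_init.sql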


def execute_sql(script_name, add_dbname=True, is_admin=False):
    """
    runs the built sql file for ``script_name`` against the database with psql
    """
    if env.debug:
        logger = utils.loggify('database', 'execute_sql')
    configuration = env.config
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    if add_dbname is True:
        db_name = configuration.server.database.name
    else:
        db_name = "postgres"
    port = configuration.server.database.port
    host = configuration.server.database.host
    if is_admin:
        user = configuration.server.database.admin.user
    else:
        user = configuration.server.database.user
    psql_command = "psql -h {host} -p {port} -U {user} " \
        "-f {sqlfile} {db_name}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            sqlfile=build_path)
    if env.debug:
        logger.debug("db_name = %s" % db_name)
        logger.debug("run( %s )" % psql_command)
    else:
        run_database_command(psql_command)
    print NOTE
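
# For example, init runs execute_sql('init', add_dbname=False, is_admin=True),
# which builds a command of the form
#
#     psql -h <host> -p <port> -U <admin user> -f <remote build path> postgres
#
# i.e. the script runs against the stock "postgres" database, because the
# branch database does not exist yet.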


@task
def generate():
    """
    helper task that generates all the sql scripts
    """
    generate_sql('init')
    generate_sql('re_init')
    generate_sql('drop_db')
    generate_sql('drop_all')


@task
def clear_scripts():
    """
    clears all the built sql scripts from scripts/conf/postgres/build/*.sql
    This is done on the remote host, not locally, because the conf files for
    each branch are tied to that branch's remote site. "local" refers to the
    machine fabric is run from, which in practice is always development.
    """
    configuration = env.config
    _template = getattr(configuration.templates, 'database')
    if env.debug:
        cmd_lsdir = "ls %s" % \
            os.path.join(_template.path.remote, 'build', '*.sql')
        utils.printvar('cmd_lsdir', cmd_lsdir)
        output = run(cmd_lsdir)
        outputlist = output.split('\r\n')
        for line in outputlist:
            print line
    else:
        cmd_rmfiles = "rm %s" % \
            os.path.join(_template.path.remote, 'build', '*.sql')
        output = run(cmd_rmfiles)
        print output


@task
def init():
    """
    runs the database initialization script
    """
    # when initializing the database we do NOT specify which database to
    # connect to, because that database is the one being created
    # (it does not exist yet)
    execute_sql('init', add_dbname=False, is_admin=True)


@task
def re_init():
    """
    re-initializes the database:
    drops the database and recreates it, without touching the original user
    """
    execute_sql('re_init', add_dbname=False, is_admin=True)


@task
def drop_all():
    """
    drops the database and drops the user
    """
    execute_sql('drop_all', add_dbname=False, is_admin=True)


@task
def drop_db():
    """
    drops only the database and leaves the user alone
    """
    execute_sql('drop_db')
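
# The four tasks above line up with the four templates under
# templates/conf/database/files/ (db.init.sql.jinja2, db.re_init.sql.jinja2,
# db.drop_db.sql.jinja2, db.drop_all.sql.jinja2), assuming the conf keys
# match the script names the way generate() uses them.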


@task
def commandline(dbuser='default'):
    """
    logs into the psql command line of this branch's postgres database
    dbuser - "default" (the owner of this branch's database, the default) or
    "admin" (the master user for the whole postgres install); both are kept
    so it is easy to switch between them when needed
    """
    configuration = env.config
    if env.debug:
        logger = utils.loggify('database', 'commandline')
    db_name = configuration.server.database.name
    host = configuration.server.database.host
    port = configuration.server.database.port
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    if env.debug:
        logger.debug("branch: %s" % configuration.project.branch)
        logger.debug("host : %s" % host)
        logger.debug("port : %s" % port)
        logger.debug("user : %s" % user)
        logger.debug("name : %s" % db_name)
    run_database_command("psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db_name,
        host=host,
        port=port,
        user=user))


@task
def datadump(dbuser='default'):
    """
    creates a dump of the database for backup and storage
    dbuser - "default" (the owner of this branch's database, the default) or
    "admin" (the master user for the whole postgres install)
    """
    configuration = env.config
    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    elif dbuser == 'default':
        user = configuration.server.database.user
    utils.print_console("dbuser = %s" % dbuser)
    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        "test.sql")
    cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user} {db_name} " \
        "-f {dumpfilename}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            dumpfilename=dumpfilename)
    run_database_command(cmd_pg_dump)
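
# Note that the dump name is hard-coded to "test.sql", so successive dumps
# overwrite each other. A minimal sketch of a timestamped name instead (not
# wired into datadump above):
#
#     import datetime
#     dumpfilename = os.path.join(
#         configuration.paths.server.backups.database,
#         "%s_%s.sql" % (db_name,
#                        datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))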


def run_database_command(cmd_string):
    """
    prompts for the database password and runs ``cmd_string`` on the remote
    host with PGPASSWORD set, so psql / pg_dump do not prompt interactively
    """
    psqlpass = getpass('Enter your database password:')
    cmd_full = "PGPASSWORD={psqlpass} {cmd_string}".format(
        psqlpass=psqlpass,
        cmd_string=cmd_string)
    run(cmd_full)
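

# A small alternative sketch, not called by any task above: Fabric 1.x also
# provides fabric.context_managers.shell_env for setting environment
# variables around a run() call, which is the more idiomatic way to hand
# PGPASSWORD to the remote command than prefixing it onto the string by hand.
def run_database_command_shell_env(cmd_string):
    from fabric.context_managers import shell_env
    psqlpass = getpass('Enter your database password:')
    with shell_env(PGPASSWORD=psqlpass):
        run(cmd_string)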


def get_template_path(script_name, script_type):
    """
    returns the remote path of either the built sql file ('build') or the
    template it is rendered from ('template') for ``script_name``
    """
    configuration = env.config
    if script_type == 'build':
        file_build = getattr(configuration.templates.database, script_name).dst
        path = os.path.join(
            configuration.templates.database.path.remote,
            'build',
            file_build)
    elif script_type == 'template':
        file_template = getattr(
            configuration.templates.database, script_name).src
        path = os.path.join(
            configuration.templates.database.path.remote,
            'files',
            file_template)
    else:
        print "Error: you passed %s for script_type, you must pass " \
            "either 'build' or 'template'" % script_type
        import sys
        sys.exit()
    return path


@task
def edit(param='help'):
    """
    calls up mvim on the built conf files and their templates
    """
    from maintenance import edit as maintenance_edit
    locations = {
        'build.init': {
            'path': get_template_path('init', 'build'),
            'desc': 'remote init conf file',
        },
        'template.init': {
            'path': get_template_path('init', 'template'),
            'desc': 'remote version of init conf template',
        },
        'build.re_init': {
            'path': get_template_path('re_init', 'build'),
            'desc': 'remote re_init conf file',
        },
        'template.re_init': {
            'path': get_template_path('re_init', 'template'),
            'desc': 'remote version of re_init conf template',
        },
        'build.drop_db': {
            'path': get_template_path('drop_db', 'build'),
            'desc': 'remote drop_db conf file',
        },
        'template.drop_db': {
            'path': get_template_path('drop_db', 'template'),
            'desc': 'remote version of drop_db conf template',
        },
        'build.drop_all': {
            'path': get_template_path('drop_all', 'build'),
            'desc': 'remote drop_all conf file',
        },
        'template.drop_all': {
            'path': get_template_path('drop_all', 'template'),
            'desc': 'remote version of drop_all conf template',
        },
    }
    if param in locations.keys():
        remote_path = locations[param]['path']
        maintenance_edit(remote_path=remote_path)
    else:
        print """
    "fab database.edit" automates editing the database conf files, whether
    local or remote.
    To use it, pass one of the editable locations as a parameter.
    Currently editable locations are:
    """
        for k_loc in locations.keys():
            print "\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc'])
    return
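
# e.g. "fab database.edit:build.init" opens the built init script, while
# "fab database.edit" with no parameter prints the list of editable locations.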


@task
def test():
    """
    opens a psql session as the default database user, to verify that the
    connection settings for this branch's database work
    """
    configuration = env.config
    db_name = configuration.server.database.name
    host = configuration.server.database.host
    port = configuration.server.database.port
    user = configuration.server.database.user
    cmd_string = "psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db_name,
        host=host,
        port=port,
        user=user)
    run_database_command(cmd_string)