fabric/modules/deploy.py
Ronny Abraham 152b5658a9 fix bug in deployment where pip gets updated during initial
bootstrap, but then screws up the rest of the deployment, including pip
install. Break up the deployment into two parts. NOTE: must find a way
to have the deployment as one part alone
Changes to be committed:
	modified:   modules/deploy.py
	modified:   modules/pip.py
2017-07-20 23:43:33 +03:00


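"""
Fabric tasks for deploying a Django project to a remote host.

Tasks defined in this module:

    setup_rootpath   - create the rootpath directory structure on the server
    bootstrap        - one-shot bootstrap: sync, rootpath, virtualenv, pip,
                       django project, links, server scripts, docker, database
    bootstrap_part1  - first half of the split bootstrap: sync, rootpath,
                       virtualenv (see the commit note above about pip
                       upgrades breaking a single-pass bootstrap)
    bootstrap_part2  - second half: django project, links, server scripts,
                       docker, database
    sync             - rsync the project to the remote host
    media            - copy dynamic media files between branches
    test             - print a few configuration values
    remove           - remove the project, rootpath, virtualenv and database
    ssh              - open an ssh session to the project host
"""
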
from fabric.api import env, task
from fabric.operations import run, local
# from fabric.contrib.files import upload_template
# from utils import print_run
from utils import virtualenv_source, booleanize, loggify
from utils import print_console
from pip import setup_virtualenv
from pip import bootstrap_pip
import os
import logging


@task
def setup_rootpath():
    configuration = env.config
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: deploy.setup_rootpath %(message)s',
            level=logging.DEBUG)
    cmd_mkdir_list = list()
    cmd_mkdir_list.append(
        "mkdir -p {rootpath}/{{logs,private,public,scripts}}".format(
            rootpath=configuration.paths.server.root))
    for key in configuration.paths.server.backups.keys():
        path = configuration.paths.server.backups[key]
        cmd_mkdir_list.append(
            "mkdir -p {rootpath_backups}".format(rootpath_backups=path))
    cmd_mkdir_list.append(
        "mkdir -p {rootpath}/logs/{{django,gunicorn,nginx,supervisor}}".format(
            rootpath=configuration.paths.server.root))
    cmd_mkdir_list.append(
        "mkdir -p {rootpath}/public/media/{{dynamic,static}}".format(
            rootpath=configuration.paths.server.root))
    if env.debug:
        for cmd_mkdir in cmd_mkdir_list:
            logging.debug("run(\"\n\t%s\n\t\"" % cmd_mkdir)
    else:
        #
        # create the directory structure in the rootpath (usually
        # /var/www/projectname.branch)
        for cmd_mkdir in cmd_mkdir_list:
            run(cmd_mkdir)
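
# setup_rootpath() above creates a tree like the following under
# paths.server.root (the comment in the task gives /var/www/projectname.branch
# as the usual root), plus whatever backup directories are configured:
#
#   <rootpath>/
#       logs/
#           django/  gunicorn/  nginx/  supervisor/
#       private/
#       public/
#           media/
#               dynamic/  static/
#       scripts/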


@task
def bootstrap():
    """
    one-shot bootstrap: sync, rootpath, virtualenv, pip, django project,
    links, server scripts, docker and database
    """
    import database as db
    configuration = env.config
    if env.debug:
        logger = loggify('deploy', 'bootstrap')
    #
    # not doing a full sync, because we have to set up the rootpath,
    # virtualenv, files, dir structure, etc. This means we aren't
    # going to upload gunicorn and supervisor until after we've done
    # everything else at the end of the bootstrapping process
    sync(full=False)
    # continue setting up the rootpath and virtualenv
    setup_rootpath()
    setup_virtualenv()
    bootstrap_pip()
    # create the django project
    from django import create_project
    create_project()
    #
    # link virtualenv to rootpath/private/virtualenv
    src_virtual = configuration.virtualenv.paths.root
    dst_virtual = configuration.paths.server.virtual
    #
    # link templates to rootpath/private/templates
    src_templates = configuration.paths.django.templates
    dst_templates = configuration.paths.server.django.templates
    #
    # link the django code in the project directory to the appropriate
    # location in the rootpath directory
    src_code = configuration.paths.django.root
    dst_code = configuration.paths.server.django.code
    #
    # I corrected the linking code so that it deletes already existing
    # links before creating them, otherwise you get really weird errors
    # where a link is recreated within the destination link
    from utils import link_create
    if env.debug:
        logger.debug("virtualenv.root : %s"
                     % configuration.virtualenv.paths.root)
        logger.debug("virtualenv.bin : %s\n" %
                     configuration.virtualenv.paths.bin)
        logger.debug("paths.server\n")
        logger.debug(" - root\t: %s" % configuration.paths.server.root)
        logger.debug(" - media\t: %s" %
                     configuration.paths.server.media.static)
        logger.debug(" - virtual\t: %s" % configuration.paths.server.virtual)
        logger.debug(" - django.code\t: %s\n" %
                     configuration.paths.server.django.code)
        logger.debug("django templates : %s" %
                     configuration.paths.django.templates)
        logger.debug("django root : %s" %
                     configuration.paths.django.root)
        logger.debug("django settings : %s" %
                     configuration.paths.django.settings.root)
        logger.debug("django local : %s" %
                     configuration.paths.django.settings.local)
        logger.debug(link_create(src_virtual, dst_virtual, debug=True))
        logger.debug(link_create(src_templates, dst_templates, debug=True))
        logger.debug(link_create(src_code, dst_code, debug=True))
    else:
        link_create(src_virtual, dst_virtual)
        link_create(src_templates, dst_templates)
        link_create(src_code, dst_code)
    #
    # create and link the scripts that manage the server
    # e.g. nginx, supervisor, gunicorn
    from nginx import upload as upload_nginx
    from supervisor import upload as upload_supervisor
    from django import generate as django_generate
    print_console("creating gunicorn script")
    django_generate('gunicorn', True)
    django_generate('local', True)
    print_console("creating supervisor script")
    upload_supervisor()
    print_console("creating nginx script")
    upload_nginx()
    #
    # instantiate docker containers if any
    import docker
    print_console("check to see if docker containers are used")
    if hasattr(configuration, "docker"):
        print_console("generating docker configuration file")
        docker.generate()
        print_console("creating and starting docker container")
        docker.create()
    else:
        print_console("no docker containers are being used. pass")
    #
    # create and initialize the database
    print_console("in db.generate")
    db.generate()
    print_console("in db.init")
    db.init()
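
# A rough usage sketch for the one-shot bootstrap (how the environment and
# branch are selected lives outside this module; the task path also depends
# on how the fabfile namespaces these modules):
#
#   fab deploy.bootstrap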


@task
def bootstrap_part1():
    """
    first half of the split bootstrap: sync the project files and set up
    the rootpath and virtualenv (bootstrap_part2 does the rest)
    """
    # import database as db
    # configuration = env.config
    # if env.debug:
    #     logger = loggify('deploy', 'bootstrap_part1')
    #
    # not doing a full sync, because we have to set up the rootpath,
    # virtualenv, files, dir structure, etc. This means we aren't
    # going to upload gunicorn and supervisor until after we've done
    # everything else at the end of the bootstrapping process
    sync(full=False)
    # continue setting up the rootpath and virtualenv
    setup_rootpath()
    setup_virtualenv()


@task
def bootstrap_part2():
    """
    second half of the split bootstrap: create the django project, create
    the links, upload the server scripts, set up docker and create the
    database (run bootstrap_part1 first)
    """
    import database as db
    configuration = env.config
    if env.debug:
        logger = loggify('deploy', 'bootstrap_part2')
    # create the django project
    from django import create_project
    create_project()
    #
    # link virtualenv to rootpath/private/virtualenv
    src_virtual = configuration.virtualenv.paths.root
    dst_virtual = configuration.paths.server.virtual
    #
    # link templates to rootpath/private/templates
    src_templates = configuration.paths.django.templates
    dst_templates = configuration.paths.server.django.templates
    #
    # link the django code in the project directory to the appropriate
    # location in the rootpath directory
    src_code = configuration.paths.django.root
    dst_code = configuration.paths.server.django.code
    #
    # I corrected the linking code so that it deletes already existing
    # links before creating them, otherwise you get really weird errors
    # where a link is recreated within the destination link
    from utils import link_create
    if env.debug:
        logger.debug("virtualenv.root : %s"
                     % configuration.virtualenv.paths.root)
        logger.debug("virtualenv.bin : %s\n" %
                     configuration.virtualenv.paths.bin)
        logger.debug("paths.server\n")
        logger.debug(" - root\t: %s" % configuration.paths.server.root)
        logger.debug(" - media\t: %s" %
                     configuration.paths.server.media.static)
        logger.debug(" - virtual\t: %s" % configuration.paths.server.virtual)
        logger.debug(" - django.code\t: %s\n" %
                     configuration.paths.server.django.code)
        logger.debug("django templates : %s" %
                     configuration.paths.django.templates)
        logger.debug("django root : %s" %
                     configuration.paths.django.root)
        logger.debug("django settings : %s" %
                     configuration.paths.django.settings.root)
        logger.debug("django local : %s" %
                     configuration.paths.django.settings.local)
        logger.debug(link_create(src_virtual, dst_virtual, debug=True))
        logger.debug(link_create(src_templates, dst_templates, debug=True))
        logger.debug(link_create(src_code, dst_code, debug=True))
    else:
        link_create(src_virtual, dst_virtual)
        link_create(src_templates, dst_templates)
        link_create(src_code, dst_code)
    #
    # create and link the scripts that manage the server
    # e.g. nginx, supervisor, gunicorn
    from nginx import upload as upload_nginx
    from supervisor import upload as upload_supervisor
    from django import generate as django_generate
    print_console("creating gunicorn script")
    django_generate('gunicorn', True)
    django_generate('local', True)
    print_console("creating supervisor script")
    upload_supervisor()
    print_console("creating nginx script")
    upload_nginx()
    #
    # instantiate docker containers if any
    import docker
    print_console("check to see if docker containers are used")
    if hasattr(configuration, "docker"):
        print_console("generating docker configuration file")
        docker.generate()
        print_console("creating and starting docker container")
        docker.create()
    else:
        print_console("no docker containers are being used. pass")
    #
    # create and initialize the database
    print_console("in db.generate")
    db.generate()
    print_console("in db.init")
    db.init()
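
# Per the commit note at the top of this file, the split exists because
# upgrading pip in the middle of a one-shot bootstrap broke the rest of the
# deployment, including pip install. Note that neither part calls
# bootstrap_pip(); presumably pip is bootstrapped separately between the two
# parts via the pip module (an assumption -- that step is not shown here).
# A rough usage sketch, task paths depending on the fabfile namespacing:
#
#   fab deploy.bootstrap_part1
#   # ... bootstrap pip between the two parts ...
#   fab deploy.bootstrap_part2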


@task
def sync(full=True, extras=False):
    """
    rsync the project to the remote host

    full   - full sync is ON by default and is meant for use after the
             entire directory structure, files etc. have been
             bootstrapped; otherwise, when bootstrapping, you can end
             up trying to link to files in directories which don't
             exist, so turn it off when bootstrapping
    extras - meant to determine whether the extras directory is
             included (currently unused: "/extras" is always excluded)
    """
    full = booleanize(full)
    extras = booleanize(extras)
    from fabric.contrib.project import rsync_project
    configuration = env.config
    debug_prefix = "DEBUG: deploy.sync"
    #
    # ensure the remote directory exists
    remote_dir = "{prj_name}/{prj_branch}".format(
        prj_name=configuration.project.name,
        prj_branch=configuration.project.branch)
    #
    # add a slash to the end of the directory so that rsync will upload
    # everything inside the directory as opposed to moving the directory over
    local_dir = configuration.paths.project.local + "/"
    command_mkdir_remote = "mkdir -p {remote}".format(remote=remote_dir)
    excludeitems = (".git", "*.swp", "*.swo", ".DS_Store", "*.pyc", "*.bak",
                    "build/*", "/extras", "/opt")
    if env.debug:
        print "\n%s debug: %s" % (debug_prefix, env.debug)
        print "\n%s project.name %s" \
            % (debug_prefix, configuration.project.name)
        print "%s project.branch %s" \
            % (debug_prefix, configuration.project.branch)
        print "%s path.project %s" \
            % (debug_prefix, configuration.paths.project.root)
        print "\n%s run(%s)" % (debug_prefix, command_mkdir_remote)
        print "\n{debug_prefix} rsync_project(\n\tremote_dir={remote_dir}," \
            "\n\tlocal_dir={local_dir},\n\texclude={excludeitems})".format(
                debug_prefix=debug_prefix,
                remote_dir=remote_dir,
                local_dir=local_dir,
                excludeitems=excludeitems)
        # print "\n%s override: %s " \
        #     % (debug_prefix, configuration.overrides.keys())
        # print "%s has overrides? %s" \
        #     % (debug_prefix, (len(configuration.overrides.keys()) > 0))
    else:
        # if we are working on the development branch,
        # either SYNCING TO OR FROM, then just exit at this point
        if configuration.project.branch == "development":
            print """
            ------------------------------------
            NOTE: this is a hack for the function deploy.sync()
            When I originally set up sync and bootstrap, I did not consider
            what would happen if I wanted to bootstrap the branch I was
            running on.
            That is, if I am running commands on the development branch and
            then I ask development to bootstrap or sync, I get a problem
            whereby I could potentially end up overwriting my own directory,
            which would be pointless.
            So I decided that, as a temporary measure, if I do any kind of
            bootstrapping or anything that calls sync and I am doing it on
            the development branch server, then the sync will fail and print
            out this message.
            Instead of just keeping it in a comment, I thought it better to
            print it out so that no matter what I should always be aware of
            the issue and maybe later I can fix it.
            Right after this message gets printed, the sync function is told
            to "return" without any arguments.
            ------------------------------------\n
            """
            #
            # exit the function without any arguments
            return
        # print "remote_dir: %s" % remote_dir
        # print "local_dir: %s" % local_dir
        # import sys
        # sys.exit()
        run(command_mkdir_remote)
        rsync_project(remote_dir=remote_dir, local_dir=local_dir,
                      exclude=excludeitems)
        if full:
            from pip import install as pip_install
            pip_install('--all')
        # overrides()
        #
        # NOTE: only on a full sync, upload the server scripts and
        # collect the static files
        if full:
            from nginx import upload as upload_nginx
            from supervisor import upload as upload_supervisor
            from django import generate as django_generate
            upload_supervisor()
            django_generate('gunicorn', True)
            upload_nginx()
            from django import collectstatic as django_collectstatic
            django_collectstatic()
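
# booleanize() is needed because arguments arrive as strings when a task is
# invoked from the command line with fabric's task:arg=value syntax. A rough
# usage sketch (task path depends on the fabfile namespacing):
#
#   fab deploy.sync              # full sync: pip install, scripts, static
#   fab deploy.sync:full=False   # files only, as used by bootstrap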


@task
def media(source_branch):
    """
    sync media files from source branch to current branch

    source_branch - the branch from which we are copying media files
    """
    configuration = env.config
    import initialize
    print "project.branch: %s" % configuration.project.branch
    #
    # if we use initialize.environment, we will overwrite all project
    # information, so use get_config instead
    configuration_src = initialize.get_config(source_branch)
    configuration_dst = configuration
    print "branch_src: %s" % configuration_src.project.branch
    print "branch_dst: %s" % configuration_dst.project.branch
    print "src - server_media_dynamic: %s" % \
        configuration_src.paths.server.media.dynamic
    print "dst - server_media_dynamic: %s" % \
        configuration_dst.paths.server.media.dynamic
    # add a trailing slash to the directories
    dynamic_src = "%s/" % configuration_src.paths.server.media.dynamic
    dynamic_dst = "%s/" % configuration_dst.paths.server.media.dynamic
    # rsync can only sync to one remote, so check and see if the dst or
    # src project.host is pointing to localhost
    if configuration_dst.project.host == 'localhost':
        cmd_rsync = "rsync -pthrvz --rsh='ssh -p 22' " \
                    " {user_src}@{host_src}:{path_src} " \
                    " {path_dst}".format(
                        user_src=configuration_src.project.user,
                        host_src=configuration_src.project.host,
                        path_src=dynamic_src,
                        user_dst=configuration_dst.project.user,
                        host_dst=configuration_dst.project.host,
                        path_dst=dynamic_dst,
                    )
    else:
        cmd_rsync = "rsync -pthrvz --rsh='ssh -p 22' " \
                    " {path_src} " \
                    " {user_dst}@{host_dst}:{path_dst}".format(
                        user_src=configuration_src.project.user,
                        host_src=configuration_src.project.host,
                        path_src=dynamic_src,
                        user_dst=configuration_dst.project.user,
                        host_dst=configuration_dst.project.host,
                        path_dst=dynamic_dst,
                    )
    print cmd_rsync
    upstream = True
    remote_dir = dynamic_dst
    local_dir = dynamic_src
    if configuration_src.project.host == "localhost":
        remote_dir = dynamic_dst
        local_dir = dynamic_src
        upstream = True
        # TODO
        # get rid of all this
        #
        # rsync relies on env.host_string to determine what the username
        # and host is
        # print "before: %s" % env.host_string
        # configuration_dst = initialize.environment(source_branch)
        # print "after: %s" % env.host_string
        # import sys
        # sys.exit()
        print "upstream: %s" % upstream
        print "remote_dir: %s" % remote_dir
        print "local_dir: %s" % local_dir
        print "\ncopy from {src} to {dst}\n".format(
            src=configuration_src.project.host,
            dst=configuration_dst.project.host)
    elif configuration_dst.project.host == "localhost":
        remote_dir = dynamic_src
        local_dir = dynamic_dst
        upstream = False
        print "upstream: %s" % upstream
        print "remote_dir: %s" % remote_dir
        print "local_dir: %s" % local_dir
        print "\ncopy from {src} to {dst}\n".format(
            src=configuration_src.project.host,
            dst=configuration_dst.project.host)
    else:
        print_console("not moving media files from staging to production")
        return
    cmd_msg = "rsync_project(remote_dir={remote_dir}, " \
              "local_dir={local_dir}, upload={upstream})".format(
                  remote_dir=remote_dir,
                  local_dir=local_dir,
                  upstream=upstream)
    print cmd_msg
    from fabric.contrib.project import rsync_project
    rsync_project(remote_dir=remote_dir,
                  local_dir=local_dir,
                  upload=upstream)
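
# A rough usage sketch: pull dynamic media from another branch into the
# current one ("production" is a hypothetical source branch name; the task
# path depends on the fabfile namespacing):
#
#   fab deploy.media:production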


@task
def test():
    """
    print a few configuration values as a sanity check
    """
    configuration = env.config
    projectpath = configuration.paths.project.root
    local_dir = configuration.paths.project.local + "/"
    print hasattr(configuration, 'docker')
    print "project path : %s" % projectpath
    print "local dir : %s" % local_dir


@task
def remove(full=True):
    """
    remove the project, delete the database
    """
    full = booleanize(full)
    configuration = env.config
    if env.debug:
        logger = loggify('deploy', 'remove')
    import sys
    if env.branch == "development" and not env.debug and full:
        print """
        --------------------------------
        WARNING:
        You were just about to delete the development branch
        from your computer!
        THIS IS A BAD IDEA BECAUSE YOU MIGHT BE DOING IT BY ACCIDENT
        Exiting NOW.
        --------------------------------
        """
        sys.exit()
    #
    # prepare to remove the remote directory
    remote_dir = "{prj_name}/{prj_branch}".format(
        prj_name=configuration.project.name,
        prj_branch=configuration.project.branch)
    remote_dir_parent = os.path.join(
        configuration.paths.project.root,
        '..')
    command_rm_project = "rm -Rf {projectpath}".format(
        projectpath=configuration.paths.project.root)
    command_rm_project_parent = "rm -Rf {projectparent}".format(
        projectparent=remote_dir_parent)
    command_rm_rootpath = "rm -Rf {rootpath}".format(
        rootpath=configuration.paths.server.root)
    command_rm_virtualenv = "rmvirtualenv {virtualenv_name}".format(
        virtualenv_name=configuration.virtualenv.name)
    if env.debug:
        logger.debug("project.name : %s" % configuration.project.name)
        logger.debug("project.branch : %s"
                     % configuration.project.branch)
        logger.debug("paths.project : %s"
                     % configuration.paths.project.root)
        logger.debug("remote_dir : %s" % remote_dir)
        logger.debug("remote_dir parent : %s" % remote_dir_parent)
        logger.debug("rootpath : %s"
                     % configuration.paths.server.root)
        logger.debug("--- removal commands ---")
        logger.debug("remove project dir : %s" % command_rm_project)
        logger.debug("remove parent dir : %s" % command_rm_project_parent)
        logger.debug("rootpath : %s" % command_rm_rootpath)
        logger.debug("virtualenv : %s" % command_rm_virtualenv)
        value = run("ls %s -l | wc -l" % remote_dir_parent)
        logger.debug("value : %s" % value)
        logger.debug("len value : %s" % len(value))
        logger.debug("type value : %s" % type(value))
        logger.debug("value == 2 : %s" % (value == "2"))
    else:
        #
        # NOTE: I have to put the imports for these functions here,
        # because otherwise they interfere with this module's version
        # of "remove"
        from nginx import remove as nginx_remove
        from supervisor import remove as supervisor_remove
        nginx_remove()
        supervisor_remove()
        from database import drop_all as db_drop_all
        db_drop_all()
        #
        # check to see if the parent directory contains anything else
        # remote_ls_value = run("ls %s -l | wc -l" % remote_dir_parent)
        # if remote_ls_value == "2":
        #     run(command_rm_project_parent)
        # else:
        #     run(command_rm_project)
        #
        # only remove the project directory if this is a full
        # removal.
        if full:
            run(command_rm_project)
        run(command_rm_rootpath)
        with virtualenv_source():
            run(command_rm_virtualenv)
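
# A rough usage sketch (task path depends on the fabfile namespacing):
#
#   fab deploy.remove              # also removes the synced project directory
#   fab deploy.remove:full=False   # keep the synced project directory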


@task
def ssh():
    """
    open an interactive ssh session to the project host
    """
    configuration = env.config
    if configuration.project.ssh:
        cmd_ssh = "ssh {user}@{host} -i {sshkey}".format(
            user=configuration.project.user,
            host=configuration.project.host,
            sshkey=configuration.project.ssh)
    else:
        cmd_ssh = "ssh {user}@{host}".format(
            user=configuration.project.user,
            host=configuration.project.host)
    local(cmd_ssh)