new file: api.py

new file:   api.pyc
	new file:   conf/fabric.yml
	new file:   fabfile.py
	new file:   fabfile.pyc
	new file:   modules/__init__.py
	new file:   modules/__init__.pyc
	new file:   modules/conf_setup.py
	new file:   modules/conf_setup.pyc
	new file:   modules/configuration_setup.py
	new file:   modules/database.py
	new file:   modules/database.pyc
	new file:   modules/deploy.py
	new file:   modules/deploy.pyc
	new file:   modules/django.py
	new file:   modules/django.pyc
	new file:   modules/docker.py
	new file:   modules/docker.pyc
	new file:   modules/initialize.py
	new file:   modules/initialize.pyc
	new file:   modules/maintenance.py
	new file:   modules/maintenance.pyc
	new file:   modules/nginx.py
	new file:   modules/nginx.pyc
	new file:   modules/pip.py
	new file:   modules/pip.pyc
	new file:   modules/setup.pyc
	new file:   modules/supervisor.py
	new file:   modules/supervisor.pyc
	new file:   modules/testing/__init__.py
	new file:   modules/testing/__init__.pyc
	new file:   modules/testing/configuration_setup.py
	new file:   modules/testing/maintenance.pyc
	new file:   modules/utils.py
	new file:   modules/utils.pyc
	new file:   templates/conf/database/files/db.drop_all.sql.jinja2
	new file:   templates/conf/database/files/db.drop_db.sql.jinja2
	new file:   templates/conf/database/files/db.init.sql.jinja2
	new file:   templates/conf/database/files/db.re_init.sql.jinja2
	new file:   templates/conf/django/files/gunicorn.jinja2
	new file:   templates/conf/django/files/gunicorn.unixsocket.jinja2
	new file:   templates/conf/django/files/local.jinja2
	new file:   templates/conf/django/files/settings.jinja2
	new file:   templates/conf/django/files/settings18.jinja2
	new file:   templates/conf/django/files/wsgi.jinja2
	new file:   templates/conf/django/files/wsgi.py
	new file:   templates/conf/docker/files/database.jinja2
	new file:   templates/conf/gunicorn/files/gunicorn.jinja2
	new file:   templates/conf/gunicorn/files/gunicorn.unixsocket.jinja2
	new file:   templates/conf/gunicorn/files/local.jinja2
	new file:   templates/conf/gunicorn/files/settings.jinja2
	new file:   templates/conf/gunicorn/files/settings18.jinja2
	new file:   templates/conf/gunicorn/files/wsgi.jinja2
	new file:   templates/conf/gunicorn/files/wsgi.py
	new file:   templates/conf/nginx/files/default.conf.jinja2
	new file:   templates/conf/nginx/files/unixsocket.jinja2
	new file:   templates/conf/supervisor/files/conf_old
	new file:   templates/conf/supervisor/files/supervisor.jinja2
	new file:   templates/meta/development.yml
	new file:   templates/meta/layout.yml
	new file:   templates/meta/staging.yml
	new file:   templates/readmes/aws.md
	new file:   templates/readmes/gandi.md
	new file:   templates/readmes/reset_migrations.md
	new file:   templates/readmes/setup_gandi.md
	new file:   templates/readmes/translations.md
	new file:   templates/readmes/update_images.md
This commit is contained in:
Ronny Abraham 2016-09-06 14:43:49 +03:00
parent 843e2caef0
commit 2bef12902e
67 changed files with 6772 additions and 0 deletions

11
api.py Normal file
View file

@ -0,0 +1,11 @@
import modules.nginx as nginx
import modules.supervisor as supervisor
import modules.utils as utils
import modules.pip as pip
import modules.django as django
import modules.deploy as deploy
import modules.docker as docker
import modules.database as database
import modules.maintenance as maintenance
import modules.conf_setup as conf_setup

BIN
api.pyc Normal file

Binary file not shown.

11
conf/fabric.yml Normal file
View file

@ -0,0 +1,11 @@
# fabric tool configuration: where the template trees used by the
# backup/deploy tasks live, relative to the fabric tool root.
version: 1.0
# location of templates for all files used in project
templates:
  root: "templates"
  # readme templates (see modules/conf_setup.py _backup_readmes/deploy_readmes)
  readmes: "readmes"
  # meta branch-configuration yml templates (layout.yml, development.yml, ...)
  meta: "meta"
  # per-subsystem conf templates (database, nginx, supervisor, ...)
  conf: "conf"

78
fabfile.py vendored Normal file
View file

@ -0,0 +1,78 @@
from fabric.api import env, task
import modules.initialize as initialize
from modules.utils import booleanize
from api import *
#
# Note: this is a hack so I can add the various module files
# into fab --list, but without getting the syntastic errors
# because to do it I need to import them whether or not I'm using
# them. And if I am not using them, I'll get a syntastic error
# Default branch; overwritten by the stage()/prod()/devel() tasks below.
branch = "development"


def setup():
    """Announce configuration-file setup.

    NOTE(review): currently only prints a message -- no files are
    actually generated here yet.
    """
    print("setup fabric configuration files")


def all():
    """Common bootstrap shared by every branch task.

    Normalizes the optional ``fab ... --set debug=True`` flag on
    fabric's ``env`` to a real boolean, then loads this branch's
    configuration into ``env`` via ``initialize.environment``.

    NOTE: the name shadows the ``all`` builtin, but ``stage``/``prod``/
    ``devel`` all call it, so renaming would break those tasks.
    """
    #
    # set debugging condition
    # fab {branch} {fab_command} --set debug=True
    if 'debug' in env:
        env.debug = booleanize(env.debug)
    else:
        env.debug = False
    #
    # add the config attribute to env
    initialize.environment(branch)
@task
def stage():
    """ Staging environment """
    # point the module-level branch marker at staging before the
    # shared bootstrap loads its configuration
    global branch
    branch = "staging"
    print("in staging")
    all()
@task
def prod():
    """ Production environment """
    # point the module-level branch marker at production before the
    # shared bootstrap loads its configuration
    global branch
    branch = "production"
    print("in production")
    all()
@task
def devel():
    """ development environment """
    global branch
    branch = "development"
    all()


# default to the development environment when no branch task is named
devel()

BIN
fabfile.pyc Normal file

Binary file not shown.

0
modules/__init__.py Normal file
View file

BIN
modules/__init__.pyc Normal file

Binary file not shown.

237
modules/conf_setup.py Normal file
View file

@ -0,0 +1,237 @@
from fabric.operations import run, put
from fabric.api import env, task
import os
from maintenance import _get_configuration_path, load_configuration
@task
def deploy(param=None):
    """Deploy templates from the fabric tree into the project.

    Keyword arguments:
    param -- one of 'conf' or 'readmes'

    Exits the process with a usage message for a missing or
    unrecognized param (the old version silently did nothing for an
    unrecognized value).
    """
    param_list = ['conf', 'readmes']
    if param not in param_list:
        print("this requires input param must be one of %s" % param_list)
        import sys
        sys.exit()
    if param == 'conf':
        deploy_conf()
    elif param == 'readmes':
        deploy_readmes()
@task
def backup(param=None):
    """Back up project files into the fabric template tree.

    Keyword arguments:
    param -- one of 'conf', 'meta' or 'readmes'

    Exits the process with a usage message for a missing or
    unrecognized param (the old version silently did nothing for an
    unrecognized value).
    """
    param_list = ['conf', 'meta', 'readmes']
    if param not in param_list:
        print("this requires input param must be one of %s" % param_list)
        import sys
        sys.exit()
    if param == 'conf':
        _backup_conf()
    elif param == 'meta':
        _backup_meta()
    elif param == 'readmes':
        _backup_readmes()
def _backup_conf():
    """Copy each conf template section's 'files' directory back into
    the fabric template tree, one subdirectory per section key."""
    configuration = env.config
    for key in configuration.templates.keys():
        section = getattr(configuration.templates, key)
        src = os.path.join(section['path']['local'], 'files')
        dst = os.path.join(
            configuration.tools.fabric.templates.conf, key)
        run("mkdir -p %s" % dst)
        put(src, dst)
def _backup_meta():
    """Back up the meta configuration into the fabric template tree.

    Copies layout.yml verbatim, and stores scrubbed versions of the
    development and staging branch files (see _modify_configuration).

    Note: the redundant local ``from fabric.operations import run, put``
    was removed -- both names are already imported at module level.
    """
    configuration = env.config
    source_path_layout = _get_configuration_path(
        'layout', 'development')
    config_dev = _modify_configuration('development')
    config_stg = _modify_configuration('staging')
    dest_path = configuration.tools.fabric.templates.meta
    run("mkdir -p %s" % dest_path)
    #
    # copy over the layout.yml file only
    put(source_path_layout, dest_path)
    #
    # the branch yml files need their site-specific values replaced
    # with placeholders before they can be stored as templates
    _store_configuration(config_dev)
    _store_configuration(config_stg)
def _backup_readmes():
    """Copy the project's scripts/readmes files back into the fabric
    template tree."""
    configuration = env.config
    dst = configuration.tools.fabric.templates.readmes
    src = os.path.join(
        configuration.paths.project.root, 'scripts', 'readmes')
    copy_directories(src, dst)
def _modify_configuration(branch):
    """Load meta/<branch>.yml and replace site-specific values with
    placeholder tokens so it can be stored as a template.

    Keyword arguments:
    branch -- the name of the branch whose configuration file we are
              scrubbing

    Some placeholders are positional conventions: when project/host is
    NOT 'localhost' the stored IP is replaced with "PROJECT_IP", and a
    docker database host keeps its host but masks its port.

    Meant to be consumed by _store_configuration.
    """
    cfg = load_configuration('config', branch)
    #
    # basic project configuration
    project = cfg['project']
    project['name'] = "PROJECT_NAME"
    project['paths']['home'] = "PROJECT_NAME.prj"
    project['user'] = "BRANCH_USER"
    project['group'] = "BRANCH_GROUP"
    if project['host'] != 'localhost':
        project['host'] = "PROJECT_IP"
    #
    # database configuration
    db = cfg['database']
    db['name'] = "PROJECT_NAME_" + project['extension']
    if db['host'] == "docker":
        db['port'] = "DOCKER_PORT"
    else:
        db['host'] = "DATABASE_IP"
    #
    # database user name values
    db['users']['admin']['name'] = "DATABASE_ADMIN_NAME"
    db['users']['admin']['pass'] = "DATABASE_ADMIN_PASS"
    db['users']['default']['name'] = "DATABASE_USER_NAME"
    db['users']['default']['pass'] = "DATABASE_USER_PASS"
    #
    # django configuration
    cfg['django']['port'] = "DJANGO_PORT"
    cfg['django']['host'] = "DJANGO_IP"
    #
    # nginx and virtualenv configuration
    cfg['nginx']['port'] = "NGINX_PORT"
    cfg['virtualenv']['name'] = "PROJECT_NAME"
    return cfg
def _store_configuration(config_dict):
    """Serialize a branch configuration dict to yaml and upload it as
    meta/<branch>.yml in the fabric template tree.

    Keyword arguments:
    config_dict -- yaml based configuration dictionary object
    """
    from StringIO import StringIO
    import yaml
    configuration = env.config
    branch_file = config_dict['project']['branch'] + ".yml"
    dest_path = os.path.join(
        configuration.tools.fabric.templates.meta,
        branch_file)
    # upload the rendered yaml straight from memory, no temp file
    put(StringIO(yaml.dump(config_dict)), dest_path)
def deploy_readmes():
    """
    takes the readme files from tools/fabric/templates/readmes
    and puts them under the top level of the project/scripts directory
    """
    configuration = env.config
    src = configuration.tools.fabric.templates.readmes
    dst = os.path.join(
        configuration.paths.project.root, 'scripts', 'readmes')
    copy_directories(src, dst)
def deploy_conf():
    """
    takes the conf templates from tools/fabric/templates/conf
    and puts them under scripts/conf.

    Note, these "conf" files are NOT the same as the meta/branch.yml conf
    files. They are configuration files meant for the various subsystems
    of the project. ie, database, docker, supervisor, etc.
    """
    configuration = env.config
    conf_root = configuration.tools.fabric.templates.conf
    for key in configuration.templates.keys():
        section = getattr(configuration.templates, key)
        copy_directories(
            os.path.join(conf_root, key),
            section['path']['local'])
def copy_directories(source_path, dest_path):
    """Copy every file in source_path into dest_path via fabric.put.

    Keyword arguments:
    source_path -- the source dir
    dest_path -- the destination dir
    """
    run("mkdir -p %s" % dest_path)
    # enumerate the source files remotely, then put them one at a time
    for fname in run("ls %s" % source_path).split():
        put(os.path.join(source_path, fname), dest_path)

BIN
modules/conf_setup.pyc Normal file

Binary file not shown.

View file

@ -0,0 +1,160 @@
from fabric.contrib.files import exists as fab_exists
from fabric.operations import run
from fabric.api import env
import os
import sys
import utils
def check_version(section):
    # TODO: not implemented -- presumably meant to verify that the
    # built conf file for *section* matches the template version (a
    # "version" key exists in conf/fabric.yml); confirm intent.
    pass
def check_is_conf(section, handle_error=False):
    """confirms that the configuration section name passed is a legit one

    Keyword Arguments:
    section -- the configuration section we are looking at
    handle_error -- if True, print out an error message and exit

    Returns True when *section* is a configured template section,
    False otherwise. With handle_error set, an invalid section prints
    the valid options and exits the process instead of returning.
    """
    configuration = env.config
    if section in configuration.templates.keys():
        return True
    if handle_error:
        print("""
Error. maintenance.exists_dir_sub takes a 'section' parameter value.
'%s' is not a valid parameter value.

Valid options include:""" % section)
        for key in configuration.templates.keys():
            print(" %s" % key)
        print("""
Please run the command again, but this time enter a valid section value.
""")
        sys.exit()
    return False
def exists_dir_top():
    """ Check if the parent directory for all configuration files exists"""
    configuration = env.config
    # NOTE
    # all template configuration files are built off the files that are
    # contained LOCALLY. I don't bother building them off the remotely
    # located files, since those files get rsync'd anyway.
    #
    # Consistency fix: use the module-level fab_exists alias instead of
    # re-importing fabric.contrib.files.exists locally.
    if env.debug:
        print("maintenance.exists_dir_top -- checking for "
              "directory:\n\t%s\n" % configuration.paths.conf.local)
    return fab_exists(configuration.paths.conf.local)
def exists_dir_sub(section):
    """Check if the subdirectory for this configuration type exists in the
    configuration directory

    Keyword Arguments:
    section -- the configuration section we are looking at

    Returns the existence check result in BOTH modes now; previously
    the debug branch printed diagnostics and fell through, returning
    None.
    """
    configuration = env.config
    # NOTE
    # all template configuration files are built off the files that are
    # contained LOCALLY. I don't bother building them off the remotely
    # located files, since those files get rsync'd anyway.
    check_is_conf(section)
    _template = getattr(configuration.templates, section)
    if env.debug:
        utils.printvar('template.path.local', _template.path.local)
        path_test = os.path.join(_template.path.local, 'blah')
        utils.printvar('exists_path_test', fab_exists(path_test))
        utils.printvar('exists_local', fab_exists(_template.path.local))
    # BUG FIX: always return the check, even in debug mode
    return fab_exists(_template.path.local)
def exists_file(section):
    """Check if the template file for this configuration type exists in the
    configuration directory

    Keyword Arguments:
    section -- the configuration type
    """
    configuration = env.config
    # validate the section name and warn about the sub-directory;
    # both helpers print/exit on their own
    check_is_conf(section)
    exists_dir_sub(section)
    utils.print_console("\tNOTE: exists_file ONLY works when run on the"
                        " local branch!\n\tThis is because it is set up to "
                        " only check development template config files",
                        numsep=90)
    _template = getattr(configuration.templates, section)
    # source template lives under <section local path>/files/<src name>
    path_src = os.path.join(
        _template.path.local,
        'files',
        _template.conf.src)
    if env.debug:
        utils.printvar('template.path.local', _template.path.local)
        utils.printvar('template.src', _template.conf.src)
        utils.printvar('template.dst', _template.conf.dst)
        utils.printvar('path_src', path_src)
        utils.printvar('path_exists', fab_exists(path_src))
    return fab_exists(path_src)
# TODO
# DONE 1. make sure the configuration request is legit for this branch
# DONE 2. check to see if conf directory exists
# DONE 3. check to see if conf template file exists
# 4a. (optional) add switch to check if conf file was built from template
# 4b. (optional) check to see if version is up to date
def create_dir():
    # TODO: not implemented yet
    # 1. make sure the configuration request is legit for this branch
    # 2. check to see if conf dir already exists
    # 3. if not create it
    pass
def create_dir_top():
    """Creates the top level conf directory if it does not exist

    Note: the redundant local ``import utils`` was removed -- the
    module already imports utils at the top.
    """
    configuration = env.config
    if not exists_dir_top():
        # NOTE(review): exists_dir_top() tests paths.conf.local but the
        # directory is created at paths.conf.remote -- confirm this
        # asymmetry is intentional
        cmd_mkdir = "mkdir %s" % configuration.paths.conf.remote
        run(cmd_mkdir)
    else:
        msg = "configuration directory already exists, aborting create." \
            "Continue? Y/n"
        utils.prompt_continue(message=msg)
def create_file():
    # TODO: not implemented -- presumably meant to build a conf file
    # from its template; confirm intended behavior before implementing.
    pass

424
modules/database.py Normal file
View file

@ -0,0 +1,424 @@
from fabric.api import env, task
# # from jinja2 import Environment
import os
# from utils import upload_template as utils_upload_template
# from utils import loggify, print_console
import utils
from getpass import getpass
from fabric.operations import run
# Reminder string appended to every generate/execute run: the gunicorn
# or supervisor service must be stopped before changing the database,
# otherwise the changes do not take and syncing fails.
NOTE = """
\n\n\n
NOTE: you MUST deactivate the gunicorn or supervisor service if you want
to make changes to the database, otherwise, the changes will not run and
you will NOT BE ABLE TO SYNC. SO TURN THE FUCKING THING OFF\n\n\n"""
def generate_sql(script_name):
    """
    generates the sql files and puts them in
    the build directory for this branch

    script_name -- key into configuration.templates.database
                   ('init', 're_init', 'drop_db', 'drop_all')
    """
    configuration = env.config
    if env.debug:
        logger = utils.loggify('database', 'generate_sql')
    # destination: <remote database template path>/build/<dst filename>
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    # source: the jinja2 template under the LOCAL template path, in 'files'
    template_file = getattr(configuration.templates.database, script_name).src
    template_dir = os.path.join(
        configuration.templates.database.path.local,
        'files')
    # jinja2 context used to render the sql template
    context = dict()
    context['db_name'] = configuration.server.database.name
    context['db_user'] = configuration.server.database.user
    context['db_password'] = configuration.server.database.password
    if env.debug:
        logger.debug("context = %s" % context)
        logger.debug("build_path = %s" % build_path)
        logger.debug(
            "db_name : %s " % configuration.server.database.name)
        logger.debug(
            "db_user : %s " % configuration.server.database.user)
        logger.debug(
            "db_password : %s " % configuration.server.database.password)
        #
        # when we set debug=True, this function returns a string with the
        # command as it would have been executed
        upload_msg = utils.upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir,
            debug=True)
        logger.debug(upload_msg)
    else:
        utils.upload_template(
            filename=template_file,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=template_dir)
    # remind the operator to stop gunicorn/supervisor (see NOTE above)
    print NOTE
def execute_sql(script_name, add_dbname=True, is_admin=False):
    """Run a previously generated sql script through psql.

    Keyword arguments:
    script_name -- key into configuration.templates.database
    add_dbname -- when True connect to the project database; when False
                  connect to the default 'postgres' database (needed
                  when the project database does not exist yet)
    is_admin -- when True run as the configured admin user instead of
                the project's database user
    """
    if env.debug:
        logger = utils.loggify('database', 'execute_sql')
    configuration = env.config
    build_file = getattr(configuration.templates.database, script_name).dst
    build_path = os.path.join(
        configuration.templates.database.path.remote,
        'build',
        build_file)
    if add_dbname is True:
        db_name = configuration.server.database.name
    else:
        db_name = "postgres"
    port = configuration.server.database.port
    host = configuration.server.database.host
    if is_admin:
        user = configuration.server.database.admin.user
    else:
        user = configuration.server.database.user
    psql_command = "psql -h {host} -p {port} -U {user} " \
        " -f {sqlfile} {db_name}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            sqlfile=build_path,)
    if env.debug:
        # debug mode only logs the command; nothing is executed
        logger.debug("db_name = %s" % db_name)
        logger.debug("run( %s ) " % psql_command)
    else:
        run_database_command(psql_command)
    # remind the operator to stop gunicorn/supervisor (see NOTE above)
    print NOTE
@task
def generate():
    """
    helper function to upload all the scripts
    """
    # render every database sql template into the build directory
    for script in ('init', 're_init', 'drop_db', 'drop_all'):
        generate_sql(script)
@task
def clear_scripts():
    """
    clears all the sql scripts from scripts/conf/postgres/build/*.sql

    does this on the remote branch and not local. Because conf files for each
    branch are specifically tied to the remote site.

    the "local" directory refers to my computer, technically from wherever
    fabric is being run, but that is always development.
    """
    configuration = env.config
    _template = getattr(configuration.templates, 'database')
    glob_path = os.path.join(_template.path.remote, 'build', '*.sql')
    if env.debug:
        # debug: only list the files that WOULD be removed
        cmd_lsdir = "ls %s" % glob_path
        utils.printvar('cmd_lsdir', cmd_lsdir)
        for line in run(cmd_lsdir).split('\r\n'):
            print(line)
    else:
        print(run("rm %s" % glob_path))
@task
def init():
    """
    runs the database initialization script
    """
    # no db name and admin user here: the database itself does not
    # exist until this script has run
    execute_sql('init', add_dbname=False, is_admin=True)


@task
def re_init():
    """
    re-initializes the database

    drop the database, recreate it, don't touch the original user
    """
    execute_sql('re_init', add_dbname=False, is_admin=True)


@task
def drop_all():
    """
    drop the database and drop the user
    """
    execute_sql('drop_all', add_dbname=False, is_admin=True)


@task
def drop_db():
    """
    drop only the database but ignore the user
    """
    execute_sql('drop_db')
@task
def commandline(dbuser='default'):
    """
    logs into command line of the postgres database of the branch

    dbuser - set to "default" but can also be set to "admin"

    the admin user is what it says, I have both of these in case I need to
    switch between the master user for the entire postgres install or the owner
    of the particular database
    """
    configuration = env.config
    if env.debug:
        logger = utils.loggify('database', 'commandline')
    db_name = configuration.server.database.name
    host = configuration.server.database.host
    port = configuration.server.database.port
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    else:
        # BUG FIX: a dbuser other than 'admin'/'default' used to leave
        # `user` unbound and raise NameError; anything other than
        # 'admin' now falls back to the default database user
        user = configuration.server.database.user
    if env.debug:
        logger.debug("branch: %s" % configuration.project.branch)
        logger.debug("host : %s" % host)
        logger.debug("port : %s" % port)
        logger.debug("user : %s" % user)
        logger.debug("name : %s" % db_name)
    run_database_command("psql -h {host} -p {port} -U {user} {db_name}".format(
        db_name=db_name,
        host=host,
        port=port,
        user=user
    ))
@task
def datadump(dbuser='default', filename='test.sql'):
    """
    creates a dump of the database for backup and storage

    dbuser -- "default" (the database owner) or "admin" (the master
              user for the entire postgres install)
    filename -- name of the dump file written into the configured
                database backups directory; default kept at "test.sql"
                for backward compatibility (it used to be hard-coded)
    """
    configuration = env.config
    db_name = configuration.server.database.name
    port = configuration.server.database.port
    host = configuration.server.database.host
    if dbuser == 'admin':
        user = configuration.server.database.admin.user
    else:
        # fall back to the default user so an unknown dbuser no longer
        # raises NameError
        user = configuration.server.database.user
    utils.print_console("dbuser = %s" % dbuser)
    dumpfilename = os.path.join(
        configuration.paths.server.backups.database,
        filename)
    cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user} {db_name} " \
        "-f {dumpfilename}".format(
            db_name=db_name,
            host=host,
            port=port,
            user=user,
            dumpfilename=dumpfilename
        )
    run_database_command(cmd_pg_dump)
def run_database_command(cmd_string):
    """Prompt for the database password and run *cmd_string* with
    PGPASSWORD set for it.

    NOTE(review): the password is interpolated into the remote command
    line, so it can show up in shell history / process listings on the
    remote host -- consider a ~/.pgpass file instead.
    """
    psqlpass = getpass('Enter your database password:')
    run("PGPASSWORD={psqlpass} {cmd_string}".format(
        psqlpass=psqlpass,
        cmd_string=cmd_string,
    ))
def get_template_path(script_name, script_type):
    """Return the remote path of a database sql script.

    Keyword arguments:
    script_name -- 'init', 're_init', 'drop_db' or 'drop_all'
    script_type -- 'build' for the rendered script, 'template' for its
                   jinja2 source

    Exits the process with an error message for any other script_type.
    """
    configuration = env.config
    if script_type == 'build':
        file_build = getattr(configuration.templates.database, script_name).dst
        path = os.path.join(
            configuration.templates.database.path.remote,
            'build',
            file_build)
    elif script_type == 'template':
        file_template = getattr(
            configuration.templates.database, script_name).src
        path = os.path.join(
            configuration.templates.database.path.remote,
            'files',
            file_template)
    else:
        # BUG FIX: the two halves of this message used to concatenate
        # without a space ("must passeither")
        print("Error, you passed the variable %s, must pass "
              "either 'build' or 'template'" % script_type)
        import sys
        sys.exit()
    return path
@task
def edit(param='help'):
"""
calls up mvim on the built conf files
"""
from maintenance import edit as maintenance_edit
locations = {
'build.init': {
'path': get_template_path('init', 'build'),
'desc': 'remote init conf file',
},
'template.init': {
'path': get_template_path('init', 'template'),
'desc': 'remote version of init conf template',
},
'build.re_init': {
'path': get_template_path('re_init', 'build'),
'desc': 'remote re_init conf file',
},
'template.re_init': {
'path': get_template_path('re_init', 'template'),
'desc': 'remote version of re_init conf template',
},
'build.drop_db': {
'path': get_template_path('drop_db', 'build'),
'desc': 'remote drop_db conf file',
},
'template.drop_db': {
'path': get_template_path('drop_db', 'template'),
'desc': 'remote version of drop_db conf template',
},
'build.drop_all': {
'path': get_template_path('drop_all', 'build'),
'desc': 'remote drop_all conf file',
},
'template.drop_all': {
'path': get_template_path('drop_all', 'template'),
'desc': 'remote version of drop_all conf template',
},
}
if param in locations.keys():
remote_path = locations[param]['path']
maintenance_edit(remote_path=remote_path)
else:
print """
"fab database.edit" automates editing files important to django whether
locally or remotely
to use this you must pass one of the editable locations in as a
parameter
currently editable locations are:
"""
for k_loc in locations.keys():
print "\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc'])
return
@task
def test():
    """Smoke test: open a psql session for the branch's default user."""
    configuration = env.config
    db = configuration.server.database
    run_database_command(
        "psql -h {host} -p {port} -U {user} {db_name}".format(
            db_name=db.name,
            host=db.host,
            port=db.port,
            user=db.user
        ))

BIN
modules/database.pyc Normal file

Binary file not shown.

478
modules/deploy.py Normal file
View file

@ -0,0 +1,478 @@
from fabric.api import env, task
from fabric.operations import run
# from fabric.contrib.files import upload_template
# from utils import print_run
from utils import virtualenv_source, booleanize, loggify
from utils import print_console
from database import drop_all as db_drop_all
from pip import setup_virtualenv
from pip import setup as pip_requirements
import os
import logging
@task
def setup_rootpath():
    """Create the branch's rootpath directory tree: logs, private,
    public, scripts, the configured backups directories, per-service
    log directories and the public media directories."""
    configuration = env.config
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: deploy.setup_rootpath %(message)s',
            level=logging.DEBUG)
    root = configuration.paths.server.root
    cmd_mkdir_list = [
        "mkdir -p {rootpath}/{{logs,private,public,scripts}}".format(
            rootpath=root),
    ]
    for key in configuration.paths.server.backups.keys():
        backup_path = configuration.paths.server.backups[key]
        cmd_mkdir_list.append(
            "mkdir -p {rootpath_backups}".format(
                rootpath_backups=backup_path))
    cmd_mkdir_list.append(
        "mkdir -p {rootpath}/logs/{{django,gunicorn,nginx,supervisor}}".format(
            rootpath=root))
    cmd_mkdir_list.append(
        "mkdir -p {rootpath}/public/media/{{dynamic,static}}".format(
            rootpath=root))
    if env.debug:
        # debug mode only logs the mkdir commands, nothing is executed
        for cmd_mkdir in cmd_mkdir_list:
            logging.debug("run(\"\n\t%s\n\t\"" % cmd_mkdir)
    else:
        #
        # create the directory structure in the rootpath (usually
        # /var/www/projectname.branch)
        for cmd_mkdir in cmd_mkdir_list:
            run(cmd_mkdir)
@task
def bootstrap():
    """Stand up a brand new branch end to end: partial sync, rootpath
    and virtualenv setup, symlinks, service scripts (gunicorn /
    supervisor / nginx), optional docker containers, then database
    creation and initialization."""
    import database as db
    configuration = env.config
    if env.debug:
        logger = loggify('deploy', 'bootstrap')
    #
    # not doing a full sync, because we have to set up the rootpath,
    # virtualenv, files, dir structure, etc. This means we aren't
    # going to upload gunicorn and supervisor until after we've done
    # everything else at the end of the bootstrapping process
    sync(full=False)
    # continue setting up the rootpath and virtualenv
    setup_rootpath()
    setup_virtualenv()
    pip_requirements()
    #
    # link virtualenv to rootpath/private/virtualenv
    src_virtual = configuration.virtualenv.paths.root
    dst_virtual = configuration.paths.server.virtual
    #
    # link templates to rootpath/private/templates
    src_templates = configuration.paths.django.templates
    dst_templates = configuration.paths.server.django.templates
    #
    # link the django code in the project directory to the appropriate location
    # in the rootpath directory
    src_code = configuration.paths.django.root
    dst_code = configuration.paths.server.django.code
    #
    # I corrected the linking code so that it deletes already existing
    # links before creating them, otherwise you get really weird errors
    # where the a link is recreated within the destination link
    from utils import link_create
    if env.debug:
        # debug mode only logs; link_create is called with debug=True,
        # which appears to make it return a description string instead
        # of creating the link -- confirm in modules/utils.py
        logger.debug("virtualenv.root : %s"
                     % configuration.virtualenv.paths.root)
        logger.debug("virtualenv.bin : %s\n" %
                     configuration.virtualenv.paths.bin)
        logger.debug("paths.server\n")
        logger.debug(" - root\t: %s" % configuration.paths.server.root)
        logger.debug(" - media\t: %s" %
                     configuration.paths.server.media.static)
        logger.debug(" - virtual\t: %s" % configuration.paths.server.virtual)
        logger.debug(" - django.code\t: %s\n" %
                     configuration.paths.server.django.code)
        logger.debug("django templates : %s" %
                     configuration.paths.django.templates)
        logger.debug("django root : %s" %
                     configuration.paths.django.root)
        logger.debug("django settings : %s" %
                     configuration.paths.django.settings.root)
        logger.debug("django local : %s" %
                     configuration.paths.django.settings.local)
        logger.debug(link_create(src_virtual, dst_virtual, debug=True))
        logger.debug(link_create(src_templates, dst_templates, debug=True))
        logger.debug(link_create(src_code, dst_code, debug=True))
    else:
        link_create(src_virtual, dst_virtual)
        link_create(src_templates, dst_templates)
        link_create(src_code, dst_code)
    #
    # create and link the scripts that manage the server
    # e.g. nginx, supervisor, gunicorn
    from nginx import upload as upload_nginx
    from supervisor import upload as upload_supervisor
    from django import generate as django_generate
    print_console("creating gunicorn script")
    django_generate('gunicorn', True)
    django_generate('local', True)
    print_console("creating supervisor script")
    upload_supervisor()
    print_console("creating nginx script")
    upload_nginx()
    #
    # instantiate docker containers if any
    import docker
    print_console("check to see if docker containers are used")
    if hasattr(configuration, "docker"):
        print_console("generating docker configuration file")
        docker.generate()
        print_console("creating and starting docker container")
        docker.create()
    else:
        print_console("no docker containers are being used. pass")
    #
    # create and initialize the database
    print_console("in db.generate")
    db.generate()
    print_console("in db.init")
    db.init()
@task
def sync(full=True, extras=False):
    """rsync the local project to the remote branch.

    Keyword arguments:
    full -- full sync (default ON): also refresh pip packages,
            overrides, the supervisor/gunicorn/nginx scripts and
            collect static files. Turn OFF when called from bootstrap,
            because the directories those steps link against do not
            exist yet.
    extras -- whether the extras directory is included
              NOTE(review): only booleanized here, never read below --
              confirm before relying on it
    """
    full = booleanize(full)
    extras = booleanize(extras)
    from fabric.contrib.project import rsync_project
    configuration = env.config
    debug_prefix = "DEBUG: deploy.sync"
    #
    # ensure remote directory exists
    remote_dir = "{prj_name}/{prj_branch}".format(
        prj_name=configuration.project.name,
        prj_branch=configuration.project.branch)
    #
    # add a slash to the end of the directory so that rsync will upload
    # everything inside the directory as opposed to moving the directory over
    local_dir = configuration.paths.project.local + "/"
    command_mkdir_remote = "mkdir -p {remote}".format(remote=remote_dir)
    excludeitems = (".git", "*.swp", "*.swo", ".DS_Store", "*.pyc", "*.bak",
                    "build/*")
    if env.debug:
        # debug mode only prints what WOULD be run
        print "\n%s debug: %s" % (debug_prefix, env.debug)
        print "\n%s project.name %s" \
            % (debug_prefix, configuration.project.name)
        print "%s project.branch %s" \
            % (debug_prefix, configuration.project.branch)
        print "%s path.project %s" \
            % (debug_prefix, configuration.paths.project.root)
        print "\n%s run(%s)" % (debug_prefix, command_mkdir_remote)
        print "\n{debug_prefix} rsync_project(\n\tremote_dir={remote_dir}," \
            "\n\tlocal_dir={local_dir},\n\texclude={excludeitems})".format(
                debug_prefix=debug_prefix,
                remote_dir=remote_dir,
                local_dir=local_dir,
                excludeitems=excludeitems)
        print "\n%s override: %s " \
            % (debug_prefix, configuration.overrides.keys())
        print "%s has overrides? %s" \
            % (debug_prefix, (len(configuration.overrides.keys()) > 0))
    else:
        # if we are working on the development branch
        # either SYNCING TO OR FROM, then just exit at this point
        if configuration.project.branch == "development":
            print """
            ------------------------------------
            NOTE: this is a hack for the function deploy.sync()

            When I originally set up sync and bootstrap, I did not consider
            what would happen if I wanted to bootstrap the branch I was running
            on.

            That is, if I am running commands on the development branch and
            then I ask development to bootstrap or sync, I get a probem
            whereby I could potentially end up overwriting my own directory
            which would be pointless.

            So I decided that as a temporary measure if I do any kind of
            bootstrapping or anything that calls sync and I am doing it on the
            development branch server, then the sync will fail and print out
            this message.

            Instead of just keeping it in a comment, I thought it better to
            print it out so that no matter what I should always be aware of
            the issue and maybe later I can fix it.

            Right after this message gets printed, the sync function is told to
            "return" without any arguments
            ------------------------------------\n
            """
            #
            # exit the function without any arguments
            return
        run(command_mkdir_remote)
        rsync_project(remote_dir=remote_dir, local_dir=local_dir,
                      exclude=excludeitems)
        if full:
            from pip import install as pip_install
            pip_install('--all')
        overrides()
        #
        #
        # NOTE: if using full synch
        if full:
            from nginx import upload as upload_nginx
            from supervisor import upload as upload_supervisor
            from django import generate as django_generate
            upload_supervisor()
            django_generate('gunicorn', True)
            upload_nginx()
            from django import collectstatic as django_collectstatic
            django_collectstatic()
@task
def test():
configuration = env.config
projectpath = configuration.paths.project.root
local_dir = configuration.paths.project.local + "/"
print hasattr(configuration, 'docker')
print "project path : %s" % projectpath
print "local dir : %s" % local_dir
@task
def remove(full=True):
    """
    Tear down the deployed project on the remote branch host.

    full - when truthy (the default) also delete the project directory,
           the server root path and the virtualenv; otherwise only the
           nginx/supervisor configs and the database are removed.

    In env.debug mode nothing is removed; the values and commands that
    *would* be used are logged instead.
    """
    # task arguments arrive as strings; normalize to a real bool
    full = booleanize(full)
    configuration = env.config
    if env.debug:
        logger = loggify('deploy', 'remove')
    import sys
    # safety guard: never fully delete the development branch by accident
    if env.branch == "development" and not env.debug and full:
        print """
        ------------------------------
        WARNING:
        You were just about to delete the development branch
        from your computer!
        THIS IS A BAD IDEA BECAUSE YOU MIGHT BE DOING IT BY ACCIDENT
        Exiting NOW.
        --------------------------------
        """
        sys.exit()
    #
    # prepare to remove remote directory
    remote_dir = "{prj_name}/{prj_branch}".format(
        prj_name=configuration.project.name,
        prj_branch=configuration.project.branch)
    remote_dir_parent = os.path.join(
        configuration.paths.project.root,
        '..')
    command_rm_project = "rm -Rf {projectpath}".format(
        projectpath=configuration.paths.project.root)
    command_rm_project_parent = "rm -Rf {projectparent}".format(
        projectparent=remote_dir_parent)
    command_rm_rootpath = "rm -Rf {rootpath}".format(
        rootpath=configuration.paths.server.root)
    command_rm_virtualenv = "rmvirtualenv {virtualenv_name}".format(
        virtualenv_name=configuration.virtualenv.name)
    if env.debug:
        # dry run: log everything that would be removed
        logger.debug("project.name : %s" % configuration.project.name)
        logger.debug("project.branch : %s"
                     % configuration.project.branch)
        logger.debug("paths.project : %s"
                     % configuration.paths.project.root)
        logger.debug("remote_dir : %s" % remote_dir)
        logger.debug("remote_dir parent : %s" % remote_dir_parent)
        logger.debug("rootpath : %s"
                     % configuration.paths.server.root)
        logger.debug("--- removal commands ---")
        logger.debug("remove project dir : %s" % command_rm_project)
        logger.debug("remove parent dir : %s" % command_rm_project_parent)
        logger.debug("rootpath : %s" % command_rm_rootpath)
        logger.debug("virtualenv : %s" % command_rm_virtualenv)
        # NOTE(review): even in debug mode this still executes a remote
        # "ls | wc -l" (read-only, but not a pure dry run) -- confirm intent
        value = run("ls %s -l | wc -l" % remote_dir_parent)
        logger.debug("value : %s" % value)
        logger.debug("len value : %s" % len(value))
        logger.debug("type value : %s" % type(value))
        logger.debug("value == 2 : %s" % (value == "2"))
    else:
        #
        # NOTE: I have to put the imports for these functions here,
        # because otherwise they interfere with this modules version
        # of "remove"
        from nginx import remove as nginx_remove
        from supervisor import remove as supervisor_remove
        nginx_remove()
        supervisor_remove()
        db_drop_all()
        #
        # check to see if the parent directory contains anything else
        # remote_ls_value = run("ls %s -l | wc -l" % remote_dir_parent)
        # if remote_ls_value == "2":
        #     run(command_rm_project_parent)
        # else:
        #     run(command_rm_project)
        #
        # only remove the project diretory if this is a full
        # removal.
        if full:
            run(command_rm_project)
            run(command_rm_rootpath)
            with virtualenv_source():
                run(command_rm_virtualenv)
def overrides():
    """
    Run project-specific override/deploy code that has no standard category.

    Example use case: uploading a modified static js file whose contents
    depend on the deployment. The custom logic lives in an external
    ``override`` module whose location comes from the configuration, so the
    odd one-off code stays out of this module.
    """
    import sys
    cfg = env.config
    # make the custom override module importable, then hand it the fabric env
    sys.path.append(cfg.paths.overrides.modules)
    import override
    override.deploy(env)

BIN
modules/deploy.pyc Normal file

Binary file not shown.

645
modules/django.py Normal file
View file

@ -0,0 +1,645 @@
from fabric.api import env, local, task
from modules.utils import virtualenv
from fabric.context_managers import lcd
import fabric.operations as fabric_ops
from fabric.contrib.files import exists
from utils import loggify, print_run, booleanize
from utils import generate_template_build_path
from utils import generate_template_files_path
import os
SCRIPT_LIST = ['settings', 'local', 'wsgi', 'gunicorn']
def generate_secret_key():
    """
    helper to generate django secret key

    Returns a 50-character string drawn from letters, digits and a fixed
    set of punctuation, using the OS-level CSPRNG (``SystemRandom``).
    """
    import random
    import string
    # FIX: the original instantiated a fresh SystemRandom() for every one of
    # the 50 characters; one instance (and one alphabet string) is enough.
    rng = random.SystemRandom()
    alphabet = "{}{}{}".format(
        string.ascii_letters,
        string.digits,
        '!#$%()*+,-./:;<=>?@[\\]_{}~')
    return ''.join(rng.choice(alphabet) for _ in range(50))
@task
def test(args=None):
    """Sanity check: run ``ls`` locally, then again inside the virtualenv."""
    # args is unused; kept so the task matches the common task signature
    local("ls")
    with virtualenv():
        local("ls")
@task
def manage(args=None):
    """
    Run ``manage.py <args>`` remotely, inside the project virtualenv and
    from the django root directory.

    args - string passed straight through to manage.py
           (e.g. "migrate", "collectstatic --noinput")
    Returns the fabric run() output so callers can capture the command's
    stdout (see create_fixtures).
    """
    configuration = env.config
    # changes the working directory to the djangoroot
    from fabric.context_managers import cd
    with virtualenv():
        with cd(configuration.paths.django.root):
            output = fabric_ops.run(
                "{djangoroot}/manage.py {args} --pythonpath='{djangoroot}' "
                "--settings={djangosettings}".format(
                    djangoroot=configuration.paths.django.root,
                    args=args,
                    djangosettings=configuration.imports.settings,
                ),
                # MAKE SURE THIS IS ALWAYS HERE!
                # fabric defaults to /bin/sh, which cannot run all of these
                # commands (see the NOTE below this function)
                shell='/bin/bash'
            )
            # fabric.run has the ability to give me back the output
            return output
# NOTE:
# there was a major problem using fabric commands of "local" or "prefix"
# to work on remote machines, the problem was that for whatever cracked
# up reason, fabric would assume I'm using a /bin/sh shell, and /bin/sh
# CANNOT run all the commends that /bin/bash can. SO YOU MUST SPECIFY
# shell='/bin/bash' in all uses of local!
@task
def admin(args="help"):
    """
    Run ``django-admin <args>`` remotely with this project's pythonpath
    and settings module, inside the virtualenv.
    """
    configuration = env.config
    from fabric.context_managers import cd
    # build the full command up front; only the execution is nested below
    admin_cmd = (
        "django-admin {args} --pythonpath='{djangoroot}' "
        "--settings={djangosettings}".format(
            djangoroot=configuration.paths.django.root,
            args=args,
            djangosettings=configuration.imports.settings,
        ))
    with virtualenv():
        with cd(configuration.paths.django.root):
            # MAKE SURE THIS IS ALWAYS HERE!
            # fabric's default /bin/sh cannot run all of these commands
            fabric_ops.run(admin_cmd, shell='/bin/bash')
@task
def collectstatic():
    """
    makes sure the static media directories exist, then runs
    ``manage.py collectstatic --noinput``
    """
    configuration = env.config
    # FIX: the original called exists() and discarded the result, so the
    # directories were never actually created; now they are made on demand
    for media_path in (configuration.paths.server.media.static,
                       configuration.paths.server.media.dynamic):
        if not exists(media_path):
            fabric_ops.run("mkdir -p %s" % media_path)
    manage("collectstatic --noinput")
@task
def run(args=None):
    """
    Start the django development server on the host:port configured for
    this branch. Returns the output of the manage() call.
    """
    # args is unused; kept for the common task signature
    django_conf = env.config.server.django
    command = "runserver {host}:{port}".format(
        host=django_conf.host,
        port=django_conf.port)
    return manage(command)
@task
def startapp(args):
    """
    wrapper for the django.startapp
    takes name of app and creates in in code/apps
    args - name of app
    """
    configuration = env.config
    destination = os.path.join(configuration.paths.django.apps, args)
    # FIX: -p so a missing apps/ parent is created and an already-existing
    # destination directory does not abort the task
    cmd_mkdir = "mkdir -p {destination}".format(
        destination=destination)
    command = "startapp {appname} {destination}".format(
        appname=args,
        destination=destination)
    fabric_ops.run(cmd_mkdir)
    manage(command)
@task
def installed_apps():
    """
    List the currently installed apps in the settings.py file for this project
    """
    configuration = env.config
    # the generated one-liner uses python 2 "print" statements on purpose;
    # it is executed by the project's interpreter, not by fabric itself
    printecho = "print '\\n'"
    printcommand = "print '\\n'.join([ item for item" \
        " in {settings}.INSTALLED_APPS])".format(
            settings=configuration.imports.settings)
    command = "python -c \"import {settings}; {printecho};" \
        " {printcommand}; {printecho}\"".format(
            settings=configuration.imports.settings,
            printecho=printecho, printcommand=printcommand)
    # run from the django root so the settings module is importable
    with lcd(configuration.paths.django.root):
        local(command)
@task
def src():
    """
    locate the django source files in the site-packages directory
    """
    # FIX: removed a first assignment to ``command`` that was immediately
    # overwritten (dead code); only this one-liner variant ever executed.
    # sys.path[0] (the current directory) is dropped so the *installed*
    # django package is found, not any local module named django.
    command = """
    python -c "import sys; sys.path=sys.path[1:];""" \
        """ import django; print(django.__path__)[0]"
    """
    with virtualenv():
        local(command)
@task
def create_project():
    """
    Create the django project skeleton on the remote branch host.

    Backs up any existing project package, runs ``django-admin
    startproject``, creates the ``_settings`` package, then generates the
    settings/local/wsgi files from their templates.
    """
    configuration = env.config
    logger = loggify("django", "create_project")
    project_path = configuration.paths.django.root
    project_name = configuration.project.name
    import os
    full_project_path = os.path.join(project_path, project_name)
    django_cmd = \
        "django-admin startproject {project_name} {project_path}".format(
            project_name=configuration.project.name,
            project_path=project_path)
    manage_path = "%s/manage.py" % project_path
    logger.debug("django_root : %s" % configuration.paths.django.root)
    logger.debug("project_path : %s" % project_path)
    # I accidentally deleted the code directory, this checks to see if the
    # project path exists, if not, create it.
    if not exists(project_path):
        fabric_ops.run("mkdir -p %s" % project_path)
    # startproject refuses to run into a directory that already has manage.py
    if exists(manage_path):
        fabric_ops.run("rm %s" % manage_path)
    if exists(full_project_path):
        # backup whatever is there
        fabric_ops.run("mv {project_path}/{project_name}"
                       " {project_path}/{project_name}.old".format(
                           project_name=project_name,
                           project_path=project_path))
    with virtualenv():
        fabric_ops.run(django_cmd)
    django_path = "{project_path}/{project_name}".format(
        project_name=configuration.project.name, project_path=project_path)
    # _settings holds the per-branch local settings modules
    fabric_ops.run("mkdir %s/_settings" % django_path)
    fabric_ops.run("touch %s/_settings/__init__.py" % django_path)
    generate('settings', True)
    generate('local', True)
    generate('wsgi', True)
def generate_scripts(template_name, make_copy=False):
    """
    this is a function meant to generate django settings files
    There are a number of different types of django settings files so instead
    of generating all of them at the same time (sometimes I want the local,
    sometimes I want the main, etc), I decided to create a function that can
    look up the type of scripts I want and generate those.
    The function is meant to be wrapped up in another funciton that will call
    the type of script I want

    template_name - key under configuration.templates.django (e.g.
                    'settings', 'local', 'wsgi')
    make_copy - when truthy, also copy the generated file into the django
                project (backing up any existing copy first)
    """
    configuration = env.config
    # make sure to booleanize ALL boolean values!
    make_copy = booleanize(make_copy)
    if env.debug:
        logger = loggify("django", "generate_scripts")
    project_name = configuration.project.name
    project_branch = configuration.project.branch
    project_path = configuration.paths.django.root
    secret_key = generate_secret_key()
    # template source/output names come from the branch configuration
    files_name = getattr(configuration.templates.django, template_name).src
    build_name = getattr(configuration.templates.django, template_name).dst
    build_path = generate_template_build_path('django', template_name)
    files_path = generate_template_files_path('django')
    context = dict()
    context['project_name'] = project_name
    context['project_branch'] = project_branch
    context['secret_key'] = secret_key
    copy_path = "{project_path}/{project_name}".format(
        project_path=project_path,
        project_name=project_name)
    # the 'local' settings file lives in _settings/ and is named after the
    # branch (development.py, production.py, ...)
    if template_name == 'local':
        copy_path = "{project_path}/{project_name}/_settings".format(
            project_path=project_path,
            project_name=project_name)
        build_name = "%s.py" % project_branch
    copy_full_path = "{copy_path}/{build_name}".format(
        copy_path=copy_path, build_name=build_name)
    copy_cmd = "cp {build_path} {copy_full_path}".format(
        build_path=build_path, copy_full_path=copy_full_path)
    backup_cmd = "cp {copy_full_path} " \
        "{copy_full_path}.bak".format(copy_full_path=copy_full_path)
    from utils import upload_template as utils_upload_template
    if env.debug:
        # dry run: log every derived value; debug=True presumably makes
        # utils_upload_template report instead of upload -- TODO confirm
        logger.debug("template_name : %s" % template_name)
        logger.debug("project_branch : %s" % project_branch)
        logger.debug("project_name : %s" % project_name)
        logger.debug("build_path : %s" % build_path)
        logger.debug("files_path : %s" % files_path)
        logger.debug("files_name : %s" % files_name)
        logger.debug("copy_path : %s" % copy_path)
        logger.debug("copy_full_path : %s" % copy_full_path)
        logger.debug("build_name : %s" % build_name)
        upload_msg = utils_upload_template(
            filename=files_name, destination=build_path, context=context,
            use_jinja=True, use_sudo=False, backup=True,
            template_dir=files_path, debug=True)
        logger.debug("upload_msg : %s" % upload_msg)
        logger.debug("make_copy : %s" % make_copy)
        logger.debug("copy_cmd : %s" % copy_cmd)
        logger.debug("backup_cmd : %s" % backup_cmd)
    else:
        utils_upload_template(
            filename=files_name, destination=build_path, context=context,
            use_jinja=True, use_sudo=False, backup=True,
            template_dir=files_path, debug=False)
        if make_copy:
            # keep a .bak of any file we are about to overwrite
            if exists(copy_full_path):
                fabric_ops.run(backup_cmd)
            fabric_ops.run(copy_cmd)
        # NOTE(review): the diagnostics below run on every non-debug call
        # and invoke utils_upload_template a second time with debug=True --
        # presumably a report-only call, confirm against utils.upload_template
        print "\n\n------------------------------"
        print "project_name : %s" % project_name
        print "project_branch : %s" % project_branch
        print "project_path : %s" % project_path
        print "template_name : %s" % template_name
        print "build_path : %s" % build_path
        print "files_path : %s" % files_path
        print "files_name : %s" % files_name
        print "copy_path : %s" % copy_path
        print "copy_full_path : %s" % copy_full_path
        print "build_name : %s" % build_name
        upload_msg = utils_upload_template(
            filename=files_name, destination=build_path, context=context,
            use_jinja=True, use_sudo=False, backup=True,
            template_dir=files_path, debug=True)
        print "upload_msg : %s" % upload_msg
        print "make_copy : %s" % make_copy
        print "copy_cmd : %s" % copy_cmd
        print "backup_cmd : %s" % backup_cmd
        print "------------------------------\n\n"
@task
def generate(script, make_copy=False):
make_copy = booleanize(make_copy)
if script not in SCRIPT_LIST:
err_msg = "You asked to generate a script that isn't available" \
"possible script values available: %s" % SCRIPT_LIST
import sys
sys.exit(err_msg)
print "django:generate make_copy : %s\n" % make_copy
if env.debug:
print "django:generate script : %s" % script
print "django:generate make_copy : %s\n" % make_copy
else:
pass
# env.debug does not block the rest of the commands because this
# function acts primarily as a wrapper for the following cmomands, in
# those fucntion env.debug will be used to decide if anything should
# happen or not
if script == 'gunicorn':
generate_gunicorn(make_link=make_copy)
else:
generate_scripts(script, make_copy)
def generate_gunicorn(make_link=True):
    """
    create the gunicorn configuration script
    put it in the build folder and link it to the scripts directory

    make_link - when truthy, symlink the generated conf file into the
                server scripts directory
    """
    configuration = env.config
    make_link = booleanize(make_link)
    if env.debug:
        logger = loggify("django", "generate_gunicorn")
    # template sources live next to the conf templates ("files" subdir)
    files_path = os.path.join(
        configuration.templates.gunicorn.path.local,
        'files')
    build_path = os.path.join(
        configuration.templates.gunicorn.path.dest,
        'build',
        configuration.templates.gunicorn.conf.dst)
    link_path = os.path.join(
        configuration.paths.server.scripts,
        configuration.templates.gunicorn.conf.dst
    )
    # jinja2 context for the gunicorn conf template
    context = dict()
    context['host'] = configuration.server.django.host
    context['port'] = configuration.server.django.port
    context['user'] = configuration.project.user
    context['group'] = configuration.project.group
    context['settings_module'] = configuration.imports.settings
    context['logging_access'] = configuration.logging.gunicorn.access
    context['logging_error'] = configuration.logging.gunicorn.error
    msg_link_gunicorn = "ln -sf {gunicorn_root} {link_gunicorn}".format(
        gunicorn_root=build_path,
        link_gunicorn=link_path)
    # NOTE(review): this print_run() executes unconditionally, *before* the
    # env.debug check below -- confirm whether it should be debug-only
    print_run(msg_link_gunicorn)
    if env.debug:
        logger.debug("\n")
        logger.debug("--- in gunicorn ---\n")
        for key in context.keys():
            logger.debug("%s\t: %s" % (key, context[key]))
        logger.debug('build_path\t: %s' % build_path)
        logger.debug('files_path\t: %s' % files_path)
        logger.debug('\n%s' % print_run(msg_link_gunicorn))
    else:
        from fabric.contrib.files import upload_template
        upload_template(
            filename=configuration.templates.gunicorn.conf.src,
            destination=build_path,
            context=context,
            use_jinja=True,
            backup=True,
            template_dir=files_path)
        if make_link:
            print "\nlinking the generating gunicorn file in conf to " \
                "the server diretory\n"
            fabric_ops.run(msg_link_gunicorn)
        else:
            print "\nNOTE: not linking the generated gunicorn file" \
                "to the server directory\n"
@task
def edit(param='help'):
    """
    calls up mvim on the gunicorn conf file

    More generally: opens one of several django-related files in an editor
    (delegates to maintenance.edit). ``param`` is a key into the
    ``locations`` table below; anything else prints the help text.
    """
    from maintenance import edit as maintenance_edit
    configuration = env.config
    # the gunicorn conf as linked into the server scripts directory
    link_path = os.path.join(
        configuration.paths.server.scripts,
        configuration.templates.gunicorn.conf.dst
    )
    # the generated gunicorn conf in the template build directory
    build_path = os.path.join(
        configuration.templates.gunicorn.path.dest,
        'build',
        configuration.templates.gunicorn.conf.dst)
    project_branch = configuration.project.branch
    project_path = configuration.paths.django.root
    project_settings_dir = configuration.project.django.settings_folder
    django_path = "{project_path}/{project_settings_dir}".format(
        project_path=project_path,
        project_settings_dir=project_settings_dir
    )
    settings_path = "{django_path}/settings.py".format(
        django_path=django_path)
    # per-branch local settings: _settings/<branch>.py
    settings_local_path = "{django_path}/_settings/{project_branch}.py".format(
        django_path=django_path,
        project_branch=project_branch)
    # locations = ['gunicorn', 'gunicorn_link', 'gunicorn_build',
    #              'settings', 'local']
    locations = {
        'gunicorn': {
            'path': link_path,
            'desc': 'gunicorn.conf file',
        },
        'gunicorn_build': {
            'path': build_path,
            'desc': "gunicorn.conf file in scripts/conf/gunicorn/build"
        },
        'settings': {
            'path': settings_path,
            'desc': 'main settings file for django project',
        },
        'local': {
            'path': settings_local_path,
            'desc': 'local settings file for django project',
        }
    }
    if param in locations.keys():
        remote_path = locations[param]['path']
        maintenance_edit(remote_path=remote_path)
    else:
        # if param == 'help':
        print """
        "fab django.edit" automates editing files important to django whether
        locally or remotely
        to use this you must pass one of the editable locations in as a
        parameter
        currently editable locations are:
        """
        for k_loc in locations.keys():
            print "\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc'])
        return
@task
def clearmigrations(appname="help"):
    """
    Reset the migration history for *appname*: back up the existing
    migrations folder, regenerate migrations, then fake-apply them.
    """
    if appname == "help":
        print """
        "fab django.clearmigration:{appname}" clears out all migrations for the
        specified appname
        if no appname is given, or if you pass the "help" appnameter in place
        of an appname then this help message will appear.
        Note: if your appname is actually "help" you might want to go into this
        function and change it up a bit!
        """
        return
    configuration = env.config
    import os
    app_path = os.path.join(
        configuration.paths.django.apps,
        appname)
    path_migrations = os.path.join(
        app_path,
        'migrations')
    path_migrations_old = os.path.join(
        app_path,
        'migrations.old')
    import fabric
    if fabric.contrib.files.exists(path_migrations):
        # get rid of any old migration backups
        if fabric.contrib.files.exists(path_migrations_old):
            cmd_rm_migration_old = "rm -Rf %s" % path_migrations_old
            fabric.operations.run(cmd_rm_migration_old)
        # move the original migrations folder to migrations.old
        cmd_migration = "mv %s %s" % (
            path_migrations, path_migrations_old)
        fabric.operations.run(cmd_migration)
    manage("makemigrations --empty %s" % appname)
    manage("makemigrations")
    # NOTE(review): "0002" is hard-coded -- this assumes the two commands
    # above produced exactly migrations 0001 and 0002; confirm
    manage("migrate --fake %s 0002" % appname)
@task
def makemigrations_empty(param="help"):
if param == "help":
print "print this help message"
return
manage("makemigrations --empty %s" % param)
@task
def create_fixtures(param=None):
"""
param is the appname for this fixture
"""
configuration = env.config
if param == "help":
print "print this help message"
return
if param is None:
appname = None
else:
appname = param
print "debug - appname: %s" % appname
from fabric.api import *
path_root = configuration.paths.project.root
path_data = os.path.join(path_root, 'extras', 'data', 'fixtures')
path_backups = os.path.join(path_root, 'extras', 'backups', 'fixtures')
if appname is not None:
path_data = os.path.join(path_data, appname)
path_backups = os.path.join(path_backups, appname)
path_fixture = os.path.join(path_backups, "%s.json" % appname)
else:
path_fixture = os.path.join(path_backups, "all.json")
from utils import ensure_dir
ensure_dir(path_data)
ensure_dir(path_backups)
output = manage('dumpdata %s --indent 2' % appname)
f = open(path_fixture, 'w')
f.write(output)

BIN
modules/django.pyc Normal file

Binary file not shown.

198
modules/docker.py Normal file
View file

@ -0,0 +1,198 @@
from fabric.api import env, task
from fabric.operations import run
from fabric.contrib.files import upload_template
from utils import loggify, generate_template_files_path, booleanize
from utils import generate_template_build_path, print_console
@task
def docker_ip():
    """
    Return the host/IP of the database docker machine.

    The configured value 'local' is a sentinel for the default
    docker-machine VM, whose IP is looked up at call time; any other value
    is returned as-is.
    """
    database_host = env.config.docker.database.host
    if database_host != 'local':
        return database_host
    return run('docker-machine ip default')
def docker_run(cmd):
    """Run *cmd* with the default docker-machine environment exported."""
    from fabric.context_managers import prefix
    # every docker CLI call needs the machine's env vars in scope
    with prefix("eval $(docker-machine env default)"):
        run(cmd)
@task
def generate():
    """
    generates and uploads the docker.yml configuration file based on the
    settings in the yml file for the current branch.
    e.g. if we are using development.yml then it will check for the docker
    settings in there to find out what conf values we want to use when creating
    whatever docker containers we are usign for this branch
    currently, only the development branch is using docker, but I might change
    that in the future.
    """
    configuration = env.config
    if env.debug:
        logger = loggify('docker', 'generate')
    build_path = generate_template_build_path('docker', 'database')
    files_path = generate_template_files_path('docker')
    # jinja2 context for the docker-compose database template
    context = dict()
    context['docker_service_name'] = \
        configuration.docker.database.service_name
    context['docker_container_name'] = \
        configuration.docker.database.container_name
    context['docker_database_env_user'] = \
        configuration.docker.database.env.user
    context['docker_database_env_pass'] = \
        configuration.docker.database.env.password
    context['docker_database_env_db'] = \
        configuration.docker.database.env.dbname
    context['docker_database_image'] = configuration.docker.database.image
    # external port = what the server config exposes,
    # internal port = what the container listens on
    context['docker_database_port_external'] = \
        configuration.server.database.port
    context['docker_database_port_internal'] = \
        configuration.docker.database.port
    context['database_user'] = configuration.server.database.admin.user
    context['database_pass'] = configuration.server.database.admin.password
    context['database_name'] = configuration.server.database.name
    if env.debug:
        # dry run: log the context and the upload call that *would* happen
        for key in context.keys():
            logger.debug("context[{key}] : {value}".format(
                key=key,
                value=context[key]))
        # NOTE(review): the literal "(unknown)" means the filename= kwarg
        # handed to format() below is never interpolated -- harmless (extra
        # format kwargs are ignored) but the log never shows the template name
        upload_msg = "upload_template(" \
            "\n\tfilename=(unknown)," \
            "\n\tdestination={destination}," \
            "\n\tcontext={context}," \
            "\n\tuse_jinja=True," \
            "\n\tuse_sudo=False," \
            "\n\tbackup=False," \
            "\n\ttemplate_dir={template_dir})".format(
                filename=configuration.templates.docker.database.src,
                destination=build_path,
                context=context,
                template_dir=files_path)
        logger.debug("upload_msg : %s" % upload_msg)
    else:
        config_src = configuration.templates.docker.database.src
        upload_template(
            filename=config_src,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=files_path)
@task
def create(container='database'):
    """
    helper function to create a docker-based database container
    container - specifies the type of container being built
    NOTE:
    "container" must have a corresponding value in configuration file
    """
    # configuration = env.config
    if env.debug:
        logger = loggify("docker", 'create')
    build_path = generate_template_build_path('docker', container)
    info_msg = """
    Generating container template for {container}, note that
    the container paramter of "{container}" must have a
    corresponding value in the {branch} configuration file
    under "docker"
    """.format(container=container, branch="dev")
    print_console(info_msg, numsep=60)
    # bring the composed container(s) up detached, from the generated file
    dockercompose_cmd = \
        "docker-compose -f {build_path} up -d".format(build_path=build_path)
    if env.debug:
        # dry run: only log what would be executed
        logger.debug("build_path : %s" % build_path)
        logger.debug("dockercompose_cmd : %s" % dockercompose_cmd)
    else:
        docker_run(dockercompose_cmd)
@task
def status():
    """Show all docker containers (running and stopped) on the docker host."""
    docker_run("docker ps -a")
@task
def start(create=False):
    """
    Start the (already created) database docker container.

    create - NOTE(review): accepted and booleanized but otherwise UNUSED;
    the container is never created here -- use docker.create first.
    TODO: confirm whether create-if-missing was intended.
    """
    configuration = env.config
    create = booleanize(create)
    docker_start = 'docker start %s' % \
        configuration.docker.database.container_name
    docker_run(docker_start)
@task
def stop(remove=False):
    """
    Stop the database docker container.

    remove - when truthy, also ``docker rm`` the container after stopping
    it. (The original docstring was a copy of start()'s; corrected here.)
    """
    configuration = env.config
    remove = booleanize(remove)
    docker_stop = 'docker stop %s' % \
        configuration.docker.database.container_name
    docker_rm = 'docker rm %s' % configuration.docker.database.container_name
    docker_run(docker_stop)
    if remove:
        docker_run(docker_rm)

BIN
modules/docker.pyc Normal file

Binary file not shown.

958
modules/initialize.py Normal file
View file

@ -0,0 +1,958 @@
import os
# import yaml
import fabric.utils
import maintenance
from fabric.api import env, task
# from utils import loggify
# import configuration values from PRJ_ROOT/sites/meta/configuration
class DataObject(object):
    """
    Recursive attribute-access wrapper around nested dictionaries.

    Dict values become child DataObjects, lists/tuples are rebuilt with any
    dict elements wrapped, and everything else is stored as-is. Items are
    also reachable dict-style via [] and value().
    """

    def __init__(self, d):
        for key, raw in d.items():
            if isinstance(raw, (list, tuple)):
                wrapped = [DataObject(item) if isinstance(item, dict) else item
                           for item in raw]
                setattr(self, key, wrapped)
            else:
                wrapped = DataObject(raw) if isinstance(raw, dict) else raw
                setattr(self, key, wrapped)

    def keys(self):
        return self.__dict__.keys()

    def value(self, key):
        return self.__dict__[key]

    def __getitem__(self, key):
        return self.__dict__[key]

    def addbranch(self, name):
        # attach an empty child node that can itself be extended later
        setattr(self, name, DataObject(dict()))
def extend_object(envobject, dictname):
    # attach an empty fabric _AttributeDict to *envobject* under *dictname*
    setattr(envobject, dictname, fabric.utils._AttributeDict())
def environment(branchname):
    """
    Populate fabric's env for *branchname* and return the loaded config.

    Sets env.config/branch/hosts/user/host_string, and resolves the
    database host sentinel 'docker' to the actual docker machine IP.
    """
    env.config = get_config(branchname)
    env.branch = branchname
    env.hosts = [env.config.project.host]
    env.user = env.config.project.user
    env.host_string = env.hosts[0]
    # 'docker' is a sentinel value: replace it with the machine's real IP
    from docker import docker_ip
    if env.config.server.database.host == 'docker':
        env.config.server.database.host = docker_ip()
    return env.config
def add_maintenance(dataobject, layout, config, sectionname):
    """
    Copy one maintenance section from *config* onto *dataobject*.

    dataobject - configuration object used by the fabric modules
    layout - branch layout dict (unused here, kept for a uniform signature)
    config - branch config dict; read at config['maintenance'][sectionname]
    sectionname - which maintenance section to attach

    Adds dataobject.maintenance.<sectionname> with .execute, an optional
    .editor, and a .commands node carrying every configured command.
    """
    section_cfg = config['maintenance'][sectionname]
    if not hasattr(dataobject.maintenance, sectionname):
        dataobject.maintenance.addbranch(sectionname)
    node = getattr(dataobject.maintenance, sectionname)
    # the execute attribute is mandatory in the config
    node.execute = section_cfg['execute']
    # the editor attribute is optional
    if 'editor' in section_cfg:
        node.editor = section_cfg['editor']
    # attach every configured command under .commands
    if not hasattr(node, "commands"):
        node.addbranch("commands")
    for command_name in section_cfg['commands'].keys():
        setattr(node.commands, command_name,
                section_cfg['commands'][command_name])
def add_template(dataobject, layout, config, section, template_name="conf"):
    """
    this is a helper function to allow me to 'more easily' add the info I need
    from the config file on templates to the dataobject
    dataobject - will be used as the configuration object in the fabric modules
    layout - dictionary containing the layout values for this branch
    config - dictionary containing the config values for this branch
    section - name of the section of the template information in config
    template_name - name of which template we are looking for in that
    particular section of the templates configuration info, default to "conf"
    returns nothing - modifies the passed dataobject.
    """
    # resulting attribute shape:
    # dataobject.templates.section
    # dataobject.templates.section.path.local
    # dataobject.templates.section.path.dest
    # dataobject.templates.section.tname.src
    # dataobject.templates.section.tname.dst
    # if env.debug:
    #     logger = loggify('intialize', 'add_template')
    if not hasattr(dataobject.templates, section):
        dataobject.templates.addbranch(section)
    _template = getattr(dataobject.templates, section)
    if not hasattr(_template, "path"):
        _template.addbranch("path")
    # NOTE
    # project.root is NOT the same as project.local
    # "local" refers to the filesystem from which fabric is being run
    # while "root" refers to the file system on which fabric is ACTING
    # this can be very confusing, so I have to make these notes now and then
    # NOTE
    # the reason I am using _template.path.local is because if I am want to
    # make corrections to a template conf file, I don't want to have to do
    # it on the remote branch, I'd rather have it right there on the
    # development branch. Because ultimately, I'm rsyncing everything to the
    # remote branch anyway to do the reverse, I'd have to constantly merge
    # from the remote to local and from local to remote. Simpler and more
    # efficient to assume that everything is located on local.
    _template.path.local = os.path.join(
        dataobject.paths.conf.local,
        layout['templates'][section]['path'])
    _template.path.remote = os.path.join(
        dataobject.paths.conf.remote,
        layout['templates'][section]['path'])
    # DEBUG REMOVE
    if env.debug:
        import utils
        utils.printvar("branch", dataobject.project.branch)
        utils.printvar("section", section)
        utils.printvar("_template.path.local", _template.path.local)
        utils.printvar("_template.path.remote", _template.path.remote)
        # utils.prompt_continue()
    # finally, record the template's source file and output name
    if not hasattr(_template, template_name):
        _template.addbranch(template_name)
    conf_template = getattr(_template, template_name)
    conf_template.src = \
        layout['templates'][section][template_name]['source']
    conf_template.dst = \
        layout['templates'][section][template_name]['output']
def get_config(branchname):
# get a logger object
# if env.debug:
# logger = loggify('intialize', 'get_config')
# create two yaml dictionaries based on the branch configuration file
# and the standard file layout file
config = maintenance.load_configuration("config", branchname)
layout = maintenance.load_configuration("layout", branchname)
# fabric_config = maintenance.load_configuration("fabric", branchname)
# maintenance.check_version(branchname)
dataobject = DataObject(dict())
dataobject.addbranch('project')
dataobject.project.name = config['project']['name']
dataobject.project.branch = config['project']['branch']
dataobject.project.extension = config['project']['extension']
dataobject.project.extendedname = "{name}.{ext}".format(
name=dataobject.project.name,
ext=dataobject.project.extension)
dataobject.project.host = config['project']['host']
dataobject.project.user = config['project']['user']
dataobject.project.group = config['project']['group']
dataobject.project.sudo = config['project']['sudo']
#
# django settings directory
# NOTE:
# this is a tricky problem, because sometimes the project.name will NOT
# be what I originally set the settings folder name to. For example, I
# created a project named 'raquelsanchez' but then the production url was
# set to 'raquelsanchezart', well, those two names don't match, and I don't
# feel like going through the complication of changing the wsgi files and
# folder names for the settings directory.
#
# so over here, we check to see if django.settings_folder is the same as
# project.name, if it is, great, if not we set it to whatever is in
# config.django.settings_folder
dataobject.project.addbranch('django')
# this is the default name for the django settings_dir
dataobject.project.django.settings_folder = \
config['project']['name']
if 'settings_folder' in config['django']:
dataobject.project.django.settings_folder = \
config['django']['settings_folder']
dataobject.addbranch('paths')
dataobject.paths.addbranch('project')
dataobject.paths.project.local = maintenance.get_project_root()
if 'home' in config['project']['paths']:
project_home_dir = config['project']['paths']['home']
else:
project_home_dir = os.path.join(
dataobject.project.name,
dataobject.project.branch)
dataobject.paths.project.root = os.path.join(
config['project']['paths']['root'],
project_home_dir)
# DEBUG REMOVE
if env.debug:
import utils
utils.printvar('project.branch', dataobject.project.branch)
utils.printvar('project.root', dataobject.paths.project.root)
utils.prompt_continue()
#
# these are the locations of the scripts/conf file both remote and local
dataobject.paths.addbranch('conf')
dataobject.paths.conf.remote = os.path.join(
dataobject.paths.project.root,
layout['paths']['templates']['conf'])
dataobject.paths.conf.local = os.path.join(
dataobject.paths.project.local,
layout['paths']['templates']['conf'])
#
# tools used in development
dataobject.paths.addbranch('tools')
dataobject.paths.tools.fabric = os.path.join(
dataobject.paths.project.root,
layout['paths']['tools']['fabric']['root'])
dataobject.addbranch('tools')
dataobject.tools.addbranch('fabric')
dataobject.tools.fabric.addbranch('templates')
dataobject.tools.fabric.templates.conf = os.path.join(
dataobject.paths.project.root,
dataobject.paths.tools.fabric,
layout['paths']['tools']['fabric']['templates']['conf'])
dataobject.tools.fabric.templates.meta = os.path.join(
dataobject.paths.project.root,
dataobject.paths.tools.fabric,
layout['paths']['tools']['fabric']['templates']['meta'])
dataobject.tools.fabric.templates.readmes = os.path.join(
dataobject.paths.project.root,
dataobject.paths.tools.fabric,
layout['paths']['tools']['fabric']['templates']['readmes'])
#
# paths for django
dataobject.paths.addbranch('django')
dataobject.paths.django.root = os.path.join(
dataobject.paths.project.root, layout['paths']['django']['root'])
dataobject.paths.django.apps = os.path.join(
dataobject.paths.django.root,
layout['paths']['django']['apps'])
dataobject.paths.django.templates = os.path.join(
dataobject.paths.project.root,
'templates')
dataobject.paths.django.addbranch('settings')
dataobject.paths.django.settings.root = os.path.join(
dataobject.paths.django.root,
dataobject.project.django.settings_folder)
dataobject.paths.django.settings.local = os.path.join(
dataobject.paths.django.settings.root,
layout['paths']['django']['settings.local'])
#
# path to supervisor configuration directory on target machine
dataobject.paths.addbranch("supervisor")
dataobject.paths.supervisor.conf = config['supervisor']['paths']['conf']
#
# django local settings name
dataobject.addbranch('imports')
dataobject.imports.settings = \
"{projectname}.{settings_local}.{projectbranch}".format(
projectname=dataobject.project.django.settings_folder,
settings_local=layout['paths']['django']['settings.local'],
projectbranch=dataobject.project.branch)
#
# server information
dataobject.addbranch('server')
#
# nginx server
dataobject.server.addbranch('nginx')
dataobject.server.nginx.port = config['nginx']['port']
if config['project']['host'] == "localhost":
dataobject.server.nginx.host = "{projectname}.{ext}".format(
ext=dataobject.project.extension,
projectname=dataobject.project.name)
else:
dataobject.server.nginx.host = config['project']['host']
dataobject.server.addbranch('django')
dataobject.server.django.port = config['django']['port']
dataobject.server.django.host = config['django']['host']
#
# initialize the database server information
_init_database(dataobject, config, layout)
#
# initialize the virtualenv information
_init_virtualenv(dataobject, config, layout)
#
# dataobject Templates
dataobject.addbranch('templates')
#
# django template information
dataobject.templates.addbranch('django')
add_template(dataobject, layout, config, "django", "settings")
add_template(dataobject, layout, config, "django", "local")
add_template(dataobject, layout, config, "django", "wsgi")
#
# add templates
# database template information
add_template(dataobject, layout, config, "database", "init")
add_template(dataobject, layout, config, "database", "re_init")
add_template(dataobject, layout, config, "database", "drop_db")
add_template(dataobject, layout, config, "database", "drop_all")
#
# gunicorn template information
add_template(dataobject, layout, config, "gunicorn")
# supervisor template information
add_template(dataobject, layout, config, "supervisor")
# nginx template information
add_template(dataobject, layout, config, "nginx")
# docker template information
# make sure we have docker information available, otherwise spit out
# that we aren't doing it
if 'docker' in config:
if 'database' in config['docker']:
add_template(dataobject, layout, config, "docker", "database")
else:
print "NOTE: docker.database does not exist for this branch"
else:
print "NOTE: docker information does not exist for this branch"
#
# nginx information
_init_nginx(dataobject, config, layout)
#
# initialize the root server directory information
# ie paths, etc.
_init_root_server(dataobject, config, layout)
_init_backups(dataobject, config, layout)
_init_logging(dataobject, layout, config)
_init_docker(dataobject, layout, config)
_init_overrides(dataobject, layout, config)
#
# maintenance commands
dataobject.addbranch("maintenance")
add_maintenance(dataobject, layout, config, 'nginx')
add_maintenance(dataobject, layout, config, 'supervisor')
return dataobject
def _init_database(configuration, config, layout):
"""
initialize the database server information
"""
configuration.server.addbranch('database')
configuration.server.database.name = config['database']['name']
configuration.server.database.port = config['database']['port']
configuration.server.database.host = config['database']['host']
configuration.server.database.backend = config['database']['backend']
configuration.server.database.user = \
config['database']['users']['default']['name']
configuration.server.database.password = \
config['database']['users']['default']['pass']
configuration.server.database.addbranch('admin')
configuration.server.database.admin.user = \
config['database']['users']['admin']['name']
configuration.server.database.admin.password = \
config['database']['users']['admin']['pass']
def _init_virtualenv(configuration, config, layout):
"""
initialize all the virtualenv information
"""
#
# virtualenv
configuration.addbranch('virtualenv')
#
# workon_home variable
configuration.virtualenv.workon = config['virtualenv']['workon']
virtualenv_requirements = "{branch}.txt".format(
branch=configuration.project.branch)
configuration.virtualenv.requirements = os.path.join(
configuration.paths.project.root,
layout['virtualenv']['requirements'],
virtualenv_requirements)
#
# determine the virtualenv name, if it is set as "Null"
# the craete it based on the project name and the extension
# associated with this project
if 'name' in config['virtualenv']:
virtualenv_name = config['virtualenv']['name']
else:
virtualenv_name = configuration.project.extendedname
configuration.virtualenv.name = virtualenv_name
#
# paths used by the virtualenv configuration
configuration.virtualenv.addbranch('paths')
#
# the location of the virtualenv inside of WORKON_HOME
configuration.virtualenv.paths.root = os.path.join(
configuration.virtualenv.workon,
configuration.virtualenv.name)
#
# virtualenv bin directory
configuration.virtualenv.paths.bin = os.path.join(
configuration.virtualenv.paths.root,
'bin')
#
# virtualenv site-packages directory (I hate looking it up)
configuration.virtualenv.paths.sitepackages = os.path.join(
configuration.virtualenv.paths.root,
"lib", "python2.7", "site-packages")
#
# path to the activate file for this virtualenv
configuration.virtualenv.activate = os.path.join(
configuration.virtualenv.paths.bin,
'activate')
def _init_nginx(configuration, config, layout):
"""
all nginx configuration info is done here
"""
configuration.addbranch("nginx")
#
# nginx enabled, and available directory paths
#
# NOTE: on some installations of Nginx, there aren't both a
# sites-available and sites-enabled directories, for example
# on mac brew, there is only "servers". So where this is true,
# I set sites-available to None, and I dump everything in
# whatever I called the sites-enabled directory
configuration.nginx.sites_enabled = os.path.join(
config['nginx']['paths']['root'],
config['nginx']['paths']['enabled'])
if config['nginx']['paths']['available'] is None:
configuration.nginx.sites_available = configuration.nginx.sites_enabled
else:
configuration.nginx.sites_available = os.path.join(
config['nginx']['paths']['root'],
config['nginx']['paths']['available'])
# nginx conf file name
configuration.nginx.addbranch("conf")
configuration.nginx.conf.name = "{name}.conf".format(
name=configuration.project.extendedname)
# nginx path to conf file location
configuration.nginx.conf.destination = os.path.join(
configuration.nginx.sites_available,
configuration.nginx.conf.name)
def _init_root_server(configuration, config, layout):
"""
initialize all the information necessary for the root
server. ie, paths, etc.
"""
#
# the main server directory, which is made public to nginx,
# supervisor, etc.
configuration.paths.addbranch("server")
configuration.paths.server.root = os.path.join(
config['rootpath'],
configuration.project.extendedname)
#
# server virtual environment directory
configuration.paths.server.virtual = os.path.join(
configuration.paths.server.root,
'private', 'virtualenv')
#
# server scripts directory
configuration.paths.server.scripts = os.path.join(
configuration.paths.server.root,
'scripts')
configuration.paths.server.code = os.path.join(
configuration.paths.server.root,
'private',
'code')
configuration.paths.server.logs = os.path.join(
configuration.paths.server.root,
'logs')
configuration.paths.server.addbranch("django")
configuration.paths.server.django.templates = os.path.join(
configuration.paths.server.root,
'private', 'templates')
configuration.paths.server.django.code = os.path.join(
configuration.paths.server.root,
'private', 'code')
configuration.paths.server.addbranch("media")
configuration.paths.server.media.static = os.path.join(
configuration.paths.server.root,
config['media']['paths']['root'],
config['media']['paths']['static'])
configuration.paths.server.media.dynamic = os.path.join(
configuration.paths.server.root,
config['media']['paths']['root'],
config['media']['paths']['dynamic'])
def _init_backups(configuration, config, layout):
"""
initialize paths for the server backup director
"""
#
# paths for the server backup directory
configuration.paths.server.addbranch("backups")
# these are the default backups paths based on the parent
# server directory path
# root backups directory
configuration.paths.server.backups.root = os.path.join(
configuration.paths.server.root,
layout['paths']['backups']['root'])
# database subdirectory of the root backups directory
configuration.paths.server.backups.database = os.path.join(
configuration.paths.server.backups.root,
layout['paths']['backups']['database'])
# check to see if there is an overriden folder name for the backup path
# if there is, then apply THE FULL PATH, don't tack it onto the root server
# path. Here I am assuming that the full corrected path is being given
if 'backups' in config:
# root backups directory
if 'root' in config['backups']['paths']:
configuration.paths.server.backups.root = \
config['backups']['paths']['root']
# database subdirectory of the root backups directory
if 'database' in config['backups']['paths']:
configuration.paths.server.backups.database = \
config['backups']['paths']['root']
def _init_logging(configuration, layout, config):
"""
initialize all logging information
"""
configuration.addbranch("logging")
#
# logging for nginx
configuration.logging.addbranch("nginx")
#
# if 'nginx' log directory paths are defined
# in this configuration file then apply them
if 'nginx' in config['logging']['paths']:
configuration.logging.nginx.access = \
config['logging']['paths']['nginx']['access']
configuration.logging.nginx.error = \
config['logging']['paths']['nginx']['error']
else:
# we don't have anything special defined, use the
# the standard logs directory and give it standard paths
configuration.logging.nginx.access = os.path.join(
configuration.paths.server.logs,
'nginx', 'access.log')
configuration.logging.nginx.error = os.path.join(
configuration.paths.server.logs,
'nginx', 'error.log')
#
# logging for supervisor
configuration.logging.addbranch("supervisor")
#
# if 'nginx' log directory paths are defined
# in this configuration file then apply them
if 'supervisor' in config['logging']['paths']:
configuration.logging.nginx.access = \
config['logging']['paths']['supervisor']['access']
configuration.logging.nginx.error = \
config['logging']['paths']['supervisor']['error']
else:
# we don't have anything special defined, use the
# the standard logs directory and give it standard paths
configuration.logging.supervisor.out = os.path.join(
configuration.paths.server.logs,
'supervisor', 'out.log')
configuration.logging.supervisor.err = os.path.join(
configuration.paths.server.logs,
'supervisor', 'err.log')
#
# django logging
configuration.logging.addbranch('django')
configuration.logging.django.addbranch('handlers')
log_keys = layout['logging']['django']['handlers']
logging_path_project = layout['paths']['logging']['django']['project']
logging_path_server = layout['paths']['logging']['django']['server']
#
# the logs for django are handled differently from other log paths
# for the django log handlers, I want to place them in the a subdirectory
# of the project directory and link that directory to the server log
# directory
# find out and set the log paths to the project directory
for log_handler_key in log_keys:
configuration.logging.django
log_handler_file = \
layout['logging']['django']['handlers'][log_handler_key]['file']
handler_path_project = os.path.join(
configuration.paths.project.root,
logging_path_project, log_handler_file)
handler_path_server = os.path.join(
configuration.paths.server.logs,
logging_path_server, log_handler_file)
configuration.logging.django.handlers.addbranch(log_handler_key)
configuration_handler = getattr(configuration.logging.django.handlers,
log_handler_key)
configuration_handler.addbranch('name')
handler_name = \
layout['logging']['django']['handlers'][log_handler_key]['name']
configuration_handler.name.project = handler_name
configuration_handler.name.server = "server.%s" % handler_name
configuration_handler.addbranch('path')
configuration_handler.path.project = handler_path_project
configuration_handler.path.server = handler_path_server
#
# gunicorn logging
configuration.logging.addbranch('gunicorn')
configuration.logging.gunicorn.access = os.path.join(
configuration.paths.server.logs,
'gunicorn', 'access.log')
configuration.logging.gunicorn.error = os.path.join(
configuration.paths.server.logs,
'gunicorn', 'error.log')
def _init_docker(configuration, layout, config):
"""
docker configuration
"""
if 'docker' in config:
configuration.addbranch("docker")
if 'host' in config['docker']:
configuration.docker.host = config['docker']['host']
if 'database' in config:
configuration.docker.addbranch("database")
configuration.docker.database.host = \
config['docker']['database']['host']
configuration.docker.database.container_name = \
"{project_name}_{project_extension}_db".format(
project_name=configuration.project.name,
project_extension=configuration.project.extension)
configuration.docker.database.service_name = \
"{project_name}_{project_branch}_database".format(
project_name=configuration.project.name,
project_branch=configuration.project.branch)
configuration.docker.database.port = \
config['docker']['database']['port']
configuration.docker.database.image = \
config['docker']['database']['image']
configuration.docker.database.addbranch("env")
configuration.docker.database.env.user = \
config['docker']['database']['env']['user']
configuration.docker.database.env.password = \
config['docker']['database']['env']['pass']
configuration.docker.database.env.dbname = \
config['docker']['database']['env']['name']
def _init_overrides(configuration, layout, config):
    """
    Set up the project "overrides" mechanism.

    NOTE:
    overrides let project-specific templates and modules live OUTSIDE
    the shared fabric "scripts" directory; one of the biggest
    challenges with running multiple projects from the same fabric
    scripts is keeping the scripts project-independent, and abstracting
    the override code is how that goal is pursued.

    The override module directory is appended to sys.path and its
    ``override.initialize`` hook is invoked for this project.
    """
    extras = layout["paths"]["extras"]
    project_root = configuration.paths.project.root
    configuration.addbranch("overrides")
    configuration.overrides.addbranch("modules")
    # paths to the override templates and modules
    configuration.paths.addbranch("overrides")
    configuration.paths.overrides.addbranch("templates")
    templates = configuration.paths.overrides.templates
    templates.root = os.path.join(project_root, extras["templates"])
    templates.files = os.path.join(templates.root, "files")
    templates.build = os.path.join(templates.root, "build")
    configuration.paths.overrides.modules = os.path.join(
        project_root, extras["modules"])
    # NOTE:
    # append the override path to sys.path so the project-specific
    # override package becomes importable, then run its initialization
    # code (if any)
    import sys
    sys.path.append(configuration.paths.overrides.modules)
    import override
    override.initialize(configuration, config, layout, env)
@task
def create_local(branch=None):
    """
    Render the django local-settings template for *branch* (defaults
    to the currently configured branch) into the settings.local
    directory as <branch>.py.
    """
    if branch is None:
        configuration = env.config
        branch = configuration.project.branch
    else:
        configuration = get_config(branch)
    destination = os.path.join(
        configuration.paths.django.settings.local,
        '{branchname}.py'.format(branchname=branch))
    # template context rendered into the local settings file
    context = {
        'project_name': configuration.project.name,
        'branch': branch,
        'server_media_static': configuration.paths.server.media.static,
        'server_media_dynamic': configuration.paths.server.media.dynamic,
    }
    from fabric.contrib.files import upload_template
    upload_template(
        filename=configuration.templates.django.settings_local,
        destination=destination,
        context=context,
        use_jinja=True,
        backup=True,
        template_dir=configuration.templates.django.path)

BIN
modules/initialize.pyc Normal file

Binary file not shown.

258
modules/maintenance.py Normal file
View file

@ -0,0 +1,258 @@
from fabric.api import env, task, lcd
from fabric.api import local
import os
import sys
import utils
from utils import executize, virtualenv
def command(program=None, cmd=None, extra_param=None):
    """
    Run a configured maintenance command for a program.

    program - name of program to be run, eg. 'nginx', 'supervisor'
    cmd - can be 'start', 'stop' or 'status'
    extra_param - optional extra argument appended to the command

    The execution strategy (sudo/run/local) comes from the program's
    'execute' entry in the configuration. Missing program/cmd values
    print the permissible choices and exit. In debug mode nothing is
    executed.
    """
    configuration = env.config
    if program is None:
        print("Error: You have not given a legitimate program")
        print("permissable programs : %s"
              % configuration.maintenance.keys())
        sys.exit()
    configuration_program = getattr(configuration.maintenance, program)
    if cmd is None:
        print("Error: You have not given a legitimate command")
        print("permissable commands : %s"
              % configuration_program.commands.keys())
        sys.exit()
    # pick sudo, run or local according to the configuration
    execute_fn = executize(configuration_program.execute)
    full_command = getattr(configuration_program.commands, cmd)
    if extra_param is not None:
        full_command = "{command} {param}".format(
            command=full_command, param=extra_param)
    if env.debug:
        # debug mode: do not execute anything
        pass
    else:
        execute_fn(full_command)
def edit(remote_path):
    """
    Open *remote_path* in an editor over sftp (mvim on darwin,
    otherwise vim), connecting as the current fabric user/host.

    remote_path - path to file we want to edit

    In debug mode nothing is executed.
    """
    if env.debug:
        # debug mode: do not launch the editor
        pass
    else:
        editor = "mvim" if sys.platform == "darwin" else "vim"
        cmd_edit = "{editor} sftp://{user}@{host_string}/{remote_path}".format(
            editor=editor,
            user=env.user,
            host_string=env.host_string,
            remote_path=remote_path)
        local(cmd_edit)
@task
def pyc_delete():
    """
    Deletes *.pyc files from project source dir
    """
    with lcd(env.config.paths.project.root):
        local("find . -name '*.pyc' -delete")
@task
def pyc_compile(force=False):
    """
    Compile Python source files in a project source dir.

    force -- pass True (or the string "True", as fabric supplies from
             the command line) to force recompilation with ``-f``.
    """
    configuration = env.config
    # BUG FIX: fabric passes task arguments as STRINGS, so the string
    # "False" used to be truthy here and forced recompilation anyway;
    # only recognised true-ish values enable -f now
    params = []
    if force in (True, 1, 'True', 'true', '1'):
        params.append('-f')
    with lcd(configuration.paths.project.root):
        with virtualenv():
            local("python -m compileall {0} .".format(" ".join(params)))
@task
def get_base_dir():
    """Return the directory two levels above this module's file."""
    return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@task
def get_project_root():
    """
    Return the absolute project root directory.

    NOTE: the PROJECT_ROOT value is very important. Make sure that if
    you move around this file, you account for where it is and add or
    take away "os.pardir" from os.path.join.
    """
    return os.path.abspath(
        os.path.join(get_base_dir(), os.pardir, os.pardir, os.pardir))
def _get_configuration_path(name, branch):
#
# the locations of the files we need relative to the PROJECT_ROOT
param_list = ['config', 'layout']
if name not in param_list:
print "value %s was not legit. _get_configuration_path requires" \
"value such from %s" % (name, param_list)
META_DIR = os.path.join(
get_project_root(), 'scripts', 'meta', 'configuration')
if name == "config":
#
# the configuration we are working with will change
# depending on what branch is being used. This value
# is passed in when the function is called
configname = "{branch}.yml".format(branch=branch)
path_meta = os.path.join(META_DIR, configname)
elif name == "layout":
path_meta = os.path.join(META_DIR, 'layout.yml')
return path_meta
def load_configuration(name, branch):
    """
    Load a YAML configuration by symbolic *name*.

    name -- 'fabric' (the fabric tool config), 'config' (the
            per-branch configuration) or 'layout'
    branch -- branch name, used to resolve the per-branch 'config' file

    Raises ValueError for an unknown *name* (previously this produced
    an UnboundLocalError). Uses yaml.safe_load on a properly closed
    file; the old code used the Python-2-only ``file()`` builtin
    without closing it and the unsafe ``yaml.load``.
    """
    #
    # the locations of the files we need relative to the PROJECT_ROOT
    if name == "fabric":
        file_path = os.path.join(
            get_project_root(), 'scripts', 'fabric', 'conf', 'fabric.yml')
    elif name == "config":
        # the configuration we are working with will change depending
        # on what branch is being used
        file_path = _get_configuration_path('config', branch)
    elif name == "layout":
        file_path = _get_configuration_path('layout', branch)
    else:
        raise ValueError("unknown configuration name: %s" % name)
    import yaml
    with open(file_path, 'r') as configuration_file:
        return yaml.safe_load(configuration_file)
@task
def check_version(branchname):
    """
    Maintenance function to check the configuration version against
    the fabric version currently loaded. Returns boolean.

    If the version is either non-existant (treated as 0) or older than
    the fabric configuration version, a message is shown to the user
    and False is returned to the caller; otherwise everything is A-OK
    and True is returned.

    Keyword Arguments:
    branchname -- the name of the branch whose configuration files we
                  are checking
    """
    config = load_configuration("config", branchname)
    fabric_config = load_configuration("fabric", branchname)
    config_version = config.get('version', 0)
    version_correct = config_version >= fabric_config['version']
    utils.printvar('version_correct', version_correct)
    if version_correct:
        # version was correct, so return True to let the app know
        # everything is A-OK
        return True
    # TODO
    # update this message, currently it is false because there is no
    # fabric.updateconfs function!
    version_false = """
        NOTE: the current configuration information related to this project
        is not up to date, the fabric tools are at version %s, and you are
        at version %s. Run fab fabric.updateconfs to correct.\n"""
    utils.print_console(
        version_false % (fabric_config['version'], config_version),
        numsep=90)
    # version was not correct, return False to let the app
    # know what's going on
    return False

BIN
modules/maintenance.pyc Normal file

Binary file not shown.

288
modules/nginx.py Normal file
View file

@ -0,0 +1,288 @@
from fabric.api import env, task
from fabric.contrib.files import upload_template, exists
from fabric.operations import sudo
# from fabric.api import local
# from fabric.operations import run
import os
# import sys
from maintenance import command as maintenance_command
from maintenance import edit as maintenance_edit
from utils import loggify
@task
def command(cmd=None):
    """
    wrapper for the maintenance.command function
    """
    maintenance_command('nginx', cmd)
@task
def start():
    """
    Start nginx (wrapper for command:cmd=start).
    """
    command("start")
@task
def stop():
    """
    Stop nginx (wrapper for command:cmd=stop).
    """
    command("stop")
@task
def status():
    """
    Report nginx status (wrapper for command:cmd=status).
    """
    command("status")
@task
def restart():
    """Restart nginx: stop, then start."""
    stop()
    start()
@task
def edit(location='conf'):
    """
    Open an nginx file in the editor (via maintenance.edit).

    location -- 'conf' (the site conf file in sites-available),
                'log.error' or 'log' (the error log), or 'log.access'
                (the access log).

    Raises ValueError for any other location; previously an unknown
    location crashed with a NameError on the unbound remote_path.
    """
    configuration = env.config
    if location == 'conf':
        remote_path = os.path.join(
            configuration.nginx.sites_available,
            configuration.nginx.conf.name)
    elif location in ('log.error', 'log'):
        remote_path = configuration.logging.nginx.error
    elif location == 'log.access':
        remote_path = configuration.logging.nginx.access
    else:
        raise ValueError("unknown edit location: %s" % location)
    maintenance_edit(remote_path=remote_path)
@task
def upload():
    """
    put the nginx conf file for this project into nginx sites-available

    Renders the jinja2 nginx conf template twice: once into the local
    build directory and once (with sudo) into sites-available; if
    sites-available and sites-enabled differ, the conf is symlinked
    into sites-enabled. In debug mode every step is only logged.
    Finally nginx is restarted.
    """
    if env.debug:
        logger = loggify('nginx', 'upload')
    configuration = env.config
    # template context rendered into the nginx conf file
    context = dict()
    context['server_name'] = configuration.server.nginx.host
    context['access_log'] = configuration.logging.nginx.access
    context['error_log'] = configuration.logging.nginx.error
    context['port'] = configuration.server.nginx.port
    context['django_host'] = configuration.server.django.host
    context['django_port'] = configuration.server.django.port
    context['virtualenv_sitepackages'] = \
        configuration.virtualenv.paths.sitepackages
    context['server_media_static'] = \
        configuration.paths.server.media.static
    context['server_media_dynamic'] = \
        configuration.paths.server.media.dynamic
    # conf file locations in sites-available and sites-enabled
    destination_available = os.path.join(
        configuration.nginx.sites_available,
        configuration.nginx.conf.name)
    destination_enabled = os.path.join(
        configuration.nginx.sites_enabled,
        configuration.nginx.conf.name)
    # local build output for the rendered template
    build_path = os.path.join(
        configuration.templates.nginx.path.dest,
        'build',
        configuration.templates.nginx.conf.dst)
    # directory holding the template sources
    files_path = os.path.join(
        configuration.templates.nginx.path.local,
        'files')
    #
    # this only gets used if sites_availabe is NOT equal to sites_enabled
    cmd_link_available_enabled = 'ln -sf {available} {enabled}'.format(
        available=destination_available,
        enabled=destination_enabled)
    if env.debug:
        logger.debug("filename : %s" %
                     configuration.templates.nginx.conf.src)
        logger.debug("dest_avail : %s" % destination_available)
        logger.debug("dest_enabl : %s" % destination_enabled)
        logger.debug("build_path : %s" % build_path)
        logger.debug("files_path: : %s" % files_path)
        logger.debug("context : %s" % context)
        # NOTE(review): the message template has a literal
        # "filename=(unknown)" and no {filename} placeholder, so the
        # filename= kwarg passed to format() below is unused -- confirm
        # whether the filename was meant to appear in the message
        upload_msg1 = "upload_template(" \
            "\n\tfilename=(unknown)," \
            "\n\tdestination={destination_available}," \
            "\n\tcontext=context," \
            "\n\tuse_jinja=True," \
            "\n\tuse_sudo=True," \
            "\n\tbackup=False," \
            "\n\ttemplate_dir={template_dir})".format(
                filename=configuration.templates.nginx.conf.src,
                destination_available=build_path,
                context=context,
                template_dir=files_path)
        upload_msg2 = "upload_template(" \
            "\n\tfilename=(unknown)," \
            "\n\tdestination={destination_available}," \
            "\n\tcontext=context," \
            "\n\tuse_jinja=True," \
            "\n\tuse_sudo=True," \
            "\n\tbackup=False," \
            "\n\ttemplate_dir={template_dir})".format(
                filename=configuration.templates.nginx.conf.src,
                destination_available=destination_available,
                context=context,
                template_dir=files_path)
        logger.debug("upload cmd 1: %s" % upload_msg1)
        logger.debug("upload cmd 2: %s" % upload_msg2)
        copy_msg = "cp -s {build_path} {dest_path}".format(
            build_path=build_path,
            dest_path=destination_available)
        logger.debug("sudo('%s')" % copy_msg)
        logger.debug("\nsites_available : %s"
                     % configuration.nginx.sites_available)
        logger.debug("sites_enabled : %s"
                     % configuration.nginx.sites_enabled)
        logger.debug("if sites_enabled != sites_available then ...")
        logger.debug("sudo('%s')" % cmd_link_available_enabled)
    else:
        # put the nginx.conf in the build directory and the
        # /etc/nginx/sites-avaialbe location
        upload_template(
            filename=configuration.templates.nginx.conf.src,
            destination=build_path,
            context=context,
            use_jinja=True,
            use_sudo=False,
            backup=False,
            template_dir=files_path)
        upload_template(
            filename=configuration.templates.nginx.conf.src,
            destination=destination_available,
            context=context,
            use_jinja=True,
            use_sudo=True,
            backup=True,
            template_dir=files_path)
        if configuration.nginx.sites_available \
                == configuration.nginx.sites_enabled:
            # if the sites_available and sites_enabled directories are
            # the same then do nothing
            pass
        else:
            sudo(cmd_link_available_enabled)
        # got to this point? then restart the nginx server
        restart()
@task
def remove():
    """
    remove the nginx conf file from sites-available, and if necessary,
    from sites-enabled too, then restart the server.

    In debug mode the commands are only logged, not executed.
    """
    if env.debug:
        logger = loggify('nginx', 'remove')
    configuration = env.config
    sites_available = os.path.join(
        configuration.nginx.sites_available,
        configuration.nginx.conf.name)
    sites_available_bak = sites_available + ".bak"
    sites_enabled = os.path.join(
        configuration.nginx.sites_enabled,
        configuration.nginx.conf.name)
    #
    # include the '-f' option so that if nothing is there
    # it won't return an error
    cmd_remove_enabled = 'rm -f {enabled}'.format(
        enabled=sites_enabled)
    cmd_remove_available = 'rm -f {available}'.format(
        available=sites_available)
    cmd_remove_available_bak = 'rm -f {available}'.format(
        available=sites_available_bak)
    if env.debug:
        logger.debug("sites_available : %s" % sites_available)
        logger.debug("sites_enabled : %s" % sites_enabled)
        logger.debug("sites_available.bak : %s" % sites_available_bak)
        logger.debug("remove enabled : %s" % cmd_remove_enabled)
        logger.debug("remove available : %s" % cmd_remove_available)
        logger.debug("remove available.bak : %s" % cmd_remove_available_bak)
        # BUG FIX: the label used to read "rm_en == rm_avail?" although
        # the value printed is the result of the != comparison used below
        logger.debug("rm_en != rm_avail? %s" % (cmd_remove_enabled !=
                                                cmd_remove_available))
    else:
        # only remove from sites-enabled when it is a distinct path
        if cmd_remove_enabled != cmd_remove_available:
            sudo(cmd_remove_enabled)
        sudo(cmd_remove_available)
        if exists(sites_available_bak):
            sudo(cmd_remove_available_bak)
        restart()

BIN
modules/nginx.pyc Normal file

Binary file not shown.

134
modules/pip.py Normal file
View file

@ -0,0 +1,134 @@
from fabric.api import env, task
from fabric.operations import run
import logging
from utils import virtualenv_source, virtualenv
from utils import print_console
@task
def setup_virtualenv():
    """
    Create the project's virtualenv via mkvirtualenv (run inside a
    shell that has sourced virtualenvwrapper). In debug mode the
    command and the virtualenv settings are only logged.
    """
    configuration = env.config
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: deploy.setup_virtualenv %(message)s',
            level=logging.DEBUG)
    mkvirtualenv_cmd = "mkvirtualenv --no-site-packages " \
        "{virtualenv_name}".format(
            virtualenv_name=configuration.virtualenv.name)
    if env.debug:
        logging.debug("virtualenv.workon : %s"
                      % configuration.virtualenv.workon)
        logging.debug("virtualenv.activate : %s"
                      % configuration.virtualenv.activate)
        logging.debug("virtualenv.name : %s"
                      % configuration.virtualenv.name)
        logging.debug("virtualenv.paths.bin : %s"
                      % configuration.virtualenv.paths.bin)
        logging.debug("virtualenv.paths.root : %s"
                      % configuration.virtualenv.paths.root)
        # BUG FIX: this message used str.format() on a %-style template
        # with no {} placeholders, so the command text never appeared
        # and a literal "%s" was logged instead
        logging.debug("with virtualenv_source(): run(\"\n\t%s\n\t\")"
                      % mkvirtualenv_cmd)
    else:
        with virtualenv_source():
            run(mkvirtualenv_cmd)
@task
def setup():
    """
    install all packages via pip
    """
    configuration = env.config
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: deploy.pip %(message)s',
            level=logging.DEBUG)
    # one requirements file drives the whole environment
    cmd_install = "pip install -r {requirements}".format(
        requirements=configuration.virtualenv.requirements)
    if env.debug:
        logging.debug("with virtualenv(): run(\"\n\t%s\n\t\")" %
                      cmd_install)
    else:
        with virtualenv():
            run(cmd_install)
@task
def install(package=None):
    """
    install a single package via pip (pass --all to install everything
    listed in the requirements file)
    """
    import sys
    configuration = env.config
    if not package:
        print_console("you must specify a package to be installed")
        sys.exit()
    # '--all' means: reinstall from the requirements file
    if package == "--all":
        pipinstall_cmd = "pip install -r {requirements_file}".format(
            requirements_file=configuration.virtualenv.requirements)
    else:
        pipinstall_cmd = "pip install {package}".format(package=package)
    if env.debug:
        print_console("pipinstall_cmd : %s" % pipinstall_cmd)
    else:
        with virtualenv():
            run(pipinstall_cmd)
@task
def freeze(param=False):
    """
    Run 'pip freeze' inside the project virtualenv.

    param - 'help' prints usage; True writes the freeze output into the
            requirements file; False (default) prints it to the console
    """
    configuration = env.config
    msg_help = """
    pip.freeze takes one of three values:
    \thelp - this help message
    \tTrue - update the pip package list the freeze output
    \tFalse (default) - print the freeze output to the console
    """
    from utils import booleanize, handle_help
    import sys
    if handle_help(param, msg_help, 'help'):
        sys.exit()
    else:
        try:
            # coerce the fabric command-line string into a real boolean
            param = booleanize(param)
        except TypeError:
            print "the parameter value you gave, \"%s\" , is not" \
                " a valid parameter." % param
            print msg_help
            sys.exit()
        if param:
            # redirect the freeze output into the requirements file
            cmd_pipfreeze = "pip freeze > {requirements}".format(
                requirements=configuration.virtualenv.requirements)
        else:
            cmd_pipfreeze = "pip freeze"
        with virtualenv():
            run(cmd_pipfreeze)

BIN
modules/pip.pyc Normal file

Binary file not shown.

BIN
modules/setup.pyc Normal file

Binary file not shown.

356
modules/supervisor.py Normal file
View file

@ -0,0 +1,356 @@
from fabric.api import env, task
from fabric.contrib.files import upload_template
from fabric.operations import sudo, run
# from fabric.api import local
import os
from maintenance import command as maintenance_command
from maintenance import edit as maintenance_edit
from utils import loggify, print_run
import logging
def _initialize(configuration):
params = dict()
conf_name = "{project_name}.{project_branch}".format(
project_name=configuration.project.name,
project_branch=configuration.project.branch)
conf_file = "{conf_name}.conf".format(
conf_name=conf_name)
conf_path = os.path.join(
configuration.paths.supervisor.conf,
conf_file)
params['conf_name'] = conf_name
params['conf_file'] = conf_file
params['conf_path'] = conf_path
return params
@task
def command(cmd=None):
    """
    wrapper for the maintenance.command function

    cmd - the supervisorctl sub-command to run (start/stop/status/
          reload/update/...)
    """
    # NOTE
    # if you find yourself getting errors running the supervisor commands, try
    # checking to see if supervisord has been started. Sometimes that's all the
    # trouble right there
    configuration = env.config
    param = _initialize(configuration)
    if cmd == "update":
        print "in update"
    # we don't need to specify the supervisor configuration file name, because
    # supervisor can figure it out, i.e. whatever.conf is referred to by
    # whatever. If you have an error with supervisor, go to the log files and
    # double check what's going on.
    conf_name = param['conf_name']
    maintenance_command('supervisor', cmd, conf_name)
@task
def start():
    """
    convenience wrapper for command:cmd=start
    """
    command(cmd="start")
@task
def stop():
    """
    convenience wrapper for command:cmd=stop
    """
    command(cmd="stop")
@task
def status():
    """
    wrapper for using above command:cmd=status
    """
    command("status")
@task
def reload():
    """
    tell supervisor to reload its configuration
    """
    command(cmd='reload')
@task
def update():
    """
    tell supervisor to re-read and apply the configuration scripts
    """
    command(cmd='update')
@task
def restart():
    """
    restart the supervised process for this branch: stop, then start
    """
    stop()
    start()
@task
def edit():
    """
    calls up mvim on the Supervisor conf file
    """
    param = _initialize(env.config)
    conf_path = param['conf_path']
    if env.debug:
        logger = loggify('supervisor', 'edit')
        logger.debug("conf path : %s" % conf_path)
    else:
        maintenance_edit(remote_path=conf_path)
@task
def edit_gunicorn():
    """
    calls up mvim on the gunicorn conf file
    """
    configuration = env.config
    # the rendered gunicorn conf lives in the template 'build' directory
    gunicorn_conf = os.path.join(
        configuration.templates.gunicorn.path.dest,
        'build',
        configuration.templates.gunicorn.conf.dst)
    if env.debug:
        logger = loggify('supervisor', 'edit_gunicorn')
        logger.debug("gunicorn_conf : %s" % gunicorn_conf)
    else:
        maintenance_edit(remote_path=gunicorn_conf)
@task
def upload():
    """
    create the supervisor configuration script,
    put it in the build folder and copy it into supervisor's conf.d,
    then tell supervisor to pick up the change
    """
    configuration = env.config
    if env.debug:
        logger = loggify('supervisor', 'upload')
    # template context consumed by the supervisor conf jinja template
    context = dict()
    context['project_name'] = configuration.project.name
    context['project_branch'] = configuration.project.branch
    context['project_user'] = configuration.project.user
    context['project_group'] = configuration.project.group
    #
    # sometimes this name is going to be different than the name we use for the
    # rest of the project
    context['django_settings_folder'] = \
        configuration.project.django.settings_folder
    context['server_path_virtualenv_bin'] = os.path.join(
        configuration.paths.server.virtual, 'bin')
    context['server_path_code'] = configuration.paths.server.code
    context['server_path_scripts'] = configuration.paths.server.scripts
    context['gunicorn_conf_file'] = configuration.templates.gunicorn.conf.dst
    context['supervisor_logs_out'] = configuration.logging.supervisor.out
    context['supervisor_logs_err'] = configuration.logging.supervisor.err
    build_path = os.path.join(
        configuration.templates.supervisor.path.dest,
        'build',
        configuration.templates.supervisor.conf.dst)
    files_path = os.path.join(
        configuration.templates.supervisor.path.local,
        'files')
    copy_name = "{project_name}.{project_branch}.conf".format(
        project_name=configuration.project.name,
        project_branch=configuration.project.branch)
    copy_path = os.path.join(
        configuration.paths.supervisor.conf,
        copy_name)
    copy_command = "cp {build_path} {copy_path}".format(
        build_path=build_path, copy_path=copy_path)
    # NOTE(review): removed leftover interactive debugging calls
    # (utils.printvar / utils.prompt_continue) that blocked automated
    # deploys with a prompt on every upload
    if env.debug:
        for key in context.keys():
            logger.debug("%s\t\t: %s" % (key, context[key]))
        logger.debug('templates.conf.src : %s'
                     % configuration.templates.supervisor.conf.src)
        logger.debug('build_path : %s' % build_path)
        logger.debug('files_path : %s' % files_path)
        if configuration.project.sudo:
            logger.debug("sudo(%s)" % copy_command)
        else:
            logger.debug("run(%s)" % copy_command)
    else:
        upload_template(
            filename=configuration.templates.supervisor.conf.src,
            destination=build_path,
            context=context,
            use_jinja=True,
            backup=True,
            template_dir=files_path
        )
        if configuration.project.sudo:
            sudo(copy_command)
        else:
            run(copy_command)
        update()
        restart()
@task
def remove():
    """
    stop the supervisor process for this branch,
    then remove the supervisor conf file from supervisor/conf.d
    """
    configuration = env.config
    conf_path = _initialize(configuration)['conf_path']
    #
    # include the '-f' option so that if nothing is there
    # it won't return an error
    rm_command = "rm -f {conf_path}".format(conf_path=conf_path)
    if env.debug:
        logger = loggify('supervisor', 'remove')
        logger.debug("conf path : %s" % conf_path)
        logger.debug("rm_command : %s" % rm_command)
    else:
        stop()
        sudo(rm_command)
@task
def upload_gunicorn():
    """
    create the gunicorn configuration script
    put it in the build folder and link it to the scripts directory

    In debug mode only the template context and the link command are
    logged; otherwise the jinja template is rendered remotely and
    symlinked into the server's scripts directory.
    """
    configuration = env.config
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: deploy.gunicorn %(message)s',
            level=logging.DEBUG)
    # local directory holding the jinja source templates
    files_path = os.path.join(
        configuration.templates.gunicorn.path.local,
        'files')
    # the rendered conf is written into the remote 'build' directory...
    build_path = os.path.join(
        configuration.templates.gunicorn.path.dest,
        'build',
        configuration.templates.gunicorn.conf.dst)
    # ...and symlinked into the scripts directory referenced by supervisor
    link_path = os.path.join(
        configuration.paths.server.scripts,
        configuration.templates.gunicorn.conf.dst
    )
    # template context consumed by the gunicorn jinja template
    context = dict()
    context['host'] = configuration.server.django.host
    context['port'] = configuration.server.django.port
    context['user'] = configuration.project.user
    context['group'] = configuration.project.group
    context['settings_module'] = configuration.imports.settings
    context['logging_access'] = configuration.logging.gunicorn.access
    context['logging_error'] = configuration.logging.gunicorn.error
    msg_link_gunicorn = "ln -sf {gunicorn_root} {link_gunicorn}".format(
        gunicorn_root=build_path,
        link_gunicorn=link_path)
    if env.debug:
        logging.debug("\n")
        logging.debug("--- in gunicorn ---\n")
        for key in context.keys():
            logging.debug("%s\t: %s" % (key, context[key]))
        logging.debug('build_path\t: %s' %
                      build_path)
        logging.debug('files_path\t: %s' %
                      files_path)
        logging.debug('\n%s' % print_run(msg_link_gunicorn))
    else:
        upload_template(
            filename=configuration.templates.gunicorn.conf.src,
            destination=build_path,
            context=context,
            use_jinja=True,
            backup=True,
            template_dir=files_path)
        run(msg_link_gunicorn)
@task
def test(param):
    # smoke test for this module; 'param' is accepted for the fabric
    # task interface but is currently unused
    status()

BIN
modules/supervisor.pyc Normal file

Binary file not shown.

View file

@ -0,0 +1 @@
import maintenance

Binary file not shown.

View file

@ -0,0 +1,181 @@
from fabric.api import task, env
from fabric.operations import run
import os
import modules.utils as utils
from modules.conf import create_dir_top
from modules.conf import exists_dir_top
from modules.conf import exists_dir_sub
from modules.conf import exists_file
# import modules.conf as conf
@task
def test(*args, **kwargs):
    """
    Test functions in conf

    Keyword Arguments:
    funcname -- name of testing function to run; may also be given as
                the first positional argument. Remaining arguments are
                forwarded to the chosen test function.
    """
    # configuration = env.config
    # dictionary of legitimate functions that can be tested
    # when given the param name
    test_values = {
        'conf_top': test_conf_top,
        'conf_sub': test_conf_sub,
        'conf_file': test_conf_file,
    }
    funcname = kwargs.get('funcname')
    if not funcname:
        # fall back to the first positional arg, consuming it so the
        # remaining args go to the test function untouched
        if len(args) > 0:
            funcname = args[0]
            args = args[1:]
    if funcname in test_values.keys():
        test_values[funcname](*args, **kwargs)
    else:
        # unknown (or missing) name: show what is available
        print "\nTest functions in this module, acceptable values include:"
        for val in test_values:
            print val
def test_conf_file(*args, **kwargs):
    """
    Exercise exists_file() with the conf name taken from the 'conf'
    keyword or the first positional argument.
    """
    SPACING = "\n"
    utils.print_console("testing exist_conf_file",
                        prepend=SPACING, append=SPACING)
    confargument = kwargs.get('conf')
    if not confargument:
        confargument = args[0] if args else None
    exists_file(confargument)
def test_conf_sub(*args, **kwargs):
    """
    Exercise exists_dir_sub() with the conf name taken from the 'conf'
    keyword or the first positional argument.
    """
    SPACING = "\n"
    utils.print_console("testing exist_conf_sub",
                        prepend=SPACING, append=SPACING)
    confargument = kwargs.get('conf')
    if not confargument:
        confargument = args[0] if args else None
    exists_dir_sub(confargument)
def test_conf_top(*args, **kwargs):
    """
    Exercise exists_dir_top() and create_dir_top().

    When the remote conf directory already exists it is first moved to
    a temporary directory, create_dir_top() is tested against the empty
    location, then the original directory is restored.
    """
    configuration = env.config
    SPACING = "\n"
    utils.print_console("testing exists_conf",
                        prepend=SPACING, append=SPACING)
    utils.printvar("exists_dir_top",
                   exists_dir_top())
    utils.print_console("testing create_dir_top",
                        prepend=SPACING, append=SPACING)
    if exists_dir_top():
        msg = "conf directory already exists, move conf to a temporary " \
            "directory, and test out the create_dir_top function."
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        #
        # command to create a temporary directory and echo its name
        # back to stdout, so we can store that name for use
        cmd_mktmp = "mytmpdir=`mktemp -d 2>/dev/null ||" \
            " mktemp -d -t 'mytmpdir'`"
        cmd_mktmp = cmd_mktmp + "; echo $mytmpdir"
        #
        # create a temporary directory to store old conf files
        tmpdir = run(cmd_mktmp)
        #
        # make sure we are working with a legit path
        # otherwise, just kick out.
        with utils.virtualenv():
            cmd_py_isdir = "python -c \"import os; "\
                "print os.path.isdir('%s')\"" % \
                configuration.paths.conf.remote
            #
            # take the output from this command and booleanize it
            output = run(cmd_py_isdir)
        is_dir = utils.booleanize(output)
        utils.printvar("is_dir", is_dir)
        if is_dir:
            lastpart = os.path.basename(configuration.paths.conf.remote)
            path_conf_tmp = os.path.join(tmpdir, lastpart)
        else:
            utils.printvar("configuration.paths.conf.remote",
                           configuration.paths.conf.remote)
            msg = "the original configuration path is NOT a path." \
                "Continue? y/N"
            # NOTE(review): if the user answers yes here, path_conf_tmp
            # is never assigned and the mv below raises NameError --
            # confirm intended behaviour
            utils.prompt_continue(message=msg, default="N")
        #
        # now move the original configuration directory to the temporary
        # location, and run test running create_dir_top on an empty
        msg = "moving original conf directory."
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        cmd_mvtmp = "mv %s %s" % \
            (configuration.paths.conf.remote, path_conf_tmp)
        run(cmd_mvtmp)
        #
        # create the new conf directory
        msg = "creating new conf directory."
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        create_dir_top()
        #
        # testing on empty location completed, remove the current directory
        # and move back the original
        msg = "removing created directory."
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        cmd_rm_created = "rm -Rf %s" % configuration.paths.conf.remote
        run(cmd_rm_created)
        #
        # returning original directory
        msg = "Moving back original directory."
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        cmd_return_orig = "mv %s %s" % \
            (path_conf_tmp, configuration.paths.conf.remote)
        run(cmd_return_orig)
        cmd_rmtmp = "rm -Rf %s" % tmpdir
        run(cmd_rmtmp)
    else:
        msg = "conf directory does not exist, test out create_dir_top"
        utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None)
        create_dir_top()

Binary file not shown.

387
modules/utils.py Normal file
View file

@ -0,0 +1,387 @@
import os
import errno
import logging
import fabric.contrib.files
from contextlib import contextmanager as _contextmanager
from fabric.api import env, prefix, local
from fabric.operations import run, sudo
def printvar(name, value, exit=False):
print "%s : %s" % (name, value)
if exit:
import sys
sys.exit()
def loggify(module, func, prefix=""):
    """
    Configure basic logging once and hand back a throw-away logger
    named after the calling module and function.

    module - name of the module being used, ie 'nginx', 'deploy', etc
    func - the name of the function this logger is going to be used in
    prefix - anything you want to add to the front of the logger, ie '\n'

    returns a logging.Logger named '<module>.<func>'
    """
    loggername = "%s.%s" % (module, func)
    fmt = '{prefix}%(levelname)s: {loggername} %(message)s'.format(
        prefix=prefix,
        loggername=loggername)
    logging.basicConfig(format=fmt, level=logging.DEBUG)
    return logging.getLogger(loggername)
def print_console(string, prepend="\n\n", append="\n\n", sep="-", numsep=44):
"""
helper function to take a string, and format it so it prints to the console
in a way that is pleasing to the eye.
string - the string to be printed
prepend - defaults to two line spaces, can be anything
append - defaults to two lines spaces after the string is printed
sep - the character used to print out one line above the string and
one line after
numsep - number of times the separator is printed out on a line
"""
print prepend
if sep:
print sep * numsep
print string
if sep:
print sep * numsep
print append
def print_debug(debugstr, module, function):
print "%s:%s:%s" \
% (module, function, debugstr)
def executize(config_execute):
    """
    Translate the string name of an execution strategy into the
    matching fabric command, so callers can switch between sudo, run
    and local without repeating the dispatch everywhere.

    config_execute - one of 'sudo', 'run' or 'local'

    returns the corresponding fabric callable; any unrecognised value
    falls back to local
    """
    dispatch = {
        'sudo': sudo,
        'run': run,
        'local': local,
    }
    return dispatch.get(config_execute, local)
def booleanize(value):
    """
    Map a value onto True or False.

    Booleans pass through unchanged. Anything else is coerced with
    str() first (so the integers 0 and 1 work, not just '0'/'1') and
    matched case-insensitively against the usual spellings:
    y/yes/true/1 and n/no/false/0.

    raises TypeError when the value matches neither set.
    """
    if isinstance(value, bool):
        return value
    # str() coercion fixes the old AttributeError on non-string input
    # (e.g. booleanize(1)) and generalizes the accepted types
    text = str(value).lower()
    if text in ("y", "yes", "true", "1"):
        return True
    if text in ("n", "no", "false", "0"):
        return False
    raise TypeError("Cannot booleanize ambiguous value '%s'" % value)
def ensure_dir(directory):
    """
    Create *directory* (including parents) when it does not exist yet.
    """
    try:
        if not os.path.exists(directory):
            print "creating directory: %s" % directory
            os.makedirs(directory)
    except OSError, e:
        # EEXIST means another process won the race and created it
        # first - that is fine; anything else is a real failure
        if e.errno != errno.EEXIST:
            print "Error occurred while creating directory: %s" % directory
            raise
def ensure_file(f):
    """
    Simulates linux 'touch' command: create an empty file at *f* when
    nothing exists there yet; an existing file is left untouched.
    """
    if not os.path.exists(f):
        with open(f, 'w'):
            pass
def upload_template(filename, destination, context, use_jinja,
                    use_sudo, backup, template_dir, debug=False):
    """
    Thin wrapper around fabric.contrib.files.upload_template.

    When debug is True, return a printable description of the call
    instead of performing it; otherwise execute the upload with the
    given arguments.
    """
    if env.debug:
        logging.basicConfig(
            format='\n%(levelname)s: utils.upload_template %(message)s',
            level=logging.DEBUG)
    # BUG FIX: the message template previously contained the literal
    # text 'filename=(unknown)', so the filename argument passed to
    # format() was silently ignored
    command_msg = "\n\tupload_template(" \
        "\n\tfilename={filename}," \
        "\n\tdestination={destination_available}," \
        "\n\tcontext={context}," \
        "\n\tuse_jinja={use_jinja}," \
        "\n\tuse_sudo={use_sudo}," \
        "\n\tbackup={backup}," \
        "\n\ttemplate_dir={template_dir})\n".format(
            filename=filename,
            destination_available=destination,
            context=context,
            use_jinja=use_jinja,
            use_sudo=use_sudo,
            backup=backup,
            template_dir=template_dir)
    if debug:
        return command_msg
    else:
        fabric.contrib.files.upload_template(
            filename=filename,
            destination=destination,
            context=context,
            use_jinja=use_jinja,
            use_sudo=use_sudo,
            backup=backup,
            template_dir=template_dir)
def get_upload_template_msg(filename, destination, context, use_jinja,
                            use_sudo, backup, template_dir, debug=False):
    """
    Build a human-readable description of an upload_template() call,
    for logging in debug mode.

    returns the formatted multi-line string
    """
    # BUG FIX: the message template previously contained the literal
    # text 'filename=(unknown)', so the filename argument passed to
    # format() was silently ignored
    command_msg = "\n\tupload_template(" \
        "\n\tfilename={filename}," \
        "\n\tdestination={destination_available}," \
        "\n\tcontext={context}," \
        "\n\tuse_jinja={use_jinja}," \
        "\n\tuse_sudo={use_sudo}," \
        "\n\tbackup={backup}," \
        "\n\ttemplate_dir={template_dir})\n".format(
            filename=filename,
            destination_available=destination,
            context=context,
            use_jinja=use_jinja,
            use_sudo=use_sudo,
            backup=backup,
            template_dir=template_dir)
    return command_msg
@_contextmanager
def virtualenv_source():
    """
    Make virtualenvwrapper's shell commands (mkvirtualenv, workon, ...)
    available by sourcing its script before the wrapped commands.
    """
    with prefix("source virtualenvwrapper.sh"):
        yield
@_contextmanager
def virtualenv():
    """
    Run the wrapped commands inside the project's virtualenv by
    sourcing virtualenvwrapper and then 'workon <name>'.
    """
    configuration = env.config
    with virtualenv_source():
        with prefix("workon %s" % configuration.virtualenv.name):
            yield
    # alternative activation strategies kept for reference:
    # with prefix("/bin/bash -c -l 'source %s'" %
    #             configuration.virtualenv.activate):
    #     yield
    # with prefix("/bin/bash -c -l 'source /usr/local/bin/' \
    #             'virtualenvwrapper.sh ' \
    #             '&& workon %s'" % configuration.virtualenv.name):
    #     yield
def generate_template_build_path(section, template_name='conf'):
    """
    helper function to automate creation of a template build path

    section - the template section we are building off of
    template_name - by default this is "conf", but can be different, for
    example, the 'database' section has 3 different template names

    returns the path where the rendered template should be placed
    """
    import os
    configuration = env.config
    conf_section = getattr(configuration.templates, section)
    template_conf = getattr(conf_section, template_name)
    return os.path.join(conf_section.path.dest, 'build', template_conf.dst)
def generate_template_files_path(section):
    """
    helper function to automate creation of the template source path

    section - the template section we are building off of

    returns the local path where the section's jinja files are located
    """
    import os
    conf_section = getattr(env.config.templates, section)
    return os.path.join(conf_section.path.local, 'files')
def print_run(command, prefix="\"\n\t", suffix="\n\t\""):
    """
    Format a shell command as the string 'run ("<command>")' with the
    given decorations, for echoing in debug logs.
    """
    return "run (" + prefix + command + suffix + ")"
def handle_help(param, message, values=None):
    """
    Print *message* and return True when *param* is a help flag.

    param - the task parameter to inspect (may be any type; only
            strings can match)
    message - help text printed when a help flag is detected
    values - a single help-flag string or a list of help-flag strings;
             defaults to ['-h', '--help']

    returns True when help was requested (and printed), False otherwise
    """
    if values is None:
        values = ['-h', '--help']
    elif isinstance(values, str):
        # BUG FIX: callers pass a bare string (e.g. 'help'); wrap it in
        # a list so the membership test below compares whole flags
        # instead of doing an accidental substring search
        values = [values]
    if isinstance(param, str) and param.lower() in values:
        print(message)
        return True
    return False
def is_help(key):
    """Return True when *key* is a recognised help flag."""
    return key in ('-h', '--help')
def link_create(path_src, path_dst, debug=False):
    """
    takes a source and destination path, then links it
    if the destination path already exists and is a link,
    then delete it. Otherwise sys.exit

    path_src - source path
    path_dst - destination path

    returns: if debug=True then it returns a msg describing the
    commands that would have run (empty string otherwise)
    """
    from fabric.contrib.files import is_link
    from fabric.contrib.files import exists
    from fabric.operations import run
    cmd_rm = "rm {path_dst}".format(
        path_dst=path_dst
    )
    cmd_link = "ln -sf {path_src} {path_dst}".format(
        path_src=path_src,
        path_dst=path_dst
    )
    msg_debug = ""
    if exists(path_dst):
        if is_link(path_dst):
            if debug:
                msg_debug += "link already exists at dst, removing\n" \
                    "link_create:cmd_rm : %s" % cmd_rm
            else:
                run(cmd_rm)
        else:
            # BUG FIX: this message mixed a '%s' placeholder with
            # str.format(), so the path was never interpolated
            msg_error = "something exists at dst - '%s' " \
                "- and it's not a link\n kicking out" % path_dst
            import sys
            sys.exit(msg_error)
    if debug:
        msg_debug += "link_create:cmd_link : %s" % cmd_link
    else:
        run(cmd_link)
    return msg_debug
def prompt_continue(message="Do you want to continue? Y/n", default="Y"):
    """
    Ask the user whether to continue; sys.exit() on a negative answer.

    Keyword Arguments:
    message -- the question shown to the user
    default -- answer assumed when the user just hits enter
    """
    from fabric.operations import prompt
    import sys
    answer = prompt(message)
    if answer == "":
        answer = default
    if env.debug:
        printvar(
            "prompt_val", answer,
            not booleanize(answer))
    elif not booleanize(answer):
        sys.exit()

BIN
modules/utils.pyc Normal file

Binary file not shown.

View file

@ -0,0 +1,2 @@
DROP DATABASE {{db_name}};
DROP USER {{db_user}};

View file

@ -0,0 +1 @@
DROP DATABASE {{db_name}};

View file

@ -0,0 +1,5 @@
CREATE USER {{db_user}} WITH PASSWORD '{{db_password}}';
ALTER USER {{db_user}} CREATEDB;
CREATE DATABASE {{db_name}};
ALTER DATABASE {{db_name}} OWNER TO {{db_user}};
GRANT ALL PRIVILEGES ON DATABASE {{db_name}} TO {{db_user}};

View file

@ -0,0 +1,4 @@
DROP DATABASE {{db_name}};
CREATE DATABASE {{db_name}};
ALTER DATABASE {{db_name}} OWNER TO {{db_user}};
GRANT ALL PRIVILEGES ON DATABASE {{db_name}} TO {{db_user}};

View file

@ -0,0 +1,17 @@
# gunicorn script file
bind = '{{host}}:{{port}}'
raw_env = ["DJANGO_SETTINGS_MODULE={{settings_module}}"]
worker_tmp_dir = "/tmp"
user = '{{user}}'
group = '{{group}}'
access_logfile = \
'{{logging_access}}'
error_logfile = \
'{{logging_error}}'

View file

@ -0,0 +1,17 @@
# gunicorn script file
bind = 'unix:/tmp/gunicorn.{{ extended_name }}.sock'
raw_env = ["DJANGO_SETTINGS_MODULE={{settings_module}}"]
worker_tmp_dir = "/tmp"
user = '{{user}}'
group = '{{group}}'
access_logfile = \
'{{logging_access}}'
error_logfile = \
'{{logging_error}}'

View file

@ -0,0 +1,136 @@
LOCAL_SETTINGS = True # avoid recursive imports
BRANCH = '{{ project_branch }}'
from {{ project_name }}.settings import *
import initialize
import logging
LOCAL_LOGGING_PREFIX = "%s %%(message)s" % BRANCH
logging.basicConfig(format=LOCAL_LOGGING_PREFIX, level=logging.DEBUG)
configuration = initialize.get_config(BRANCH)
SITE_ID = 1
TEMPLATES[0]['DIRS'].append(configuration.paths.django.templates)
#
# NOTE: a lot of the code in these local settings files are automated and you
# might be inclined to take them out and move them into the main settings.py
# file. That would be a mistake. These automatic files AT THE VERY LEAST have
# one variable, and that is WHICH BRANCH CONFIGURATION FILE ARE WE LOOKING AT.
# Once I set that file, THEN all the rest of the information can be automated.
# So all these automated info needs to be here.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.{db_backend}'.format(
db_backend=configuration.server.database.backend),
'NAME': '{db_name}'.format(db_name=configuration.server.database.name),
'USER': '{db_user}'.format(db_user=configuration.server.database.user),
'PASSWORD': '{db_pass}'.format(
db_pass=configuration.server.database.password),
'HOST': '{db_host}'.format(db_host=configuration.server.database.host),
'PORT': '{db_port}'.format(db_port=configuration.server.database.port),
}
}
#
# directory from which we serve static files
#
# NOTE: both STATIC and MEDIA roots are getting their values from the
# initialization files that are set up above. Also, MEDIA_ROOT is being set
# to something called "paths.server.media.dynamic" - the names are different,
# but it means the same thing.
#
# MEDIA_ROOT is the dynamic media information that the web server, user or
# admin # will be adding and taking out. It's why I call it "dynamic"
STATIC_ROOT = configuration.paths.server.media.static
MEDIA_ROOT = configuration.paths.server.media.dynamic
# directories from which we search for static files to place in STATIC_ROOT
# these static files are located within the project root as opposed to the
# server root location
STATICFILES_DIRS = (
os.path.join(configuration.paths.project.root, "media"),
)
# debug and debug toolbar settings
DEBUG = True
TEMPLATE_DEBUG = DEBUG
USE_DEBUG_TOOLBAR = DEBUG
# allow template debug outputs on {{ project_branch }} environment
INTERNAL_IPS = ['127.0.0.1', '127.0.0.2', '127.0.0.3', ]
ALLOWED_HOSTS = [configuration.project.extendedname, ]
# -----------------------------------------
# Debug logging to the console
# convenience variable naming, otherwise it's too long to deal with
file_debug_handler = configuration.logging.django.handlers.file_debug
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': "%(levelname)s %(asctime)s %(module)s %(process)d"
" %(thread)d %(message)s"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
'code': {
'format': "%(module)s:%(funcName)s - %(message)s"
},
},
'handlers': {
file_debug_handler.name.project: {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': file_debug_handler.path.project,
'formatter': 'code'
},
file_debug_handler.name.server: {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': file_debug_handler.path.server,
'formatter': 'code'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
}
},
'loggers': {
'django.debug': {
# use the console for logging
'handlers':
[
'console',
file_debug_handler.name.project,
file_debug_handler.name.server
],
'level': 'DEBUG',
'propagate': True,
},
}
}

View file

@ -0,0 +1,138 @@
"""
Django settings for {{ project_name }} project.
Generated by 'django-admin startproject' using Django 1.9.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.abspath(os.path.join(BASE_DIR, os.pardir))
#
# add the directory that has the intialize.py module which contains the
# get_config function definition which will pull out all the configuraiton
# infomration for the settings file
sys.path.insert(0, os.path.join(
PROJECT_ROOT, 'scripts', 'tools', 'fabric', 'modules'))
#
# add an "apps" directory to this project, which is where all the apps
# ought to be in the first place.
sys.path.insert(0, os.path.join(BASE_DIR, 'apps'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '{{ secret_key }}'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '{{ project_name }}.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'

View file

@ -0,0 +1,125 @@
"""
Django settings for {{ project_name }} project.

(Jinja2 template: the {{ ... }} placeholders are substituted when the
project is generated — see templates/meta/layout.yml.)

Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
# BASE_DIR is the django code root; PROJECT_ROOT is one level above it.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.abspath(os.path.join(BASE_DIR, os.pardir))
#
# add the directory that has the initialize.py module which contains the
# get_config function definition which will pull out all the configuration
# information for the settings file
sys.path.insert(0, os.path.join(
PROJECT_ROOT, 'scripts', 'tools', 'fabric', 'modules'))
#
# add an "apps" directory to this project, which is where all the apps
# ought to be in the first place.
sys.path.insert(0, os.path.join(BASE_DIR, 'apps'))
#
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
#
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '{{ secret_key }}'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.staticfiles',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '{{ project_name }}.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# NOTE: sqlite is only the generated default; the local settings overlay
# (local.jinja2) replaces DATABASES with the per-branch configuration.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'

View file

@ -0,0 +1,17 @@
"""
WSGI config for the {{ project_name }} project (Jinja2 template).
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the branch-local settings overlay module ("_settings" —
# the settings.local entry in templates/meta/layout.yml) unless the
# environment already names one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}._settings")
application = get_wsgi_application()

View file

@ -0,0 +1,16 @@
"""
WSGI config for testit project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# NOTE(review): unlike the templated wsgi.jinja2 next to it, this file
# hard-codes the "testit" settings module — presumably a concrete sample;
# confirm before reusing it for another project.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testit.settings")
application = get_wsgi_application()

View file

@ -0,0 +1,12 @@
# docker-compose (v1 format) service definition for the project database
# (Jinja2 template; rendered to database.yml).
db:
image: {{ docker_database_image }}
# expose the container database port on the host
ports:
- "{{ docker_database_port_external }}:{{ docker_database_port_internal }}"
# credentials are injected through the image's environment variables
# (e.g. POSTGRES_USER / POSTGRES_PASSWORD / POSTGRES_DB for postgres)
environment:
{{ docker_database_env_user }} : "{{ database_user }}"
{{ docker_database_env_pass }} : "{{ database_pass }}"
{{ docker_database_env_db }} : "{{ database_name }}"
container_name: {{ docker_container_name }}

View file

@ -0,0 +1,17 @@
# gunicorn configuration file (Jinja2 template; {{...}} placeholders are
# substituted at render time).  Binds over TCP — the unixsocket variant of
# this template binds to a unix domain socket instead.
bind = '{{host}}:{{port}}'
# make the workers load the branch-specific django settings module
raw_env = ["DJANGO_SETTINGS_MODULE={{settings_module}}"]
worker_tmp_dir = "/tmp"
user = '{{user}}'
group = '{{group}}'
# FIX: gunicorn's config-file loader only picks up known setting names,
# which are `accesslog` and `errorlog` (the CLI spellings are
# --access-logfile / --error-logfile).  The previous `access_logfile` /
# `error_logfile` assignments were silently ignored, leaving logging
# unconfigured.
accesslog = '{{logging_access}}'
errorlog = '{{logging_error}}'

View file

@ -0,0 +1,17 @@
# gunicorn configuration file (Jinja2 template; {{...}} placeholders are
# substituted at render time).  Binds to a unix domain socket; the plain
# gunicorn.jinja2 variant binds over TCP instead.  The nginx unixsocket
# template proxies to this same socket path.
bind = 'unix:/tmp/gunicorn.{{ extended_name }}.sock'
# make the workers load the branch-specific django settings module
raw_env = ["DJANGO_SETTINGS_MODULE={{settings_module}}"]
worker_tmp_dir = "/tmp"
user = '{{user}}'
group = '{{group}}'
# FIX: gunicorn's config-file loader only picks up known setting names,
# which are `accesslog` and `errorlog` (the CLI spellings are
# --access-logfile / --error-logfile).  The previous `access_logfile` /
# `error_logfile` assignments were silently ignored, leaving logging
# unconfigured.
accesslog = '{{logging_access}}'
errorlog = '{{logging_error}}'

View file

@ -0,0 +1,136 @@
# Branch-local Django settings overlay (Jinja2 template, rendered once per
# branch).  It star-imports the generated settings module and then
# overrides the pieces that vary per branch: database, media roots,
# allowed hosts and logging.
LOCAL_SETTINGS = True # avoid recursive imports
BRANCH = '{{ project_branch }}'
from {{ project_name }}.settings import *
# `initialize` lives in scripts/tools/fabric/modules, which settings.py
# pushed onto sys.path before this module was imported.
import initialize
import logging
# prefix every log line with the branch name so logs from several branches
# remain distinguishable
LOCAL_LOGGING_PREFIX = "%s %%(message)s" % BRANCH
logging.basicConfig(format=LOCAL_LOGGING_PREFIX, level=logging.DEBUG)
configuration = initialize.get_config(BRANCH)
SITE_ID = 1
# add the server-side template directory to the (single) django backend
TEMPLATES[0]['DIRS'].append(configuration.paths.django.templates)
#
# NOTE: a lot of the code in these local settings files are automated and you
# might be inclined to take them out and move them into the main settings.py
# file. That would be a mistake. These automatic files AT THE VERY LEAST have
# one variable, and that is WHICH BRANCH CONFIGURATION FILE ARE WE LOOKING AT.
# Once I set that file, THEN all the rest of the information can be automated.
# So all these automated info needs to be here.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.{db_backend}'.format(
db_backend=configuration.server.database.backend),
'NAME': '{db_name}'.format(db_name=configuration.server.database.name),
'USER': '{db_user}'.format(db_user=configuration.server.database.user),
'PASSWORD': '{db_pass}'.format(
db_pass=configuration.server.database.password),
'HOST': '{db_host}'.format(db_host=configuration.server.database.host),
'PORT': '{db_port}'.format(db_port=configuration.server.database.port),
}
}
#
# directory from which we serve static files
#
# NOTE: both STATIC and MEDIA roots are getting their values from the
# initialization files that are set up above. Also, MEDIA_ROOT is being set
# to something called "paths.server.media.dynamic" - the names are different,
# but it means the same thing.
#
# MEDIA_ROOT is the dynamic media information that the web server, user or
# admin will be adding and taking out. It's why I call it "dynamic"
STATIC_ROOT = configuration.paths.server.media.static
MEDIA_ROOT = configuration.paths.server.media.dynamic
# directories from which we search for static files to place in STATIC_ROOT
# these static files are located within the project root as opposed to the
# server root location
STATICFILES_DIRS = (
os.path.join(configuration.paths.project.root, "media"),
)
# debug and debug toolbar settings
DEBUG = True
TEMPLATE_DEBUG = DEBUG
USE_DEBUG_TOOLBAR = DEBUG
# allow template debug outputs on {{ project_branch }} environment
INTERNAL_IPS = ['127.0.0.1', '127.0.0.2', '127.0.0.3', ]
ALLOWED_HOSTS = [configuration.project.extendedname, ]
# -----------------------------------------
# Debug logging to the console
# convenience variable naming, otherwise it's too long to deal with
file_debug_handler = configuration.logging.django.handlers.file_debug
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': "%(levelname)s %(asctime)s %(module)s %(process)d"
" %(thread)d %(message)s"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
'code': {
'format': "%(module)s:%(funcName)s - %(message)s"
},
},
# two file handlers (project-side and server-side log files) plus a
# console handler; handler names come from the branch configuration
'handlers': {
file_debug_handler.name.project: {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': file_debug_handler.path.project,
'formatter': 'code'
},
file_debug_handler.name.server: {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': file_debug_handler.path.server,
'formatter': 'code'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
}
},
'loggers': {
'django.debug': {
# use the console for logging
'handlers':
[
'console',
file_debug_handler.name.project,
file_debug_handler.name.server
],
'level': 'DEBUG',
'propagate': True,
},
}
}

View file

@ -0,0 +1,138 @@
"""
Django settings for {{ project_name }} project.

(Jinja2 template: the {{ ... }} placeholders are substituted when the
project is generated — see templates/meta/layout.yml.)

Generated by 'django-admin startproject' using Django 1.9.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR is the django code root; PROJECT_ROOT is one level above it.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.abspath(os.path.join(BASE_DIR, os.pardir))
#
# add the directory that has the initialize.py module which contains the
# get_config function definition which will pull out all the configuration
# information for the settings file
sys.path.insert(0, os.path.join(
PROJECT_ROOT, 'scripts', 'tools', 'fabric', 'modules'))
#
# add an "apps" directory to this project, which is where all the apps
# ought to be in the first place.
sys.path.insert(0, os.path.join(BASE_DIR, 'apps'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '{{ secret_key }}'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '{{ project_name }}.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# NOTE: sqlite is only the generated default; the local settings overlay
# (local.jinja2) replaces DATABASES with the per-branch configuration.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'

View file

@ -0,0 +1,125 @@
"""
Django settings for {{ project_name }} project.

(Jinja2 template: the {{ ... }} placeholders are substituted when the
project is generated — see templates/meta/layout.yml.)

Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
# BASE_DIR is the django code root; PROJECT_ROOT is one level above it.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.abspath(os.path.join(BASE_DIR, os.pardir))
#
# add the directory that has the initialize.py module which contains the
# get_config function definition which will pull out all the configuration
# information for the settings file
sys.path.insert(0, os.path.join(
PROJECT_ROOT, 'scripts', 'tools', 'fabric', 'modules'))
#
# add an "apps" directory to this project, which is where all the apps
# ought to be in the first place.
sys.path.insert(0, os.path.join(BASE_DIR, 'apps'))
#
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
#
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '{{ secret_key }}'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.staticfiles',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '{{ project_name }}.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# NOTE: sqlite is only the generated default; the local settings overlay
# (local.jinja2) replaces DATABASES with the per-branch configuration.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'

View file

@ -0,0 +1,17 @@
"""
WSGI config for the {{ project_name }} project (Jinja2 template).
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the branch-local settings overlay module ("_settings" —
# the settings.local entry in templates/meta/layout.yml) unless the
# environment already names one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}._settings")
application = get_wsgi_application()

View file

@ -0,0 +1,16 @@
"""
WSGI config for testit project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# NOTE(review): unlike the templated wsgi.jinja2 next to it, this file
# hard-codes the "testit" settings module — presumably a concrete sample;
# confirm before reusing it for another project.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testit.settings")
application = get_wsgi_application()

View file

@ -0,0 +1,39 @@
# nginx server template (Jinja2) — reverse-proxies to the Django backend
# over TCP at {{django_host}}:{{django_port}}.
server {
    listen {{port}};
    # FIX: the server_name directive was missing its terminating ";", so
    # nginx parsed the following "charset utf-8" tokens as extra server
    # names and the charset directive was lost.
    server_name .{{server_name}};
    charset utf-8;
    client_max_body_size 75M;
    access_log {{access_log}};
    error_log {{error_log}};
    # user-uploaded ("dynamic") media
    location /media {
        alias {{ server_media_dynamic }};
    }
    # django-debug-toolbar assets, served straight from site-packages
    location /static/debug_toolbar {
        alias {{virtualenv_sitepackages}}/debug_toolbar/static/debug_toolbar;
    }
    location /static {
        alias {{ server_media_static}};
    }
    # everything else goes to the application server
    location / {
        proxy_pass_header Server;
        proxy_set_header Host $http_host;
        proxy_redirect off;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Scheme $scheme;
        proxy_connect_timeout 10;
        proxy_read_timeout 10;
        proxy_pass http://{{django_host}}:{{django_port}}/;
    }
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root html;
    }
}

View file

@ -0,0 +1,62 @@
# nginx server template (Jinja2) — reverse-proxies to gunicorn over the
# unix domain socket created by gunicorn.unixsocket.jinja2.
upstream app_server_{{ extended_name }} {
    # fail_timeout=0 means we always retry an upstream even if it failed
    # to return a good HTTP response
    # for UNIX domain socket setups
    server unix:/tmp/gunicorn.{{ extended_name }}.sock fail_timeout=0;
    # for a TCP configuration
    # server {{django_host}}:{{django_port}} fail_timeout=0;
}
server {
    listen {{port}};
    # FIX: the server_name directive was missing its terminating ";", so
    # nginx parsed the following "charset utf-8" tokens as extra server
    # names and the charset directive was lost.
    server_name .{{server_name}};
    charset utf-8;
    client_max_body_size 75M;
    access_log {{access_log}};
    error_log {{error_log}};
    # user-uploaded ("dynamic") media
    location /media {
        alias {{ server_media_dynamic }};
    }
    # django-debug-toolbar assets, served straight from site-packages
    location /static/debug_toolbar {
        alias {{virtualenv_sitepackages}}/debug_toolbar/static/debug_toolbar;
    }
    location /static {
        alias {{ server_media_static}};
    }
    # everything else goes to the gunicorn upstream
    location / {
        proxy_pass_header Server;
        proxy_set_header Host $http_host;
        proxy_redirect off;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Scheme $scheme;
        proxy_connect_timeout 10;
        proxy_read_timeout 10;
        proxy_pass http://app_server_{{ extended_name }};
    }
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root html;
    }
}

View file

@ -0,0 +1,8 @@
; supervisor program template (Jinja2)
; FIX: stdout_logfile/stderr_logfile were each assigned twice — once via
; {{rootpath}}/{{project}}.{{domain}} and again via {{projectroot}}.
; Duplicate option keys are at best silently overridden (last one wins)
; and a parse error under strict parsers.  The later {{projectroot}}-based
; pair is kept; confirm {{projectroot}} resolves to the same project dir.
[program:{{project}}.{{branch}}]
user=root
command=bash {{rootpath}}/{{project}}.{{domain}}/scripts/gunicorn.sh
stdout_logfile={{projectroot}}/logs/supervisor/out.log
stderr_logfile={{projectroot}}/logs/supervisor/err.log

View file

@ -0,0 +1,10 @@
; this is the supervisor {{project_name}}.{{project_branch}} configuration file
; (Jinja2 template; rendered to supervisor.conf)
; runs gunicorn from the branch virtualenv against the project wsgi module,
; using the gunicorn config file rendered alongside it
[program:{{project_name}}.{{project_branch}}]
command={{server_path_virtualenv_bin}}/gunicorn {{django_settings_folder}}.wsgi:application -c {{server_path_scripts}}/{{ gunicorn_conf_file }}
directory={{ server_path_code }}
user={{project_user}}
group={{project_group}}
stdout_logfile={{supervisor_logs_out}}
stderr_logfile={{supervisor_logs_err}}

View file

@ -0,0 +1,48 @@
database:
backend: postgresql_psycopg2
host: docker
name: PROJECT_NAME_dev
port: DOCKER_PORT
users:
admin: {name: DATABASE_ADMIN_NAME, pass: DATABASE_ADMIN_PASS}
default: {name: DATABASE_USER_NAME, pass: DATABASE_USER_PASS}
django: {host: DJANGO_IP, port: DJANGO_PORT}
docker:
database:
env: {name: POSTGRES_DB, pass: POSTGRES_PASSWORD, user: POSTGRES_USER}
extension: db
host: local
image: postgres
name: null
port: 5432
name: null
logging:
paths: [null]
maintenance:
nginx:
commands: {start: nginx, status: ps waux | grep nginx, stop: nginx -s stop}
editor: mvim
execute: local
supervisor:
commands: {reload: supervisorctl reload, start: supervisorctl start, status: supervisorctl
status, stop: supervisorctl stop, update: supervisorctl update}
execute: local
media:
paths: {dynamic: dynamic, root: public/media, static: static}
nginx:
paths: {available: null, enabled: servers, root: /usr/local/etc/nginx}
port: NGINX_PORT
overrides: [null]
project:
branch: development
extension: dev
group: BRANCH_GROUP
host: localhost
name: PROJECT_NAME
paths: {home: PROJECT_NAME.prj, root: /Users/ronny/projects/django}
sudo: false
user: BRANCH_USER
rootpath: /usr/local/var/www
supervisor:
paths: {conf: /usr/local/etc/supervisor/conf.d}
virtualenv: {name: PROJECT_NAME, workon: /Users/ronny/.virtualenvs}

125
templates/meta/layout.yml Normal file
View file

@ -0,0 +1,125 @@
imports:
settings.main: "settings"
paths:
# paths and subdirectories relevant to django
django:
root: "code"
apps: "apps"
settings.local: "_settings"
tools:
fabric:
root: "scripts/tools/fabric"
templates:
conf: "templates/conf"
meta: "templates/meta"
readmes: "templates/readmes"
backups:
root: "backups"
database: "database"
media: "media"
fixtures: "django/fixtures"
extras:
code: "extras/scripts/code"
modules: "extras/scripts/code/modules"
templates: "extras/scripts/templates"
fixtures: "extras/data/fixtures"
templates:
conf: "scripts/conf"
logging:
django:
project: "extras/log"
server: "django"
logging:
django:
handlers:
file_debug:
name: 'file.debug'
file: 'django.debug'
modules:
override: "override"
virtualenv:
requirements: "scripts/meta/virtualenv"
# directories containing templates
templates:
nginx:
path: "nginx"
file: "default.conf.jinja2"
conf:
source: "default.conf.jinja2"
output: "nginx.conf"
django:
path: "django"
settings:
source: "settings.jinja2"
output: "settings.py"
local:
source: "local.jinja2"
output: "local.py"
wsgi:
source: "wsgi.jinja2"
output: "wsgi.py"
docker:
path: "docker"
database:
source: "database.jinja2"
output: "database.yml"
gunicorn:
path: "django"
conf:
source: "gunicorn.jinja2"
output: "gunicorn.conf.py"
supervisor:
path: "supervisor"
conf:
source: "supervisor.jinja2"
output: "supervisor.conf"
database:
path: "postgres"
init:
source: "db.init.sql.jinja2"
output: "db.init.sql"
re_init:
source: "db.re_init.sql.jinja2"
output: "db.re_init.sql"
drop_all:
source: "db.drop_all.sql.jinja2"
output: "db.drop_all.sql"
drop_db:
source: "db.drop_db.sql.jinja2"
output: "db.drop_db.sql"
overrides:
path: "overrides"
database:
default_admin: 'postgres'

View file

@ -0,0 +1,48 @@
database:
backend: postgresql_psycopg2
host: DATABASE_IP
name: PROJECT_NAME_stg
port: 5432
users:
admin: {name: DATABASE_ADMIN_NAME, pass: DATABASE_ADMIN_PASS}
default: {name: DATABASE_USER_NAME, pass: DATABASE_USER_PASS}
django: {host: DJANGO_IP, port: DJANGO_PORT}
logging:
paths: [null]
maintenance:
nginx:
commands: {start: service nginx start, status: service nginx status, stop: service
nginx stop}
editor: mvim
execute: sudo
supervisor:
commands: {reload: supervisorctl reload, start: supervisorctl start, status: supervisorctl
status, stop: supervisorctl stop, update: supervisorctl update}
execute: run
media:
paths: {dynamic: dynamic, root: public/media, static: static}
nginx:
paths: {available: sites-available, enabled: sites-enabled, root: /etc/nginx}
port: NGINX_PORT
overrides: [null]
project:
branch: staging
extension: stg
group: BRANCH_GROUP
host: PROJECT_IP
name: PROJECT_NAME
paths: {home: PROJECT_NAME.prj, root: /home/website}
sudo: true
user: BRANCH_USER
rootpath: /var/www
supervisor:
paths: {conf: /etc/supervisor/conf.d}
templates:
gunicorn:
conf: {output: gunicorn.conf.py, source: gunicorn.unixsocket.jinja2}
path: scripts/conf/django
nginx:
conf: {output: nginx.conf, source: unixsocket.jinja2}
file: unixsocket.jinja2
path: scripts/conf/nginx
virtualenv: {name: PROJECT_NAME, workon: /home/website/.virtualenvs}

283
templates/readmes/aws.md Normal file
View file

@ -0,0 +1,283 @@
## links
#### visudo
[configuring visudo](http://askubuntu.com/questions/539243/how-to-change-visudo-editor-from-nano-to-vim)
#### users
[tecmint.com complete guide to users](http://www.tecmint.com/add-users-in-linux/)
[How To Configure SSH Key-Based Authentication on a Linux Server](https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server)
#### postgres
[ubuntu community postgres docs](https://help.ubuntu.com/community/PostgreSQL)
## adding/deleting users
#### adding a user:
*("www-data" is the group name for website stuff on gandi)*
> *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username***
***-G group*** adds the groups in a comma separated
***-d /home/username*** specifies the home directory to be created (necessary on ubuntu)
***-m*** flag to create the home directory (necessary)
***-s /bin/bash*** what shell is to be used (default is none)
#### deleting a user
userdel -r {username}
## IMPORTANT
### set users primary group
**this is critical**
sudo usermod <username> -g www-data
### setting up ssh authentication
cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys"
## apt-get commands
**to see the package version:**
dpkg -s postgresql | grep Version
# setting up aws server
## creating the server instance
[aws instance](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EC2_GetStarted.html)
1. Open the [amazon EC2 console](https://console.aws.amazon.com/ec2/)
2. Choose **Launch Instance**
3. The *Choose an Amazon Machine Image (AMI)* page has basic configuration, so pick the first *ubuntu* configuration
4. This will take you to the *Choose an Instance Type* page, this chooses the hardware configuration, you want to pick **t2.micro**
5. Hit **Review and Launch**
6. This will take you to *Review Instance Launch* page, and that has an option for **Security Groups**, hit **Edit security groups**, on the page that pops up, pick the options you want to allow for your instance
7. When finished, hit "done" or whatever and you'll be taken back to the *Review Instance Launch* page, from here hit the **Launch** key
8. this will prompt you for a key pair. There are a few options. Create a new Pair and choose an existing key pair
#### key pair info:
[aws info on key pairs](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)
[importing id_rsa](http://www.admin-magazine.com/CloudAge/Blogs/Dan-Frost-s-Blog/Importing-SSH-keys-on-AWS)
[add_ssh](http://stackoverflow.com/questions/8193768/trying-to-ssh-into-an-amazon-ec2-instance-permission-error)
1. mv /path/to/myname.pem ~/.ssh
2. ssh-add ~/.ssh/myname.pem
3. ssh ubuntu@INSTANCE_IP_ADDRESS
Remember that the IP ADDRESS changes whenever you restart the instance
on your computer
1. vim /etc/hosts
2. add a line with the server name and IP ADDRESS for that instance
3. ssh ubuntu@SERVERNAME
## updating the hostname
[aws ubuntu hostname](https://aws.amazon.com/premiumsupport/knowledge-center/linux-static-hostname/)
[scroll down to find the "echo" comment](https://forums.aws.amazon.com/message.jspa?messageID=495274)
there is an issue with ubuntu instances on AWS: the name given in /etc/hostname doesn't match what exists in /etc/hosts. So if you try using sudo you'll get an error.
To fix this, you need to change those files, to get into sudo type in:
sudo su -
echo "127.0.0.1 $(hostname)" >> /etc/hosts
This will update /etc/hosts with the default hostname generated by amazon. Alternatively you can do what it says in the first link [aws ubuntu hostname](https://aws.amazon.com/premiumsupport/knowledge-center/linux-static-hostname/)
### setup visudo
sudo update-alternatives --config editor
su -c 'visudo'
find this line:
USERNAME ALL=(ALL) NOPASSWD: ALL
replace it with:
admin ALL=(ALL) ALL
## add new user
[adding a user on linux AWS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/managing-users.html)
[how to get and add the public key to the new user](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html#retrieving-the-public-key)
1. *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username***
2. *mkdir projectdir*
### set users primary group
**this is critical**
- The primary group is the group applied to you when you log in using the usual methods (TTYs, GUI, SSH, etc.).
sudo usermod <username> -g www-data
### set password
sudo passwd <username>
### setting up ssh authentication
1. get the public key:
ssh-keygen -y -f /path/to/myinstance.pem
2. copy the above results
3. log in to the ubuntu instance using default ubuntu user
4. change users to the custom username
sudo su - username
5. create the ssh directory and auth file
cd /home/username
mkdir .ssh
touch .ssh/authorized_keys
vim ~/.ssh/authorized_keys
6. now paste in the public_key you got in step 1
7. log out and test with the username
ssh username@instance.domain
### add username to the sudo group
AWS has a sudo group that grants a user sudo privileges
usermod -a -G sudo username
### apt-get setup
sudo apt-get update
sudo apt-get install aptitude
## postgres
#### links fix locale error with postgres
[could not connect to server solution](http://askubuntu.com/questions/50621/cannot-connect-to-postgresql-on-port-5432)
[locale solution](http://ubuntuforums.org/showthread.php?t=1346581)
[remote connecting](http://www.railszilla.com/postgresql-tcpip-connections-port-5432/coffee-break)
sudo apt-get install postgresql
sudo apt-get install postgresql-contrib
sudo locale-gen en_US en_US.UTF-8 hu_HU hu_HU.UTF-8
sudo dpkg-reconfigure locales
sudo service postgresql restart
sudo -u postgres psql postgres
sudo -u postgres createdb **website_dbname**
sudo apt-get install postgresql-server-dev-X.Y
sudo apt-get install postgresql-server-dev-9.3
#### change password for db user "postgres"
sudo -u postgres psql postgres
ALTER USER Postgres WITH PASSWORD '<newpassword>';
#### set up postgresql to remote access
######(see "remote connecting" link above)
#####Note: be careful with this, because **anyone** will be able to mess around with it
1. sudo vim /etc/postgresql/9.3/main/postgresql.conf
2. find **listen\_addresses** and change it to **listen\_addresses = '\*'**
3. sudo vim /etc/postgresql/9.3/main/pg_hba.conf
4. find **host all all 127.0.0.1/32 trust** and change **127.0.0.1/32** to **0.0.0.0/0**
5. sudo service postgresql restart
6. test it by running: *psql -h* ***ip\_address*** *-U* ***username*** *-d* ***database***
7. e.g. psql -h 173.246.107.96 -U postgres postgres
### setup the /var/www directory
cd /var
sudo mkdir www
sudo chgrp www-data www -R
sudo chmod g+w www -R
### install python packages
sudo apt-get install python-dev
sudo apt-get install libjpeg-dev
## install and set up supervisor
sudo apt-get install supervisor
make sure www-data is a group for the main user
vim /etc/supervisor/supervisord.conf
add the following:
[unix_http_server]
file=/var/run/supervisor.sock
chmod=0770
chown=nobody:www-data
[supervisorctl]
serverurl=unix:///var/run//supervisor.sock
chmod=0770
chown=nobody:www-data
#### run the following commands:
sudo service supervisor stop
sudo service supervisor start
### install pip and virtualenv
[virtualenv install](http://roundhere.net/journal/virtualenv-ubuntu-12-10/)
sudo apt-get install python-pip
sudo pip install virtualenv
sudo pip install virtualenvwrapper
cat "WORKON_HOME=~/.virtualenvs" >> .bashrc
cat ". /usr/local/bin/virtualenvwrapper.sh" >> .bashrc
### install nginx
sudo apt-get install nginx
# bootstrap server
fab (prod|rel) deploy.bootstrap

233
templates/readmes/gandi.md Normal file
View file

@ -0,0 +1,233 @@
## links
#### gandi
[connecting to gandi server](https://wiki.gandi.net/en/hosting/using-linux/how_to_connect_ai)
[accessing the server via ssh](https://wiki.gandi.net/en/hosting/using-linux/server-access)
[first steps with gandi server](https://wiki.gandi.net/en/hosting/gandi-expert/setup)
#### visudo
[configuring visudo](http://askubuntu.com/questions/539243/how-to-change-visudo-editor-from-nano-to-vim)
#### users
[tecmint.com complete guide to users](http://www.tecmint.com/add-users-in-linux/)
[How To Configure SSH Key-Based Authentication on a Linux Server](https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server)
#### postgres
[ubuntu community postgres docs](https://help.ubuntu.com/community/PostgreSQL)
## adding/deleting users
#### adding a user:
*("www-data" is the group name for website stuff on gandi)*
> *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username***
***-G group*** adds the groups in a comma separated
***-d /home/username*** specifies the home directory to be created (necessary on ubuntu)
***-m*** flag to create the user's home directory and copy skeleton files (necessary)
***-s /bin/bash*** what shell is to be used (default is none)
#### deleting a user
userdel -r {username}
## IMPORTANT
### set the user's primary group
**this is critical**
sudo usermod -g www-data <username>
### setting up ssh authentication
cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys"
## apt-get commands
**to see the package version:**
dpkg -s postgresql | grep Version
# setting up gandi server
## creating the server instance
after logging in, go to the virtual machine setup tab under *services* -> *servers*
[virtual machine](https://www.gandi.net/admin/iaas/vm)
under "servers" click "create a server". At the setup page you will have the option of either using a public ssh key, a password, or both. Pick both.
1. generate a password
2. go to ~/.ssh and look for something like *id_rsa.pub*
3. to put in on the clipboard, on OSX type *cat ~/.ssh/id_rsa.pub | pbcopy*
4. paste what's in the clipboard into the ssh input section
5. copy all this information down and start it up
## first steps with server
next follow the links under "gandi"
while you are following the commands in the link *"first steps with gandi server"* you may want to run these commands:
su -c 'apt-get install sudo'
su -c 'apt-get install vim'
after setting up "first steps with gandi server" above, you will want to configure visudo to use vim
### setup visudo
su -c 'update-alternatives --config editor'
su -c 'visudo'
find this line:
USERNAME ALL=(ALL) NOPASSWD: ALL
replace it with:
admin ALL=(ALL) ALL
(or whatever user name you are mainly using)
## add new user
1. *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username***
2. *mkdir projectdir*
### set the user's primary group
**this is critical**
sudo usermod -g www-data <username>
### set password
sudo passwd <username>
### setting up ssh authentication
cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys"
### apt-get setup
sudo apt-get update
sudo apt-get install aptitude
## postgres
#### links fix locale error with postgres
[could not connect to server solution](http://askubuntu.com/questions/50621/cannot-connect-to-postgresql-on-port-5432)
[locale solution](http://ubuntuforums.org/showthread.php?t=1346581)
[remote connecting](http://www.railszilla.com/postgresql-tcpip-connections-port-5432/coffee-break)
sudo apt-get install postgresql
sudo apt-get install postgresql-contrib
sudo locale-gen en_US en_US.UTF-8 hu_HU hu_HU.UTF-8
sudo dpkg-reconfigure locales
sudo service postgresql restart
sudo -u postgres psql postgres
sudo -u postgres createdb **website_dbname**
sudo apt-get install postgresql-server-dev-X.Y
sudo apt-get install postgresql-server-dev-9.3
#### change password for db user "postgres"
sudo -u postgres psql postgres
ALTER USER postgres WITH PASSWORD '<newpassword>';
#### set up postgresql to remote access
######(see "remote connecting" link above)
#####Note: be careful with this, because **anyone** will be able to mess around with it
1. sudo vim /etc/postgresql/9.3/main/postgresql.conf
2. find **listen\_addresses** and change it to **listen\_addresses = '\*'**
3. sudo vim /etc/postgresql/9.3/main/pg_hba.conf
4. find **host all all 127.0.0.1/32 trust** and change **127.0.0.1/32** to **0.0.0.0/0**
5. sudo service postgresql restart
6. test it by running: *psql -h* ***ip\_address*** *-U* ***username*** *-d* ***database***
7. e.g. psql -h 173.246.107.96 -U postgres postgres
### setup the /var/www directory
cd /var
sudo mkdir www
sudo chgrp www-data www -R
sudo chmod g+w www -R
### install python packages
sudo apt-get install python-dev
sudo apt-get install libjpeg-dev
## install and set up supervisor
sudo apt-get install supervisor
make sure www-data is a group for the main user
vim /etc/supervisor/supervisord.conf
add the following:
[unix_http_server]
file=/var/run/supervisor.sock
chmod=0770
chown=nobody:www-data
[supervisorctl]
serverurl=unix:///var/run/supervisor.sock
chmod=0770
chown=nobody:www-data
#### run the following commands:
sudo service supervisor stop
sudo service supervisor start
### install pip and virtualenv
[virtualenv install](http://roundhere.net/journal/virtualenv-ubuntu-12-10/)
sudo apt-get install python-pip
sudo pip install virtualenv
sudo pip install virtualenvwrapper
### install nginx
sudo apt-get install nginx
# bootstrap server
fab (prod|rel) deploy.bootstrap

View file

@ -0,0 +1,49 @@
I try different commands and some of the answers help me. Only this sequence in my case fixed both broken dependencies in migrations in MYAPP and clean all past migrations starting from scratch.
Before doing this ensure that database is already synced (e.g. do not add a new Model field here or change Meta options).
rm -Rf MYAPP/migrations/*
python manage.py makemigrations --empty MYAPP
python manage.py makemigrations
python manage.py migrate --fake MYAPP 0002
Where 0002 is the migration number returned by the last makemigrations command.
Now you can run makemigrations / migrate again normally because migration 0002 is stored but not reflected in the already-synced database.
if you really want to go crazy, delete all the tables in the database related to the app and then
DELETE FROM django_migrations WHERE app='registry'
-------
this is what I do
first I clear out everything:
rm -Rf appname/migrations
fab database.drop_all
fab database.init
fab django.manage:migrate
fab django.manage:syncdb
then I re-add the app
fab django.manage:"makemigrations appname"
fab django.manage:migrate
at that point I have working table
so again
rm -Rf {ON HOST}/appname/migrations
fab ONHOST database.drop_all
fab ONHOST database.init
fab ONHOST deploy.sync <--- CRITICAL STEP DO NOT FORGET
fab ONHOST django.manage:migrate
fab ONHOST django.manage:syncdb

View file

@ -0,0 +1,54 @@
* set up using docker
## setup the /var/www directory
cd /var
sudo mkdir www
sudo chgrp www-data www -R
sudo chmod g+w www -R
## install python packages
sudo apt-get install python-dev
sudo apt-get install libjpeg-dev
## install supervisor
sudo apt-get install supervisor
make sure www-data is a group for *user*
modify /etc/supervisor/supervisord.conf
add the following:
[unix_http_server]
file=/var/run//supervisor.sock ; (the path to the socket file)
chmod=0770 ; socket file mode (default 0700)
chown=nobody:www-data
[supervisorctl]
serverurl=unix:///var/run//supervisor.sock ; use a unix:// URL for a unix socket
chmod=0770
chown=nobody:www-data
#### run the following commands:
sudo service supervisor stop
sudo service supervisor start
## install postgres
sudo apt-get install postgresql postgresql-contrib
sudo apt-get install postgresql-server-dev-X.Y
sudo apt-get install postgresql-server-dev-9.1
## install pip and virtualenv
http://roundhere.net/journal/virtualenv-ubuntu-12-10/
sudo apt-get install python-pip
sudo pip install virtualenv
sudo pip install virtualenvwrapper
1. setup scripts/meta/configuration/branch.yml
1. setup scripts/meta/virtualenv/branch.txt

View file

@ -0,0 +1,32 @@
# how to do django translation
<https://docs.djangoproject.com/es/1.9/topics/i18n/translation/>
#### translation for templates
<https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#translator-comments-in-templates>
#### language switching
<https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#switching-language-in-templates>
#### how to create language files
<https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#localization-how-to-create-language-files>
you have to set the -e extension to specify files you want to use. e.g.
`django-admin makemessages -e django`
`fab django.admin:"makemessages -l en -e django"`
after creating the .po files you have to compile them for use
`django-admin compilemessages`
`fab django.admin:"compilemessages"`
### set language redirect
<https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#the-set-language-redirect-view>

View file

@ -0,0 +1,20 @@
in order to update the images
1. make fixtures - fab django.create_fixtures:appname
2. copy fixtures - cp extras/backups/fixtures/appname/appname.json
code/app/appname/fixtures/appname.json
3. deploy - fab <branch> deploy.sync
4. copy images - cp extras/path/to/images/* /var/www/project.branch/public/media/dynamic
5. check pip - make sure virtualenv txt is updated to development
6. syncdb - fab <branch> django.manage:syncdb
7. ?flush - fab <branch> django.manage:flush
8. ?superuser - fab <branch> django.manage:createsuperuser
9. load fixture - fab <branch> django.manage:"loaddata fixture appname"
if first time:
-? fab <branch> django.manage:"makemigrations thumbnail"
-? fab <branch> django.manage:migrate
10. ?clear thumbs - fab <branch> django.manage:"thumbnail clear"