From 2bef12902ee89605d1e361f2a241eee95b2ea2fc Mon Sep 17 00:00:00 2001 From: Ronny Abraham Date: Tue, 6 Sep 2016 14:43:49 +0300 Subject: [PATCH] new file: api.py new file: api.pyc new file: conf/fabric.yml new file: fabfile.py new file: fabfile.pyc new file: modules/__init__.py new file: modules/__init__.pyc new file: modules/conf_setup.py new file: modules/conf_setup.pyc new file: modules/configuration_setup.py new file: modules/database.py new file: modules/database.pyc new file: modules/deploy.py new file: modules/deploy.pyc new file: modules/django.py new file: modules/django.pyc new file: modules/docker.py new file: modules/docker.pyc new file: modules/initialize.py new file: modules/initialize.pyc new file: modules/maintenance.py new file: modules/maintenance.pyc new file: modules/nginx.py new file: modules/nginx.pyc new file: modules/pip.py new file: modules/pip.pyc new file: modules/setup.pyc new file: modules/supervisor.py new file: modules/supervisor.pyc new file: modules/testing/__init__.py new file: modules/testing/__init__.pyc new file: modules/testing/configuration_setup.py new file: modules/testing/maintenance.pyc new file: modules/utils.py new file: modules/utils.pyc new file: templates/conf/database/files/db.drop_all.sql.jinja2 new file: templates/conf/database/files/db.drop_db.sql.jinja2 new file: templates/conf/database/files/db.init.sql.jinja2 new file: templates/conf/database/files/db.re_init.sql.jinja2 new file: templates/conf/django/files/gunicorn.jinja2 new file: templates/conf/django/files/gunicorn.unixsocket.jinja2 new file: templates/conf/django/files/local.jinja2 new file: templates/conf/django/files/settings.jinja2 new file: templates/conf/django/files/settings18.jinja2 new file: templates/conf/django/files/wsgi.jinja2 new file: templates/conf/django/files/wsgi.py new file: templates/conf/docker/files/database.jinja2 new file: templates/conf/gunicorn/files/gunicorn.jinja2 new file: templates/conf/gunicorn/files/gunicorn.unixsocket.jinja2 new file: templates/conf/gunicorn/files/local.jinja2 new file: templates/conf/gunicorn/files/settings.jinja2 new file: templates/conf/gunicorn/files/settings18.jinja2 new file: templates/conf/gunicorn/files/wsgi.jinja2 new file: templates/conf/gunicorn/files/wsgi.py new file: templates/conf/nginx/files/default.conf.jinja2 new file: templates/conf/nginx/files/unixsocket.jinja2 new file: templates/conf/supervisor/files/conf_old new file: templates/conf/supervisor/files/supervisor.jinja2 new file: templates/meta/development.yml new file: templates/meta/layout.yml new file: templates/meta/staging.yml new file: templates/readmes/aws.md new file: templates/readmes/gandi.md new file: templates/readmes/reset_migrations.md new file: templates/readmes/setup_gandi.md new file: templates/readmes/translations.md new file: templates/readmes/update_images.md --- api.py | 11 + api.pyc | Bin 0 -> 653 bytes conf/fabric.yml | 11 + fabfile.py | 78 ++ fabfile.pyc | Bin 0 -> 1702 bytes modules/__init__.py | 0 modules/__init__.pyc | Bin 0 -> 167 bytes modules/conf_setup.py | 237 +++++ modules/conf_setup.pyc | Bin 0 -> 7051 bytes modules/configuration_setup.py | 160 +++ modules/database.py | 424 ++++++++ modules/database.pyc | Bin 0 -> 10785 bytes modules/deploy.py | 478 +++++++++ modules/deploy.pyc | Bin 0 -> 10477 bytes modules/django.py | 645 ++++++++++++ modules/django.pyc | Bin 0 -> 16311 bytes modules/docker.py | 198 ++++ modules/docker.pyc | Bin 0 -> 5998 bytes modules/initialize.py | 958 ++++++++++++++++++ modules/initialize.pyc | Bin 0 -> 
17247 bytes modules/maintenance.py | 258 +++++ modules/maintenance.pyc | Bin 0 -> 6380 bytes modules/nginx.py | 288 ++++++ modules/nginx.pyc | Bin 0 -> 6427 bytes modules/pip.py | 134 +++ modules/pip.pyc | Bin 0 -> 3669 bytes modules/setup.pyc | Bin 0 -> 3572 bytes modules/supervisor.py | 356 +++++++ modules/supervisor.pyc | Bin 0 -> 8810 bytes modules/testing/__init__.py | 1 + modules/testing/__init__.pyc | Bin 0 -> 206 bytes modules/testing/configuration_setup.py | 181 ++++ modules/testing/maintenance.pyc | Bin 0 -> 4220 bytes modules/utils.py | 387 +++++++ modules/utils.pyc | Bin 0 -> 11490 bytes .../database/files/db.drop_all.sql.jinja2 | 2 + .../conf/database/files/db.drop_db.sql.jinja2 | 1 + .../conf/database/files/db.init.sql.jinja2 | 5 + .../conf/database/files/db.re_init.sql.jinja2 | 4 + templates/conf/django/files/gunicorn.jinja2 | 17 + .../django/files/gunicorn.unixsocket.jinja2 | 17 + templates/conf/django/files/local.jinja2 | 136 +++ templates/conf/django/files/settings.jinja2 | 138 +++ templates/conf/django/files/settings18.jinja2 | 125 +++ templates/conf/django/files/wsgi.jinja2 | 17 + templates/conf/django/files/wsgi.py | 16 + templates/conf/docker/files/database.jinja2 | 12 + templates/conf/gunicorn/files/gunicorn.jinja2 | 17 + .../gunicorn/files/gunicorn.unixsocket.jinja2 | 17 + templates/conf/gunicorn/files/local.jinja2 | 136 +++ templates/conf/gunicorn/files/settings.jinja2 | 138 +++ .../conf/gunicorn/files/settings18.jinja2 | 125 +++ templates/conf/gunicorn/files/wsgi.jinja2 | 17 + templates/conf/gunicorn/files/wsgi.py | 16 + .../conf/nginx/files/default.conf.jinja2 | 39 + templates/conf/nginx/files/unixsocket.jinja2 | 62 ++ templates/conf/supervisor/files/conf_old | 8 + .../conf/supervisor/files/supervisor.jinja2 | 10 + templates/meta/development.yml | 48 + templates/meta/layout.yml | 125 +++ templates/meta/staging.yml | 48 + templates/readmes/aws.md | 283 ++++++ templates/readmes/gandi.md | 233 +++++ templates/readmes/reset_migrations.md | 49 + templates/readmes/setup_gandi.md | 54 + templates/readmes/translations.md | 32 + templates/readmes/update_images.md | 20 + 67 files changed, 6772 insertions(+) create mode 100644 api.py create mode 100644 api.pyc create mode 100644 conf/fabric.yml create mode 100644 fabfile.py create mode 100644 fabfile.pyc create mode 100644 modules/__init__.py create mode 100644 modules/__init__.pyc create mode 100644 modules/conf_setup.py create mode 100644 modules/conf_setup.pyc create mode 100644 modules/configuration_setup.py create mode 100644 modules/database.py create mode 100644 modules/database.pyc create mode 100644 modules/deploy.py create mode 100644 modules/deploy.pyc create mode 100644 modules/django.py create mode 100644 modules/django.pyc create mode 100644 modules/docker.py create mode 100644 modules/docker.pyc create mode 100644 modules/initialize.py create mode 100644 modules/initialize.pyc create mode 100644 modules/maintenance.py create mode 100644 modules/maintenance.pyc create mode 100644 modules/nginx.py create mode 100644 modules/nginx.pyc create mode 100644 modules/pip.py create mode 100644 modules/pip.pyc create mode 100644 modules/setup.pyc create mode 100644 modules/supervisor.py create mode 100644 modules/supervisor.pyc create mode 100644 modules/testing/__init__.py create mode 100644 modules/testing/__init__.pyc create mode 100644 modules/testing/configuration_setup.py create mode 100644 modules/testing/maintenance.pyc create mode 100644 modules/utils.py create mode 100644 modules/utils.pyc create mode 100644 
templates/conf/database/files/db.drop_all.sql.jinja2 create mode 100644 templates/conf/database/files/db.drop_db.sql.jinja2 create mode 100644 templates/conf/database/files/db.init.sql.jinja2 create mode 100644 templates/conf/database/files/db.re_init.sql.jinja2 create mode 100644 templates/conf/django/files/gunicorn.jinja2 create mode 100644 templates/conf/django/files/gunicorn.unixsocket.jinja2 create mode 100644 templates/conf/django/files/local.jinja2 create mode 100644 templates/conf/django/files/settings.jinja2 create mode 100644 templates/conf/django/files/settings18.jinja2 create mode 100644 templates/conf/django/files/wsgi.jinja2 create mode 100644 templates/conf/django/files/wsgi.py create mode 100644 templates/conf/docker/files/database.jinja2 create mode 100644 templates/conf/gunicorn/files/gunicorn.jinja2 create mode 100644 templates/conf/gunicorn/files/gunicorn.unixsocket.jinja2 create mode 100644 templates/conf/gunicorn/files/local.jinja2 create mode 100644 templates/conf/gunicorn/files/settings.jinja2 create mode 100644 templates/conf/gunicorn/files/settings18.jinja2 create mode 100644 templates/conf/gunicorn/files/wsgi.jinja2 create mode 100644 templates/conf/gunicorn/files/wsgi.py create mode 100644 templates/conf/nginx/files/default.conf.jinja2 create mode 100644 templates/conf/nginx/files/unixsocket.jinja2 create mode 100644 templates/conf/supervisor/files/conf_old create mode 100644 templates/conf/supervisor/files/supervisor.jinja2 create mode 100644 templates/meta/development.yml create mode 100644 templates/meta/layout.yml create mode 100644 templates/meta/staging.yml create mode 100644 templates/readmes/aws.md create mode 100644 templates/readmes/gandi.md create mode 100644 templates/readmes/reset_migrations.md create mode 100644 templates/readmes/setup_gandi.md create mode 100644 templates/readmes/translations.md create mode 100644 templates/readmes/update_images.md diff --git a/api.py b/api.py new file mode 100644 index 0000000..6f32b9f --- /dev/null +++ b/api.py @@ -0,0 +1,11 @@ + +import modules.nginx as nginx +import modules.supervisor as supervisor +import modules.utils as utils +import modules.pip as pip +import modules.django as django +import modules.deploy as deploy +import modules.docker as docker +import modules.database as database +import modules.maintenance as maintenance +import modules.conf_setup as conf_setup diff --git a/api.pyc b/api.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f2d2c7f0c42dce81f896e126328ad72a3ce9805 GIT binary patch literal 653 zcmZ9JOHRWu5QZlWeUqjSZosCi`T_{CWy1y`ByW)Gx|QA3wrmH{t8gH8oQ4B{6W6MX z6OaFICi*oK=Rd{h_UrqV!fAE<`QW$x^kL)Q3@QMT0nQ?JK;(e)h*u!GfV&ZUAo_s& z5eFcKfQJ!BAjW{l5wAfMfQyI|5GCL;;uORT@GRmS!~*c5Y3X^{^vQqydRpFi=?WhX zYExItuH=r(57+gXpyo-<)|#1jZk2H(pH47!s2#6t>k!m9anM}5+;ekIlKe&xxl^v^ zQzv;*Wx7(wPz~~;s_34XPz`dLIul3M#4vsAxF1;C?gMIXfUMw>f871WrX`=+A&Z00IvizVcra9f$dZmp(mKy&uUDyQx&J<<4Jm z<7@Z;c(YE@mID%p)`@3lcGf%d-pp?JYd9GF`1vWP)gv&z!fU@n6!-^J5-kFn2DA_~ z6%_ESpfaR+ho&9BD5SDWi-@LC>+Vq5qYp)wWS@$Nz7b7hDtcZSkP=k%S--@#Ti%S< z=C(HnYja3yNSB0N1s#vKkdyw6XUu835JH)A*I`52v!ijq+5qCDs!An|{;JwJb{shV zzEJ0?tm=g_ZUdUVukqTL7Xa=o)^9>}}eH zI>Ihr?Sfmb3nlc1e+ejb0`4eh|VHvo|5zWTrl03{4B`COkS&ly?K{h#9l7# z3bv0`sFUSv+{1nuUdlK`7D0ILp)A1*v#pJGT9#njQ;eEuAZCiOFN)DflYhB=dP2mK zW%&q$?L$y+JJ=I(w4rEI=Y2Rqk%SbeHW<3(9t(k;3SU8u&u+k z(36#UJb`UYf1gw|d8>=33l z$<@r=1LqoX{AU>diTgelv~^yWn6DA;6Z-@rxpo=cD09XU?j9Ex)TeaF8FCKPiNyFG zMt{!Q8(|`A-JVD8XZHYNf$HEInp_>kj1Pe0TCIIcr%UUPu)jEY6W^;G)HiI#tFsY{ j9f;fSUrzBwo8C)?Vjn{6i;>t7gE$J~PP{w1hhP5>B?o}* 
literal 0 HcmV?d00001 diff --git a/modules/__init__.py b/modules/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/__init__.pyc b/modules/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b41f4fae44a2c409de846065938cef34a8f71dd GIT binary patch literal 167 zcmZSn%*!QKe>Oas0SXv_v;z`FVMj z`UOS#S*gh-#ri2(iFxVy`o)>K1v#k&naSCu1$qTVS^CAvMVSRa`Lx8OqReFd-29Z% hoYZ3d`1s7c%#!$cy@JXT4xphn5D7bw)x|)}003-vDQ*A& literal 0 HcmV?d00001 diff --git a/modules/conf_setup.py b/modules/conf_setup.py new file mode 100644 index 0000000..a5b4d3c --- /dev/null +++ b/modules/conf_setup.py @@ -0,0 +1,237 @@ + +from fabric.operations import run, put +from fabric.api import env, task +import os + +from maintenance import _get_configuration_path, load_configuration + + +@task +def deploy(param=None): + param_list = ['conf', 'readmes'] + + if not param: + print "this requires input param must be one of %s" % param_list + import sys + sys.exit() + + if param == 'conf': + deploy_conf() + elif param == 'readmes': + deploy_readmes() + + +@task +def backup(param=None): + param_list = ['conf', 'meta', 'readmes'] + + if not param: + print "this requires input param must be one of %s" % param_list + import sys + sys.exit() + + if param == 'conf': + _backup_conf() + elif param == 'meta': + _backup_meta() + elif param == 'readmes': + _backup_readmes() + + +def _backup_conf(): + configuration = env.config + + for key in configuration.templates.keys(): + section = getattr(configuration.templates, key) + source_path = os.path.join(section['path']['local'], 'files') + + dest_path = os.path.join( + configuration.tools.fabric.templates.conf, + key) + + run("mkdir -p %s" % dest_path) + put(source_path, dest_path) + + +def _backup_meta(): + configuration = env.config + + from fabric.operations import run, put + + source_path_layout = _get_configuration_path( + 'layout', 'development') + + config_dev = _modify_configuration('development') + config_stg = _modify_configuration('staging') + + dest_path = configuration.tools.fabric.templates.meta + + run("mkdir -p %s" % dest_path) + + # + # copy over the layout.yml file only + + put(source_path_layout, dest_path) + + # + # the development.yml file needs to be + # modified before it can be copied over + + _store_configuration(config_dev) + _store_configuration(config_stg) + + +def _backup_readmes(): + configuration = env.config + + dest_readmes = configuration.tools.fabric.templates.readmes + + source_readmes = os.path.join( + configuration.paths.project.root, 'scripts', 'readmes') + + copy_directories(source_readmes, dest_readmes) + + +def _modify_configuration(branch): + """ + this method modifies a meta/branch.yml file so that it can be stored as + a template for future branch files of the same time. + + Keyword arguments: + branch -- the name of the branch whose configuration files we are + modifying + + Notice that some of the values requires me to store the values with + certain names, e.g. 
if the project/host is NOT 'localhost', then + override the stored IP address with the word "PROJECT_IP" + + this method is meant to be used by _store_configuration + """ + + # + # get the configuration dict for this branch + + config_yaml = load_configuration('config', branch) + + # + # basic project configuration + + config_yaml['project']['name'] = "PROJECT_NAME" + config_yaml['project']['paths']['home'] = "PROJECT_NAME.prj" + config_yaml['project']['user'] = "BRANCH_USER" + config_yaml['project']['group'] = "BRANCH_GROUP" + + if config_yaml['project']['host'] != 'localhost': + config_yaml['project']['host'] = "PROJECT_IP" + + # + # database configuration + + config_yaml['database']['name'] = "PROJECT_NAME_" + \ + config_yaml['project']['extension'] + + if config_yaml['database']['host'] == "docker": + config_yaml['database']['port'] = "DOCKER_PORT" + else: + config_yaml['database']['host'] = "DATABASE_IP" + + # + # database user name values + + config_yaml['database']['users']['admin']['name'] = "DATABASE_ADMIN_NAME" + config_yaml['database']['users']['admin']['pass'] = "DATABASE_ADMIN_PASS" + + config_yaml['database']['users']['default']['name'] = "DATABASE_USER_NAME" + config_yaml['database']['users']['default']['pass'] = "DATABASE_USER_PASS" + + # + # django configuration + + config_yaml['django']['port'] = "DJANGO_PORT" + config_yaml['django']['host'] = "DJANGO_IP" + + # + # nginx and virtualenv configuration + + config_yaml['nginx']['port'] = "NGINX_PORT" + config_yaml['virtualenv']['name'] = "PROJECT_NAME" + + return config_yaml + + +def _store_configuration(config_dict): + """ + takes config dictionary converts it to a yaml file object, + then saves it under the appropriate file name + + Keyword arguments: + config_dict -- yaml based configuration dictionary object + """ + configuration = env.config + + from StringIO import StringIO + import yaml + from fabric.operations import put + + branch_name = config_dict['project']['branch'] + ".yml" + + dest_path = os.path.join( + configuration.tools.fabric.templates.meta, + branch_name) + + put(StringIO(yaml.dump(config_dict)), dest_path) + + +def deploy_readmes(): + """ + takes the readme files from tools/fabric/templates/readmes + and puts then under the top level of the project/scripts directory + """ + configuration = env.config + source_readmes = configuration.tools.fabric.templates.readmes + + dest_readmes = os.path.join( + configuration.paths.project.root, + 'scripts', 'readmes') + + copy_directories(source_readmes, dest_readmes) + + +def deploy_conf(): + """ + takes the conf templates from tools/fabric/templates/conf + and puts them under scripts/conf. + + Note, these "conf" files are NOT the same as the meta/branch.yml conf + files. They are configuration files meant for the various subsystems + of the project. ie, database, docker, supervisor, etc. 
+ """ + configuration = env.config + + for key in configuration.templates.keys(): + section = getattr(configuration.templates, key) + dest_path = section['path']['local'] + + source_path = os.path.join( + configuration.tools.fabric.templates.conf, + key) + + copy_directories(source_path, dest_path) + + +def copy_directories(source_path, dest_path): + """ + takes the files from source and copies them to dest using fabric.put + + Keyword arguments: + source_path -- the source dir + dest_path -- the destination dir + """ + + run("mkdir -p %s" % dest_path) + + file_list = run("ls %s" % source_path).split() + for fname in file_list: + fpath = os.path.join(source_path, fname) + + put(fpath, dest_path) diff --git a/modules/conf_setup.pyc b/modules/conf_setup.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d97c264e8cffe18c8b3032572593026cb18b4741 GIT binary patch literal 7051 zcmcgx+io0L6+Kn$w%v|zaS}V8NisDKC=HX?!$ks(B810@9A}iaDLc%_XlTk^WxLv4 zRh>GgowSq)iOd5MFNjaz2YBLPUU=pS@fG|49{_9ZQ(fJ50s(|)Q)xN77wz{fQQOTIvA5%$H?N@DU zMRms2{MN!8XH+;T zg;^EWq%fz#DJjgW@RAfRt8iKh3o4wE!W9+HO5rLO+O5yw2=PDgS9eh|JX+pM<+s~2 z!^S8*HDeqR=#z$RUP85b6q#l#OAq6tULKe@OPk%m9GeC7b+RBl+t+f?%q6(i1|AJL zQGAfqk*i)g>LuE~&z-$=)Z*r&aEx+-a9_FVrK{Qyic(IhI1ZnkxGI0FjB8sLrPY+KW@1-RlNdAbYIJ~(TmuaOK1qAZYS%@!OiTjUaVHCM!j)a=CC_>-vC z@z|IC0;RR5I?DB0o~3Djt(#}sQa+c)Ew+-|mplBUX(JME4;HD}tHai^Ur{FW-?zBf4T z6<$_lP}5OmptT<;D67yKK>QA^Jt?(zcSLKgQfpS1713p1LBOxA-bI((c*EygB%eek z@Tt)MO_S7x$la$H%rsujgMb3HIvj1_Qidio7%{36MhoPy4Jc`LHMk=qKf zA*o%EvHSB_^jmnWnL9Oi(OGbAx;1AG)T?o#A;f#!f&e%o2*w2jkQU)UY@j4lFqrPv zxw*zLQVmrhO6mmttT*t#r%@;CCQ)jv8cc8$r$=u(pW0@YV z;?ro%HN2YertzV5@MYBVZ=%P)#TTv6r;jj1Ra2WRkIsfcIg9@R$KkPnl88Bil4fxt z#s>?rW*D7HxG1XH9Qn7=dr=6YfPRR_dKeJVQZKxC1KXd@1&GYIY zc>MSIqE7r9c!`zMzgUM*1XghZd6pSNgK1^meiKdxhMeVbq$PNZ;v`In5f>3aLVcmX zK~TU97ZAt&1uV0NM-wXwidleJUw0*P6W^5B1|IzrcrGXd@`bo*vm$63V+AgJOx=gc z4r=3QnLrCvQ}owR6GtwiFRA;3j%f^?Q709ZcS$aX&G$jn!nv|xUM(-N^RiuRP~_(e z66pV+!YeYGBd)6ZDVXiGvSAwQgBBDMEMdrXJ5jj&M(cYR`~I2M8^ze0Z?tmkEouG0 zE*pAfjca}=wUwezz!=`Pvtcv4^>0wL6A<*2hd&{bpb5uW=!v5MY`nlDaacRZgS2(L z+D|$jqnM{N3=fPK8?P0lB$?tYLNCxVSKyf!zO=^AzjscEmVz31H#YQUB|cR61wlVqn+p2s1#Q%ZGf#|y#`{9b!pOu9r$D?9%7 zc{-F@iGw^%$E*Hd(C`rA);#FHK6S+6T&C<>uIrC=m# z3|8CP5!y1Sa&Ju;9=$M8s!3SLUc%I>%sQMXAk}6&bGfNt97&7 zhTRA2d+VRB?>>YO%@qzD)Yl(8*=jVoj%<`i&{D$f3uNmpV|UhfckSeX))E-Y$tVds z7hQT79R|IQdCL%)LiYQ3y|KA%_bW${dy^daf$hS6={OskTa7R5I1tudDd+i07^I4-2(G+pHA0(ZNipgB3iAR4MFLh&&`Woy*mVv&3J- z=0#@_d~XrGvwUo5pa9D_{cBW;I1LAje@I?{?Fr#`SV!6d0U2x$Wuz@6r2(GjAd(P5Qy6`65 zJjUI?enh)`r}r#$%x!pZp|T{+Bs}LZNA_tqBYUy*l_(M$+ci17=hk+^0!z?EH{tJ6 z0Mrd*MV(0mza*;uQ&hz|xVV3Zo$}>bdUKuu2_fZ_VK3UK zu?(*Wi-&oZAUKTVTLxhpSsR;UlFx%Q^pIE55~W6vXtJ)?iEwEd1mZ!MD{?siy=9<% zna00|{Rru8{P(O^VvNKh|6@+`9lVCD*8d*czRNZ|7r|ZWqQAzY8820}=8(JcN+H^% zt1Y$rOH>LMg=9r0SB7D-;^%KqRzR8XSdydo53>{WPLj=V@U!^{)y+qenJDq_3C!&r z9_SWIvZlR%hX@XR0}e?6he7s^g9<7KS``Vg0V!Ky`$_>jP%$AuCI1TqS#MCyazV+Q zta{$wanvV~JLj^byP!Ixo1FJF$m6W1J>5HCo-;};F)avVfm2KwV~YdEfRfOKyZ8zh z>C;$eIcS_|ty=hAg&uugq#O0KS8#iCUS{`ueEmy@#<**dOMqCv<+1c3U$^;M!Rw+h zO!n|+Jens{HJe(QEmA8pZq;egd7r`Z&(HuY1DXhuHWP*#f})a|&2!`W`{HA|qX%Es zCATJM$K4sDEJT@T!Icdy_JaKEBV^D@ocJYH)- za}k=nA|V2=_0GsRlDlAJ6jVymi|$Ld!P`ff_Zp00L!8S09Fzmkr)o51*U`xHHR_^! 
zRPFs;cRR5;4-`g2$B!nLYzeh4){qRGhh4JoB}=+Kt+!+D2^& HrCa|5>DF-G literal 0 HcmV?d00001 diff --git a/modules/configuration_setup.py b/modules/configuration_setup.py new file mode 100644 index 0000000..22dd99a --- /dev/null +++ b/modules/configuration_setup.py @@ -0,0 +1,160 @@ +from fabric.contrib.files import exists as fab_exists +from fabric.operations import run +from fabric.api import env + +import os +import sys +import utils + + +def check_version(section): + pass + + +def check_is_conf(section, handle_error=False): + """confirms that the configuration section name passed is a legit one + + Keyword Arguments: + + section -- the configuration section we are looking at + handle_error -- if True, print out an error message and exit + """ + configuration = env.config + + if section in configuration.templates.keys(): + return True + else: + if handle_error: + print """ +Error. maintenance.exists_dir_sub takes a 'section' parameter value. +'%s' is not a valid parameter value. +Valid options include:""" % section + + for key in configuration.templates.keys(): + print " %s" % key + + print """ +Please run the command again, but this time enter a valid section value. + """ + + sys.exit() + + return False + + +def exists_dir_top(): + """ Check if the parent directory for all configuration files exists""" + + from fabric.contrib.files import exists + configuration = env.config + + # NOTE + # all template configuration files are built off the files that are + # contained LOCALLY. I don't bother building them off the remotely + # located files, since those files get rsync'd anyway. + + if env.debug: + print "maintenance.exists_dir_top -- checking for " \ + "directory:\n\t%s\n" % configuration.paths.conf.local + + return exists(configuration.paths.conf.local) + + +def exists_dir_sub(section): + """Check if the subdirectory for this configuration type exists in the + configuration directory + + Keyword Arguments: + + section -- the configuration section we are looking at + """ + configuration = env.config + + # NOTE + # all template configuration files are built off the files that are + # contained LOCALLY. I don't bother building them off the remotely + # located files, since those files get rsync'd anyway. 
+ + check_is_conf(section) + + _template = getattr(configuration.templates, section) + + if env.debug: + utils.printvar('template.path.local', _template.path.local) + + path_test = os.path.join(_template.path.local, 'blah') + + utils.printvar('exists_path_test', fab_exists(path_test)) + utils.printvar('exists_local', fab_exists(_template.path.local)) + else: + return fab_exists(_template.path.local) + + +def exists_file(section): + """Check if the template file for this configuration type exists in the + configuration directory + + Keyword Arguments: + + section -- the configuration type + """ + configuration = env.config + + check_is_conf(section) + exists_dir_sub(section) + + utils.print_console("\tNOTE: exists_file ONLY works when run on the" + " local branch!\n\tThis is because it is set up to " + " only check development template config files", + numsep=90) + + _template = getattr(configuration.templates, section) + + path_src = os.path.join( + _template.path.local, + 'files', + _template.conf.src) + + if env.debug: + utils.printvar('template.path.local', _template.path.local) + utils.printvar('template.src', _template.conf.src) + utils.printvar('template.dst', _template.conf.dst) + utils.printvar('path_src', path_src) + utils.printvar('path_exists', fab_exists(path_src)) + + return fab_exists(path_src) + + # TODO + # DONE 1. make sure the configuration request is legit for this branch + # DONE 2. check to see if conf directory exists + # DONE 3. check to see if conf template file exists + # 4a. (optional) add switch to check if conf file was built from template + # 4b. (optional) check to see if version is up to date + + +def create_dir(): + + # TODO + # 1. make sure the configuration request is legit for this branch + # 2. check to see if conf dir already exists + # 3. if not create it + pass + + +def create_dir_top(): + """Creates the top level conf directory if it does not exist""" + import utils + configuration = env.config + + if not exists_dir_top(): + cmd_mkdir = "mkdir %s" % configuration.paths.conf.remote + run(cmd_mkdir) + + else: + msg = "configuration directory already exists, aborting create." \ + "Continue? Y/n" + utils.prompt_continue(message=msg) + + +def create_file(): + pass diff --git a/modules/database.py b/modules/database.py new file mode 100644 index 0000000..86fdb32 --- /dev/null +++ b/modules/database.py @@ -0,0 +1,424 @@ +from fabric.api import env, task +# # from jinja2 import Environment +import os +# from utils import upload_template as utils_upload_template +# from utils import loggify, print_console +import utils + +from getpass import getpass +from fabric.operations import run + + +NOTE = """ + \n\n\n + NOTE: you MUST deactivate the gunicorn or supervisor service if you want + to make changes to the database, otherwise, the changes will not run and + you will NOT BE ABLE TO SYNC. 
SO TURN THE FUCKING THING OFF\n\n\n""" + + +def generate_sql(script_name): + """ + generates the sql files and puts them in + the build directory for this branch + """ + configuration = env.config + + if env.debug: + logger = utils.loggify('database', 'generate_sql') + + build_file = getattr(configuration.templates.database, script_name).dst + build_path = os.path.join( + configuration.templates.database.path.remote, + 'build', + build_file) + + template_file = getattr(configuration.templates.database, script_name).src + template_dir = os.path.join( + configuration.templates.database.path.local, + 'files') + + context = dict() + context['db_name'] = configuration.server.database.name + context['db_user'] = configuration.server.database.user + context['db_password'] = configuration.server.database.password + + if env.debug: + logger.debug("context = %s" % context) + logger.debug("build_path = %s" % build_path) + + logger.debug( + "db_name : %s " % configuration.server.database.name) + + logger.debug( + "db_user : %s " % configuration.server.database.user) + + logger.debug( + "db_password : %s " % configuration.server.database.password) + + # + # when we set debug=True, this function returns a string with the + # command as it would have been executed + + upload_msg = utils.upload_template( + filename=template_file, + destination=build_path, + context=context, + use_jinja=True, + use_sudo=False, + backup=False, + template_dir=template_dir, + debug=True) + + logger.debug(upload_msg) + + else: + utils.upload_template( + filename=template_file, + destination=build_path, + context=context, + use_jinja=True, + use_sudo=False, + backup=False, + template_dir=template_dir) + + # with open(build_path, "w") as output: + # output.write(rendered) + print NOTE + + +def execute_sql(script_name, add_dbname=True, is_admin=False): + + if env.debug: + logger = utils.loggify('database', 'execute_sql') + + configuration = env.config + + build_file = getattr(configuration.templates.database, script_name).dst + build_path = os.path.join( + configuration.templates.database.path.remote, + 'build', + build_file) + + if add_dbname is True: + db_name = configuration.server.database.name + else: + db_name = "postgres" + + port = configuration.server.database.port + host = configuration.server.database.host + + if is_admin: + user = configuration.server.database.admin.user + else: + user = configuration.server.database.user + + psql_command = "psql -h {host} -p {port} -U {user} " \ + " -f {sqlfile} {db_name}".format( + db_name=db_name, + host=host, + port=port, + user=user, + sqlfile=build_path,) + + if env.debug: + logger.debug("db_name = %s" % db_name) + logger.debug("run( %s ) " % psql_command) + else: + run_database_command(psql_command) + + print NOTE + + +@task +def generate(): + """ + helper function to upload all the scripts + """ + + generate_sql('init') + generate_sql('re_init') + generate_sql('drop_db') + generate_sql('drop_all') + + +@task +def clear_scripts(): + """ + clears all the sql scripts from scripts/conf/postgres/build/*.sql + + does this on the remote branch and not local. Because conf files for each + branch are specifically tied to the remote site. + + the "local" directory refers to my computer, technically from wherever + fabric is being run, but that is always development. 
+ """ + + configuration = env.config + + _template = getattr(configuration.templates, 'database') + + if env.debug: + cmd_lsdir = "ls %s" % \ + os.path.join(_template.path.remote, 'build', '*.sql') + + utils.printvar('cmd_lsdir', cmd_lsdir) + output = run(cmd_lsdir) + + outputlist = output.split('\r\n') + + for line in outputlist: + print line + + else: + cmd_rmfiles = "rm %s" % \ + os.path.join(_template.path.remote, 'build', '*.sql') + output = run(cmd_rmfiles) + + print output + + +@task +def init(): + """ + runs the database intialization script + """ + + # when initializing the database, you do NOT want + # to specify which database we are connecting to, because + # that database is what we are creating (we havent made it yet) + execute_sql('init', add_dbname=False, is_admin=True) + + +@task +def re_init(): + """ + re-initializes the database + + drop the database, recreate it, don't touch the original user + """ + execute_sql('re_init', add_dbname=False, is_admin=True) + + +@task +def drop_all(): + """ + drop the database and drop the user + """ + execute_sql('drop_all', add_dbname=False, is_admin=True) + + +@task +def drop_db(): + """ + drop only the database but ignore the user + """ + execute_sql('drop_db') + + +@task +def commandline(dbuser='default'): + """ + logs into command line of the postgres database of the branch + + dbuser - set to "default" but can also be set to "admin" + + the admin user is what it says, I have both of these in case I need to + switch between the master user for the entire postgres install or the owner + of the particular database + """ + + configuration = env.config + + if env.debug: + logger = utils.loggify('database', 'commandline') + + db_name = configuration.server.database.name + host = configuration.server.database.host + port = configuration.server.database.port + + if dbuser == 'admin': + user = configuration.server.database.admin.user + elif dbuser == 'default': + user = configuration.server.database.user + + if env.debug: + logger.debug("branch: %s" % configuration.project.branch) + logger.debug("host : %s" % host) + logger.debug("port : %s" % port) + logger.debug("user : %s" % user) + logger.debug("name : %s" % db_name) + + run_database_command("psql -h {host} -p {port} -U {user} {db_name}".format( + db_name=db_name, + host=host, + port=port, + user=user + )) + + +@task +def datadump(dbuser='default'): + """ + creates a dump of the database for backup and storage + + dbuser - set to "default" but can also be set to "admin" + + the admin user is what it says, I have both of these in case I need to + switch between the master user for the entire postgres install or the owner + of the particular database + """ + configuration = env.config + + db_name = configuration.server.database.name + port = configuration.server.database.port + host = configuration.server.database.host + + if dbuser == 'admin': + user = configuration.server.database.admin.user + elif dbuser == 'default': + user = configuration.server.database.user + + utils.print_console("dbuser = %s" % dbuser) + + dumpfilename = os.path.join( + configuration.paths.server.backups.database, + "test.sql") + + cmd_pg_dump = "pg_dump -h {host} -p {port} -U {user} {db_name} " \ + "-f {dumpfilename}".format( + db_name=db_name, + host=host, + port=port, + user=user, + dumpfilename=dumpfilename + ) + + run_database_command(cmd_pg_dump) + # utils.print_console("cmd_pg_dump : %s" % cmd_pg_dump) + + +def run_database_command(cmd_string): + psqlpass = getpass('Enter your database password:') + 
cmd_full = "PGPASSWORD={psqlpass} {cmd_string}".format( + psqlpass=psqlpass, + cmd_string=cmd_string, + ) + + run(cmd_full) + + +def get_template_path(script_name, script_type): + + configuration = env.config + + if script_type == 'build': + file_build = getattr(configuration.templates.database, script_name).dst + path = os.path.join( + configuration.templates.database.path.remote, + 'build', + file_build) + + elif script_type == 'template': + file_template = getattr( + configuration.templates.database, script_name).src + + path = os.path.join( + configuration.templates.database.path.remote, + 'files', + file_template) + else: + print "Error, you passed the variable %s, must pass" \ + "either 'build' or 'template'" % script_type + import sys + sys.exit() + + return path + + +@task +def edit(param='help'): + """ + calls up mvim on the built conf files + """ + + from maintenance import edit as maintenance_edit + + locations = { + 'build.init': { + 'path': get_template_path('init', 'build'), + 'desc': 'remote init conf file', + }, + + 'template.init': { + 'path': get_template_path('init', 'template'), + 'desc': 'remote version of init conf template', + }, + + 'build.re_init': { + 'path': get_template_path('re_init', 'build'), + 'desc': 'remote re_init conf file', + }, + + 'template.re_init': { + 'path': get_template_path('re_init', 'template'), + 'desc': 'remote version of re_init conf template', + }, + + 'build.drop_db': { + 'path': get_template_path('drop_db', 'build'), + 'desc': 'remote drop_db conf file', + }, + + 'template.drop_db': { + 'path': get_template_path('drop_db', 'template'), + 'desc': 'remote version of drop_db conf template', + }, + + 'build.drop_all': { + 'path': get_template_path('drop_all', 'build'), + 'desc': 'remote drop_all conf file', + }, + + 'template.drop_all': { + 'path': get_template_path('drop_all', 'template'), + 'desc': 'remote version of drop_all conf template', + }, + } + + if param in locations.keys(): + remote_path = locations[param]['path'] + maintenance_edit(remote_path=remote_path) + else: + print """ + "fab database.edit" automates editing files important to django whether + locally or remotely + + to use this you must pass one of the editable locations in as a + parameter + + currently editable locations are: + """ + + for k_loc in locations.keys(): + print "\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc']) + + return + + +@task +def test(): + configuration = env.config + + db_name = configuration.server.database.name + host = configuration.server.database.host + port = configuration.server.database.port + user = configuration.server.database.user + + cmd_string = "psql -h {host} -p {port} -U {user} {db_name}".format( + db_name=db_name, + host=host, + port=port, + user=user + ) + + run_database_command(cmd_string) diff --git a/modules/database.pyc b/modules/database.pyc new file mode 100644 index 0000000000000000000000000000000000000000..01b3bb112397ec2a7d333bec24c367d6d35a684d GIT binary patch literal 10785 zcmd^FOLN@Vb-w6sHrdUlnh!lj_B`&eJZ9UL$nn@w97W@J9(qj0rlvqCidk_wAOMnR zp@B9oS`;I7rBa$zR<6n>yDah-@&g<%U6oCi-Z-oLf$Wk+@_pyx(Udi=vCB-kDxv`1 z_v7H4?|$c8Hve;_d--qv^^vcNe@*=U9v<_yqf~(Zj*68UJ8Iui zQ(7S1Qu{4QH&wi##tUkH!M1CuxUI&EYJU-V6fdcutv*v~zoWvg3YS%|s6NB6Jry=o zu*9ktm}^C{oicktvfVPN7_M-zZB~%aV&KSY!MB zODb5G#ARu-p@NHLn=4XssT}q-%3yDB8KR2*2mb~Qq;g6xOrGQ|W<2wFcK|7>d=ams zFrRqF*aEi8riu9v{O)S}>*61O@7{lK`=&lkr}_sE_a5jV^!z+};^m>vk3)SlO(H+d z5}jt+OebOXBr;4g;fFdpl%6MElFMLus>j~rQ2WPTaugbkJVyz<+&l10cul9sW+xGo 
ztSH-^L~*Q>G}l<7_L4vb%bOVl6U)*pQM{oDG%eZBX?-H*1l-n*|KJal*UgL}92 zorfR&=_k8)k>>mUojcf^PeF9>tl{|xkNJ0$UmSWXsTK3Ree{~Y~geILQh&U zZ(HSk^+Kr`X2jI1oO-mTo-Zn>@vXXOU5eWCMH^DIsBUq0*wjBc&#_=pZ*;#5{VoeS z>Urx-LHEpL^G)p@?K0NS^y$s@*|0SfB6NC1Jzr441x^C(=h9bI4i#Th`DH3P-&ky$ zT8{D;YH7Bl>{_2M(Nvt|+wwtvld)c?MJK06&pv+K3sGIq5 ztPi6YIp~<4Omit7>nO3>izWxtC=PTGWuc#^*{ME+w&%x@(Fd8A_{X;05{55zI`1J< zPBX*|P#vXY7)i-|VvoVWF!9D=nV!Ok@@0;6Fr=NHq*-7%n4czj_%zq=>vxRlmxG~A zer(IuZ6^*!3)cE3T53}ibKF-Hms1q2bMAp_lO@SALX$^{mq(bM@KG!S8m|<_9FC%7 zzHmVLh|M7GpQpm&cB!;ku5TEQl2oI)5yMx!z%*CUm1I9g!j$E!*8cp*k zHZo(J9vwx8r;-MEd3m16?B%$|6_g{C1t!mlf2k4VqiiHGN~1(($-;4(hXRi#^JStq z^}SeH1(BaiE8-Zw0*4hjFd^TC(kUedvqb46PQ74QE=9UL$flu;e8-DTC`Clm0VO71 z*7#X8$?aEPL80CH6beZpQ+nJo7SKgfRA$IeGMAN&v1>W8G;j%Nt`xIpv9_@}8X%|S zWj;k}=OH#|cCs`{PIo3*I)YN0onQoTPIpX%uN6)re!lHVHrlbPH9LpiK^FNt<20DU zRCY>5-=3VhRP7Bs=Fjm`3!O&Cxzu>g={S9->2!(|OE>s~-)qiVqwQQQpLWM-pZV!D zR-0$b7F#|)Fb#oPDZqWR0`BZhh3`HB4XE(CDR>RwYn9;J`W4`w2j8-`2JQs`?lw^^ zUr~>jRQ9(-u+as9;64#K0c2r(Kyy{Ts)$`xc1_Iyt^nVk&K0h!(T19JRRS=qT1p8p zoq&l+0g@QDLoNag3@}Jn1%Tx?;BjHLT%1_hmV*NZM^^w`E-~E}0FIX}3MXBnDsn_p zU@ag~0TQVwyViCn((q~MPhq9NDwjGaHa&qQ9c7_0H&HSnP+mXQ&yG<#)7K~Z*(A-F zdZ?e#K4w~9Kh)3AjQTp$&kAgtxilQH1a_4Kd%Bw}CaM%&1n7!R7SyQ7IQ<8NvajjE z6=Vf71Ss5#%wFP)>gSRKxzdCd>k7bJW$H_OeVMP<`Fe*h&DYy}y~o!#@e-@`f^n4C zl~D{p`Y|AfGhrRWQtgL+Iv(S=aJNt~pjx^#FtJfD2!_FdAP4synV}u?>umKkzP`cN zHNM!OUVxpJqfyr7931#O&36aG{}7M4!HL9pOQWUv_MG)byU}<0FxN|sexrlm_J=0D z1~cw9eD>r&qOTYycL&}9gA}bp3OtJXhZHDLkpjm-3R=H6ps>xi@RGyyIE+Cn_2D!D z?@SX?MvG_K11FSITTs;4qnF=1N}^npDGP^HI>^!qK87f>q|x1yVL`20w30d(c$B$I zD)1S1$&aquU57s+Ihp`LR3!H=NI**DqewJJG%d1`hFXT&QrX{8pitO?j9OL-pa2F! zs7-YljMdUoNEZNv&JIo=rZBMZQW8|a39>Eq^buIR%o9haFM9)&w4=bCQco$Uyyv&~G1q&1UMt4Duboc@vhQ0?}(j2v5cqDG{Wh`T}x1=H)E*;uG)G!2Nm>#_41nCi%8>6xcRYnb-gt z1+XP6!V-GqkAq=sz_|=5*owpjRP7i>$;`ExxAuYzrHr$XHD^C^+b2LYs!*B*^VhsmoxHk~_ zyo)yS)$n<($5Eala53`8i=)35>Q`8S{lGkM-9O~(J9zC5C~lWX?S7T7ckrrZbbjFF zavlGSN3=+Feme;qV*o3G@H(Mc_AI7cM*#{82VRS1mm<0b8~3x2k%uV1rh_!uf_bD< z|5%!(S#$*VB-Z2~R&sxg$$lRs$94P^kCoiwZzs9y7(lkg!|RM-1nsKQdi(FA$M3`T zxsIP#+h3@*UonIWsI;ad9Dq?QD!KVKUiFsKgn$glEfqrWF*-_8pf=0vo!>&o;%uj( z{GXTy=lnN3_9HI{{|LfEZyM)54nIX$B!~ToZN7>U?D&gBni$wA4|kr{RTlB4)?BtD z=e2OE$M7JX>c5JR3;6=3>#5PonfMNzlCwb~ z_$LczTEJClpDBRX0w%)G%V`kP!kYTxtAM7!K|wsIiVe}RX^AHEp$V&$*Y`^L0DssJ zD^s4Vq}`flhE5Mf14_P7t4L8%1ZS)=9Z*s9bp$DiA9}O+N}HljzJ~x~Y*L)VRRbaI zoAsG5c~Li>%qMa}=i15~MWu=Nu}uw=Ya2ws*){xtToN(@q6J9vfY zC^?rl=H8=>shn0$3>Qh3KAMdONtT}4Tvo&=u_dXs;U?)z%Krs?#Is(ZI9}ERqBpoJ zUw)382qU?O8tdn2ro_8@n<;v2wzKSrBZvtWQ4Jv;c6TnF8}|?J5~?CKf#`&F$;3HZ zVw-wQ4p*bdxeyWe6U^}oJcbJ(_h9S<&KBIQo^#2e^VM;#IEa|w=OW6i>m}gA1!1a(wx1ho@P=TJY z`A&%nmKJ~{>tH&blxR?$6SO27hLoeyAaLs)g$2#{|B42+;Zt#}0oX_jl?f5?oZgoC z0GY|rP~`gSBR-WiOX!Ck%J8v``n+UL5)qznwL1_C$9zQ)M>ADh_o*ov;sDl_)~{(`9Q$RsdRz8@WqWu@clJ1(#_>5zUo_(yrOtCv z0e%^V?lgU_kX(+6DU$se5|AvpoZxGLpbrXBB4PvRrFS@=ZY7g+>& zo%)V284-Z-kx144@l`{0<;w8$hQfEXQR|jU-Xi70XqOR?0jMHUAbbxD4dDC&djtEj zQMcVX-do-$xZJC`hu7^aOA*K5tt-DQc??K50sxVB5Qm@)*YtP_z95wm>N340ahokt zm928XEqgglqjmB8h5=h@qP?^^wKqHA(=y~hg%A@hZdLNrNoeB`{DQ=yp)5yYzycal zR<6r^gjVOQR5--s+Pw;j$fezl%C=#{J*T`b=^@<&1+bSVy+#xI*K@iJ8$Cx)NYEmvO)P&Q0Xui1B^8NT!`7h(&_XD|4TSq}-rm4zd!w%HRJFKaVC`~MivVHW zcg5CTn6It-g4VO|YE`>7-*?^CZp_!#eR1Q#zL%<6+y~e5Ua_^W&DYj_uiDx#sT;II zRah7F`*B-kH+aE~!mHT)RqI9tUlwyyj+6Y63SLK`^Xc`v)5kx0i;O6EL#!9}gYGSK zUsP8iLIg$cIgf%j)gGV0Tlffuo~96O#`p|^)O!3R8kg}&s$M=9f0UNi6!6jPl}H#w zxrAB73`I{3?=*_YBjY9Y2%-i-XnX-E(d=!uaQ}>p`AXdlAx6W%IoyMKgi`Ab?Xb+) z{)|=_-k22A7D4oSX6a)mEb7cw-F$AkGd;^m3>~^Mvt2ZCZ@n~W5v_maq($B5OnRng 
zInUWiizY4u6NWL)Vy9A0*QyQ^Gc;khT2>ljMA2bxuVuCETg^*@FEiU^8$l^MgaCl~ zZ?qDH;Jy4NV!@T?woRSb)ZR2taruNm8S@MfOKeo*iWWW@{xQ^cp3k60n=7R z{xr+ryJF^7^hc!qX4O*uh@tcB-JAN>d+*{BQ9t|UY*(meyVjAy**b=xPV 0)) + + else: + + # if we are working on the development branch + # either SYNCING TO OR FROM, then just exit at this point + + if configuration.project.branch == "development": + print """ + ------------------------------------ + NOTE: this is a hack for the function deploy.sync() + + When I originally set up sync and bootstrap, I did not consider + what would happen if I wanted to bootstrap the branch I was running + on. + + That is, if I am running commands on the development branch and + then I ask development to bootstrap or sync, I get a probem + whereby I could potentially end up overwriting my own directory + which would be pointless. + + So I decided that as a temporary measure if I do any kind of + bootstrapping or anything that calls sync and I am doing it on the + development branch server, then the sync will fail and print out + this message. + + Instead of just keeping it in a comment, I thought it better to + print it out so that no matter what I should always be aware of + the issue and maybe later I can fix it. + + Right after this message gets printed, the sync function is told to + "return" without any arguments + ------------------------------------\n + """ + + # + # exit the function without any arguments + + return + + run(command_mkdir_remote) + + rsync_project(remote_dir=remote_dir, local_dir=local_dir, + exclude=excludeitems) + + if full: + from pip import install as pip_install + pip_install('--all') + + overrides() + + # + # + # NOTE: if using full synch + + if full: + from nginx import upload as upload_nginx + from supervisor import upload as upload_supervisor + from django import generate as django_generate + + upload_supervisor() + django_generate('gunicorn', True) + upload_nginx() + + from django import collectstatic as django_collectstatic + django_collectstatic() + + +@task +def test(): + configuration = env.config + + projectpath = configuration.paths.project.root + local_dir = configuration.paths.project.local + "/" + + print hasattr(configuration, 'docker') + print "project path : %s" % projectpath + print "local dir : %s" % local_dir + + +@task +def remove(full=True): + full = booleanize(full) + + configuration = env.config + + if env.debug: + logger = loggify('deploy', 'remove') + + import sys + if env.branch == "development" and not env.debug and full: + print """ + ------------------------------ + WARNING: + + You were just about to delete the development branch + from your computer! + + THIS IS A BAD IDEA BECAUSE YOU MIGHT BE DOING IT BY ACCIDENT + + Exiting NOW. 
+ -------------------------------- + """ + sys.exit() + + # + # prepare to remove remote directory + + remote_dir = "{prj_name}/{prj_branch}".format( + prj_name=configuration.project.name, + prj_branch=configuration.project.branch) + + remote_dir_parent = os.path.join( + configuration.paths.project.root, + '..') + + command_rm_project = "rm -Rf {projectpath}".format( + projectpath=configuration.paths.project.root) + + command_rm_project_parent = "rm -Rf {projectparent}".format( + projectparent=remote_dir_parent) + + command_rm_rootpath = "rm -Rf {rootpath}".format( + rootpath=configuration.paths.server.root) + + command_rm_virtualenv = "rmvirtualenv {virtualenv_name}".format( + virtualenv_name=configuration.virtualenv.name) + + if env.debug: + + logger.debug("project.name : %s" % configuration.project.name) + + logger.debug("project.branch : %s" + % configuration.project.branch) + + logger.debug("paths.project : %s" + % configuration.paths.project.root) + + logger.debug("remote_dir : %s" % remote_dir) + logger.debug("remote_dir parent : %s" % remote_dir_parent) + + logger.debug("rootpath : %s" + % configuration.paths.server.root) + + logger.debug("--- removal commands ---") + logger.debug("remove project dir : %s" % command_rm_project) + + logger.debug("remove parent dir : %s" % command_rm_project_parent) + + logger.debug("rootpath : %s" % command_rm_rootpath) + + logger.debug("virtualenv : %s" % command_rm_virtualenv) + + value = run("ls %s -l | wc -l" % remote_dir_parent) + logger.debug("value : %s" % value) + logger.debug("len value : %s" % len(value)) + logger.debug("type value : %s" % type(value)) + + logger.debug("value == 2 : %s" % (value == "2")) + + else: + + # + # NOTE: I have to put the imports for these functions here, + # because otherwise they interfere with this modules version + # of "remove" + + from nginx import remove as nginx_remove + from supervisor import remove as supervisor_remove + + nginx_remove() + supervisor_remove() + + db_drop_all() + + # + # check to see if the parent directory contains anything else + + # remote_ls_value = run("ls %s -l | wc -l" % remote_dir_parent) + + # if remote_ls_value == "2": + # run(command_rm_project_parent) + # else: + # run(command_rm_project) + + # + # only remove the project diretory if this is a full + # removal. + + if full: + run(command_rm_project) + + run(command_rm_rootpath) + + with virtualenv_source(): + run(command_rm_virtualenv) + + +def overrides(): + """ + this is code for files that have no standard category + + for example, I had to upload and override a static js file to modified with + the correct value for the deployment. + + I'm keeping this code here so it will be a bit out of the way of the rest. 
+ """ + + configuration = env.config + + # + # load the module where the custom override code is located + + import sys + sys.path.append(configuration.paths.overrides.modules) + import override + + # + # call the custom deploy code + + override.deploy(env) diff --git a/modules/deploy.pyc b/modules/deploy.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4004a346a945936e51dbc289a37994dd526c15eb GIT binary patch literal 10477 zcmcIq+j1PqajovbZH9v@Nq~12sig?6aFN#$zA#M56v-uFhd`^r6}50E;GXFQ&_GZ3 zY*!Bfm>4_4TtE8{{0DyY7dXP<7k!PM9DcCR$?D4txVxbT6Tmd8GOMz(vMzZtdm6u= zZ7u)f@7{J*@iT${e~FLzA3Os5IVx7F=crvr^<1^P@NL zDb<@+yVI&SqjqO3Z$fxYwcE7yNfozL?~>YW+xnD>XH{=b?atZywAAO-?t-n)sCZHB zE{ZlNFR9%n6}l>Ds&AG00L?B-wN+NHuo_-f;j#)Ysc&6rvm$NUWt(e7n_yOb>qvv^ zDwrd8_l62))XwTW<{$kpepX$S@&+DZ@-ClX#W%;RPD#xM$tt<|A|CIeEFbtWTJD*2 zkafen1!^ZvS#sPT0 z5jd^R8>O6YQuVeNZRYz&D_TH@@6(49#r#&B(X|~k)gj%NB-~6|F zo7_~V*KDH&X*5T+aPK&8vBh^*FTsA>u!d6mY1Z>|^G7JQ9<0XUyD(1tUiip7)aLk?7WH#89Om2$WovWuJB93Z{O<9fZ-zOyN!hrhnkV1cX}HO8F??|3CkN@; z!61pcX_l-d2T^jmW(NH*dl#8B8xFr$cdP7XFAO4|lFnclDxdp#)J4BG)xin5-)cTE zt>%3>S00GmXG&qu??(q>qaf@I4(juf9OPVpj&Gvwv%1yu7r*%GrJTYzGP%g``~5Hp zJRUR=#1$~YG0;EEq%}<m~E6czk&*8GfR`?Y_p%GM`1TNo7UW$ChGO$upe>ZHu~AoX0hU% z`+g^jx|_W;7{no(+bz~V6LrRS+7%39e#$}JrrUIy&XhBY(iBRU-CNFO)K`j{r8S*d z=i1Dy(`6q{kE`$uAJf6icq3r9;`qZHIQ}m00Tvo6`>Dd5?S1h9bXW1NnB@HcTjN4- zVBQZ(IoPi9uPOg{XywW{YOCT3xWtR%XpX0jTvX@P5e^})7+VF`Ql?4@s_2d@W3<7d z8sZ{=#&9T0s&`op;b017f9~SWf!2W)QEDK^zu;JDmUG2k3B0)vY)Qp8RPUx5wyOFq zsb8}7%PO8%z1wOCkdWU|!4 zZCikaTv7IL%OwJokefX7`*OGJ4@$?+YcP(IWBw+~5TXm&MDmB{#HQ*A&wf32Xv_TswS0~h}YNvuGkN1{B#f|AujUJ zTmw^#2eG>Ja@t{#`wU18D7TpblD{<7qJPR?Q2J=S@uO&IEEUk7RT zILtJhOYTQd%4k0e^}vLI-2}ompP^@=GZn2n!9^AxN$*E+2|pek+)n}xcbm~>BYld2 z9-(QHeoPkyIwAW|jSb!RjWKix*f2q7qp}^{swG89l&}5>RWVuM=RTZsD8AfdM_CO| zJSkDKIZL`?&o5!k(7VePL^6U<`cZ!`3%?meS=b8`YCT*$9CE{8U)jh+<`~(!l6^MRt)!P^f4ufU5 z4g4VzmdF@@$gyD+U{25=x?$)9fCM*+a0^inf+@feTP8?y33*UOL<4S11=DI}2#<>x z2;Ia116Kj|1P=*yDE*Sk-Z*f(aN4%I@2cgOui#EWsfHNxv%e z>9V>kI;@ZwAWnil*98sSpx^(XCNSpBQj(y#4+xX2F1JX>R-!s33-$_btK*5HP2!iz z0z}+V$MC(W6Wmph3Ll9Z;v87gVz9B%;jB4&!@+W7|1Ln{9{hc906P{Iwn{& ziAJU6LzgS{BNbm(y(<;uK>e!J3&`=AikDRnAtNBixdjn~7!Onq96>-+de6gM~pank@Kyue3Tc1mE5uz*Jp z07-OI_TrsLiHsc|m`5gJ{k@C`Lx%g&=}?cEb+y&BGu=ag{p5XJ9|%Q})(pY9Z~5OZ+#iSj%D^`&|D6e#Sa%zkc)LvCa=8qw(isV1N?jGC&IT{s8V7 zF>aN?z#d$E)EX1;_ApHJmQJ$>2>~Rc&VXlfJ?LxhXHDQyVb(RY3!*?L0Hc682D8q_ zWSt!Pxjsn;aR5CSu@e_<=@UQ6!+=n!5+&*t+k_2d6C^+?WAf5uV_exc93V1lR<7UU z)?qKG)9XQ37(EfuL2YDR3f$7xU62)Z4+{%+LfuaRX@CXI z1vBiV7%d39SOA!nHh}?sf#PZAXJ^3mz8L_0iur>S%Xp@bBPf&ZkBwKYN1sN-Y|#*V z31yKAj2vT6mm2}b>H%ev9p#0k$Ha}=v9P+0K-Rb@c4b7`NfgI=-;ZM1V3Hcu=^!5y zCVRR_v0a$|R$}td=WO)Rz~uTkM2f{~1sLngZonFF+4945aBzrClj{y*4gmB#9UI0D z&iM>-ZBjdNplKun*;=;rmNpQBUG2vw{+WR!?Vk|cUoa=nTx84uY}mTzpP?aRql}xw zPXO#sp~!{FcoFsU_eD3J<6_R-alCD@1;RiKXjh^-)M%QgQ0l^j?q^6KXUTni5^?en zjm7t~gMr*QV{`ulE}Ql711K1{xCgE8qtl469^5K)JL~Hx2}>ZgEeVijL{t0AZW_nT zlt~hCl~Vp6-PbDNw?}uF(>2*-8v@uQ&!SH0hP+p7gpabJjU*STNa3ag*wf32i?#dN z1}zWShd5fo7x5IO8!#S2ci=WAgcC_TN(I4?x5FNP#>Z!T{5c<+e297el%+R#tP-1g z_wkT{!&Br7j8#^q^-Hq1`FPDX^f<-!SL86Q6Z8ItT>4lBV#N}9FHrZs|6u` z;|l`%rwaxcKK}T}hdm#pU$@|`oF zU&ZIM>7^T!lM{gXOMv`S2mxl@=_&qaFfff^VA@@n_&;RD=P5h=8y1mV<~KaNNRSf$T1k%krE;Qni3^UmTmVKRe2X>Lrhs;3#&MYY>7`A$;qhz*37>ID3!z# z)6(-uMb9B(>k_FF4l`;NQ~&41*a{iWk5kSPWWl;N_K#uvwcO&QBILYwFz|z14p@t2&XR!ni(S? 
zP;6R4zN7+#hjw73ev2IYZH3rsROTI(V}`JaGe*A4OX*`WqCy1E_qYp0ND?yuMR;y38Z>MNM44}~6Myv^ zJa!~6w(V)`>?-Jb7fPDf`xS^rWAj)3^@n|H)$Hw4Z+mO|<>T7Tew_~V2_U6~HhzaL z9MCxk<1i12qsHK>bxW1F{VeV2GrW~12I>!hDel#h-@Mw|(fE6+fARFW-g^E5g%{7B zezo&LfBpI^{mZSFuik*7pTCAidJE;R_0wn1&~W=rE#<|j1+v?(-)>an{_t$q*QXJ0OK$PrpF-9E>%WW>VJbiV^F2L}}Rd0JslD=qEIs)q> z*t-r!D;^4!@DX+&U^KCB2x@jQ$EgeP2sHqkFaA^_fu;1vO)7SN)(=1CS0mb< zPoC&6tN|pS@`Z(_gdjCs9C5ya@95M8zZtwm{tYV=c!W3t@-EsF=lqs*?%33Lx63w1 zh?6d)xV?2S1rmC@Wix7SF(DNZOlY$NoxSp_AzlNVRXSAxh2r-;=2E?VK8OS*)m!g} zJ-afv#A;}m^~&TFZn^Iwmz^$;bIY@m%i9neX752*y~vVu7ZXdCL~_qa{Bw(lUUlxd zpng((;)xxJO%5@UD%0$?oxANxXVyYYK+Cqnm+4FRl>Z4YS5TkBXVG0Oa?P_W%kP4- z=&ayX`;Ddrm&jW$@Krps%UzDeb>cR7hL0f-%u9&L)FhI~f_vzJfmeVX!JQaSYO$+mOdX-}^5%RiEfETUGwAKBTVtv%=kBgI#D!zvoEh6 zk-^z}$9()PA2WP3`B>wFXT{^?Y~SJCV@U$ipRmMu&GY$?!$r+&+eqnCd=rK${)Jtz lpK?@[\\]_{}~')) + for i in range(50)]) + + return SECRET_KEY + + +@task +def test(args=None): + local("ls") + + with virtualenv(): + local("ls") + + +@task +def manage(args=None): + configuration = env.config + + # changes the working directory to the djangoroot + from fabric.context_managers import cd + + with virtualenv(): + with cd(configuration.paths.django.root): + output = fabric_ops.run( + "{djangoroot}/manage.py {args} --pythonpath='{djangoroot}' " + "--settings={djangosettings}".format( + djangoroot=configuration.paths.django.root, + args=args, + djangosettings=configuration.imports.settings, + ), + # MAKE SURE THIS IS ALWAYS HERE! + shell='/bin/bash' + ) + + # fabric.run has the ability to give me back the output + return output + + # NOTE: + # there was a major problem using fabric commands of "local" or "prefix" + # to work on remote machines, the problem was that for whatever cracked + # up reason, fabric would assume I'm using a /bin/sh shell, and /bin/sh + # CANNOT run all the commends that /bin/bash can. SO YOU MUST SPECIFY + # shell='/bin/bash' in all uses of local! + + +@task +def admin(args="help"): + configuration = env.config + + from fabric.context_managers import cd + + with virtualenv(): + with cd(configuration.paths.django.root): + fabric_ops.run( + "django-admin {args} --pythonpath='{djangoroot}' " + "--settings={djangosettings}".format( + djangoroot=configuration.paths.django.root, + args=args, + djangosettings=configuration.imports.settings, + ), + # MAKE SURE THIS IS ALWAYS HERE! 
+ shell='/bin/bash' + ) + + +@task +def collectstatic(): + """ + makes sure the static media directories exist + """ + + configuration = env.config + + exists(configuration.paths.server.media.static) + exists(configuration.paths.server.media.dynamic) + + manage("collectstatic --noinput") + + +@task +def run(args=None): + configuration = env.config + + command = "runserver {host}:{port}".format( + host=configuration.server.django.host, + port=configuration.server.django.port) + + output = manage(command) + return output + + +@task +def startapp(args): + """ + wrapper for the django.startapp + + takes name of app and creates in in code/apps + + args - name of app + """ + configuration = env.config + + destination = os.path.join(configuration.paths.django.apps, args) + + cmd_mkdir = "mkdir {destination}".format( + destination=destination) + + command = "startapp {appname} {destination}".format( + appname=args, + destination=destination) + + fabric_ops.run(cmd_mkdir) + + manage(command) + # with lcd(configuration.paths.django.apps): + # manage(command) + + +@task +def installed_apps(): + """ + List the currently installed apps in the settings.py file for this project + """ + configuration = env.config + + printecho = "print '\\n'" + printcommand = "print '\\n'.join([ item for item" \ + " in {settings}.INSTALLED_APPS])".format( + settings=configuration.imports.settings) + + command = "python -c \"import {settings}; {printecho};" \ + " {printcommand}; {printecho}\"".format( + settings=configuration.imports.settings, + printecho=printecho, printcommand=printcommand) + + with lcd(configuration.paths.django.root): + local(command) + + +@task +def src(): + """ + locate the django source files in the site-packages directory + """ + + command = """ + python -c " + import sys + sys.path = sys.path[1:] + import django + print(django.__path__)" + """ + + command = """ + python -c "import sys; sys.path=sys.path[1:];""" \ + """ import django; print(django.__path__)[0]" + """ + with virtualenv(): + local(command) + + +@task +def create_project(): + configuration = env.config + + logger = loggify("django", "create_project") + + project_path = configuration.paths.django.root + project_name = configuration.project.name + + import os + full_project_path = os.path.join(project_path, project_name) + + django_cmd = \ + "django-admin startproject {project_name} {project_path}".format( + project_name=configuration.project.name, + project_path=project_path) + + manage_path = "%s/manage.py" % project_path + + logger.debug("django_root : %s" % configuration.paths.django.root) + logger.debug("project_path : %s" % project_path) + + # I accidentally deleted the code directory, this checks to see if the + # project path exists, if not, create it. 
+ if not exists(project_path): + fabric_ops.run("mkdir -p %s" % project_path) + + if exists(manage_path): + fabric_ops.run("rm %s" % manage_path) + + if exists(full_project_path): + # backup whatever is there + fabric_ops.run("mv {project_path}/{project_name}" + " {project_path}/{project_name}.old".format( + project_name=project_name, + project_path=project_path)) + + with virtualenv(): + fabric_ops.run(django_cmd) + + django_path = "{project_path}/{project_name}".format( + project_name=configuration.project.name, project_path=project_path) + + fabric_ops.run("mkdir %s/_settings" % django_path) + fabric_ops.run("touch %s/_settings/__init__.py" % django_path) + + generate('settings', True) + generate('local', True) + generate('wsgi', True) + + +def generate_scripts(template_name, make_copy=False): + """ + this is a function meant to generate django settings files + + There are a number of different types of django settings files so instead + of generating all of them at the same time (sometimes I want the local, + sometimes I want the main, etc), I decided to create a function that can + look up the type of scripts I want and generate those. + + The function is meant to be wrapped up in another funciton that will call + the type of script I want + """ + configuration = env.config + + # make sure to booleanize ALL boolean values! + make_copy = booleanize(make_copy) + + if env.debug: + logger = loggify("django", "generate_scripts") + + project_name = configuration.project.name + project_branch = configuration.project.branch + project_path = configuration.paths.django.root + + secret_key = generate_secret_key() + + files_name = getattr(configuration.templates.django, template_name).src + build_name = getattr(configuration.templates.django, template_name).dst + + build_path = generate_template_build_path('django', template_name) + files_path = generate_template_files_path('django') + + context = dict() + context['project_name'] = project_name + context['project_branch'] = project_branch + context['secret_key'] = secret_key + + copy_path = "{project_path}/{project_name}".format( + project_path=project_path, + project_name=project_name) + + if template_name == 'local': + copy_path = "{project_path}/{project_name}/_settings".format( + project_path=project_path, + project_name=project_name) + build_name = "%s.py" % project_branch + + copy_full_path = "{copy_path}/{build_name}".format( + copy_path=copy_path, build_name=build_name) + + copy_cmd = "cp {build_path} {copy_full_path}".format( + build_path=build_path, copy_full_path=copy_full_path) + + backup_cmd = "cp {copy_full_path} " \ + "{copy_full_path}.bak".format(copy_full_path=copy_full_path) + + from utils import upload_template as utils_upload_template + + if env.debug: + logger.debug("template_name : %s" % template_name) + logger.debug("project_branch : %s" % project_branch) + logger.debug("project_name : %s" % project_name) + logger.debug("build_path : %s" % build_path) + logger.debug("files_path : %s" % files_path) + logger.debug("files_name : %s" % files_name) + + logger.debug("copy_path : %s" % copy_path) + logger.debug("copy_full_path : %s" % copy_full_path) + logger.debug("build_name : %s" % build_name) + + upload_msg = utils_upload_template( + filename=files_name, destination=build_path, context=context, + use_jinja=True, use_sudo=False, backup=True, + template_dir=files_path, debug=True) + + logger.debug("upload_msg : %s" % upload_msg) + logger.debug("make_copy : %s" % make_copy) + logger.debug("copy_cmd : %s" % copy_cmd) + 
logger.debug("backup_cmd : %s" % backup_cmd) + + else: + + utils_upload_template( + filename=files_name, destination=build_path, context=context, + use_jinja=True, use_sudo=False, backup=True, + template_dir=files_path, debug=False) + + if make_copy: + if exists(copy_full_path): + fabric_ops.run(backup_cmd) + + fabric_ops.run(copy_cmd) + + print "\n\n------------------------------" + print "project_name : %s" % project_name + print "project_branch : %s" % project_branch + print "project_path : %s" % project_path + print "template_name : %s" % template_name + print "build_path : %s" % build_path + print "files_path : %s" % files_path + print "files_name : %s" % files_name + + print "copy_path : %s" % copy_path + print "copy_full_path : %s" % copy_full_path + print "build_name : %s" % build_name + + upload_msg = utils_upload_template( + filename=files_name, destination=build_path, context=context, + use_jinja=True, use_sudo=False, backup=True, + template_dir=files_path, debug=True) + + print "upload_msg : %s" % upload_msg + print "make_copy : %s" % make_copy + print "copy_cmd : %s" % copy_cmd + print "backup_cmd : %s" % backup_cmd + print "------------------------------\n\n" + + +@task +def generate(script, make_copy=False): + + make_copy = booleanize(make_copy) + + if script not in SCRIPT_LIST: + err_msg = "You asked to generate a script that isn't available" \ + "possible script values available: %s" % SCRIPT_LIST + import sys + sys.exit(err_msg) + + print "django:generate make_copy : %s\n" % make_copy + if env.debug: + print "django:generate script : %s" % script + print "django:generate make_copy : %s\n" % make_copy + else: + pass + # env.debug does not block the rest of the commands because this + # function acts primarily as a wrapper for the following cmomands, in + # those fucntion env.debug will be used to decide if anything should + # happen or not + + if script == 'gunicorn': + generate_gunicorn(make_link=make_copy) + else: + generate_scripts(script, make_copy) + + +def generate_gunicorn(make_link=True): + """ + create the gunicorn configuration script + put it in the build folder and link it to the scripts directory + """ + configuration = env.config + + make_link = booleanize(make_link) + + if env.debug: + logger = loggify("django", "generate_gunicorn") + + files_path = os.path.join( + configuration.templates.gunicorn.path.local, + 'files') + + build_path = os.path.join( + configuration.templates.gunicorn.path.dest, + 'build', + configuration.templates.gunicorn.conf.dst) + + link_path = os.path.join( + configuration.paths.server.scripts, + configuration.templates.gunicorn.conf.dst + ) + + context = dict() + + context['host'] = configuration.server.django.host + context['port'] = configuration.server.django.port + + context['user'] = configuration.project.user + context['group'] = configuration.project.group + + context['settings_module'] = configuration.imports.settings + + context['logging_access'] = configuration.logging.gunicorn.access + context['logging_error'] = configuration.logging.gunicorn.error + + msg_link_gunicorn = "ln -sf {gunicorn_root} {link_gunicorn}".format( + gunicorn_root=build_path, + link_gunicorn=link_path) + print_run(msg_link_gunicorn) + + if env.debug: + logger.debug("\n") + logger.debug("--- in gunicorn ---\n") + for key in context.keys(): + logger.debug("%s\t: %s" % (key, context[key])) + + logger.debug('build_path\t: %s' % build_path) + + logger.debug('files_path\t: %s' % files_path) + + logger.debug('\n%s' % print_run(msg_link_gunicorn)) + + else: + 
from fabric.contrib.files import upload_template + + upload_template( + filename=configuration.templates.gunicorn.conf.src, + destination=build_path, + context=context, + use_jinja=True, + backup=True, + template_dir=files_path) + + if make_link: + print "\nlinking the generating gunicorn file in conf to " \ + "the server diretory\n" + fabric_ops.run(msg_link_gunicorn) + else: + print "\nNOTE: not linking the generated gunicorn file" \ + "to the server directory\n" + + +@task +def edit(param='help'): + """ + calls up mvim on the gunicorn conf file + """ + + from maintenance import edit as maintenance_edit + + configuration = env.config + + link_path = os.path.join( + configuration.paths.server.scripts, + configuration.templates.gunicorn.conf.dst + ) + + build_path = os.path.join( + configuration.templates.gunicorn.path.dest, + 'build', + configuration.templates.gunicorn.conf.dst) + + project_branch = configuration.project.branch + project_path = configuration.paths.django.root + + project_settings_dir = configuration.project.django.settings_folder + + django_path = "{project_path}/{project_settings_dir}".format( + project_path=project_path, + project_settings_dir=project_settings_dir + ) + + settings_path = "{django_path}/settings.py".format( + django_path=django_path) + + settings_local_path = "{django_path}/_settings/{project_branch}.py".format( + django_path=django_path, + project_branch=project_branch) + + # locations = ['gunicorn', 'gunicorn_link', 'gunicorn_build', + # 'settings', 'local'] + locations = { + 'gunicorn': { + 'path': link_path, + 'desc': 'gunicorn.conf file', + }, + + 'gunicorn_build': { + 'path': build_path, + 'desc': "gunicorn.conf file in scripts/conf/gunicorn/build" + }, + + 'settings': { + 'path': settings_path, + 'desc': 'main settings file for django project', + }, + + 'local': { + 'path': settings_local_path, + 'desc': 'local settings file for django project', + } + } + + if param in locations.keys(): + remote_path = locations[param]['path'] + maintenance_edit(remote_path=remote_path) + else: + # if param == 'help': + + print """ + "fab django.edit" automates editing files important to django whether + locally or remotely + + to use this you must pass one of the editable locations in as a + parameter + + currently editable locations are: + """ + + for k_loc in locations.keys(): + print "\t{0: <20} - {1}".format(k_loc, locations[k_loc]['desc']) + + return + + +@task +def clearmigrations(appname="help"): + if appname == "help": + print """ + "fab django.clearmigration:{appname}" clears out all migrations for the + specified appname + + if no appname is given, or if you pass the "help" appnameter in place + of an appname then this help message will appear. + + Note: if your appname is actually "help" you might want to go into this + function and change it up a bit! 
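
As a quick reference for the tasks defined above, these are the kinds of command-line invocations the docstrings imply. The "django" namespace depends on how fabfile.py imports this module, and the app name "blog" is purely illustrative, neither is taken from this patch:

# Illustrative fab invocations (names assumed, adjust to your fabfile namespace):
#   fab django.generate:gunicorn,make_copy=True   # render the gunicorn conf and copy/link it
#   fab django.edit:settings                      # open the project settings.py over sftp in (m)vim
#   fab django.edit                               # no/unknown param prints the list of editable locations
#   fab django.clearmigrations:blog               # rebuild migrations for a hypothetical "blog" app
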
+ """ + return + + configuration = env.config + + import os + + app_path = os.path.join( + configuration.paths.django.apps, + appname) + + path_migrations = os.path.join( + app_path, + 'migrations') + + path_migrations_old = os.path.join( + app_path, + 'migrations.old') + + import fabric + + if fabric.contrib.files.exists(path_migrations): + + # get rid of any old migration backups + if fabric.contrib.files.exists(path_migrations_old): + cmd_rm_migration_old = "rm -Rf %s" % path_migrations_old + fabric.operations.run(cmd_rm_migration_old) + + # move the original migrations folder to migrations.old + cmd_migration = "mv %s %s" % ( + path_migrations, path_migrations_old) + + fabric.operations.run(cmd_migration) + + manage("makemigrations --empty %s" % appname) + manage("makemigrations") + manage("migrate --fake %s 0002" % appname) + + +@task +def makemigrations_empty(param="help"): + if param == "help": + print "print this help message" + return + + manage("makemigrations --empty %s" % param) + + +@task +def create_fixtures(param=None): + """ + param is the appname for this fixture + """ + configuration = env.config + + if param == "help": + print "print this help message" + return + + if param is None: + appname = None + else: + appname = param + + print "debug - appname: %s" % appname + + from fabric.api import * + + path_root = configuration.paths.project.root + path_data = os.path.join(path_root, 'extras', 'data', 'fixtures') + path_backups = os.path.join(path_root, 'extras', 'backups', 'fixtures') + + if appname is not None: + path_data = os.path.join(path_data, appname) + path_backups = os.path.join(path_backups, appname) + path_fixture = os.path.join(path_backups, "%s.json" % appname) + else: + path_fixture = os.path.join(path_backups, "all.json") + + from utils import ensure_dir + + ensure_dir(path_data) + ensure_dir(path_backups) + + output = manage('dumpdata %s --indent 2' % appname) + + f = open(path_fixture, 'w') + f.write(output) diff --git a/modules/django.pyc b/modules/django.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79b56e847956ba04b7f730f4a8bd57e4aaf037a6 GIT binary patch literal 16311 zcmcIrS#KOycD~ikty|4iBCW-i%Cuy)Y>|}hiIb6c%Nolw2JI=z(U`WTQ|u~|)$DER zR!e5co**LwlORA0BtQ@ZNb;0q9`gs1w;)eJUh)s-A@dsKC3#A|@0?niv@ByVAj!VG z`*QBN-&yYM@_$cPm;T`2?ln#NGlu_f<75BTF($yDW7@`a9JA?|PRVSROs8x%%f?|@ z$+X8zXWVR#Yr1UO6Q(n1HYYVbX4+GxRWX}Y>1y1xPnk~5Y}T}R!nCJNXU1&Kn9i)( zoHd;}vpJ__lTtQsHs>`xCFupTxsZ2#+H9USLB#~<{ger6W@pT7o-x6+`O=upMH7}y zFlK@o^QB`RVF6ENIkQ>LS;?6*!F-l;PI4C51Dl-Bnw-umT#%eIS%s%1xtJxNk>paA zyeP@1vg9Qbx+XZwt~Q@#5tHY#o|jGdoC(fL>1An%iCxG_uVgt-XF1PjInQJ{S0x8a zzL@3INr8>}C2VN)fB31F5la|^-3Q4y{_mc|Z{W$6-jd;+NzP5V3cIbDb-w(C*)2Oi%cEi|D z!bTEy`fWzG22nd`^!;S_Nwv14c4(zmGKFqzm?Tkm$9ghxF5{uyiKJ*}(2bhCxGSv9 z64t-PHoFMG*JNK~hI!mELu8gst7LYBN}wH>;Jy1d%&=q*jcFl`aNGprrZr&>9n+eW zaLNP|=24loe_S#@d6AJ}*&LQZdy>YkBB;9f*$vzMFm{ukn-RbbT7Gw@=i0Cthl#rv z?n_14uJr`%O!P9lAjuDg{I%y1efGJ_SL)YZc=7tm>e|iMUVr1wx8DBj^ZSj1;ZN(_ zfP~d!zZ>*A6153-WJmV7)x+L!>ruQDCN;!1_AR!-E2=kldr>o#mfO9!<0r{9s`<7V zMU6H{9>!J*g9ya7byC-xWiW>z;a)_t^BcEsdAIL2esKHKI`WJJb{(;`Pb~Uai+kPf z{#rlowZdj%*OU}%HUdk8{iwM&=&$zU)|zd`Q6J^o{#G0{*E+pm04_+YulDzo)95lI zXhSL403{cYZOP4Myyn!L%J{8Plau3z1QKrGq(Emd1a{5A3kb23L)(eFgaz76+h@i-QSS? 
zJjz{RI?rY7Sp?t2Vcs%pJjWnsBJVPjR~S6c;3@-du`r5vjWOX6Qdq}%fiZS)vYbHi zAr$&PBK8adFhc1#BQ%-MgU)k)3m?lMg(E~%r~+QbQs@x?K9(zJL?}uD8d7^hln(pb z=uBi~$KMOBYX@=YCcB|)6P%5v+X;incY`Q~Xz9ffYKX=nEi4alv)67@>}d6sl`aG$ z&d|CDP&xLxDO{{ls@F>2K!yV6E(f-XeA=SQ25#c=B5CDIXgnna65E@t`gatM#({I@6y3i8{ z2B8k(xCgsEn+$Ir5X6QG2FQ>Iv+A7L2!RQE-Wmhp_3tqDTMUGT9H3xsp>ea<0S*No zVdz9DE2j-gqOnwy$m11f)>$akRMe7~IYC)17q#GRrQ@hrg~WVRqAp2tgP}ogkkU|G zg4WD|kOcyz`K95Qfi?}{3q?|n#wl|sDyakUyHO_`Ta2iO;9>0dsair`lT)BLwhHc! z6O>ei?-MyGV9?O*ZM(>JK}HvfClqk&0`TzH>;>T(imW!F0Crd0kv4jQsB(4oaJ<}u z0GdeD6@fHN2uLFt)_t=lC+Q$IzS``ORx;B6R;K72L}^6ot#s$4|0H@NAszmp*|EevRD}`2FPNo2^uZn zH87NDxs{B&?m>NnmOBZ~k}2QC@Uqn(!p;+xY7XK!>?ZAfunATKya~ud!Z-rFsWC>y zjdohfi;;Cx4PQB@ijKuVbgzEiy=vb)l3b;EQvb}2V30_AiBJ@0+S+jSgY}KO?|k^- z?e`k*eEjjo{c9ow?fdLUv_yBM=`O1ZHPY-g_drGsVcZO}M2aMNS_+p780m-$!LEHpu z(2ghE2;*RrSNeW)4_d#?MYbBk_NVAj`Wo3>NzE`{s zJZ+-W$#Z3Y>nUh9#KF*LTw9iM>px}+#mKMaUe31hZa2Z?yK%z(!M#vQh z08)-LD%%3s%TosO4Dt*9K#DZffmB0VxKoe`2i^!iWw=!uP8c8(DlZMy)l#F0nWG;p z)+O>Er7TK-WvD5la$11Tfgh6=Uu%|L!mO~1WuRO%%lJRDatPN+ZJd^mleObTCv9KL zCJAYs!fJ^_vG$xC3AiS;?jf`#IThkJn9dqz=$khmPGy77n8T@b<~rNiB4geJb0%3J zv77{n`O(BemH>%2>J-ZBDi?X`Rj@Zw^cBNfv}WbV)W#9nlvW``iZ7;-O6e@g12s&6 zu&8h(L#$oO?kl!1;4P)K4%eXN>$*4HD^{>)wCgmRx`XNzD}7d@_>Mb_y~~mZ#q?x8 zYfqf*vC`FEJFw57o4nEE`jDCF6tE1qlGMFuFQQ@68#H$@nIe0w(TKWH(r7^9i4an6 z5Q!jLF(M*(hzhMmWP+%&DHTMGACns74I2TMQIBXT#WgkNw_az+u2laAgeI?TkazuS(&TZW;6>QRvj_*J4rd#GfJQ zI%=Po^5k*f{}~@kictTag_EHMS+!WM(4z;p*Li1ttmd3S9zH0aaHipqI1R;Wu{7_@ zm!_R*r%4Nxhl4BT`t3h~bSaj>r~w#>`wu|I%(97ZP_rxmu5573fgkE3Bamr)lK92A zxXOTQ04z}K5&X)N2FHU%IR5}9(fwO9hG+ik`LkwNA&g)|tvRf6dyoW>3{S}+N1tej zp@DQ|^ZFqMMr;f*KqF$~=5-)T3wI(&12>^1(!p060_(7&Q)aJZ;tLK?^09RYkbqOn z153q)3M*l}X*jwh)bN5 zu3wb^`yg;|hLck8){I3Np17ITSmOLgYdpnKM>Ecv!&65x&gH0?_HmvI7;SQaa~{oj znv2s6(EAy7fcq0J?~+i}HOZ0*o;C3M1Z&$M6=@H28Qc$mF!_}Nb1#v;@oW5J3tKNrzT|=k$ zyFDxxz6fkZlwd1l4i6*H0k@6qbi+V^BQr=RAf>9q&}*DcrIAik#FsenGX-J`7g^0+ z@u?SAKi@9uRT$FLu@tCJR0|4`FFMH;SQz{EEL$~u{e6*x!?gpw!4%C&y$o5I%&jQ3 zE?PGGZrVWf5LbIrX)MYX#c**aDl3-3@+xx2#BPpe!?$uQV|B~l8@YAiMcts^?)gD} z`6|Br{3ezwpmC+T8r}RyN&VQOyiw7rn7#Bi+F@~x`wc}!cO%otiJdhn#yYxWmW!gv z$rpsrnz2aKCQgGRsb`3um-I?U_HSUrMl0&Je5uQn9Rxkmr?z0M4*H`1=468}%%c-R zbjXyz9lMjyg?=BpsvDfEOeNQaW%f|Ebb@)dj?zdAB-=$N)#}O-vrv}ii5h#G$?qXhQpoW;rrUbFqZS1hSIoFRj)kBZsjT-7i^~WC zn|L%bJ+ixKuR+vI^d^kT?jVWU*85%7_<%K-_dUOD!(*zj=-YBjoed%;j0~F-U0m_r z_u1Jk1|KsJo%n}{>8+f+FA$#CVCmhX1*E3;BNmW6-X|<2b+QfAlP=7`ZA+-W%Mu#L zPMlOMnB-)&=#Maq|H8-q6#`SMlxxtA>BXkcr&2oWoE5FQ;#B#Yx+I}p(=`cg8b0sz z!#kde1FbsyrG^b1d%{`3@2sYrGh>>bMt;qiLz{8*GxCc*==#iBWhJ7ek70&n3~rG9 zGXzi^bLAW=BQEIoCQbY%bz{Vr=>5bW91QR!K;;!(3~G0(`vk~Bt@rPMx1hYoVd`Y3 znv)e#wFf7Ti-P=zzdW$3Xg~W8L1vTuO_blz>nJg7Tz`Vtc~zLtMkBb4iYKTAo9b9iahhQp9>F3MMumH;wLk-mW*^KzHqb0+^1{uMwP;#U z`jMX0Do8`YM|w)rXH5(0J<=6TpBG;p^m}AgN3)>mBkL6GO-%h6F%adU9qdQio3MOg zL8TYBYEJ)>PfTbesS%PU)I#}Ujoq~_aXs(df0h<2MFgpgi)h?|WDassVQ?MN4xm+2LFegZIW&Ium#OgCZMVA4MTz$M7+=Ju=v)$$ z?!51yK<-)PkVC!ETkV~=2hD~`V&?s7=z}X!LFJWSccFo4!Va%2tg?q{WHi7i)bMN}hBxllQa zyh7L1yfW%lQ;bz7yQmk0bI}7;zJJQ(XAC|^V5##|Ie!{-vW4n#V=|u-N8%j0tNGUG zl{j|>&k**jmWoq#{iD0LZ@Ms(WG9dH32Abq%WRX2p7^3+A9@;z{mpvXF;`-Pl?ivn zV@Y1r%IzVHK4!`NK}BIktNa*a*o z7GzN42p5W=!wh7clhwj_5TTVAONKJIc~dQnFADTsK^Y!PdNtwOES-i2b53+8##9rd zW=3!3Y3EXD9>3@D^olyvMEX04)Ht*)k!>>^10RuXe~)3IFF{cP!SI+HeohA>4uA~x z37ep9O~R_&4bxwVcr|_6N9iQW)sdQ73V8*uAT+6C^+lR76LM%SbCRNS_z4$8YH5+U z4Iwai1%#*DaA^W?0r&t@2okvB&;W-NBRHE_xDe)5a#&i2k?u{Jd z0Z>D+h2$t^*5hcg<gD<6k zzAX@;#+UNnULPk-wdpwQ^zf{m_+$SUnX*Sz$d`e0sT*;Xo3!lugQSN?*02JYrZf=j 
zOD~X9FJ?Nghr1ztYk6mIx`T(2)>bmBH7@WOPPq4#rSj>xlP0xTCz9^V9bg*=0jZd7{VSQ-^f_ZcJS>s{Ls zeMS{26+=Hml{&#vG$UCk?qTSu$*0xe3RTK{$qp%CMtlc<#3Z?2)R&B*WaH&wdY&U& zrTKJu$lC~#+xrtXCRpz^ka@CFh$Q}ZN+BLSGXZ-(lR1z>ltAVg3t&V8nbKG3unN?A z2U0^Sz<^idC^v=V>WsF1UWz8T+Ja2di#Tn9OxPb{90l4Zo*Y1yz_ySl2Q9#^JfDky zbu?2i`u{})4{yNa`V8+0Aont@9xFgDMjJOYL};R*Tdg8*spJ^|Bq0jYYc0SIWEI^I zjNx)E&FA9lyhL61RuI|;piq8RMzNuZFU`cAXh)xV-^}06ExS^JGYlJncMEwPo4&XT z^ZslfR|DITKGdRDt2n^XHuO)v-x60V0AeS45aLn+K!PHog#ZN+VwrEb0aIERFhIn` zw|1JvK)6Tn;rLH`LnAu<(J5QvrodV}?Q!J>0Cljg^iF{wwGNoPnT`qTrkIf5-Z1EBl@NZvN?@>?!0OeF|W-`$FmXLI6+z!XPZK|xC> zD3bDOfDDai2rJ$;Tr=`U22T;LSSBbE>5?g0z%Icznv#8#ir_g~Dy~_r9HQnnnzE-G zH*UOKCq#V<_z9l9hI*n`tG+DiHpCvj;j`sg_CX|(W%ckNRGwv8dC02oX}KEjcNmD? zOpFy5CJ3+C#CcF^HHt+EjvdQuz#8@bg00UWfU~m^cZwz~6+C1?-qDFdo7k*J4~<=h zQ^&rH`I%C!I_`||otxktajh)q#;{geEHA?Mc^ddOT{>NwL0q(GO7k4ct|IYGSO&y7 za=^z>H$C8dYc6lUzJ4N}%s*<*~y~}+Yn~{gk+rJ&=3zKqY#Jg zGxFNI_}C9YsxZU!f=U-t#K+S>W|qy~l!;$q1eQk^=nNkg+_!?`^vS|~g0F>^&0TtA zGQ$MeALfP$pdI!COap-H7$v~*BIxj11Zx6aW5|k$e>qZ322<%#w3W&#Jw`L3+4IFK4DX(T4%#R*}O;FXtpu^>M}2p8Bo@Q0xb z*jo@$@(7Fq19RS}Qwgza2n4T0G2sO_@i@ased~?P?dXdHUw@FADpj7qbnBKcVdOm# zl(Dq0-0Li?2FJQFF9^us`%SU0e&E2zRSvlz9nK2$=p~g+T`{XI3yp@AfZUSM4#_KF zm*3H7z|s&H3__v|mr&<~G+p_l>d&{+l6Qa^#;kJMXn8-435+hYpj z77PB40S}RM0!Y+vKR%4{=v18uJXZ>aghg>32-eA0wFsAzlGWF;RJv+?0Y|V*X`;i? z43dRxpX_-hRHu*pvOU2UQ1$z!;$e`(%6C!|K)xz&c`i5?aD*4}`*dkhzOhq9IP27} zp)2nf4DeAGk%z8SsZGCexvIBu1VsI+5>Zb4D(RMt2%NuUB_gKxhyiiJll_=rY?1-F zq`3T(Cuv@XdAvOI{(%7>?8)YNtBk$EfF$+aVDL7BcNo0OpvmAP26q^Iz@X3I=M4Ua z!QV3YM+EF-Bsi&u|F0|M-sE`_&)?GyygJad=gJZ*)yfOy%6!?Goy6xNKBs4A5T2U7 aFuORrH2X^J0a6!gA0qVdc^RLnvh)9RilLDJ literal 0 HcmV?d00001 diff --git a/modules/docker.py b/modules/docker.py new file mode 100644 index 0000000..d7a41b9 --- /dev/null +++ b/modules/docker.py @@ -0,0 +1,198 @@ +from fabric.api import env, task + +from fabric.operations import run + +from fabric.contrib.files import upload_template + +from utils import loggify, generate_template_files_path, booleanize +from utils import generate_template_build_path, print_console + + +@task +def docker_ip(): + configuration = env.config + + if configuration.docker.database.host == 'local': + docker_cmd = 'docker-machine ip default' + return run(docker_cmd) + else: + return configuration.docker.database.host + + +def docker_run(cmd): + from fabric.context_managers import prefix + + docker_eval = "eval $(docker-machine env default)" + with prefix(docker_eval): + run(cmd) + + +@task +def generate(): + """ + generates and uploads the docker.yml configuration file based on the + settings in the yml file for the current branch. + + e.g. if we are using development.yml then it will check for the docker + settings in there to find out what conf values we want to use when creating + whatever docker containers we are usign for this branch + + currently, only the development branch is using docker, but I might change + that in the future. 
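
The branch YAML that generate() and docker_ip() read is not reproduced in this excerpt, so the following is only a minimal sketch of the "docker" section those attribute lookups appear to assume. The key names are inferred from the lookups; every value is a placeholder, not taken from the real development.yml:

# Hypothetical shape only; the actual development.yml may differ.
assumed_docker_config = {
    "docker": {
        "database": {
            "service_name": "db",            # docker-compose service name
            "container_name": "project_db",  # referenced by docker start/stop
            "image": "postgres",             # image used by the compose template
            "host": "local",                 # 'local' makes docker_ip() query docker-machine
            "port": 5432,                    # port exposed inside the container
            "env": {
                "user": "dbuser",
                "password": "dbpass",
                "dbname": "projectdb",
            },
        }
    }
}
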
+ """ + + configuration = env.config + + if env.debug: + logger = loggify('docker', 'generate') + + build_path = generate_template_build_path('docker', 'database') + files_path = generate_template_files_path('docker') + + context = dict() + + context['docker_service_name'] = \ + configuration.docker.database.service_name + + context['docker_container_name'] = \ + configuration.docker.database.container_name + + context['docker_database_env_user'] = \ + configuration.docker.database.env.user + + context['docker_database_env_pass'] = \ + configuration.docker.database.env.password + + context['docker_database_env_db'] = \ + configuration.docker.database.env.dbname + + context['docker_database_image'] = configuration.docker.database.image + + context['docker_database_port_external'] = \ + configuration.server.database.port + + context['docker_database_port_internal'] = \ + configuration.docker.database.port + + context['database_user'] = configuration.server.database.admin.user + context['database_pass'] = configuration.server.database.admin.password + context['database_name'] = configuration.server.database.name + + if env.debug: + for key in context.keys(): + logger.debug("context[{key}] : {value}".format( + key=key, + value=context[key])) + + upload_msg = "upload_template(" \ + "\n\tfilename={filename}," \ + "\n\tdestination={destination}," \ + "\n\tcontext={context}," \ + "\n\tuse_jinja=True," \ + "\n\tuse_sudo=False," \ + "\n\tbackup=False," \ + "\n\ttemplate_dir={template_dir})".format( + filename=configuration.templates.docker.database.src, + destination=build_path, + context=context, + template_dir=files_path) + + logger.debug("upload_msg : %s" % upload_msg) + + else: + + config_src = configuration.templates.docker.database.src + upload_template( + filename=config_src, + destination=build_path, + context=context, + use_jinja=True, + use_sudo=False, + backup=False, + template_dir=files_path) + + +@task +def create(container='database'): + """ + helper function to create a docker-based database container + + container - specifies the type of container being built + + NOTE: + "container" must have a corresponding value in configuration file + """ + # configuration = env.config + + if env.debug: + logger = loggify("docker", 'create') + + build_path = generate_template_build_path('docker', container) + + info_msg = """ +Generating container template for {container}, note that +the container paramter of "{container}" must have a +corresponding value in the {branch} configuration file +under "docker" + """.format(container=container, branch="dev") + + print_console(info_msg, numsep=60) + + dockercompose_cmd = \ + "docker-compose -f {build_path} up -d".format(build_path=build_path) + + if env.debug: + logger.debug("build_path : %s" % build_path) + logger.debug("dockercompose_cmd : %s" % dockercompose_cmd) + else: + docker_run(dockercompose_cmd) + + +@task +def status(): + docker_run("docker ps -a") + + +@task +def start(create=False): + """ + this will start the docker container referenced by container_type + + NOTE: you should have created it with the docker.create method above + first! 
+ + container_type - the type of container to start + create - craete if container has not yet been created + """ + configuration = env.config + create = booleanize(create) + + docker_start = 'docker start %s' % \ + configuration.docker.database.container_name + + docker_run(docker_start) + + +@task +def stop(remove=False): + """ + this will start the docker container referenced by container_type + + NOTE: you should have created it with the docker.create method above + first! + + container_type - the type of container to start + create - craete if container has not yet been created + """ + configuration = env.config + remove = booleanize(remove) + + docker_stop = 'docker stop %s' % \ + configuration.docker.database.container_name + + docker_rm = 'docker rm %s' % configuration.docker.database.container_name + + docker_run(docker_stop) + + if remove: + docker_run(docker_rm) diff --git a/modules/docker.pyc b/modules/docker.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9f6b8e388c52c68904ed9f60339c26271a8da2f GIT binary patch literal 5998 zcmeHL&2J+~6|ZjR%g%>0$&6=acR)oLkVj0M8;eGh0kM2&LCR`7yV_M?WwpD@c02t= zS9NAgVsV(gBXQxvfeU9ia_7t${tqOs@O!ViZ6{PoR>awg?KeccDv~vEwr2Y) zB3>8ChB(_0NmHCPMbZ*yEjzZ#W1He^QzTpBY)d%G72%rrLWsB6XIq5p)IQr0MBk|C zZSFQj*b-kjJad;RoArcy)Kz;T+^Q$+*Ocv=a$kfy;AGkcQd*@KW`#Pw zKJPj;wJ1~Dp%rBwXMWf>D#>Hts4gut>v+W3XcP^n<_X#(m8t@x^J)EI6f51&eKR&K zOc-QYto$_krLvpfzGzTJacCE9V^Lm2sp$tFUQCX3P((S#hyI|5 zf}tT`<=AOO(ul5%X}_t1bb3Ha;|Fs2hW z#LOjwUb%$FU$=#0&}SGspTejgea4n0Se;n7F|defUZ`R8MjxQ5F8o+N?OJ)}wgw(+ zV?VPR*2aO zBY#~?HpHYUt}uT^yd<1kQj1Be%Os3#GIi|+b&IL%H>lf8-B_mXFsW%tZQgp9Nv-8s zmbAG%>t4O})(xuNdV86=#~gRW%gQ`|bl$v2q{%+%{e#b+i5dGfOcz21#5imS``n{R zXgq!^Tv*Ea#Veq4k04@+?p*VcNTXn=0>?~GhYlLoM`%ql1i4HMI-@mzZ zwl2&=5rXCVbvBQLc_hNe!aNq?JN&E|uRGzph&WGx0sR-8e@o(12Tm>hG?X^hYH7ww zT0HfpNi4603`vnyQiKU*meN#agn(nBbfjg(G)e1t<}fRmUr-i>N{t*8ej1E>EncO1 zqn?b0^0ktFp=7DS0uFeg;w(=vo43ansf>(#9mSAktb%h{GqpnACIGmZ3=%-Ati;-} zZ&(irXDpSb9bfwp2a`&THti7<3Q;i~F+W-XL5j){wAlEF#|5vgq#dP|D3PudvwBs^ z#?$ApQ9QKIBk9FzmxN1@JUycWp5DwKf!Os=cla zhJ)qp=0j0}*lE6U%c49hOds)86{#P8`3$6FJA;zhoGr_Fjl#lRqmT(S=~4yqpIx4- z>Fjg)LS8Z`W*osm<9uv!@&`=6S%I{g1Z?7Vd|9_<&s)tha}LhsLTck1wkR z(I9hw5~UOW_~W8f6+xF_cKjn$0z?@2lwIfDx(4cpQE_~^+?zdP6x7n#?6VMu>!oUE z!w9LWDdME}7CI(+L{Z^|{D$UqJr|MLQ;+21lEcb&(tQW3xhC-_3$f`!H7G|OC18&V zHIK?0WD*=#6d1;=g(*pL4WBz8keKXe7MQe*50~;6o$2eW2yM+XU`!Kp3o6R+#14q1 zRfH!N#}5;LhDf}C8aQOlOb|#C->|N=JvGBX7lGHnOkPLl!>ssVx+IE6vN8**L$M`S znTz_7x^x`22KBy4OQ`PPc@?5yZSF#46morR*~N@IE5JN*)vw~Ge?r^rG~K4N=eC_K zx7paow?ki-TKjIt={Wls-*?)m&Gy{p#vcB!IZgNeifvKPe)%v1DzfM-=*Kn2@6oU| zM)9sdiFUKJ;9`U7HZCntV^HPHbj)*@j(}1dDN)DV8 zke#YDaA*@>9d{O4p^E!8c;%tgxeB6Tgj_@E)J$_Fv*BWa94Jb5bmcI1@yUlD|L_IN zK|XjecZ36(l-kI#e?f8t8S<#kvoxgT9Hl5VSPWHVJ!jsf3_+e@714htU4Lw^OT=Kl zi>&d_8I9uq`VCR+qLInB69QnTFte zU{lKhE5mL&-=3Y%GGLEI*}$ZE#FX?2T;$b-)23eA4$n=LpY&-YU8EPJR7<#X0 zHifdq5FJAOp@7K4mRZG%u1q|GhEKX2>%c;I-=YV_85{q;LoE&(lz_YBAs)GicT5l3 zn!{_94zoVR1iDxJ$3}M>8orM$HT~}6A+|U$tUq#h8*qK+o^#&~h)y56O_K1M10V$D z3jw{KIOR*`iKeh_#h1C3hd$rsJysnfHoUPmcX7$&Bw&0~>fe%(B&}H7Ea$?18)O?e z=z?C~A90U|IutfI8ckM3hi*Lx8GP4ta1pgvcHOQEXfs9;$Carzet~*&UOF#13a*_B z#czO;c`#kj`xF$Gd|ytpQtEM5#-a6g>$9OPe$9A^d9R8~DC)*pDE&cpVN1m!61w@? 
zjcCLhAtc=%h!EvicJ{G~?;!%8ui*A5&gUKnp)>^pIaR3RRb4TwkXLHDT~_CAZE@Wx zC?k4wJc|mz>rt1H(V!Ng&?eRehWgCwPSMY)owxZN1h8)#=#~fjHGnYM?f(e71z{9&<_ODj3d#$h$RfEBhW|U_ z$jY2B3YEZnEbf+G+h~*J%kQj;i-_j;Q--4Y(p#(|8uYk0wIwbkq|!vOwmFOQ6FVwrv<-}UBo8|( z7m?OcKOh>VS?{Ox_z;hUg}8MW0iMKpXnO3?G!04@$4;E6u}_zK4#}mK zyVRbuy2N5oz(I->O_L%`^Uyw}1^Q4lE%IO#=tF__x&0IJngV?+(3hZaf4|>1=dvqG zDN>jLN^*yD`DW&uneRTcwf|h4`;UM7*S9;)|J3mRIzGuykVN>axSn%2@_fbJsAlyw zccZ3!)%B*_e%;-uyZvc*W7<`snu}-LcEv?g?iviRb*4HJ?`I5r&wqwH`6&}UT>%EH@5cTPMYsg z1=XL$C;2#%7z@HOou*a|MX?zaNF6TG=zN6!w&$v&@fyhSAUo!i47 z5OqM2%^tg1WWzA(cG55uiZ|nXi2zhe;@Fyu&;)8DI=ICJKV7PZJ=?uFEXio=&UUF@_y*2E1uI&$^Q7=xeb^G14 z+wOJ07H=Hf3%EhVHQ~0oN{9clj}|hI3R}qiid)N;!NmkMB--wPd!RkXLXHXZlyly0 z_eOCOng2Wb!8BKc&oS(J%1sT!ojC2L@qQSdMsaclT%Hc#ab!}R0S+L=UzPL(+JC1yH$7VRqfub#bN+Iy!H8>`qZ*M$Y8Gl{dQ@;xz!>>^JANoZYjNcT<0^3iNla$Aq>}$Pl%(d ztW}mP3zd&o*R@asBH2J>5g-grpoF1hNI2A}8EJ|$3;XFxqh`Ok zKqJ+Q$R_z|uj=-2z#zDBeqslm8OX;u6QVlJQQGY#=?PSZp8=nBw?=9Fdd6ZYPZ^qL zAU%_EL&$!|$!`?r@!d4;N1C3T;|*y?M94KyI#p!=ov86D<6FGA}mY zkE%Xl-Sx8?)32LdU}*~8)FgK~8gy>P!w{N|2V|vnE?)Er;Tm=Z{q62ffGYAc)U?XV znlzHU^y*YByF2hK%S@uG5j3v^Hmkw}2j1V&5U9V6CLJ*>i>-DN2Ny6)a30AzH(7|- zOeBEd$-EVrhJIu>ol(m1GG1Nni54(smj3+{d$YTy}n2uht%zgc%53ZR4#I-q7v zGW`R{^g66FE;<1by15p9cH`bbJZx=``kk~p=(o~A3mUAo-|qIoR}}EAZhw0)+;3;i z>2BPLKn2Eea~h>6x30GCbbGzlR@?$PMXh$yVh5wg(a=V*t{7NtCU9EbYSVbV_PxO< z#aNUmK&w5x$01T+1)5YZ4e zzNO`h58E*;(q<7(Vy_aY*fPEwcOY5CYH`#}2gYih!Tx@`A0?aXlmUX=Z6`8#uAoBN zN3{?Ds=dnE4JNrn53ZqTdX1{AI44bJdl+0qix4XCMOPFd*l8w7rXuFS8(WOh6 z48&N*;#W|WtRiu9rz)qbr|Qe~GnLi)Ql(j4tuBJQCCIqp9ok!zSlkOS`8R<4FT~_^ zb%QSiOB6i%B{Cj)RDPMHg7gMhld@sP?ajKq25mbD1PTTS2=rvPx(gdL#gG|;pL;m` z91mx2!3|sVP$2|*i{yDQ_8y$fx+Yz6drcx&@wI4{6wAtx51Jf!m7pLmtaeVdly4A5 z7}12$rz>m*-nlTM_{*9PF=nwuz^uU+Ub{cz!jlg)%vl5c-6=>Q`UDBYlu015EjYJ* zq%7+726tNEJ;6AFWee5Dn|+wD;=0Yvf8^-Bq z*iTyh0Z0kcU8O4NZg+XqAdrK0LPb;@_l7Nna^awz?#hnMC$`W^Ktocp)gvfdBFvyR z*l(jZ=(KwY&HsspVZ1-^;yf9QhVYT7bD_`CpJE~x;sxa3?g1tVDB&c; z_6AVF6k?n#2|mes7B#I#w0X?vV@niojdlV$HZs)?V85le+r!`q)Yyt@$R)#$0vJ(} zu2Vh+FEV+F2^B%`t4LBAm*vCOF6HaANeIMq$g7V_F>uqU7_#&DJ(LsbME97qLKXVp zWCPmZ9BeWZb>TCGD}#)F|HC)&;;CdNLn}!5Y4pMC=x00X?dX@;vS;d*VXen6P<*Eb>`s^ON8n`m=QLmdeYN? zgtq)F%)Y9JG+n{o6u36ad_D_ht<^2$b@O@MVqUk9*PX~F!73JyUd7&$3l+cQS{B_J zP1?_Cu_z#{>8CU>=n-Oo(Uxp96i$}*h&y@;>k$+hut^AjHgn9DZ2TYQn~4fEuff{x zUv%jix3}!}Rx+fe>Znd2C~U2PX_SZPAko+)tNJ8P;G;2+~+XbAYhvGQfDyg((J6}@Pn6Qvb=md!A**2i6V zzJz_=1*kltiy>0a3s{#g5NF0*{>KJ08_L(DVpM9|QITu|yvi9ncwHrs)UVE_idH4R* z1X>`N$ts*5sv^WQDvo;PgP90>gN40ddf-PhV+(u1p_$9Q^^Y^ri|%v3*@tKL68@(f z<2#sMnF(wB?I$W|~`W$Q$jH`jULwrRGNtgI(c z4(J1!O{Op^gO~~83<*t}_h?-oZoG%k=AleAWGtfg6k25j?hFT`1DT1*C>qEbv@0?> z&A1myVF@SU_MjKV!{9o*(cax1>|4NoI2fcdb(0gQ_Hn@t3!VsvtcrlLJ!P(HZr!y7 zbiy?zMr_Me9>(oxKekX}``{pv5t^Z*d9s3;&Waf(BZQxBcN4@sXoul`54j}SK>j_C zgWrv;b0`BC+vA%J=|crDN<9>|9`7TocC1%Gi&cy2?{xcjwH_iMYgh9qI!c>8o33;= z!DrZ;`+6tY=?2eaC0;Q29nocj^3iZ`5Jp=TL}k9+>#49a>UTSXVP6r|5~+C=IT;6EK%MB&f`y77S!_OzxMu|c2fxkkn@rwd@;gkfBT1V`7|r%)6pHw` z$-%2k=o$r|Wn-NK$mB~* z-e&S;CSPGf(g@yR@-7m)1<}<}b|OO8Zj%sqPDkw?0?@&45t!RzEk;)-^D>td`gl71 zCxhB*^F})nfm%&(umg(Ru`rxXt4jc_&_D)=$q&0xoCG|3#dU*ckt>NBx^_Y{+ffPt zT*nW!Wyj>M1ubClBYcwoLc+k^srnLvb!YTT*KE0}eD%!9X8mlnS$(X!h)_Lim*K!Q z6vAJsobqnmT4k}`lw)_^(s~mx3zbV4vx%S}^9z+{CjYC?v!#Bgss_tZmMRy~ey+ZV zIWSXYxz0Jx0t1eA4!LIKY~}1!bE>J96}XZsGfFEf_0v=Moms%_Yt`9mz22N|Vio$W zFIGT? 
zfAQ7yHuGw8LOC`ZPWYJ|^qy|kgVq%)azEPY{3>?67b%B&HLInuSB*~q@FbeLXVAo(-NTj=yNzIGw zLcM=n+;$Ul{WFNmuD?jdq}=M++J)MM>gn30nrM;nB7Q`RZ|AglG$G=y6AmbBP=%j) z$_ik=?<#z)fH<^b-mJ&1CYlhSP$N-^$4qbV>gnF$)_W z*QX$H{8>lDEtCPjfPCffl;^c;I)gw612&!IL1hS^PeAJG71THecH$vma1`+3Yo0c0 zG3L@yzg6ADq55P9<9awQB z@HXzgZX;&CxlRRUvC8+D!%nzhhshI6IQOKAzRo)7=|kprnaEoBI&xBLoG+U$_Nrdc^zU8pY9 zn|121Riq2hdQ2^!>jPSk;=$@95Bhfkh4_bn1rM%e@|{J9eot_^O)fWq^B|~i&br~g z2v;0S3I^ni>%Ww}LwMx)7^D~<0$tt7f^d_e0vND_5I6N8u7XJLK=UqKD9vF#^C5Jh zl@GlKU_6{`fL$nr3w9xo$bvfJKA?p=qiE2Nacwk=O&7>oyN(e=coO)y!`|w}kqr9w z?RK}vqD;bs)aYF;MzKYmo8nia^a-1jgUdi8-6ml+Fb{AOXqe-rredCgMMVFfBx;;i z1|lps7^u1tm+}b4DgU@Ac^~*(gjtfDN5<6`=V<|*tFFrSLT-xI)#?=KIH%fNbNqWw zwS^Uf@Hi9&=#1p(tpdbyKr$wlK?=Ae#N(M94mGMIF-gUEg<+7mFFg`c57UHwn~b}P7(Z0f8eSMn9Arg=nZ}Snps9H@5|0=J!TiZV^kDhTb-HKCSCKqSm6czL z^y5yPBz(AGZ!_W+T_5!{Q*NWx>c&@aJ%`(^E3G)~*u9*&AqM|ML(u4wUeDHD+7m#KU-hxwYuN?4vXJq@+U}snS9>I{GTVEe-;_npa(VM zwb8QaCuy%5o=QnudGBadrBz>opQQ3qAq#R1_92G#L@aa6+mf(2%v>zmU;@HEqynP@ z0i9Mz2K0n*z>B5_dqLTZcPz9YSjRd-fFPo3am1fdn2^vp2_~bx`7h-PX;b66Y+7s* z@88Bd{j6N^K&M%E>t}q$S3Is_oMMtiMCLgwcxT4kemrs-;+4^0s6L`f9}g(VR&I_x z1hPtz;Zc+-lk0oHKwW;B1UCQ@-z32?;^Jmc&ycJxLXm)orWar^UBstDdz9pcpYz1R)D%ddz@`n zjttCxbjw9`Hr7KSp*~ClWWyGSo9>{YR3JjcpR1HA&}{d{w%!L2XY>CSaqvN|kbsUE zV@EC#kWe-`;-ZlUXy>##jaJlyB@HvRy4+J*R*51u{EyA@x?gf;{&x;X@n z9_48Q-nTa78}8OmD{mLOecEt3YALvn9wWswdbRv8$TBGca*7w%Z?5yHik?YK3MZP` zKH&JE4@&Lp8!a4q9Tjp#+a0QS>^xp!4~K)H9;90(BK$jYG1b75BC81p!$TM~GmmC9 zQen#F+{KH>9-=koSW?PK!}ArFEN2bs4T#J3S|*4AwW-ya;r^#68>-)D?$4M^==1BW zlMI!up=;AKi&ieEDI2kgR^I>JMfu1f&RY^R4WuT`tSdP9EG2{*v`=7m%N zS!g>wJ=tJuk{7|=ejhlB0=VTmZiYtAfF2lmtig}W)3-NqJ~WlhA!j`6ahyK%0WkV8 z6~rzw@H`MVJyX3Pd}@OzRoRH=4y|rcEP7Bc>L3-E}|Ck zSUm}CD)O84mcpUZ$ra8Y!iS;NP}S|@Hzn;Ep&^Wb6JM4QwCFyu50pb22}ywMTDz6@Lh;3$!K z^PG1i81(Is60l~-TEe< z&p_CFFN40Qq?Fx)jl2J9urDAy2{;)#c&=aE(dl{OVVGUng}R60_39yS;K;PueF(`t zWpFF2$UzfIhkOLWl?L>Iw=lm7I#C+J8tvAIMAqF$K#d~ALdU@!<`@Miht>3TihMPt z*qUNyUjVdjpuoU<1dlI+5ThdKM;`BXVyk_Z6Z|z3eEg*Mkdx4kw#4G(01G(el8XyI zhOcS_w=m7~m_K14-PIQ=yzDtkcCFb`3x2th z5#db*%8Hvs`LNf~AldiK)u~=_<5w@rLyT@**4?5M+TkR?WRa z>QET&6;6d=ntCz`QyH~&w3lo38VrQ|3e*z_t#`)lFO$$Ju+E`*2)m-M$Kf5DsU>J2 zj0411c|*b!4^VS3s=aCZw(k;lU^8L*p2<{6VSbnUS=0r8#pDta8B=EEgbMfIyK4&Y z(E1vyk1X0_6v`c!y@l_r^0h+ymjf7 zYz*G9uj^UDAE2wr;fu^Y!GuT$EhZBjah-Lvh3zrIXPDbzGRH)!fJ7C1hsk%DFsdY5 z(7r{rK`ik_%?x<4jvMv zu|4zo%bbsD3kX0j;Zh~zon~c@m+;`OX>ilD|6OOWLEEaa{I2MWtoHKb3Kz`n^iA!9 zt{soafx^bVoo3Hn9_MJ!G2t5vJ?^lByUAjY3FGGhj29OSn9!(|iVG+Q0v-Q9Ay@36 zav$wCr)DwXD<+~}<+>z_a32L58m|^s8dHtaji(nj8y~vm#>X13HP#!y*0|F6dgE&2 IH)@st0Y<0;*Z=?k literal 0 HcmV?d00001 diff --git a/modules/maintenance.py b/modules/maintenance.py new file mode 100644 index 0000000..6d0538f --- /dev/null +++ b/modules/maintenance.py @@ -0,0 +1,258 @@ +from fabric.api import env, task, lcd +from fabric.api import local +import os +import sys + +import utils +from utils import executize, virtualenv + + +def command(program=None, cmd=None, extra_param=None): + """ + takes an argument and executes that command for nginx + + program - name of program to be run, eg. 
'nginx', 'supervisor' + cmd - can be 'start', 'stop' or 'status' + + will then run the appropriate series of commands to get the job done based + on what we have in the configuration file + """ + + configuration = env.config + # logger = loggify('maintenance', 'command') + + if program is None: + print "Error: You have not given a legitimate program" + + print "permissable programs : %s" \ + % configuration.maintenance.keys() + + sys.exit() + + configuration_program = getattr(configuration.maintenance, program) + + if cmd is None: + print "Error: You have not given a legitimate command" + print "permissable commands : %s" \ + % configuration_program.commands.keys() + + sys.exit() + + # find out whether we are using sudo, run or local to + # execute the nginx command + + _execute = executize(configuration_program.execute) + + _command = getattr(configuration_program.commands, cmd) + + if extra_param is not None: + _command = "{command} {param}".format( + command=_command, param=extra_param) + + if env.debug: + # logger.debug( + # "execute type : %s" % configuration_program.execute) + + # logger.debug( + # "%s command : %s" % (program, _command)) + + # logger.debug("extra_param : %s" % extra_param) + + # logger.debug("%s modified command : %s" % (program, _command)) + pass + + else: + _execute(_command) + + +def edit(remote_path): + """ + calls up mvim or vim on the file + + remote_path - path to file we want to edit + """ + + # logger = loggify('maintenance', 'edit') + + # configuration = env.config + + if env.debug: + # logger.debug("remote_path : %s" % remote_path) + # logger.debug("env.host_string : %s" % env.host_string) + # logger.debug("sys.platform : %s" % sys.platform) + pass + else: + + if sys.platform == "darwin": + editor = "mvim" + else: + editor = "vim" + + cmd_edit = "{editor} sftp://{user}@{host_string}/{remote_path}".format( + editor=editor, + user=env.user, + host_string=env.host_string, + remote_path=remote_path) + + local(cmd_edit) + + +@task +def pyc_delete(): + """ + Deletes *.pyc files from project source dir + """ + configuration = env.config + + with lcd(configuration.paths.project.root): + local("find . -name '*.pyc' -delete") + + +@task +def pyc_compile(force=False): + """ + Compile Python source files in a project source dir + """ + params = [''] + + configuration = env.config + + if force: + params.append('-f') + + with lcd(configuration.paths.project.root): + with virtualenv(): + local("python -m compileall {0} .".format(" ".join(params))) + + +@task +def get_base_dir(): + import os + BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + return BASE_DIR + + +@task +def get_project_root(): + # + # NOTE: the PROJECT_ROOT value is very important. Make sure that + # if you move around this file, you account for where it is and add + # or take away "os.pardir" from os.path.join + + PROJECT_ROOT = os.path.abspath( + os.path.join(get_base_dir(), os.pardir, os.pardir, os.pardir)) + + return PROJECT_ROOT + + +def _get_configuration_path(name, branch): + # + # the locations of the files we need relative to the PROJECT_ROOT + + param_list = ['config', 'layout'] + + if name not in param_list: + print "value %s was not legit. _get_configuration_path requires" \ + "value such from %s" % (name, param_list) + + META_DIR = os.path.join( + get_project_root(), 'scripts', 'meta', 'configuration') + + if name == "config": + + # + # the configuration we are working with will change + # depending on what branch is being used. 
This value + # is passed in when the function is called + + configname = "{branch}.yml".format(branch=branch) + + path_meta = os.path.join(META_DIR, configname) + + elif name == "layout": + path_meta = os.path.join(META_DIR, 'layout.yml') + + return path_meta + + +def load_configuration(name, branch): + import yaml + + # + # the locations of the files we need relative to the PROJECT_ROOT + + if name == "fabric": + file_path = os.path.join( + get_project_root(), 'scripts', 'fabric', 'conf', 'fabric.yml') + + elif name == "config": + + # + # the configuration we are working with will change + # depending on what branch is being used. This value + # is passed in when the function is called + + file_path = _get_configuration_path('config', branch) + + elif name == "layout": + file_path = _get_configuration_path('layout', branch) + + configuration_file = yaml.load(file(file_path, 'r')) + return configuration_file + + +@task +def check_version(branchname): + """ + Maintenance function to check the configuration version against + the fabric version currently loaded. Returns boolean + + If the version is either non-existant or not the same as the fabric + configuration version, then a message will be sent to the user letting + him know, and giving him the option to stop the program. If he chooses + to continue, the function will return "False" to the calling function + + Otherwise, everything will continue as normal and "True" will be returned + + Keyword Arguments: + + branchname -- the name of the branch whose configuration files we are + checking + """ + + config = load_configuration("config", branchname) + fabric_config = load_configuration("fabric", branchname) + + if 'version' in config: + config_version = config['version'] + else: + config_version = 0 + + version_correct = config_version >= fabric_config['version'] + + utils.printvar('version_correct', version_correct) + + if not version_correct: + + # TODO + # update this message, currently it is false because there is no + # fabric.updateconfs function! + + version_false = """ + NOTE: the current configuration information related to this project + is not up to date, the fabric tools are at version %s, and you are + at version %s. Run fab fabric.updateconfs to correct.\n""" + + utils.print_console( + version_false % (fabric_config['version'], config_version), + numsep=90) + + # utils.prompt_continue() + + # version was not correct return False to let the app + # know what's going on + return False + + # version was correct, so return True to let the app know + # everything is A-OK + return True diff --git a/modules/maintenance.pyc b/modules/maintenance.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1706e311303d4afb2acd13aa5cc251699ccb4404 GIT binary patch literal 6380 zcmcgwTW=&s6|SDE$2*QUn`GHs2t~-^(Xt+gAQWOPD9L7XA&NJ&7bWq_sNG#NZo4l- zSB<^KI|#{ui={EA9C>IIw@`XZ2-RoT#6Y`7}J<;C!6m0S@x*HqG%&NH%MWwGH|-n%_`3NKCngU_Ieo?~N* zL)YRUvIjDe#mOMxL>o<3#!TTVVqL1~|DrAU*65q*~Vaxah!G9ThsNCbQfW4*f)XdSSusMNY?M zo^y6wR=Sv^#lvMeI<3k{73F$e7g26>Io69ASL$7(t66bdo5@hG$)UB|dd<$JraDY* zS*;0(I8Sgq#`W00W?fV{+2+dWn#OIMax=T;FFZ=K3>cY$pxM#U6jx5GG;&5;Q>DNd zXw>)u`%+Jglb!qJu1?Cr=-tSgL@qB2eZ=P-8NC-B8l8HWaaoMh$*hW8iXG!LGqT$b zLAQHVRaTq&^K#~o6s6OX^bi0n>P5j;3gt&MiG^LfyHQ{S}z3&_Qr zo&>p=Ux0iXSDw_9bA2+6z@a(hQmJ}U^h2h%1}m5j@l~!P8~AY1sx45;UBO%)rG+y^ zRK!LW4$QF?b+h=T{+WlV6YwB(zFv<*0P#wJ9g8}jg6BTcr$gdH57E^|Q~=Y|EqV4{=x+_^!!}gN?lf@s5ShR*$PPr@^D%Xj{&z*ytp! 
zWWRk0C&wvV$55|}lUS3ZYkEBqP+Ow=o)y9r@z`|66ehbc3WP0hugcPSZt!&>DR61D zr-I+7}ABNsxQBd-V(KYzbD!ahc0Xar#!RTVAiM5c(AMumJuoYV>H5oyJbG5K7MlS z_TbKH22n*V9bWWxtRwo$k{#<~`vQKZ9>97|_ezkk5c=e`xgI+5LT?S|FV0yhnhF1G ziX)xy{Fr+x25h4DC~B$ZjZG za1~5n4r0#mbu?$f_Dyuac8CXp^0^X46K+%E9`7_2-t2jZ7BT{^ZcD_vMBSayfUDPG%N4r+$sXU_***Y2tkZ^;?IsMMt+xJHAei&YwqEH184U8CRjezX0 zG4`qguXoGS?1FK60~@??f>zWWPaZ#yI2goRw5LKgPugt>`L{&o<;CG^fEB*Z=6h^t zx)w47RD6a&1V0Jy|McG72cz)*{RfvunX9;%l&Q_-i014!nDf->_JUiDm>j3jll>x5 ze?*58)ld_-dNCm|76)Bb-J(^1TwyqFFd3->e-Sg-lv5m(SO9?ls8%O5K5+OIg91hh zhzMXvRDfjVj0ebZkfbo-XshB?UV+;gZv5fG9>fjx9^gRJLle0bQs{){S#(^^oc##n z!zh~>guDm}tY*|K(ehC15$SOurt{%MWqvVBD`WjZJB#ZRYLI20$f%`iEO25^+H3}Dk2fS$6aK$*j=&1j3+NjvmC_iWuzRA z*AwN78ucg4+GmMF&~NtYF5BiV@eOne|2{{=SaLnL|3=c5S@+BzRW1;LBS z*>XIqDilGoV@)!e1QseZZdMgm?;_!2C>%_XNwM&O3W;TX29G2VW;(E<&LJrhP79l%6K6jgDvr1Sz2QS771Av06jB;%sGh#l}bpW zUwJReta)XD5lU_u?_99MQ|dmhN5*FX$bejkBDqJvEUqA33Kj)2LH(5n)eKMias*$XQO{ZkI>?eGeWj*Dk e;A+qh7=&DJUfb;H)hh@_uC8`hS9&k^UidHEGAzpg literal 0 HcmV?d00001 diff --git a/modules/nginx.py b/modules/nginx.py new file mode 100644 index 0000000..dda4e38 --- /dev/null +++ b/modules/nginx.py @@ -0,0 +1,288 @@ +from fabric.api import env, task +from fabric.contrib.files import upload_template, exists +from fabric.operations import sudo +# from fabric.api import local +# from fabric.operations import run +import os +# import sys + +from maintenance import command as maintenance_command +from maintenance import edit as maintenance_edit + +from utils import loggify + + +@task +def command(cmd=None): + """ + wrapper for the maintenance.command function + """ + + # configuration = env.config + # logger = loggify('nginx', 'command') + + maintenance_command('nginx', cmd) + + +@task +def start(): + """ + wrapper for using above command:cmd=start + """ + + command("start") + + +@task +def stop(): + """ + wrapper for using above command:cmd=stop + """ + + command("stop") + + +@task +def status(): + """ + wrapper for using above command:cmd=stop + """ + + command("status") + + +@task +def restart(): + stop() + start() + + +@task +def edit(location='conf'): + """ + calls up mvim on the Nginx conf file + """ + + configuration = env.config + + conf_path = os.path.join( + configuration.nginx.sites_available, + configuration.nginx.conf.name) + + if location == 'conf': + remote_path = conf_path + elif location == 'log.error' or location == 'log': + remote_path = configuration.logging.nginx.error + elif location == 'log.access': + remote_path = configuration.logging.nginx.access + + maintenance_edit(remote_path=remote_path) + + +@task +def upload(): + """ + put the nginx conf file for this project into nginx sites-available + """ + + if env.debug: + logger = loggify('nginx', 'upload') + + configuration = env.config + + context = dict() + + context['server_name'] = configuration.server.nginx.host + + context['access_log'] = configuration.logging.nginx.access + context['error_log'] = configuration.logging.nginx.error + + context['port'] = configuration.server.nginx.port + + context['django_host'] = configuration.server.django.host + context['django_port'] = configuration.server.django.port + + context['virtualenv_sitepackages'] = \ + configuration.virtualenv.paths.sitepackages + + context['server_media_static'] = \ + configuration.paths.server.media.static + + context['server_media_dynamic'] = \ + configuration.paths.server.media.dynamic + + destination_available = os.path.join( + configuration.nginx.sites_available, + configuration.nginx.conf.name) + + destination_enabled = 
os.path.join( + configuration.nginx.sites_enabled, + configuration.nginx.conf.name) + + build_path = os.path.join( + configuration.templates.nginx.path.dest, + 'build', + configuration.templates.nginx.conf.dst) + + files_path = os.path.join( + configuration.templates.nginx.path.local, + 'files') + + # + # this only gets used if sites_availabe is NOT equal to sites_enabled + + cmd_link_available_enabled = 'ln -sf {available} {enabled}'.format( + available=destination_available, + enabled=destination_enabled) + + if env.debug: + logger.debug("filename : %s" % + configuration.templates.nginx.conf.src) + + logger.debug("dest_avail : %s" % destination_available) + logger.debug("dest_enabl : %s" % destination_enabled) + logger.debug("build_path : %s" % build_path) + logger.debug("files_path: : %s" % files_path) + logger.debug("context : %s" % context) + + upload_msg1 = "upload_template(" \ + "\n\tfilename={filename}," \ + "\n\tdestination={destination_available}," \ + "\n\tcontext=context," \ + "\n\tuse_jinja=True," \ + "\n\tuse_sudo=True," \ + "\n\tbackup=False," \ + "\n\ttemplate_dir={template_dir})".format( + filename=configuration.templates.nginx.conf.src, + destination_available=build_path, + context=context, + template_dir=files_path) + + upload_msg2 = "upload_template(" \ + "\n\tfilename={filename}," \ + "\n\tdestination={destination_available}," \ + "\n\tcontext=context," \ + "\n\tuse_jinja=True," \ + "\n\tuse_sudo=True," \ + "\n\tbackup=False," \ + "\n\ttemplate_dir={template_dir})".format( + filename=configuration.templates.nginx.conf.src, + destination_available=destination_available, + context=context, + template_dir=files_path) + + logger.debug("upload cmd 1: %s" % upload_msg1) + logger.debug("upload cmd 2: %s" % upload_msg2) + + copy_msg = "cp -s {build_path} {dest_path}".format( + build_path=build_path, + dest_path=destination_available) + + logger.debug("sudo('%s')" % copy_msg) + + logger.debug("\nsites_available : %s" + % configuration.nginx.sites_available) + + logger.debug("sites_enabled : %s" + % configuration.nginx.sites_enabled) + + logger.debug("if sites_enabled != sites_available then ...") + logger.debug("sudo('%s')" % cmd_link_available_enabled) + + else: + # put the nginx.conf in the build directory and the + # /etc/nginx/sites-avaialbe location + + upload_template( + filename=configuration.templates.nginx.conf.src, + destination=build_path, + context=context, + use_jinja=True, + use_sudo=False, + backup=False, + template_dir=files_path) + + upload_template( + filename=configuration.templates.nginx.conf.src, + destination=destination_available, + context=context, + use_jinja=True, + use_sudo=True, + backup=True, + template_dir=files_path) + + if configuration.nginx.sites_available \ + == configuration.nginx.sites_enabled: + + # if the sites_available and sites_enabled directories are the + # same then do nothing + pass + + else: + sudo(cmd_link_available_enabled) + + # got to this point? then restart the nginx server + + restart() + + +@task +def remove(): + """ + remove the nginx conf file from sites-available, and if necessary, from + sites-enabled too. 
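
The nginx template itself (templates/conf/nginx/files/default.conf.jinja2) is not shown in this excerpt, so the snippet below is only a stand-in to illustrate the mechanism: with use_jinja=True, upload_template() renders the template against the context built in upload() above. The server block and all values here are assumptions, not the project's real template; jinja2 must be installed, which the use_jinja=True path requires anyway:

from jinja2 import Template

# Stand-in template using the same context keys as upload(); the real
# default.conf.jinja2 may look quite different.
stand_in = Template(
    "server {\n"
    "    listen      {{ port }};\n"
    "    server_name {{ server_name }};\n"
    "    access_log  {{ access_log }};\n"
    "    error_log   {{ error_log }};\n"
    "    location /static/ { alias {{ server_media_static }}/; }\n"
    "    location /media/  { alias {{ server_media_dynamic }}/; }\n"
    "    location / { proxy_pass http://{{ django_host }}:{{ django_port }}; }\n"
    "}\n")

print stand_in.render(
    port=80,
    server_name="example.com",
    access_log="/var/log/nginx/example.access.log",
    error_log="/var/log/nginx/example.error.log",
    server_media_static="/srv/example/static",
    server_media_dynamic="/srv/example/media",
    django_host="127.0.0.1",
    django_port=8000)
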
+ + then restart the server + """ + + if env.debug: + logger = loggify('nginx', 'remove') + + configuration = env.config + + sites_available = os.path.join( + configuration.nginx.sites_available, + configuration.nginx.conf.name) + + sites_available_bak = sites_available + ".bak" + + sites_enabled = os.path.join( + configuration.nginx.sites_enabled, + configuration.nginx.conf.name) + + # + # include the '-f' option so that if nothing is there + # it won't return an error + + cmd_remove_enabled = 'rm -f {enabled}'.format( + enabled=sites_enabled) + + cmd_remove_available = 'rm -f {available}'.format( + available=sites_available) + + cmd_remove_available_bak = 'rm -f {available}'.format( + available=sites_available_bak) + + if env.debug: + logger.debug("sites_available : %s" % sites_available) + logger.debug("sites_enabled : %s" % sites_enabled) + + logger.debug("sites_available.bak : %s" % sites_available_bak) + + logger.debug("remove enabled : %s" % cmd_remove_enabled) + logger.debug("remove available : %s" % cmd_remove_available) + logger.debug("remove available.bak : %s" % cmd_remove_available_bak) + + logger.debug("rm_en == rm_avail? %s" % (cmd_remove_enabled != + cmd_remove_available)) + + else: + if cmd_remove_enabled != cmd_remove_available: + sudo(cmd_remove_enabled) + + sudo(cmd_remove_available) + + if exists(sites_available_bak): + sudo(cmd_remove_available_bak) + + restart() diff --git a/modules/nginx.pyc b/modules/nginx.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4658b9e4f4cab7d9201ffb5f2158ec0843c14229 GIT binary patch literal 6427 zcmcgw&2Jn_5wDrq5AS;Ib!^A^PA2cY5C@XA5(G$5{6H@f4+-JJ07CL*HJ%wev!0J- zy4S?AR~+)VaOH%!aN>mcPxv$V3%GHCUscb{ek6hSB$7DO+g(-NU0q#W_3L*2xz^qJ z?caYDQ2Ecn?~m}Qf1wER-=c&lwdl~I)TTq5ET-C&IFvT%ut8~)4x5JRP|~8bO^0pM zZcws9hbuBpladalT{>Jfyq2_kbhu{PZA#WD?bBhOtjMOYLE#F0Wzic<-6f`U=qr-u z6=`;>=B6}PtLBz8dsTB=nrjrUSEKcW(ys)rN^_%X?(oz<-@St6$N$E^T^lvcW{I*h z-Qb3=P7Sq>)?}RIemK-oI!=5Y=@v`Aj+Ius77KF^H3{;fx=Jv7WJ$ z{NyA)J`cD%e?0d~e2R6_vL%-=1n-0mO_{B}kD}}1-){Nv!M@Udp=TD5rm=u1 zFk9h=TIMRWy@k&#v-S0t`Rto5Gas8(iX-ZL{81O#gRY-_#6|My!&d5v`l!oX!?aJ{ zu}nU`;=V3+sY!$%VGdEEE}mLbZO7?P!=GRou3TVX+&E!L45`e%Ne_(w@38i-itr;%)p z#Q*F>0<%$xy}DFm1J(KHEUAOHO7jd!9{3#DH7jRSniPh@i)4yu?{O-bIobyZ;;ixu zd};>;wK{gk>R6k059M`h)9RrfFo(^`W)Uy@8~n|9S#gWK3?76O#}0)QWmt|lV1thk zds*dxqc(e4=+Fx$t*}pyIznp9le$7`8j^VKXjMq9n$)9mtoA&~NUa#s8jaRz)R&GO zLqZ$FZP4h7P`gXiO`)#R3%y07Z5mw_&7tE?Rk=o^>#|aZmSt|dCDT}xxhc!Vu(v4e ziLjwy%-i$^7Hhso{tks}GHHw9(daIPSd>HI23@j+?P z2D6pvFjm`ev%ipb-?i)qbIbk~>H8EuB>f2ijB!~S0#W>ZoogG^oiPMDqpUmg=FYq2 z4T-^jbh0jKETb(dYiQ0H>=6R*w{dnQd`LVUPqf5Z!E&=$TcXiexg|Js0ZMb%NGT48 zPwIeZ;i4qqWE&$oqt z4T5KJp(lOC#(r?>pCCCh3@&F(5oG-#!-_c2+spLu92$Yau=QvXC!y3h#VBoSSh8iLi=|r zB6cn{0F*#tDKBq4rs5217VYAm;64?*FA<*!@h&8TF?QFznAsV-D;vb=f2!F}XFS>N znkxu{SGxP~fqM8z-GppcPQ%P*bDP0z!!2FJ@T$Ff40e3%E=t_rFA24e@$t;u!C+tx z!)((;w|Xdrz6kUNk$inym-d942vgI&+m_*+n+CJhiDhdAiho zCR-cidz>E0fG5|1MMw)%fiQ{v*qkKJ0&(W2(e@JEyubFTHoq4 zJ66jw*m2vwVwHb(XJr#~;nX$o(1&UKyDs0FwcTm~ZL+NSYun3h%+qk3ZX;kFIqmS4 zy~L-G3oH2ZztHIH79>|7&Z4N`O4F^bLjq2(;Ti<2zaR}{xg?F3FUXiH4#O}D_zjmF zrU=Sq3IJP8k!#Qls(r756e~57D)9;+&mtNFl3B!SP-YRH0ZuN$v$Yzhl!LBw7YLn} zBV{jVgyO)$IRl@84g+{JfY#8^=mxzlDt;{@23W92-(ZVAumuNQ^*a=T3Hb7bHynP{ zdJA3@d1?aP3`ab6@xXHx=)bg%OR&8hhxdsC8G|&Q|~*>nnm-PQ^s>iEi-ndWEJg~*mZB# zq{;={yENBQ_qunI4hwQ@8tb^osbG#=FI{s= zdKZI7{Gb7nP4y3a>T?v-DFdWjl)DIEXd`G!_~RBn4s!UD-=4MZ^sF_zi%a#Iy(>vg z5`{dg4*YR!c7orS1hty?R=fgdx`>Ykl29aZd2DOVPXeV#ju&J>a?0!lXHrQcb1x6_ zO6oE2_86yo{FEt)Z=&Nw$=1uW#zCG%@~&lGxa8G@x7CyEz$(s6EVcjtV}xY97v`P$ 
ib2f)^QS4Yd)`rt_HmnVM!*Q(sYQH0YE!3~}|L`C0C7`_k literal 0 HcmV?d00001 diff --git a/modules/pip.py b/modules/pip.py new file mode 100644 index 0000000..0aaa1b7 --- /dev/null +++ b/modules/pip.py @@ -0,0 +1,134 @@ +from fabric.api import env, task +from fabric.operations import run + +import logging + +from utils import virtualenv_source, virtualenv +from utils import print_console + + +@task +def setup_virtualenv(): + configuration = env.config + + if env.debug: + logging.basicConfig( + format='\n%(levelname)s: deploy.setup_virtualenv %(message)s', + level=logging.DEBUG) + + mkvirtualenv_cmd = "mkvirtualenv --no-site-packages " \ + "{virtualenv_name}".format( + virtualenv_name=configuration.virtualenv.name) + + if env.debug: + logging.debug("virtualenv.workon : %s" + % configuration.virtualenv.workon) + + logging.debug("virtualenv.activate : %s" + % configuration.virtualenv.activate) + + logging.debug("virtualenv.name : %s" + % configuration.virtualenv.name) + + logging.debug("virtualenv.paths.bin : %s" + % configuration.virtualenv.paths.bin) + + logging.debug("virtualenv.paths.root : %s" + % configuration.virtualenv.paths.root) + + logging.debug("with virtualenv_source(): run(\"\n\t%s\n\t\")".format( + mkvirtualenv_cmd)) + else: + + # run("source virtualenvwrapper.sh; mkvirtualenv " + # "--no-site-packages {virtualenv_name}".format( + # virtualenv_name=configuration.virtualenv.name)) + + with virtualenv_source(): + run(mkvirtualenv_cmd) + + +@task +def setup(): + """ + install all packages via pip + """ + configuration = env.config + + if env.debug: + logging.basicConfig( + format='\n%(levelname)s: deploy.pip %(message)s', + level=logging.DEBUG) + + pipinstall_cmd = "pip install -r {requirements}".format( + requirements=configuration.virtualenv.requirements) + + if env.debug: + logging.debug("with virtualenv(): run(\"\n\t%s\n\t\")" % + pipinstall_cmd) + else: + with virtualenv(): + run(pipinstall_cmd) + + +@task +def install(package=None): + """ + install a packages via pip + """ + configuration = env.config + + import sys + + if not package: + print_console("you must specify a package to be installed") + sys.exit() + + if package == "--all": + pipinstall_cmd = "pip install -r {requirements_file}".format( + requirements_file=configuration.virtualenv.requirements) + else: + pipinstall_cmd = "pip install {package}".format( + package=package) + + if env.debug: + print_console("pipinstall_cmd : %s" % pipinstall_cmd) + + else: + with virtualenv(): + run(pipinstall_cmd) + + +@task +def freeze(param=False): + configuration = env.config + + msg_help = """ + pip.freeze takes one of three values: + \thelp - this help message + \tTrue - update the pip package list the freeze output + \tFalse (default) - print the freeze output to the console + """ + + from utils import booleanize, handle_help + import sys + + if handle_help(param, msg_help, 'help'): + sys.exit() + else: + try: + param = booleanize(param) + except TypeError: + print "the parameter value you gave, \"%s\" , is not" \ + " a valid parameter." 
% param + print msg_help + sys.exit() + + if param: + cmd_pipfreeze = "pip freeze > {requirements}".format( + requirements=configuration.virtualenv.requirements) + else: + cmd_pipfreeze = "pip freeze" + + with virtualenv(): + run(cmd_pipfreeze) diff --git a/modules/pip.pyc b/modules/pip.pyc new file mode 100644 index 0000000000000000000000000000000000000000..61f011af1cc27874a7248e1311494354f2e99025 GIT binary patch literal 3669 zcmcIn&2Ah;5U!r_?t0gDoJ8ON;b&xFvPQ8j;1)#@5JC`07EBU3_+pKBde^g_Kc;&e zoOp#$!ifWKz=bo9!I1-xz%%dw;j5Zi?|KtBC96!QySlo&y88R7%I-f)os}Q{-04yM zbMgPj_{^_pLi|${6D5jv6(tVsI;2?YQ0!7NN4s;?*rm8ZyA4?}M{$#q7VWkunWx?P zN~0mtHtn`aX@|llJt2CGeLKX%)_AxeLzlvNdZI*Oi6#0Rh3&CqnL?NDbUQFHdWv7y zLC>=3Iz6&;JP6ETSB<4bnQm7)7a%%{3R?y-RQF6)7CmiwqlvUyw**;UM5*2DWvR(x zEiU4Nn)q(wGf&Z2qM@Qu)p4lUWIKj)>^C?GX7aE>kI7Lf{5D6!CWQ?;!Lk+&=P{v1 zN_N5GzeQV|Q!BJ-1wK-|i8!eicg~3CC)-VHx5wfI8ZMq$(3zT9Ix_>+CY^}i`P}pk zvgcqU{jv-rmg^a{0|+1!t+gZ^8i$TG`2 ziN+OJ`XI6g-qgJ+aCX?w=DMpm^|qGLpu8h2GXZqYJZDWNCW5l^5Ai_pI3phe0~XwoH`d zvCgC3VVQ5{#c<2?iYUi?KiDs#-d2)@WvtCs9%1@ej+;dpliOV7Go7~6R&CW(OXw|2 z{~X%!cLDti&Z1gsb=0$D{}CI!P_p-?$gV09GTmCIbOV2dh@e}G>DB_>_+iX@XgUm? zC^a^SV~@X6Y8*v@2X}~qJaeW0d`d!bnuR`tM@YdF$jor^UL@QQJ?qo89ftR`j>ckiY@;Y3=xYEl%u>h>?MPVBa(oL zLz;OF4K=0^q$&h0I6w<$8s^byYWNL<8ZAf!k_%#oMh*vF^C7={=yGyXCk|%|L|9G_ zm_&v+L?-x_lLYbx(j-8Dn2c<}slui7A}33plCWP1`KWm+ev*xW8+U@I<}{wFGk1`g zfG4L;q2^r}dlpa6N0a2*ZSy|VkF(NCN@G2f>t579_JFx6HhVVn_VpM$IyCIYjSWE5 ztfKdSxZ3MSF(NtV=xK0?*t|Nk_)&e{NCMs%{k7He-5b&#N5H5ldjVoUf{4p#m}Uas%o2++h1S@G?S%@?qgN)J3|^+1%?^eZ5nn2*iA}N7|qkD zNw$3o*#$bm$)L$p#lfLt+x&r6FbhC!_6w~aYR?9TC=Ieyds*MJ2M~HkL0oEcO*GpF zI?lZf%tXeMeqDW3vv-S9W46pgE&%L-<|ulK>No<}Sy-Qyl{PPJwes^IHrnfkx*wFW zUB@O;I6kY++~7Uxic(fj6|fAq{S2jMkVX%+B=3VD4P(8>)(T3wV?N<`W&47H^R_Ok zYkEw|L2#t6dfw`qSq1X1dbnzeIwN3}D4Zm4xVSS++GcjJ`uVIrWN>c4j0=QHL zrHx|azXLtrsv*U6|>Y5Rxw)Z@&Y%hLOf@<#0=!hGuv9JFOqN% z7FAZiMEWs{szdtdNe)E;(P}y?YSFm}{<-SBT2d~`i3UoDrm(SzWFYmK{7F&OD4RhZ zRX&ryM>x9G6IrgSGRa7ut5P8c$*QiBn8WeOU*YC;G=hO`Al!eOM?5Lu^RJ*cafb9> icyYxqbG<6KZtztMhr$(g9uWzxyS%=9$yLjjUG*<3;YXnW literal 0 HcmV?d00001 diff --git a/modules/setup.pyc b/modules/setup.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1abe753c9180b18be0e0f652d653782eeefdb4ff GIT binary patch literal 3572 zcmcIm-A)@<5T0Ed6BF|jl0rzE)JYT7YSs9%QiZA#K>2G7S|>_HRauL7F>CByXU}mI zB)DmF(W^d3Z`&8>6ZC2N0PXk9Sqw>#dIfL}b7tq9nKSd9nJN7`Y5E zrNkwPDoRu(npR?lTiT6FP?r3QpN0?0(ug>HZOdGQ=AhN}K#|L_j$wM3V4G9eE6K4Z zM__+VokyN5J$wY6uoT80`*P$l zOgBsSlo-Y~wA0|{aTpC_W4;7!_Y+gk<5%M(k4-&EM`K$bg?Tuv568yVcjJ1NV(iuL z8FyK|AFpH%jUF4nNvypL#%|a>7>_!t@64KN8{A;0FzMNL=7j1D5@Q2?XkfU>JOkBy zVdC60^DIpdo1;AI$6afhQ9n$3S<@uL(I6fr)OUZB_nW4hCnJ2{3wQIR+Z<-mco3VW zi7f>VExqB)W7NzdCh_O}idXe&-mG8qX7ImDF8#wRu49}>%Nxv%n896EwZYv1+3Zl)AQ)fQnsLRr? 
z>4CFS?5+?UK;cnEmH<)@$T*tf3cI~mO?jyir{%5p0GW?d1ty}Zu^NSTU&AoSy5T^n zy#$^zjMwlWO7i+wBZRs!hjoA<>NDen-I}hK_MsXh6BuhU3TtNxEE7CE* z2uxBicJziFy)e<%iC~g0FA``04PM4D*Dwj-wd5~(i{4GY>dkvqf9@1yv|K^<8;Jh_ z89W4d0X9Gl5u*Sj4PXLVnyW|y6adf((txDn^Pp2YfsU;ajYJeFfDzPrP&(spKBPf- zm;q0OOccM42ia&Cr&hykY}iZE-Uo1E2o+8NF%oInlkAl(AVJzbmV-3T{yB6uh+Q3}`yVEr}fmJ|O6| zCaz*>Y%jQluNR6Z;`(z8vxLdvsp8d;uxkDy&@}7McwN>RzLp81o`d5Mg_;3G5HebJ zm_}UgSw-$SSyhmUre!s{G~raSLZ&LDFU#r)S%c`T9F!zC63tB{YO;zN70pi=3$n7z zJy$f*>2{LbzNq!;Pl>LoG}+g%eNLi}CJdYxp}G|ZEJNfvMj70d54bnL{rDa3Cxz_I z54e=QrQA=Q+NeHRQRi)4-6_5y%ff=s+-V@&jl1x2OCgL0BeETAJ=t7;-f68p+f?`$ zbxJc^8luE5yeGwCrRk1&$)(8s3*ijb zjYlf8pBbxZSF@e22-p+D9&c;%JXzAtj*7=`@YW1-B>@(NHrx$SEcv>lta|{(v}j{% z{pn`V+1?7CtF}>=yJx?#_I&NZT6?nt1$<+Z_1ea>$1Ud%)^uLMsL5zJ@V?KH-Ck?A z-Ql4zmXC097(H?7+bG@(#{+wgh#sN#K3Qu$+;aBnRkT~s3vakju$TU6J$&5y!O79g z6glgSet4bab{r1S$7xd++`(7XILN~2y(GdX=NgL~S!-WCR-}f*a5%Wov_K$!gJHhF zBpy@2qBrNw`PaNFe#u+b|8pokEI&*5s=vOb3YMrch8CY+K@`OYSjQh4V{|x37;Kuc zM!HA4gUSQC#HM*T#hT+GtrRWH3ObWzHZc|HPqX^qskm0hwK6O$I1iNwT`t~KN&4t) zkd?IKF1i@#X=_?8IvD0O#b(>)C@7D&8suo35%A8zQ%r`}xPJ&=Pudk%U)Vq@s2B)W zZL|V0iE8f=u*95OGI(S(9*$g%_?}`~D0pPh;>43Q*t;6{4j;Om2lKSWP^4Ta0PaEG zj~K3%MQ5>q26h28!SxvpJ_ki~(XWN0#MNJQqxwOy&Fi+%;RoEH-AllX5ZvWNC8xJg g>H>Om>RWXk`?q{knlzVKMZsC=j7NH+8EbB`DrWKAOB^ zZkI}&NC;57&wVcXH}$!HNZ;Fj-|QZbQiZ}TAW`D-W_E9HcV>3xo4LwA78>i{|Ls{% zxlaYZAL6UO^puM5@2P=OLr?8_YUryyUwP#EYEV(b8MQZK>53Z6s$o^_Rn>4#?af*4 zj2hI`UQOniRfD=3Hq_p{?W+piRC^1So>PM>YS>bHEj3(JdyA79mek&oifZb$QhUoP z_Ej{aqPlwRsaIH4LpTlgl3W#Xz983xY!>9YkP8KQRmdv^c}>VxL2d}SSddL2mkRQ_ zkjn*mL&%ka+!S)PAa4q}R*<*2g{SRxh&lNeK5ZXYhLVrdBQryyt6#LC0A*Te#-l;j zjXEYCjs{&5+uPeMj-La&mko#AG_nkii=)KYyVyO*4iA%qm!^*EQJ$ox zgWG$2+N}FN#;^V!je%5^I`L=*eYCzhR;ur-enmx<;(7*S{1ed&$IPl@9~|$bqRg!7 zS5<#bM%GlnuKEpiTv4xlbuzjUWybZyplRd260J^y5@O%9=D5j zqNHa`71TkN54*-tsP5U)&m~H~eXgN;Gc8@5NG|#r1W$RBqfG->dOV8rqeN%9jNqc$ z9AOsFkV+26dDkRaDwB`8d3UHot}5j2Lk?@>Ya1QnYUc|b=Xxj4()8tyTcX~H`rY&} z+tCT^DIO)g7vs_PDDUs+UY?AM-Z|**=Sgp8m__43tar+FZI50WKChFciAlPHn-4Uc1Kr?}P9tXoN#2lxvk$Br3 zi{Jp(PASR_BFgb7!nx8s5J?(X$_n->Si5a5oZ?h6+NvVFL3#zv)H6E8(c>L*GJC@) z+#q3f4VB5n7p|v}&%2MWrfsNssK#%4wYi4ZBh3%ZQq;kt0(C(0(1HiKKW$gQ(mz4- zA5tCbBs~ne``J+(xZS>oO}?*9H#b(Ke6*0(C&^OE4a?O9S!Z|$kN$v<=jK%P&G-6a z4F4bYnvLvMsb_7n(dF;-E}qPDbXhxd6G?3JQz%EDKMGyT|r`WJU9rxPRw(0gK7LV4(n6|37oGN zF@&3#@$v=0r*_W~0FlUJ@|V$A1mYR^CWM$s!8fsgSKv&@Lm+4Va@>nV`aBljmT-kU z{>q=TK9xiTUs?NeT(}U23yEwDaR_&adli-ch)@C^7v7d=$XASXAYG)hgPt+?5o;mp zRn$7YbGpD#LXd?Rs5}p#>1mq)r1aLk?qHx{3Bm9v83tJ@7VvZeF2oQ5;v1`chPaRi zK>&m%Dy69e1SSlr@_8@V(h~afc$k^E!_Hlay%HD0o4j8lBAtH}?~e~duF4Q|O$SJU zqLh?G#W3I)KAfO5&MybU$Zj({R1l%6GB(z|Wxs)TwbDa}=#i@wUH%alr7i^k=l2DT zO!Vhbf6!=O5HBDKKyM$27i67EEjbOd{}XttfoBP7X=)CfJeT@ zS92>c2{DMe*YYafT4@%9>0F$c#T{(n4b9>ek|^;3&OQXvKB7^;K#(4Z@(a2q^ej>! z+g}%&$&sbSf_MwEB;-B)1=YVI2G&yjMb(#N89s$0>h>nvu`H~{G;2jz^Om)$`fIAc zuKHJn+qB$ks=uN7*M+q(&AK71D`(b#NiZBs+Eh_XR%JOdEzKS5{ev?+rXG!xb8=>bPb*zq(m*yjv4q`dXbM z!-=%amWpnlT}u?mhVO`0?<(__itb5!UxcixUb&3fiUGGK=|b92=Iw$oZBc@GV0E#K z*tCV?qQ607y;dG0(~^i(VzG0fi3}f&4X{jrq>Ra&xGJP}g9sTO3LL>8NnfxZz=!<~ z(*#kHqf(INFJ+F90nVj)1c4#bypSjPRMB9JOj#UHQAIe+v+>9<9NK)jqhn)``{<5c zfq1Zeq?gQoar04}OIYk2CAk@Q2dL$A*v$wy-QCNgSn|ecrdzrpT}I#P(BlP%@d zL9o=FY;5c_lgMuQah``i#CY-Uyjx3=X;(ANbDP_Cl?h5ghRDkGeUOvN%K3#BWYOt? 
zW~A?pg6~8la_>oi%F5`ay_RxD+2`sgu2(ZCZ0UNP(M3yc>E5wBKc|M9ij$aUB?2o5ADU;7E~&G=Ol44Yey_P>hX8$r>2}kTo)?_nk4cGa)vl zNy>7p0SPE|HYbydfpH_wsKR$}9X@13iG@388)cN5vMJgHKl0A;@OIKn}EY#!e0}=tCgm==vTdEuj>Bh zz*zuVb$JJsMCm@pSN|D}1ts|n<{f2?AoqIh_S_h@EbFkY+8cAvx>AhYyL9i#Y?(4Z%HtS^|hDml8{ryb?iK z9(_~;dH;v_4L@Qdk@zvMK4J4In%y?B;#thI3!YSo_PaXiJ%(FN4n?$|eERVhPtM|) z5|;dma}jWq07Xzv_zUtrXLE(kXKen!ro%>nRjCuYDZJ8xqAW-$V3c4brx3*|MZ=rHNljO1*&W_PBNbaI^cC$2=I<|!g6k2$n%_bY7<4|&pcX`FI9S+%~XiAaF)dkhZBx`zP9o+{M qo95{ygk7&RecI5v*R0gnDqd@&RhPe7tJbQv)>>Cvz1GdvlYav$uZ~mz literal 0 HcmV?d00001 diff --git a/modules/testing/__init__.py b/modules/testing/__init__.py new file mode 100644 index 0000000..f38bba0 --- /dev/null +++ b/modules/testing/__init__.py @@ -0,0 +1 @@ +import maintenance diff --git a/modules/testing/__init__.pyc b/modules/testing/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f6c072876c3f3369670a0ed15b077ca1cc67e04 GIT binary patch literal 206 zcmYLDK?=e^4BYBP6v1co)Oe@UuQZQxzPN$%Gj~rqAX4 z5$WX34-=hRZCNP=t-@T5Ie)~y99?=Tc4eV5FO+VMI3qjuSn6v2Bu8E0AeaPFWFDYP zu|=N+LhHDR5U_R`#bRFtyo2~eow^rSC-QKMp}(SlCNgmY))G^;w(5pF)6?s3U^BiV G+U5;$8#b8$ literal 0 HcmV?d00001 diff --git a/modules/testing/configuration_setup.py b/modules/testing/configuration_setup.py new file mode 100644 index 0000000..10c58d9 --- /dev/null +++ b/modules/testing/configuration_setup.py @@ -0,0 +1,181 @@ +from fabric.api import task, env +from fabric.operations import run +import os +import modules.utils as utils + +from modules.conf import create_dir_top +from modules.conf import exists_dir_top +from modules.conf import exists_dir_sub +from modules.conf import exists_file + +# import modules.conf as conf + + +@task +def test(*args, **kwargs): + """ + Test functions in conf + + Keyword Arguments: + + funcname -- name of testing function to run + """ + # configuration = env.config + + # dictionary of legitimate functions that can be tested + # when given the param name + test_values = { + 'conf_top': test_conf_top, + 'conf_sub': test_conf_sub, + 'conf_file': test_conf_file, + } + + funcname = kwargs.get('funcname') + + if not funcname: + if len(args) > 0: + funcname = args[0] + args = args[1:] + + if funcname in test_values.keys(): + test_values[funcname](*args, **kwargs) + + else: + print "\nTest functions in this module, acceptable values include:" + for val in test_values: + print val + + +def test_conf_file(*args, **kwargs): + SPACING = "\n" + utils.print_console("testing exist_conf_file", + prepend=SPACING, append=SPACING) + + confargument = kwargs.get('conf') + if not confargument: + confargument = args[0] if len(args) > 0 else None + + exists_file(confargument) + + +def test_conf_sub(*args, **kwargs): + SPACING = "\n" + utils.print_console("testing exist_conf_sub", + prepend=SPACING, append=SPACING) + + confargument = kwargs.get('conf') + if not confargument: + confargument = args[0] if len(args) > 0 else None + + exists_dir_sub(confargument) + + +def test_conf_top(*args, **kwargs): + configuration = env.config + SPACING = "\n" + + utils.print_console("testing exists_conf", + prepend=SPACING, append=SPACING) + + utils.printvar("exists_dir_top", + exists_dir_top()) + + utils.print_console("testing create_dir_top", + prepend=SPACING, append=SPACING) + + if exists_dir_top(): + + msg = "conf directory already exists, move conf to a temporary " \ + "directory, and test out the create_dir_top function." 
+ utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + + # + # command to create a temporary directory and echo it's name + # back to stdout, so we can store that name for use + + cmd_mktmp = "mytmpdir=`mktemp -d 2>/dev/null ||" \ + " mktemp -d -t 'mytmpdir'`" + cmd_mktmp = cmd_mktmp + "; echo $mytmpdir" + + # + # create a temporary diretory to store old conf files + + tmpdir = run(cmd_mktmp) + + # + # make sure we are working with a legit path + # otherwise, just kick out. + + with utils.virtualenv(): + cmd_py_isdir = "python -c \"import os; "\ + "print os.path.isdir('%s')\"" % \ + configuration.paths.conf.remote + + # + # take the output from this command and booleanize it + + output = run(cmd_py_isdir) + is_dir = utils.booleanize(output) + utils.printvar("is_dir", is_dir) + + if is_dir: + lastpart = os.path.basename(configuration.paths.conf.remote) + + path_conf_tmp = os.path.join(tmpdir, lastpart) + else: + + utils.printvar("configuration.paths.conf.remote", + configuration.paths.conf.remote) + + msg = "the original configuration path is NOT a path." \ + "Continue? y/N" + + utils.prompt_continue(message=msg, default="N") + + # + # now move the original configuration directory to the temporary + # location, and run test running create_dir_top on an empty + + msg = "moving original conf directory." + utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + + cmd_mvtmp = "mv %s %s" % \ + (configuration.paths.conf.remote, path_conf_tmp) + + run(cmd_mvtmp) + + # + # create the new conf directory + + msg = "creating new conf directory." + utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + create_dir_top() + + # + # testing on empty location completed, remove the current directory + # and move back the original + + msg = "removing created directory." + utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + + cmd_rm_created = "rm -Rf %s" % configuration.paths.conf.remote + run(cmd_rm_created) + + # + # returning original directory + + msg = "Moving back original directory." 
+ utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + + cmd_return_orig = "mv %s %s" % \ + (path_conf_tmp, configuration.paths.conf.remote) + + run(cmd_return_orig) + + cmd_rmtmp = "rm -Rf %s" % tmpdir + run(cmd_rmtmp) + else: + msg = "conf directory does not exist, test out create_dir_top" + utils.print_console(msg, prepend=SPACING, append=SPACING, sep=None) + + create_dir_top() diff --git a/modules/testing/maintenance.pyc b/modules/testing/maintenance.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e4c7bf7f5fa483c7afb35a2095b571816b7f16b3 GIT binary patch literal 4220 zcmd5ei?O0xU z-?B4OPL%uw^OfT_z#rhp@dM!LUcK0Nxhj7lg>8+dXL@@2<>~2m?cW{mZwG&RF{0&< zgYTc?WB!F9!k?mqC{;96lsYtYNb#yeNsZDi8g5l|jgmSI>v95fmnIDwHiX=wWSi0^ z4Vx8fU1*PnUPZe?w`ka^=!Vd38n#JkMbS3>mFN-*bSP@_e7HlACzCEkEt%|6RHNs; zHgt~vgP$IoVYhe)&AF{%qO(~~@d6X&G&`!O+n`3J4y`^O<=N?R6qm;~FE*R{%h*_R z!%Z_isZQ?ljaN>d#)%dm*RjDGe2S0x2Zp#tR^0s9!dQnMTZfH#eBsbS(WFMv7F&N& zqeYF(OSWnu{)p;yzD4CrCNaOq9GJOua6$CyC1u1X77k77bV+pXQu!Aqv3L)R5dIaB z%I;ItppSp~k#UERAfeW^<^6D#k0p~yOHCa{YNS=?M&$Y4s=`wKI~W?`!R!$V)b^HbkKWSouHQhl5Ih>*y# z+qhUFgQ#o>INGvavct}mt(EW?I}~%Y%@YjXH+8mWvGLP9nkM>@AC5-4u;EFf{aKhy zHCr`GrjdSf)N^s(c6fl26$)69>np6y&NUuUiQ_!hRuGrK4~)liJvTiDfly&NHkPkA ze=XA%Z}3Hx9)ZU+iviuk7s+7$fZ5=;Mwez#=2Z+qgyw9ew9RlkyCU`f1V4!9qs0N||FF+aNrE(NQ8#Iu)hW78-QUDVx05L=Rvn8PK zWyk{J6ac1eoe>Nseutj-8o2AoaHu_V2jkKrUy`qim&Ahkic%Lki^Sn!Ayd|yF(ino zg3ZC1+Bh*1J7y;uq_#SWI^L7#_;Glni&$oG=%L6}Ok}m&5v8oaU8N+#lihx%en3qZb zv~%7t<{iZwTbwsYgG=NJ%JGud<*IR=S6Ir@!2^#+z2d{uew7aoJksb=)f8&$1yTzo z+@m{~0mL9UJzY7v&G-HN8PcxBNjz!N#G^@z_AeU3+YsJu;ce3b>^RyK5?5lSDcZik zb-(!?h_a`UV3Qr1bg#+dV;(=bytPXUmlhtMyl7GOla;v6^&{Mr?5u3OMG++8PBaC# zptPf+z11E(^sV+w0bHB4Xwj(y+A6lt#?GuP9+DIxUi%vYh78(9#M4n2I5!7@Jq`{w|Z&0Yr2-SRfBXTWBEli zoRcfIH#Ax1754dU^wD*peC6fb4-+(F(Y$)U%_G$4nf8STO+n~eofdf+f^mHW^*f8O zfwm-{THl_5|CL!O|u1cv^`@A+ZoA${8Pa#*(@&YG(<}` z!!wL{!{J71l!&XMWSQ|KN7oQ$@t-wbU!vSd-@yEG$4O|=H>X-wCpkI|?k&*FrG@ky z60@L&wH|}*N{7Lvy;6G#6De~7?bM8`);NuhfnA7_9#&vpbtBj)=Es6S*+beCQyZ}3 zMXe+>wg^jGH9$~y+4n*ehib%vXDpbjFH>cDybP-tAWPa#%j}pDv_4d(IP?~)Fn80R z;A2iOkZLN|;SQ*&yz1L=odef(yn5U1sG74|Z>z4_M}vFcxm#|8CG1s~-4Ba6;F~L?@$PaXFLagMMlcn;&j&fA%f7rX{iguZ zGE`T~SK4$zMTJd OzVTb{e!tmnw*L#@Va~?@ literal 0 HcmV?d00001 diff --git a/modules/utils.py b/modules/utils.py new file mode 100644 index 0000000..a8a3f37 --- /dev/null +++ b/modules/utils.py @@ -0,0 +1,387 @@ +import os +import errno +import logging + +import fabric.contrib.files + +from contextlib import contextmanager as _contextmanager +from fabric.api import env, prefix, local +from fabric.operations import run, sudo + + +def printvar(name, value, exit=False): + print "%s : %s" % (name, value) + if exit: + import sys + sys.exit() + + +def loggify(module, func, prefix=""): + """ + I'm tired of rewriting this logging code in every single function, so I + decided to just dump it here, and return a logger that can be used and + thrown away when it's done + + module - name of the module being used, ie 'nginx', 'deploy', etc + func - the name of the function this logger is going to be used in + prefix - anything you want to add to the front of the logger, ie '\n' + + returns a logging object + """ + + loggername = '{module}.{func}'.format( + module=module, func=func) + + str_logging = '{prefix}%(levelname)s: {loggername} %(message)s'.format( + prefix=prefix, + loggername=loggername) + + logging.basicConfig( + format=str_logging, + level=logging.DEBUG) + + return logging.getLogger(loggername) + + +def print_console(string, prepend="\n\n", 
append="\n\n", sep="-", numsep=44): + """ + helper function to take a string, and format it so it prints to the console + in a way that is pleasing to the eye. + + string - the string to be printed + prepend - defaults to two line spaces, can be anything + append - defaults to two lines spaces after the string is printed + sep - the character used to print out one line above the string and + one line after + numsep - number of times the separator is printed out on a line + """ + + print prepend + + if sep: + print sep * numsep + + print string + + if sep: + print sep * numsep + + print append + + +def print_debug(debugstr, module, function): + print "%s:%s:%s" \ + % (module, function, debugstr) + + +def executize(config_execute): + """ + A couple of times using fabric I've found that I'll need to switch between + sudo, run or local depending on where I'm executing the function, because + repeating this code in every funcion that needs it can be straingint on the + eyes, I'm putting it here for use. + + config_execute - a string that can represent the value of 'sudo', + 'run', or 'local' + + return the fabric command corresponding to the string value + in config_execute + """ + + _execute = local + if config_execute == 'sudo': + _execute = sudo + elif config_execute == 'run': + _execute = run + elif config_execute == 'local': + _execute = local + + return _execute + + +def booleanize(value): + """ + take the argument and return it as either True or False + + if the argument is neither, return False by default and warn the user that + there is a problem + """ + + true_values = ("y", "yes", "true", "1") + false_values = ("n", "no", "false", "0") + + if isinstance(value, bool): + return value + + if value.lower() in true_values: + return True + + elif value.lower() in false_values: + return False + + raise TypeError("Cannot booleanize ambiguous value '%s'" % value) + + +def ensure_dir(directory): + """ + Create a directory if it's not exists + """ + try: + if not os.path.exists(directory): + print "creating directory: %s" % directory + os.makedirs(directory) + + except OSError, e: + if e.errno != errno.EEXIST: + print "Error occurred while creating directory: %s" % directory + raise + + +def ensure_file(f): + """ + Simulates linux 'touch' command + """ + if not os.path.exists(f): + open(f, 'w').close() + + +def upload_template(filename, destination, context, use_jinja, + use_sudo, backup, template_dir, debug=False): + + if env.debug: + logging.basicConfig( + format='\n%(levelname)s: utils.upload_template %(message)s', + level=logging.DEBUG) + + command_msg = "\n\tupload_template(" \ + "\n\tfilename={filename}," \ + "\n\tdestination={destination_available}," \ + "\n\tcontext={context}," \ + "\n\tuse_jinja={use_jinja}," \ + "\n\tuse_sudo={use_sudo}," \ + "\n\tbackup={backup}," \ + "\n\ttemplate_dir={template_dir})\n".format( + filename=filename, + destination_available=destination, + context=context, + use_jinja=use_jinja, + use_sudo=use_sudo, + backup=backup, + template_dir=template_dir) + + if debug: + return command_msg + + else: + + fabric.contrib.files.upload_template( + filename=filename, + destination=destination, + context=context, + use_jinja=use_jinja, + use_sudo=use_sudo, + backup=backup, + template_dir=template_dir) + + +def get_upload_template_msg(filename, destination, context, use_jinja, + use_sudo, backup, template_dir, debug=False): + + command_msg = "\n\tupload_template(" \ + "\n\tfilename={filename}," \ + "\n\tdestination={destination_available}," \ + "\n\tcontext={context}," 
\ + "\n\tuse_jinja={use_jinja}," \ + "\n\tuse_sudo={use_sudo}," \ + "\n\tbackup={backup}," \ + "\n\ttemplate_dir={template_dir})\n".format( + filename=filename, + destination_available=destination, + context=context, + use_jinja=use_jinja, + use_sudo=use_sudo, + backup=backup, + template_dir=template_dir) + + return command_msg + + +@_contextmanager +def virtualenv_source(): + with prefix("source virtualenvwrapper.sh"): + yield + + +@_contextmanager +def virtualenv(): + configuration = env.config + with virtualenv_source(): + with prefix("workon %s" % configuration.virtualenv.name): + yield + + # with prefix("/bin/bash -c -l 'source %s'" % + # configuration.virtualenv.activate): + # yield + + # with prefix("/bin/bash -c -l 'source /usr/local/bin/' \ + # 'virtualenvwrapper.sh ' \ + # '&& workon %s'" % configuration.virtualenv.name): + # yield + + +def generate_template_build_path(section, template_name='conf'): + """ + helper function to automate creation of build path + + section - the template section we are building off of + template_name - by default this is "conf", but can be different, for + example, the 'database' section has 3 different template names + + returns a path to where the template should be placed + """ + import os + + configuration = env.config + + conf_section = getattr(configuration.templates, section) + + conf_section_templatename = getattr(conf_section, template_name) + + build_path = os.path.join( + conf_section.path.dest, + 'build', + conf_section_templatename.dst) + + return build_path + + +def generate_template_files_path(section): + """ + helper function to automate creation of build path + + section - the template section we are building off of + + returns a path to where the template jinja file is located + """ + import os + + configuration = env.config + + conf_section = getattr(configuration.templates, section) + + files_path = os.path.join( + conf_section.path.local, + 'files') + + return files_path + + +def print_run(command, prefix="\"\n\t", suffix="\n\t\""): + """ + helper function for when I want a string that has the form + + "\n\t run(some_command_string) \t\n" + + where "somce_command_string" is a bash script commmand or something like + that + """ + + return "run ({prefix}{command}{suffix})".format( + prefix=prefix, + suffix=suffix, + command=command) + + +def handle_help(param, message, values=None): + + values_default = ['-h', '--help'] + + if values is None: + values = values_default + + if isinstance(param, str): + if param.lower() in values: + print message + return True + + return False + + +def is_help(key): + help_keys = ['-h', '--help'] + + return key in help_keys + + +def link_create(path_src, path_dst, debug=False): + """ + takes a source and destination path, then links it + if the destination path already exists and is a link, + then delete it. 
Otherwise sys.exit + + path_src - source path + path_dst - destination path + + returns: if debug=True then it returns a msg + """ + + from fabric.contrib.files import is_link + from fabric.contrib.files import exists + from fabric.operations import run + + cmd_rm = "rm {path_dst}".format( + path_dst=path_dst + ) + + cmd_link = "ln -sf {path_src} {path_dst}".format( + path_src=path_src, + path_dst=path_dst + ) + + msg_debug = "" + + if exists(path_dst): + if is_link(path_dst): + if debug: + msg_debug += "link already exists at dst, removing\n" \ + "link_create:cmd_rm : %s" % cmd_rm + else: + run(cmd_rm) + else: + msg_error = "something exists at dst - '%s' " \ + "- and it's not a link\n kicking out".format(path_dst) + import sys + sys.exit(msg_error) + + if debug: + msg_debug += "link_create:cmd_link : %s" % cmd_link + else: + run(cmd_link) + + return msg_debug + + +def prompt_continue(message="Do you want to continue? Y/n", default="Y"): + """ prompts user if he wants to continue + + Keyword Arguments: + + message -- ask if user wants to continue + default -- what to do if the user hits enter without giving a value + """ + + from fabric.operations import prompt + import sys + + prompt_val = prompt(message) + + if prompt_val == "": + prompt_val = default + + if env.debug: + printvar( + "prompt_val", prompt_val, + not booleanize(prompt_val)) + else: + if not booleanize(prompt_val): + sys.exit() diff --git a/modules/utils.pyc b/modules/utils.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed1b9c3cb64e426e90795d0d178ea1a6218410ca GIT binary patch literal 11490 zcmds7O>-Q_dG6f>0qk-?kQ6^mQHn;inO!CzASeE)VwrXnik2AJP&|+<38~6@u+s|+ zusgGwo*}pf5myWyQ>9Cex#g0hZ$9LbRHe$N9CAqImU9liuJ3*T?z(hU@3a81DzsPM-8PMp;}At=lMzWa!q%lVTY&j`JifojpafO`YuekDKRKS&ely&k3k_z^mm^zpdFR1;30iPg-;0@asOo9Gr zSu)Qd?8|!8s~TS{s2>lviK*Q$aIUQYKTkTNph$8X2iYLV?O~o2NxB;pdx;B%+3qeM z`dMs)Bn|9Sn@<9VrlAc6W!f*2EL{s+7Cg~hvF#@@W-78^zjQ?qm!olz6v3X&?OG6} zF%~GwJPjgEZF4LW6+u5rgB=@`4nxsNQx$u8c8HFL(IhzBvni(OxFF6_+tPc_U=EHwJWa4{3egrk4WXQRL?g_z}e_F*v}Jne78Hqu?@Msd+tGSyY-`~1mOw`t{T$1XaZN{y=ua9Rmd6PgUL%++g(LrR+_cddJ{U?RN1OK64z=$ zd5pQAtfA~rRg`a}rr$;ddI^)&F!@cY7mkf8+_#he-G7XqeTNm}TpA{v}jzxv1L>u3W@~F>IG9n;elm}S}Z3AJV zF51bSTJfoxc4}*8lU0@Z2ofUd({eOh6*i|F?O-ihodg=>=d@$cP-HR`72m4$sm)-I zCff9ZO+k`+c~YcG6@%66xh4OKb=BlaI9I)(N{aMeB}wZPQL3j|xlo6Z-I#0r9iv9!-aMi~ORIt+ngebS^V$qWRfjo3wjc=HuuEy(?+h)(l z)mWMZx?ry3{(Y+YZ$fcPsOMauN-3(3C*YwWwu7vM?n#%QbcVwqwcgLV!=&i%L05}I zYrQvOn6(xl=n5c=3dNwUR6eny!2$X7&17Iw&h&YD)EMt8sp_CzADpMU#J% ziNdK?C<F1DhWmXqKZ;Vk33f6#?ZT_x6?yRNe-vk`s`100V8E6HGSrLcnr|DIB#&?U_r1yPHIf5`XDbPC3J z3G9n2qO@;CD;M?=UUq16HIbES5_c5Dy+0Y-$9bOl+%Kd6DQe^TJ?VujXu`E%)o`vN z%$>}6Ho%jQJ2U6xhol&Jm6>c%c#&2@SRsIrKZm4K1|GhPuMo|t zok{11={L**SaZ-axivM&e+{O5|AGOR_7}|o;=e(at6}udjrn9?(wg}QiCbmw8I?;~ z9_6s8jO=3MU44WMlK^6>1czdPjMWuRJ>^&X>`o&+>&|r7@kDhTLDuh=uvjrHdjc== z#r-$CjCv)7nU~OURO~6T@C(ZHj1Y2S8K;2cy)DfzF|c_K+9*JeAAj^@>wRy)C<8O9 zB+&NUPu!-i%xw?L9%FTHwH6w+tFT(vaO(W4^QKBtO?2Lj-p?L(mu3kEj3bBbLV|o! 
z!3%Yq7)QE$D0B{6$*3IS%#ac%Y56SZ6j|Bd>r6ey+y)$O`V8lr7#V(x1v3O`R0bC; zR`!RPvmRF=je~RL7^9quV*?1s-{8?lISW^7{bfAy2${`^O@EJyXH$NcvH}eNcL>BZ z^hh4n1;GGm8T(U{cWHk^im=35QFM5PZOo@bX4W1jpw;lP?2L*$+0h^}*cFt& zv&TTW4Buk$E{pH8c$>vLES|9VGZsHUARJO!e-i3I>~_0C8z1@Ry8%Eu&)v+~i$EB!Dg z;e&ex1&lAu%4p ziCyQLR_z>sZ7TbmhMuSD-$>sA3R7RHX_qt+RA=q-d#HF91NRLVb7IVA9aDoV#>U}I z;D_N^m<^m^!RHvkDG#&!0LkTDr`XmTBbpnZRfSuq&XLVIso^qSco)oE!}l%$-@v%f z+4kG#Soi@}`w#s3D*ynEt=i3+!UNLK=dm$NBl{k)+h3sY9(V}w0oj(KRonqEbithn zOa&rT01iO^g+0O(^AW}a-dm9=><25jNJ0lxmd#1S9K)_GP6N~L;(!#wd;ta-l|=^p zcN$fK*T}GU%48S^j2|li5m78*0FR_keMaD_=0n7~KpZqm7{p+JS$xn`Ip?0>_xq=_ zFx@~P)4D^>+*!l$8L|*3g8?v%v{)l(po#3Wh_^Cp%Go+`R760dZD&Tq9hH;H z>A5`cbV{c{Q9`hf3kFr_W^tf6d{_9uonlcjw4lsRM~iczmg1d}yf!)3n|8j1*3%8M zfQUO+HYk#N-1B+LenI=TYf}p~voj)k^Tq!Q@4c~Xwd>9L)jHw$CeXOHhaqSv$r33B z!qfud|n&5Q?x=|%(zr=jM10B$k^Gl!qL+%tn28@>o zhZD$}!r@-{!uPO)F728ZhMC-jAF~a2fZ55pnJUN1@w{DgjRvqG2V877DBFFmyImYa{-Axj~U(znyov&m$xT((5Zj6 z*Bu^f6u@fj1^z-=*`)wU(878B)#5NY@JK3gP|1&b4SOZt+nrweA^Q*o$49Ol;8y(JcNXF!Z%O|dXxR1@ZZ>D3n$G z?EpZtn&31;>~j)9GUw6+;8nqv%UF^YTMR98qY zr~6M$w?bq^)1|^(@Wkl|v~($Qt_`NhR1iz2nW+*80_mqRdWCcfp;7`}2fWSEFu;>E zcW1i>(Gb{VJgFc{0q3$}9PBS(Ff7Hmp+yLp6zf6o9$NCl#Nl#x;@0_fjyH{TeLa`+ zbBu~OwrF*|LLh`!XDN6gaC<-)RWQUu!q+63-;Go_KevsP%hAAcLpl!3i_ZG9N@Dh#o%SJ z=@=TGT||@-U{)+qTtgR+-K-;s#C816x}0<9@WO+e%mS+}z&L#pkg@c86`xrpMY1J0J(x{g7SEo5N@uwiI9KyRx=-=xJ2Wx0|E7Jp zhJOrGfdk>ok;WqIYFobRVtoY|5O2~hcf3w?X775M}$l;Ny){0Q$#$ z8e!V2nav{3{esP^&urGz2{NdGF@xa&T7*jqn1+Y6B4~l7fYU;znE!;%j<^z%Md3;q z(nc~^OK;5^+(dCQEJ%fh^Zr6{!#{zH0JmNveCy_~#h`Am3XG5R9msSuUMb(T6J+Oc z@Md*u=N|ZWMq8C@4DLfp-2v4W03-sg&&|Zn{HeSp2ORQy7UX6zK7jFNNn`gC5P_7C z)8ICcUw-T+R3oVU{L0Uinb^5Iw`cVnG%NUl8IL3j6#9 z+h{;M=noI#v%HGhQv-j-PWbi3Zy#6?gjx|lJ%%C0l#bjqFT)plm(P5P;@GZrzlJwH z@+S`Ik=v)N{Sbw?!I=PN50f3nT}F%W(~wi&f%;rQ_vh=;IPn}(?AYK`;-5@`k`0DR zo^_-ve3eDO;;SsgPFlzC5eovXAs6;A&IhdZSy&c(ED{zOi<||tz=VH|V$N#S&Vk>{ zSmitZyF0=s{xeqUJ{f(v_DbzWqrKF=ef{O;h32j1&E{&e+3vKjuP&}$X}{dOzIwU6 J+FWeE@xK *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username*** + +***-G group*** adds the groups in a comma separated + +***-d /home/username*** specifies the home directory to be created (necessary on ubuntu) + +***-m*** flag to create scripts (necessary) + +***-s /bin/bash*** what shell is to be used (default is none) + +#### deleting a user + + userdel -r {username} + +## IMPORTANT +### set users primary group + +**this is critical** + + sudo usermod -g www-data + + +### setting up ssh authentication + +cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys" + +## apt-get commands + +**to see the package version:** + + dpkg -s postgresql | grep Version + +# setting up aws server + +## creating the server instance + +[aws instance](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EC2_GetStarted.html) + +1. Open the [amazon EC2 console](https://console.aws.amazon.com/ec2/) +2. Choose **Launch Instance** +3. The *Choose an Amazon Machine Image (AMI)* page has basic configuration, so pick the first *ubuntu* configuration +4. This will take you to the *Choose an Instance Type* page, this chooses the hardware configuration, you want to pick **t2.micro** +5. Hit **Review and Launch** +6. This will take you to *Review Instance Launch* page, and that has an option for **Security Groups**, hit **Edit security groups**, on the page that pops up, pick the options you want to allow for your instance +7. 
When finished, hit "done" or whatever and you'll be taken back to the *Review Instance Launch* page, from here hit the **Launch** key +8. this will prompt you for a key pair. There are a few options. Create a new Pair and choose an existing key pair + +####key pair info: +[aws info on key pairs](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html) + +[importing id_rsa](http://www.admin-magazine.com/CloudAge/Blogs/Dan-Frost-s-Blog/Importing-SSH-keys-on-AWS) + +[add_ssh](http://stackoverflow.com/questions/8193768/trying-to-ssh-into-an-amazon-ec2-instance-permission-error) + +1. mv /path/to/myname.pem ~/.ssh +2. ssh-add ~/.ssh/myname.pem +3. ssh ubuntu@INSTANCE_IP_ADDRESS + +Remember that the IP ADDRESS changes whenever you restart the instance + +on your computer + +1. vim /etc/hosts +2. add a line with the server name and IP ADDRESS for that insntace +3. ssh ubuntu@SERVERNAME + +## updating the hostname + +[aws ubuntu hostname](https://aws.amazon.com/premiumsupport/knowledge-center/linux-static-hostname/) + +[scroll down to find the "echo" comment](https://forums.aws.amazon.com/message.jspa?messageID=495274) + +there is an issue with ubuntu instances on AWS, the name given in /etc/hostname doesn't match what exist in /etc/hosts. So if you try using sudo you'll get an error. + +To fix this, you need to change those files, to get into sudo type in: + + sudo su - + echo "127.0.0.1 $(hostname)" >> /etc/hosts + +This will update /etc/hosts with the default hostname generated by amazon. Alternatively you can do what it says in the first link [aws ubuntu hostname](https://aws.amazon.com/premiumsupport/knowledge-center/linux-static-hostname/) + + +### setup visudo + + sudo update-alternatives --config editor + + su -c 'visudo' + +find this line: + + USERNAME ALL=(ALL) NOPASSWD: ALL + +replace it with: + + admin ALL=(ALL) ALL + + +## add new user + +[adding a user on linux AWS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/managing-users.html) + +[how to get an add the public key to the new user](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html#retrieving-the-public-key) + +1. *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username*** +2. *mkdir projectdir* + +### set users primary group + +**this is critical** +- The primary group is the group applied to you when you log in using the usual methods (TTYs, GUI, SSH, etc.). + + sudo usermod -g www-data + +### set password + + sudo passwd + +### setting up ssh authentication + +1. get the public key: + + ssh-keygen -y -f /path/to/myinstance.pem + +2. copy the above results + +3. log in to the ubuntu instance using default ubuntu user + +4. change users to the custom username + + sudo su - username + +5. create the ssh directory and auth file + + cd /home/username + mkdir .ssh + touch .ssh/authorized_keys + vim ~/.ssh/authorized_keys + +6. now paste in the public_key you got in step 1 +7. 
log out and test with the username
+
+    ssh username@instance.domain
+
+### add username to the sudo group
+AWS has a sudo group that allows a user sudo privileges
+
+    usermod -a -G sudo username
+
+### apt-get setup
+
+    sudo apt-get update
+    sudo apt-get install aptitude
+
+## postgres
+
+
+#### links fix locale error with postgres
+
+[could not connect to server solution](http://askubuntu.com/questions/50621/cannot-connect-to-postgresql-on-port-5432)
+
+[locale solution](http://ubuntuforums.org/showthread.php?t=1346581)
+
+[remote connecting](http://www.railszilla.com/postgresql-tcpip-connections-port-5432/coffee-break)
+
+    sudo apt-get install postgresql
+    sudo apt-get install postgresql-contrib
+    sudo locale-gen en_US en_US.UTF-8 hu_HU hu_HU.UTF-8
+    sudo dpkg-reconfigure locales
+    sudo service postgresql restart
+    sudo -u postgres psql postgres
+    sudo -u postgres createdb **website_dbname**
+
+    sudo apt-get install postgresql-server-dev-X.Y
+    sudo apt-get install postgresql-server-dev-9.3
+
+#### change password for db user "postgres"
+
+    sudo -u postgres psql postgres
+
+    ALTER USER postgres WITH PASSWORD '';
+
+#### set up postgresql for remote access
+
+######(see "remote connecting" link above)
+
+#####Note: be careful with this, because **anyone** will be able to mess around with it
+
+1. sudo vim /etc/postgresql/9.3/main/postgresql.conf
+2. find **listen\_addresses** and change it to **listen\_addresses = '\*'**
+3. sudo vim /etc/postgresql/9.3/main/pg_hba.conf
+4. find **host all all 127.0.0.1/32 trust** and change **127.0.0.1/32** to **0.0.0.0/0**
+5. sudo service postgresql restart
+6. test it by running: *psql -h* ***ip\_address*** *-U* ***username*** *-d* ***database***
+7. e.g. psql -h 173.246.107.96 -U postgres postgres
+
+### setup the /var/www directory
+
+    cd /var
+    sudo mkdir www
+    sudo chgrp www-data www -R
+    sudo chmod g+w www -R
+
+### install python packages
+
+    sudo apt-get install python-dev
+    sudo apt-get install libjpeg-dev
+
+
+## install and set up supervisor
+
+    sudo apt-get install supervisor
+
+make sure www-data is a group for the main user
+
+    vim /etc/supervisor/supervisord.conf
+
+add the following:
+
+    [unix_http_server]
+    file=/var/run/supervisor.sock
+    chmod=0770
+    chown=nobody:www-data
+
+    [supervisorctl]
+    serverurl=unix:///var/run//supervisor.sock
+    chmod=0770
+    chown=nobody:www-data
+
+#### run the following commands:
+
+    sudo service supervisor stop
+    sudo service supervisor start
+
+### install pip and virtualenv
+
+[virtualenv install](http://roundhere.net/journal/virtualenv-ubuntu-12-10/)
+
+    sudo apt-get install python-pip
+    sudo pip install virtualenv
+    sudo pip install virtualenvwrapper
+
+    echo "export WORKON_HOME=~/.virtualenvs" >> .bashrc
+    echo ". 
/usr/local/bin/virtualenvwrapper.sh" >> .bashrc + + +### install nginx + + sudo apt-get install nginx + + +# bootstrap server + + fab (prod|rel) deploy.bootstrap + + + + + diff --git a/templates/readmes/gandi.md b/templates/readmes/gandi.md new file mode 100644 index 0000000..e6a598e --- /dev/null +++ b/templates/readmes/gandi.md @@ -0,0 +1,233 @@ + + +## links + +#### gandi + +[connecting to gandi server](https://wiki.gandi.net/en/hosting/using-linux/how_to_connect_ai) + +[accessing the server via ssh](https://wiki.gandi.net/en/hosting/using-linux/server-access) + +[first steps with gandi server](https://wiki.gandi.net/en/hosting/gandi-expert/setup) + +#### visudo + +[configuring visudo](http://askubuntu.com/questions/539243/how-to-change-visudo-editor-from-nano-to-vim) + +#### users + +[tecmint.com complete guide to users](http://www.tecmint.com/add-users-in-linux/) + +[How To Configure SSH Key-Based Authentication on a Linux Server](https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server) + +#### postgres + +[ubuntu community postgres docs](https://help.ubuntu.com/community/PostgreSQL) + + +## adding/deleting users + +#### adding a user: + +*("www-data" is the group name for website stuff on gandi)* + +> *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username*** + +***-G group*** adds the groups in a comma separated + +***-d /home/username*** specifies the home directory to be created (necessary on ubuntu) + +***-m*** flag to create scripts (necessary) + +***-s /bin/bash*** what shell is to be used (default is none) + +#### deleting a user + + userdel -r {username} + +## IMPORTANT +### set users primary group + +**this is critical** + + sudo usermod -g www-data + + +### setting up ssh authentication + +cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys" + +## apt-get commands + +**to see the package version:** + + dpkg -s postgresql | grep Version + +# setting up gandi server + +## creating the server instance + +after logging in, go to the virtual machine setup tab under *services* -> *servers* + +[virtual machine](https://www.gandi.net/admin/iaas/vm) + +under "servers" click "create a server". At the setup page you will have the option of either using an public ssh key, a password, or both. Pick both. + +1. generate a password +2. go to ~/.ssh and look for something like *id_rsa.pub* +3. to put in on the clipboard, on OSX type *cat ~/.ssh/id_rsa.pub | pbcopy* +4. paste what's in the clipboard into the ssh input section +5. copy all this information down and start it up + + +## first steps with server + +next follow the links under "gandi" + +while you are doing the commands link *"first steps with gandi server"* you may want to do this command: + + su -c 'apt-get install sudo' + su -c 'apt-get install vim' + +after setting up "first steps with gandi server" above, you will want to configure visudo to use vim + +### setup visudo + + su -c 'update-alternatives --config editor' + + su -c 'visudo' + +find this line: + + USERNAME ALL=(ALL) NOPASSWD: ALL + +replace it with: + + admin ALL=(ALL) ALL + +(or whatever user name you are mainly using) + +## add new user + +1. *sudo useradd -G* ***www-data*** *-d /home/****username*** *-m -s /bin/bash* ***username*** +2. 
*mkdir projectdir* + +### set users primary group + +**this is critical** + + sudo usermod -g www-data + +### set password + + sudo passwd + +### setting up ssh authentication + +cat ~/.ssh/id\_rsa.pub | ssh **username@remote\_host** "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized\_keys" + +### apt-get setup + + sudo apt-get update + sudo apt-get install aptitude + +## postgres + + +#### links fix locale error with postgres + +[could not connect to server solution](http://askubuntu.com/questions/50621/cannot-connect-to-postgresql-on-port-5432) + +[locale solution](http://ubuntuforums.org/showthread.php?t=1346581) + +[remote connecting](http://www.railszilla.com/postgresql-tcpip-connections-port-5432/coffee-break) + + sudo apt-get install postgresql + sudo apt-get install postgresql-contrib + sudo locale-gen en_US en_US.UTF-8 hu_HU hu_HU.UTF-8 + sudo dpkg-reconfigure locales + sudo service postgresql restart + sudo -u postgres psql postgres + sudo -u postgres createdb **website_dbname** + + sudo apt-get install postgresql-server-dev-X.Y + sudo apt-get install postgresql-server-dev-9.3 + +#### change password for db user "postgres" + sudo -u postgres psql postgres + + ALTER USER Postgres WITH PASSWORD ''; + +#### set up postgresql to remote access + +######(see "remote connecting" link above) + +#####Note: be careful with this, because **anyone** will be able to mess around with it + +1. sudo vim /etc/postgresql/9.3/main/postgresql.conf +2. find **listen\_addresses** and change it to **listen\_addresses = '\*'** +3. sudo vim /etc/postgresql/9.3/main/pg_hba.conf +4. find **host all all 127.0.0.1/32 trust** and change **127.0.0.1/32** to **0.0.0.0/0** +5. sudo service postgresql restart +6. test it by running: *psql -h* ***ip\_address*** *-U* ***username*** *-d* ***database*** +7. e.g. psql -h 173.246.107.96 -U postgres postgres + +### setup the /var/www directory + + cd /var + sudo mkdir www + sudo chgrp www-data www -R + sudo chmod g+w www -R + +### install python packages + + sudo apt-get install python-dev + sudo apt-get install libjpeg-dev + + +## install and set up supervisor + + sudo apt-get install supervisor + +make sure www-data is a group for the main user + + vim /etc/supervisor/supervisord.conf + +add the following: + + [unix_http_server] + file=/var/run//supervisor.sock + chmod=0770 + chown=nobody:www-data + + [supervisorctl] + serverurl=unix:///var/run//supervisor.sock + chmod=0770 + chown=nobody:www-data + +#### run the following commands: + + sudo service supervisor stop + sudo service supervisor start + +### install pip and virtualenv + +[virtualenv install](http://roundhere.net/journal/virtualenv-ubuntu-12-10/) + + sudo apt-get install python-pip + sudo pip install virtualenv + sudo pip install virtualenvwrapper + +### install nginx + + sudo apt-get install nginx + + +# bootstrap server + + fab (prod|rel) deploy.bootstrap + + + + + diff --git a/templates/readmes/reset_migrations.md b/templates/readmes/reset_migrations.md new file mode 100644 index 0000000..6598671 --- /dev/null +++ b/templates/readmes/reset_migrations.md @@ -0,0 +1,49 @@ +I try different commands and some of the answers help me. Only this sequence in my case fixed both broken dependencies in migrations in MYAPP and clean all past migrations starting from scratch. + +Before doing this ensure that database is already synced (e.g. do not add a new Model field here or change Meta options). 
+ +rm -Rf MYAPP/migrations/* +python manage.py makemigrations --empty MYAPP +python manage.py makemigrations +python manage.py migrate --fake MYAPP 0002 +Where 0002 is the migration number returned by the last makemigrations command. + +Now you can run makemigrations / migrate again normally because migration 0002 is stored but not reflected in the already-synced database. + +if you really want to go crazy delete all the tables in teh database related to the app and then + +DELETE FROM django_migrations WHERE app='registry' + + + +------- + + +this is what I do + +first I clear out everything: + +rm -Rf appname/migrations + +fab database.drop_all +fab database.init +fab django.manage:migrate +fab django.manage:syncdb + + +then I re-add the app + +fab django.manage:"makemigrations appname" +fab django.manage:migrate + + +at that point I have working table +so again + +rm -Rf {ON HOST}/appname/migrations +fab ONHOST database.drop_all +fab ONHOST database.init +fab ONHOST deploy.sync <--- CRITICAL STEP DO NOT FORGET +fab ONHOST django.manage:migrate +fab ONHOST django.manage:syncdb + diff --git a/templates/readmes/setup_gandi.md b/templates/readmes/setup_gandi.md new file mode 100644 index 0000000..cab0ded --- /dev/null +++ b/templates/readmes/setup_gandi.md @@ -0,0 +1,54 @@ +* set up using docker + +## setup the /var/www directory + + cd /var + sudo mkdir www + sudo chgrp www-data www -R + sudo chmod g+w www -R + +## install python packages + + sudo apt-get install python-dev + sudo apt-get install libjpeg-dev + +## install supervisor +sudo apt-get install supervisor + +make sure www-data is a group for *user* + +modify /etc/supervisor/supervisord.conf + +add the following: + +[unix_http_server] +file=/var/run//supervisor.sock ; (the path to the socket file) +chmod=0770 ; sockef file mode (default 0700) +chown=nobody:www-data + +[supervisorctl] +serverurl=unix:///var/run//supervisor.sock ; use a unix:// URL for a unix socket +chmod=0770 +chown=nobody:www-data + +#### run the following commands: + + sudo service supervisor stop + sudo service supervisor start + +## install postgres + + sudo apt-get install postgresql postgresql-contrib + sudo apt-get install postgresql-server-dev-X.Y + sudo apt-get install postgresql-server-dev-9.1 + +## install pip and virtualenv + +http://roundhere.net/journal/virtualenv-ubuntu-12-10/ + + sudo apt-get install python-pip + sudo pip install virtualenv + sudo pip install virtualenvwrapper + +1. setup scripts/meta/configuration/branch.yml +1. setup scripts/meta/virtualenv/branch.txt diff --git a/templates/readmes/translations.md b/templates/readmes/translations.md new file mode 100644 index 0000000..c0894d8 --- /dev/null +++ b/templates/readmes/translations.md @@ -0,0 +1,32 @@ +# how to do django translation + +[https://docs.djangoproject.com/es/1.9/topics/i18n/translation/]() + + +#### translation for templates + +[https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#translator-comments-in-templates]() + +#### language switching + +[https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#switching-language-in-templates]() + +#### how to create language files + +[https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#localization-how-to-create-language-files]() + + +you have to set the -e extension to specify files you want to use. e.g. 
+
+`django-admin makemessages -e django`
+
+`fab django.admin:"makemessages -l en -e django"`
+
+After creating the .po files you have to compile them before they can be used:
+
+`django-admin compilemessages`
+
+`fab django.admin:"compilemessages"`
+
+### set language redirect
+[https://docs.djangoproject.com/es/1.9/topics/i18n/translation/#the-set-language-redirect-view]()
\ No newline at end of file
diff --git a/templates/readmes/update_images.md b/templates/readmes/update_images.md
new file mode 100644
index 0000000..ddd5db9
--- /dev/null
+++ b/templates/readmes/update_images.md
@@ -0,0 +1,20 @@
+In order to update the images:
+
+1. make fixtures - fab django.create_fixtures:appname
+2. copy fixtures - cp extras/backups/fixtures/appname/appname.json
+   code/app/appname/fixtures/appname.json
+3. deploy - fab deploy.sync
+4. copy images - cp extras/path/to/images/* /var/www/project.branch/public/media/dynamic
+
+5. check pip - make sure the virtualenv requirements txt is updated to development
+6. syncdb - fab django.manage:syncdb
+
+7. ?flush - fab django.manage:flush
+8. ?superuser - fab django.manage:createsuperuser
+9. load fixture - fab django.manage:"loaddata appname"
+
+if first time:
+-? fab django.manage:"makemigrations thumbnail"
+-? fab django.manage:migrate
+
+10. ?clear thumbs - fab django.manage:"thumbnail clear"
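+
+The core steps above can also be chained from one fabric task. A minimal
+sketch (a hypothetical helper, not part of this repo's modules): it just
+shells out the same `fab`/`cp` commands listed above via fabric's `local`,
+and `appname` is the usual placeholder for your app; the optional `?` steps
+are left out:
+
+    from fabric.api import task
+    from fabric.operations import local
+
+    @task
+    def update_images(appname):
+        """chain the documented fixture/sync/loaddata steps (sketch only)"""
+        local("fab django.create_fixtures:%s" % appname)
+        local("cp extras/backups/fixtures/%(app)s/%(app)s.json "
+              "code/app/%(app)s/fixtures/%(app)s.json" % {'app': appname})
+        local("fab deploy.sync")
+        local("fab django.manage:syncdb")
+        local('fab django.manage:"loaddata %s"' % appname)
+
+Run it as `fab update_images:appname`, or keep running the individual
+commands step by step as listed.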