Shell script to bootstrap a Django project with Celery, Channels, and supervisor/nginx deploy configs
#!/bin/sh
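# Usage (assuming the script is saved as create_django_project.sh and is executable):
#   ./create_django_project.sh <project_name>
# It creates $HOME/<project_name> with a virtualenv, a Django project, and
# deploy configs for supervisor (gunicorn, daphne, celery) and nginx.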
cd $HOME;
PROJECT_NAME=$1;
PROJECT_DIR=$HOME/$PROJECT_NAME
VENV='env'
USER=`whoami`
echo $PROJECT_NAME;
mkdir $PROJECT_NAME;
cd $PROJECT_NAME;
python3 -m venv $VENV;
. $VENV/bin/activate
pip install django celery channels channels-redis django-redis-cache hiredis psycopg2-binary gunicorn django-debug-toolbar django-extensions httpie httpie-http2
pip freeze > requirements.txt
django-admin startproject $PROJECT_NAME
mkdir deploy public;
cd deploy
mkdir conf logs
cd logs
mkdir celery_worker celery_beat gunicorn nginx daphne
cd ..
cd conf
touch celery_beat.conf celery_worker.conf daphne.conf gunicorn.conf nginx.conf
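# Each of the echo """ ... """ >> blocks below expands the shell variables
# defined above and appends the result to the matching file in deploy/conf.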
echo """
; ================================
; celery beat supervisor example
; ================================
; the name of your supervisord program
[program:${PROJECT_NAME}_beat]
; Set full path to celery program if using virtualenv
command=${PROJECT_DIR}/${VENV}/bin/celery beat -A ${PROJECT_NAME} --loglevel=INFO
; The directory to your Django project
directory=${PROJECT_DIR}/${PROJECT_NAME}
; If supervisord is run as the root user, switch users to this UNIX user account
; before doing any processing.
user=${USER}
; Supervisor will start as many instances of this program as named by numprocs
numprocs=1
; Put process stdout output in this file
stdout_logfile=${PROJECT_DIR}/deploy/logs/celery_beat/beat_stdout.log
; Put process stderr output in this file
stderr_logfile=${PROJECT_DIR}/deploy/logs/celery_beat/beat_stderr.log
; If true, this program will start automatically when supervisord is started
autostart=true
; May be one of false, unexpected, or true. If false, the process will never
; be autorestarted. If unexpected, the process will be restarted when the program
; exits with an exit code that is not one of the exit codes associated with this
; process’ configuration (see exitcodes). If true, the process will be
; unconditionally restarted when it exits, without regard to its exit code.
autorestart=true
; The total number of seconds which the program needs to stay running after
; a startup to consider the start successful.
startsecs=10
; if your broker is supervised, set its priority higher
; so it starts first
priority=999
""" >> celery_beat.conf
echo """
; ==================================
; celery worker supervisor example
; ==================================
; the name of your supervisord program
[program:${PROJECT_NAME}_celery]
; Set full path to celery program if using virtualenv
command=${PROJECT_DIR}/${VENV}/bin/celery worker -A ${PROJECT_NAME} --loglevel=INFO
; The directory to your Django project
directory=${PROJECT_DIR}/${PROJECT_NAME}
; If supervisord is run as the root user, switch users to this UNIX user account
; before doing any processing.
user=${USER}
; Supervisor will start as many instances of this program as named by numprocs
numprocs=1
; Put process stdout output in this file
stdout_logfile=${PROJECT_DIR}/deploy/logs/celery_worker/worker_stdout.log
; Put process stderr output in this file
stderr_logfile=${PROJECT_DIR}/deploy/logs/celery_worker/worker_stderr.log
; If true, this program will start automatically when supervisord is started
autostart=true
; May be one of false, unexpected, or true. If false, the process will never
; be autorestarted. If unexpected, the process will be restarted when the program
; exits with an exit code that is not one of the exit codes associated with this
; process’ configuration (see exitcodes). If true, the process will be
; unconditionally restarted when it exits, without regard to its exit code.
autorestart=true
; The total number of seconds which the program needs to stay running after
; a startup to consider the start successful.
startsecs=10
; Need to wait for currently executing tasks to finish at shutdown.
; Increase this if you have very long running tasks.
stopwaitsecs = 600
; When resorting to sending SIGKILL to the program to terminate it
; send SIGKILL to its whole process group instead,
; taking care of its children as well.
killasgroup=true
; if your broker is supervised, set its priority higher
; so it starts first
priority=998
""" >> celery_worker.conf
echo """
[program:${PROJECT_NAME}_asgi_daphne]
directory=${PROJECT_DIR}/${PROJECT_NAME}
command=${PROJECT_DIR}/${VENV}/bin/daphne -u ${PROJECT_DIR}/deploy/asgi.sock ${PROJECT_NAME}.asgi:application
stdout_logfile=${PROJECT_DIR}/deploy/logs/daphne/daphne.log
stderr_logfile=${PROJECT_DIR}/deploy/logs/daphne/daphne_stderr.log
autostart=true
autorestart=true
redirect_stderr=true
""" >> daphne.conf
echo """
[program:${PROJECT_NAME}_gunicorn]
command=${PROJECT_DIR}/${VENV}/bin/gunicorn --workers 3 --bind unix:${PROJECT_DIR}/deploy/wsgi.sock ${PROJECT_NAME}.wsgi
directory=${PROJECT_DIR}/${PROJECT_NAME}
autostart=true
autorestart=true
stderr_logfile=${PROJECT_DIR}/deploy/logs/gunicorn/gunicorn.err.log
stdout_logfile=${PROJECT_DIR}/deploy/logs/gunicorn/gunicorn.out.log
""" >> gunicorn.conf
echo """
server {
listen 80;
server_name ${PROJECT_NAME};
access_log ${PROJECT_DIR}/deploy/logs/nginx/nginx-access.log;
error_log ${PROJECT_DIR}/deploy/logs/nginx/nginx-error.log debug;
location = /favicon.ico { access_log off; log_not_found off; }
location /static {
alias ${PROJECT_DIR}/public/static;
}
location /media {
alias ${PROJECT_DIR}/public/media;
}
location / {
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header Host \$http_host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_pass http://unix:${PROJECT_DIR}/deploy/wsgi.sock;
}
location /ws/ {
proxy_http_version 1.1;
proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection \"upgrade\";
proxy_pass http://unix:${PROJECT_DIR}/deploy/asgi.sock;
}
}
""" >> nginx.conf
sudo ln -sf "${PROJECT_DIR}/deploy/conf/nginx.conf" "/etc/nginx/sites-enabled/${PROJECT_NAME}.conf"
sudo ln -sf "${PROJECT_DIR}/deploy/conf/gunicorn.conf" "/etc/supervisor/conf.d/${PROJECT_NAME}_gunicorn.conf"
sudo ln -sf "${PROJECT_DIR}/deploy/conf/celery_beat.conf" "/etc/supervisor/conf.d/${PROJECT_NAME}_celery_beat.conf"
sudo ln -sf "${PROJECT_DIR}/deploy/conf/celery_worker.conf" "/etc/supervisor/conf.d/${PROJECT_NAME}_celery_worker.conf"
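# If the Channels/Daphne process should also run under supervisor, link its
# config the same way:
sudo ln -sf "${PROJECT_DIR}/deploy/conf/daphne.conf" "/etc/supervisor/conf.d/${PROJECT_NAME}_daphne.conf"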
sudo nginx -t
sudo supervisorctl reread
sudo supervisorctl update
sudo supervisorctl start "${PROJECT_NAME}_gunicorn"
cd ../..
cd $PROJECT_NAME
mkdir templates static
cd $PROJECT_NAME
mv settings.py settings_base.py
touch settings.py settings_local_sample.py settings_local.py
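# settings.py simply imports settings_local.py when present (development)
# and falls back to settings_base.py (the originally generated settings).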
echo """
try:
    from .settings_local import *
except ImportError:
    from .settings_base import *
""" >> settings.py
echo """
import os
from .settings_base import *
'''
Local settings for development; settings_local_sample.py is kept as a template copy of this file
'''
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', '${PROJECT_NAME}']
INTERNAL_IPS = ('127.0.0.1', '0.0.0.0', '${PROJECT_NAME}')
INSTALLED_APPS += (
    'debug_toolbar',
    'django_extensions',
)
MIDDLEWARE += (
    'debug_toolbar.middleware.DebugToolbarMiddleware',
)
DEBUG_TOOLBAR_PANELS = [
    'debug_toolbar.panels.versions.VersionsPanel',
    'debug_toolbar.panels.timer.TimerPanel',
    'debug_toolbar.panels.settings.SettingsPanel',
    'debug_toolbar.panels.headers.HeadersPanel',
    'debug_toolbar.panels.request.RequestPanel',
    'debug_toolbar.panels.sql.SQLPanel',
    'debug_toolbar.panels.staticfiles.StaticFilesPanel',
    'debug_toolbar.panels.templates.TemplatesPanel',
    'debug_toolbar.panels.cache.CachePanel',
    'debug_toolbar.panels.signals.SignalsPanel',
    'debug_toolbar.panels.logging.LoggingPanel',
    'debug_toolbar.panels.redirects.RedirectsPanel'
]
#TEMPLATES[0]['OPTIONS']['loaders'] = (
# 'django.template.loaders.filesystem.Loader',
# 'django.template.loaders.app_directories.Loader',
#)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# For PostgreSQL with PostGIS
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'NAME': '',
# 'USER': 'postgres',
# 'PASSWORD': '',
# 'HOST': 'localhost',
# 'PORT': '5432',
# 'CONN_MAX_AGE': 60 * 10
# }
# }
# For plain PostgreSQL
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.postgresql',
# 'NAME': '',
# 'USER': 'postgres',
# 'PASSWORD': '',
# 'HOST': 'localhost',
# 'PORT': '5432',
# 'CONN_MAX_AGE': 60 * 10
# }
# }
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
AUTH_PASSWORD_VALIDATORS = []
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
SESSION_CACHE_ALIAS = 'default'
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}
""" >> settings_local.py
cp settings_local.py settings_local_sample.py
touch celery.py
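# celery.py wires Celery to the Django settings (CELERY_-prefixed keys) and
# points both the broker and the result backend at a local Redis instance.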
echo """
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '${PROJECT_NAME}.settings')
app = Celery('${PROJECT_NAME}')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a 'CELERY_' prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.conf.broker_url = 'redis://localhost:6379'
app.conf.result_backend = 'redis://localhost:6379'
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
from celery.schedules import crontab
from celery.decorators import periodic_task
@periodic_task(run_every=(crontab(minute='*/15')), name='some_task', ignore_result=True)
def some_task():
    print('hi')
    # do something
""" >> celery.py
echo """
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
""" >> __init__.py
echo """
PUBLIC_DIR = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '..', '..', 'public'))
STATIC_ROOT = os.path.join(PUBLIC_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PUBLIC_DIR, 'media')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static'),
]
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'default'
CELERY_BROKER_URL = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': [
            '127.0.0.1:6379',
        ],
        'OPTIONS': {
            # 'DB': 1,
            # 'PASSWORD': 'yadayada',
            'PARSER_CLASS': 'redis.connection.HiredisParser',
            'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
            # 'CONNECTION_POOL_CLASS_KWARGS': {
            #     'max_connections': 50,
            #     'timeout': 20,
            # },
            # 'MAX_CONNECTIONS': 1000,
            'PICKLE_VERSION': -1,
        },
    },
}
CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            'hosts': [('localhost', 6379)],
        },
    },
}
""" >> settings_base.py
cd ../..
echo `pwd`;
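# Typical follow-up steps, not run by this script (shown for reference):
#   . ${VENV}/bin/activate
#   cd ${PROJECT_NAME} && python manage.py migrate && python manage.py collectstatic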