knowstory-env
@echo off
set "ROOT_DIR=E:\code\knowstory"
set "VIRTUAL_ENV=%ROOT_DIR%\env"
if defined _OLD_VIRTUAL_PROMPT (
    set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
) else (
    if not defined PROMPT (
        set "PROMPT=$P$G"
    )
    set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
)

#!E:\Python\python.exe
# mcinspect -- command-line interface to memcacheinspector (invoked through the "mc" DOSKEY alias below)
import re
import sys
from optparse import OptionParser, IndentedHelpFormatter
import memcache
import memcacheinspector
def do_list(servers, max_value_size=0):
    mci = memcacheinspector.MemcacheInspector(servers)
    for server, items in mci.get_items(max_value_size=max_value_size).iteritems():
        for item in items:
            print '%s|%s|%s|%s' % (server, item.expiration.isoformat(), item.size, item.key)


def do_dump(servers, max_value_size=0):
    mci = memcacheinspector.MemcacheInspector(servers)
    for server, items in mci.get_items(include_values=True, max_value_size=max_value_size).iteritems():
        for item in items:
            print '%s|%s|%s|%s' % (server, item.expiration.isoformat(), item.size, item.key)
            print item.value


def do_grep(servers, pattern, ignore_case=False, invert_match=False):
    if ignore_case:
        regex = re.compile(pattern, flags=re.IGNORECASE)
    else:
        regex = re.compile(pattern)
    mci = memcacheinspector.MemcacheInspector(servers)
    for server, items in mci.get_items(include_values=True).iteritems():
        for item in items:
            match = regex.search(item.key) or (isinstance(item.value, basestring) and regex.search(item.value))
            if bool(match) != invert_match:
                print '%s|%s|%s|%s' % (server, item.expiration.isoformat(), item.size, item.key)
                print item.value


def do_get(servers, keys):
    for server in servers:
        mc = memcache.Client([server])
        for key, val in mc.get_multi(keys).iteritems():
            print '%s|%s' % (server, key)
            print val


def do_set(servers, key, value):
    for server in servers:
        mc = memcache.Client([server])
        if mc.set(key, value):
            print '%s|%s' % (server, key)
            print value


def do_incr(servers, keys, delta):
    for server in servers:
        mc = memcache.Client([server])
        for key in keys:
            try:
                val = mc.incr(key, delta)
            except:
                pass
            else:
                print '%s|%s' % (server, key)
                print val


def do_decr(servers, keys, delta):
    for server in servers:
        mc = memcache.Client([server])
        for key in keys:
            try:
                val = mc.decr(key, delta)
            except:
                pass
            else:
                print '%s|%s' % (server, key)
                print val


def do_delete(servers, keys):
    for server in servers:
        mc = memcache.Client([server])
        if mc.delete_multi(keys):
            for key in keys:
                print '%s|%s' % (server, key)


def do_flush(servers):
    for server in servers:
        mc = memcache.Client([server])
        mc.flush_all()
        print server


def do_stats(servers):
    for server in servers:
        mc = memcache.Client([server])
        for host, stats in mc.get_stats():
            for key, val in stats.iteritems():
                print '%s|%s|%s' % (host.split()[0], key, val)


class _CustomHelpFormatter(IndentedHelpFormatter):
    def format_description(self, description):
        return description if description else ''

    def format_epilog(self, epilog):
        return epilog if epilog else ''
if __name__ == '__main__':
    opt = OptionParser(
        usage='Usage: %prog [options] <action> [<arguments>]',
        version='%%prog %s' % (memcacheinspector.__version__,),
        formatter=_CustomHelpFormatter(),
        description='''Actions:
  list                      Lists all items stored in the server(s).
  dump                      Dumps all items (including values) stored in the
                            server(s).
  grep <pattern>            Dumps all items (including values) whose key or value
                            matches the specified search pattern.
  get <key> [<key> ...]     Retrieves the items with the specified key(s).
  set <key> <value>         Sets the item with the specified key and value.
  incr <key> [<key> ...]    Increments the value of the items with the specified
                            key(s).
  decr <key> [<key> ...]    Decrements the value of the items with the specified
                            key(s).
  delete <key> [<key> ...]  Deletes the items with the specified key(s).
  flush                     Expires all items in the server(s).
  stats                     Retrieves statistics from the server(s).
''',
        epilog='''
Output Format:
  list:
    <server connection string>|<expiration date>|<size in bytes>|<key>
  dump, grep:
    <server connection string>|<expiration date>|<size in bytes>|<key>
    <value>
  get, set, incr, decr:
    <server connection string>|<key>
    <value>
  delete:
    <server connection string>|<key>
  flush:
    <server connection string>
  stats:
    <server connection string>|<statistic key>|<value>
'''
    )
    opt.add_option(
        '-s', '--server',
        dest='server', action='append', type='string',
        help='Specifies a server to connect to. Can be used multiple times. Defaults to \'127.0.0.1:11211\'.'
    )
    opt.add_option(
        '-z', '--max-value-size',
        dest='max_value_size', action='store', type='int', default=0,
        help='The maximum size (in bytes) a value can be when performing a list or dump action. Zero or lower is interpreted as no limit. Defaults to 0.'
    )
    opt.add_option(
        '-d', '--delta',
        dest='delta', action='store', type='int', default=1,
        help='The amount to change the value when using the incr or decr actions. Defaults to 1.'
    )
    opt.add_option(
        '-i', '--ignore-case',
        dest='ignore_case', action='store_true', default=False,
        help='Ignore case distinctions in both the pattern and the items during the grep action.'
    )
    opt.add_option(
        '-v', '--invert-match',
        dest='invert_match', action='store_true', default=False,
        help='Inverts the sense of matching, to select non-matching items during the grep action.'
    )
    options, args = opt.parse_args(sys.argv[1:])

    def usage():
        opt.print_help()
        sys.exit(1)

    def ensure_args(args, min_num):
        if len(args) < min_num:
            usage()

    if not args:
        usage()
    else:
        action = args[0]
        del args[0]

        if options.server:
            servers = options.server
        else:
            servers = ['127.0.0.1:11211']

        if action == 'list':
            do_list(servers, max_value_size=options.max_value_size)
        elif action == 'dump':
            do_dump(servers, max_value_size=options.max_value_size)
        elif action == 'grep':
            ensure_args(args, 1)
            do_grep(servers, args[0], ignore_case=options.ignore_case, invert_match=options.invert_match)
        elif action == 'get':
            ensure_args(args, 1)
            do_get(servers, args)
        elif action == 'set':
            ensure_args(args, 2)
            do_set(servers, args[0], args[1])
        elif action == 'incr':
            ensure_args(args, 1)
            do_incr(servers, args, options.delta)
        elif action == 'decr':
            ensure_args(args, 1)
            do_decr(servers, args, options.delta)
        elif action == 'stats':
            do_stats(servers)
        elif action == 'delete':
            ensure_args(args, 1)
            do_delete(servers, args)
        elif action == 'flush':
            do_flush(servers)
        else:
            usage()
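# Example invocations (a sketch; assumes this script is saved as bin\mcinspect,
# as the "mc" DOSKEY alias below suggests, and that memcached is listening on
# the default 127.0.0.1:11211 -- pass -s host:port otherwise):
#   python bin\mcinspect list
#   python bin\mcinspect dump -z 1024
#   python bin\mcinspect grep -i "session"
#   python bin\mcinspect set counter 1
#   python bin\mcinspect incr counter -d 5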
rem knowstory-env activation script (continued)
set "PROMPT=(env) %PROMPT%"
if not defined _OLD_VIRTUAL_PYTHONHOME (
    set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
)
set PYTHONHOME=
if defined _OLD_VIRTUAL_PATH (
    set "PATH=%_OLD_VIRTUAL_PATH%"
) else (
    set "_OLD_VIRTUAL_PATH=%PATH%"
)
set "PATH=%VIRTUAL_ENV%\Scripts;%PATH%"
rem CUSTOM
set "PYTHONPATH=%ROOT_DIR%;%VIRTUAL_ENV%\lib\site-packages;%ROOT_DIR%\knowstory;E:\\Python\\Lib\\site-packages;%PYTHONPATH%"
set "GAEPATH=C:\Program Files (x86)\Google\google_appengine"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\cherrypy"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\cherrypy"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\fancy_urllib"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\yaml-3.10"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\antlr3"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\concurrent"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\ipaddr"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\jinja2-2.6"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\webob-1.2.3"
rem set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\webapp2-2.5.1"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\mox"
set "PYTHONPATH=%PYTHONPATH%;%GAEPATH%\lib\protorpc-1.0"
rem set "PYTHONSTARTUP=%VIRTUAL_ENV%\shell.py"
rem set "SQL_ALCHEMY_ECHO=True"
rem Start Memcache service
rem start/B %VIRTUAL_ENV%\memcached-amd64\memcached.exe
:END
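rem Hypothetical sanity check (not part of the original setup): after calling this
rem script from a cmd session, the bundled GAE libraries added above should import:
rem   python -c "import yaml, jinja2, webob"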
DOSKEY gae=python "C:\Program Files (x86)\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\dev_appserver.py" $* && ^
DOSKEY gaeks=python "C:\Program Files (x86)\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\dev_appserver.py" knowstory\app-dev.yaml --automatic_restart=1 $* && ^
DOSKEY gaeksn=python "C:\Program Files (x86)\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\dev_appserver.py" knowstory\app-dev.yaml --automatic_restart=0 $* && ^
DOSKEY env=env\Scripts\activate.bat && ^
DOSKEY py=python $* && ^
DOSKEY ipy=ipython $* && ^
DOSKEY pd=ipython -i bin\shell.py $* && ^
DOSKEY rdb=bin\refresh_db.bat && ^
DOSKEY test=nosetests -v $* && ^
DOSKEY et=set "SQL_ALCHEMY_ECHO=True" && ^
DOSKEY ef=set "SQL_ALCHEMY_ECHO=False" && ^
DOSKEY push=git push origin $* && ^
DOSKEY pull=git pull origin $* && ^
DOSKEY s=git status && ^
DOSKEY co=git checkout $* && ^
DOSKEY merge=git merge $* && ^
DOSKEY mergetool=git mergetool && ^
DOSKEY c=git commit $* && ^
DOSKEY cm=git commit -am "$*" && ^
DOSKEY sb=git branch ^| grep $* && ^
DOSKEY mc=python bin\mcinspect $* && ^
DOSKEY appcfg=python "C:\Program Files (x86)\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\appcfg.py" $* && ^
DOSKEY commit=git commit $*
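rem Example of how DOSKEY's $* placeholder expands once the macros above are loaded:
rem   "gaeks --port=8081" runs dev_appserver.py with
rem       knowstory\app-dev.yaml --automatic_restart=1 --port=8081
rem   "cm fix typo" runs: git commit -am "fix typo"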
rem to run with IntelliJ: http://www.qopy.me/0p14k5OMSyiA8rLNlVh6JA
cmd.exe /k "C:\bin\aliases.cmd" -new_console
# cli.py -- project management commands, run as "python cli.py <command>"
import click
import os


def path(relative_part):
    return os.path.realpath(
        os.path.join(os.path.dirname(__file__), relative_part))


@click.group()
def cli():
    pass


@cli.command()
def run_tests():
    """ run tests only from one module just for testing """
    import unittest
    import sys
    import os
    suite = unittest.TestLoader().discover(path('knowstory'))
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(1 if result.errors or result.failures else 0)


@cli.command()
def truncate_db():
    """ truncate all db tables """
    os.environ['CURRENT_ENV'] = os.environ.get('CURRENT_ENV') or 'DEV'
    from server.settings import get_engine
    from sqlalchemy.schema import MetaData

    # create db schema with alembic before running this code
    # Base.metadata.create_all(engine)
    def truncate_all_tables(engine):
        # delete all table data (but keep tables)
        meta = MetaData(bind=engine, reflect=True)
        con = engine.connect()
        trans = con.begin()
        engine.execute('SET FOREIGN_KEY_CHECKS=0;')
        for table in reversed(meta.sorted_tables):
            con.execute(table.delete())
        engine.execute('SET FOREIGN_KEY_CHECKS=1;')
        trans.commit()

    click.confirm('Do you really want to truncate all tables in \n{} ?'
                  .format(get_engine()), abort=True)
    truncate_all_tables(get_engine())
    click.echo('Successfully truncated DB!')
@cli.command()
@click.option('--ipython/--no-ipython', default=True)
def shell(ipython):
    """runs dev shell with pre-inited environment"""
    file = path('shell.py')
    if not os.path.isfile(file):
        click.echo('File "{}" does not exist. Make sure you have '
                   'copied shell.example.py to shell.py'.format(file))
        return
    from subprocess import call
    call(["ipython" if ipython else "python", "-i", file])
@cli.command()
def refresh_db():
    """fully recreates the DB via refresh_db.bat"""
    click.confirm('Do you really want to fully recreate DB?', abort=True)
    file = path('refresh_db.bat')
    if not os.path.isfile(file):
        click.echo('File "{}" does not exist. Make sure you have '
                   'copied refresh_db.example.bat to refresh_db.bat'.format(file))
        return
    from subprocess import call
    call([file])
    click.echo('Successfully refreshed DB!')


if __name__ == '__main__':
    cli()
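# Example invocations (a sketch; with the click release in use at the time the
# command names match the function names, while newer click versions would
# expose them with dashes, e.g. "run-tests"):
#   python cli.py run_tests
#   python cli.py truncate_db
#   python cli.py shell --no-ipython
#   python cli.py refresh_db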
from server.main_app import app
ctx = app.test_request_context('/')
ctx.push()
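rem refresh_db.bat -- drops and recreates the DEV and UNIT-TEST MySQL databases,
rem applies the alembic migrations, and regenerates fake data for DEV. Normally
rem invoked via "python cli.py refresh_db" or the "rdb" DOSKEY alias (assumes
rem mysqladmin and alembic are on PATH).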
set "OLD_SQL_ALCHEMY_ECHO=%SQL_ALCHEMY_ECHO%"
set "SQL_ALCHEMY_ECHO=False"
set "OLD_CURRENT_ENV=%CURRENT_ENV%"
set "CURRENT_ENV=DEV" && mysqladmin -uroot -f drop knowstory && mysqladmin -uroot create knowstory && alembic upgrade head && python fake_data_db_generator\main.py
set "CURRENT_ENV=UNIT-TEST" && mysqladmin -uroot -f drop knowstory_test && mysqladmin -uroot create knowstory_test && alembic upgrade head
set "CURRENT_ENV=%OLD_CURRENT_ENV%"
set "SQL_ALCHEMY_ECHO=%OLD_SQL_ALCHEMY_ECHO%"
""" python cli.py shell """
import click
@click.group()
def cli():
pass
@cli.command()
@click.option('--ipython', default=False)
def shell(ipython):
a=5 # test local var
if ipython:
import IPython
IPython.embed()
else:
import code
code.InteractiveConsole(locals=globals()).interact()
"""Command on cli"""
if __name__ == '__main__':
cli()
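# shell.py -- pre-initialized interactive shell: loads env variables from
# app-dev.yaml, builds a fake webapp2 request, opens a DB session and fetches a
# test user, so "ipython -i bin\shell.py" (the "pd" alias) starts a ready-to-use
# console.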
# for autocompletion
# import rlcompleter, readline
# readline.parse_and_bind('tab:complete')
import os
import yaml
os.environ['CURRENT_ENV'] = 'DEV'
if os.environ['CURRENT_ENV'] == 'DEV':
    with open(os.path.realpath(os.path.join(os.path.dirname(__file__), '../knowstory/app-dev.yaml')), 'r') as stream:
        vars_ = yaml.load(stream)['env_variables'].items()
        for var, value in vars_:
            os.environ[var] = str(value)
os.environ['SQL_ALCHEMY_ECHO'] = 'True'
import webapp2
import server
from server.main_app import app
from server.models import *
from server.const import *
from server.processors import *
from server.testing_tools import *
from sqlalchemy import *
from sqlalchemy.orm import *
from server.handlers.security import PermissionsChecker
from server.utils import *
from server.const import *
import server.models as models
import server.processors as proc
from server.gcs_connector import *
from server.mailing.email_sender import EmailSender
request = webapp2.Request.blank('/')
request.app = app
webapp2._local.request = request
mailer = EmailSender()
db = SessionContainer()
BaseModel.set_static_session(db)
u = db.query(User).filter(User.email=='test@test.test').first()
# user = u
# db.commit()
user_id = u.user_id
perm = fake_permissions(user_id)
# Helper for log/admin-email messages: appends the last exception's traceback
# and/or the current call-site stack to the message.
import traceback
import sys

MAX_ADMIN_EMAIL_LENGTH = 15500  # ~16Kb


def _add_msg_footer_(msg, add_last_exception_stacktrace,
                     add_current_line_stacktrace):
    msg = str(msg).decode('utf-8')
    if add_last_exception_stacktrace:
        msg += "\nLast exception:\n" + traceback.format_exc()
    if add_current_line_stacktrace:
        frames = [
            frame for frame in traceback.format_stack()
            if __file__ not in frame]
        stacktrace_str = "".join(frames)
        msg += "\nLogger was called at the following line:\n" + stacktrace_str
    return msg


try:
    a.b = 5  # deliberately raises NameError ('a' is undefined) to exercise the footer
except Exception as e:
    f = _add_msg_footer_(e, True, True)
    import ipdb; ipdb.set_trace(context=5)
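# A minimal sketch of how the helper could be wired into logging (hypothetical;
# log_error and the truncation policy are assumptions, not part of the project):
#   import logging
#   def log_error(msg):
#       logging.error(_add_msg_footer_(msg, True, False)[:MAX_ADMIN_EMAIL_LENGTH])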