virtualenv venv
source venv/Scripts/activate  # Windows layout; on Linux/macOS use: source venv/bin/activate
Then install packages and freeze them in a file:
pip freeze > requirements.txt
To recreate an existing virtual env, install the pinned dependencies from the requirements file:
pip install -r requirements.txt
# f-string interpolation: expressions inside {} are evaluated at runtime.
x = 5
print(f'the value of x is {x}')
Here is a basic template of a script with arguments and logging:
import argparse
import logging
import os
def main():
    """Entry point: parse CLI arguments, configure logging, dispatch on sub-command."""
    args, args_parser = parse_cli_args()
    init_logging(args)
    # logging.info(...) is the idiomatic shortcut for logging.log(logging.INFO, ...)
    logging.info("Hello World")
    if args.action == 'parser1':
        parser1()
    elif args.action == 'parser2':
        parser2()
    else:
        # No sub-command given: print usage rather than doing nothing silently.
        args_parser.print_help()
def parser1():
    """Handler for the 'parser1' sub-command (demo: just logs its own name)."""
    logging.info('parser1')
def parser2():
    """Handler for the 'parser2' sub-command (demo: just logs its own name)."""
    logging.info('parser2')
def init_logging(args):
    """Configure the root logger from parsed CLI args.

    Logs go to args.log_file when given (overwriting it), otherwise to the
    console. A log file ending in '.html' gets a '<br>' appended to each
    record so the log renders line-by-line in a browser.
    """
    base_fmt = '%(asctime)s - %(levelname)s - %(message)s'
    if not args.log_file:
        logging.basicConfig(format=base_fmt, level=logging.INFO)
        logging.info('Logging to standard output')
        return
    is_html = os.path.splitext(args.log_file)[1] == '.html'
    fmt = base_fmt + '<br>' if is_html else base_fmt
    logging.basicConfig(filename=args.log_file, filemode='w',
                        format=fmt, level=logging.INFO)
    logging.info(f'Logging to file {args.log_file}')
def parse_cli_args():
    """Build the argument parser and parse sys.argv.

    Returns:
        (args, parser): the parsed namespace and the parser itself (the
        parser is returned so the caller can print help when no action
        was selected).
    """
    def parse_string_list(string):
        # Turns a comma-separated option value into a list, e.g. 'a,b' -> ['a', 'b'].
        return string.split(',')

    parser = argparse.ArgumentParser(description='Desc')
    parser.add_argument('-l', '--log-file', help='Path to log file.')
    commands = parser.add_subparsers(title='commands', dest='action')
    # Named p1/p2 (not parser1/parser2) to avoid shadowing the module-level
    # handler functions of the same names.
    p1 = commands.add_parser('parser1')
    p1.add_argument('arg1', help='arg1')
    p2 = commands.add_parser('parser2')
    p2.add_argument('arg2', help='arg2')
    p2.add_argument('-o1', '--optional-arg1',
                    type=int, help='optional_arg')
    # Fixed typo in the help text: 'otional_string_list' -> 'optional_string_list'.
    p2.add_argument('-o2', '--optional-arg2',
                    type=parse_string_list, help='optional_string_list')
    return parser.parse_args(), parser
# Run main() only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
When using threads with flask, the hot reloader must be disabled because it creates a fork of the application, leading to multiple instances of the same thread.
from flask import Flask
import time, threading
from threading import Thread
class MyThread(Thread):
    """Background worker that prints its thread id once per second.

    Cooperative shutdown: the owner sets ``worker.stop = True`` and the
    loop exits after the current one-second sleep.
    """

    def __init__(self):
        super().__init__()
        self.stop = False  # polled by run(); set to True to request shutdown

    def run(self):
        while True:
            if self.stop:
                break
            time.sleep(1)
            print("I am a thread {}".format(threading.get_ident()))
        print("Bye !")
def main():
    """Run a worker thread alongside a threaded Flask development server."""
    app = Flask(__name__)

    @app.route('/')
    def hello():
        time.sleep(5)
        return 'hello'

    @app.route('/bonjour')
    def bonjour():
        return 'bonjour'

    worker = MyThread()
    worker.start()
    # use_reloader=False is important otherwise flask seems to fork and create two thread instances
    app.run(debug=True, threaded=True, use_reloader=False)
    # app.run() blocks; once the server exits, ask the worker to stop and wait.
    worker.stop = True
    worker.join()


main()
import sys  # was missing: print_exc(file=sys.stdout) needs sys in scope
import traceback

# print_exc writes to stderr by default; pass file=sys.stdout to redirect it.
traceback.print_exc(file=sys.stdout)
import os
# True only for an existing regular file (False for directories/missing paths).
# NOTE(review): fname is a placeholder — define it before running this snippet.
result = os.path.isfile(fname)
https://jeffknupp.com/blog/2016/03/07/python-with-context-managers/
Implemented with a generator function instead of a class:
from contextlib import contextmanager


@contextmanager
def timer(msg):
    """Context manager that logs *msg* and the elapsed wall-clock time on exit.

    Bug fix: the original referenced self.msg / self.start (leftovers from a
    class-based implementation) inside a plain function, which would raise
    NameError on exit.
    """
    start = time.time()  # Executed in __enter__
    try:
        yield
    finally:
        # Executed in __exit__, even when the with-body raises.
        logging.info("{} - [ {:.3f}s ]".format(msg, time.time() - start))
A function that allows iterating over both stdout and stderr concurrently, in real time, line by line.
In case you need to get the output stream for both stdout and stderr at the same time, you can use the following function.
The function uses Queues to merge both Popen pipes into a single iterator.
Here we create the function read_popen_pipes():
from queue import Queue, Empty
from concurrent.futures import ThreadPoolExecutor
def enqueue_output(file, queue):
    """Forward every line from *file* into *queue*, then close the file.

    Runs until readline() returns '' (EOF on a text-mode pipe).
    """
    line = file.readline()
    while line != '':
        queue.put(line)
        line = file.readline()
    file.close()
def read_popen_pipes(p):
    """Yield (stdout_line, stderr_line) pairs from a running Popen *p*.

    Two reader threads push lines from each pipe into its own queue; the
    generator merges both queues and yields until the process has exited
    AND both queues are drained. A missing line is yielded as ''.
    """
    with ThreadPoolExecutor(2) as pool:
        q_stdout, q_stderr = Queue(), Queue()

        pool.submit(enqueue_output, p.stdout, q_stdout)
        pool.submit(enqueue_output, p.stderr, q_stderr)

        while True:
            if p.poll() is not None and q_stdout.empty() and q_stderr.empty():
                break

            out_line = err_line = ''
            # Bug fix: each queue needs its own try/except. With a single
            # block, an Empty from q_stdout skipped the q_stderr read, so
            # stderr was never drained and the loop could spin forever
            # after the process exited.
            try:
                out_line = q_stdout.get_nowait()
            except Empty:
                pass
            try:
                err_line = q_stderr.get_nowait()
            except Empty:
                pass

            yield (out_line, err_line)
read_popen_pipes() in use:
# Example usage (illustrative fragment): assumes my_cmd is defined and that
# this code lives inside a function — hence the trailing `return`.
import subprocess as sp
# text=True gives str lines, matching iter(file.readline, '') in enqueue_output.
with sp.Popen(my_cmd, stdout=sp.PIPE, stderr=sp.PIPE, text=True) as p:
for out_line, err_line in read_popen_pipes(p):
# Do stuff with each line, e.g.:
print(out_line, end='')
print(err_line, end='')
return p.poll() # return status-code