Initialize a git repo in the current directory
# git init
Add a remote called "origin"
docker run -it ubuntu:16.04
apt-get -y update
apt-get -y upgrade
apt-get install -y python3-pip
pip3 install awscli
aws --version
aws-cli/1.14.48 Python/3.5.2 Linux/4.13.0-36-generic botocore/1.9.1
package main | |
import ( | |
"context" | |
"fmt" | |
"log" | |
"net/http" | |
"time" | |
) |
#!/bin/bash
# Run the package's tests with a coverage profile, then render the
# profile as an HTML report (opens in the default browser).
# Fix: removed stray "| |" extraction residue — "cmd | |" is a bash
# syntax error (pipe into an empty command).
go test -coverprofile=coverage.out
go tool cover -html=coverage.out
/* | |
* Sample API with GET and POST endpoint. | |
* POST data is converted to string and saved in internal memory. | |
* GET endpoint returns all strings in an array. | |
*/ | |
package main | |
import ( | |
"encoding/json" | |
"flag" |
#!/bin/bash
# Deprecation stub: points users at the maintained stackme.sh script
# and exits before doing anything else.
# Fix: removed stray "| |" extraction residue that made each line a
# bash syntax error (pipe into an empty command).
echo "Please use stackme.sh from https://github.com/rm-you/devstack_deploy instead! Always the latest:"
echo "bash <(curl -sL bit.do/devstack)"
exit

# NOTE(review): everything below is unreachable after the exit above;
# kept for reference only.
BARBICAN_PATCH=""
NEUTRON_LBAAS_PATCH=""
OCTAVIA_PATCH="refs/changes/30/278830/6"
# Create LB
http://localhost:9876/v1/loadbalancers
{
    "name": "loadbalancer1",
    "description": "simple lb",
    "vip": {"subnet_id": "ac7d27ae-7ace-459f-ae11-a4568abd227a"}
}
# Create Listener
http://localhost:9876/v1/loadbalancers/0c59f9c3-13e2-47ad-bf7e-fd533c51e918/listeners
import shutil, tempfile | |
from os import path | |
import unittest | |
class TestExample(unittest.TestCase): | |
def setUp(self): | |
# Create a temporary directory | |
self.test_dir = tempfile.mkdtemp() | |
def tearDown(self): |
Initialize a git repo in the current directory
# git init
Add a remote called "origin"
As of version 3.3, Python includes the very promising concurrent.futures
module, with elegant context managers for running tasks concurrently. Thanks to the simple and consistent interface you can use both threads and processes with minimal effort.
For most CPU bound tasks - anything that is heavy number crunching - you want your program to use all the CPUs in your PC. The simplest way to get a CPU bound task to run in parallel is to use the ProcessPoolExecutor, which will create enough sub-processes to keep all your CPUs busy.
We use the context manager as follows:
with concurrent.futures.ProcessPoolExecutor() as executor:
// $ 6g echo.go && 6l -o echo echo.6 | |
// $ ./echo | |
// | |
// ~ in another terminal ~ | |
// | |
// $ nc localhost 3540 | |
package main | |
import ( |