docker --version
Docker version 1.13.1, build 092cba3
- Start a Docker container (Ubuntu):
docker run -it ubuntu bash
root@1834dea29581:/# apt-get update
root@1834dea29581:/# apt-get install vim -y
This is a working version : | |
from ansible import utils, errors | |
import random, string | |
class LookupModule (object): | |
""" This lookup generate a UUID of the length specified by the term | |
Usage: | |
set_fact: container_id="{{ lookup('uuid',32) }}" |
- set_fact: action_data="{'action':'one','data':'two'}" | |
tags: | |
- deploy_container | |
- deploy_container_id | |
- debug: msg="say {{ action_data['action'] }}" | |
tags: | |
- deploy_container | |
- deploy_container_id |
// _Channels_ are the pipes that connect concurrent | |
// goroutines. You can send values into channels from one | |
// goroutine and receive those values into another | |
// goroutine. | |
package main | |
import "fmt" | |
import "time" |
--- | |
- hosts: localhost | |
user: belisarius | |
vars: | |
test: | |
- [ "uid", ["val1","val2"]] | |
- [ "uid2", ["val1","val2"]] | |
tasks: | |
- debug: msg="{{ test }}" | |
# result is : |
#!/usr/bin/env python
import os
from pprint import pprint


def count_words(path="text.txt"):
    """Count the whitespace-separated words in the file at ``path``.

    Returns a tuple ``(total, freq)`` where ``total`` is the number of
    words in the whole file and ``freq`` maps each word to the number of
    times it occurs.
    """
    freq = {}
    total = 0
    with open(path) as f:
        # Iterate over every line of the file. The original version used
        # f.readline().split(), which only counted words on the FIRST
        # line, and never populated the word_list dict it declared.
        for line in f:
            for word in line.split():
                total += 1
                freq[word] = freq.get(word, 0) + 1
    return total, freq


if __name__ == "__main__":
    # Preserve the script's original top-level names when run directly.
    count, word_list = count_words()
#!/usr/bin/env python | |
import os | |
from pprint import pprint | |
class word_counter(): | |
def __init__(self): | |
print "starting the word counter Class" | |
def topwords(self,fileDesc,count): |
docker --version
Docker version 1.13.1, build 092cba3
docker run -it ubuntu bash
root@1834dea29581:/# apt-get update
root@1834dea29581:/# apt-get install vim -y
### Keybase proof

I hereby claim:

  * I am prune998 on github.
  * I am prune (https://keybase.io/prune) on keybase.
  * I have a public key ASBe2BvV992Wnl4co7H3NDXUmWPitPIMYlj7ISAG_PULqgo

To claim this, I am signing this object:
apiVersion: kafka.banzaicloud.io/v1beta1 | |
kind: KafkaCluster | |
metadata: | |
labels: | |
controller-tools.k8s.io: "1.0" | |
kafka_cr: kf-kafka | |
name: kf-kafka | |
namespace: alerting | |
spec: | |
headlessServiceEnabled: false |
File: envoy | |
Type: inuse_space | |
Showing nodes accounting for 792089.69kB, 95.07% of 833184.75kB total | |
Dropped 1124 nodes (cum <= 4165.92kB) | |
flat flat% sum% cum cum% | |
771712kB 92.62% 92.62% 771712.26kB 92.62% Envoy::Buffer::OwnedImpl::add | |
6791.91kB 0.82% 93.44% 6791.91kB 0.82% nghttp2_bufs_init3 | |
5918.74kB 0.71% 94.15% 5918.74kB 0.71% Envoy::Stats::FakeSymbolTableImpl::populateList | |
2448.34kB 0.29% 94.44% 21163.86kB 2.54% std::__1::__function::__func::operator() | |
1546.27kB 0.19% 94.63% 7097.63kB 0.85% Envoy::Upstream::ClusterManagerImpl::ThreadLocalClusterManagerImpl::ClusterEntry::ClusterEntry |