- start a mock network
> export NUM_NODES=1
> bazel run //src/assembly-dev:start_mock_network
- create 2 key aliases
curl -X POST http://127.0.0.1:32100/api/v1/key_aliases\?sync\=true
curl -X POST http://127.0.0.1:32100/api/v1/key_aliases\?sync\=true
linux-vdso.so.1 => (0x00007ffe699be000)
libtinfo.so.5 => /lib64/libtinfo.so.5 (0x00007f8af5fca000)
libsodium.so.23 => /lib64/libsodium.so.23 (0x00007f8af5d72000)
libpq.so.5 => /lib64/libpq.so.5 (0x00007f8af5b43000)
libz.so.1 => /lib64/libz.so.1 (0x00007f8af592d000)
libpthread.so.0 => /lib64/libpthread.so.0 (0x00007f8af5711000)
librt.so.1 => /lib64/librt.so.1 (0x00007f8af5509000)
libutil.so.1 => /lib64/libutil.so.1 (0x00007f8af5306000)
libdl.so.2 => /lib64/libdl.so.2 (0x00007f8af5102000)
libgmp.so.10 => /lib64/libgmp.so.10 (0x00007f8af4e8a000)
> export NUM_NODES=1
> bazel run //src/assembly-dev:stop_mock_network
curl -X POST http://127.0.0.1:32100/api/v1/key_aliases\?sync\=true
curl -X POST http://127.0.0.1:32100/api/v1/key_aliases\?sync\=true
module ConcurrentMap where

import Control.Concurrent.STM.TVar (TVar)
import Control.Concurrent.STM (STM)
import Data.Hashable (Hashable)
import Data.HashMap.Strict (HashMap)
import Data.Vector (Vector)
import qualified Control.Concurrent.STM.TVar as TVar
import qualified Data.Hashable as Hashable
testCase "Example test case" $ do
  assertBool "arithmetic is still sane" $ 2 + 2 == 4
-- Reset the state database from scratch: drop the database and every
-- service role that may own objects in it, then recreate it empty.
-- (Roles must be dropped after the database so no owned objects remain.)
DROP DATABASE IF EXISTS state_db;
DROP ROLE IF EXISTS datadog;
DROP ROLE IF EXISTS txe;
DROP ROLE IF EXISTS sailfish;
DROP ROLE IF EXISTS api_server;
CREATE DATABASE state_db;
# Strip "Publish" payloads from the numbered snapshot JSON files, rewriting
# each $i.json in place. The JSON path to the Publish tag changed between
# snapshot format versions, so each version range needs its own jq filter.
# jq -c applies the filter; the second jq pass pretty-prints with a 4-space
# indent; mv replaces the original only after both succeed.

# versions 1-5: tag lives at .[1].contents[4].tag
for i in {1..5}; do
  jq -c '.channelActionAndSequencedTransaction |= map(select(.[1].contents[4].tag != "Publish"))' "$i.json" \
    | jq --indent 4 . > "$i-x.json" && mv "$i-x.json" "$i.json"
done

# version 6: tag lives at .[2][4].tag
for i in 6; do
  jq -c '.channelActionAndSequencedTransaction |= map(select(.[2][4].tag != "Publish"))' "$i.json" \
    | jq --indent 4 . > "$i-x.json" && mv "$i-x.json" "$i.json"
done

# versions 7-8: tag lives at .[2].wcaChannelAction.tag ('?' tolerates entries
# where the key is absent).
# FIX: the original loop never moved $i-x.json back over $i.json, so the
# filtered output was silently left in the wrong file.
for i in {7..8}; do
  jq -c '.channelActionAndSequencedTransaction |= map(select(.[2].wcaChannelAction.tag? != "Publish"))' "$i.json" \
    | jq --indent 4 . > "$i-x.json" && mv "$i-x.json" "$i.json"
done
# Find all txe log files among the pod logs and copy them locally with an
# incrementing number. `cat -n` numbers from 1, so the copies are
# 1.log, 2.log, 3.log, ... (the "0.log" below is the filename being searched
# for, not the first output file).
# This uses:
# - https://github.com/sharkdp/fd         (fast alternative to find)
# - https://github.com/BurntSushi/ripgrep (fast alternative to grep)
# cp -n never overwrites an existing numbered copy; read -r keeps any
# backslashes in the path intact.
fd 0.log | rg txe | cat -n | while read -r n f; do cp -n "$f" "$n.log"; done
# Extract the 'message' field from each downloaded pod log and write it to
# txe-1.log .. txe-4.log. Each log line's .log field is itself a JSON string,
# so it is re-parsed with `fromjson` before pulling out .message.
# (jq reads the file directly; no need to pipe through cat.)
for i in 1 2 3 4; do
  jq '.log | fromjson | .message' ~/Downloads/symbiont/logs/"$i".log > "txe-$i.log"
done
# Pull every service image for a given commit tag, for example:
TAG=dade6249f8206e13150f464ad2b0df4cc1a07bd5
# FIX: the original list contained 'api-server' twice, triggering a redundant
# duplicate pull; the duplicate has been removed.
for i in sailfish smartlog epilog apollo api-server txe txe-postgres; do
  docker pull "us.gcr.io/development-148212/$i:$TAG"
done