#!/usr/bin/env python2.7
# Equivalent to:
# dbus-send --print-reply --system --dest=org.freedesktop.UPower /org/freedesktop/UPower/devices/battery_BAT0 org.freedesktop.DBus.Properties.Get string:org.freedesktop.UPower.Device string:'Percentage'
import dbus

bus = dbus.SystemBus()
bat0_object = bus.get_object('org.freedesktop.UPower',
                             '/org/freedesktop/UPower/devices/battery_BAT0')
bat0 = dbus.Interface(bat0_object, 'org.freedesktop.DBus.Properties')
# Read the battery charge percentage via org.freedesktop.DBus.Properties.Get
print(bat0.Get('org.freedesktop.UPower.Device', 'Percentage'))
>>> import base64
>>> import dnslib
>>>
>>> dnsmsg = 'f2+AgAABAAEAAAAAA3d3dwRyaXBlA25ldAAAAQABwAwAAQABAAA3+gAEwQAGiw=='
>>> dnslib.DNSRecord.parse(base64.b64decode(dnsmsg))
<DNS Header: id=0x7f6f type=RESPONSE opcode=QUERY flags=RA rcode='NOERROR' q=1 a=1 ns=0 ar=0>
<DNS Question: 'www.ripe.net.' qtype=A qclass=IN>
<DNS RR: 'www.ripe.net.' rtype=A rclass=IN ttl=14330 rdata='193.0.6.139'>
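For the reverse direction, dnslib can also construct and encode a query for the same name; a minimal sketch (the transaction id is randomized, so the base64 output will not match the response above):
>>> q = dnslib.DNSRecord.question('www.ripe.net')  # A/IN query for the name parsed above
>>> base64.b64encode(q.pack())  # pack to DNS wire format, then base64-encode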
kjmkznr / Dockerfile: Mackerel Container Agent Image for ARM
FROM golang:alpine AS builder
RUN apk update && \
    apk add --no-cache git ca-certificates && \
    update-ca-certificates
ENV CGO_ENABLED=0
ENV GOOS=linux
ENV GOARCH=arm
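# The preview ends before the build and runtime stages. A rough sketch of how the
# image might continue (the repository URL, build path, and entrypoint below are
# assumptions based on the gist title, not content visible in the preview):
RUN go get -d github.com/mackerelio/mackerel-container-agent && \
    go build -o /mackerel-container-agent github.com/mackerelio/mackerel-container-agent

FROM scratch
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder /mackerel-container-agent /
ENTRYPOINT ["/mackerel-container-agent"]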
package main

import (
	"bytes"
	"flag"
	"io"
	"log"
	"net/http"
	"strconv"
)
---
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
  name: flannel
rules:
- apiGroups:
  - ""
  resources:
  - pods
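# The listing cuts the rules off here. For context, the stock kube-flannel
# ClusterRole continues roughly as below; this is an assumption based on the
# upstream manifest, not text visible in the preview.
  verbs:
  - get
- apiGroups:
  - ""
  resources:
  - nodes
  verbs:
  - list
  - watch
- apiGroups:
  - ""
  resources:
  - nodes/status
  verbs:
  - patch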
#!/usr/bin/env python3
from scapy.all import *

def decap_and_send(packet):
    # Protocol 4 in the outer IP header means IP-in-IP encapsulation
    if packet[IP][0].proto == 4:
        # Re-frame the inner IP packet with a fresh Ethernet header and re-emit it
        decap = (Ether() / packet[IP][1])
        sendp(decap, iface='dummy0')
        print(decap)
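# The preview stops before the capture loop; something like the following line
# would feed packets into the handler (the interface name and BPF filter here
# are assumptions, not part of the gist):
sniff(iface='eth0', filter='ip proto 4', prn=decap_and_send)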
LX8ogUgQVusVXr6fUruK5VPUoTwXHxbHxz
Section "InputClass"
Identifier "Keyboard-ThinkPad"
Driver "libinput"
#MatchProduct "ThinkPad Compact Bluetooth Keyboard with TrackPoint"
#MatchIsKeyboard "on"
#Option "SendCoreEvents" "true"
Option "XkbLayout" "dvorak,us"
Option "XkbVariant" "dvorak"
Option "XkbOptions" "ctrl:swapcaps,grp:alt_shift_toggle,grp_led:scroll"
EndSection
FROM java:8
# SPARK
ARG SPARK_ARCHIVE=http://ftp.jaist.ac.jp/pub/apache/spark/spark-2.1.0/spark-2.1.0-bin-hadoop2.7.tgz
ENV SPARK_HOME /usr/local/spark-2.1.0-bin-hadoop2.7
ENV PATH $PATH:${SPARK_HOME}/bin
RUN curl -s ${SPARK_ARCHIVE} | tar -xz -C /usr/local/
WORKDIR $SPARK_HOME
package main

import (
	"bytes"
	"flag"
	"fmt"
	"io"
	"log"
	"net/http"
	"strconv"