imfht
estest2017.malconmikami-fe20.aivencloud.com
kafka-2d42b206.geisenhut-9f20.aivencloud.com
grafana-31cce7e7.sre-a5b0.aivencloud.com
influx-e21823b.sre-a5b0.aivencloud.com
idbs-ecs-prod-us.sre-a5b0.aivencloud.com
grafana-2d97d3bd.ravekee-cec0.aivencloud.com
grafana-2ab8b991.systems-a3f0.aivencloud.com
influx-25c186bf.systems-a3f0.aivencloud.com
info-tlab034052.influx-25c186bf.systems-a3f0.aivencloud.com
kafka-prod.phx-prod-1.aivencloud.com
@imfht
imfht / finger.json
Created January 5, 2018 12:58
Mail system fingerprints
{
"products": [
{
"name": "eYou",
"banner": {
"http_banner": "亿邮电子邮件系统,亿邮邮件整体解决方案"
}
},
{
"name": "anymacro",
-- from https://stackoverflow.com/a/45537886/8591480
-- returns TRUE if the bytea argument decodes as valid UTF-8, FALSE otherwise
CREATE FUNCTION is_valid_utf8(bytea) RETURNS boolean
    LANGUAGE plpgsql AS
$$BEGIN
    PERFORM convert_from($1, 'UTF8');
    RETURN TRUE;
EXCEPTION
    WHEN character_not_in_repertoire THEN
        RAISE WARNING '%', SQLERRM;
        RETURN FALSE;
END;$$;
-- usage: SELECT is_valid_utf8('\xff'::bytea);  -- FALSE
@imfht
imfht / why_so_many_vuln.md
Created February 22, 2022 03:21
why_so_many_vuln.md
| cna | total_vul | total_company | most_common_company | total_product | most_common_product |
| --- | --- | --- | --- | --- | --- |
| CybersecurityCOE@eaton.com | 6 | 1 | eaton(6) | 3 | intelligent_power_ |
@imfht
imfht / why_so_many_vuln.md
Created February 22, 2022 03:20
why_so_many_vuln.md
#!/bin/sh
# compare the IPMI "Temp" sensor reading against a threshold
ipmi_host=idrac
ipmi_user=root
ipmi_passwd=xxxx
temp_threshold=60
get_value=$(ipmitool -I lanplus -H "$ipmi_host" -U "$ipmi_user" -P "$ipmi_passwd" sensor reading "Temp" | cut -d "|" -f 2 | cut -d " " -f 2)
if [ "$get_value" -gt "$temp_threshold" ]
then
for i in `seq 2002 2020`
do
wget https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-$i.json.gz && gunzip nvdcve-1.1-$i.json.gz
done
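The loop above fetches the yearly NVD 1.1 JSON feeds (2002-2020). A minimal sketch of how per-CNA counts like the why_so_many_vuln.md row could be tallied from those files, assuming the standard NVD 1.1 layout where the assigner lives at cve.CVE_data_meta.ASSIGNER:

import glob
import json
from collections import Counter

assigner_counts = Counter()
for path in glob.glob("nvdcve-1.1-*.json"):
    with open(path, encoding="utf-8") as f:
        feed = json.load(f)
    for item in feed["CVE_Items"]:
        # ASSIGNER is the CNA contact, e.g. CybersecurityCOE@eaton.com
        assigner_counts[item["cve"]["CVE_data_meta"]["ASSIGNER"]] += 1

for cna, total in assigner_counts.most_common(20):
    print(cna, total)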
package main
import (
"bytes"
"fmt"
"net"
"runtime"
"sync"
"sync/atomic"
"time"
import socket
import struct
import sys
from netaddr import IPNetwork
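# pkt below looks like a raw SMB2 NEGOTIATE request wrapped in a NetBIOS session header, used to probe port 445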
pkt = b'\x00\x00\x00\xc0\xfeSMB@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x08\x00\x01\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x00\x00\x00\x02\x00\x00\x00\x02\x02\x10\x02"\x02$\x02\x00\x03\x02\x03\x10\x03\x11\x03\x00\x00\x00\x00\x01\x00&\x00\x00\x00\x00\x00\x01\x00 \x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\n\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
subnet = sys.argv[1]
for ip in IPNetwork(subnet):
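A minimal sketch of how the probe above could be sent to each address in that loop, assuming TCP port 445 and a short timeout; the connection handling and the \xfeSMB check are illustrative, not taken from the gist:

import socket

def probe_smb2(ip, timeout=3):
    # connect to TCP/445, send the pkt defined above and inspect the reply
    try:
        with socket.create_connection((str(ip), 445), timeout=timeout) as s:
            s.sendall(pkt)
            data = s.recv(1024)
        # an SMB2 reply carries \xfeSMB right after the 4-byte NetBIOS length prefix
        return len(data) >= 8 and data[4:8] == b'\xfeSMB'
    except OSError:
        return False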
@imfht
imfht / pyspark_cos_qcloud_example.py
Created April 14, 2020 01:30
An example showing how to use COS (Tencent Cloud Object Storage) with PySpark
import os
from pyspark import SparkContext
# first you'll need to download hadoop-cos-x.x.x-shaded.jar from https://github.com/tencentyun/hadoop-cos
os.environ['PYSPARK_SUBMIT_ARGS'] = '--jars ./hadoop-cos-2.8.5-shaded.jar pyspark-shell'
sc = SparkContext(appName="wordCount").getOrCreate()
# some basic configuration, find more at https://cloud.tencent.com/document/product/436/6884
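The trailing comment points at Tencent's COS configuration docs; a sketch of the kind of Hadoop properties the hadoop-cos connector expects, applied through the SparkContext's Hadoop configuration. The property names and the placeholder bucket/region values are assumptions taken from the hadoop-cos README, not from the gist:

hadoop_conf = sc._jsc.hadoopConfiguration()
# assumed hadoop-cos property names; check the connector docs for your version
hadoop_conf.set("fs.cosn.impl", "org.apache.hadoop.fs.CosFileSystem")
hadoop_conf.set("fs.AbstractFileSystem.cosn.impl", "org.apache.hadoop.fs.CosN")
hadoop_conf.set("fs.cosn.userinfo.secretId", "<your-secret-id>")
hadoop_conf.set("fs.cosn.userinfo.secretKey", "<your-secret-key>")
hadoop_conf.set("fs.cosn.bucket.region", "ap-guangzhou")  # example region

# read straight from a COS path once the connector is configured (bucket name is a placeholder)
words = sc.textFile("cosn://examplebucket-1250000000/input.txt").flatMap(lambda line: line.split())
print(words.count())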