Ayoub Benaissa (youben11)

youben11 / bsides22_aes.py
Last active January 5, 2022 17:47
Bsides 2022 Crypto Workshop AES
import os
from Crypto.Cipher import AES
# Can be 16, 24, or 32 bytes
KEY = b"A"*16
############
# ECB mode #
############
aes_ecb = AES.new(KEY, mode=AES.MODE_ECB)
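ECB encrypts each 16-byte block independently with the same key, so equal plaintext blocks produce equal ciphertext blocks. A minimal sketch of that property, not part of the original gist, using the aes_ecb object above:

plaintext = b"SIXTEEN BYTE MSG" * 2            # two identical 16-byte blocks
ciphertext = aes_ecb.encrypt(plaintext)        # ECB input length must be a multiple of 16 bytes
assert ciphertext[:16] == ciphertext[16:32]    # the repetition is visible in the ciphertext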
evaluate("Encryption is awesome")
+ the sentence was positive (original: 99.99%, simulated: 99.99%, actual: 99.99%, difference: 0.00%, took: 33.813 seconds)
import numpy as np

def evaluate(sentence):
    try:
        # encode() maps the sentence to a (num_tokens, 300) embedding array
        embedded = encode(sentence)
    except KeyError as error:
        print("! the word", error, "is unknown")
        return
    if embedded.shape[0] > SENTENCE_LENGTH_LIMIT:
        print(f"! the sentence should not contain more than {SENTENCE_LENGTH_LIMIT} tokens")
        return
    padded = np.zeros((SENTENCE_LENGTH_LIMIT, 300))
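The preview cuts the function off after allocating the padding buffer. The continuation below is only a sketch assembled from the hnp calls shown in the other snippets (encrypt, run, decrypt); names such as encrypted and score are chosen here for illustration, and it does not reproduce the original/simulated/actual comparison printed above.

    padded[: embedded.shape[0]] = embedded                          # pad the sentence to a fixed length
    encrypted = keys.encrypt(padded)                                # encrypt the embeddings client-side
    result = homomorphic_inferer.run(keys.public_keys, encrypted)   # inference runs on ciphertexts
    score = float(keys.decrypt(result))                             # only the key holder can decrypt
    print("+ the sentence was", "positive" if score > 0.5 else "negative")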
# Generate keys for the compiled circuit (defined just below)
context = homomorphic_inferer.create_context()
keys = context.keygen()
SENTENCE_LENGTH_LIMIT = 5
inferer = Inferer(model)
# Compile the plain-numpy inference function into an FHE circuit that takes
# an encrypted (SENTENCE_LENGTH_LIMIT, 300) embedding with values in [-1, 1]
homomorphic_inferer = hnp.compile_fhe(
    inferer.infer,
    {
        "x": hnp.encrypted_ndarray(bounds=(-1, 1), shape=(SENTENCE_LENGTH_LIMIT, 300))
    },
    config=hnp.config.CompilationConfig(
class Inferer:
    def __init__(self, model):
        # PyTorch packs the LSTM parameters as weight_ih, weight_hh, bias_ih,
        # bias_hh, each stacking the input (i), forget (f), cell (g) and
        # output (o) gate blocks; split(HIDDEN_SIZE) recovers the per-gate parts
        parameters = list(model.lstm.parameters())
        W_ii, W_if, W_ig, W_io = parameters[0].split(HIDDEN_SIZE)
        W_hi, W_hf, W_hg, W_ho = parameters[1].split(HIDDEN_SIZE)
        b_ii, b_if, b_ig, b_io = parameters[2].split(HIDDEN_SIZE)
        b_hi, b_hf, b_hg, b_ho = parameters[3].split(HIDDEN_SIZE)
        # Keep plain numpy copies of the weights
        self.W_ii = W_ii.detach().numpy()
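The preview stops after the first weight is copied. Assuming the remaining gate weights and biases are stored the same way, and that self.W_fc/self.b_fc hold the weights of model.fc (names chosen here for illustration), an infer method running the LSTM recurrence in plain numpy could look like this sketch:

    def infer(self, x):
        # Standard LSTM cell written with numpy only, so it can be compiled to FHE
        sigmoid = lambda z: 1 / (1 + np.exp(-z))
        h = np.zeros(HIDDEN_SIZE)
        c = np.zeros(HIDDEN_SIZE)
        for t in range(x.shape[0]):
            i = sigmoid(self.W_ii @ x[t] + self.b_ii + self.W_hi @ h + self.b_hi)
            f = sigmoid(self.W_if @ x[t] + self.b_if + self.W_hf @ h + self.b_hf)
            g = np.tanh(self.W_ig @ x[t] + self.b_ig + self.W_hg @ h + self.b_hg)
            o = sigmoid(self.W_io @ x[t] + self.b_io + self.W_ho @ h + self.b_ho)
            c = f * c + i * g
            h = o * np.tanh(c)
        # Final classifier: linear layer followed by a sigmoid, as in Model
        return sigmoid(self.W_fc @ h + self.b_fc)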
import torch

HIDDEN_SIZE = 100

class Model(torch.nn.Module):
    def __init__(self):
        super().__init__()
        # Single LSTM layer over 300-d word embeddings, followed by a linear
        # layer and a sigmoid for binary sentiment classification
        self.lstm = torch.nn.LSTM(input_size=300, hidden_size=HIDDEN_SIZE)
        self.fc = torch.nn.Linear(HIDDEN_SIZE, 1)
        self.sigmoid = torch.nn.Sigmoid()
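The preview does not include a forward method; a minimal sketch of one that matches this architecture (classifying from the last hidden state) could be:

    def forward(self, x):
        # x: (sequence_length, batch, 300); h_n holds the last hidden state
        output, (h_n, c_n) = self.lstm(x)
        return self.sigmoid(self.fc(h_n[-1]))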
# `h` is a compiled FHE circuit and `x` its plaintext input
ctx = h.create_context()
keys = ctx.keygen()

# Encrypt the input, run the circuit on ciphertexts, then decrypt the result
x_enc = keys.encrypt(x)
res = h.run(keys.public_keys, x_enc)
print(f"Encrypted computation result: {keys.decrypt(res)}")

# Or use the encrypt-and-run helper directly
print(f"Encrypted computation result: {h.encrypt_and_run(keys, x)}")