Bastiaan Quast (bquast)
bquast / BGV-2.py
Created January 3, 2024 14:05
BGV in Python
import numpy as np
from numpy.polynomial import Polynomial

def polynomial_modulo(polynomial, mod):
    """
    Perform polynomial modulo operation using divmod.
    """
    q, r = divmod(polynomial, mod)
    return r
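A minimal usage sketch (not part of the gist), appended to the snippet above and assuming the ring modulus x^4 + 1:

poly_mod = Polynomial([1, 0, 0, 0, 1])   # x^4 + 1
p = Polynomial([1, 3, 0, 0, 0, 1])       # 1 + 3x + x^5
print(polynomial_modulo(p, poly_mod))    # prints the remainder 1 + 2x, degree < 4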
bquast / BFV-2.py
Last active January 3, 2024 13:56
BFV in Python without using functions
import numpy as np
from numpy.polynomial import Polynomial

def polynomial_modulo(polynomial, mod):
    """
    Perform polynomial modulo operation using divmod.
    """
    q, r = divmod(polynomial, mod)
    return r
bquast / CKKS-encoder.R
Last active December 23, 2023 23:18
Homomorphic Encryption CKKS encoder R
library(polynom)

M <- 8
N <- M %/% 2
scale <- 64
xi <- complex(real = cos(2 * pi / M), imaginary = sin(2 * pi / M))

vandermonde <- function(xi, M) {
  N <- M %/% 2
  # Initialize an empty matrix with complex data type
bquast / CKKSencoder.py
Last active December 19, 2023 12:37
OpenMind CKKSencoder CKKS encoder Daniel Huynh
import numpy as np
from numpy.polynomial import Polynomial
# Set the parameters
M = 8
N = M // 2
scale = 64
xi = np.exp(2 * np.pi * 1j / M)
def vandermonde(xi: np.complex128, M: int) -> np.array:
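The preview breaks off at the signature. Below is a sketch of one common way such a body is completed in CKKS encoders, building the Vandermonde matrix from the odd powers of xi; this is an assumption, not necessarily the gist's exact code:

import numpy as np

def vandermonde(xi: np.complex128, M: int) -> np.array:
    # each row holds the powers of xi^(2i+1), an odd power of the M-th root of unity
    N = M // 2
    matrix = []
    for i in range(N):
        root = xi ** (2 * i + 1)
        row = [root ** j for j in range(N)]
        matrix.append(row)
    return np.array(matrix)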
library(matrixStats)

# Softmax function
softmax <- function(x) {
  exp_x <- exp(x - max(x))
  exp_x / sum(exp_x)
}

# Scaled dot product attention
scaled_dot_product_attention <- function(Q, K, V, mask = NULL) {
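The R preview stops at this signature. For reference, the computation it names, softmax(Q K^T / sqrt(d_k)) V, written as a compact NumPy sketch; the boolean mask convention (True marks positions that may be attended to) is an assumption:

import numpy as np

def scaled_dot_product_attention_np(Q, K, V, mask=None):
    d_k = K.shape[-1]
    scores = Q @ K.T / np.sqrt(d_k)              # query-key similarities, scaled
    if mask is not None:
        scores = np.where(mask, scores, -1e9)    # masked-out positions get ~zero weight
    weights = np.exp(scores - scores.max(axis=-1, keepdims=True))
    weights = weights / weights.sum(axis=-1, keepdims=True)   # row-wise softmax
    return weights @ V                           # weighted sum of the values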
# HE illustrated primer
# define some parameters (small as an example)
# N.B. these parameters are not secure, they are completely insecure
d = 4
n = 2^d
t = (n/2)-1
q = 874
# load library to create polynomials
# HE illustrated primer
# define some parameters (small as an example)
# N.B. these parameters are not secure, they are completely insecure
n = 4
d = 2^n
t = 7
q = 874
# load library to create polynomials
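A small Python illustration (not part of either gist) of what toy parameters like these are typically used for in an RLWE-style scheme: a ring modulus x^d + 1 of degree d, plaintext coefficients reduced mod t, and ciphertext coefficients reduced mod q; this reading of the parameter names follows the second snippet and is an assumption:

import numpy as np
from numpy.polynomial import Polynomial

d, t, q = 16, 7, 874                                # toy, insecure parameters
poly_mod = Polynomial([1] + [0] * (d - 1) + [1])    # ring modulus x^d + 1

m = Polynomial(np.random.randint(0, t, d))          # plaintext polynomial, coefficients mod t
a = Polynomial(np.random.randint(0, q, d))          # uniform ring element, coefficients mod q

_, r = divmod(a * m, poly_mod)                      # multiply, then reduce modulo x^d + 1
print(r.coef % q)                                   # bring coefficients back into [0, q)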
bquast / attention.R
Last active May 18, 2023 13:50
R implementation of attention, see blog post: https://qua.st/attention-in-R
# attention.R
# Bastiaan Quast
# bquast@gmail.com
# based on:
# https://machinelearningmastery.com/the-attention-mechanism-from-scratch/
# encoder representations of four different words
word_1 = matrix(c(1,0,0), nrow=1)
word_2 = matrix(c(0,1,0), nrow=1)
# logsumexp: numerically stable log of a sum of exponentials
logsumexp <- function(x) {
  y <- max(x)
  y + log(sum(exp(x - y)))
}

# softmax computed via logsumexp to avoid overflow
softmax <- function(x) {
  exp(x - logsumexp(x))
}
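For example, softmax(c(2, 1, 0)) evaluates to roughly (0.665, 0.245, 0.090); subtracting max(x) inside logsumexp is what keeps exp() from overflowing for large inputs.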
# constructor function
tensor <- function(x) {
  # check that it's numeric
  if (!is.numeric(x)) stop("x must be numeric")
  # create the array and change the class
  y <- structure(array(x), class = "tensor")
  # add attributes
  attributes(y)$creators <- list()