
@fsndzomga
Created June 5, 2024 03:23
import numpy as np
import math

# Function to generate polynomial features: column i holds x**(i+1)
def generate_polynomial_features(x, degree):
    features = np.zeros((len(x), degree))
    for i in range(degree):
        features[:, i] = np.power(x, i + 1)
    return features
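# Illustration: for x = [0.0, 0.5, 1.0] and degree = 2, this returns
#   [[0.0 , 0.0 ],
#    [0.5 , 0.25],
#    [1.0 , 1.0 ]]
# i.e. the columns are x**1 and x**2; there is no constant/bias column.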
# y will hold the primes below 2002 (303 values, matching the 303 points in x)
def is_prime(n):
    """Function to check if a number is prime"""
    if n <= 1:
        return False
    if n <= 3:
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    i = 5
    while i * i <= n:
        if n % i == 0 or n % (i + 2) == 0:
            return False
        i += 6
    return True
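# Quick illustrative check of is_prime (uncomment to run):
# assert is_prime(2) and is_prime(17) and is_prime(1999)
# assert not (is_prime(1) or is_prime(21) or is_prime(2001))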
# Create the input grid and the target values
x = np.linspace(0, 1, 303)

# Generate y as the array of primes below 2002 (303 values)
y = np.array([i for i in range(2, 2002) if is_prime(i)])
# y = np.sin(x)
# Define the degree of the polynomial
degree = 303
# Generate polynomial features
X = generate_polynomial_features(x, degree)
# Randomly initialize weights
weights = np.random.randn(degree)
learning_rate = 1e-3
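# Gradient used in the loop below: with loss = sum((X @ w - y) ** 2), the
# derivative with respect to w is 2 * X.T @ (X @ w - y), which is exactly
# what grad_y_pred and gradients compute before the update step.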
for t in range(2000000):
    # Forward pass: compute predicted y
    y_pred = np.dot(X, weights)

    # Compute and print loss
    loss = np.square(y_pred - y).sum()
    if t % 100 == 30:
        print(t, loss)

    # Backpropagation to compute gradients of weights with respect to loss
    grad_y_pred = 2.0 * (y_pred - y)
    gradients = np.dot(X.T, grad_y_pred)

    # Update weights
    weights -= learning_rate * gradients
# Print the result as a polynomial in x (column 0 of X is x**1, so the
# first term is weights[0] * x, not a constant)
result = "y = "
for i in range(degree):
    if i == 0:
        result += f"{weights[i]} * x"
    else:
        result += f" + {weights[i]} * x^{i+1}"
# print(result)
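
# Optional: one way to inspect the fit is to evaluate the learned polynomial
# on the training grid and compare it against the prime targets, e.g.:
# fitted = X.dot(weights)
# print("max absolute error on the grid:", np.max(np.abs(fitted - y)))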