Skip to content

Instantly share code, notes, and snippets.

View tirthajyoti's full-sized avatar
🎯
Creating ideas

Tirthajyoti Sarkar tirthajyoti

🎯
Creating ideas
View GitHub Profile
@tirthajyoti
tirthajyoti / distfit-demo-1.py
Last active August 16, 2021 05:16
Distfit demo
# Draw a synthetic sample: 1000 points from a normal distribution N(5, 10).
sample = np.random.normal(loc=5.0, scale=10, size=1000)
# Configure the fitter: 25 histogram bins, 2% significance level, and the
# Kolmogorov-Smirnov statistic for ranking candidate distributions.
dist1 = distfit(bins=25, alpha=0.02, stats='ks')
# Fit the candidate distributions to the sample (verbose progress output).
dist1.fit_transform(sample, verbose=1)
@tirthajyoti
tirthajyoti / comb.scala
Created October 4, 2020 09:04
A simple Scala object for computing combinations (nCr)
// Combinatorics helper. NOTE(review): this excerpt is truncated — the
// closing braces of `nCr` and `object comb` (and the rest of nCr's body)
// are outside this view.
object comb {
// n choose r. NOTE(review): Int factorial overflows for n > 12 — confirm
// the intended input range before relying on this for larger n.
def nCr(n: Int, r: Int) = {
// Factorial via a descending loop: fact(i) = i * (i-1) * ... * 2.
// Returns 1 for i < 2 (the loop body never runs), which matches 0! = 1! = 1.
def fact(i: Int) = {
var res = 1
for (e <-i to 2 by -1)
res*=e
res
} // End 'fact' function
@tirthajyoti
tirthajyoti / bayes-4.py
Created March 8, 2020 05:51
bayes-4.py
# Apply Bayes' rule iteratively: the posterior from each test round becomes
# the prior (prevalence) for the next round, sharpening the estimate.
# (Note: `prevelance` is the spelling used by the drug_user API.)
posterior_1 = drug_user(prob_th=0.5, sensitivity=0.97, specificity=0.95, prevelance=0.005)
print("Probability of the test-taker being a drug user, in the first round of test, is:", round(posterior_1, 3))
print()
posterior_2 = drug_user(prob_th=0.5, sensitivity=0.97, specificity=0.95, prevelance=posterior_1)
print("Probability of the test-taker being a drug user, in the second round of test, is:", round(posterior_2, 3))
print()
posterior_3 = drug_user(prob_th=0.5, sensitivity=0.97, specificity=0.95, prevelance=posterior_2)
print("Probability of the test-taker being a drug user, in the third round of test, is:", round(posterior_3, 3))
@tirthajyoti
tirthajyoti / bayes-3.py
Created March 8, 2020 05:33
bayes-3.py
# Sweep the test sensitivity over 0.951..0.999 (step 0.002) at a fixed 0.5%
# prevalence and 95% specificity, then plot how the posterior probability of
# being a drug user (given a positive test) varies with sensitivity.
# Fix: the loop-body indentation was lost in the original paste, making the
# snippet invalid Python; it is restored here.
ps = []    # posterior probability for each sensitivity value
sens = []  # sensitivity values swept
for sen in [i*0.001+0.95 for i in range(1,50,2)]:
    sens.append(sen)
    p = drug_user(prob_th=0.5,sensitivity=sen,specificity=0.95,prevelance=0.005,verbose=False)
    ps.append(p)
plt.figure(figsize=(10,5))
plt.title("Probability of user with test sensitivity",fontsize=15)
plt.plot(sens,ps,color='k',marker='o',markersize=8)
@tirthajyoti
tirthajyoti / bayes-2.py
Created March 8, 2020 05:27
bayes-2.py
# Sweep the prevalence rate over 0.1%..4.9% (step 0.2%) at fixed 97%
# sensitivity and 95% specificity, then plot the posterior probability of
# being a drug user (given a positive test) against prevalence in percent.
# Fix: the loop-body indentation was lost in the original paste, making the
# snippet invalid Python; it is restored here.
# (The title's "prevelance" misspelling is kept byte-for-byte — it is the
# spelling used throughout these snippets, including the drug_user API.)
ps = []    # posterior probability for each prevalence value
pres = []  # prevalence values, scaled to percent for the x-axis
for pre in [i*0.001 for i in range(1,51,2)]:
    pres.append(pre*100)
    p = drug_user(prob_th=0.5,sensitivity=0.97,specificity=0.95,prevelance=pre,verbose=False)
    ps.append(p)
plt.figure(figsize=(10,5))
plt.title("Probability of user with prevelance rate",fontsize=15)
plt.plot(pres,ps,color='k',marker='o',markersize=8)
@tirthajyoti
tirthajyoti / bayes-1.py
Last active October 7, 2023 21:36
bayes-1.py
# Computes the posterior probability that a test-taker is a drug user, via
# Bayes' rule, from test sensitivity, specificity and prevalence.
# (Note: `prevelance` is the spelling this API uses; callers in the sibling
# snippets pass it by that keyword, so it cannot be renamed here.)
# NOTE(review): this excerpt is truncated — the function body continues past
# the last visible line — and the paste lost the original indentation; the
# parameter lines and body are indented under `def` in the original source.
def drug_user(
prob_th=0.5,
sensitivity=0.99,
specificity=0.99,
prevelance=0.01,
verbose=True):
"""
Computes the posterior using Bayes' rule
"""
p_user = prevelance
@tirthajyoti
tirthajyoti / pytorch-5.py
Created November 26, 2019 22:49
pytorch-5
# Full-batch training loop: one optimizer step per epoch, appending each
# epoch's scalar loss to `running_loss`. On epochs 20, 40, ... it recomputes
# the model outputs and opens a figure (the plotting code continues past this
# excerpt — the snippet is truncated).
# NOTE(review): indentation was lost in this paste — the lines after `for`
# and `if` belong inside those bodies in the original source.
for i,e in enumerate(range(epochs)):
optimizer.zero_grad()
output = model.forward(X)
loss = criterion(output,y)
loss.backward()
optimizer.step()
running_loss.append(loss.item())
if i!=0 and (i+1)%20==0:
logits = model.forward(X).detach().numpy().flatten()
plt.figure(figsize=(15,3))
@tirthajyoti
tirthajyoti / pytorch-4.py
Created November 26, 2019 22:12
pytorch-4
# Full-batch training loop: one optimizer step per epoch, printing the
# scalar loss each epoch.
# Fixes: (1) the loop-body indentation was lost in the original paste,
# making the snippet invalid Python — restored here; (2) the original
# `for i,e in enumerate(range(epochs))` bound an unused `e` — simplified
# to a plain range loop with identical iteration behavior.
epochs = 10
for i in range(epochs):
    optimizer.zero_grad()                              # Reset the grads
    output = model.forward(X)                          # Forward pass
    # Reshape the output to 1-D to match the targets for the loss
    # (assumes output has shape (N, 1) — TODO confirm against the model).
    loss = criterion(output.view(output.shape[0]),y)   # Calculate loss
    print(f"Epoch - {i+1}, Loss - {round(loss.item(),3)}")  # Print loss
    loss.backward()                                    # Backpropagation
    optimizer.step()                                   # Optimizer one step
@tirthajyoti
tirthajyoti / pytorch-3.py
Created November 26, 2019 22:00
pytorch-3
# One complete training step: zero the gradients, run the forward pass,
# compute the loss, backpropagate, and update the parameters.
# Reset the gradients — PyTorch accumulates them across backward passes,
# so they must be cleared before each new step
optimizer.zero_grad()
# Forward pass
output = model.forward(X)
# Calculate loss between predictions and targets
loss = criterion(output,y)
# Backward pass (AutoGrad) — populates .grad on the model parameters
loss.backward()
# One step of the optimizer — updates parameters using the gradients
optimizer.step()
@tirthajyoti
tirthajyoti / pytorch-2.py
Last active November 26, 2019 21:43
pytorch-2
# Instantiate the network (Network is defined elsewhere) and print its
# architecture summary.
model = Network()
print(model)
# Console output of the print above (not executable code): a 5 -> 8 -> 4 -> 1
# stack of Linear layers with ReLU and a final Sigmoid.
Network(
(hidden1): Linear(in_features=5, out_features=8, bias=True)
(hidden2): Linear(in_features=8, out_features=4, bias=True)
(relu): ReLU()
(output): Linear(in_features=4, out_features=1, bias=True)
(sigmoid): Sigmoid()
)