Updated SMAI Assignments and Dip Assignments
shubhMaheshwari committed Sep 3, 2018
1 parent 38a5d90 commit dc2b7a1
Showing 39 changed files with 720 additions and 162 deletions.
Binary file added ABA/ABA_Assignment_5_Bonding_Due_Sep-3.pdf
348 changes: 289 additions & 59 deletions DIP/Assignment2/Report.ipynb

Large diffs are not rendered by default.

Binary file modified DIP/Assignment2/output_data/q6.jpeg
Binary file added DIP/a2_20161170.tar.gz
Binary file removed DIP/a2_201631170.tar.gz
1 change: 1 addition & 0 deletions DIP/notes/ftt_filtering.md
@@ -0,0 +1 @@
# FFT filtering in Frequency domain
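The note is currently just a heading; for context, a minimal sketch of ideal low-pass filtering in the frequency domain with NumPy (the random placeholder image and the cutoff radius of 30 are assumptions for illustration, not part of the assignment):

import numpy as np

# Minimal frequency-domain low-pass filter (illustrative sketch)
img = np.random.rand(256, 256)                 # placeholder grayscale image
F = np.fft.fftshift(np.fft.fft2(img))          # 2-D FFT, zero frequency moved to the centre

# Ideal low-pass mask: keep frequencies within a radius of 30 of the centre
rows, cols = img.shape
r, c = np.ogrid[:rows, :cols]
mask = (r - rows // 2) ** 2 + (c - cols // 2) ** 2 <= 30 ** 2

filtered = np.real(np.fft.ifft2(np.fft.ifftshift(F * mask)))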
File renamed without changes.
75 changes: 0 additions & 75 deletions SMAI/Q71_3.py

This file was deleted.

36 changes: 36 additions & 0 deletions SMAI/SMAI_hw_04/q2.py
@@ -0,0 +1,36 @@
import numpy as np

# Input
X = np.array([
    [0,-2,1],
    [-7,0,1],
    [5,-4,1],
    [11,-4,1],
    [12,-5,1],
    [2,-8,1]])

# Output
Y = np.array([0,1,1,0,0,0])
n = Y.shape[0]
# Given Weights
W1 = np.array([-11,3,2])
W2 = np.array([11,-3,2])
W3 = np.array([2,-1,0])

# Predictions
Y_pred1 = np.sign(X.dot(W1))
Y_pred2 = np.sign(X.dot(W2))
Y_pred3 = np.sign(X.dot(W3))

Y_pred1[Y_pred1 == -1] = 0
Y_pred2[Y_pred2 == -1] = 0
Y_pred3[Y_pred3 == -1] = 0

acc1 = (n - np.sum(np.abs(Y - Y_pred1)))/n
acc2 = (n - np.sum(np.abs(Y - Y_pred2)))/n
acc3 = (n - np.sum(np.abs(Y - Y_pred3)))/n

print("Y_pred1:",Y_pred1)
print("Y_pred2:",Y_pred2)
print("Y_pred3:",Y_pred3)
print("Accuracy of W1:{} Accuracy of W2:{} Accuracy of W3:{}".format(acc1,acc2,acc3))
Binary file added SMAI/SMAI_hw_04/q2_ans.png
Binary file added SMAI/SMAI_hw_04/q2_code.png
Binary file added SMAI/SMAI_hw_04/q4_1.jpeg
Binary file added SMAI/SMAI_hw_04/q4_2.png
47 changes: 47 additions & 0 deletions SMAI/SMAI_hw_05/q1.py
@@ -0,0 +1,47 @@
import numpy as np
import matplotlib.pyplot as plt

lr_list = [0.001,0.1,0.4]


# Keep generating a fresh random dataset; close the plot window to rerun
while True:
    # INPUT
    X = np.random.uniform(-1,1,(10,2))
    # OUTPUT
    W_final = np.random.random((2,))
    # W_final = np.array([1,1])
    Y = X.dot(W_final)


    for lr in lr_list:

        # INITIAL VALUES
        W = np.array([-1,1],dtype='float64')
        loss_hist = []


        for i in range(100):
            # Forward prop
            Y_Pred = X.dot(W)
            dL = (Y - Y_Pred)
            # LOSS
            loss = np.sum(dL**2)

            # Back prop
            dW = X.T.dot(dL)
            W += lr*dW

            # For better visualization, cap the loss at 50
            if loss > 50:
                loss = loss_hist[-1] if loss_hist else 50
            loss_hist.append(loss)

        plt.plot(loss_hist,label=str(lr))


    plt.legend()
    plt.show()


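For any one random dataset, the gradient-descent runs above should approach the closed-form least-squares solution, while larger rates such as 0.4 can diverge (which is what the loss cap guards against). A minimal standalone sanity check, assuming lr = 0.1 (not part of the submitted script):

import numpy as np

# Standalone check: gradient descent on the squared error should approach
# the closed-form least-squares solution for the same data.
X = np.random.uniform(-1, 1, (10, 2))
Y = X.dot(np.random.random((2,)))

W = np.array([-1.0, 1.0])
for _ in range(100):
    W += 0.1 * X.T.dot(Y - X.dot(W))       # same update rule as above with lr = 0.1

W_closed, *_ = np.linalg.lstsq(X, Y, rcond=None)
print(W, W_closed)                          # the two should be close for a well-conditioned draw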
Binary file added SMAI/SMAI_hw_05/q5_1.png
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Binary file added SMAI/SMAI_hw_08/q8_1.png
58 changes: 58 additions & 0 deletions SMAI/SMAI_hw_08/q8_1.py
@@ -0,0 +1,58 @@
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D


# Input => 1000 2d samples drawn uniformly from [-1,1]
X = np.random.uniform(-1,1,(1000,2))

# Weights to be predicted by our model
W_final = np.random.uniform(-1,1,(2,))

# Non linearity function (sigmoid)
def sigmoid(x):
    return 1/(1 + np.exp(-x))

# Output that needs to be predicted
Y = sigmoid(X.dot(W_final))
Y[ Y > 0.5 ] = 1
Y[ Y <= 0.5 ] = -1

# Plotting data
fig = plt.figure()

W1,W2 = np.meshgrid(np.linspace(-1,1,20),np.linspace(-1,1,20))


# Case 1
# Let us study loss = (y - g(W.dot(X)))^2 by plotting the error surface over the data
y_pred = X[:,0]*W1.reshape((20,20,1)) + X[:,1]*W2.reshape((20,20,1))
y_pred = sigmoid(y_pred)

y_pred[ y_pred > 0.5 ] = 1
y_pred[ y_pred <= 0.5 ] = -1

y = Y
error_non_convex = np.mean((y - y_pred)**2,axis=2)
ax1 = fig.add_subplot(121, projection='3d')
ax1.plot_surface(W1, W2,error_non_convex, cmap='terrain', alpha=0.9)
ax1.set_xlabel('W1')
ax1.set_ylabel('W2')
ax1.set_zlabel('Error func')
ax1.set_title('Non-convex Error Surface')

# Case 2
# Let us study loss = (y - W.dot(X))^2 by plotting the error surface over the data
y_pred = X[:,0]*W1.reshape((20,20,1)) + X[:,1]*W2.reshape((20,20,1))
y = Y
error_convex = np.mean((y - y_pred)**2,axis=2)
ax2 = fig.add_subplot(122, projection='3d')
ax2.plot_surface(W1, W2,error_convex, cmap='terrain', alpha=0.9)
ax2.set_xlabel('W1')
ax2.set_ylabel('W2')
ax2.set_zlabel('Error func')
ax2.set_title('Convex Error Surface')



plt.show()
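The convexity of the second surface can also be checked algebraically: the Case 2 loss (y - W.dot(x))^2 has Hessian 2/N * X^T X, which is positive semidefinite, whereas the Case 1 loss is piecewise constant in W because of the hard threshold, which is why its surface shows flat plateaus. A minimal standalone numerical check (not part of the submitted script):

import numpy as np

# Standalone check: the Hessian of the mean squared linear loss is 2/N * X^T X,
# and its eigenvalues are non-negative, so the Case 2 surface is convex.
X = np.random.uniform(-1, 1, (1000, 2))
H = 2.0 / X.shape[0] * X.T.dot(X)
print(np.linalg.eigvalsh(H))   # both eigenvalues >= 0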
Binary file added SMAI/SMAI_hw_08/q8_2.png
79 changes: 79 additions & 0 deletions SMAI/SMAI_hw_08/q8_2.py
@@ -0,0 +1,79 @@
import numpy as np
import matplotlib.pyplot as plt

# Generate data with 2 classes labelled 0 and 1
X = np.zeros((1000,2))
X[0:500] = np.random.normal([-4,0],3,(500,2))
X[500:1000] = np.random.normal([4,0],3,(500,2))
Y = np.zeros((1000,))
Y[0:500] = 0
Y[500:1000] = 1

# Shuffle data
shuffle = np.random.permutation(1000)

X = X[shuffle]
Y = Y[shuffle]

# Train data
train_x = X[0:700,:]
train_y = Y[0:700]
# Test data
test_x = X[700:1000,:]
test_y = Y[700:1000]

# Training
classes = [
    {
        'mean': np.mean(train_x[train_y == 0,:],axis=0),
        'var': np.std(train_x[train_y == 0,:]),  # note: a single scalar standard deviation per class
        'prob': 0.5
    },
    {
        'mean': np.mean(train_x[train_y == 1,:],axis=0),
        'var': np.std(train_x[train_y == 1,:]),
        'prob': 0.5
    }
]

# Get the probability of each sample belonging to each class
def get_predictions(x,classes):

    x = np.array(x)
    # Store the (unnormalised) posterior of each class for every sample
    p_x_w_list = []
    for clas in classes:
        # Evaluate the Gaussian density for each attribute
        exponent = np.exp(-0.5*((x-clas['mean'])/clas['var'])**2)
        p_x = (1 / (np.sqrt(2*np.pi) * clas['var'])) * exponent

        # Naive Bayes assumption: multiply the per-attribute densities
        p_x = np.prod(p_x,axis=1)
        # Apply Bayes' rule (the evidence term is common to both classes)
        p_x_w = p_x*clas['prob']
        p_x_w_list.append(p_x_w)
    p_x_w_list = np.array(p_x_w_list)
    # Pick the class with the maximum posterior
    pred = np.argmax(p_x_w_list,axis=0)

    return pred

train_y_pred = get_predictions(train_x,classes)
test_y_pred = get_predictions(test_x,classes)


# Get accuracy (as a percentage)
def accuracy(y,y_pred):
    n = np.shape(y)[0]
    acc = np.sum(np.abs(y - y_pred))/n
    acc = (1 - acc)*100
    return acc

print("Training Accuracy:",accuracy(train_y,train_y_pred))
print("Testing Accuracy:",accuracy(test_y,test_y_pred))


plt.scatter(X[Y ==0,0], X[Y ==0,1], color='red')
plt.scatter(X[Y ==1,0], X[Y ==1,1], color='blue')
plt.title("Data\nTrain Acc:{}\nTest Acc:{}".format(accuracy(train_y,train_y_pred), accuracy(test_y,test_y_pred)))
plt.show()
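As a cross-check on the hand-rolled classifier, scikit-learn's GaussianNB fits the same family of per-class Gaussian models and should report comparable accuracy (assuming scikit-learn is installed; standalone sketch with the same data-generating process, not part of the submitted script):

import numpy as np
from sklearn.naive_bayes import GaussianNB

# Regenerate data as above and compare against scikit-learn's Gaussian naive Bayes
X = np.vstack([np.random.normal([-4, 0], 3, (500, 2)),
               np.random.normal([4, 0], 3, (500, 2))])
Y = np.repeat([0, 1], 500)
idx = np.random.permutation(1000)
X, Y = X[idx], Y[idx]

clf = GaussianNB().fit(X[:700], Y[:700])
print("sklearn train acc:", 100 * clf.score(X[:700], Y[:700]))
print("sklearn test acc:", 100 * clf.score(X[700:], Y[700:]))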
Binary file added SMAI/SMAI_hw_08/q8_2_determinant.png
66 changes: 66 additions & 0 deletions SMAI/SMAI_hw_08/q8_2_determinant.py
@@ -0,0 +1,66 @@
import numpy as np
import matplotlib.pyplot as plt

# Generate data with 2 classes labelled 0 and 1
X = np.random.random((1000,2))

# Class means, covariance matrices and prior class probabilities
num_features = 2
feat_mean = np.array([[-3,0], [2,2]])
cov_mat = np.array([
[[2,0],[0,2]],
[[2,0],[0,2]]
])
feat_prob = np.array([0.5,0.5])
print(feat_mean[0].shape,cov_mat[0].shape)

X[0:500] = np.random.multivariate_normal(feat_mean[0,:],cov_mat[0],(500,))
X[500:1000] = np.random.multivariate_normal(feat_mean[1,:],cov_mat[1],(500,))
Y = np.zeros((1000,))
Y[0:500] = 0
Y[500:1000] = 1

# Shuffle data
shuffle = np.random.permutation(1000)

X = X[shuffle]
Y = Y[shuffle]

# Train data
train_x = X[0:700,:]
train_y = Y[0:700]
# Test data
test_x = X[700:1000,:]
test_y = Y[700:1000]

# Define a linear discriminant gi(x) = Wi.dot(x) + bi for each class
W = []
b = []

for i in range(num_features):
    W.append(np.linalg.inv(cov_mat[i]).dot(feat_mean[i,:]))
    # Bias term: ln P(wi) - 0.5 * mu_i^T * Sigma_i^-1 * mu_i
    b.append(np.log(feat_prob[i]) - 0.5*feat_mean[i].dot(np.linalg.inv(cov_mat[i])).dot(feat_mean[i]))

print(W,b)

# With only 2 classes, classify by comparing g1(x) and g2(x)
g1 = X.dot(W[0]) + b[0]
g2 = X.dot(W[1]) + b[1]

# Calculate error
error = np.sum(np.abs((Y - (g2 > g1))))/1000

print("Error:",error)

plt.scatter(X[Y ==0,0], X[Y ==0,1], color='red',label='Class 1')
plt.scatter(X[Y ==1,0], X[Y ==1,1], color='blue', label='Class 2')
plt.title("Bayesian classifier\nError.{}".format(error))
plt.xlabel('W1')
plt.ylabel('W2')
# Now plot the decision boundary g1(x) = g2(x) as a line
x1 = np.linspace(-3,3,20)
x2 = -(x1*(W[1][0] - W[0][0]) + (b[1] - b[0]))/(W[1][1] - W[0][1] + 1e-7)

plt.plot(x1, x2, color='black')
plt.legend()
plt.show()
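Because both classes share the same covariance matrix, the discriminant above is the same model family that linear discriminant analysis fits from data, so its estimated boundary should land close to the one plotted. A standalone cross-check with scikit-learn (assuming it is installed; not part of the submitted script):

import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

# Sample from the same two Gaussians and fit LDA, which estimates a
# shared-covariance linear discriminant from the data.
X = np.vstack([np.random.multivariate_normal([-3, 0], [[2, 0], [0, 2]], 500),
               np.random.multivariate_normal([2, 2], [[2, 0], [0, 2]], 500)])
Y = np.repeat([0, 1], 500)

lda = LinearDiscriminantAnalysis().fit(X, Y)
print("LDA w:", lda.coef_, "b:", lda.intercept_)    # compare with W[1]-W[0] and b[1]-b[0] above
print("LDA training accuracy:", lda.score(X, Y))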