@psaitu
Created September 18, 2019 16:03
import numpy as np

from layers import *


class CNN_B():
    def __init__(self):
        # Layers are built in init_weights(), once the MLP weights are available.
        self.layers = []

    def __call__(self, x):
        return self.forward(x)

    def init_weights(self, weights):
        # Reshape each MLP weight matrix into a Conv1D filter bank of shape
        # (out_channel, in_channel, kernel_size): every output neuron's flattened
        # weight vector becomes one (in_channel, kernel_size) filter.
        w0_reshaped = np.zeros((8, 24, 8))
        w0 = weights[0].T
        for i in range(w0_reshaped.shape[0]):
            w0_reshaped[i] = w0[i].reshape(8, 24).T

        w1_reshaped = np.zeros((16, 8, 1))
        w1 = weights[1].T
        for i in range(w1_reshaped.shape[0]):
            w1_reshaped[i] = w1[i].reshape(1, 8).T

        w2_reshaped = np.zeros((4, 16, 1))
        w2 = weights[2].T
        for i in range(w2_reshaped.shape[0]):
            w2_reshaped[i] = w2[i].reshape(1, 16).T

        # Conv1D(in_channel, out_channel, kernel_size, stride)
        layer1 = Conv1D(24, 8, 8, 4)
        layer1.W = w0_reshaped
        layer2 = Conv1D(8, 16, 1, 1)
        layer2.W = w1_reshaped
        layer3 = Conv1D(16, 4, 1, 1)
        layer3.W = w2_reshaped

        self.layers.append(layer1)
        self.layers.append(ReLU())
        self.layers.append(layer2)
        self.layers.append(ReLU())
        self.layers.append(layer3)
        self.layers.append(Flatten())
    def forward(self, x):
        # You do not need to modify this method
        out = x
        for layer in self.layers:
            out = layer(out)
        return out

    def backward(self, delta):
        # You do not need to modify this method
        for layer in self.layers[::-1]:
            delta = layer.backward(delta)
        return delta
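
# A minimal shape trace through CNN_B, assuming Conv1D computes
# out_width = (in_width - kernel_size) // stride + 1 and the input is
# (batch, 24, in_width); the in_width of 128 below is illustrative only:
#   x                      (batch, 24, 128)
#   Conv1D(24, 8, 8, 4) -> (batch, 8, 31)
#   Conv1D(8, 16, 1, 1) -> (batch, 16, 31)
#   Conv1D(16, 4, 1, 1) -> (batch, 4, 31)
#   Flatten             -> (batch, 124)
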

class CNN_C():
    def __init__(self):
        # Layers are built in init_weights(), once the MLP weights are available.
        self.layers = []

    def __call__(self, x):
        return self.forward(x)

    def init_weights(self, weights):
        # Load the weights for your CNN from the MLP weights given.
        # Same reshaping as CNN_B, except that any NaN entries in the reshaped
        # filters are zeroed out so those connections contribute nothing.
        w0_reshaped = np.zeros((8, 24, 8))
        w0 = weights[0].T
        for i in range(w0_reshaped.shape[0]):
            w0_reshaped[i] = w0[i].reshape(8, 24).T
            nans = np.isnan(w0_reshaped[i])
            w0_reshaped[i][nans] = 0.

        w1_reshaped = np.zeros((16, 8, 1))
        w1 = weights[1].T
        for i in range(w1_reshaped.shape[0]):
            w1_reshaped[i] = w1[i].reshape(1, 8).T
            nans = np.isnan(w1_reshaped[i])
            w1_reshaped[i][nans] = 0.

        w2_reshaped = np.zeros((4, 16, 1))
        w2 = weights[2].T
        for i in range(w2_reshaped.shape[0]):
            w2_reshaped[i] = w2[i].reshape(1, 16).T
            nans = np.isnan(w2_reshaped[i])
            w2_reshaped[i][nans] = 0.

        # Conv1D(in_channel, out_channel, kernel_size, stride)
        layer1 = Conv1D(24, 8, 8, 4)
        layer1.W = w0_reshaped
        layer2 = Conv1D(8, 16, 1, 1)
        layer2.W = w1_reshaped
        layer3 = Conv1D(16, 4, 1, 1)
        layer3.W = w2_reshaped

        self.layers.append(layer1)
        self.layers.append(ReLU())
        self.layers.append(layer2)
        self.layers.append(ReLU())
        self.layers.append(layer3)
        self.layers.append(Flatten())
    def forward(self, x):
        # You do not need to modify this method
        out = x
        for layer in self.layers:
            out = layer(out)
        return out

    def backward(self, delta):
        # You do not need to modify this method
        for layer in self.layers[::-1]:
            delta = layer.backward(delta)
        return delta
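

# A minimal usage sketch, assuming layers.py provides working Conv1D, ReLU, and
# Flatten implementations and that the MLP weights arrive as three numpy arrays
# of shape (192, 8), (8, 16), and (16, 4) (in_features, out_features). The batch
# size, width of 128, and random data below are illustrative only; CNN_C is used
# the same way, but also tolerates NaN entries in the given weights.
if __name__ == "__main__":
    mlp_weights = [
        np.random.randn(192, 8),
        np.random.randn(8, 16),
        np.random.randn(16, 4),
    ]

    net = CNN_B()
    net.init_weights(mlp_weights)

    x = np.random.randn(4, 24, 128)  # (batch, in_channel, width)
    out = net(x)                     # flattened to (batch, out_channel * out_width)
    print(out.shape)

    # Backward expects a gradient with the same shape as the forward output.
    net.backward(np.ones_like(out))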