# -*- coding: utf-8 -*-
"""
@author: Balaji
"""
print('\n 18 March Program for a simple neural network, doing a forward pass with one layer')
print('\n 21 March Added provision for data generation for use in training.')
print('\n 21 March Formatting input data as a matrix for faster multiplication with weights')
print('\n 22 March Initialise bias to 0, weights to very small ~0.01 random numbers, put computations inside a class')
|
import numpy as np |
import matplotlib.pyplot as plt |
|
class Layer_dense:
    """A single fully connected (dense) layer; each input image is a column vector."""
    def __init__(self, num_pixels, num_neurons):
        np.random.seed(1);  #For repeatability
        # Per the 22 March note: weights start as very small (~0.01) random
        # numbers and biases start at zero.
        self.weights = 0.01*np.random.randn(num_neurons, num_pixels);
        self.biases = np.zeros((num_neurons, 1));
        #self.output_beforeActivation = np.zeros((num_neurons,1));
    def forward(self, inputs):
        self.output_beforeActivation = np.dot(self.weights, inputs) + self.biases;
    def applyReluActivation(self):
        self.output_afterActivation = np.maximum(0, self.output_beforeActivation);
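
# Quick shape sanity check (an illustrative sketch, separate from the data
# pipeline below): with 5 pixels and 4 neurons the weights are (4, 5), a batch
# of 3 dummy images is (5, 3), and the (4, 1) bias broadcasts across columns,
# giving a (4, 3) pre-activation matrix, one column per image.
_check_layer = Layer_dense(num_pixels=5, num_neurons=4);
_check_layer.forward(inputs=np.ones((5, 3)));
assert _check_layer.output_beforeActivation.shape == (4, 3)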
|
num_neurons = 4; |
num_pixels = 5; |
num_images = 10; |
|
# Input data set properties |
slope = np.tan(np.deg2rad(30)); |
intercept = 50; |
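# Each synthetic "image" is a noisy straight line over the pixel index:
#   pixel_value = tan(30 deg)*x + 50 + noise,  noise ~ N(0, (0.1*50)^2)
# i.e. a slope of roughly 0.577 with Gaussian noise of standard deviation 5.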
|
print('\n Creating input data:') |
np.random.seed(1); #For repeatability |
inputs = np.zeros((num_pixels,num_images)); |
x = np.arange(0,num_pixels); |
x = x.reshape((num_pixels,1)); |
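# x is reshaped to a (num_pixels, 1) column so it lines up with the
# column-per-image layout of `inputs` when each noisy line is generated.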
for dataSet_iter in range(num_images):
    inputs[:,dataSet_iter:dataSet_iter+1] = (slope*x+intercept) + (intercept*0.1)*(np.random.randn(x.size,1));
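
# An equivalent vectorized sketch (left commented out so the seeded results
# above are unchanged): draw all the noise in one call and let the line
# broadcast across every column.
# inputs = (slope*x + intercept) + (intercept*0.1)*np.random.randn(num_pixels, num_images)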
|
print('\n Plotting input data set :') |
plt.plot(x,inputs,'o') |
plt.xlabel('Pixel number') |
plt.ylabel('Pixel value') |
plt.title('Input data set') |
plt.show() |
|
print('\n Create a dense layer, fully connected, of the class Layer_dense') |
firstLayer = Layer_dense(num_pixels = num_pixels, num_neurons = num_neurons); |
firstLayer.forward(inputs=inputs); |
firstLayer.applyReluActivation(); |
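# output_afterActivation is (num_neurons, num_images) = (4, 10): ReLU keeps
# positive pre-activations and clamps negative ones to zero.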
|
print('\n Result = ')
print('\n Each column is the result of the network for one image, after activation')
print(firstLayer.output_afterActivation)