All data here is fictional/imaginary.
Nothing in this belongs to me.
This is a record of work, for future use only.
Index | Name | Link | Disk loading (kg/m²) | Main rotor diameter (m) | Tail rotor diameter (m) | Main rotor to tail rotor (m) | Power (W) | MTOW (kg) | Ceiling without ground effect (m) | Engine | Payload (kg) | Max ascent speed (m/s) | Service ceiling (m) | Cruise speed (m/s) | Endurance (hr) | Max speed (m/s) | Fuselage length (m)
1 | AeroDreams Chi-7 | https://sites.google.com/site/stingrayslistofrotorcraft/aerodreams-chi-7 | 15.92356688 | 6 | 1.1 | NA | 74600 | 450 | 1800 | Rotax 912 | 230 | 8 | 2500 | 36.11111111 | 10 | 52.77777778 | 7.15
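The disk loading column appears to be MTOW divided by the main rotor disk area. A minimal check in Python using the AeroDreams Chi-7 row above (variable names here are illustrative, not from the original data):

import numpy as np

mtow_kg = 450.0                 # MTOW from the row above
main_rotor_diameter_m = 6.0     # main rotor diameter from the same row
disk_area_m2 = np.pi * (main_rotor_diameter_m / 2) ** 2   # main rotor disk area
disk_loading = mtow_kg / disk_area_m2
print(disk_loading)             # ~15.92 kg/m², consistent with the tabulated 15.92356688 (which seems to use pi ~ 3.14)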
# -*- coding: utf-8 -*-
"""
@author: Balaji
"""
print('\n 18 March Program for a simple neural network, doing a forward pass with one layer')
print('\n 21 March Added provision for data generation for use in training.')
print('\n 21 March Formatting input data as a matrix for faster multiplication with weights')
print('\n 22 March Initialise bias to 0, weights to very small ~0.01 random numbers, put computations inside a class')
|
import numpy as np |
import matplotlib.pyplot as plt |
|
class Layer_dense():
    def __init__(self, num_pixels, num_neurons):
        np.random.seed(1)  # For repeatability
        # Per the 22 March note: weights start as very small (~0.01) random numbers, biases start at 0
        self.weights = 0.01 * np.random.randn(num_neurons, num_pixels)
        self.biases = np.zeros((num_neurons, 1))
    def forward(self, inputs):
        # Weighted sum per neuron: (num_neurons x num_pixels) . (num_pixels x num_images) + bias
        self.output_beforeActivation = np.dot(self.weights, inputs) + self.biases
    def applyReluActivation(self):
        # ReLU: negative pre-activations are clipped to zero
        self.output_afterActivation = np.maximum(0, self.output_beforeActivation)
|
num_neurons = 4
num_pixels = 5
num_images = 10

# Input data set properties: a straight line with slope tan(30 deg) and intercept 50
slope = np.tan(np.deg2rad(30))
intercept = 50

print('\n Creating input data:')
np.random.seed(1)  # For repeatability
inputs = np.zeros((num_pixels, num_images))
x = np.arange(0, num_pixels)
x = x.reshape((num_pixels, 1))
for dataSet_iter in range(num_images):
    # Each column (image) is the straight line plus Gaussian noise with std = 10% of the intercept
    inputs[:, dataSet_iter:dataSet_iter+1] = (slope*x + intercept) + (intercept*0.1)*np.random.randn(x.size, 1)
|
print('\n Plotting input data set:')
plt.plot(x, inputs, 'o')
plt.xlabel('Pixel number')
plt.ylabel('Pixel value')
plt.title('Input data set')
plt.show()
|
print('\n Create a dense layer, fully connected, of the class Layer_dense')
firstLayer = Layer_dense(num_pixels=num_pixels, num_neurons=num_neurons)
firstLayer.forward(inputs=inputs)
firstLayer.applyReluActivation()

print('\n Result = ')
print('\n Each column is the result of the network per image, after activation')
print(firstLayer.output_afterActivation)
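As a quick sanity check on the forward pass above, the activated output should have one row per neuron and one column per image, and ReLU should leave no negative entries. A minimal sketch, meant to be appended to the end of the script so its variables are still in scope:

# Sanity checks on the forward pass result (append after the script above)
assert firstLayer.output_afterActivation.shape == (num_neurons, num_images)
assert np.all(firstLayer.output_afterActivation >= 0)  # ReLU clips negatives to zero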
1. Simple forward pass of a one-layer neural network in Python, with numpy
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 18 12:39:21 2022

@author: Balaji
"""
import numpy as np

num_neurons = 2
num_inputs = 3
num_dataSets = 1

print('\n Program for a simple neural network, doing a forward pass with one layer')
print('\n Each row is a data set')

inputs = np.random.randn(num_dataSets, num_inputs)
weights = np.random.randn(num_neurons, num_inputs)
bias = np.random.randn(num_neurons, 1)
results = np.zeros_like(bias)

print('\n Input = ')
print(inputs)
print('\n Weights = ')
print(weights)
print('\n Bias = ')
print(bias)

for input_iter in inputs:
    for weight_iter, bias_iter, result_iter in zip(weights, bias, results):
        print('\n Input data set:')
        print(input_iter)
        print('\n is multiplied with weights')
        print(weight_iter)
        print('\n and added with bias')
        print(bias_iter)
        print('\n to give the result of neuron as')
        result_iter = input_iter.dot(weight_iter) + bias_iter
        print(result_iter)
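The nested loops above compute, data set by data set and neuron by neuron, the same numbers as a single matrix multiplication. A minimal sketch of the equivalence, meant to be appended to the script above so that inputs, weights and bias already exist:

# Vectorised equivalent of the nested loops:
# (num_neurons x num_inputs) . (num_inputs x num_dataSets) + (num_neurons x 1) bias
vectorised = np.dot(weights, inputs.T) + bias
print('\n Vectorised result (one column per data set, one row per neuron) = ')
print(vectorised)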