Wednesday, March 30, 2022

#1 Helicopter info collection, for design data base

All data here is fictional/imaginary.

Nothing in this belongs to me.

This is a record of work, for future use only.

Columns (units): Index · Name · Link · Disk loading (kg/m2) · Main rotor diameter (m) · Tail rotor diameter (m) · Main rotor to tail rotor (m) · Power (W) · MTOW (kg) · Ceiling without ground effect (m) · Engine · Payload (kg) · Max ascent speed (m/s) · Service ceiling (m) · Cruise speed (m/s) · Endurance (hr) · Max speed (m/s) · Fuselage length (m)

Row 1: AeroDreams Chi-7 — https://sites.google.com/site/stingrayslistofrotorcraft/aerodreams-chi-7
Power 74600 W; MTOW 450 kg; ceiling without ground effect 1800 m; engine Rotax 912; payload 230 kg; max ascent speed 8 m/s; service ceiling 2500 m; cruise speed 36.11111111 m/s; endurance 10 hr; max speed 52.77777778 m/s; fuselage length 7.15 m.
(Note: the leading numeric fields were fused in extraction as "15.9235668861.1NA" — disk loading is presumably 15.92 kg/m2 and "main rotor to tail rotor" is NA, but the boundaries of the two rotor-diameter fields could not be reconstructed; verify against the linked source.)

Tuesday, March 22, 2022

Learning Neural networks 3

# -*- coding: utf-8 -*-
"""
@author: Balaji
"""
# Change log for this script, printed at the start of every run.
print('\n 18 March Program for a simple neural network, doing a forward pass with one layer')
print('\n 21 March Added provision for data generation for use in training.')
print('\n 21 March Formating input data as a matrix for faster multiplication with weights')
print('\n 22 March Initialise bias to 0, weights to very small ~0.01 random numbers, put computations inside a class')

import numpy as np
import matplotlib.pyplot as plt

class Layer_dense():
    """A single fully connected (dense) layer.

    Weights have shape (num_neurons, num_pixels) and biases
    (num_neurons, 1), so inputs are expected column-wise: one column
    per image, num_pixels rows. Results are stored on the instance as
    output_beforeActivation / output_afterActivation.
    """

    def __init__(self, num_pixels, num_neurons):
        # NOTE(review): the 22 March change-log line above says biases
        # start at 0 and weights at ~0.01 * randn, but both are still
        # drawn from a plain standard normal here -- confirm intent.
        np.random.seed(1)  # for repeatability
        self.weights = np.random.randn(num_neurons, num_pixels)
        self.biases = np.random.randn(num_neurons, 1)

    def forward(self, inputs):
        """Affine forward pass: weights . inputs + biases (broadcast)."""
        self.output_beforeActivation = np.dot(self.weights, inputs) + self.biases

    def applyReluActivation(self):
        """Element-wise ReLU of the pre-activation output."""
        self.output_afterActivation = np.maximum(0, self.output_beforeActivation)

# Network / data-set sizes: 4 neurons, 5 pixels per image, 10 images.
num_neurons = 4;
num_pixels = 5;
num_images = 10;

# Input data set properties: each image is a noisy straight line,
# pixel_value = slope * pixel_index + intercept.
slope = np.tan(np.deg2rad(30));
intercept = 50;

print('\n Creating input data:')
np.random.seed(1); #For repeatability
# One COLUMN per image: (num_pixels, num_images).
inputs = np.zeros((num_pixels,num_images));
x = np.arange(0,num_pixels);
x = x.reshape((num_pixels,1));
for dataSet_iter in range(num_images):
    # Line plus gaussian noise with std of 10% of the intercept.
    inputs[:,dataSet_iter:dataSet_iter+1] = (slope*x+intercept) + (intercept*0.1)*(np.random.randn(x.size,1));

print('\n Plotting input data set :')
plt.plot(x,inputs,'o')
plt.xlabel('Pixel number')
plt.ylabel('Pixel value')
plt.title('Input data set')
plt.legend()  # NOTE(review): no labelled artists, so this draws an empty legend
plt.show()

print('\n Create a dense layer, fully connected, of the class Layer_dense')
firstLayer = Layer_dense(num_pixels = num_pixels, num_neurons = num_neurons);
firstLayer.forward(inputs=inputs);
firstLayer.applyReluActivation();

print('\n Result = ')
print('\n Each column is result of the network per image, after activation')
print(firstLayer.output_afterActivation)

 

Monday, March 21, 2022

Learning neural networks 2

# -*- coding: utf-8 -*-
"""
One-layer dense network forward pass over a batch of generated data
sets (noisy straight lines), computed once as a single matrix product
here and once neuron-by-neuron in the loop further down.

@author: Balaji
"""
print('\n 18 March Program for a simple neural network, doing a forward pass with one layer')
print('\n 21 March Added provision for data generation for use in training.')
print('\n 21 March Formating input data as a matrix for faster multiplication with weights')

import numpy as np
import  matplotlib.pyplot as plt
# Network / data-set sizes: 4 neurons, 2 pixels per image, 3 images.
num_neurons     = 4;
num_pixels      = 2;
num_images      = 3;
np.random.seed(1); #For repeatability
# Input data set properties: each image is pixel_value = slope*x + intercept
# plus gaussian noise (std = 20% of the intercept, added in the loop below).
slope           = np.tan(np.deg2rad(30));
intercept       = 50;

print('\n Creating input data:')
# NOTE: here each ROW of `inputs` is one image, shape (num_images, num_pixels)
# -- the transpose of the column-per-image layout in the 22 March version.
inputs  = np.zeros((num_images,num_pixels));
x = np.arange(0,num_pixels);
for dataSet_iter in range(num_images):
    inputs[dataSet_iter,:] = (slope*x+intercept)+ intercept*0.2*(np.random.randn(1,x.size) );

print('\n Plotting input data set :')
plt.plot(x,inputs.transpose(),'o')
plt.xlabel('Pixel number')
plt.ylabel('Pixel value')
plt.title('Input data set')
plt.legend()
plt.show()

print('\n Each row is a data set')
# One weight row and one bias entry per neuron.
weights = np.random.randn(num_neurons ,num_pixels);
bias    = np.random.randn(num_neurons ,1);
results = np.zeros_like(bias)  # placeholder; rebound to the full result below

print('\n Input = ')
print(inputs)

print('\n Weights = ')
print(weights)

print('\n Bias = ')
print(bias)

# Output computation in matrix format:
# (num_neurons, num_pixels) @ (num_pixels, num_images) + (num_neurons, 1)
# broadcasts to a (num_neurons, num_images) result, one column per image.
results = np.dot(weights,inputs.transpose())+bias;

# Output computation in loop method: prints the same per-neuron results
# as the matrix form above, one input/neuron combination at a time.
for sample in inputs:
    for neuron_weights, neuron_bias in zip(weights, bias):
        print('\n Input data set:')
        print(sample)
        print('\n is multiplied with weights')
        print(neuron_weights)
        print('\n and added with bias')
        print(neuron_bias)
        print('\n to give the result of neuron as')
        print(sample.dot(neuron_weights) + neuron_bias)

Sunday, March 20, 2022

Learning neural networks 1

1. Simple forward pass of a one-layer neural network in Python, with numpy

 






# -*- coding: utf-8 -*-
"""
Created on Fri Mar 18 12:39:21 2022

Simple forward pass of a one-layer neural network: random inputs,
random weights and biases; each neuron's output is computed and
printed in the loop further down.

@author: Balaji
"""

import numpy as np
# Sizes: 2 neurons, 3 input features per data set, 1 data set.
num_neurons = 2;
num_inputs  = 3;
num_dataSets= 1;
print('\n Program for a simple neural network, doing a forward pass with one layer')
print('\n Each row is a data set')
# No seed is set here, so every run draws different values.
inputs  = np.random.randn(num_dataSets,num_inputs);
weights = np.random.randn(num_neurons ,num_inputs);
bias    = np.random.randn(num_neurons ,1);
results = np.zeros_like(bias)  # placeholder; per-neuron results are printed, not stored

print('\n Input = ')
print(inputs)

print('\n Weights = ')
print(weights)

print('\n Bias = ')
print(bias)

# Print every input/neuron combination and the resulting neuron output.
for sample in inputs:
    for neuron_weights, neuron_bias in zip(weights, bias):
        print('\n Input data set:')
        print(sample)
        print('\n is multiplied with weights')
        print(neuron_weights)
        print('\n and added with bias')
        print(neuron_bias)
        print('\n to give the result of neuron as')
        print(sample.dot(neuron_weights) + neuron_bias)