NumPy
In [1]:
import numpy as np

# Element-wise arithmetic on arrays of the same shape
a = np.array([1, 2, 3, 4, 5])
b = np.array([6, 7, 8, 9, 10])
c = a + b

# arange builds [0, 1, ..., 7]; reshape returns a new 4x2 array
# (discarded here, so d itself keeps shape (8,))
d = np.arange(8)
d.reshape(4, 2)

# 11 evenly spaced values from 0 to 2, endpoints included
np.linspace(0, 2, num=11)
Out[1]:
array([0. , 0.2, 0.4, 0.6, 0.8, 1. , 1.2, 1.4, 1.6, 1.8, 2. ])
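Two points in the cell above are easy to misread: reshape returns a new array rather than modifying d in place, and arithmetic such as a + b is element-wise and broadcasts across compatible shapes. A minimal sketch illustrating both (the variable names here are ours, for illustration only):

import numpy as np

d = np.arange(8)
e = d.reshape(4, 2)       # new 4x2 array; d keeps its original shape (8,)
print(d.shape, e.shape)   # (8,) (4, 2)

# Broadcasting: a (4, 1) column is stretched across e's two columns
offsets = np.array([[100], [200], [300], [400]])
print(e + offsets)        # adds 100 to row 0, 200 to row 1, ...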
In [2]:
import numpy as np

# Each row is a training example, each column is a feature [X1, X2, X3]
X = np.array(([0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]), dtype=float)
y = np.array(([0], [1], [1], [0]), dtype=float)

# Define useful functions

# Activation function
def sigmoid(t):
    return 1 / (1 + np.exp(-t))

# Derivative of sigmoid, written in terms of the activation p = sigmoid(t)
def sigmoid_derivative(p):
    return p * (1 - p)

# Class definition
class NeuralNetwork:
    def __init__(self, x, y):
        self.input = x
        self.weights1 = np.random.rand(
            self.input.shape[1], 4
        )  # considering we have 4 nodes in the hidden layer
        self.weights2 = np.random.rand(4, 1)
        self.y = y
        self.output = np.zeros(y.shape)

    def feedforward(self):
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.layer2 = sigmoid(np.dot(self.layer1, self.weights2))
        return self.layer2

    def backprop(self):
        # Chain rule through the output layer:
        # d(loss)/d(weights2) = layer1^T . (2 (y - output) * sigmoid'(output))
        d_weights2 = np.dot(
            self.layer1.T, 2 * (self.y - self.output) * sigmoid_derivative(self.output)
        )
        # Propagate the same output error back through weights2 to the hidden layer
        d_weights1 = np.dot(
            self.input.T,
            np.dot(
                2 * (self.y - self.output) * sigmoid_derivative(self.output),
                self.weights2.T,
            )
            * sigmoid_derivative(self.layer1),
        )
        # (y - output) already carries the sign flip, so adding moves the loss downhill
        self.weights1 += d_weights1
        self.weights2 += d_weights2

    def train(self, X, y):
        # X and y are unused; the network trains on the arrays stored in __init__
        self.output = self.feedforward()
        self.backprop()

NN = NeuralNetwork(X, y)
for i in range(1001):  # trains the NN 1,001 times (iterations 0 through 1000)
    if i % 100 == 0:
        print("for iteration # " + str(i) + "\n")
        print("Input : \n" + str(X))
        print("Actual Output: \n" + str(y))
        print("Predicted Output: \n" + str(NN.feedforward()))
        print(
            "Loss: \n" + str(np.mean(np.square(y - NN.feedforward())))
        )  # mean squared error loss
        print("\n")
        print("Layer 1: \n" + str(NN.layer1))
        print("\n")
    NN.train(X, y)
for iteration # 0

Input : 
[[0. 0. 1.]
 [0. 1. 1.]
 [1. 0. 1.]
 [1. 1. 1.]]
Actual Output: 
[[0.]
 [1.]
 [1.]
 [0.]]
Predicted Output: 
[[0.72566269]
 [0.75334452]
 [0.75836412]
 [0.78005202]]
Loss: 
0.313573579550288

Layer 1: 
[[0.56570494 0.61451868 0.54346212 0.56709   ]
 [0.6313528  0.65293811 0.75640461 0.70136411]
 [0.72902257 0.61625936 0.74321464 0.59861713]
 [0.77960146 0.65460282 0.88303848 0.72780618]]

The same fields are printed every 100 iterations; the loss falls steadily:

iteration  100: 0.24406414103905177
iteration  200: 0.14743540155290347
iteration  300: 0.019502956040823027
iteration  400: 0.007191495285311059
iteration  500: 0.004186523051985402
iteration  600: 0.002905898752990908
iteration  700: 0.0022093018329437646
iteration  800: 0.0017751637220149775
iteration  900: 0.0014800978030824732

for iteration # 1000

Predicted Output: 
[[0.01371873]
 [0.96264956]
 [0.96313989]
 [0.04611582]]
Loss: 
0.001267148805535194

Layer 1: 
[[0.2062203  0.47079279 0.70605621 0.61705588]
 [0.98823286 0.07301113 0.97222723 0.99870916]
 [0.98740314 0.99740654 0.96932874 0.07119702]
 [0.99996054 0.97146845 0.99783357 0.97354909]]
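Note that sigmoid_derivative above expects the activation value p = sigmoid(t) rather than t itself, relying on the identity sigmoid'(t) = sigmoid(t) * (1 - sigmoid(t)). A minimal sketch that checks this numerically with a central finite difference (numeric_derivative is a hypothetical helper, not part of the notebook):

import numpy as np

def sigmoid(t):
    return 1 / (1 + np.exp(-t))

def sigmoid_derivative(p):
    return p * (1 - p)

# Central difference (f(t + h) - f(t - h)) / (2h) approximates f'(t)
def numeric_derivative(f, t, h=1e-5):
    return (f(t + h) - f(t - h)) / (2 * h)

t = np.linspace(-4, 4, 9)
analytic = sigmoid_derivative(sigmoid(t))  # note: fed the activation, not t
numeric = numeric_derivative(sigmoid, t)
print(np.max(np.abs(analytic - numeric)))  # tiny (~1e-11): the identity holds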