'''
Algorithm: Neural network with dynamic activation functions
Date: Thursday, November 5th, 2020
Author: Ramiro Mendez, based on Carlos Santana's model
'''
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_circles # Dataset
from util import get_csv, write_csv
# Neural layers
from neural_layer import Neural_Layer
'''
Anonymous function (commented out)
Sigmoid activation function and its derivative:
index 0: the actual function
index 1: its derivative
'''
# sigm = (lambda x: 1 / (1 + np.e ** (-x)),
#         lambda x: x * (1 - x))
def sigm(x, derivate=False):
    u = 1 / (1 + np.exp(-x))
    # Note: the derivative expects the activation value itself (a = sigm(z)),
    # not the pre-activation z, so du = a * (1 - a)
    du = x * (1 - x)
    return du if derivate else u
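# A minimal usage sketch (values follow from the definition above):
# >>> sigm(0.0)                 # 0.5
# >>> sigm(0.5, derivate=True)  # 0.25 -- pass the activation value, not z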
'''
Anonymous function (commented out)
ReLU activation function
'''
# relu = lambda x: np.maximum(0, x)
'''
Cost function (anonymous version commented out)
Least mean squares and its derivative.
Returns the mean squared error; the derivative returns the difference
between output and desired (the constant factor is absorbed by the
learning rate).
'''
# lms_cost = (lambda output, desired: np.mean((output - desired) ** 2),
#             lambda output, desired: (output - desired))
def lms_cost(output, desired, derivate=False):
    u = np.mean((output - desired) ** 2)
    du = output - desired
    return du if derivate else u
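# A minimal usage sketch (shapes chosen to match the training code below):
# >>> out = np.array([[0.8], [0.3]])    # network output, shape (n, 1)
# >>> des = np.array([[1.0], [0.0]])    # desired labels, shape (n, 1)
# >>> lms_cost(out, des)                 # 0.065
# >>> lms_cost(out, des, derivate=True)  # [[-0.2], [0.3]]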
'''
Neural network topology
Returns nn, a neural-network array with the layers inside.
topology Vector with the number of neurons to instantiate per layer
act_f    Activation function used by every layer
For each item in 'topology', creates the corresponding layer,
wiring its outputs as inputs to the next one, with 'act_f' per layer.
'''
def create_nn(topology, act_f):
nn = []
for i, connections in enumerate(topology[:-1]):
nn.append(Neural_Layer(connections, topology[i+1], act_f))
return nn
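# A sketch of the resulting shapes (assuming Neural_Layer(n_inputs, n_neurons,
# act_f) as used above): create_nn([2, 5, 5, 1], sigm) builds three layers
# whose weight matrices have shapes (2, 5), (5, 5) and (5, 1).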
'''
Training
neural_network Vector of layers [(obj), (obj1), ...]
samples        Vector of inputs [(x1, x2), (x1, x2), ...]
desired        Vector of desired outputs (classification) [1, 0, 1, ...]
lms_cost       Least mean squares cost function; returns the error
lr             Learning rate
training       If True, runs training; if False, just predicts
'''
def train(neural_network, samples, desired, lms_cost, lr=0.05, training=True):
    '''
    Forward pass ->
    Takes the samples and passes them through every layer:
    applies the weighted sum (samples * weights + bias),
    then feeds the weighted sum to the activation function.
    z      Stores the value of the weighted sum
    a      Stores the activation of z
    output Vector that stores the (z, a) pair of every layer
    '''
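    # Shape sketch (e.g. with topology [2, 5, 5, 1] and a batch of n samples):
    # samples (n, 2) -> z1/a1 (n, 5) -> z2/a2 (n, 5) -> z3/a3 (n, 1)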
    # Seed the list with the raw input as the "activation" of layer zero
    output = [(None, samples)]
    for layer in neural_network:
        # @ performs matrix multiplication; output[-1][1] is the
        # activation of the previous layer
        z = output[-1][1] @ layer.weights + layer.bias
        a = layer.activation_f(z)
        output.append((z, a))
    '''
    Backward pass <-
    Back-propagation:
    partial derivatives for steepest descent.
    Propagates the error backwards (delta),
    then applies steepest descent.
    '''
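    # The delta recursion implemented below, in this file's notation:
    #   last layer: delta_L = lms_cost'(a_L, desired) * act_f'(a_L)
    #   hidden:     delta_l = (delta_{l+1} @ W_{l+1}.T) * act_f'(a_l)
    # _weights holds W_{l+1} as it was before its update, so each layer
    # back-propagates through the pre-update weights.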
    if training:
        # Back-propagation
        '''
        deltas Error per layer
        '''
        deltas = []
        for i in reversed(range(0, len(neural_network))):
            z = output[i + 1][0]
            a = output[i + 1][1]
            if i == len(neural_network) - 1:
                # Delta of the last layer
                deltas.insert(0, lms_cost(a, desired, derivate=True)
                              * neural_network[i].activation_f(a, derivate=True))
            else:
                # Delta computed from the layer in front of it
                deltas.insert(
                    0, deltas[0] @ _weights.T * neural_network[i].activation_f(a, derivate=True))
            # Save the pre-update weights for the next (previous) layer
            _weights = neural_network[i].weights
            # Steepest descent
            neural_network[i].bias = neural_network[i].bias - \
                (np.mean(deltas[0], axis=0, keepdims=True) * lr)
            neural_network[i].weights = neural_network[i].weights - \
                output[i][1].T @ deltas[0] * lr
return output[-1][1]
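# A minimal usage sketch (hypothetical values; relies on create_nn, sigm and
# lms_cost defined above):
# >>> nn = create_nn([2, 5, 5, 1], sigm)
# >>> X = np.array([[0.1, 0.2], [0.9, 0.8]])
# >>> y = np.array([[0], [1]])
# >>> pred = train(nn, X, y, lms_cost, lr=0.05)         # one training step
# >>> pred = train(nn, X, y, lms_cost, training=False)  # prediction only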
def run(epochs):
    for i in range(epochs):
        # Training step
        pY = train(neural_network, samples, desired, lms_cost)
        if i % 25 == 0:
            # Output the prediction every 25 epochs
            print(f"Output prediction: {pY.T}")
            loss.append(lms_cost(pY, desired))
            # Evaluate the network on a res x res grid over the input
            # space to draw the decision boundary
            res = 75
            _x0 = np.linspace(samples.min() - 1, samples.max() + 1, res)
            _x1 = np.linspace(samples.min() - 1, samples.max() + 1, res)
            _Y = np.zeros((res, res))
            for i0, x0 in enumerate(_x0):
                for i1, x1 in enumerate(_x1):
                    _Y[i0, i1] = train(neural_network, np.array(
                        [[x0, x1]]), desired, lms_cost, training=False)[0][0]
            plt.pcolormesh(_x0, _x1, _Y, cmap="PRGn")
            # plt.axis("equal")
            # Samples in the first class, class '0'
            plt.scatter(samples[desired[:, 0] == 0, 0],
                        samples[desired[:, 0] == 0, 1], color="violet")
            # Samples in the second class, class '1'
            plt.scatter(samples[desired[:, 0] == 1, 0],
                        samples[desired[:, 0] == 1, 1], color="green")
print(f"Epochs: {i}")
plt.ion()
plt.show()
plt.pause(0.5)
return pY;
# plt.clf()
# plt.plot(range(len(loss)), loss)
# plt.show()
if __name__ == "__main__":
option = 3
while option != 0:
print("MENU")
print("1.- Select samples file")
print("2.- Select desired file")
print("3.- Train")
print("4.- Save outputs(y)")
print("5.- SK-Circles")
print("0.- Exit")
option = int(input(">>> "))
        if option == 1:
            samples = get_csv()
            print(f"Training inputs:\n{samples}")
            # Dataset dimensions
            # n: number of samples to train with
            # p: number of inputs (features) per sample
            n = len(samples)
            p = len(samples[0])
        elif option == 2:
            desired = get_csv()
            # Samples in the first class, class '0'
            plt.scatter(samples[desired[:, 0] == 0, 0],
                        samples[desired[:, 0] == 0, 1], color="violet")
            # Samples in the second class, class '1'
            plt.scatter(samples[desired[:, 0] == 1, 0],
                        samples[desired[:, 0] == 1, 1], color="green")
            plt.show()
        elif option == 3:
            '''
            topology       Number of neurons per layer. The final entry
                           matches the length of the desired output; for
                           binary classification it is just 1.
            neural_network All layers, built from the topology with the
                           network's activation function.
            loss           Vector of losses, one entry per logged epoch
            '''
            topology = [p, 5, 5, len(desired[0])]
            neural_network = create_nn(topology, sigm)
            loss = []
epochs = int(input("Epochs >>> "))
out = run(epochs)
print(f"Final prediction: \n{out}")
elif option == 4:
write_csv(out.T)
elif option == 5:
n = 500
p = 2
            '''
            make_circles
            n_samples : int or two-element tuple, optional (default=100)
                If int, the total number of points generated.
                For odd numbers, the inner circle will have one point
                more than the outer circle.
                If a two-element tuple, the number of points in the
                outer and inner circles.
            noise : double or None (default=None)
                Standard deviation of Gaussian noise added to the data.
            factor : 0 < double < 1 (default=.8)
                Scale factor between inner and outer circle.
            '''
            # samples is a matrix with the inputs of every sample
            # desired is a vector with the classification of every sample
samples, desired = make_circles(
n_samples=n, factor=0.45, noise=0.05)
desired = desired[:, np.newaxis]
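            # Shape sketch for this dataset: samples is (500, 2) and, after
            # the np.newaxis reshape above, desired is a (500, 1) column of
            # 0/1 labels.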
        else:
            print("Bye!")
            break