2023-10-25 12:12:43 +00:00
|
|
|
import numpy as np
|
|
|
|
from copy import deepcopy
|
|
|
|
|
|
|
|
class Network:
    """Feed-forward neural network assembled from a sequence of layer classes.

    `layers` holds layer *classes* (not instances); each one is instantiated
    lazily in run() with the current activation vector.
    """

    def __init__(self, layers):
        # Layer classes to instantiate on each forward pass.
        self.layers = layers
        # Architecture constants: 784 inputs (presumably 28x28 images —
        # TODO confirm), 10 class outputs, 4 layers total.
        self.n_layers = 4
        self.n_inputs = 784
        self.n_outputs = 10
        # Neurons per layer, input to output.
        self.n = [self.n_inputs, 512, 256, self.n_outputs]
        # Random placeholder input vector (no real data loading implemented).
        self.x = np.random.rand(self.n_inputs)

    def run(self):
        """Feed self.x through every layer in order and return the final output.

        Bug fix: the original overwrote `result` on every iteration and always
        re-fed the raw network input to each layer instead of chaining each
        layer's output into the next.
        """
        y = self.x
        for layer_cls in self.layers:
            # NOTE(review): weight/bias file names are hard-coded to layer 1's
            # files; per-layer file names are needed for a real network.
            l = layer_cls(y, W_file='W_1.txt', b_file='b_1.txt')
            y = l.run()
        return y
|
2023-10-25 12:12:43 +00:00
|
|
|
class Layer:
    """Single network layer whose weights and bias are parsed from text files.

    Parameters
    ----------
    x : input vector for this layer.
    W_file : path to a text file with the weight matrix, one row per line.
    b_file : path to a text file with the bias values, same format.
    """

    def __init__(self, x, W_file=None, b_file=None):
        # define dimensions (copies of the network-wide architecture constants)
        self.n_layers = 4
        self.n_inputs = 784
        self.n_outputs = 10
        self.n = [self.n_inputs, 512, 256, self.n_outputs]

        self.x = x

        # Read weights and biases from the provided files.
        # Bug fix: read() returns plain nested lists of floats, so convert
        # them with np.array — the original called np.load (which expects a
        # file path, not a list) and crashed.
        # TODO(review): the original comment promised random weights when no
        # file is given; that fallback was never implemented.
        files = read(W_file, b_file)
        self.W = np.array(files.get('W'))
        self.b = np.array(files.get('b'))
        # (Removed debug prints that referenced undefined self.W_list /
        # self.b_list attributes and raised AttributeError.)

    def run(self):
        """Apply this layer to its input and return the activation vector.

        Bug fix: the original referenced undefined self.W_list / self.b_list
        and passed arguments to layer() in the wrong order — its signature
        is layer(W, x, b).
        """
        return layer(self.W, self.x, self.b)
|
|
|
|
|
|
|
|
def read(W_file, b_file):
    """Parse whitespace-separated weight and bias matrices from text files.

    Parameters
    ----------
    W_file : path to the weights file, one matrix row per line.
    b_file : path to the biases file, same format.

    Returns
    -------
    dict with keys 'W' and 'b', each a nested list of floats (row-major).
    """

    def _parse_matrix(path):
        # One row per line. split() (vs the original split(' ')) tolerates
        # repeated spaces and strips the trailing newline, which previously
        # produced tokens like '4.0\n' or crashed float('') on double spaces.
        with open(path) as f:
            return [[float(tok) for tok in line.split()] for line in f]

    W_list = _parse_matrix(W_file)
    # Bug fix: the original built b_list from W_list (copy-paste error), so
    # the biases were silently replaced by a copy of the weights.
    b_list = _parse_matrix(b_file)

    return {'W': W_list, 'b': b_list}
|
2023-10-25 12:12:43 +00:00
|
|
|
|
|
|
|
# Activation function (ReLU): identity for positive inputs, zero otherwise.
def sigma(y):
    """Return y if it is positive, else 0 (rectified linear unit)."""
    return y if y > 0 else 0


# Element-wise version of sigma, usable on whole numpy vectors.
sigma_vec = np.vectorize(sigma)
|
|
|
|
|
|
|
|
# Layer function: affine transform of the input followed by the activation.
def layer(W, x, b):
    """Return sigma(W @ x + b) for weight matrix W, input x and bias b."""
    pre_activation = W @ x + b
    return sigma_vec(pre_activation)
|
|
|
|
|
|
|
|
# Full network: apply every (weight, bias) pair in W_list/b_list in sequence.
def f(W_list, b_list, x):
    """Evaluate the network on input x and return the final activation.

    The input is deep-copied first so the caller's vector is never mutated.
    """
    activation = deepcopy(x)
    for weights, bias in zip(W_list, b_list):
        # Feed each layer's output into the next one.
        activation = layer(weights, activation, bias)
    return activation
|
|
|
|
|
|
|
|
def main():
    """Build a two-layer demo network and run one forward pass."""
    net = Network([Layer, Layer])
    net.run()
|
2023-10-25 12:12:43 +00:00
|
|
|
|
|
|
|
# Script entry point: run the demo network when executed directly.
if __name__ == '__main__':
    main()
|