started on uke6
This commit is contained in:
parent
dbd0506e4c
commit
d5c072ba3f
107
uke6.py
Normal file
@@ -0,0 +1,107 @@
import numpy as np
from copy import deepcopy


class Network:
    def __init__(self):
        self.layers = []


class Layer:
    def __init__(self, w_file=None, b_file=None):
        # define dimensions
        self.n_layers = 4
        self.n_inputs = 784
        self.n_outputs = 10
        self.n = [self.n_inputs, 512, 256, self.n_outputs]
        self.x = np.random.rand(self.n_inputs)

        # define weights and biases
        # generate random weights if no file is provided, otherwise read them from the file
        if w_file is None:
            self.W_list = []
            for n_cur, n_next in zip(self.n[:-1], self.n[1:]):
                self.W_list.append(np.random.rand(n_next, n_cur))
        else:
            with open(w_file) as f:
                lines = f.readlines()
            self.W_list = [line.split(' ') for line in lines]
            self.W_list = [[float(v) for v in row] for row in self.W_list]

        if b_file is None:
            self.b_list = []
            for n_cur, n_next in zip(self.n[:-1], self.n[1:]):
                self.b_list.append(np.random.rand(n_next))
        else:
            with open(b_file) as f:
                lines = f.readlines()
            self.b_list = [line.split(' ') for line in lines]
            self.b_list = [[float(v) for v in row] for row in self.b_list]

        print(len(self.W_list[0]), len(self.b_list[0]), len(self.x))
        print(len(self.W_list[-1]), len(self.b_list[-1]), len(self.x))

    def run(self):
        print(f(self.W_list, self.b_list, self.x))


# define activation function (this is ReLU, despite the name sigma)
def sigma(y):
    if y > 0:
        return y
    else:
        return 0.0  # return a float so np.vectorize infers a float output dtype


sigma_vec = np.vectorize(sigma)
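# quick sanity check of the vectorised activation (illustrative):
# sigma_vec(np.array([-1.0, 0.0, 0.5])) gives array([0. , 0. , 0.5])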


# define layer function for given weight matrix, input and bias
def layer(W, x, b):
    return sigma_vec(W @ x + b)


# define neural network with all weights W and all biases b in W_list and b_list
def f(W_list, b_list, x):
    y = deepcopy(x)  # deepcopy so that input is not changed
    for W, b in zip(W_list, b_list):
        y = layer(W, y, b)  # call layer multiple times with all weights and biases
    return y
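
# shape check for the dimensions used in Layer above: with n = [784, 512, 256, 10],
# zip(n[:-1], n[1:]) yields the pairs (784, 512), (512, 256), (256, 10), so the random
# weight matrices have shapes (512, 784), (256, 512), (10, 256) and the biases lengths
# 512, 256, 10; f therefore maps a 784-vector to a 10-vector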


def main():
    l = Layer()
    l.run()
    # the second Layer reads its weights and biases from text files,
    # so W_1.txt and b_1.txt must exist next to the script
    l2 = Layer(w_file='W_1.txt', b_file='b_1.txt')
    l2.run()


def gamle_greier():  # "gamle greier" = "old stuff" (Norwegian)
    # define dimensions
    n_layers = 4
    n_inputs = 64
    n_outputs = 10
    n = [n_inputs, 128, 128, n_outputs]

    # define weights and biases
    W_list = []
    b_list = []
    for n_cur, n_next in zip(n[:-1], n[1:]):
        W_list.append(np.random.rand(n_next, n_cur))
        b_list.append(np.random.rand(n_next))

    # generate random input (this would usually be pixels of an image)
    x = np.random.rand(n_inputs)

    # call the network
    print(f(W_list, b_list, x))

    for W in W_list:
        print(W.shape)


if __name__ == '__main__':
    main()
    gamle_greier()
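
As committed, the file-loading branches leave self.W_list and self.b_list as plain nested lists of floats, one inner list per line of the text file, so the loaded weights are not shaped as the (n_next, n_cur) matrices that layer() multiplies with. A minimal sketch of a loader that would produce matching matrices, assuming each line of W_1.txt holds the row-major entries of one layer's weight matrix (the actual file format is not specified by this commit, and load_weights is a hypothetical helper, not part of the code above):

    import numpy as np

    def load_weights(path, n):
        # one line per layer: space-separated floats, reshaped to (n_next, n_cur)
        with open(path) as f:
            rows = [line.split() for line in f if line.strip()]
        return [np.array([float(v) for v in row]).reshape(n_next, n_cur)
                for row, (n_cur, n_next) in zip(rows, zip(n[:-1], n[1:]))]

With n = [784, 512, 256, 10] this returns three matrices of shapes (512, 784), (256, 512) and (10, 256), matching the randomly initialised case.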