Compare commits

...

3 Commits

SHA1 Message Date
d5c072ba3f Started on uke6 2023-10-25 14:12:43 +02:00
dbd0506e4c uke5 2023-10-25 14:12:34 +02:00
001702b010 Finished week 3 2023-09-29 12:32:02 +02:00
3 changed files with 135 additions and 3 deletions

@@ -3,7 +3,7 @@
"""
Created on Thu Sep 28 08:23:56 2023
@author: innagumauri
@author: Inna Gumauri, Trygve Børte Nomeland
"""
#%% Task 1
@@ -47,7 +47,7 @@ def print_imp_dir(path="./"):
     p = Path(path)
     files = list(p.glob('*.py'))
     for f in files:
-        print(f'{Path.cwd()}+{f}: {get_imp_file(f)}')
+        print(f'{Path.cwd()}/{f}: {get_imp_file(f)}')
 print_imp_dir()
 # %%

uke5.py Normal file (25 additions)

@@ -0,0 +1,25 @@
import numpy as np

def relu(y):
    return np.maximum(0, y)

def layer(W, x, b):
    # one dense layer: affine map followed by the ReLU activation
    return relu(W @ x + b)

# layer dimensions: 64 inputs, three hidden layers of 128, 10 outputs
n = [64, 128, 128, 128, 10]
print(f"Dimensions: {n}")

# First layer: random input, weight matrix and bias
x = np.random.rand(n[0])
W = np.random.rand(n[1], n[0])
b = np.random.rand(n[1])
y = layer(W, x, b)

# remaining layers: propagate y through random weights and biases
for i in range(2, len(n)):
    W = np.random.rand(n[i], n[i - 1])
    b = np.random.rand(n[i])
    y = layer(W, y, b)
print(y)
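
The hand-written first layer above can be folded into the same loop by also drawing the first weight matrix from consecutive entries of n, which is the pattern uke6.py below uses. A minimal sketch of that variant, reusing the relu and layer helpers defined above:

n = [64, 128, 128, 128, 10]
y = np.random.rand(n[0])  # random input vector
# one (W, b) pair per consecutive pair of dimensions, first layer included
for n_cur, n_next in zip(n[:-1], n[1:]):
    W = np.random.rand(n_next, n_cur)
    b = np.random.rand(n_next)
    y = layer(W, y, b)
print(y.shape)  # (10,)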

uke6.py Normal file (107 additions)

@@ -0,0 +1,107 @@
import numpy as np
from copy import deepcopy

class Network:
    def __init__(self):
        self.layers = []
class Layer:
    def __init__(self, w_file=None, b_file=None):
        # define dimensions
        self.n_layers = 4
        self.n_inputs = 784
        self.n_outputs = 10
        self.n = [self.n_inputs, 512, 256, self.n_outputs]
        self.x = np.random.rand(self.n_inputs)
        # define weights and biases:
        # generate random weights if no file is provided, else read the file
        if w_file is None:
            self.W_list = []
            for n_cur, n_next in zip(self.n[:-1], self.n[1:]):
                self.W_list.append(np.random.rand(n_next, n_cur))
        else:
            with open(w_file) as f:
                lines = f.readlines()
            self.W_list = [x.split(' ') for x in lines]
            self.W_list = [[float(n) for n in x] for x in self.W_list]
        if b_file is None:
            self.b_list = []
            for n_cur, n_next in zip(self.n[:-1], self.n[1:]):
                self.b_list.append(np.random.rand(n_next))
        else:
            with open(b_file) as f:
                lines = f.readlines()
            self.b_list = [x.split(' ') for x in lines]
            self.b_list = [[float(n) for n in x] for x in self.b_list]
        # sanity check: sizes of the first and last layer
        print(len(self.W_list[0]), len(self.b_list[0]), len(self.x))
        print(len(self.W_list[-1]), len(self.b_list[-1]), len(self.x))
    def run(self):
        print(f(self.W_list, self.b_list, self.x))

# define activation function (ReLU)
def sigma(y):
    if y > 0:
        return y
    else:
        return 0

sigma_vec = np.vectorize(sigma)

# define layer function for given weight matrix, input and bias
def layer(W, x, b):
    return sigma_vec(W @ x + b)

# define the neural network with all weights and biases in W_list and b_list
def f(W_list, b_list, x):
    y = deepcopy(x)  # deepcopy so that the input is not changed
    for W, b in zip(W_list, b_list):
        y = layer(W, y, b)  # apply layer once per weight/bias pair
    return y
def main():
    l = Layer()
    l.run()
    l2 = Layer(w_file='W_1.txt', b_file='b_1.txt')
    l2.run()

def gamle_greier():
    # define dimensions
    n_layers = 4
    n_inputs = 64
    n_outputs = 10
    n = [n_inputs, 128, 128, n_outputs]
    # define weights and biases
    W_list = []
    b_list = []
    for n_cur, n_next in zip(n[:-1], n[1:]):
        W_list.append(np.random.rand(n_next, n_cur))
        b_list.append(np.random.rand(n_next))
    # generate random input (this would usually be the pixels of an image)
    x = np.random.rand(n_inputs)
    # call the network
    print(f(W_list, b_list, x))
    for W in W_list:
        print(W.shape)

if __name__ == '__main__':
    main()
    gamle_greier()