layer.m
classdef layer
    %LAYER A fully connected layer: an affine transform (weights, bias)
    %   followed by an activation function. The forward pass records
    %   graph_node entries on the input tensor so gradients can later be
    %   propagated back through the layer.

    properties
        name            % identifier used to tag ops recorded on the graph
        activation_fxn  % activation object exposing fxn_forward and name
        weights         % tensor, shape (input x output), initialized with rand
        bias            % tensor of shape bias_shape, initialized with rand
        grad            % gradient storage (declared; not set in this file)
        output          % cached output (declared; not set in this file)
    end

    methods
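        % Construct a layer. Weight and bias values are drawn uniformly
        % from (0, 1) via rand; `input` and `output` are the fan-in and
        % fan-out, and bias_shape is passed straight through to rand.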
        function obj = layer(name, activation_fxn, input, output, bias_shape)
            obj.name = name;
            obj.weights = tensor(rand(input, output));
            obj.bias = tensor(rand(bias_shape));
            obj.activation_fxn = activation_fxn;
        end
        function x = lay_forward(obj, x)
            % Forward pass: x -> activation(x * W + b).
            x = mult(x, obj.weights, obj.name);
            x = k_add(x, obj.bias, obj.name);
            % The activation must be applied manually: update the tensor's
            % value by hand, then append a graph node recording the op.
            z = obj.activation_fxn.fxn_forward(x.value);
            x = update(x, z);
            x.graph(length(x.graph) + 1) = graph_node(obj.activation_fxn.name, x, obj.weights, obj.name, false);
        end
        function obj = lay_backward_mul(obj, update_fxn, running_grad)
            % Backward pass for the multiply: apply the running gradient
            % to the weights through the supplied update function.
            obj.weights = bprop_compute_tensor(update_fxn, running_grad, obj.weights);
        end
        function obj = lay_backward_add(obj, update_fxn, running_grad)
            % Backward pass for the add: apply the running gradient to the
            % bias through the supplied update function.
            obj.bias = bprop_compute_tensor(update_fxn, running_grad, obj.bias);
        end
    end
end
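
% Usage sketch (hypothetical): assumes the repo's tensor class and an
% activation object exposing fxn_forward and a name field, e.g. a
% `logistic` class; none of these names are confirmed beyond this file.
%
%   act = logistic();                      % assumed activation object
%   l1  = layer("fc1", act, 4, 3, [1 3]);  % 4 inputs -> 3 outputs
%   x   = tensor(rand(1, 4));              % one input sample as a tensor
%   y   = l1.lay_forward(x);               % y.value is activation(x*W + b)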