-
Notifications
You must be signed in to change notification settings - Fork 1
/
Layers.py
111 lines (74 loc) · 2.78 KB
/
Layers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import numpy as np
class Layer(object):
    """Abstract interface for a neural-network layer.

    Concrete layers override all four methods; calling any of them on the
    base class raises NotImplementedError.
    """

    def Forward(self, inputs):
        """Compute the layer's output for a batch of inputs."""
        raise NotImplementedError('Calling an abstract method')

    def Backward(self, prev_inputs, dout):
        """Backpropagate `dout` given the inputs seen during Forward.

        Returns a pair (gradient w.r.t. inputs, gradient w.r.t. the
        layer's parameter, or None for parameter-free layers).
        """
        raise NotImplementedError('Calling an abstract method')

    def Param(self):
        """Return the layer's trainable parameter (None if it has none)."""
        raise NotImplementedError('Calling an abstract method')

    def Optimize(self, optimizer, optimizer_cache, d_param):
        """Apply one optimizer step to the parameter; return the new cache."""
        raise NotImplementedError('Calling an abstract method')
class MatMul(Layer):
    """Fully-connected (dense) layer without bias: y = x @ W."""

    def __init__(self, rows, cols):
        # Scale by 1/sqrt(fan_in) so activation variance stays roughly
        # constant across layers (Xavier-style initialization).
        self.weights = np.random.randn(rows, cols) / np.sqrt(rows)

    def Forward(self, inputs):
        """Return the matrix product of the batch with the weights."""
        return np.dot(inputs, self.weights)

    def Backward(self, prev_inputs, dout):
        """Return (gradient w.r.t. inputs, gradient w.r.t. weights)."""
        self.d_weights = prev_inputs.T.dot(dout)
        d_inputs = dout.dot(self.weights.T)
        return d_inputs, self.d_weights

    def Param(self):
        """Expose the weight matrix as this layer's parameter."""
        return self.weights

    def Optimize(self, optimizer, optimizer_cache, d_param):
        """Update the weights via the optimizer; return its new cache."""
        updated, new_cache = optimizer.Optimize(
            self.weights, d_param, optimizer_cache)
        self.weights = updated
        return new_cache
class PreInitializedMatMul(MatMul):
    """MatMul layer whose weight matrix is supplied by the caller
    instead of being randomly initialized."""

    def __init__(self, weights):
        # Skip MatMul's random init; adopt the given matrix directly.
        self.weights = weights
class Bias(Layer):
    """Additive bias layer: y = x + b, with b initialized to zeros."""

    def __init__(self, shape):
        self.bias = np.zeros(shape)

    def Forward(self, inputs):
        """Add the bias (broadcast over the batch) to the inputs."""
        return inputs + self.bias

    def Backward(self, prev_inputs, dout):
        """Return (gradient w.r.t. inputs, gradient w.r.t. bias)."""
        # The bias gradient accumulates over the batch dimension.
        self.d_bias = dout.sum(axis=0)
        return dout, self.d_bias

    def Param(self):
        """Expose the bias vector as this layer's parameter."""
        return self.bias

    def Optimize(self, optimizer, optimizer_cache, d_param):
        """Update the bias via the optimizer; return its new cache."""
        updated, new_cache = optimizer.Optimize(
            self.bias, d_param, optimizer_cache)
        self.bias = updated
        return new_cache
class PreInitializedBias(Bias):
    """Bias layer whose bias vector is supplied by the caller
    instead of being zero-initialized."""

    def __init__(self, bias):
        # Skip Bias's zero init; adopt the given vector directly.
        self.bias = bias
class ReLU(Layer):
    """Rectified linear unit: y = max(x, 0). Parameter-free."""

    def Forward(self, inputs):
        """Zero out all negative entries of `inputs`."""
        return inputs * (inputs > 0)

    def Backward(self, prev_inputs, dout):
        """Return (masked upstream gradient, None).

        BUG FIX: the mask previously used `prev_inputs >= 0`, which let
        gradient flow through entries that are exactly 0 even though
        Forward outputs 0 for them. Use the same strict inequality as
        Forward so the backward pass matches the forward pass.
        """
        return dout * (prev_inputs > 0), None

    def Param(self):
        # Parameter-free layer.
        return None

    def Optimize(self, optimizer, optimizer_cache, d_param):
        # Nothing to update; no optimizer cache to carry.
        return None
class Sigmoid(Layer):
    """Logistic sigmoid activation: y = 1 / (1 + exp(-x)). Parameter-free."""

    def Forward(self, inputs):
        """Apply the sigmoid elementwise."""
        return 1. / (1 + np.exp(-inputs))

    def Backward(self, prev_inputs, dout):
        """Return (dout * sigma(x) * (1 - sigma(x)), None).

        Hoisted: the original evaluated self.Forward(prev_inputs) twice;
        compute sigma once and reuse it (identical result, half the work).
        """
        s = self.Forward(prev_inputs)
        return s * (1. - s) * dout, None

    def Param(self):
        # Parameter-free layer.
        return None

    def Optimize(self, optimizer, optimizer_cache, d_param):
        # Nothing to update; no optimizer cache to carry.
        return None
class BinaryStochastic(Layer):
    """Stochastic binarization: each entry becomes 1 with probability
    equal to its value, else 0. Assumes inputs lie in [0, 1] — e.g. the
    output of a Sigmoid — TODO confirm with callers. Parameter-free.
    """

    def Forward(self, inputs):
        """Sample a Bernoulli mask from `inputs` and return it.

        BUG FIX: the original wrote the 0/1 result back into `inputs`,
        mutating the caller's array in place. Return a fresh array of the
        same dtype instead (same sampled values, no side effect).
        """
        pad = np.random.uniform(size=inputs.shape)
        return (pad <= inputs).astype(inputs.dtype)

    def Backward(self, prev_inputs, dout):
        # Straight-through estimator: pass the gradient through unchanged.
        return dout, None

    def Param(self):
        # Parameter-free layer.
        return None

    def Optimize(self, optimizer, optimizer_cache, d_param):
        # Nothing to update; no optimizer cache to carry.
        return None