SimpleConvNet.py
import numpy as np
import sys, os
sys.path.append(os.pardir)
from collections import OrderedDict
from SampleCode.common.layers import *


class SimpleConvNet:
    """Simple ConvNet: Conv - ReLU - Pool - Affine - ReLU - Affine - Softmax."""

    def __init__(self, input_dim=(1, 28, 28),
                 conv_param={'filter_num': 30, 'filter_size': 5, 'pad': 0, 'stride': 1},
                 hidden_size=100, output_size=10, weight_init_std=0.01):
        filter_num = conv_param['filter_num']
        filter_size = conv_param['filter_size']
        filter_pad = conv_param['pad']
        filter_stride = conv_param['stride']
        input_size = input_dim[1]
        # e.g. for a 28x28 input with 5x5 filters, no padding, stride 1:
        # (28 - 5 + 0) / 1 + 1 = 24; 2x2 pooling halves that to 12,
        # so the flattened pooling output has 30 * 12 * 12 = 4320 units.
        conv_output_size = (input_size - filter_size + 2 * filter_pad) / \
            filter_stride + 1
        pool_output_size = int(filter_num * (conv_output_size / 2) * (conv_output_size / 2))

        # Weight initialization
        self.params = {}
        self.params['W1'] = weight_init_std * \
            np.random.randn(filter_num, input_dim[0], filter_size, filter_size)
        self.params['b1'] = np.zeros(filter_num)
        self.params['W2'] = weight_init_std * \
            np.random.randn(pool_output_size, hidden_size)
        self.params['b2'] = np.zeros(hidden_size)
        self.params['W3'] = weight_init_std * \
            np.random.randn(hidden_size, output_size)
        self.params['b3'] = np.zeros(output_size)

        # Layer construction
        self.layers = OrderedDict()
        self.layers['Conv1'] = Convolution(self.params['W1'],
                                           self.params['b1'],
                                           conv_param['stride'],
                                           conv_param['pad'])
        self.layers['Relu1'] = Relu()
        self.layers['Pool1'] = Pooling(pool_h=2, pool_w=2, stride=2)
        self.layers['Affine1'] = Affine(self.params['W2'], self.params['b2'])
        self.layers['Relu2'] = Relu()
        self.layers['Affine2'] = Affine(self.params['W3'], self.params['b3'])
        self.last_layer = SoftmaxWithLoss()

    def predict(self, x):
        for layer in self.layers.values():
            x = layer.forward(x)
        return x

    def loss(self, x, t):
        y = self.predict(x)
        return self.last_layer.forward(y, t)

    def gradient(self, x, t):
        # forward
        self.loss(x, t)

        # backward
        dout = 1
        dout = self.last_layer.backward(dout)
        layers = list(self.layers.values())
        layers.reverse()
        for layer in layers:
            dout = layer.backward(dout)

        # Collect the gradients stored by each layer during backward
        grads = {}
        grads['W1'] = self.layers['Conv1'].dW
        grads['b1'] = self.layers['Conv1'].db
        grads['W2'] = self.layers['Affine1'].dW
        grads['b2'] = self.layers['Affine1'].db
        grads['W3'] = self.layers['Affine2'].dW
        grads['b3'] = self.layers['Affine2'].db
        return grads
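

# --- Usage sketch (illustrative, not part of the original file) ---
# Assumes the book's MNIST loader is importable as
# SampleCode.dataset.mnist.load_mnist (a guess based on the
# SampleCode.common.layers import above); the batch size and
# learning rate below are arbitrary choices.
if __name__ == '__main__':
    from SampleCode.dataset.mnist import load_mnist  # assumed path

    # flatten=False keeps images as (N, 1, 28, 28) for the conv layer
    (x_train, t_train), _ = load_mnist(normalize=True, flatten=False,
                                       one_hot_label=True)

    network = SimpleConvNet()
    x_batch, t_batch = x_train[:10], t_train[:10]

    # One plain SGD step: backprop the gradients and update in place,
    # so the layers (which hold references to the same arrays) see it.
    grads = network.gradient(x_batch, t_batch)
    lr = 0.1
    for key in network.params:
        network.params[key] -= lr * grads[key]

    print('loss after one step:', network.loss(x_batch, t_batch))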