Skip to content

Commit beb1ac5

Browse files
author
matthieurouif
committed
part 2
1 parent c0e66d9 commit beb1ac5

File tree

8 files changed

+42
-21
lines changed

8 files changed

+42
-21
lines changed

assignment1.pdf

203 KB
Binary file not shown.

q1_softmax.pyc

2.46 KB
Binary file not shown.

q2_gradcheck.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,23 @@ def gradcheck_naive(f, x):
1818
it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
1919
while not it.finished:
2020
ix = it.multi_index
21-
21+
2222
### try modifying x[ix] with h defined above to compute numerical gradients
2323
### make sure you call random.setstate(rndstate) before calling f(x) each time, this will make it
2424
### possible to test cost functions with built in randomness later
2525
### YOUR CODE HERE:
26-
raise NotImplementedError
27-
### END YOUR CODE
26+
epsilon = np.zeros(x.shape)
27+
epsilon[ix] = h
28+
29+
random.setstate(rndstate)
30+
f_m, grad_m = f(x - epsilon)
31+
32+
random.setstate(rndstate)
33+
f_p, grad_p = f(x + epsilon)
2834

35+
numgrad = (f_p - f_m) / (2 * h)
36+
37+
### END YOUR CODE
2938
# Compare gradients
3039
reldiff = abs(numgrad - grad[ix]) / max(1, abs(numgrad), abs(grad[ix]))
3140
if reldiff > 1e-5:
@@ -35,7 +44,6 @@ def gradcheck_naive(f, x):
3544
return
3645

3746
it.iternext() # Step to next dimension
38-
3947
print "Gradient check passed!"
4048

4149
def sanity_check():

q2_gradcheck.pyc

2.51 KB
Binary file not shown.

q2_neural.py

Lines changed: 25 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ def forward_backward_prop(data, labels, params, dimensions):
1616
### Unpack network parameters (do not modify)
1717
ofs = 0
1818
Dx, H, Dy = (dimensions[0], dimensions[1], dimensions[2])
19+
N = labels.shape[0]
1920

2021
W1 = np.reshape(params[ofs:ofs+ Dx * H], (Dx, H))
2122
ofs += Dx * H
@@ -26,17 +27,35 @@ def forward_backward_prop(data, labels, params, dimensions):
2627
b2 = np.reshape(params[ofs:ofs + Dy], (1, Dy))
2728

2829
### YOUR CODE HERE: forward propagation
29-
raise NotImplementedError
30+
x = data
31+
a1 = x
32+
z1 = x
33+
z2 = np.dot(a1,W1) + b1
34+
a2 = sigmoid(z2)
35+
z3 = np.dot(a2, W2) + b2
36+
37+
a3 = softmax(z3) #normalized for probability
38+
#loss function: cross-entropy — see https://en.wikipedia.org/wiki/Cross_entropy or lecture 6
39+
cost = - np.sum(np.log(np.sum(labels * a3, axis=1))) / N #the labels * a3 multiplication keeps only the non-zero entries; we use the softmax output here
40+
3041
### END YOUR CODE
3142

3243
### YOUR CODE HERE: backward propagation
33-
raise NotImplementedError
44+
error = (a3 - labels) / N
45+
46+
gradW2 = np.dot(np.transpose(a2), error)
47+
gradb2 = np.sum(error, axis=0)
48+
49+
delta2 = sigmoid_grad(a2) * np.dot(error, np.transpose(W2))
50+
gradW1 = np.dot( np.transpose(x), delta2)
51+
gradb1 = np.sum(delta2, axis=0)
52+
3453
### END YOUR CODE
3554

3655
### Stack gradients (do not modify)
3756
grad = np.concatenate((gradW1.flatten(), gradb1.flatten(),
3857
gradW2.flatten(), gradb2.flatten()))
39-
58+
4059
return cost, grad
4160

4261
def sanity_check():
@@ -46,12 +65,12 @@ def sanity_check():
4665
"""
4766
print "Running sanity check..."
4867

49-
N = 20
68+
N = 20 #number of windows to classify
5069
dimensions = [10, 5, 10]
5170
data = np.random.randn(N, dimensions[0]) # each row will be a datum
5271
labels = np.zeros((N, dimensions[2]))
5372
for i in xrange(N):
54-
labels[i,random.randint(0,dimensions[2]-1)] = 1
73+
labels[i,random.randint(0,dimensions[2]-1)] = 1 # assign a one-hot label (probability 1 on a single class) to each row
5574

5675
params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (
5776
dimensions[1] + 1) * dimensions[2], )
@@ -73,4 +92,4 @@ def your_sanity_checks():
7392

7493
if __name__ == "__main__":
7594
sanity_check()
76-
your_sanity_checks()
95+
your_sanity_checks()

q2_sigmoid.py

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,7 @@ def sigmoid(x):
44
"""
55
Compute the sigmoid function for the input here.
66
"""
7-
8-
### YOUR CODE HERE
9-
raise NotImplementedError
10-
### END YOUR CODE
11-
7+
x = 1 / (1 + np.exp(-x))
128
return x
139

1410
def sigmoid_grad(f):
@@ -17,10 +13,7 @@ def sigmoid_grad(f):
1713
for this implementation, the input f should be the sigmoid
1814
function value of your original input x.
1915
"""
20-
21-
### YOUR CODE HERE
22-
raise NotImplementedError
23-
### END YOUR CODE
16+
f = (1-f)*(f)
2417

2518
return f
2619

q2_sigmoid.pyc

1.96 KB
Binary file not shown.

q3_word2vec.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,8 @@ def normalizeRows(x):
1010
# Implement a function that normalizes each row of a matrix to have unit length
1111

1212
### YOUR CODE HERE
13-
raise NotImplementedError
13+
norm = np.sqrt(np.sum(x * x,axis = 1, keepdims=True))
14+
x = x/norm
1415
### END YOUR CODE
1516

1617
return x
@@ -195,4 +196,4 @@ def getRandomContext(C):
195196

196197
if __name__ == "__main__":
197198
test_normalize_rows()
198-
test_word2vec()
199+
test_word2vec()

0 commit comments

Comments
 (0)