# GitHub Repository: amanchadha/coursera-deep-learning-specialization
# Path: blob/master/C5 - Sequence Models/Week 1/Dinosaur Island -- Character-level language model/generateTestCases.py
# New Generate Test Cases
from solutions import *
import numpy as np
import math
import os, sys
import copy
from random import shuffle
# import tensorflow as tf
sys.path.append('../')
sys.path.append('../../')

from grader_support import stdout_redirector
from grader_support import util


mFiles = [
    "clip.py",
    "sample.py",
    "optimize.py",
    "model.py"
]


# read the dinosaur names and build the character vocabulary
data = open('dinos.txt', 'r').read()
data = data.lower()
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
char_to_ix = {ch: i for i, ch in enumerate(sorted(chars))}
ix_to_char = {i: ch for i, ch in enumerate(sorted(chars))}


# set the seed to be able to replicate the same results.
np.random.seed(3)

# generating test cases for the clip function
dWax = np.random.randn(5, 3) * 10
dWaa = np.random.randn(5, 5) * 10
dWya = np.random.randn(2, 5) * 10
db = np.random.randn(5, 1) * 10
dby = np.random.randn(2, 1) * 10
gradients = {"dWax": dWax, "dWaa": dWaa, "dWya": dWya, "db": db, "dby": dby}
gradients1 = copy.deepcopy(gradients)
gradients = clip(gradients, 10)
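
# For reference only: clip (imported from solutions) is expected to clip each
# gradient element-wise into [-maxValue, maxValue] and return the dictionary.
# The sketch below is an assumption about its behavior, not the graded
# implementation; the name _reference_clip is hypothetical.
def _reference_clip(gradients, maxValue):
    # clip every gradient array in place, then return the same dictionary
    for gradient in gradients.values():
        np.clip(gradient, -maxValue, maxValue, out=gradient)
    return gradients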

# generating test cases for the sampling function
vocab_size = 27
n = 23
n_a = 50
a0 = np.random.randn(n_a, 1) * 0.2
i0 = 1  # first character is ix_to_char[i0]
Wax = np.random.randn(n_a, vocab_size)
Waa = np.random.randn(n_a, n_a)
Wya = np.random.randn(vocab_size, n_a)
b = np.random.randn(n_a, 1)
by = np.random.randn(vocab_size, 1)
parameters = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
indexes = sample(parameters, char_to_ix, 0)
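
# For reference only: sample is expected to run the RNN forward one step at a
# time, draw the next character index from the softmax output, and stop at the
# newline character (or after 50 characters). A sketch under those
# assumptions, with the hypothetical name _reference_sample:
def _reference_sample(parameters, char_to_ix, seed):
    Waa, Wax, Wya = parameters["Waa"], parameters["Wax"], parameters["Wya"]
    b, by = parameters["b"], parameters["by"]
    vocab_size = by.shape[0]
    n_a = Waa.shape[1]
    x = np.zeros((vocab_size, 1))   # start from a zero input vector
    a_prev = np.zeros((n_a, 1))     # and a zero hidden state
    indices = []
    newline_character = char_to_ix['\n']
    counter = 0
    idx = -1
    while idx != newline_character and counter != 50:
        # one forward step of the RNN
        a = np.tanh(np.dot(Wax, x) + np.dot(Waa, a_prev) + b)
        z = np.dot(Wya, a) + by
        y = np.exp(z) / np.sum(np.exp(z))  # softmax over the vocabulary
        np.random.seed(counter + seed)     # assumed seeding, mirroring the course
        idx = np.random.choice(range(vocab_size), p=y.ravel())
        indices.append(idx)
        # feed the sampled character back in as a one-hot vector
        x = np.zeros((vocab_size, 1))
        x[idx] = 1
        a_prev = a
        counter += 1
    if counter == 50:
        indices.append(char_to_ix['\n'])
    return indices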

# generating test cases for the optimize function
vocab_size = 27
n_a = 50
a_prev = np.random.randn(n_a, 1) * 0.2
Wax = np.random.randn(n_a, vocab_size) * 0.4
Waa = np.random.randn(n_a, n_a)
Wya = np.random.randn(vocab_size, n_a)
b = np.random.randn(n_a, 1)
by = np.random.randn(vocab_size, 1)
parameters2 = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
parameters3 = copy.deepcopy(parameters2)
X = [12, 3, 5, 11, 22, 3]
Y = [4, 14, 11, 22, 25, 26]
loss, g, a_last = optimize(X, Y, a_prev, parameters2, learning_rate=0.01)
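
# For reference only: optimize is expected to perform one training step:
# forward propagation through the RNN, backpropagation, gradient clipping,
# and a gradient-descent parameter update, returning the loss, the gradients,
# and the last hidden state. An assumed outline (the helper names come from
# the course's utils module and are not defined in this file):
#
#   loss, cache = rnn_forward(X, Y, a_prev, parameters)
#   gradients, a = rnn_backward(X, Y, parameters, cache)
#   gradients = clip(gradients, 5)
#   parameters = update_parameters(parameters, gradients, learning_rate)
#   return loss, gradients, a[len(X) - 1]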

# generating the model, suppressing its print output
with stdout_redirector.stdout_redirected():
    with open("dinos.txt") as f:
        examples = f.readlines()
    np.random.seed(0)
    np.random.shuffle(examples)
    a = model(examples, ix_to_char, char_to_ix, 200)
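
# For reference only: model is expected to train the RNN for the given number
# of iterations, cycling through the shuffled examples and calling optimize on
# one name at a time; `a` above is whatever model returns (the trained
# parameters in the course version). An assumed outline of one iteration:
#
#   index = j % len(examples)
#   X = [None] + [char_to_ix[ch] for ch in examples[index].strip()]
#   Y = X[1:] + [char_to_ix['\n']]
#   curr_loss, gradients, a_prev = optimize(X, Y, a_prev, parameters)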

def generateTestCases():
    testCases = {
        'clip': {
            'partId': 'sYLqC',
            'testCases': [
                {
                    'testInput': (gradients1, 10),
                    'testOutput': gradients
                }
            ]
        },
        'sample': {
            'partId': 'QxiNo',
            'testCases': [
                {
                    'testInput': (parameters, char_to_ix, 0),
                    'testOutput': indexes
                }
            ]
        },
        'optimize': {
            'partId': 'x2pxm',
            'testCases': [
                {
                    'testInput': (X, Y, a_prev, parameters3),
                    'testOutput': (loss, g, a_last)
                }
            ]
        },
        'model': {
            'partId': 'mJTOb',
            'testCases': [
                {
                    'testInput': (examples, ix_to_char, char_to_ix, 200),
                    'testOutput': a
                }
            ]
        }
    }
    return testCases
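
# A small smoke test (not part of the original grader): build the test cases
# and report what was generated. Only meaningful where solutions.py,
# grader_support, and dinos.txt are available.
if __name__ == "__main__":
    cases = generateTestCases()
    for name, spec in cases.items():
        print(name, spec['partId'], len(spec['testCases']), "test case(s)")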