Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place. Commercial Alternative to JupyterHub.
Path: blob/master/C5 - Sequence Models/Week 1/Dinosaur Island -- Character-level language model/generateTestCases.py
Views: 4819
"""Generate grader test cases for the Dinosaur Island character-level
language-model assignment.

Importing this module runs the reference solutions (``clip``, ``sample``,
``optimize``, ``model``) on fixed, seeded random inputs and records their
outputs; ``generateTestCases()`` then packages (input, expected-output)
pairs per graded part.  The exact order of ``np.random.*`` calls below is
significant: with the fixed seeds it determines every generated value, so
statements must not be reordered.
"""
# New Generate Test Cases
from solutions import *
import numpy as np
import math
import os,sys
import copy
from random import shuffle
# import tensorflow as tf
sys.path.append('../')
sys.path.append('../../')

from grader_support import stdout_redirector
from grader_support import util


# Solution files this grader exercises (one per graded part).
mFiles = [
    "clip.py",
    "sample.py",
    "optimize.py",
    "model.py"
]


# Build the character vocabulary from the dinosaur-name corpus.
# FIX: read via a context manager so the file handle is closed
# (the original `open(...).read()` leaked it), consistent with the
# `with open("dinos.txt")` used further down.
with open('dinos.txt', 'r') as corpus_file:
    data = corpus_file.read()
data = data.lower()
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
# Sorting makes the char<->index mapping deterministic across runs.
char_to_ix = { ch:i for i,ch in enumerate(sorted(chars)) }
ix_to_char = { i:ch for i,ch in enumerate(sorted(chars)) }


# set the seed to be able to replicate the same results.
np.random.seed(3)

# --- test case for clip ---
# Gradients scaled by 10 so that clipping at threshold 10 actually triggers.
dWax = np.random.randn(5,3)*10
dWaa = np.random.randn(5,5)*10
dWya = np.random.randn(2,5)*10
db = np.random.randn(5,1)*10
dby = np.random.randn(2,1)*10
gradients = {"dWax": dWax, "dWaa": dWaa, "dWya": dWya, "db": db, "dby": dby}
gradients1 = copy.deepcopy(gradients)  # pristine copy kept as the grader input
gradients = clip(gradients, 10)        # expected output (clip may mutate its arg)

# generating test cases for sampling function
vocab_size = 27
n = 23
n_a = 50
a0 = np.random.randn(n_a, 1) * 0.2
i0 = 1 # first character is ix_to_char[i0]
Wax = np.random.randn(n_a, vocab_size)
Waa = np.random.randn(n_a, n_a)
Wya = np.random.randn(vocab_size, n_a)
b = np.random.randn(n_a, 1)
by = np.random.randn(vocab_size, 1)
parameters = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
indexes = sample(parameters, char_to_ix, 0)

# # generating test cases for optimize function
vocab_size = 27
n_a = 50
a_prev = np.random.randn(n_a, 1) * 0.2
Wax = np.random.randn(n_a, vocab_size) * 0.4
Waa = np.random.randn(n_a, n_a)
Wya = np.random.randn(vocab_size, n_a)
b = np.random.randn(n_a, 1)
by = np.random.randn(vocab_size, 1)
parameters2 = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
# Copy taken BEFORE optimize runs — presumably optimize updates the
# parameters in place; the grader input must be the untouched version.
parameters3 = copy.deepcopy(parameters2)
X = [12,3,5,11,22,3]
Y = [4,14,11,22,25, 26]
loss, g, a_last = optimize(X, Y, a_prev, parameters2, learning_rate = 0.01)

# generating the model. Killing the print statements.
with stdout_redirector.stdout_redirected():
    with open("dinos.txt") as f:
        examples = f.readlines()
    np.random.seed(0)
    np.random.shuffle(examples)
    a = model(examples, ix_to_char, char_to_ix, 200)


def generateTestCases():
    """Return the grader's test-case table.

    Returns:
        dict: maps part name ('clip' | 'sample' | 'optimize' | 'model')
        to {'partId': str, 'testCases': [{'testInput': tuple,
        'testOutput': recorded result}]}, using the module-level values
        computed above at import time.
    """
    testCases = {
        'clip': {
            'partId': 'sYLqC',
            'testCases': [
                {
                    'testInput': (gradients1, 10),
                    'testOutput': gradients
                }
            ]
        },
        'sample': {
            'partId': 'QxiNo',
            'testCases': [
                {
                    'testInput': (parameters, char_to_ix, 0),
                    'testOutput': indexes
                }
            ]
        },
        'optimize': {
            'partId': 'x2pxm',
            'testCases': [
                {
                    'testInput': (X, Y, a_prev, parameters3),
                    'testOutput': (loss, g, a_last)
                }
            ]
        },
        'model': {
            'partId': 'mJTOb',
            'testCases': [
                {
                    'testInput': (examples, ix_to_char, char_to_ix, 200),
                    'testOutput': a
                }
            ]
        }
    }
    return testCases