diff --git a/recurrent-neural-networks/char-rnn/Character_Level_RNN_Exercise.ipynb b/recurrent-neural-networks/char-rnn/Character_Level_RNN_Exercise.ipynb index 88c049f97d..a3b8c561d4 100644 --- a/recurrent-neural-networks/char-rnn/Character_Level_RNN_Exercise.ipynb +++ b/recurrent-neural-networks/char-rnn/Character_Level_RNN_Exercise.ipynb @@ -324,7 +324,7 @@ "In `__init__` the suggested structure is as follows:\n", "* Create and store the necessary dictionaries (this has been done for you)\n", "* Define an LSTM layer that takes as params: an input size (the number of characters), a hidden layer size `n_hidden`, a number of layers `n_layers`, a dropout probability `drop_prob`, and a batch_first boolean (True, since we are batching)\n", - "* Define a dropout layer with `dropout_prob`\n", + "* Define a dropout layer with `drop_prob`\n", "* Define a fully-connected layer with params: input size `n_hidden` and output size (the number of characters)\n", "* Finally, initialize the weights (again, this has been given)\n", "\n", @@ -557,7 +557,7 @@ }, "outputs": [], "source": [ - "## TODO: set you model hyperparameters\n", + "## TODO: set your model hyperparameters\n", "# define and print the net\n", "n_hidden=\n", "n_layers=\n", diff --git a/recurrent-neural-networks/char-rnn/Character_Level_RNN_Solution.ipynb b/recurrent-neural-networks/char-rnn/Character_Level_RNN_Solution.ipynb index 9e534b884e..9f1c228c10 100644 --- a/recurrent-neural-networks/char-rnn/Character_Level_RNN_Solution.ipynb +++ b/recurrent-neural-networks/char-rnn/Character_Level_RNN_Solution.ipynb @@ -383,7 +383,7 @@ "In `__init__` the suggested structure is as follows:\n", "* Create and store the necessary dictionaries (this has been done for you)\n", "* Define an LSTM layer that takes as params: an input size (the number of characters), a hidden layer size `n_hidden`, a number of layers `n_layers`, a dropout probability `drop_prob`, and a batch_first boolean (True, since we are batching)\n", - "* Define a dropout layer with `dropout_prob`\n", + "* Define a dropout layer with `drop_prob`\n", "* Define a fully-connected layer with params: input size `n_hidden` and output size (the number of characters)\n", "* Finally, initialize the weights (again, this has been given)\n", "\n",