diff --git a/docs/examples/finetuning.ipynb b/docs/examples/finetuning.ipynb
index 819b85fa..16573392 100644
--- a/docs/examples/finetuning.ipynb
+++ b/docs/examples/finetuning.ipynb
@@ -299,7 +299,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -316,9 +316,17 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 10,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "BatchJobStatus.RUNNING\n"
+     ]
+    }
+   ],
    "source": [
     "# Wait for fine tune to complete\n",
     "\n",
@@ -351,11 +359,11 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 13,
    "metadata": {},
    "outputs": [],
    "source": [
-    "your_fine_tuned_model = \"llama-2-7b.my-first-finetune.2023-07-18-20-28-50\" # Note: you will have a different model!"
+    "your_fine_tuned_model = \"llama-2-7b.my-first-fine-tune.2023-07-19-00-48-07\" # Note: you will have a different model!"
    ]
   },
   {
@@ -374,7 +382,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -412,7 +420,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 17,
    "metadata": {},
    "outputs": [
     {
@@ -492,7 +500,7 @@
        "954 hockey "
       ]
      },
-     "execution_count": 13,
+     "execution_count": 17,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -503,16 +511,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 18,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "1.0"
+       "0.98"
       ]
      },
-     "execution_count": 14,
+     "execution_count": 18,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -524,7 +532,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [
     {
@@ -554,17 +562,25 @@
        "    </tr>\n",
        "  </thead>\n",
        "  <tbody>\n",
+       "    <tr>\n",
+       "      <th>974</th>\n",
+       "      <td>From: maX &lt;maX@maxim.rinaco.msk.su&gt;\\nSubject: ...</td>\n",
+       "      <td>hockey</td>\n",
+       "      <td>baseball</td>\n",
+       "    </tr>\n",
        "  </tbody>\n",
        "</table>\n",
        "</div>"
       ],
       "text/plain": [
-       "Empty DataFrame\n",
-       "Columns: [raw_prompt, response, predicted_response]\n",
-       "Index: []"
+       " raw_prompt response \\\n",
+       "974 From: maX <maX@maxim.rinaco.msk.su>\\nSubject: ... hockey \n",
+       "\n",
+       " predicted_response \n",
+       "974 baseball "
       ]
      },
-     "execution_count": 15,
+     "execution_count": 19,
      "metadata": {},
      "output_type": "execute_result"
     },