diff --git a/DeepSpeech.ipynb b/DeepSpeech.ipynb
index ee16703e..54d2888a 100644
--- a/DeepSpeech.ipynb
+++ b/DeepSpeech.ipynb
@@ -1322,7 +1322,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Now as training and test are done, we persist the results alongside with the involved hyper parameters for further reporting."
+    "Now that training and test are done, we persist the results along with the involved hyperparameters for further reporting."
    ]
   },
   {
@@ -1373,6 +1373,48 @@
     " }, dump_file, sort_keys=True, indent = 4)\n"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's also re-populate a central JS file that contains all the dumps at once."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "written = False\n",
+    "logs_dir = \"logs\"\n",
+    "\n",
+    "# All direct subdirectories of the logs directory\n",
+    "dirs = [os.path.join(logs_dir, o) for o in os.listdir(logs_dir) if os.path.isdir(os.path.join(logs_dir, o))]\n",
+    "\n",
+    "# Let's first populate a temporary file and rename it afterwards - this guarantees an interruption-free web experience\n",
+    "nhf = '%s/%s' % (logs_dir, 'new_hyper.js')\n",
+    "\n",
+    "with open(nhf, 'w') as dump_file:\n",
+    "    # Assign a global variable that the report page can pick up after loading the data as a regular script\n",
+    "    dump_file.write('window.ALL_THE_DATA = [')\n",
+    "    for d in dirs:\n",
+    "        hf = os.path.join(d, \"hyper.json\")\n",
+    "        if os.path.isfile(hf):\n",
+    "            # Separate by comma if something was already written\n",
+    "            if written:\n",
+    "                dump_file.write(',\\n')\n",
+    "            written = True\n",
+    "            # Append the whole file\n",
+    "            dump_file.write(open(hf, 'r').read())\n",
+    "    dump_file.write('];')\n",
+    "    \n",
+    "# Finally we rename the temporary file and overwrite a potentially existing active one\n",
+    "os.rename(nhf, '%s/%s' % (logs_dir, 'hyper.js'))"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,