Canceled hyperparameter search at 3x2400/2x1200, added future plans to presentation

master
Tuan-Dat Tran 2021-06-09 08:12:50 +00:00
parent 33941658b9
commit d3807c1fc6
4 changed files with 638 additions and 171 deletions

View File

@@ -3,7 +3,7 @@
{
"cell_type": "code",
"execution_count": 1,
"id": "25685460",
"id": "71b073fd",
"metadata": {},
"outputs": [],
"source": [
@@ -14,7 +14,7 @@
{
"cell_type": "code",
"execution_count": 2,
"id": "a7b5d6ab",
"id": "faccec4b",
"metadata": {},
"outputs": [],
"source": [
@@ -30,7 +30,7 @@
{
"cell_type": "code",
"execution_count": 3,
"id": "8a37e95b",
"id": "557006eb",
"metadata": {},
"outputs": [],
"source": [
@@ -52,7 +52,7 @@
{
"cell_type": "code",
"execution_count": 4,
"id": "53e288a8",
"id": "311c9b66",
"metadata": {},
"outputs": [],
"source": [
@@ -73,7 +73,7 @@
{
"cell_type": "code",
"execution_count": 5,
"id": "0e638fb8",
"id": "53b9bb75",
"metadata": {},
"outputs": [],
"source": [
@@ -97,7 +97,7 @@
{
"cell_type": "code",
"execution_count": 6,
"id": "5080bf16",
"id": "34c391e7",
"metadata": {},
"outputs": [],
"source": [
@@ -111,7 +111,7 @@
{
"cell_type": "code",
"execution_count": 7,
"id": "41f26d6c",
"id": "22346c9c",
"metadata": {},
"outputs": [],
"source": [
@@ -138,7 +138,7 @@
{
"cell_type": "code",
"execution_count": 8,
"id": "9bf4ea28",
"id": "dc81d9b9",
"metadata": {},
"outputs": [],
"source": [
@@ -174,7 +174,7 @@
{
"cell_type": "code",
"execution_count": 9,
"id": "fde0b3cf",
"id": "1a80e403",
"metadata": {},
"outputs": [],
"source": [
@@ -197,7 +197,7 @@
{
"cell_type": "code",
"execution_count": 10,
"id": "9712dd25",
"id": "e3dd7348",
"metadata": {},
"outputs": [],
"source": [
@@ -218,7 +218,7 @@
{
"cell_type": "code",
"execution_count": 11,
"id": "6decb953",
"id": "5bbb81d7",
"metadata": {},
"outputs": [],
"source": [
@@ -229,8 +229,8 @@
},
{
"cell_type": "code",
"execution_count": null,
"id": "ff6a75e9",
"execution_count": 12,
"id": "6f2dc487",
"metadata": {},
"outputs": [
{
@@ -939,7 +939,427 @@
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 2400\n"
" Dense Neurons 2: 2400\n",
"Accuracy: 78.00\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.61\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.68\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.00\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.21\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.99\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.20\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 77.88\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 2\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 77.83\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 600\n",
"Accuracy: 77.78\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 77.93\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 77.98\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.02\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.04\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 77.59\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 77.97\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 77.55\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 600\n",
"Accuracy: 77.38\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 77.40\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 77.10\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 600\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 76.91\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.61\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.61\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.44\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.40\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.46\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.59\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.17\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.02\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.15\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 77.58\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1200\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 77.02\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.60\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.39\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.58\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.49\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.37\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.45\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.02\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 77.69\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 600\n",
"Accuracy: 77.95\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 77.59\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 77.53\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 1800\n",
" Dense Count 2: 3\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 77.26\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.33\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.39\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 1800\n",
"Accuracy: 78.46\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 1\n",
" Dense Neurons 2: 2400\n",
"Accuracy: 78.38\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 600\n",
"Accuracy: 78.62\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1200\n",
"Accuracy: 78.02\n",
"Testing with: Threshold: 70\n",
" Leeway: 0\n",
" Epoch: 20\n",
" Dense Count 1: 3\n",
" Dense Neurons 1: 2400\n",
" Dense Count 2: 2\n",
" Dense Neurons 2: 1800\n"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<timed exec>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n",
"\u001b[0;32m<ipython-input-9-47e2893956f1>\u001b[0m in \u001b[0;36mget_avg_acc\u001b[0;34m(X_train, y_train, X_test, y_test, epoch, dcount, dnons, dcount2, dnons2)\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mAVG_FROM\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbuild_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdcount\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdnons\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdcount2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdnons2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mX_train\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m model.fit(X_train, y_train, \n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mepoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m128\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m 1129\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1130\u001b[0m steps_per_execution=self._steps_per_execution)\n\u001b[0;32m-> 1131\u001b[0;31m val_logs = self.evaluate(\n\u001b[0m\u001b[1;32m 1132\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mval_x\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1133\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mval_y\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mevaluate\u001b[0;34m(self, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, return_dict)\u001b[0m\n\u001b[1;32m 1387\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'test'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstep_num\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_r\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1388\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_test_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1389\u001b[0;31m \u001b[0mtmp_logs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtest_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1390\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1391\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 826\u001b[0m \u001b[0mtracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 827\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtm\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 828\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 829\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"xla\"\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_experimental_compile\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m\"nonXla\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 830\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 860\u001b[0m \u001b[0;31m# In this case we have not created variables on the first call. So we can\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 861\u001b[0m \u001b[0;31m# run the first trace but we should fail if variables are created.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 862\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_stateful_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 863\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_created_variables\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 864\u001b[0m raise ValueError(\"Creating variables on a non-first call to a function\"\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2940\u001b[0m (graph_function,\n\u001b[1;32m 2941\u001b[0m filtered_flat_args) = self._maybe_define_function(args, kwargs)\n\u001b[0;32m-> 2942\u001b[0;31m return graph_function._call_flat(\n\u001b[0m\u001b[1;32m 2943\u001b[0m filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access\n\u001b[1;32m 2944\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[0;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[1;32m 1916\u001b[0m and executing_eagerly):\n\u001b[1;32m 1917\u001b[0m \u001b[0;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1918\u001b[0;31m return self._build_call_outputs(self._inference_function.call(\n\u001b[0m\u001b[1;32m 1919\u001b[0m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[1;32m 1920\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[1;32m 553\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0m_InterpolateFunctionError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 554\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcancellation_manager\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 555\u001b[0;31m outputs = execute.execute(\n\u001b[0m\u001b[1;32m 556\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msignature\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 557\u001b[0m \u001b[0mnum_outputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_num_outputs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/jupyterhub/lib/python3.8/site-packages/tensorflow/python/eager/execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[0;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[1;32m 57\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 59\u001b[0;31m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0m\u001b[1;32m 60\u001b[0m inputs, attrs, num_outputs)\n\u001b[1;32m 61\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
]
}
],
@@ -974,13 +1394,14 @@
" 'DENSE_COUNT2': dc2,\n",
" 'DENSE_NEURON2': dn2,\n",
" 'Accuracy': acc}, ignore_index=True)\n",
" print(f\"Accuracy: {acc*100:.2f}\\n\\n\")"
" print(f\"Accuracy: {acc*100:.2f}\\n\\n\")\n",
" result.to_csv('results.csv', header=False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "89a47c03",
"execution_count": 13,
"id": "88b3193a",
"metadata": {},
"outputs": [],
"source": [
@@ -989,8 +1410,8 @@
},
{
"cell_type": "code",
"execution_count": null,
"id": "7520408c",
"execution_count": 14,
"id": "5219e081",
"metadata": {},
"outputs": [],
"source": [

File diff suppressed because one or more lines are too long

View File

@ -1 +1,139 @@
,Threshold,Leeway,Epoch,DENSE_COUNT1,DENSE_NEURON1,DENSE_COUNT2,DENSE_NEURON2,Accuracy
0,70.0,0.0,20.0,1.0,600.0,1.0,600.0,0.7682723144690196
1,70.0,0.0,20.0,1.0,600.0,1.0,1200.0,0.7767373164494832
2,70.0,0.0,20.0,1.0,600.0,1.0,1800.0,0.7784842669963836
3,70.0,0.0,20.0,1.0,600.0,1.0,2400.0,0.778047525882721
4,70.0,0.0,20.0,1.0,600.0,2.0,600.0,0.7818882505098979
5,70.0,0.0,20.0,1.0,600.0,2.0,1200.0,0.7789466917514801
6,70.0,0.0,20.0,1.0,600.0,2.0,1800.0,0.7772382815678914
7,70.0,0.0,20.0,1.0,600.0,2.0,2400.0,0.7803982019424438
8,70.0,0.0,20.0,1.0,600.0,3.0,600.0,0.7823635200659435
9,70.0,0.0,20.0,1.0,600.0,3.0,1200.0,0.784881184498469
10,70.0,0.0,20.0,1.0,600.0,3.0,1800.0,0.7835324327150981
11,70.0,0.0,20.0,1.0,600.0,3.0,2400.0,0.7761978129545848
12,70.0,0.0,20.0,1.0,1200.0,1.0,600.0,0.7726268450419108
13,70.0,0.0,20.0,1.0,1200.0,1.0,1200.0,0.7799614687760671
14,70.0,0.0,20.0,1.0,1200.0,1.0,1800.0,0.7784328818321228
15,70.0,0.0,20.0,1.0,1200.0,1.0,2400.0,0.7781631370385488
16,70.0,0.0,20.0,1.0,1200.0,2.0,600.0,0.7773410419623057
17,70.0,0.0,20.0,1.0,1200.0,2.0,1200.0,0.7804495811462402
18,70.0,0.0,20.0,1.0,1200.0,2.0,1800.0,0.7802440643310546
19,70.0,0.0,20.0,1.0,1200.0,2.0,2400.0,0.7829672475655873
20,70.0,0.0,20.0,1.0,1200.0,3.0,600.0,0.7805908799171448
21,70.0,0.0,20.0,1.0,1200.0,3.0,1200.0,0.783121387163798
22,70.0,0.0,20.0,1.0,1200.0,3.0,1800.0,0.7843673706054688
23,70.0,0.0,20.0,1.0,1200.0,3.0,2400.0,0.7823763648668925
24,70.0,0.0,20.0,1.0,1800.0,1.0,600.0,0.7720873514811198
25,70.0,0.0,20.0,1.0,1800.0,1.0,1200.0,0.7787925561269124
26,70.0,0.0,20.0,1.0,1800.0,1.0,1800.0,0.7745921631654104
27,70.0,0.0,20.0,1.0,1800.0,1.0,2400.0,0.779884394009908
28,70.0,0.0,20.0,1.0,1800.0,2.0,600.0,0.7813487490018208
29,70.0,0.0,20.0,1.0,1800.0,2.0,1200.0,0.7857803463935852
30,70.0,0.0,20.0,1.0,1800.0,2.0,1800.0,0.7819010992844899
31,70.0,0.0,20.0,1.0,1800.0,2.0,2400.0,0.7834810515244802
32,70.0,0.0,20.0,1.0,1800.0,3.0,600.0,0.7837379515171051
33,70.0,0.0,20.0,1.0,1800.0,3.0,1200.0,0.7852922280629476
34,70.0,0.0,20.0,1.0,1800.0,3.0,1800.0,0.7848426441351573
35,70.0,0.0,20.0,1.0,1800.0,3.0,2400.0,0.7807193279266358
36,70.0,0.0,20.0,1.0,2400.0,1.0,600.0,0.7676300545533498
37,70.0,0.0,20.0,1.0,2400.0,1.0,1200.0,0.7773153483867645
38,70.0,0.0,20.0,1.0,2400.0,1.0,1800.0,0.7793063541253408
39,70.0,0.0,20.0,1.0,2400.0,1.0,2400.0,0.7774823367595672
40,70.0,0.0,20.0,1.0,2400.0,2.0,600.0,0.7816827217737834
41,70.0,0.0,20.0,1.0,2400.0,2.0,1200.0,0.7836480398972829
42,70.0,0.0,20.0,1.0,2400.0,2.0,1800.0,0.7850867052872975
43,70.0,0.0,20.0,1.0,2400.0,2.0,2400.0,0.7797687868277232
44,70.0,0.0,20.0,1.0,2400.0,3.0,600.0,0.7827360272407532
45,70.0,0.0,20.0,1.0,2400.0,3.0,1200.0,0.7847912609577179
46,70.0,0.0,20.0,1.0,2400.0,3.0,1800.0,0.7861785511175792
47,70.0,0.0,20.0,1.0,2400.0,3.0,2400.0,0.7813230554262797
48,70.0,0.0,20.0,2.0,600.0,1.0,600.0,0.7796917120615642
49,70.0,0.0,20.0,2.0,600.0,1.0,1200.0,0.778869624932607
50,70.0,0.0,20.0,2.0,600.0,1.0,1800.0,0.7777263998985291
51,70.0,0.0,20.0,2.0,600.0,1.0,2400.0,0.7782016774018605
52,70.0,0.0,20.0,2.0,600.0,2.0,600.0,0.7792292873064677
53,70.0,0.0,20.0,2.0,600.0,2.0,1200.0,0.7812845230102539
54,70.0,0.0,20.0,2.0,600.0,2.0,1800.0,0.7793705920378368
55,70.0,0.0,20.0,2.0,600.0,2.0,2400.0,0.780757870276769
56,70.0,0.0,20.0,2.0,600.0,3.0,600.0,0.7792164385318756
57,70.0,0.0,20.0,2.0,600.0,3.0,1200.0,0.7799743115901947
58,70.0,0.0,20.0,2.0,600.0,3.0,1800.0,0.7761335849761963
59,70.0,0.0,20.0,2.0,600.0,3.0,2400.0,0.7748233755429585
60,70.0,0.0,20.0,2.0,1200.0,1.0,600.0,0.7816955665747325
61,70.0,0.0,20.0,2.0,1200.0,1.0,1200.0,0.7834938963254293
62,70.0,0.0,20.0,2.0,1200.0,1.0,1800.0,0.7819010933240255
63,70.0,0.0,20.0,2.0,1200.0,1.0,2400.0,0.7837508042653402
64,70.0,0.0,20.0,2.0,1200.0,2.0,600.0,0.7782915830612183
65,70.0,0.0,20.0,2.0,1200.0,2.0,1200.0,0.7820809185504913
66,70.0,0.0,20.0,2.0,1200.0,2.0,1800.0,0.7830956975618998
67,70.0,0.0,20.0,2.0,1200.0,2.0,2400.0,0.7822607517242431
68,70.0,0.0,20.0,2.0,1200.0,3.0,600.0,0.7854463696479798
69,70.0,0.0,20.0,2.0,1200.0,3.0,1200.0,0.7852536916732789
70,70.0,0.0,20.0,2.0,1200.0,3.0,1800.0,0.781425819794337
71,70.0,0.0,20.0,2.0,1200.0,3.0,2400.0,0.7780732174714406
72,70.0,0.0,20.0,2.0,1800.0,1.0,600.0,0.7881181756655375
73,70.0,0.0,20.0,2.0,1800.0,1.0,1200.0,0.7832626859347026
74,70.0,0.0,20.0,2.0,1800.0,1.0,1800.0,0.784264612197876
75,70.0,0.0,20.0,2.0,1800.0,1.0,2400.0,0.7857675015926361
76,70.0,0.0,20.0,2.0,1800.0,2.0,600.0,0.7841618498166402
77,70.0,0.0,20.0,2.0,1800.0,2.0,1200.0,0.7858317295710245
78,70.0,0.0,20.0,2.0,1800.0,2.0,1800.0,0.7843930661678314
79,70.0,0.0,20.0,2.0,1800.0,2.0,2400.0,0.781836861371994
80,70.0,0.0,20.0,2.0,1800.0,3.0,600.0,0.7841618518034618
81,70.0,0.0,20.0,2.0,1800.0,3.0,1200.0,0.7839563290278116
82,70.0,0.0,20.0,2.0,1800.0,3.0,1800.0,0.7809248546759288
83,70.0,0.0,20.0,2.0,1800.0,3.0,2400.0,0.7755812446276347
84,70.0,0.0,20.0,2.0,2400.0,1.0,600.0,0.7810276170571645
85,70.0,0.0,20.0,2.0,2400.0,1.0,1200.0,0.7848683357238769
86,70.0,0.0,20.0,2.0,2400.0,1.0,1800.0,0.7824020544687907
87,70.0,0.0,20.0,2.0,2400.0,1.0,2400.0,0.7800000031789144
88,70.0,0.0,20.0,2.0,2400.0,2.0,600.0,0.786101472377777
89,70.0,0.0,20.0,2.0,2400.0,2.0,1200.0,0.786769425868988
90,70.0,0.0,20.0,2.0,2400.0,2.0,1800.0,0.7799743076165517
91,70.0,0.0,20.0,2.0,2400.0,2.0,2400.0,0.7820809264977773
92,70.0,0.0,20.0,2.0,2400.0,3.0,600.0,0.7899293502171835
93,70.0,0.0,20.0,2.0,2400.0,3.0,1200.0,0.7819910069306691
94,70.0,0.0,20.0,2.0,2400.0,3.0,1800.0,0.7788439293702444
95,70.0,0.0,20.0,2.0,2400.0,3.0,2400.0,0.7783172746499379
96,70.0,0.0,20.0,3.0,600.0,1.0,600.0,0.7778034687042237
97,70.0,0.0,20.0,3.0,600.0,1.0,1200.0,0.7792549788951874
98,70.0,0.0,20.0,3.0,600.0,1.0,1800.0,0.7797687828540802
99,70.0,0.0,20.0,3.0,600.0,1.0,2400.0,0.7802440543969472
100,70.0,0.0,20.0,3.0,600.0,2.0,600.0,0.7803982039292653
101,70.0,0.0,20.0,3.0,600.0,2.0,1200.0,0.7759280701478323
102,70.0,0.0,20.0,3.0,600.0,2.0,1800.0,0.7796917140483857
103,70.0,0.0,20.0,3.0,600.0,2.0,2400.0,0.7755041797955831
104,70.0,0.0,20.0,3.0,600.0,3.0,600.0,0.7737572213013967
105,70.0,0.0,20.0,3.0,600.0,3.0,1200.0,0.7740398168563842
106,70.0,0.0,20.0,3.0,600.0,3.0,1800.0,0.7710468788941701
107,70.0,0.0,20.0,3.0,600.0,3.0,2400.0,0.7690944095452626
108,70.0,0.0,20.0,3.0,1200.0,1.0,600.0,0.7860501031080882
109,70.0,0.0,20.0,3.0,1200.0,1.0,1200.0,0.7860500951608022
110,70.0,0.0,20.0,3.0,1200.0,1.0,1800.0,0.7844444433848063
111,70.0,0.0,20.0,3.0,1200.0,1.0,2400.0,0.7839820186297098
112,70.0,0.0,20.0,3.0,1200.0,2.0,600.0,0.7845600485801697
113,70.0,0.0,20.0,3.0,1200.0,2.0,1200.0,0.7859344879786173
114,70.0,0.0,20.0,3.0,1200.0,2.0,1800.0,0.7817469457785289
115,70.0,0.0,20.0,3.0,1200.0,2.0,2400.0,0.7801669915517171
116,70.0,0.0,20.0,3.0,1200.0,3.0,600.0,0.7815414230028789
117,70.0,0.0,20.0,3.0,1200.0,3.0,1200.0,0.7769813776016236
118,70.0,0.0,20.0,3.0,1200.0,3.0,1800.0,0.7757867693901062
119,70.0,0.0,20.0,3.0,1200.0,3.0,2400.0,0.7701862533887227
120,70.0,0.0,20.0,3.0,1800.0,1.0,600.0,0.7860115627447765
121,70.0,0.0,20.0,3.0,1800.0,1.0,1200.0,0.7839049438635508
122,70.0,0.0,20.0,3.0,1800.0,1.0,1800.0,0.7858060359954834
123,70.0,0.0,20.0,3.0,1800.0,1.0,2400.0,0.7849325656890869
124,70.0,0.0,20.0,3.0,1800.0,2.0,600.0,0.7836608866850535
125,70.0,0.0,20.0,3.0,1800.0,2.0,1200.0,0.7844958245754242
126,70.0,0.0,20.0,3.0,1800.0,2.0,1800.0,0.7802055180072784
127,70.0,0.0,20.0,3.0,1800.0,2.0,2400.0,0.7769428412119548
128,70.0,0.0,20.0,3.0,1800.0,3.0,600.0,0.7795247296492259
129,70.0,0.0,20.0,3.0,1800.0,3.0,1200.0,0.7758638401826222
130,70.0,0.0,20.0,3.0,1800.0,3.0,1800.0,0.7753114978472392
131,70.0,0.0,20.0,3.0,1800.0,3.0,2400.0,0.7726396898428599
132,70.0,0.0,20.0,3.0,2400.0,1.0,600.0,0.783314069112142
133,70.0,0.0,20.0,3.0,2400.0,1.0,1200.0,0.7838664094607035
134,70.0,0.0,20.0,3.0,2400.0,1.0,1800.0,0.7846371273199717
135,70.0,0.0,20.0,3.0,2400.0,1.0,2400.0,0.7837508062521616
136,70.0,0.0,20.0,3.0,2400.0,2.0,600.0,0.7862427830696106
137,70.0,0.0,20.0,3.0,2400.0,2.0,1200.0,0.7801669875780741
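
Once the sweep (or its cancelled prefix) is on disk, the CSV above can be read back to pick the strongest configuration. A small sketch, assuming the header row shown here is present (the notebook writes with header=False, so a re-export may need explicit column names) and that the unnamed first column is the row index:

import pandas as pd

df = pd.read_csv("results.csv", index_col=0)   # first column is the row index
best = df.loc[df["Accuracy"].idxmax()]
print(f"best accuracy {best['Accuracy']:.4f} with "
      f"{int(best['DENSE_COUNT1'])}x{int(best['DENSE_NEURON1'])} / "
      f"{int(best['DENSE_COUNT2'])}x{int(best['DENSE_NEURON2'])} dense layers")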
