formatage de code

This commit is contained in:
François Pelletier 2019-05-03 00:36:22 -04:00
parent 596bdab5e1
commit 4a3a32f4e9
2 changed files with 50 additions and 17 deletions

View file

@@ -43,7 +43,8 @@
"metadata": {},
"outputs": [],
"source": [
"train1, train_labels1, test1, test_labels1 = ld.load_iris_dataset(train_ratio = 0.7)"
"train1, train_labels1, test1, test_labels1 = (\n",
" ld.load_iris_dataset(train_ratio = 0.7))"
]
},
{
@@ -86,12 +87,17 @@
"range_lc = range(30,len(train_labels1))\n",
"for i in range_lc:\n",
" range_lc_split_test = np.array_split(range(i),10)\n",
" range_lc_split_train = [np.setdiff1d(range(i),t) for t in range_lc_split_test]\n",
" range_lc_split_train = (\n",
" [np.setdiff1d(range(i),t) for t in range_lc_split_test])\n",
" accuracy_cv = []\n",
" for r_i in range(10):\n",
" try:\n",
" training = dt1.train(train1[range_lc_split_train[r_i]], train_labels1[range_lc_split_train[r_i]],verbose=False)\n",
" test = dt1.test(train1[range_lc_split_test[r_i]], train_labels1[range_lc_split_test[r_i]],verbose=False)\n",
" training = dt1.train(train1[range_lc_split_train[r_i]], \n",
" train_labels1[range_lc_split_train[r_i]],\n",
" verbose=False)\n",
" test = dt1.test(train1[range_lc_split_test[r_i]], \n",
" train_labels1[range_lc_split_test[r_i]],\n",
" verbose=False)\n",
" accuracy_cv.append(test[1])\n",
" except:\n",
" pass\n",
@@ -164,7 +170,9 @@
"k_cv = 5\n",
"all_indices = range(len(train_labels1))\n",
"np.random.seed(12345)\n",
"indices_cv_test = np.sort(np.array_split(np.random.permutation(all_indices),k_cv))"
"indices_cv_test = (\n",
" np.sort(np.array_split(np.random.permutation(all_indices),\n",
" k_cv)))"
]
},
{
@@ -173,7 +181,8 @@
"metadata": {},
"outputs": [],
"source": [
"indices_cv_train = [np.setdiff1d(all_indices,indices_cv_test[i]) for i in range(k_cv)]"
"indices_cv_train = (\n",
" [np.setdiff1d(all_indices,indices_cv_test[i]) for i in range(k_cv)])"
]
},
{
@@ -194,8 +203,12 @@
" accuracy_cv=[]\n",
" for cv_set in range(k_cv):\n",
" nn1 = NeuralNet.NeuralNet(np.array([4,n_neurones,3]),range(3))\n",
" nn1.train(train1[indices_cv_train[cv_set]], train_labels1[indices_cv_train[cv_set]], 0.1, 1, verbose=False)\n",
" _,accuracy,_,_,_ = nn1.test(train1[indices_cv_test[cv_set]], train_labels1[indices_cv_test[cv_set]], verbose=False)\n",
" nn1.train(train1[indices_cv_train[cv_set]], \n",
" train_labels1[indices_cv_train[cv_set]], 0.1, 1, \n",
" verbose=False)\n",
" _,accuracy,_,_,_ = nn1.test(train1[indices_cv_test[cv_set]], \n",
" train_labels1[indices_cv_test[cv_set]], \n",
" verbose=False)\n",
" accuracy_cv.append(accuracy)\n",
" accuracy_cum.append(np.mean(np.array(accuracy_cv)))"
]
@@ -222,7 +235,8 @@
"metadata": {},
"outputs": [],
"source": [
"n_neurones_optimal1 = choix_n_neurones[np.where(accuracy_cum==max(accuracy_cum))[0][0]]\n",
"n_neurones_optimal1 = (\n",
" choix_n_neurones[np.where(accuracy_cum==max(accuracy_cum))[0][0]])\n",
"n_neurones_optimal1"
]
},
@@ -254,7 +268,11 @@
"lc_cum = []\n",
"for n_couches in choix_n_couches:\n",
" accuracy_cv=[]\n",
" nn1 = NeuralNet.NeuralNet(np.hstack((4,np.repeat(n_neurones_optimal1,n_couches),3)),range(3))\n",
" nn1 = NeuralNet.NeuralNet(\n",
" np.hstack((4,\n",
" np.repeat(n_neurones_optimal1,n_couches),\n",
" 3)),\n",
" range(3))\n",
" lc = nn1.train(train1, train_labels1, 0.1, 10, verbose=False)\n",
" lc_cum.append(lc)\n",
" _,accuracy,_,_,_ = nn1.test(train1, train_labels1, verbose=False)\n",
@@ -341,8 +359,14 @@
"metadata": {},
"outputs": [],
"source": [
"nn1_poidszero = NeuralNet.NeuralNet(np.hstack((4,np.repeat(n_neurones_optimal1,n_couches_optimal1),3)),range(3),input_weights=0)\n",
"lc_nn1_poidszero = nn1_poidszero.train(train1, train_labels1, 0.1, 10, verbose=False)"
"nn1_poidszero = NeuralNet.NeuralNet(\n",
" np.hstack((4,\n",
" np.repeat(n_neurones_optimal1,n_couches_optimal1),\n",
" 3)),\n",
" range(3),\n",
" input_weights=0)\n",
"lc_nn1_poidszero = (\n",
" nn1_poidszero.train(train1, train_labels1, 0.1, 10, verbose=False))"
]
},
{
@@ -358,9 +382,14 @@
"metadata": {},
"outputs": [],
"source": [
"nn1_poidsunif = NeuralNet.NeuralNet(np.hstack((4,np.repeat(n_neurones_optimal1,n_couches_optimal1),3)),range(3))\n",
"nn1_poidsunif = NeuralNet.NeuralNet(\n",
" np.hstack((4,\n",
" np.repeat(n_neurones_optimal1,n_couches_optimal1),\n",
" 3)),\n",
" range(3))\n",
"np.random.seed(12345)\n",
"lc_nn1_poidsunif = nn1_poidsunif.train(train1, train_labels1, 0.1, 10, verbose=False)"
"lc_nn1_poidsunif = (\n",
" nn1_poidsunif.train(train1, train_labels1, 0.1, 10, verbose=False))"
]
},
{
@@ -379,7 +408,11 @@
"plt.subplot(111)\n",
"plt.plot(range(10),lc_nn1_poidszero, label=\"RN_Zero\")\n",
"plt.plot(range(10),lc_nn1_poidsunif, label=\"RN_Non_Zero)\")\n",
"leg = plt.legend(loc='best', ncol=2, mode=\"expand\", shadow=True, fancybox=True)\n",
"leg = plt.legend(loc='best', \n",
" ncol=2, \n",
" mode=\"expand\", \n",
" shadow=True, \n",
" fancybox=True)\n",
"leg.get_frame().set_alpha(0.5)"
]
},

View file

@@ -41,13 +41,13 @@ def prediction_metrics(cm,obs_labels,pred_labels):
if (not np.any(np.isnan(myPrecision))):
precision.append(myPrecision)
except:
myPrecision = 0
pass
try:
myRecall = cm[label_num,label_num] / sum(cm[label_num,:])
if (not np.any(np.isnan(myRecall))):
recall.append(myRecall)
except:
myRecall = 0
pass
return accuracy, precision, recall