Report improvements and bug fixes in the code

François Pelletier 2019-05-03 00:11:36 -04:00
parent 65b9a00d02
commit 596bdab5e1
4 changed files with 182 additions and 151 deletions


@@ -13,7 +13,6 @@ import numpy as np
import time
import NeuralNode
import metrics
import random
# the name of your class
# NeuralNet for the Neural Network model
@@ -21,7 +20,7 @@ import random
class NeuralNet: # class name to be changed
-def __init__(self, layers_size, all_labels, **kwargs):
+def __init__(self, layers_size, all_labels, input_weights=None, **kwargs):
"""
This is an initializer.
You can pass other parameters as needed,
@@ -36,11 +35,11 @@ class NeuralNet: # class name to be changed
# Hidden layers
for j in range(self.n_hidden_layers):
for i in range(self.layers_size[j+1]):
-self.layers[j].append(NeuralNode.NeuralNode(layers_size[j]))
+self.layers[j].append(NeuralNode.NeuralNode(layers_size[j],input_weights))
# Output layer
for i in range(layers_size[-1]):
-self.layers[-1].append(NeuralNode.NeuralNode(layers_size[-2]))
+self.layers[-1].append(NeuralNode.NeuralNode(layers_size[-2],input_weights))
def __str__(self):
outstr = ""

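Note on the NeuralNet change: the constructor now takes an optional input_weights argument and hands it to every NeuralNode it builds. Below is a minimal, self-contained Python sketch of that wiring; the attribute setup (self.layers_size, self.all_labels, self.layers as a list of per-layer node lists, n_hidden_layers derived from len(layers_size)) is assumed from context, not copied from the file.

import numpy as np

class NeuralNode:
    # minimal stand-in for NeuralNode.NeuralNode: one weight per input plus a bias weight
    def __init__(self, input_size, input_weights=None):
        self.input_size = input_size + 1
        if input_weights is None:
            self.input_weights = np.random.uniform(-1, 1, self.input_size)
        else:
            self.input_weights = np.repeat(input_weights, self.input_size)

class NeuralNet:
    def __init__(self, layers_size, all_labels, input_weights=None, **kwargs):
        self.layers_size = layers_size
        self.all_labels = all_labels
        self.n_hidden_layers = len(layers_size) - 2      # assumed layout: [input, hidden..., output]
        self.layers = [[] for _ in range(self.n_hidden_layers + 1)]
        # hidden layers: each node in layer j+1 is fed by the layers_size[j] outputs below it
        for j in range(self.n_hidden_layers):
            for _ in range(self.layers_size[j + 1]):
                self.layers[j].append(NeuralNode(layers_size[j], input_weights))
        # output layer, fed by the last hidden layer
        for _ in range(layers_size[-1]):
            self.layers[-1].append(NeuralNode(layers_size[-2], input_weights))

# usage: 4 inputs, one hidden layer of 3 nodes, 2 outputs, every weight initialised to 0.1
net = NeuralNet([4, 3, 2], all_labels=[0, 1], input_weights=0.1)
print(net.layers[0][0].input_weights)    # [0.1 0.1 0.1 0.1 0.1]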

@@ -11,10 +11,10 @@ import NeuralNetUtils as nnu
class NeuralNode:
def __init__(self, input_size, input_weights=None, **kwargs):
self.input_size = input_size+1
-if (input_weights == None):
+if (input_weights is None):
self.input_weights = np.random.uniform(-1,1,self.input_size)
else:
-self.input_weights = input_weights
+self.input_weights = np.repeat(input_weights,self.input_size)
def __str__(self):
return str(self.input_weights)

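Note on the NeuralNode change: the fix swaps the == None test for is None and repeats a scalar input_weights across all inputs instead of assigning it as-is. A small hypothetical helper illustrating both points (init_weights is not a name from the file, and it assumes input_weights is meant to be a single scalar initial value):

import numpy as np

def init_weights(input_size, input_weights=None):
    n = input_size + 1                      # +1 for the bias weight
    # 'is None' is the safe identity check; '== None' becomes an element-wise
    # comparison (and an ambiguous truth value) if an array is ever passed in
    if input_weights is None:
        return np.random.uniform(-1, 1, n)  # default: random weights in [-1, 1]
    # a lone scalar would leave the node without one weight per input,
    # so it is repeated n times
    return np.repeat(input_weights, n)

print(init_weights(4))         # 5 random values in [-1, 1]
print(init_weights(4, 0.5))    # [0.5 0.5 0.5 0.5 0.5]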
File diff suppressed because one or more lines are too long


@@ -41,13 +41,13 @@ def prediction_metrics(cm,obs_labels,pred_labels):
if (not np.any(np.isnan(myPrecision))):
precision.append(myPrecision)
except:
-raise
+myPrecision = 0
try:
myRecall = cm[label_num,label_num] / sum(cm[label_num,:])
if (not np.any(np.isnan(myRecall))):
recall.append(myRecall)
except:
-raise
+myRecall = 0
return accuracy, precision, recall
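
Note on the metrics change: instead of re-raising when the precision or recall denominator is degenerate (a class never predicted or never observed), the value now falls back to 0. Below is a compact sketch of per-class precision and recall from a confusion matrix in that spirit; the function name, the choice to append the 0 so the lists stay aligned with the labels, and the precision denominator (column sum, whose line sits outside the hunk above) are assumptions.

import numpy as np

def per_class_precision_recall(cm):
    # assumed layout: cm[i, j] = observations of class i predicted as class j
    precision, recall = [], []
    for k in range(cm.shape[0]):
        predicted_k = cm[:, k].sum()   # everything predicted as class k
        observed_k = cm[k, :].sum()    # everything that truly is class k
        # fall back to 0 instead of raising on an empty class
        precision.append(cm[k, k] / predicted_k if predicted_k > 0 else 0.0)
        recall.append(cm[k, k] / observed_k if observed_k > 0 else 0.0)
    accuracy = np.trace(cm) / cm.sum()
    return accuracy, precision, recall

cm = np.array([[5, 0, 1],
               [2, 3, 0],
               [0, 0, 0]])              # third class never observed
print(per_class_precision_recall(cm))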