nn.py

import numpy as np


class NeuralNetwork:
    def __init__(self, input_layer_size=3, hidden_layer_size=4, output_layer_size=2):
        self.input_layer_size = input_layer_size
        self.hidden_layer_size = hidden_layer_size
        self.output_layer_size = output_layer_size
        # Weights are initialised uniformly in [-0.5, 0.5]; biases start at zero
        self.weights_input_to_hidden = np.random.uniform(-0.5, 0.5, (self.hidden_layer_size, self.input_layer_size))
        self.weights_hidden_to_output = np.random.uniform(-0.5, 0.5, (self.output_layer_size, self.hidden_layer_size))
        self.bias_input_to_hidden = np.zeros((self.hidden_layer_size, 1))
        self.bias_hidden_to_output = np.zeros((self.output_layer_size, 1))
        self.epochs = 3000  # Default
        self.learning_rate = 0.1  # Default

    def feedforward(self, data):
        # Forward propagation (to hidden layer); hidden activations are cached for backprop
        hidden_raw = self.bias_input_to_hidden + self.weights_input_to_hidden @ data
        self.hidden = 1 / (1 + np.exp(-hidden_raw))  # sigmoid
        # Forward propagation (to output layer)
        output_raw = self.bias_hidden_to_output + self.weights_hidden_to_output @ self.hidden
        output = 1 / (1 + np.exp(-output_raw))
        return output

    def backprop(self, data, output, result):
        # Backpropagation (output layer)
        delta_output = output - result
        # Backpropagation (hidden layer); the hidden delta uses the output weights
        # before they are updated below
        delta_hidden = np.transpose(self.weights_hidden_to_output) @ delta_output * (self.hidden * (1 - self.hidden))
        # Gradient-descent updates for both layers
        self.weights_hidden_to_output += -self.learning_rate * delta_output @ np.transpose(self.hidden)
        self.bias_hidden_to_output += -self.learning_rate * delta_output
        self.weights_input_to_hidden += -self.learning_rate * delta_hidden @ np.transpose(data)
        self.bias_input_to_hidden += -self.learning_rate * delta_hidden

    def get(self, data):
        data = np.reshape(data, (-1, 1))
        return self.feedforward(data)

    def learning(self, dataset, results, epochs, learning_rate):
        self.epochs = epochs
        self.learning_rate = learning_rate
        e_loss = 0
        e_correct = 0
        # Learning
        for epoch in range(epochs):
            print(f"Epoch {epoch}")
            for data, result in zip(dataset, results):
                data = np.reshape(data, (-1, 1))
                result = np.reshape(result, (-1, 1))
                output = self.feedforward(data)
                # Loss / Error calculation
                e_loss += 1 / len(output) * np.sum((output - result) ** 2, axis=0)
                e_correct += int(np.argmax(output) == np.argmax(result))
                self.backprop(data, output, result)
            # Print some debug info between epochs
            print(f"Loss: {round((e_loss[0] / len(dataset)) * 100, 3)}%")
            print(f"Accuracy: {round((e_correct / len(dataset)) * 100, 3)}%")
            e_loss = 0
            e_correct = 0

    def save(self, filename):
        np.savez(filename,
                 weights_input_to_hidden=self.weights_input_to_hidden,
                 weights_hidden_to_output=self.weights_hidden_to_output,
                 bias_input_to_hidden=self.bias_input_to_hidden,
                 bias_hidden_to_output=self.bias_hidden_to_output
                 )

    def load(self, filename):
        with np.load(filename) as f:
            self.weights_input_to_hidden = f['weights_input_to_hidden']
            self.weights_hidden_to_output = f['weights_hidden_to_output']
            self.bias_input_to_hidden = f['bias_input_to_hidden']
            self.bias_hidden_to_output = f['bias_hidden_to_output']
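

# --- Minimal usage sketch (not part of the original file) ---
# Illustrates how the class above could be driven end to end: train on a tiny
# made-up dataset, query a sample, then save and reload the parameters.
# The dataset values, class labels, and the "model.npz" filename are all
# hypothetical and chosen only to match the constructor defaults
# (3 inputs, 4 hidden units, 2 one-hot output classes).
if __name__ == "__main__":
    nn = NeuralNetwork(input_layer_size=3, hidden_layer_size=4, output_layer_size=2)

    # Four illustrative samples: each row is one input vector, each target is one-hot
    dataset = np.array([[0, 0, 1],
                        [0, 1, 1],
                        [1, 0, 0],
                        [1, 1, 0]])
    results = np.array([[1, 0],
                        [1, 0],
                        [0, 1],
                        [0, 1]])

    nn.learning(dataset, results, epochs=500, learning_rate=0.1)

    prediction = nn.get([1, 0, 0])  # column vector of output activations
    print("Predicted class:", int(np.argmax(prediction)))

    nn.save("model.npz")  # hypothetical filename
    nn.load("model.npz")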