Python 4 5/8/2017


SUBMITTED BY: Guest

DATE: May 8, 2017, 5:53 p.m.

FORMAT: Python

SIZE: 4.7 kB

HITS: 311

# Paste-site banner (not Python) — commented out so the file can parse:
# If you want more of my pastes visit: https://randompaste.000webhostapp.com/index.html
# view my last post at: https://bitbin.it/rQfL5wi4/
  5. import numpy as np
  6. import matplotlib.pyplot as plt
  7. from random import shuffle
  8. """
  9. File for implementation
  10. of neural network from scratch
  11. """
  12. def create_data():
  13. x = np.linspace(0,1,100)
  14. y = 4 * x + 109
  15. zipped_dat = zip(x, y)
  16. return zipped_dat
  17. class model:
  18. """
  19. +---------+
  20. | |
  21. | |
  22. | |
  23. | |
  24. | |
  25. +--------+ | | +-------+
  26. | 1x1 | | + |Output |
  27. | Input +------> | Hidden +------> |1x1 |
  28. +--------+ W1 | | W2 +-------+
  29. 1x10 | | 10x1
  30. | |
  31. | |
  32. | |
  33. | |
  34. | |
  35. | |
  36. +---------+
  37. """
  38. def __init__(self, num_hidden = 10, learning_rate = 0.01, batch_size = 10):
  39. self.W1 = np.random.randn(1, 10)
  40. self.bias1 = np.zeros([10, 1])
  41. self.W2 = np.random.randn(10, 1)
  42. self.bias2 = np.zeros([1, 1])
  43. self.learning_rate = learning_rate
  44. self.batch_size = batch_size
  45. self.update2 = np.empty([10 , 1]) # Parameter gradients for w2
  46. self.update1 = np.empty([1, 10]) # Parameter Gradients for w1
  47. self.updatebias2 = np.empty([1, 1]) # Parameter for bias2
  48. self.updatebias1 = np.empty([10, 1]) # Parameter for bias1
  49. def fwd_pass(self, data, value):
  50. """
  51. This function is used for doing a
  52. fwd pass over the NN. This function
  53. also call calc_gradients for each data
  54. point
  55. Parameters:
  56. data: int, double
  57. Data Point (x)
  58. value: int, double
  59. corresponding y values of the data
  60. """
  61. data = np.array(data).reshape(1, 1)
  62. value = np.array(value).reshape(1, 1)
  63. data = data.reshape(data.shape[0], 1)
  64. value = value.reshape(data.shape[0], 1)
  65. temp = np.add(np.dot(self.W1.T, data), self.bias1)
  66. predicted = np.dot(self.W2.T, temp) + self.bias2
  67. error = predicted - value
  68. self.calc_gradients(data, error, temp)
  69. return predicted
  70. def calc_gradients(self, data, error, temp):
  71. """
  72. This function stores the gradient information
  73. calculated over the batch.
  74. Parameters:
  75. data: int, double
  76. """
  77. self.update2 = np.hstack([self.update2, np.multiply((self.learning_rate * error), temp)])
  78. self.updatebias2 = np.hstack([self.updatebias2, self.learning_rate * error * 1])
  79. self.update1 = np.vstack([self.update1, self.learning_rate * error * np.dot(self.W2, data).T])
  80. self.updatebias1 = np.hstack([self.updatebias1, self.learning_rate * error * self.W2 * 1])
  81. def update_params(self):
  82. self.W2 -= np.sum(self.update2, axis = 1).reshape([10, 1])
  83. self.W1 -= np.sum(self.update1, axis = 0).reshape([1, 10])
  84. self.bias2 -= np.sum(self.updatebias2, axis = 1).reshape([1, 1])
  85. self.bias1 -= np.sum(self.updatebias1, axis = 1).reshape([10, 1])
  86. def loss(self, data, predicted_value, value):
  87. self.update_params()
  88. print np.sum(predicted_value - value, axis = 0)
  89. return 0.5 * np.sum(np.power(predicted_value - value, 2), axis = 1)
  90. def main():
  91. # create the model
  92. m1 = model()
  93. data = create_data()
  94. predicted_list = []
  95. # divid the data into batches of size 10. You should shuffle before.
  96. shuffle(data)
  97. data, value = zip(*data)
  98. for epoch in range(0, 1):
  99. for i in range(1, len(data)):
  100. predicted_list.append(m1.fwd_pass(np.array(data[i]), np.array(value[i])))
  101. if i%10==0:
  102. predicted = np.array(predicted_list).reshape(10,1)
  103. m1.loss(np.array(data[i:i+10]), predicted, np.array(value[i:i+10]).reshape(10,1))
  104. predicted_list = []
  105. if __name__ == "__main__":
  106. main()

comments powered by Disqus