"""
Minimal Recurrent Neural Network (RNN) demonstration.

Forward propagation explanation:
https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
RNN fundamentals:
https://towardsdatascience.com/recurrent-neural-networks-d4642c9bc7ce/
"""

import math
import random


# Sigmoid activation
def sigmoid_function(value: float, deriv: bool = False) -> float:
    """Return the sigmoid of a float, or its derivative if ``deriv`` is True.

    When ``deriv`` is True, ``value`` is assumed to already be a sigmoid
    output ``s``, since sigmoid'(x) = s * (1 - s).

    >>> round(sigmoid_function(3.5), 4)
    0.9707
    >>> round(sigmoid_function(0.5, True), 4)
    0.25
    """
    if deriv:
        return value * (1 - value)
    return 1 / (1 + math.exp(-value))
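

# Note: math.exp(-value) overflows for inputs below roughly -710, so the
# helper above raises OverflowError for large negative values. A minimal
# overflow-safe variant is sketched here (illustrative, not used above):
def stable_sigmoid(value: float) -> float:
    """Sigmoid that avoids OverflowError for large negative inputs.

    >>> round(stable_sigmoid(3.5), 4)
    0.9707
    >>> stable_sigmoid(-1000.0)
    0.0
    """
    if value >= 0:
        return 1 / (1 + math.exp(-value))
    exp_value = math.exp(value)  # safe: underflows toward 0.0 instead of raising
    return exp_value / (1 + exp_value)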


# Training constants
LEARNING_RATE = 0.02  # step size for the weight updates
INPUT_VALUE = 0.02  # constant fed in at every time step
SEQUENCE_LENGTH = 5  # time steps in the sequence


def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
    """Return the value found after RNN forward propagation training.

    >>> res = forward_propagation_rnn(50, 500_000)
    >>> 45 < res < 55
    True

    >>> res = forward_propagation_rnn(50, 500)
    >>> 48 < res < 50
    True
    """
    random.seed(0)

    # Random weight initialization in [-1, 1)
    w_xh = random.random() * 2 - 1  # input to hidden
    w_hh = random.random() * 2 - 1  # hidden to hidden (recurrent)
    w_hy = random.random() * 2 - 1  # hidden to output

    # Training loop
    y_t = 0.0  # last output; ensures a defined value even with zero propagations
    for _ in range(number_propagations):
        h_prev = 0.0  # hidden state starts at zero
        total_error = 0.0  # accumulated for monitoring; not used in the updates

        # Forward pass through time
        for _ in range(SEQUENCE_LENGTH):
            # Toy input sequence: the same small constant at every time step
            x_t = INPUT_VALUE

            # Hidden state update
            h_t = sigmoid_function(w_xh * x_t + w_hh * h_prev)

            # Output
            y_t = sigmoid_function(w_hy * h_t)

            # Error (target scaled into the sigmoid's (0, 1) output range)
            error_t = (expected / 100) - y_t
            total_error += abs(error_t)

            # Backpropagation through time (simplified to a single step)
            d_y = error_t * sigmoid_function(y_t, True)
            d_h = d_y * w_hy * sigmoid_function(h_t, True)

            # Weight updates
            w_hy += LEARNING_RATE * d_y * h_t
            w_xh += LEARNING_RATE * d_h * x_t
            w_hh += LEARNING_RATE * d_h * h_prev

            # Move to the next time step
            h_prev = h_t

    # Final output after training, scaled back to the target's range
    return y_t * 100
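

# A minimal sketch of a single forward pass through the same recurrence with
# fixed illustrative weights (chosen here for the example, not learned above),
# useful for tracing the arithmetic by hand.
def forward_pass_demo() -> float:
    """Run one untrained forward pass with fixed example weights.

    >>> 0.4 < forward_pass_demo() < 0.5
    True
    """
    w_xh, w_hh, w_hy = 0.5, 0.25, -0.25  # illustrative values
    h_prev = 0.0
    y_t = 0.0
    for _ in range(SEQUENCE_LENGTH):
        h_t = sigmoid_function(w_xh * INPUT_VALUE + w_hh * h_prev)
        y_t = sigmoid_function(w_hy * h_t)
        h_prev = h_t
    return y_t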


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    expected = int(input("Expected value: "))
    number_propagations = int(input("Number of propagations: "))
    print(forward_propagation_rnn(expected, number_propagations))