以下是Python编程实现RNN和LSTM的示例代码:

RNN:

import numpy as np

# Define the sigmoid activation function
def sigmoid(x):
    """Element-wise logistic sigmoid, 1 / (1 + exp(-x)).

    Computed in a numerically stable form: exp is only ever evaluated on a
    non-positive argument, so large-magnitude inputs cannot overflow.  The
    naive ``1 / (1 + np.exp(-x))`` overflows ``np.exp`` (RuntimeWarning)
    for large negative x, even though the limit value is well defined.
    """
    z = np.exp(-np.abs(x))
    # For x >= 0: 1/(1+e^(-x));  for x < 0 the algebraically equal e^x/(1+e^x).
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

# Define the RNN class
class RNN:
    """A minimal vanilla (Elman) recurrent network with a sigmoid read-out.

    Holds the parameter matrices and implements only the forward pass;
    there is no training / backpropagation here.
    """

    def __init__(self, input_size, hidden_size, output_size):
        """Record the layer sizes and randomly initialise all parameters."""
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size

        # Small random weights; draw order is input-to-hidden,
        # hidden-to-hidden, then hidden-to-output.  Biases start at zero.
        self.Wxh = np.random.randn(hidden_size, input_size) * 0.01
        self.Whh = np.random.randn(hidden_size, hidden_size) * 0.01
        self.Why = np.random.randn(output_size, hidden_size) * 0.01
        self.bh = np.zeros((hidden_size, 1))
        self.by = np.zeros((output_size, 1))

    def forward(self, x):
        """Run the network over a sequence.

        x is a list of (input_size, 1) column vectors, one per time step.
        Returns (hs, ys): the hidden state and the sigmoid-activated output
        recorded at every step.
        """
        # The recurrent state starts at zero.
        state = np.zeros((self.hidden_size, 1))
        hs, ys = [], []

        for x_t in x:
            # New hidden state from the current input and the previous state.
            state = np.tanh(self.Wxh @ x_t + self.Whh @ state + self.bh)

            # Linear read-out, squashed through the sigmoid.
            out = sigmoid(self.Why @ state + self.by)

            hs.append(state)
            ys.append(out)

        return hs, ys
        
# Build an RNN with 3 input units, 4 hidden units, and 2 output units.
rnn = RNN(3, 4, 2)

# A toy three-step input sequence of 3x1 column vectors.
x = [
    np.array([[1], [2], [3]]),
    np.array([[4], [5], [6]]),
    np.array([[7], [8], [9]]),
]

# Run the whole sequence through the network.
hs, ys = rnn.forward(x)

# Show the per-step hidden states, then the per-step outputs.
print("Hidden states:")
for state in hs:
    print(state)

print("Outputs:")
for out in ys:
    print(out)

LSTM:

import numpy as np

# Define the sigmoid activation function
def sigmoid(x):
    """Element-wise logistic sigmoid, 1 / (1 + exp(-x)).

    Computed in a numerically stable form: exp is only ever evaluated on a
    non-positive argument, so large-magnitude inputs cannot overflow.  The
    naive ``1 / (1 + np.exp(-x))`` overflows ``np.exp`` (RuntimeWarning)
    for large negative x, even though the limit value is well defined.
    """
    z = np.exp(-np.abs(x))
    # For x >= 0: 1/(1+e^(-x));  for x < 0 the algebraically equal e^x/(1+e^x).
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

# Define the tanh activation function
def tanh(x):
    """Element-wise hyperbolic tangent (thin wrapper around ``np.tanh``)."""
    return np.tanh(x)

# Define the LSTM class
class LSTM:
    """A minimal single-layer LSTM with a sigmoid read-out.

    Holds the gate parameters and implements only the forward pass;
    there is no training / backpropagation here.
    """

    def __init__(self, input_size, hidden_size, output_size):
        """Record the layer sizes and randomly initialise every parameter."""
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size

        # Small random weights, drawn gate by gate: input gate, forget gate,
        # output gate, candidate cell, then the hidden-to-output read-out.
        self.Wxi = np.random.randn(hidden_size, input_size) * 0.01
        self.Whi = np.random.randn(hidden_size, hidden_size) * 0.01
        self.Wxf = np.random.randn(hidden_size, input_size) * 0.01
        self.Whf = np.random.randn(hidden_size, hidden_size) * 0.01
        self.Wxo = np.random.randn(hidden_size, input_size) * 0.01
        self.Who = np.random.randn(hidden_size, hidden_size) * 0.01
        self.Wxc = np.random.randn(hidden_size, input_size) * 0.01
        self.Whc = np.random.randn(hidden_size, hidden_size) * 0.01
        self.Why = np.random.randn(output_size, hidden_size) * 0.01
        # All biases start at zero.
        self.bi = np.zeros((hidden_size, 1))
        self.bf = np.zeros((hidden_size, 1))
        self.bo = np.zeros((hidden_size, 1))
        self.bc = np.zeros((hidden_size, 1))
        self.by = np.zeros((output_size, 1))

    def forward(self, x):
        """Run the LSTM over a sequence.

        x is a list of (input_size, 1) column vectors, one per time step.
        Returns (hs, ys): the hidden state and the sigmoid-activated output
        recorded at every step.
        """
        # Hidden state and cell state both start at zero.
        h = np.zeros((self.hidden_size, 1))
        c = np.zeros((self.hidden_size, 1))
        hs, ys = [], []

        for x_t in x:
            # Gate activations from the current input and previous hidden state.
            i_gate = sigmoid(self.Wxi @ x_t + self.Whi @ h + self.bi)
            f_gate = sigmoid(self.Wxf @ x_t + self.Whf @ h + self.bf)
            o_gate = sigmoid(self.Wxo @ x_t + self.Who @ h + self.bo)

            # Candidate cell contents, then the gated cell-state update:
            # keep part of the old cell (forget gate), admit part of the
            # candidate (input gate).
            candidate = np.tanh(self.Wxc @ x_t + self.Whc @ h + self.bc)
            c = i_gate * candidate + f_gate * c

            # Expose the cell through the output gate to form the new hidden state.
            h = o_gate * np.tanh(c)

            # Sigmoid-activated linear read-out for this step.
            out = sigmoid(self.Why @ h + self.by)

            hs.append(h)
            ys.append(out)

        return hs, ys
        
# Build an LSTM with 3 input units, 4 hidden units, and 2 output units.
lstm = LSTM(3, 4, 2)

# A toy three-step input sequence of 3x1 column vectors.
x = [
    np.array([[1], [2], [3]]),
    np.array([[4], [5], [6]]),
    np.array([[7], [8], [9]]),
]

# Run the whole sequence through the network.
hs, ys = lstm.forward(x)

# Show the per-step hidden states, then the per-step outputs.
print("Hidden states:")
for state in hs:
    print(state)

print("Outputs:")
for out in ys:
    print(out)
以上即为Python编程实现RNN和LSTM的示例代码。

原文地址: https://www.cveoy.top/t/topic/cKi4 著作权归作者所有。请勿转载和采集!

免费AI点我,无需注册和登录