时间序列预测迁移学习:基于全连接层基础模型的 Python Keras 实现
以下是基于 Python 和 Keras 实现的时间序列预测的迁移学习模型,基础模型为全连接层:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Input
from keras.optimizers import Adam
from keras.utils import to_categorical
from sklearn.preprocessing import MinMaxScaler
# 生成时间序列数据
def generate_data(n_samples, n_features, n_in, n_out):
    """Generate a random dataset for a sliding-window-sum regression task.

    Each target ``y[i, j]`` is the sum of the ``n_in`` consecutive input
    features ``X[i, j], ..., X[i, j + n_in - 1]``.

    Args:
        n_samples: number of rows to generate.
        n_features: number of input features per sample.
        n_in: window length summed to produce each output.
        n_out: number of output values per sample.

    Returns:
        (X, y): arrays of shape (n_samples, n_features) and (n_samples, n_out).

    Raises:
        ValueError: if the last window would run past the feature axis.
            (The original loop silently summed a truncated window in
            that case, producing wrong targets.)
    """
    if n_out + n_in - 1 > n_features:
        raise ValueError(
            'n_out + n_in - 1 must not exceed n_features '
            '(windows would be truncated)'
        )
    X = np.random.rand(n_samples, n_features)
    # Vectorized replacement for the original O(n_samples * n_out) Python loop:
    # all length-n_in windows along the feature axis, summed per window.
    windows = np.lib.stride_tricks.sliding_window_view(X, n_in, axis=1)
    y = windows[:, :n_out, :].sum(axis=2)
    return X, y
# 归一化数据
def normalize_data(X_train, X_test, y_train, y_test):
    """Scale features and targets into [0, 1].

    Both scalers are fit on the *training* split only, then applied to
    train and test alike, so no test-set statistics leak into training.

    Returns:
        (X_train, X_test, y_train, y_test, x_scaler, y_scaler) — the
        scaled arrays plus the fitted scalers (needed later to invert
        predictions back to the original scale).
    """
    x_scaler = MinMaxScaler(feature_range=(0, 1)).fit(X_train)
    y_scaler = MinMaxScaler(feature_range=(0, 1)).fit(y_train)
    X_train, X_test = x_scaler.transform(X_train), x_scaler.transform(X_test)
    y_train, y_test = y_scaler.transform(y_train), y_scaler.transform(y_test)
    return X_train, X_test, y_train, y_test, x_scaler, y_scaler
# 构建全连接层模型
def build_base_model(input_shape, output_shape):
    """Build the base regressor: a two-hidden-layer MLP with dropout.

    Args:
        input_shape: tuple, shape of one input sample, e.g. (n_features,).
        output_shape: int, number of regression outputs.

    Returns:
        An uncompiled Keras ``Sequential`` model with a linear output head.
    """
    layers = [
        Dense(64, input_shape=input_shape, activation='relu'),
        Dropout(0.5),
        Dense(32, activation='relu'),
        Dropout(0.5),
        Dense(output_shape, activation='linear'),
    ]
    return Sequential(layers)
# 迁移学习模型
def build_transfer_learning_model(base_model, input_shape, output_shape):
    """Build a transfer-learning model: a frozen base plus a trainable head.

    Args:
        base_model: a (pre)trained Keras model reused as a feature extractor.
        input_shape: kept for interface compatibility; Keras ignores
            ``input_shape`` on any layer that is not first, and the base
            model already defines the input, so it is unused here.
        output_shape: int, number of regression outputs.

    Returns:
        An uncompiled Keras ``Sequential`` model.
    """
    # Freeze the base so transfer training only fits the new head.
    # Without this (as in the original), training the transfer model
    # re-fits every base weight — no transfer happens, and the shared
    # base_model object's weights are clobbered as a side effect.
    base_model.trainable = False
    model = Sequential()
    model.add(base_model)
    model.add(Dense(32, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(output_shape, activation='linear'))
    return model
# 训练模型
def train_model(model, X_train, y_train, X_test, y_test):
    """Compile and fit ``model`` on the training data.

    Uses MSE loss with Adam and tracks MAE; the test split is used as
    validation data for per-epoch monitoring only.

    Returns:
        The Keras ``History`` object from ``fit`` (the original discarded
        it; returning it is backward compatible since callers ignored the
        previous ``None``).
    """
    # ``lr`` is deprecated in TF-Keras 2.x and removed in Keras 3;
    # ``learning_rate`` is the supported keyword.
    model.compile(loss='mse', optimizer=Adam(learning_rate=0.001), metrics=['mae'])
    return model.fit(X_train, y_train, epochs=100, batch_size=32,
                     validation_data=(X_test, y_test), verbose=1)
# 预测模型
def predict_model(model, X_test, scaler_y):
    """Run inference and map the predictions back to the original scale.

    Args:
        model: fitted model exposing ``predict``.
        X_test: (already normalized) inputs to predict on.
        scaler_y: the target scaler fitted during normalization; its
            ``inverse_transform`` undoes the [0, 1] scaling.

    Returns:
        Predictions in the original (un-normalized) target units.
    """
    raw = model.predict(X_test)
    return scaler_y.inverse_transform(raw)
# 测试模型
def test_model(y_test, y_pred):
mae = np.mean(np.abs(y_test - y_pred))
print('MAE: %.2f' % mae)
if __name__ == '__main__':
    # Generate the dataset: 3 outputs, each the sum of a 5-wide window
    # over 10 random input features.
    n_samples = 1000
    n_features = 10
    n_in = 5
    n_out = 3
    X, y = generate_data(n_samples, n_features, n_in, n_out)
    # Chronological 80/20 train/test split.
    train_size = int(n_samples * 0.8)
    X_train, X_test = X[:train_size], X[train_size:]
    y_train, y_test = y[:train_size], y[train_size:]
    # Keep an original-scale copy of the test targets: predict_model
    # inverse-transforms predictions back to the original scale, so
    # comparing them against the *normalized* y_test (as the original
    # code did) produced a meaningless MAE.
    y_test_orig = y_test.copy()
    X_train, X_test, y_train, y_test, scaler_X, scaler_y = normalize_data(
        X_train, X_test, y_train, y_test)
    # Build the base model and the transfer model that wraps it.
    input_shape = (n_features,)
    output_shape = n_out
    base_model = build_base_model(input_shape, output_shape)
    transfer_learning_model = build_transfer_learning_model(
        base_model, input_shape, output_shape)
    # Train and evaluate the base model BEFORE training the transfer
    # model: the transfer model embeds the same base_model object, so
    # training it afterwards can mutate the base weights and would
    # corrupt a later base-model evaluation.
    train_model(base_model, X_train, y_train, X_test, y_test)
    y_pred_base = predict_model(base_model, X_test, scaler_y)
    train_model(transfer_learning_model, X_train, y_train, X_test, y_test)
    y_pred_transfer = predict_model(transfer_learning_model, X_test, scaler_y)
    # Report MAE for both models on the original target scale.
    test_model(y_test_orig, y_pred_base)
    test_model(y_test_orig, y_pred_transfer)
该代码首先生成一个包含 10 个输入特征的数据集,每个样本有 3 个输出值,其中第 j 个输出是输入特征中从第 j 个位置开始的连续 5 个元素之和(滑动窗口求和)。然后将数据集按 8:2 划分为训练集和测试集,并仅用训练集统计量对数据进行归一化。接下来,构建了一个基础模型和一个迁移学习模型:基础模型是带 Dropout 的三层全连接网络,迁移学习模型在基础模型之上添加了新的全连接输出头。最后,分别训练两个模型、进行预测并以 MAE 指标评估性能。
原文地址: https://www.cveoy.top/t/topic/lyKQ 著作权归作者所有。请勿转载和采集!