import torch
import torch.nn as nn
import torch.optim as optim
import pandas as pd
import numpy as np
from sklearn.metrics import roc_curve, auc
from bayes_opt import BayesianOptimization
import matplotlib.pyplot as plt

Load the data

# Use a raw string so the backslashes in the Windows path are not treated as escape sequences
data = pd.read_excel(r'C:\Users\lenovo\Desktop\HIV\DNN神经网络测试\data1.xlsx')
y = data.iloc[:, 0]   # first column: labels
x = data.iloc[:, 1:]  # remaining columns: features

Convert the data to tensors

x = torch.tensor(np.array(x), dtype=torch.float32)
y = torch.tensor(np.array(y), dtype=torch.float32).unsqueeze(1)  # shape (N, 1) to match the model output
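The rest of the script assumes the spreadsheet has 16 feature columns after the label column (this is an assumption about data1.xlsx, matching the input_dim used later). A quick shape check catches a mismatch before any training starts:

# Assumed layout: label in column 0, 16 feature columns after it
print(x.shape, y.shape)  # expected: torch.Size([N, 16]) and torch.Size([N, 1])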

Define the DNN model

class DNN(nn.Module):
    def __init__(self, input_dim, hidden_dim1, hidden_dim2, hidden_dim3, output_dim):
        super(DNN, self).__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim1)
        self.fc2 = nn.Linear(hidden_dim1, hidden_dim2)
        self.fc3 = nn.Linear(hidden_dim2, hidden_dim3)
        self.fc4 = nn.Linear(hidden_dim3, output_dim)
        self.dropout = nn.Dropout(p=0.5)
        self.relu = nn.ReLU()
        # With a single output unit and BCELoss, the model must emit a per-sample
        # probability. Softmax(dim=1) over one logit would always return 1.0,
        # so Sigmoid is used instead.
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        x = self.relu(self.fc1(x))
        x = self.dropout(x)
        x = self.relu(self.fc2(x))
        x = self.dropout(x)
        x = self.relu(self.fc3(x))
        x = self.dropout(x)
        x = self.fc4(x)
        x = self.sigmoid(x)
        return x
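A quick way to verify that the model really outputs per-sample probabilities is to push a random batch through it. The hidden sizes and batch below are placeholders for illustration, not the optimized values:

# Minimal sanity check with hypothetical hidden sizes: outputs should lie in (0, 1)
demo_model = DNN(input_dim=16, hidden_dim1=32, hidden_dim2=32, hidden_dim3=16, output_dim=1)
demo_model.eval()  # disable dropout for a deterministic forward pass
with torch.no_grad():
    demo_out = demo_model(torch.randn(4, 16))
print(demo_out.shape, demo_out.min().item(), demo_out.max().item())  # torch.Size([4, 1]), values in (0, 1)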

Define the loss function

criterion = nn.BCELoss()
# The Adam optimizer is instantiated inside train(), once the model and learning rate are known.

Define the training function

def train(model, x_train, y_train, num_epochs, learning_rate):
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    losses = []
    accuracies = []
    for epoch in range(num_epochs):
        optimizer.zero_grad()
        y_pred = model(x_train)
        loss = criterion(y_pred, y_train)
        loss.backward()
        optimizer.step()
        losses.append(loss.item())
        # Threshold the predicted probabilities at 0.5 to compute training accuracy
        accuracy = ((y_pred > 0.5).type(torch.float32) == y_train).type(torch.float32).mean().item()
        accuracies.append(accuracy)
        if epoch % 10 == 0:
            print('Epoch [{}/{}], Loss: {:.4f}, Accuracy: {:.4f}'.format(epoch + 1, num_epochs, loss.item(), accuracy))
    return losses, accuracies

Define the objective function for Bayesian optimization

def optimize_dnn(hidden_dim1, hidden_dim2, hidden_dim3, learning_rate):
    input_dim = 16
    output_dim = 1
    # BayesianOptimization passes floats, so the hidden sizes are cast to int
    model = DNN(input_dim,
                hidden_dim1=int(hidden_dim1),
                hidden_dim2=int(hidden_dim2),
                hidden_dim3=int(hidden_dim3),
                output_dim=output_dim)
    num_epochs = 200
    losses, accuracies = train(model, x, y, num_epochs, learning_rate)
    # Objective to maximize: mean training accuracy over all epochs
    return np.array(accuracies).mean()

Run the Bayesian optimization

optimizer = BayesianOptimization(
    f=optimize_dnn,
    pbounds={'hidden_dim1': (10, 50),
             'hidden_dim2': (10, 50),
             'hidden_dim3': (10, 50),
             'learning_rate': (0.0001, 0.01)},
    random_state=1)
optimizer.maximize(init_points=20, n_iter=80)

Print the best parameters

print(optimizer.max)

Train the model with the best parameters

best_params = optimizer.max['params']
input_dim = 16
output_dim = 1
model = DNN(input_dim,
            hidden_dim1=int(best_params['hidden_dim1']),
            hidden_dim2=int(best_params['hidden_dim2']),
            hidden_dim3=int(best_params['hidden_dim3']),
            output_dim=output_dim)
num_epochs = 200
learning_rate = best_params['learning_rate']
losses, accuracies = train(model, x, y, num_epochs, learning_rate)

Output the predicted probability for each sample

# Switch to eval mode so dropout is disabled when computing the final predictions
model.eval()
with torch.no_grad():
    y_pred = model(x)
probabilities = y_pred.numpy()
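If hard class labels are also needed, the probabilities can be thresholded; the 0.5 cut-off below is an assumption and can be tuned, for example using the ROC curve plotted later:

# Hypothetical post-processing: convert probabilities to 0/1 predictions at a 0.5 threshold
predicted_labels = (probabilities > 0.5).astype(int)
print(predicted_labels[:10].ravel())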

Plot accuracy over epochs

plt.plot(range(num_epochs), accuracies)
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.title('Accuracy vs. Epochs')
plt.show()

Plot loss over epochs

plt.plot(range(num_epochs), losses)
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.title('Loss vs. Epochs')
plt.show()

Plot the ROC curve

fpr, tpr, thresholds = roc_curve(y, probabilities)
roc_auc = auc(fpr, tpr)
plt.plot(fpr, tpr, color='darkorange', lw=2, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.show()

Building a DNN with Python and Bayesian Optimization to Predict Patient Disease Probability

