# Creating an instance of the model.
# NOTE(review): DummyNetVariant's definition was truncated in extraction
# (see the fragment near the end of this file) — restore it before running.
model = DummyNetVariant()

# Defining the loss function and optimizer.
# CrossEntropyLoss expects raw (unnormalized) class logits from the model.
criterion = nn.CrossEntropyLoss()
# Adam with the common default-ish learning rate of 1e-3.
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Loading the CIFAR-10 dataset and applying data augmentation.
transform = transforms.Compose([
    transforms.RandomCrop(32, padding=4),    # random 32x32 crop after 4-pixel padding
    transforms.RandomHorizontalFlip(),       # random left-right flip
    transforms.ToTensor(),                   # HWC uint8 -> CHW float in [0, 1]
    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),  # rescale each channel to [-1, 1]
])
# download=True fetches CIFAR-10 into ./data on first run (network I/O).
train_dataset = datasets.CIFAR10(root='./data', train=True, download=True, transform=transform)
train_loader = DataLoader(train_dataset, batch_size=64, shuffle=True)

# Training the model.
num_epochs = 10
for epoch in range(num_epochs):
    running_loss = 0.0
    for i, data in enumerate(train_loader, 0):
        # Getting the inputs and labels
        inputs, labels = data

        # Zeroing the parameter gradients
        optimizer.zero_grad()

        # Forward pass, backward pass and optimization
        outputs = model(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        # Updating the running loss
        running_loss += loss.item()

        # Printing the average loss every 2000 mini-batches
        # (781 batches/epoch at batch_size=64, so this only fires on
        # longer runs — left as in the original).
        if i % 2000 == 1999:
            print('[%d, %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss / 2000))
            running_loss = 0.0

print('Finished training')

# NOTE(review): these imports were garbled in extraction (module-path dots
# stripped, everything fused onto one line) and belong at the top of the
# file, before any of them are used. Restored paths below.
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from torch.utils.data import DataLoader

# NOTE(review): the model class definition ("class DummyNetVar...") was
# truncated in extraction and cannot be reconstructed from this chunk;
# restore the full DummyNetVariant class here (or above its first use).

# Original source: https://www.cveoy.top/t/topic/bsTw — copyright belongs to the author; do not repost.