
PyTorch Study Notes: DataLoaders


A DataLoader wraps a Dataset and provides minibatching, shuffling, and parallel loading with multiple workers for you.

import torch
from torch.autograd import Variable
import torch.nn as nn
from torch.utils.data import TensorDataset, DataLoader

# Define our whole model as a single Module
class TwoLayerNet(nn.Module):
    # Initializer sets up two children (Modules can contain Modules)
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(H, D_out)

    # Define the forward pass using child modules and autograd ops on
    # Variables; no need to define backward - autograd will handle it
    def forward(self, x):
        h_relu = self.linear1(x).clamp(min=0)
        y_pred = self.linear2(h_relu)
        return y_pred

N, D_in, H, D_out = 64, 1000, 100, 10
x = torch.randn(N, D_in)
y = torch.randn(N, D_out)

# When you need to load custom data, just write your own Dataset class
loader = DataLoader(TensorDataset(x, y), batch_size=8)

model = TwoLayerNet(D_in, H, D_out)

criterion = torch.nn.MSELoss(size_average=False)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-4)
for epoch in range(10):
    # Iterate over the loader to form minibatches
    for x_batch, y_batch in loader:
        # The loader gives Tensors, so wrap them in Variables
        x_var, y_var = Variable(x_batch), Variable(y_batch)
        y_pred = model(x_var)
        loss = criterion(y_pred, y_var)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
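The shuffling and parallel loading mentioned at the top are opt-in constructor arguments of DataLoader. A minimal sketch reusing the x and y tensors from the example above (the batch_size and num_workers values here are arbitrary choices, not recommendations):

# Reshuffle the data every epoch and load batches in two background
# worker processes instead of the main process
loader = DataLoader(TensorDataset(x, y), batch_size=8,
                    shuffle=True, num_workers=2)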
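As the comment in the example says, loading custom data only requires writing your own Dataset subclass, which must implement __len__ and __getitem__. A minimal sketch, with a made-up RandomPairDataset standing in for a real data source:

import torch
from torch.utils.data import Dataset, DataLoader

class RandomPairDataset(Dataset):
    # Hypothetical dataset serving (input, target) pairs; a real one
    # would read from files or a database instead of random tensors
    def __init__(self, n, d_in, d_out):
        self.x = torch.randn(n, d_in)
        self.y = torch.randn(n, d_out)

    def __len__(self):
        # DataLoader asks this for the number of samples
        return self.x.size(0)

    def __getitem__(self, index):
        # DataLoader calls this once per sample index; the default
        # collate function stacks the returned tensors into a batch
        return self.x[index], self.y[index]

# Drop-in replacement for TensorDataset(x, y) in the example above
loader = DataLoader(RandomPairDataset(64, 1000, 10), batch_size=8)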
