-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathPyTorch_06.py
More file actions
59 lines (49 loc) · 1.71 KB
/
PyTorch_06.py
File metadata and controls
59 lines (49 loc) · 1.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import torch
from torch.utils.data import DataLoader,Dataset
from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim
import numpy as np
class DiabetesDataset(Dataset):
    """Map-style Dataset over a comma-separated diabetes table.

    Each row of the CSV holds the feature columns followed by a single
    label column; features and labels are exposed as float32 tensors.
    """
    def __init__(self, filepath='diabetes.csv'):
        """Load the whole CSV into memory.

        Args:
            filepath: path to the CSV file (default 'diabetes.csv',
                preserving the original hard-coded behavior).
        """
        xy = np.loadtxt(filepath, delimiter=',', dtype=np.float32)
        self.len = xy.shape[0]
        # All columns except the last are features; the last is the label.
        # [-1] (a list index) keeps the label as a (N, 1) column vector,
        # which is the shape BCELoss expects downstream.
        self.x_data = torch.from_numpy(xy[:, 0:-1])
        self.y_data = torch.from_numpy(xy[:, [-1]])

    def __getitem__(self, index):
        """Return the (features, label) tensor pair for one row."""
        return self.x_data[index], self.y_data[index]

    def __len__(self):
        """Return the number of rows loaded from the CSV."""
        return self.len
# Materialize the dataset once, then wrap it in a loader that serves
# shuffled mini-batches of 32 rows per iteration.
dataset = DiabetesDataset()
train_loader = DataLoader(dataset, batch_size=32, shuffle=True)
class Model(nn.Module):
    """Fully connected binary classifier: 8 -> 6 -> 4 -> 1.

    Every layer (including the output) is followed by a sigmoid, so the
    final activation lies in (0, 1) and pairs with BCELoss.
    """
    def __init__(self):
        super(Model, self).__init__()
        # Attribute names l1/l2/l3 are kept as-is: they define the
        # state_dict keys and must not change.
        self.l1 = nn.Linear(8, 6)
        self.l2 = nn.Linear(6, 4)
        self.l3 = nn.Linear(4, 1)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Map a (batch, 8) input to a (batch, 1) probability-like output."""
        out = x
        for layer in (self.l1, self.l2, self.l3):
            out = self.sigmoid(layer(out))
        return out
model = Model()

# Mean binary cross-entropy; the model's final sigmoid keeps predictions
# inside (0, 1), as BCELoss requires.
criterion = nn.BCELoss(reduction='mean')
optimizer = optim.SGD(model.parameters(), lr=0.01)

# Plain SGD training: 40 passes over the shuffled mini-batches.
for epoch in range(40):
    for batch_idx, (x_data, y_data) in enumerate(train_loader):
        # Forward pass and per-batch loss report.
        y_pred = model(x_data)
        loss = criterion(y_pred, y_data)
        print("Epoch : {} Batch : {} Loss : {}".format(epoch + 1, batch_idx + 1, loss.item()))
        # Clear stale gradients, backpropagate, and take one SGD step.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()