pytorch-task3

Implementing logistic regression with PyTorch
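As a quick recap (my own summary, not part of the original task statement): logistic regression models the probability of the positive class as

p = \sigma(w^\top x + b), \qquad \sigma(z) = \frac{1}{1 + e^{-z}}

and is trained by minimizing the binary cross-entropy loss. The code below maps this directly onto nn.Linear(2, 1), nn.Sigmoid, and nn.BCELoss.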

import torch
import torch.nn as nn
from torch.optim import SGD

class LR(nn.Module):
    def __init__(self):
        super(LR, self).__init__()
        self.linear = nn.Linear(2, 1)   # 2 input features -> 1 output logit
        self.sigmoid = nn.Sigmoid()     # squash the logit into a probability in (0, 1)

    def forward(self, x):
        x = self.linear(x)
        x = self.sigmoid(x)
        return x
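The same pipeline can also be written with nn.Sequential; this is just an equivalent, more compact alternative, not something the task requires:

lr_model = nn.Sequential(
    nn.Linear(2, 1),   # linear layer: 2 features -> 1 logit
    nn.Sigmoid()       # probability in (0, 1)
)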

class Model(object):
    def __init__(self, x, y, config):
        self.x = x
        self.y = y
        self.config = config
        self.model = LR()
        self.criterion = nn.BCELoss()   # binary cross-entropy on probabilities
        self.optimizer = SGD(self.model.parameters(), lr=self.config['learning_rate'])

    def fit(self):
        for epoch in range(self.config['epochs']):
            out = self.model(self.x)                # forward pass: predicted probabilities
            self.optimizer.zero_grad()              # clear gradients from the previous step
            loss = self.criterion(out, self.y)      # compare predictions with the labels
            print(epoch, loss.item())
            loss.backward()                         # backpropagate
            self.optimizer.step()                   # update the parameters

data = torch.ones(100, 2)
xa = torch.normal(2 * data, 1)      # cluster centred at (2, 2), label 0
xb = torch.normal(-2 * data, 1)     # cluster centred at (-2, -2), label 1
ya = torch.zeros(100, 1)            # targets shaped (100, 1) to match the model output
yb = torch.ones(100, 1)
x = torch.cat((xa, xb), 0)
y = torch.cat((ya, yb), 0)

config = {}
config['learning_rate'] = 0.01
config['epochs'] = 100

model = Model(x, y, config)
model.fit()
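As a quick sanity check after training (my addition, not part of the original snippet), the predicted probabilities can be thresholded at 0.5 and compared against the labels:

with torch.no_grad():
    probs = model.model(x)                      # predicted probabilities, shape (200, 1)
    preds = (probs > 0.5).float()               # threshold at 0.5 -> hard class labels
    accuracy = (preds == y).float().mean()      # fraction of correctly classified points
    print('training accuracy:', accuracy.item())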

 
