# NOTE(review): this whole section (L1-15) appears to be a duplicated excerpt of
# the "1) model build" / "2) loss and optimizer" sections further down the file,
# most likely a copy/paste artifact from the blog-post scrape. As written it
# runs BEFORE `n_features` and `learning_rate` are defined, so executing the
# file top-to-bottom would raise NameError here — confirm and remove if so.
# 1) model build
# The sigmoid function is delegated to torch.sigmoid.
class LogisticRegression(nn.Module):
    def __init__(self, input_dim):
        super(LogisticRegression, self).__init__()
        # define layers: a single linear layer producing one logit
        self.linear = nn.Linear(input_dim, 1)
    def forward(self, x):
        y_predicted = torch.sigmoid(self.linear(x))
        return y_predicted
model = LogisticRegression(n_features)
# BCE stands for Binary Cross Entropy
criterion = nn.BCELoss()
# SGD stands for stochastic gradient descent
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
import numpy as np
import pandas as pd
import torch
import torch.nn as nn

# 0) data import and preprocessing
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Load iris and merge features and targets into a single DataFrame.
iris = datasets.load_iris()
# print(iris.DESCR)
feature = pd.DataFrame(iris.data, columns=iris.feature_names)
target = pd.DataFrame(iris.target, columns=['target'])
iris_data = pd.concat([feature, target], axis=1)

# Keep only sepal length (cm), sepal width (cm) and the target column.
iris_data = iris_data[['sepal length (cm)', 'sepal width (cm)', 'target']]
# Keep only the Iris-Setosa (0) and Iris-Versicolour (1) classes so the
# task becomes binary classification.
iris_data = iris_data[iris_data.target <= 1]
iris_data.head(5)

feature = iris_data[['sepal length (cm)', 'sepal width (cm)']]
target = iris_data[['target']]
n_samples, n_features = feature.shape

# Split into training and testing data (70/30, fixed seed for reproducibility).
feature_train, feature_test, target_train, target_test = train_test_split(
    feature, target, test_size=0.3, random_state=4
)

# Standardize features.
# BUG FIX: the scaler must be fitted on the TRAINING split only and then
# applied to the test split with transform(). The original code called
# fit_transform() on the test set as well, which leaks test-set statistics
# and scales the two splits with different means/variances.
sc = StandardScaler()
feature_train = sc.fit_transform(feature_train)
feature_test = sc.transform(feature_test)
target_train = np.array(target_train)
target_test = np.array(target_test)

# Convert numpy arrays to torch tensors (float32, as expected by nn.Linear
# and nn.BCELoss).
feature_train = torch.from_numpy(feature_train.astype(np.float32))
feature_test = torch.from_numpy(feature_test.astype(np.float32))
target_train = torch.from_numpy(target_train.astype(np.float32))
target_test = torch.from_numpy(target_test.astype(np.float32))
# 1) model build
# The sigmoid activation is delegated to torch.sigmoid.
class LogisticRegression(nn.Module):
    """Logistic regression: one linear layer followed by a sigmoid.

    Maps `input_dim` features to a probability in (0, 1).
    """

    def __init__(self, input_dim):
        super(LogisticRegression, self).__init__()
        # A single linear layer producing one logit per sample.
        self.linear = nn.Linear(input_dim, 1)

    def forward(self, x):
        # Compute the logit first, then squash it into (0, 1).
        logits = self.linear(x)
        return torch.sigmoid(logits)
# Instantiate the model with one input per kept feature (2 here:
# sepal length and sepal width).
model = LogisticRegression(n_features)
# 2) loss and optimizer
learning_rate = 0.01
# BCE stands for Binary Cross Entropy — the standard loss for binary
# classification when the model already outputs sigmoid probabilities.
criterion = nn.BCELoss()
# SGD stands for stochastic gradient descent
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
# 3) Training loop: full-batch gradient descent for a fixed number of epochs.
epochs = 100
for epoch in range(epochs):
    # Forward pass: predicted probabilities, then the BCE loss.
    predictions = model(feature_train)
    loss = criterion(predictions, target_train)
    # Backward pass: accumulate gradients of the loss w.r.t. the parameters.
    loss.backward()
    # Apply one SGD update, then clear gradients for the next iteration.
    optimizer.step()
    optimizer.zero_grad()
    # Report progress every 10 epochs.
    if (epoch + 1) % 10 == 0:
        print(f'epoch {epoch + 1}: loss = {loss:.8f}')
# Evaluate accuracy on the held-out test set (no gradients needed).
with torch.no_grad():
    probabilities = model(feature_test)
    # Threshold at 0.5 by rounding the sigmoid outputs to 0 or 1.
    predicted_classes = probabilities.round()
    correct = predicted_classes.eq(target_test).sum()
    acc = correct / float(target_test.shape[0])
    print(f'accuracy = {acc: .4f}')
在一个 Model 里最重要的就是 __init__ 跟 forward() 了,因为它们分别宣告了要层层传递的部分和要执行的流程。也就是说,我们在上面的例子可以看到 forward()
里面说明了我们实际上是先做一次 linear 计算,之後才做 sigmoid。这也代表着,如果今天遇到一个未知网路,我们都可以利用撰写者写的 forward()
function 去理解这个神经网路是如何传递资料的,因此整个 Model 最重要的可以说是这个部分了。
<<: Day-22 树(Tree), 二元搜寻树(Binary Search Tree)
>>: Day18-TypeScript(TS)的类别(Class)继承(Inheritance)
来到了第七天,今天让我们来一起看看Route 53吧 Route 53帮那些事情? 有了Route ...
爬虫原理: 抓取资料->分析结构->取出要的结构文字->输出想要的格式 程序码: ...
Grid是什麽 Grid是砖墙式版面,使用二维的排版方式,与flexbox不同的地方是Grid一次可...
我们在这里用到了文字编辑器, 我们使用的是CKEditor, 可以到 这边 下载 也可以参照 官方文...
最後一个 Plugin 就是我们经常会使用到的表单,因为样式都被清除了,必须全部重写,如果我们使用...