Implementing a Multilayer Perceptron (Feedforward Neural Network) from Scratch

1. First, we need a data iterator that produces the inputs (and labels) fed to the network.

import random
import torch


torch.manual_seed(42)  # set the random seed for reproducibility

# Generate the inputs X and the one-hot labels Y
def synthetic_data(w, b, num_examples):  # @save
    X, Y = [], []
    for i in range(num_examples):
        x = torch.normal(0, 1, (1, len(w)))
        y = torch.matmul(x, w) + b
        # One-hot encode the label: class 0 if the linear score is positive, class 1 otherwise
        if y.item() > 0:
            y = torch.tensor([1, 0])
        else:
            y = torch.tensor([0, 1])
        # y += torch.normal(0, 0.01, y.shape)  # optional noise (unused here)
        x = x.tolist()
        y = y.tolist()
        X.append(x[0])
        Y.append(y)
    return torch.tensor(X), torch.tensor(Y)

true_w = torch.tensor([2, -3.4, 5])
true_b = 4.2

features, labels = synthetic_data(true_w, true_b, 1000)
test_features, test_labels = synthetic_data(true_w, true_b, 100)

# Data iterator
def data_iter(batch_size, features, labels):
    num_examples = len(features)
    indices = list(range(num_examples))
    # The examples are read in random order, with no particular structure
    random.shuffle(indices)
    for i in range(0, num_examples, batch_size):
        batch_indices = torch.tensor(
            indices[i: min(i + batch_size, num_examples)])
        yield features[batch_indices], labels[batch_indices]

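To sanity-check the data pipeline, here is a minimal sketch (assuming synthetic_data and data_iter defined above have already been run) that pulls one mini-batch and prints its shapes:

# Minimal sketch: grab a single mini-batch to verify shapes
batch_x, batch_y = next(data_iter(10, features, labels))
print(batch_x.shape)  # expected: torch.Size([10, 3]) -- 10 examples, 3 features each
print(batch_y.shape)  # expected: torch.Size([10, 2]) -- 10 one-hot labels
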
2. Define the network structure: a 3-dimensional input layer, three hidden layers with 4, 5, and 4 nodes, and a 2-dimensional output layer. The code is as follows.

class FNN:
    """
    Feedforward neural network
    """

    def __init__(self, input_nodes, hidden_nodes, output_nodes, lr):
        self.inodes = input_nodes  # number of nodes in the input layer
        self.hnodes1, self.hnodes2, self.hnodes3 = hidden_nodes  # nodes in each hidden layer
        self.onodes = output_nodes  # number of nodes in the output layer
        self.wih = torch.normal(0.0, 1, (self.hnodes1, self.inodes))  # input layer -> hidden layer 1
        self.whh1 = torch.normal(0.0, 1, (self.hnodes2, self.hnodes1))  # hidden layer 1 -> hidden layer 2
        self.whh2 = torch.normal(0.0, 1, (self.hnodes3, self.hnodes2))  # hidden layer 2 -> hidden layer 3
        self.who = torch.normal(0.0, 1, (self.onodes, self.hnodes3))  # hidden layer 3 -> output layer
        # print("self.wih.shape", self.wih.shape)
        # print("self.whh1.shape", self.whh1.shape)
        # print("self.whh2.shape", self.whh2.shape)
        # print("self.who.shape", self.who.shape)
        self.lr = lr  # learning rate
        # Activation function: sigmoid, so that the weight updates in ff_bp,
        # which use the derivative output * (1 - output), are consistent with it
        self.activation = torch.sigmoid

    def ff_bp(self, inputs, targets):
        """
        Forward pass and backpropagation
        """
        inputs = inputs.T  # transpose the inputs to column vectors
        targets = targets.T  # transpose the gold labels

        # Forward pass through the network
        hidden1 = torch.matmul(self.wih, inputs)
        hidden1_output = self.activation(hidden1)

        hidden2 = torch.matmul(self.whh1, hidden1_output)
        hidden2_output = self.activation(hidden2)

        hidden3 = torch.matmul(self.whh2, hidden2_output)
        hidden3_output = self.activation(hidden3)

        final = torch.matmul(self.who, hidden3_output)
        final_output = self.activation(final)

        # Backpropagation
        # Compute the output error and propagate it back layer by layer
        output_error = targets - final_output
        hidden3_error = torch.matmul(self.who.T, output_error)
        hidden2_error = torch.matmul(self.whh2.T, hidden3_error)
        hidden1_error = torch.matmul(self.whh1.T, hidden2_error)

        # Update the weights with the learning rate; output * (1 - output) is the sigmoid derivative
        self.who += self.lr * torch.matmul((output_error * final_output * (1.0 - final_output)), hidden3_output.T)
        self.whh2 += self.lr * torch.matmul((hidden3_error * hidden3_output * (1.0 - hidden3_output)), hidden2_output.T)
        self.whh1 += self.lr * torch.matmul((hidden2_error * hidden2_output * (1.0 - hidden2_output)), hidden1_output.T)
        self.wih += self.lr * torch.matmul((hidden1_error * hidden1_output * (1.0 - hidden1_output)), inputs.T)

    def query(self, inputs):
        # Inference only, roughly equivalent to model.eval() mode
        inputs = inputs.T
        hidden1 = torch.matmul(self.wih, inputs)
        hidden1_output = self.activation(hidden1)

        hidden2 = torch.matmul(self.whh1, hidden1_output)
        hidden2_output = self.activation(hidden2)

        hidden3 = torch.matmul(self.whh2, hidden2_output)
        hidden3_output = self.activation(hidden3)

        final = torch.matmul(self.who, hidden3_output)
        final_output = self.activation(final)

        return final_output

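For reference, every weight update performed in ff_bp follows the classic rule below (this simply restates the code above): for a layer with error E propagated back to it, output O, previous-layer output O_prev, and learning rate η, with O ⊙ (1 − O) being the derivative of the sigmoid activation,

$$W \leftarrow W + \eta \,\bigl(E \odot O \odot (1 - O)\bigr)\, O_{\text{prev}}^{\top}$$
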
3. Putting it all together: training and evaluation

# Instantiate the network
nn = FNN(3, (4, 5, 4), 2, 0.001)
epochs = 15  # number of training epochs
# Training loop
for epoch in range(epochs):
    for x, y in data_iter(1, features, labels):
        nn.ff_bp(x, y)

result = []  # list of per-example correctness flags
# Query the network on the test set and convert the outputs back to one-hot labels
for x, y in data_iter(1, test_features, test_labels):
    label = nn.query(x)
    pos = torch.argmax(label)
    # print(y.shape)
    if pos == 0:
        label = torch.tensor([[1, 0]])
    else:
        label = torch.tensor([[0, 1]])
    # print(label, y)
    if torch.equal(label, y):
        result.append(1)
    else:
        result.append(0)

total = sum(result)
print("Accuracy: {:.2f}".format(total / len(result)))
