mirror of
https://github.com/apachecn/ailearning.git
synced 2026-04-24 02:23:45 +08:00
2020-10-19 21:48:57
This commit is contained in:
@@ -12,7 +12,7 @@
|
||||
|
||||

|
||||
|
||||
```
|
||||
```py
|
||||
import torch
|
||||
import torch.utils.data as Data
|
||||
import torch.nn.functional as F
|
||||
@@ -42,7 +42,7 @@ loader = Data.DataLoader(dataset=torch_dataset, batch_size=BATCH_SIZE, shuffle=T
|
||||
|
||||
为了对比每一种优化器, 我们给他们各自创建一个神经网络, 但这个神经网络都来自同一个 Net 形式.
|
||||
|
||||
```
|
||||
```py
|
||||
# 默认的 network 形式
|
||||
class Net(torch.nn.Module):
|
||||
def __init__(self):
|
||||
@@ -67,7 +67,7 @@ nets = [net_SGD, net_Momentum, net_RMSprop, net_Adam]
|
||||
|
||||
接下来再创建不同的优化器, 用来训练不同的网络. 并创建一个 loss_func 用来计算误差. 我们用几种常见的优化器, SGD , Momentum , RMSprop , Adam .
|
||||
|
||||
```
|
||||
```py
|
||||
# different optimizers
|
||||
opt_SGD = torch.optim.SGD(net_SGD.parameters(), lr=LR)
|
||||
opt_Momentum = torch.optim.SGD(net_Momentum.parameters(), lr=LR, momentum=0.8)
|
||||
@@ -83,7 +83,7 @@ losses_his = [[], [], [], []] # 记录 training 时不同神经网络的 loss
|
||||
|
||||
接下来训练和 loss 画图.
|
||||
|
||||
```
|
||||
```py
|
||||
for epoch in range(EPOCH):
|
||||
    print('Epoch: ', epoch)
|
||||
for step, (batch_x, batch_y) in enumerate(loader):
|
||||
|
||||
Reference in New Issue
Block a user