mirror of
https://github.com/apachecn/ailearning.git
synced 2026-04-24 02:23:45 +08:00
2020-10-19 21:48:57
This commit is contained in:
@@ -14,7 +14,7 @@
|
||||
|
||||

|
||||
|
||||
```
|
||||
```py
|
||||
import torch
|
||||
from torch.autograd import Variable
|
||||
from torch import nn
|
||||
@@ -60,7 +60,7 @@ plt.show()
|
||||
|
||||
这里就教你如何构建带有 BN 的神经网络的. BN 其实可以看做是一个 layer ( BN layer ). 我们就像平时加层一样加 BN layer 就好了. 注意, 我还对输入数据进行了一个 BN 处理, 因为如果你把输入数据看成是 从前面一层来的输出数据, 我们同样也能对它进行 BN.
|
||||
|
||||
```
|
||||
```py
|
||||
class Net(nn.Module):
|
||||
def __init__(self, batch_normalization=False):
|
||||
super(Net, self).__init__()
|
||||
@@ -108,7 +108,7 @@ nets = [Net(batch_normalization=False), Net(batch_normalization=True)]
|
||||
|
||||
训练的时候, 这两个神经网络分开训练. 训练的环境都一样.
|
||||
|
||||
```
|
||||
```py
|
||||
opts = [torch.optim.Adam(net.parameters(), lr=LR) for net in nets]
|
||||
|
||||
loss_func = torch.nn.MSELoss()
|
||||
@@ -132,7 +132,7 @@ for epoch in range(EPOCH):
|
||||
|
||||

|
||||
|
||||
```
|
||||
```py
|
||||
f, axs = plt.subplots(4, N_HIDDEN + 1, figsize=(10, 5))
|
||||
|
||||
def plot_histogram(l_in, l_in_bn, pre_ac, pre_ac_bn):
|
||||
|
||||
Reference in New Issue
Block a user