
mxnet - Custom Forward Function



1. Inside nn.Sequential, MXNet constructs the forward function automatically; it simply runs the added layers in order. You can also define a flexible forward function of your own, as the MixMLP example below does (contrast it with the sketch after this list).

2. Both nn.Sequential and nn.Dense are subclasses of nn.Block.
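
For contrast, here is a minimal sketch of the automatic case, using only nn.Sequential with no hand-written forward function (the layer sizes mirror the example below and are otherwise arbitrary):

from mxnet import nd
from mxnet.gluon import nn

# nn.Sequential builds the forward function for you:
# each added layer is applied in the order it was added.
seq = nn.Sequential()
seq.add(nn.Dense(3, activation='relu'),
        nn.Dense(4, activation='relu'),
        nn.Dense(5))
seq.initialize()
x = nd.random.uniform(shape=(7, 2))
print(seq(x))  # forward pass generated automatically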

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018

@author: myhaspl
"""
from mxnet import nd
from mxnet.gluon import nn

class MixMLP(nn.Block):
    def __init__(self, **kwargs):
        # Run nn.Block's init method
        super(MixMLP, self).__init__(**kwargs)
        # A Sequential sub-network: two dense layers applied in order
        self.blk = nn.Sequential()
        self.blk.add(nn.Dense(3, activation='relu'),
                     nn.Dense(4, activation='relu'))
        # Output layer, applied by hand in forward()
        self.dense = nn.Dense(5)

    def forward(self, x):
        # Custom forward pass: run the Sequential block,
        # apply an extra relu, then the output layer
        y = nd.relu(self.blk(x))
        print(y)
        return self.dense(y)

net = MixMLP()
print(net)
net.initialize()
x = nd.random.uniform(shape=(7, 2))
net(x)
print(net.blk[0].weight.data())
MixMLP(
  (dense): Dense(None -> 5, linear)
  (blk): Sequential(
    (0): Dense(None -> 3, Activation(relu))
    (1): Dense(None -> 4, Activation(relu))
  )
)

[[9.6452924e-05 0.0000000e+00 2.7557719e-04 0.0000000e+00]
 [6.0751504e-04 0.0000000e+00 1.7357409e-03 0.0000000e+00]
 [5.6857511e-04 0.0000000e+00 1.6244850e-03 0.0000000e+00]
 ...
 [1.7680142e-04 0.0000000e+00 3.3347241e-03 0.0000000e+00]
 [9.5664361e-04 0.0000000e+00 4.8063148e-04 0.0000000e+00]
 [1.8764728e-04 0.0000000e+00 1.7001196e-03 0.0000000e+00]]
<NDArray 7x4 @cpu(0)>

[[ 0.01617834 -0.04664135]
 [-0.0526652   0.03906714]
 [ 0.04872115  0.05109067]]
<NDArray 3x2 @cpu(0)>
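
The 3x2 weight above shows Gluon's deferred initialization at work: the input dimensions printed as None in the network summary are only inferred from the shape of x during the first forward call, so the first Dense(3) layer in blk ends up with a 3x2 weight for the 2-feature input. A minimal sketch of checking the inferred shapes on the net built above:

# After the first forward pass the None input dims are resolved.
print(net.blk[0].weight.shape)  # (3, 2): 2 input features -> 3 units
print(net.dense.weight.shape)   # (5, 4): blk outputs 4 features -> 5 units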
