Multi-Layer Neural Network Implementation
Tensor-based implementation
Initialize the parameters of each layer, then build the forward pass layer by layer inside a gradient tape (for automatic differentiation).
import tensorflow as tf
from tensorflow.keras import layers

# Input: a batch of 3 samples, each a flattened 28x28 image (784 features)
x = tf.random.normal([3, 784])

# Weights are drawn from a truncated normal distribution. Both weights and
# biases are wrapped in tf.Variable so tf.GradientTape tracks them
# automatically (plain tensors are not watched by default).
w1 = tf.Variable(tf.random.truncated_normal([784, 256], stddev=0.5))
b1 = tf.Variable(tf.zeros([256]))
w2 = tf.Variable(tf.random.truncated_normal([256, 128], stddev=0.5))
b2 = tf.Variable(tf.zeros([128]))
w3 = tf.Variable(tf.random.truncated_normal([128, 64], stddev=0.5))
b3 = tf.Variable(tf.zeros([64]))
w4 = tf.Variable(tf.random.truncated_normal([64, 10], stddev=0.5))
b4 = tf.Variable(tf.zeros([10]))
# Run the 4-layer forward pass inside a gradient tape so that gradients
# of the parameters can be computed afterwards
with tf.GradientTape() as tape:
    # Layer 1: [3, 784] -> [3, 256]
    o1 = tf.matmul(x, w1) + b1
    o1 = tf.nn.relu(o1)
    # Layer 2: [3, 256] -> [3, 128]
    o2 = tf.matmul(o1, w2) + b2
    o2 = tf.nn.relu(o2)
    # Layer 3: [3, 128] -> [3, 64]
    o3 = tf.matmul(o2, w3) + b3
    o3 = tf.nn.relu(o3)
    # Output layer: [3, 64] -> [3, 10], no activation
    o4 = tf.matmul(o3, w4) + b4
print(o4)
out:
tf.Tensor(
[[-184.37228   250.41742   790.16876  -711.15125  -268.63834   246.24713
  -434.43478   547.58203   591.3756   1196.3761  ]
 [-841.72205  -416.0834    166.62344  -616.033    -439.455      59.845196
   318.67456    60.858128  383.62747  1009.4866  ]
 [  -9.59348   297.99423  -370.27063  -913.0671    -73.28527   449.61548
  -586.05334  -529.05145   674.3345    610.58124 ]], shape=(3, 10), dtype=float32)
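The tape above only records the forward pass; the point of tf.GradientTape is that gradients of a loss with respect to every tf.Variable can then be queried. A minimal sketch of that next step, assuming dummy one-hot labels y and a mean-squared-error loss (neither appears in the original):

y = tf.one_hot([0, 1, 2], depth=10)           # assumed dummy labels, one per sample
with tf.GradientTape() as tape:
    o1 = tf.nn.relu(tf.matmul(x, w1) + b1)
    o2 = tf.nn.relu(tf.matmul(o1, w2) + b2)
    o3 = tf.nn.relu(tf.matmul(o2, w3) + b3)
    o4 = tf.matmul(o3, w4) + b4
    loss = tf.reduce_mean(tf.square(o4 - y))  # assumed MSE loss
# Gradients of the loss with respect to every parameter
grads = tape.gradient(loss, [w1, b1, w2, b2, w3, b3, w4, b4])
w1.assign_sub(0.001 * grads[0])               # one gradient-descent step, assumed lr=0.001

In a real training loop this update would be applied to all eight parameters, typically via an optimizer such as tf.keras.optimizers.SGD.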
Layer-based implementation
Calling the layers directly
import tensorflow as tf
from tensorflow.keras import layers

# Each Dense layer creates and manages its own weights; the input
# dimension is inferred from the data on the first call
fc1 = layers.Dense(256, activation=tf.nn.relu)
fc2 = layers.Dense(128, activation=tf.nn.relu)
fc3 = layers.Dense(64, activation=tf.nn.relu)
fc4 = layers.Dense(10, activation=None)  # output layer, no activation

x = tf.random.normal([3, 256])
o1 = fc1(x)
o2 = fc2(o1)
o3 = fc3(o2)
o4 = fc4(o3)
print(o4)
out:
tf.Tensor(
[[-0.49023932 -0.04196656 -0.5316457  -0.22760229 -0.64782906  0.4603924
  -0.46672398  0.54325604 -0.7902754  -0.15966915]
 [-0.17537238  0.15008762 -0.3496042  -0.17603163  0.50083745 -0.62207043
   0.01685877  0.796759   -0.34656522  1.0465541 ]
 [-0.01338768 -0.16533731  0.11743313 -0.1506098  -0.65836793  0.3003255
   0.21182871  0.22682111  0.33577356  0.33805916]], shape=(3, 10), dtype=float32)
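Unlike the tensor-based version, each Dense layer owns its kernel and bias. A short sketch (not in the original) showing how to inspect them once the first call has built the layers:

for layer in [fc1, fc2, fc3, fc4]:
    for v in layer.trainable_variables:
        # prints e.g. dense/kernel:0 (256, 256) and dense/bias:0 (256,)
        print(v.name, v.shape)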
Encapsulated invocation
Use Sequential as a container.
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras import Sequential

# Wrap the four layers in a Sequential container so a single call
# runs the whole forward pass
model = Sequential([
    layers.Dense(256, activation=tf.nn.relu),
    layers.Dense(128, activation=tf.nn.relu),
    layers.Dense(64, activation=tf.nn.relu),
    layers.Dense(10, activation=None)])

x = tf.random.normal([3, 256])
a = model(x)
print(a)
out:
tf.Tensor(
[[-0.48306665  0.3618398  -0.27131498 -0.80744046 -0.6388896  -0.26316196
   0.8380004   0.341826    0.15942936 -0.19762628]
 [ 0.03107908  0.07116681 -0.49450177 -0.48394847 -0.40307134 -0.2677082
   0.4005356   0.29835856  0.0189686  -0.2250806 ]
 [ 0.55381507  0.6946548  -0.7538996  -1.324675   -0.59704345  0.39796948
   0.6455777   0.02428103 -0.31168345 -0.51364326]], shape=(3, 10), dtype=float32)
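Because model(x) has already built the network, its structure can now be inspected; a sketch, not part of the original:

# The container aggregates the parameters of all four layers
print(len(model.trainable_variables))  # 8: one kernel and one bias per layer
model.summary()                        # tabular overview of layers and parameter counts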