TensorFlow initialization and gradient computation

Let's go straight to the code; it should be clear at a glance!

import tensorflow as tf
import numpy as np

# Random initialization
v1 = tf.Variable(tf.random.normal(shape=[4, 3], mean=0, stddev=1))

# Working together with numpy
a = np.random.random((3, 3))
# As a constant
v2 = tf.constant(a.astype('float32'))
# As a variable, option 1: directly from the numpy array
v3 = tf.Variable(a.astype('float32'))
# As a variable, option 2: from an existing tensor
v4 = tf.Variable(v2)

# Special initializers such as zeros and ones
v5 = tf.Variable(tf.ones([4, 3]))

# Convert a numpy array directly to a tensor
v6 = tf.convert_to_tensor(a, dtype=tf.float32)

# print(a)
# print(v1)
# print(v2)
# print(v3)
# print(v4)
# print(v5)
# print(v6)

# print((v6.numpy() == v2.numpy()).all())
# print((v6.numpy() == v3.numpy()).all())
# print((v6.numpy() == v4.numpy()).all())
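# Note for the gradient examples below: tf.GradientTape automatically watches
# trainable tf.Variable objects, while plain tensors/constants are only tracked
# if they are registered explicitly with g.watch().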

# Attempt to differentiate with respect to a constant
with tf.GradientTape() as g:  # the tape records operations for differentiation
    y = v2 * v2
dy_dx = g.gradient(y, v2)  # gradient of y with respect to v2
print(dy_dx)  # None: the constant v2 was never watched

# Watch the constant explicitly
with tf.GradientTape() as g:  # the tape records operations for differentiation
    g.watch(v2)
    y = v2 * v2
dy_dx = g.gradient(y, v2)  # elementwise derivative of x*x, i.e. 2*v2
print(dy_dx)

# Differentiate with respect to a variable (watched automatically)
with tf.GradientTape() as g:  # the tape records operations for differentiation
    y = v4 * v4
dy_dx = g.gradient(y, v4)  # again 2*v4
print(dy_dx)
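
Since y = x * x, the expected elementwise gradient is 2 * x. As a quick sanity check (a minimal sketch that assumes the script above has just been run):

# Compare the tape's result for the variable case against the analytic derivative
print(np.allclose(dy_dx.numpy(), 2 * v4.numpy()))  # expected: True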