Building a multi-layer dynamic LSTM with tf.nn.dynamic_rnn and MultiRNNCell

import tensorflow as tf
import numpy as np

# batch of 3 sequences, 5 time steps, 6 features per step
X = tf.random_normal(shape=[3, 5, 6], dtype=tf.float32)
X = tf.reshape(X, [-1, 5, 6])  # a no-op here; keeps the [batch, time, features] layout

# build a separate cell instance for each of the 3 layers, 24 hidden units each
stacked_rnn = []
for i in range(3):
    stacked_rnn.append(tf.contrib.rnn.BasicLSTMCell(24))
# cell = tf.nn.rnn_cell.BasicLSTMCell(10)  # other cell types work too, e.g. GRUCell, BasicRNNCell
lstm_multi = tf.contrib.rnn.MultiRNNCell(stacked_rnn)
# state = lstm_multi.zero_state(3, tf.float32)  # optional explicit initial state for batch size 3
# time_major=False: inputs are laid out as [batch, time, features]
output, state = tf.nn.dynamic_rnn(lstm_multi, X, time_major=False, dtype=tf.float32)

# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())  # initialize_all_variables() is deprecated
#     print(output.get_shape())
#     print(sess.run(state))


print(output.shape)  # (3, 5, 24): [batch, time, hidden units of the top layer]

print(len(state))  # 3: one LSTMStateTuple per LSTM layer
# first LSTM layer
print(state[0].h.shape)  # (3, 24): final hidden state h
print(state[0].c.shape)  # (3, 24): final cell state c
# second LSTM layer
print(state[1].h.shape)
print(state[1].c.shape)
# third LSTM layer
print(state[2].h.shape)
print(state[2].c.shape)
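
The prints above only show static graph shapes. To actually evaluate the random input and inspect the resulting tensors, the graph has to be run in a session. A minimal sketch, assuming the TF 1.x code above is in the same script:

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    output_val, state_val = sess.run([output, state])
    print(output_val.shape)      # (3, 5, 24): top-layer outputs at every time step
    print(state_val[0].h.shape)  # (3, 24): final h state of the first layer
    print(state_val[0].c.shape)  # (3, 24): final c state of the first layer

sess.run preserves the nested LSTMStateTuple structure, so the per-layer h and c states come back as NumPy arrays.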
