tf.nn.bidirectional_dynamic_rnn和MultiRNNCell构建双向多层RNN(LSTM)

版权声明:微信公众号:数据挖掘与机器学习进阶之路。本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/u013230189/article/details/82777924
"""Demo: build a bidirectional multi-layer RNN (LSTM) with
tf.nn.bidirectional_dynamic_rnn and MultiRNNCell (TensorFlow 1.x, tf.contrib API).
"""
import tensorflow as tf

import numpy as np

# Input data: batch of 10 sequences, 5 time steps each, 5 features per step.
X = np.random.randn(10, 5, 5)

# Zero-pad the second sample from time step 2 onward, so its effective
# sequence length is 2 (steps 0 and 1 remain).
# NOTE(review): the original comment claimed length 3, but X[1, 2:] = 0
# keeps only the first two steps.
X[1, 2:] = 0

# Three stacked LSTM layers per direction: forward cells use 3 hidden
# units, backward cells use 4, so the two directions are distinguishable
# in the printed shapes below.
stacked_rnn = []
stacked_bw_rnn = []
for _ in range(3):
    stacked_rnn.append(tf.contrib.rnn.BasicLSTMCell(3))
    stacked_bw_rnn.append(tf.contrib.rnn.BasicLSTMCell(4))

mcell = tf.contrib.rnn.MultiRNNCell(stacked_rnn)
mcell_bw = tf.contrib.rnn.MultiRNNCell(stacked_bw_rnn)

# Alternative API taking lists of cells per direction:
# bioutputs, output_state_fw, output_state_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn([mcell], [mcell_bw], X,
# dtype=tf.float64)

# bidirectional_dynamic_rnn returns (outputs, output_states), where
# output_states is a (forward_state, backward_state) pair. The original
# name `output_state_fw` was misleading — it holds BOTH directions, as
# the [0]/[1] indexing below shows.
bioutputs, output_states = tf.nn.bidirectional_dynamic_rnn(mcell, mcell_bw, X,
                                                           dtype=tf.float64)

print(bioutputs[0].shape)  # (10, 5, 3), forward RNN outputs

print(bioutputs[1].shape)  # (10, 5, 4), backward RNN outputs

print(len(output_states))  # 2: (forward, backward) state tuple

print(len(output_states[0]))  # 3: forward RNN has three stacked layers

print(len(output_states[1]))  # 3: backward RNN has three stacked layers

print(output_states[0][0].h.shape)  # (10, 3), h state of the 1st forward LSTM layer

print(output_states[0][1].h.shape)  # (10, 3), h state of the 2nd forward LSTM layer

print(output_states[0][0].c.shape)  # (10, 3), c state of the 1st forward LSTM layer

print(output_states[1][0].h.shape)  # (10, 4), h state of the 1st backward LSTM layer

猜你喜欢

转载自blog.csdn.net/u013230189/article/details/82777924