How to change this RNN text classification code to become text generation code?

0

1

I can do text classification with an RNN, in which the last output of the RNN (rnn_outputs[-1]) is multiplied by the output layer weight and added to a bias. That is, we get a word (the class name) after the last time step T of the RNN.

The problem is that for text generation, I need a word somewhere in the middle of the time dimension, e.g.:

t0  t1    t2  t3
The brown fox jumps

For this example, I have the first 2 words: The, brown.

How do I get the next word, i.e. "fox", using an RNN (LSTM)? How do I convert the following text classification code into text generation code?

Source code (text classification):

import tensorflow as tf

# TF1-style graph mode: clear any graph left over from a previous run
tf.reset_default_graph()

# data
'''
t0      t1      t2
british gray    is => cat (y=0)
0       1       2
white   samoyed is => dog (y=1)
3       4       2
'''
Bsize = 2  # batch size (number of sequences)
Times = 3  # time steps per sequence
Max_X = 4  # largest input token id, used to normalise inputs into [0, 1]
Max_Y = 1  # largest label value, used to normalise labels into [0, 1]

X = [[[0], [1], [2]], [[3], [4], [2]]]
Y = [[0],             [1]            ]

# normalise token ids into [0, 1] so the sigmoid output can match the labels
for Row in X:
    for Step in Row:
        Step[0] /= Max_X

for Row in Y:
    Row[0] /= Max_Y

# model
Inputs   = tf.placeholder(tf.float32, [Bsize, Times, 1])
Expected = tf.placeholder(tf.float32, [Bsize,        1])

# single LSTM layer: returns only the hidden state of the LAST time step,
# which is exactly what classification needs
Layer1  = tf.keras.layers.LSTM(20)
Hidden1 = Layer1(Inputs)

# multi LSTM layers (alternative; swap Hidden1 for Hidden2 below to use it)
'''
Layers = tf.keras.layers.RNN([
    tf.keras.layers.LSTMCell(30),  # hidden 1
    tf.keras.layers.LSTMCell(20)   # hidden 2
])
Hidden2 = Layers(Inputs)
'''

# output layer: project the last hidden state (20 units) to one class value
Weight3 = tf.Variable(tf.random_uniform([20, 1], -1, 1))
Bias3   = tf.Variable(tf.random_uniform([    1], -1, 1))
Output  = tf.sigmoid(tf.matmul(Hidden1, Weight3) + Bias3)

Loss     = tf.reduce_sum(tf.square(Expected - Output))  # sum of squared errors
Optim    = tf.train.GradientDescentOptimizer(1e-1)
Training = Optim.minimize(Loss)

# train — context manager guarantees the session is closed (the original
# leaked it)
with tf.Session() as Sess:
    Sess.run(tf.global_variables_initializer())

    Feed = {Inputs: X, Expected: Y}
    for I in range(1000):  # number of feeds, 1 feed = 1 batch
        if I % 100 == 0:
            print("Loss:", Sess.run(Loss, Feed))
        Sess.run(Training, Feed)

    print("Loss:", Sess.run(Loss, Feed), "(Last)")

    # eval: predicted class values for the training batch
    print("\nEval:")
    print(Sess.run(Output, Feed))

print("\nDone.")
# eof

datdinhquoc

Posted 2019-09-20T15:57:15.957

Reputation: 715

Answers

0

I found out how to switch the code to the text generation task: use 3D inputs (X) and 3D labels (Y), as in the source code below:

Source code:

import tensorflow as tf

# TF1-style graph mode: clear any graph left over from a previous run
tf.reset_default_graph()

# data: each label sequence is the input sequence shifted one step ahead,
# so the network learns to predict the NEXT word at every time step
'''
t0       t1       t2
british  gray     is  cat
0        1        2   (3)  <=x
1        2        3        <=y
white    samoyed  is  dog
4        5        2   (6)  <=x
5        2        6        <=y
'''
Bsize = 2  # batch size (number of sequences)
Times = 3  # time steps per sequence
Max_X = 5  # largest input token id, used to normalise inputs into [0, 1]
Max_Y = 6  # largest label token id, used to normalise labels into [0, 1]

X = [[[0], [1], [2]], [[4], [5], [2]]]
Y = [[[1], [2], [3]], [[5], [2], [6]]]

# normalise token ids into [0, 1] so the sigmoid output can match the labels
for Row in X:
    for Step in Row:
        Step[0] /= Max_X

for Row in Y:
    for Step in Row:
        Step[0] /= Max_Y

# model — unlike classification, the labels are 3D: one target per time step
Input    = tf.placeholder(tf.float32, [Bsize, Times, 1])
Expected = tf.placeholder(tf.float32, [Bsize, Times, 1])

# single LSTM layer (alternative; only yields the last step, so it would
# need return_sequences=True for generation)
'''
Layer1  = tf.keras.layers.LSTM(20)
Hidden1 = Layer1(Input)
'''

# multi LSTM layers; return_sequences=True is the key change versus the
# classification code: it emits one hidden vector PER time step
Layers = tf.keras.layers.RNN(
    [
        tf.keras.layers.LSTMCell(30),  # hidden 1
        tf.keras.layers.LSTMCell(20),  # hidden 2
    ],
    return_sequences=True)
Hidden2 = Layers(Input)

# output layer is applied at every time step (batched matmul over the sequence)
Weight3 = tf.Variable(tf.random_uniform([20, 1], -1, 1))
Bias3   = tf.Variable(tf.random_uniform([    1], -1, 1))
Output  = tf.sigmoid(tf.matmul(Hidden2, Weight3) + Bias3)

Loss     = tf.reduce_sum(tf.square(Expected - Output))  # sum of squared errors
Optim    = tf.train.GradientDescentOptimizer(1e-1)
Training = Optim.minimize(Loss)

# train — context manager guarantees the session is closed (the original
# leaked it)
with tf.Session() as Sess:
    Sess.run(tf.global_variables_initializer())

    Feed   = {Input: X, Expected: Y}
    Epochs = 10000

    for I in range(Epochs):  # number of feeds, 1 feed = 1 batch
        # Epochs//10 keeps the modulus an int; the original Epochs/10 is a
        # float in Python 3, making the test a float-modulo comparison
        if I % (Epochs // 10) == 0:
            print("Loss:", Sess.run(Loss, Feed))
        Sess.run(Training, Feed)

    print("Loss:", Sess.run(Loss, Feed), "(Last)")

    # eval: de-normalise predictions back to (rounded) word ids
    Results = Sess.run(Output, Feed).tolist()
    Results = [[[round(V * Max_Y) for V in Step] for Step in Row]
               for Row in Results]
    print("\nEval:")
    print(Results)

print("\nDone.")
# eof

datdinhquoc

Posted 2019-09-20T15:57:15.957

Reputation: 715