Tuesday, 24 September 2019

TensorFlow RNN: Text Generation Using LSTM

Text generation can be done with a plain DNN by feeding it a sequence of words and treating the next word as the class to predict. An RNN, which models the time dimension explicitly, does much better at this job, but basic RNN neurons make the network suffer from the vanishing gradients problem; an LSTM network solves it. The following source code learns to generate a few short sentences.
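
The two training sentences used below, "british gray is cat" and "white samoyed is dog", are first encoded as word indices. A minimal sketch of that encoding (the vocab dict and encode helper are illustrative, not part of the code below; the indices match the data comment in it):

vocab = {"british":0, "gray":1, "is":2, "cat":3,
         "white":4, "samoyed":5, "dog":6};

def encode(Sentence):
  return [vocab[W] for W in Sentence.split()];

print(encode("british gray is cat"));  #[0, 1, 2, 3]
print(encode("white samoyed is dog")); #[4, 5, 2, 6]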

Source code:
import tensorflow as tf; #TF 1.x graph-mode API (tf.compat.v1 under TF 2)
tf.reset_default_graph();

#data
'''
two sentences encoded as word indices; x is the first 3 words and
y is the same sequence shifted left by one (next-word targets);
the index in parentheses appears only as a target, never as input

          t0       t1       t2
sentence: british  gray     is   cat
x:        0        1        2    (3)
y:        1        2        3
sentence: white    samoyed  is   dog
x:        4        5        2    (6)
y:        5        2        6
'''
Bsize = 2; #batch size: 2 sentences
Times = 3; #time steps per sequence
Max_X = 5; #largest index in X, used to normalise inputs
Max_Y = 6; #largest index in Y, used to normalise targets

X = [[[0],[1],[2]], [[4],[5],[2]]];
Y = [[[1],[2],[3]], [[5],[2],[6]]];
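
#X/Y implement the next-word scheme above: y at step t equals x at step t+1;
#a minimal sketch of building one (x, y) pair from an index list
#(make_xy is illustrative only and unused by the rest of the script):
def make_xy(Tokens):
  return [[T] for T in Tokens[:-1]], [[T] for T in Tokens[1:]];
#make_xy([0,1,2,3]) gives ([[0],[1],[2]], [[1],[2],[3]]), i.e. (X[0], Y[0])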

#normalise
for I in range(len(X)):
  for J in range(len(X[I])):
    X[I][J][0] /= Max_X;

for I in range(len(Y)):
  for J in range(len(Y[I])):
    Y[I][J][0] /= Max_Y;
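
#after normalisation the indices lie in [0,1], matching the sigmoid output:
#X[0] = [[0.0],[0.2],[0.4]], Y[0] = [[1/6],[2/6],[3/6]]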

#model
Input    = tf.placeholder(tf.float32, [Bsize,Times,1]); #[batch, time steps, features]
Expected = tf.placeholder(tf.float32, [Bsize,Times,1]);

#single LSTM layer (alternative; if enabled, use Hidden1 in place of Hidden2 below)
'''
Layer1   = tf.keras.layers.LSTM(20, return_sequences=True); #per-step outputs, needed to match Expected
Hidden1  = Layer1(Input);
'''

#multi LSTM layers
#'''
Layers = tf.keras.layers.RNN(
  [
    tf.keras.layers.LSTMCell(30), #hidden layer 1
    tf.keras.layers.LSTMCell(20)  #hidden layer 2
  ],
  return_sequences=True #keep the output at every time step
);
Hidden2 = Layers(Input);
#'''
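#Hidden2 has shape [Bsize,Times,20]; equivalently (a sketch, not in the
#original) the stack could be written as two full-sequence LSTM layers:
#  H1      = tf.keras.layers.LSTM(30, return_sequences=True)(Input);
#  Hidden2 = tf.keras.layers.LSTM(20, return_sequences=True)(H1);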

Weight3  = tf.Variable(tf.random_uniform([20,1], -1,1));
Bias3    = tf.Variable(tf.random_uniform([   1], -1,1));
Output   = tf.sigmoid(tf.matmul(Hidden2,Weight3) + Bias3); #project each 20-unit step vector to 1 value: [Bsize,Times,20] x [20,1] -> [Bsize,Times,1]

Loss     = tf.reduce_sum(tf.square(Expected-Output)); #sum of squared errors over all steps
Optim    = tf.train.GradientDescentOptimizer(1e-1);
Training = Optim.minimize(Loss);

#train
Sess = tf.Session();
Init = tf.global_variables_initializer();
Sess.run(Init);

Feed   = {Input:X, Expected:Y};
Epochs = 10000;

for I in range(Epochs): #number of feeds, 1 feed = 1 batch
  if I%(Epochs//10)==0: #log the loss 10 times over the run
    Lossvalue = Sess.run(Loss,Feed);
    print("Loss:",Lossvalue);
  #end if

  Sess.run(Training,Feed);
#end for

Lastloss = Sess.run(Loss,Feed);
print("Loss:",Lastloss,"(Last)");

#eval
Results = Sess.run(Output,Feed).tolist();
print("\nEval:");
for I in range(len(Results)):
  for J in range(len(Results[I])):
    for K in range(len(Results[I][J])):
      Results[I][J][K] = round(Results[I][J][K]*Max_Y); #denormalise back to a word index
#end for
print(Results);

print("\nDone.");
#eof
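
The evaluated results are rounded word indices. A minimal sketch of decoding them back to words, runnable right after the script above (the index2word dict is illustrative; the indices follow the data comment in the code):

index2word = {0:"british", 1:"gray", 2:"is", 3:"cat",
              4:"white", 5:"samoyed", 6:"dog"};
for Seq in Results:
  print(" ".join(index2word[V[0]] for V in Seq));
#if training has converged, this prints "gray is cat" and "samoyed is dog"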

Colab link:
https://colab.research.google.com/drive/1C4jZfMb0YLoLWDj5T6gecjjJHjrYpdJc
