Thursday, 10 October 2019

TensorFlow: Simple Single Neuron Linear Regressor

A single neuron with a linear (identity) activation — equivalent to having no activation at all — can perform linear regression. The following listing implements this task in low-level TensorFlow code.

Source code:
# IPython/Colab magics: pin the TensorFlow 2.x runtime and clear every
# variable from the interactive namespace before the run.
%tensorflow_version 2.x
%reset -f

#libs
import tensorflow              as tf;
from   tensorflow.keras.layers import *;  # NOTE(review): nothing from keras.layers appears to be used below — confirm before removing

#constants
BSIZE = 1;  # batch size; baked into the tf.function input signature of model.__call__

#model
class model(tf.Module):
  """A single linear neuron: Out = X @ W1 + B1 (identity activation)."""

  def __init__(self):
    super().__init__()
    # Weight matrix (2 inputs -> 1 output) and bias, both initialised
    # uniformly at random in [-1, 1).
    self.W1 = tf.Variable(tf.random.uniform([2, 1], -1, 1))
    self.B1 = tf.Variable(tf.random.uniform([1], -1, 1))

  @tf.function(input_signature=[tf.TensorSpec([BSIZE, 2])])
  def __call__(self, X):
    """Forward pass: affine map of a [BSIZE, 2] float32 batch to [BSIZE, 1]."""
    return tf.matmul(X, self.W1) + self.B1

#data — a single training example: input [1, 2] should map to target 3
X = tf.convert_to_tensor([[1, 2]], tf.float32)
Y = tf.convert_to_tensor([[3]], tf.float32)

#train — model instance, log-cosh regression loss, plain SGD
Model = model()
Loss  = tf.losses.LogCosh()
Optim = tf.optimizers.SGD(1e-1)  # learning rate 0.1
Steps = 10

# Training loop: GradientTape + SGD, logging the loss ~10 times over the run.
# Fix: the original condition `I%(Steps/10)==0` used FLOAT modulo, which only
# behaves as intended when Steps is an exact multiple of 10 (e.g. Steps=5
# would log every step, Steps=3 erratically). Use an integer interval instead,
# clamped to at least 1 so small Steps never divides by zero.
LogEvery = max(Steps // 10, 1)

for I in range(Steps):
  # Forward pass and loss are recorded once under the tape; the logged value
  # is the pre-update loss, same as the original (which recomputed it).
  with tf.GradientTape() as T:
    Out       = Model(X)
    Lossvalue = Loss(Y, Out)

  if I % LogEvery == 0:
    print("Loss:", Lossvalue.numpy())

  Grads = T.gradient(Lossvalue, Model.trainable_variables)
  Optim.apply_gradients(zip(Grads, Model.trainable_variables))

# Final evaluation: report the post-training loss and the model's prediction.
Out       = Model(X)
Lossvalue = Loss(Y, Out)
print("Loss:", Lossvalue.numpy(), "(Last)")

print("\nTest")
print(Model(X).numpy())

print("\nDone.")
#eof

No comments:

Post a Comment