$$ \huge{\underline{\textbf{ Minimal TF Keras }}} $$



This post presents a super minimal tf.keras example: a small fully connected network is fit to a one-dimensional sine wave.

Imports

In [1]:
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

Training data

In [2]:
x = np.arange(-1, 1, 0.001)   # 2000 evenly spaced points in [-1, 1)
x = np.array(x, ndmin=2).T    # make it a (2000, 1) column vector
y = np.sin(x * 8)             # targets: a sine wave, y = sin(8x)
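
A note on the reshape: np.array(x, ndmin=2).T turns the 1-D array of 2000 samples into a (2000, 1) column vector, which is the shape Keras expects for a single scalar feature. An equivalent way to build the same arrays (a sketch, not from the original notebook):

x = np.arange(-1, 1, 0.001).reshape(-1, 1)   # 2000 samples as a column vector
y = np.sin(x * 8)                            # same targets as above
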
In [3]:
print('x shape:', x.shape, '\ty shape:', y.shape)
x shape: (2000, 1) 	y shape: (2000, 1)
In [4]:
print(x)
[[-1.   ]
 [-0.999]
 [-0.998]
 ...
 [ 0.997]
 [ 0.998]
 [ 0.999]]
In [5]:
print(y)
[[-0.98935825]
 [-0.99049058]
 [-0.99155951]
 ...
 [ 0.99256499]
 [ 0.99155951]
 [ 0.99049058]]
In [6]:
plt.plot(x, y, label='data')
plt.legend()
[Figure: plot of the training data (x vs. y)]

Create model

In [7]:
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(units=256, activation='relu', input_dim=1))  # hidden layer 1 (scalar input)
model.add(tf.keras.layers.Dense(units=256, activation='relu'))               # hidden layer 2
model.add(tf.keras.layers.Dense(units=1, activation='linear'))               # linear output for regression
model.compile(loss='mse', optimizer='sgd')                                   # mean squared error + plain SGD
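
As an optional sanity check (not in the original post), Keras can print the architecture that was just defined, listing each layer's output shape and parameter count:

model.summary()   # prints layer output shapes and parameter counts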

Plot data and untrained model

In [8]:
plt.plot(x, y, label='data')
plt.plot(x, model.predict(x), label='model')
plt.legend()
[Figure: training data and the untrained model's predictions]

Train model

In [9]:
history = model.fit(x, y, epochs=200, batch_size=50, verbose=0)  # verbose=0 suppresses per-epoch logging
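
As an optional follow-up not shown in the original post, the fit can also be quantified numerically: model.evaluate returns the compiled loss (here MSE) averaged over the data passed in.

mse = model.evaluate(x, y, verbose=0)   # scalar MSE on the training data
print('final training MSE:', mse)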

Plot history

In [10]:
plt.plot(history.history['loss'])
plt.xlabel('Epoch')
plt.ylabel('Loss')
[Figure: training loss vs. epoch]
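
If the loss spans several orders of magnitude, a log-scaled y axis often makes the curve easier to read; a small optional variation on the cell above (not in the original post):

plt.semilogy(history.history['loss'])   # same loss curve, log-scale y axis
plt.xlabel('Epoch')
plt.ylabel('Loss (log scale)')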

Plot data and trained model

In [11]:
plt.plot(x, y, label='data')
plt.plot(x, model.predict(x), label='model')
plt.legend()
[Figure: training data and the trained model's predictions]
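
A final optional sketch, not part of the original post: the trained network can be saved to disk and reloaded later with tf.keras; the file name minimal_model.h5 is just a placeholder.

model.save('minimal_model.h5')                              # hypothetical file name
restored = tf.keras.models.load_model('minimal_model.h5')   # reload for later use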