Keras Regression

Linear Regression

  1. Generate data
  2. Configure model and train
  3. Visualize
In [1]:
# Import libraries
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.callbacks import History
import numpy as np
import matplotlib.pyplot as plt
Using TensorFlow backend.
In [2]:
%matplotlib inline
# Generate data
x_data = np.random.random((80, 1))
y_data = x_data * 3.14 + 0.001516 + np.random.random((80, 1)) * 0.3
plt.plot(x_data, y_data, '.')
Out[2]:
[<matplotlib.lines.Line2D at 0x7f2bbd29f9d0>]
In [3]:
# Define the model
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='sgd', loss='mse')
# Train
history = History()
model.fit(x_data, y_data, epochs=1000, batch_size=32, verbose=0, callbacks=[history])
losses = history.history['loss']
print(losses[-1])
0.00802586562932
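The History callback records the loss at every epoch, so convergence can be checked rather than only reading off the final value. A minimal sketch, reusing the names from the cell above:

# Plot the per-epoch training loss recorded by the History callback
plt.plot(losses)
plt.xlabel('Epoch')
plt.ylabel('MSE loss')
plt.title('Training loss')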
In [4]:
# Visualize
x = np.linspace(0, 1).reshape(-1, 1)  # column vector, matching input_dim=1
y = model.predict(x)
plt.plot(x_data, y_data, '.', label='Sample Data')
plt.plot(x, y, '-', label='Regression Line')
plt.legend()
Out[4]:
<matplotlib.legend.Legend at 0x7f2bbb009410>
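Since the model is a single Dense(1) layer, its learned parameters can be read back with get_weights() and compared against the generating values: slope 3.14 and intercept 0.001516 plus the mean of the uniform noise (roughly 0.15). A rough check might look like this:

# Read back the learned slope and intercept from the single Dense layer
w, b = model.get_weights()
print('slope:     %.4f (true 3.14)' % w[0][0])
print('intercept: %.4f (true ~0.15 including noise mean)' % b[0])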

Nonlinear Regression

In [5]:
# Generate data
x_data = np.random.random((1000, 1))
y_data = x_data ** 3 - 2 * x_data ** 2 + x_data
plt.plot(x_data, y_data, '.')
Out[5]:
[<matplotlib.lines.Line2D at 0x7f2bb8564a10>]
In [6]:
# Define and train the model
model = Sequential()
model.add(Dense(32, input_dim=1, activation='relu'))
model.add(Dense(1))
model.compile(optimizer='sgd', loss='mse')
history = History()
model.fit(x_data, y_data, epochs=8000, batch_size=128, verbose=0, callbacks=[history])
losses = history.history['loss']
print(losses[-1])
3.76308517589e-05
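Because the target here is the noise-free polynomial y = x³ - 2x² + x, the fit can also be checked numerically by comparing predictions against the analytic curve on a dense grid, for example:

# Compare predictions against the analytic cubic on a dense grid
x_test = np.linspace(0, 1, 200).reshape(-1, 1)
y_true = x_test ** 3 - 2 * x_test ** 2 + x_test
y_pred = model.predict(x_test)
print('mean squared error vs. true curve: %g' % np.mean((y_pred - y_true) ** 2))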
In [7]:
# Visualize
x = np.linspace(0, 1).reshape(-1, 1)  # column vector, matching input_dim=1
y = model.predict(x)
plt.plot(x_data, y_data, '.', label='Sample Data')
plt.plot(x, y, label='Regression Curve')
plt.legend()
Out[7]:
<matplotlib.legend.Legend at 0x7f2bb2725a90>
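For a visual sanity check, the true cubic can be overlaid on the same axes; if training converged, the fitted curve should be nearly indistinguishable from it on [0, 1]. A short sketch, reusing x and y from the previous cell:

# Overlay the analytic cubic for comparison with the fitted curve
plt.plot(x_data, y_data, '.', label='Sample Data')
plt.plot(x, y, label='Regression Curve')
plt.plot(x, x ** 3 - 2 * x ** 2 + x, '--', label='True Curve')
plt.legend()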