Description
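The frame `data` is not defined in this snippet; its columns ('variance', 'skewness', 'class') match the UCI banknote-authentication set, so the loading sketch below assumes that file. The filename and the column names passed to read_csv are assumptions, not part of the original code.

import pandas as pd

# Assumed: raw UCI banknote-authentication CSV with no header row
cols = ['variance', 'skewness', 'curtosis', 'entropy', 'class']
data = pd.read_csv('data_banknote_authentication.txt', header=None, names=cols)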
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split

x = data[['variance', 'skewness']].values
y = data['class'].values
# The perceptron update below expects labels in {-1, +1}, so remap the 0/1 classes
y = np.where(y == 0, -1, 1)
# Standardise the features (zero mean, unit variance) before splitting
x = (x - np.mean(x, axis=0)) / np.std(x, axis=0)

X_train, X_test, y_train, y_test = train_test_split(x, y)
X_train.shape, X_test.shape, y_train.shape, y_test.shape

plt.scatter(x[:, 0], x[:, 1], c=y)
plt.show()
Perceptron algorithm
def perceptron(x, y, lr=0.1, n_iters=100):
    # Rosenblatt perceptron: update only on misclassified points (labels must be +/-1)
    w = np.zeros(x.shape[1])
    b = 0.0
    for _ in range(n_iters):
        for i in range(x.shape[0]):
            # Misclassified when y_i * (w.x_i + b) <= 0
            if y[i] * (np.dot(x[i], w) + b) <= 0:
                w += lr * y[i] * x[i]
                b += lr * y[i]
    return w, b
perceptron_w, perceptron_b = perceptron(x, y)
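A quick sanity check, not in the original, is to threshold the learned linear score at zero and measure training accuracy on the standardised data:

pred = np.where(np.dot(x, perceptron_w) + perceptron_b >= 0, 1, -1)
print('Perceptron training accuracy:', np.mean(pred == y))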
Plot decision boundary for Perceptron
plt.scatter(x[:, 0], x[:, 1], c=y)
xlim = plt.gca().get_xlim()
ylim = plt.gca().get_ylim()
x_boundary = np.linspace(xlim[0], xlim[1])
# Boundary where w.x + b = 0, i.e. y = -(w0/w1)*x - b/w1
y_boundary = -(perceptron_w[0] / perceptron_w[1]) * x_boundary - (perceptron_b / perceptron_w[1])
plt.plot(x_boundary, y_boundary, color='black')
plt.xlim(xlim)
plt.ylim(ylim)
plt.show()
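For comparison, and purely as an addition here, scikit-learn's Perceptron can be fit on the held-out split created earlier (the features were standardised before splitting, so X_train and X_test are already scaled):

from sklearn.linear_model import Perceptron

clf = Perceptron()
clf.fit(X_train, y_train)
print('sklearn Perceptron test accuracy:', clf.score(X_test, y_test))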
Adaline algorithm
def adaline(x, y, lr=0.1, n_iters=100):
    # ADALINE: batch gradient descent on the squared error of the linear output
    w = np.zeros(x.shape[1])
    b = 0.0
    for _ in range(n_iters):
        output = np.dot(x, w) + b
        errors = y - output
        # Average the gradient over the samples so lr=0.1 stays stable on the full batch
        w += lr * np.dot(x.T, errors) / x.shape[0]
        b += lr * errors.sum() / x.shape[0]
    return w, b
adaline_w, adaline_b = adaline(x, y)
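The same training-accuracy check, again an addition rather than part of the original, works for Adaline by thresholding its linear output at zero:

pred = np.where(np.dot(x, adaline_w) + adaline_b >= 0, 1, -1)
print('Adaline training accuracy:', np.mean(pred == y))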
Plot decision boundary for Adaline
plt.scatter(x[:, 0], x[:, 1], c=y)
xlim = plt.gca().get_xlim()
ylim = plt.gca().get_ylim()
x_boundary = np.linspace(xlim[0], xlim[1])
y_boundary = -(adaline_w[0] / adaline_w[1]) * x_boundary - (adaline_b / adaline_w[1])
plt.plot(x_boundary, y_boundary, color='black')
plt.xlim(xlim)
plt.ylim(ylim)
plt.show()
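Because Adaline minimises a squared error, its gradient-descent weights can be sanity-checked against the closed-form least-squares fit on the same +/-1 targets; this comparison is an addition, and the size of the discrepancy depends on the learning rate and iteration count.

X_aug = np.hstack([x, np.ones((x.shape[0], 1))])  # append a bias column
ls_coef, *_ = np.linalg.lstsq(X_aug, y, rcond=None)
print('least-squares w, b:', ls_coef[:2], ls_coef[2])
print('adaline       w, b:', adaline_w, adaline_b)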