"""Simple univariate linear regression fitted by batch gradient descent.

Loads (feature, target) pairs from a CSV file, fits theta = (slope, intercept)
by iterating full-batch gradient steps until the parameter change falls below
a tolerance, and live-plots the cost J at every iteration.
"""

import csv
from random import uniform

import matplotlib.pyplot as plt
import numpy as np


def J(X, y, theta):
    """Return the root of the sum of squared residuals, ||X @ theta - y||_2.

    X is (n, 2) with a trailing column of ones (bias term); theta is (2,).
    """
    preds = np.squeeze(np.matmul(X, theta))
    temp = preds - np.squeeze(y)
    # temp @ temp is the scalar sum of squared residuals; np.sum is a no-op
    # on it but kept for robustness to shape quirks.
    return np.sqrt(np.sum(np.matmul(np.transpose(temp), temp)))


#########################################################################
# Read Data
#########################################################################
def load_dataset(pat):
    """Read a two-column CSV (feature, target) with a header row.

    Returns:
        X: (n, 2) float array — feature in column 0, constant 1 in column 1
           so the intercept can be folded into theta.
        y: (n,) float array of targets.
    """
    with open(pat) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        next(csv_reader)  # skip the header row
        X = []  # inputs
        y = []  # outputs
        for row in csv_reader:
            X.append([float(row[0]), 1])
            y.append(float(row[1]))
    return np.asarray(X), np.asarray(y)


#########################################################################
# Gradient Descent method
#########################################################################
def gradient_descent(theta, lr, tol, X, y):
    """Fit theta = (slope, intercept) by full-batch gradient descent.

    Args:
        theta: (2,) initial parameter array; updated IN PLACE each step.
        lr:    learning rate.
        tol:   stop when the L1 change in theta over one step drops below this.
        X:     (n, 2) inputs (feature, 1).
        y:     (n,) targets.

    Returns:
        (theta, Js): the fitted parameters and the per-iteration cost history.
    """
    it = 0
    Js = []
    n = len(y)
    while True:
        # Accumulate the gradient of the mean squared error over all samples.
        t_0 = 0.0
        t_1 = 0.0
        for j in range(n):
            residual = theta[0] * X[j][0] + theta[1] - y[j]
            t_0 += residual * X[j][0]
            t_1 += residual
        t_0 = t_0 / n
        # BUG FIX: was `t_1 = t_0 / len(y)`, which overwrote the intercept
        # gradient with a rescaled slope gradient.
        t_1 = t_1 / n

        theta_old = np.copy(theta)
        theta[0] = theta[0] - lr * t_0
        theta[1] = theta[1] - lr * t_1
        it += 1

        delta = np.sum(np.abs(theta - theta_old))
        # BUG FIX: was reading the global `theta_gd` here and in the print
        # below instead of this function's own `theta` parameter.
        Js.append(J(X, y, theta))
        print('[%d] Solution (Gradient descent): J=%.5f, theta=(%.2f, %.2f), delta=%.5f'
              % (it, Js[-1], theta[0], theta[1], delta))

        # Live plot of the cost evolution (redrawn every iteration).
        plt.ion()
        plt.figure(1)
        plt.plot(range(len(Js)), Js, '-ko')
        plt.grid()
        plt.xlabel('Iterations')
        plt.ylabel('J()')
        plt.show()
        plt.pause(0.1)

        if delta < tol:
            break
    return theta, Js


if __name__ == "__main__":
    # Guarded so importing this module does not run the experiment.
    learning_rate = 0.001
    tolerance = 0.0001
    # Random init: slope in [0, 0.5], intercept in [750, 1000].
    theta_gd = np.asarray([uniform(0., 0.5), uniform(750., 1000.)])
    Features, outs = load_dataset('/Users/hugomcp/Desktop/pizza.csv')
    theta_gd, evolution_J = gradient_descent(theta_gd, learning_rate, tolerance, Features, outs)
    input('Close app?')