作为一项自学练习,我尝试从零开始实现线性回归问题的梯度下降,并将迭代结果绘制在等高线图上。
我的梯度下降实现给出了正确的结果(已用 Sklearn 验证),但绘制出的梯度下降路径似乎并不垂直于等高线。这是预期的现象,还是我的代码或理解存在问题?
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def costfunction(X, y, theta):
    """Return the linear-regression cost J(theta) in vectorized form.

    J(theta) = (1 / (2m)) * (X @ theta - y)^T (X @ theta - y)

    Parameters
    ----------
    X : ndarray, shape (m, n)
        Design matrix (assumes a leading column of ones for the intercept
        if an intercept term is wanted — TODO confirm against caller).
    y : ndarray, shape (m, 1)
        Target values.
    theta : ndarray, shape (n, 1)
        Parameter vector.

    Returns
    -------
    float
        Mean squared error cost (with the conventional 1/2 factor).
    """
    m = np.size(y)
    # Residuals of the linear model h(X) = X @ theta.
    residuals = X @ theta - y
    # Squared-error cost; float() collapses the resulting 1x1 matrix to a scalar.
    return float((1.0 / (2 * m)) * (residuals.T @ residuals))
def gradient_descent(X,y,theta,alpha = 0.0005,num_iters=1000):
#Initialisation of useful values
m = np.size(y)
J_history = np.zeros(num_iters)
theta_0_hist, theta_1_hist = [], [] #For plotting afterwards
for i in range(num_iters):
#Grad function — (code excerpt truncated here in the original post)