def computeCost(X, y, theta):
    """Compute the linear-regression cost J(theta).

    Parameters
    ----------
    X : ndarray of shape (m, n)
        Design matrix, one training example per row (first column is
        typically all ones for the intercept term — TODO confirm with caller).
    y : ndarray of shape (m,)
        Target values.
    theta : ndarray of shape (n,)
        Parameter vector.

    Returns
    -------
    float
        J(theta) = (1 / (2m)) * sum((X @ theta - y) ** 2)
    """
    # BUG FIX (formatting): the original source collapsed this whole function
    # onto a single '#'-prefixed line, making it dead code; restored here.
    m = len(y)
    predictions = X.dot(theta)
    square_err = (predictions - y) ** 2
    # The 1/(2m) factor keeps the gradient free of a stray factor of 2.
    J = (1.0 / (2 * m)) * np.sum(square_err)
    return J

def gradientDescent(X, y, theta, alpha, epoch):
    """Run batch gradient descent for linear regression.

    Parameters
    ----------
    X : ndarray of shape (m, n)
        Design matrix, one training example per row.
    y : ndarray of shape (m,)
        Target values.
    theta : ndarray of shape (n,)
        Initial parameter vector (not modified in place).
    alpha : float
        Learning rate.
    epoch : int
        Number of full-batch update steps to perform.

    Returns
    -------
    (theta, cost) : tuple
        theta : ndarray of shape (n,) — the learned parameters.
        cost  : ndarray of shape (epoch,) — J(theta) recorded after each
                update step.
    """
    # BUG FIX (formatting): the original source collapsed this whole function
    # onto a single '#'-prefixed line, making it dead code; restored here.
    cost = np.zeros(epoch)  # per-epoch cost history
    m = len(y)
    for i in range(epoch):
        predictions = X.dot(theta)
        # Vectorized simultaneous update of all parameters:
        # theta := theta - alpha * (1/m) * X^T (X theta - y)
        theta = theta - alpha * (1.0 / m) * X.T.dot(predictions - y)
        # Record the cost of the *updated* theta (matches original ordering).
        cost[i] = computeCost(X, y, theta)
    # BUG FIX: original ended with `return theta, cos` — `cos` is undefined
    # (a typo for `cost`) and would raise NameError at runtime.
    return theta, cost

import numpy as np
import pandas as pd
# NOTE(review): the remainder of this line in the scraped source was a
# garbled, truncated duplicate of computeCost (cut off mid-expression,
# with punctuation stripped); removed as extraction residue — the complete
# definition appears above.

原文地址: https://www.cveoy.top/t/topic/cscC 著作权归作者所有。请勿转载和采集!

免费AI点我,无需注册和登录