# Solution: Linear Regression Using Gradient Descent (题解 | 使用梯度下降的线性回归)
# https://www.nowcoder.com/practice/e9f12bb403f44847b44e287d5a71e56c
import numpy as np
def linear_regression_gradient_descent(X, y, alpha, iterations):
    """Fit linear-regression weights with batch gradient descent.

    Args:
        X: design matrix, shape (m, n) — list of lists or array-like.
        y: target values, length m (any shape reshapeable to (m, 1)).
        alpha: learning rate.
        iterations: number of full-batch update steps.

    Returns:
        1-D numpy array of the n fitted weights, rounded to 4 decimals.
    """
    design = np.array(X, dtype=float)                     # (m, n)
    target = np.array(y, dtype=float).reshape(-1, 1)      # (m, 1)
    sample_count, feature_count = design.shape

    # Start from the all-zeros weight vector.
    weights = np.zeros((feature_count, 1), dtype=float)   # (n, 1)

    # Full-batch updates: weights <- weights - alpha * X^T (X w - y) / m.
    for _ in range(int(iterations)):
        residual = design @ weights - target              # (m, 1)
        gradient = (design.T @ residual) / sample_count   # (n, 1)
        weights = weights - alpha * gradient

    # Flatten to (n,) and round to 4 decimal places as the task requires.
    return np.round(weights.flatten(), 4)
# Script entry point: read the problem input, run gradient descent, print result.
if __name__ == "__main__":
    import ast

    # One value per line: matrix literal, target-vector literal, learning
    # rate, iteration count.
    matrix_inputx = input()
    array_y = input()
    alpha = float(input())
    iterations = int(input())

    # literal_eval safely parses the Python-style list literals.
    matrix = np.array(ast.literal_eval(matrix_inputx))
    y = np.array(ast.literal_eval(array_y)).reshape(-1, 1)

    # Run gradient descent (the original comment said "compute the inverse
    # matrix" — a copy-paste leftover; no inversion happens here).
    output = linear_regression_gradient_descent(matrix, y, alpha, iterations)
    print(output)
