Question: Machine Learning (Python)
I need help with Parts 1 and 2:
1.) How do I write ridge regression from scratch, starting from the stochastic gradient descent code below, and how do I write the objective function that ridge regression optimizes in Python? (One possible approach is sketched after the code below.)
2.) How do I implement Least Squares Linear Regression (LSRL), Ridge, Lasso, and Elastic Net regression using existing Python libraries? (A second sketch using scikit-learn also follows the code.)
Code:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_boston  # note: load_boston is deprecated and removed in recent scikit-learn versions
from sklearn import preprocessing

X = load_boston().data
Y = load_boston().target
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.3, random_state=0)

# Standardize the features, then keep train/test as DataFrames with the target in a 'cost' column.
scaler = preprocessing.StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
X_train = pd.DataFrame(data=X_train, columns=load_boston().feature_names)
X_train['cost'] = list(y_train)
X_test = pd.DataFrame(data=X_test, columns=load_boston().feature_names)
X_test['cost'] = list(y_test)

def sgd(X, y, learning_rate=0.3, n_epochs=1000, k=30):
    # Mini-batch SGD for plain (unregularized) linear regression.
    # Note: y is unused here; the targets are read from the 'cost' column of X.
    w = np.random.randn(1, 13)
    b = np.random.randn(1, 1)
    epoch = 1
    while epoch <= n_epochs:
        temp = X.sample(k)                 # random mini-batch of k rows
        X_tr = temp.iloc[:, 0:13].values   # the 13 feature columns
        y_tr = temp.iloc[:, -1].values     # the 'cost' column
        y_predicted = []
        for i in range(k):
            # Gradients of the squared error for one sample.
            Lw = (-2 / k * X_tr[i]) * (y_tr[i] - np.dot(X_tr[i], w.T) - b)
            Lb = (-2 / k) * (y_tr[i] - np.dot(X_tr[i], w.T) - b)
            w = w - learning_rate * Lw
            b = b - learning_rate * Lb
            y_pred = np.dot(X_tr[i], w.T)
            y_predicted.append(y_pred)
        loss = mean_squared_error(y_predicted, y_tr)
        epoch += 1
        learning_rate = learning_rate / 1.03  # decay the step size each epoch
    return w, b

def predict_using_sgd(x, w, b):
    y_predicted = []
    X_feat = x.iloc[:, 0:13].values
    for i in range(len(x)):
        # .item() replaces np.asscalar, which was removed from NumPy.
        y = (np.dot(w, X_feat[i]) + b).item()
        y_predicted.append(y)
    return np.array(y_predicted)

w, b = sgd(X_train, y_train)
y_predicted_sgd = predict_using_sgd(X_test, w, b)

plt.figure(figsize=(30, 6))
plt.plot(y_test, label='actual_result')
plt.plot(y_predicted_sgd, label='Predicted_result')
plt.legend(prop={'size': 15})
plt.show()
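For Part 1, one common convention for the ridge objective is mean squared error plus an L2 penalty on the weights, J(w, b) = (1/n) * sum_i (y_i - x_i . w - b)^2 + alpha * ||w||^2, with the bias b left unpenalized. The only change to the SGD loop above is then an extra 2 * alpha * w term in the weight gradient. Below is a minimal sketch along those lines, assuming the same DataFrame layout as above (features plus a 'cost' target column); the names ridge_objective, sgd_ridge, and alpha are illustrative choices, not part of the original code.

import numpy as np

def ridge_objective(X, y, w, b, alpha=1.0):
    # Ridge objective on NumPy arrays: mean squared error plus an L2 penalty
    # on the weights, J(w, b) = mean((y - Xw - b)^2) + alpha * ||w||^2.
    residuals = y - X.dot(w.T).ravel() - b.ravel()
    return np.mean(residuals ** 2) + alpha * np.sum(w ** 2)

def sgd_ridge(X, y, learning_rate=0.3, n_epochs=1000, k=30, alpha=1.0):
    # Same mini-batch loop as sgd() above; the only change is the
    # 2 * alpha * w term in the weight gradient (the bias is not penalized).
    n_features = X.shape[1] - 1            # last column of X is 'cost'
    w = np.random.randn(1, n_features)
    b = np.random.randn(1, 1)
    for epoch in range(n_epochs):
        temp = X.sample(k)
        X_tr = temp.iloc[:, 0:n_features].values
        y_tr = temp.iloc[:, -1].values
        for i in range(k):
            err = y_tr[i] - np.dot(X_tr[i], w.T) - b
            Lw = (-2 / k * X_tr[i]) * err + 2 * alpha * w   # L2 penalty gradient
            Lb = (-2 / k) * err
            w = w - learning_rate * Lw
            b = b - learning_rate * Lb
        learning_rate = learning_rate / 1.03
    return w, b

Training and prediction would then work exactly as with sgd(), e.g. w, b = sgd_ridge(X_train, y_train, alpha=1.0) followed by predict_using_sgd(X_test, w, b).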
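For Part 2, scikit-learn already provides LinearRegression, Ridge, Lasso, and ElasticNet in sklearn.linear_model, so a minimal sketch (assuming the scaled X_train/X_test DataFrames built above, with the targets in the 'cost' column) could look like the following. The alpha and l1_ratio values are arbitrary placeholders that would normally be tuned.

from sklearn.linear_model import LinearRegression, Ridge, Lasso, ElasticNet
from sklearn.metrics import mean_squared_error

# Feature columns are everything except the 'cost' target column added above.
feature_cols = [c for c in X_train.columns if c != 'cost']

models = {
    'Least Squares (LSRL)': LinearRegression(),
    'Ridge': Ridge(alpha=1.0),
    'Lasso': Lasso(alpha=0.1),
    'Elastic Net': ElasticNet(alpha=0.1, l1_ratio=0.5),
}

for name, model in models.items():
    model.fit(X_train[feature_cols], X_train['cost'])
    preds = model.predict(X_test[feature_cols])
    print(name, 'test MSE:', mean_squared_error(X_test['cost'], preds))

If automatic tuning of alpha is wanted, sklearn.linear_model also ships cross-validated variants (RidgeCV, LassoCV, ElasticNetCV) that follow the same fit/predict interface.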
