Question: Update this code so that it reads like my own work rather than something copied from an online tool.

import numpy as np
import matplotlib.pyplot as plt
# Load the data from CSV
data = np.genfromtxt('Advertising_N200_p3.csv', delimiter=',', skip_header=1)
# Separate the features and the target variable
X = data[:, :-1]  # TV, Radio, Newspaper
y = data[:, -1]   # Sales
# Add a column of ones for the intercept term
X = np.column_stack((np.ones(X.shape[0]), X))  # N x (p+1) design matrix
np.random.seed(42)  # For reproducibility
beta = np.random.uniform(-1, 1, X.shape[1])  # Initial guess for the p+1 parameters
def mini_batch_gradient_descent(X, y, beta, batch_size=10, learning_rate=2.5e-6, iterations=20000):
    N = X.shape[0]        # Number of observations
    B = N // batch_size   # Number of mini-batches per pass
    cost_history = []
    beta_history = []
    for itr in range(iterations):
        # Shuffle the data before forming mini-batches
        permutation = np.random.permutation(N)
        X_shuffled = X[permutation]
        y_shuffled = y[permutation]
        for b in range(B):
            # Slice out the current mini-batch
            start = b * batch_size
            end = start + batch_size
            X_b = X_shuffled[start:end]
            y_b = y_shuffled[start:end]
            # Prediction for the mini-batch
            y_pred = np.dot(X_b, beta)
            # Gradient of the squared-error cost on this mini-batch
            gradient = -2 * np.dot(X_b.T, (y_b - y_pred)) / batch_size
            # Update the parameters
            beta -= learning_rate * gradient
        # Record the cost and coefficients once per iteration
        cost = np.sum((y - np.dot(X, beta)) ** 2)
        cost_history.append(cost)
        beta_history.append(beta.copy())
    return beta, cost_history, beta_history
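# Note on the update rule: `gradient` above is the derivative of the mean
# squared error on one mini-batch,
#     (1 / batch_size) * sum_i (y_i - x_i . beta)^2,
# with respect to beta, i.e. -(2 / batch_size) * X_b^T (y_b - X_b beta),
# so `beta -= learning_rate * gradient` steps downhill on that batch's error.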
final_beta, cost_history, beta_history = mini_batch_gradient_descent(X, y, beta)
beta_history = np.array(beta_history)
plt.figure(figsize=(10,6))
for i in range(beta_history.shape[1]):
    plt.plot(beta_history[:, i], label=f'Beta {i}')
plt.xlabel('Iteration')
plt.ylabel('Beta Coefficients')
plt.title('Effect of Iterations on Beta Coefficients')
plt.legend()
plt.show()
plt.figure(figsize=(10,6))
plt.plot(cost_history)
plt.xlabel('Iteration')
plt.ylabel('Cost')
plt.title('Effect of Iterations on Cost')
plt.show()
print("Best-fit model parameters:", final_beta)
mse = np.mean((y - np.dot(X, final_beta))**2)
print("Mean Squared Error on training set:", mse)
