Question: The notebook below contains three sections marked "YOUR CODE HERE" that must be implemented: `perceptron_update`, `perceptron`, and `classify_linear`.

import numpy as np import matplotlib import sys import matplotlib.pyplot as plt import time sys.path.append('/home/codio/workspace/.guides/hf') from helper import * %matplotlib notebook print('You\'re running python %s' % sys.version.split(' ')[0])

def perceptron_update(x, y, w):
    """
    function w=perceptron_update(x,y,w);

    Implementation of Perceptron weights updating

    Input:
    x : input vector of d dimensions (d)
    y : corresponding label (-1 or +1)
    w : weight vector of d dimensions

    Output:
    w : weight vector after updating (d)
    """
    # Perceptron update rule: add the input scaled by its label, moving the
    # decision boundary toward correctly classifying (x, y).
    return w + y * x


# little test
x = np.random.rand(10)
y = -1
w = np.random.rand(10)
w1 = perceptron_update(x, y, w)

# These self tests verify perceptron_update on a hand-worked example
# (input [0,1], label -1, weights [1,1] must give [1,0]) and on updates
# from zero weights, where the result must be exactly +/- the input.

def test_perceptron_update1():
    vec = np.array([0, 1])
    lbl = -1
    weights = np.array([1, 1])
    updated = perceptron_update(vec, lbl, weights)
    return (updated.reshape(-1,) == np.array([1, 0])).all()

def test_perceptron_update2():
    vec = np.random.rand(25)
    updated = perceptron_update(vec, 1, np.zeros(25))
    return np.linalg.norm(updated - vec) < 1e-8

def test_perceptron_update3():
    vec = np.random.rand(25)
    updated = perceptron_update(vec, -1, np.zeros(25))
    return np.linalg.norm(updated + vec) < 1e-8

runtest(test_perceptron_update1, 'test_perceptron_update1')
runtest(test_perceptron_update2, 'test_perceptron_update2')
runtest(test_perceptron_update3, 'test_perceptron_update3')

def perceptron(xs, ys):
    """
    function w=perceptron(xs,ys);

    Implementation of a Perceptron classifier

    Input:
    xs : n input vectors of d dimensions (nxd)
    ys : n labels (-1 or +1)

    Output:
    w : weight vector (1xd)
    b : bias term
    """
    n, d = xs.shape  # so we have n input vectors, of d dimensions each
    w = np.zeros(d)
    b = 0.0
    # Sweep over the data in random order, applying the perceptron update to
    # every misclassified point. Stop after a full clean pass, or after a
    # fixed number of epochs in case the data is not linearly separable.
    max_epochs = 100
    for _ in range(max_epochs):
        num_mistakes = 0
        for i in np.random.permutation(n):
            # a point is misclassified when y * (w'x + b) <= 0
            if ys[i] * (xs[i].dot(w) + b) <= 0:
                w = w + ys[i] * xs[i]
                b = b + ys[i]
                num_mistakes += 1
        if num_mistakes == 0:
            break
    return (w, b)

# These self tests check that perceptron separates two linearly separable
# datasets: one randomly generated, and one fixed set of 20 points
# (10 positive followed by 10 negative).

def test_Perceptron1():
    N = 100
    d = 10
    x = np.random.rand(N, d)
    w = np.random.rand(1, d)
    y = np.sign(w.dot(x.T))[0]
    w, b = perceptron(x, y)
    preds = classify_linear_grader(x, w, b)
    return np.array_equal(preds.reshape(-1,), y.reshape(-1,))

def test_Perceptron2():
    x = np.array([
        [-0.70072, -1.15826], [-2.23769, -1.42917], [-1.28357, -3.52909],
        [-3.27927, -1.47949], [-1.98508, -0.65195], [-1.40251, -1.27096],
        [-3.35145, -0.50274], [-1.37491, -3.74950], [-3.44509, -2.82399],
        [-0.99489, -1.90591], [0.63155, 1.83584], [2.41051, 1.13768],
        [-0.19401, 0.62158], [2.08617, 4.41117], [2.20720, 1.24066],
        [0.32384, 3.39487], [1.44111, 1.48273], [0.59591, 0.87830],
        [2.96363, 3.00412], [1.70080, 1.80916]])
    y = np.array([1] * 10 + [-1] * 10)
    w, b = perceptron(x, y)
    preds = classify_linear_grader(x, w, b)
    return np.array_equal(preds.reshape(-1,), y.reshape(-1,))

runtest(test_Perceptron1, 'test_Perceptron1')
runtest(test_Perceptron2, 'test_Perceptron2')

# Sanity check: generate random linearly separable data, fit the perceptron,
# and verify that every training point lands on the correct side.
N = 100                                # number of input vectors
xs = np.random.rand(N, 2) * 10 - 5     # random points in [-5, 5]^2
w0 = np.random.rand(2)                 # random hyperplane normal
b0 = np.random.rand() * 2 - 1          # random bias in [-1, 1]
ys = np.sign(xs.dot(w0) + b0)          # labels: which side of the plane

# call perceptron to find w from data
w, b = perceptron(xs.copy(), ys.copy())

# yw'x should be +1.0 for every input
assert (all(np.sign(ys * (xs.dot(w) + b)) == 1.0))
print("Looks like you passed the Perceptron test!")

# we can make a pretty visualization
visboundary(w, b, xs, ys)

def onclick(event):
    """Mouse-click handler: records a labeled point, refits the perceptron on
    all points so far, and redraws the decision boundary."""
    global w,b,ldata,ax,line,xydata
    # clicked position as a (2,1) column vector
    pos=np.array([[event.xdata],[event.ydata]])
    # NOTE(review): the plot title says shift-click adds NEGATIVE points, but
    # this branch labels shift-clicks +1 (red) — confirm which is intended.
    if event.key == 'shift': # add positive point
        color='or'
        label=1
    else: # add negative point
        color='ob'
        label=-1
    ax.plot(pos[0],pos[1],color)
    ldata.append(label);
    # append the new point as a row of the (n,2) data matrix
    xydata=np.vstack((xydata,pos.T))
    # call Perceptron function
    w,b=perceptron(xydata,np.array(ldata).flatten())
    # draw decision boundary
    # q is the point on the hyperplane closest to the origin; the boundary
    # segment is drawn through q along the direction perpendicular to w
    q=-b/(w**2).sum() *w;
    if line is None:
        line, = ax.plot([q[0]-w[1],q[0]+w[1]],[q[1]+w[0],q[1]-w[0]],'b--')
    else:
        line.set_data([q[0]-w[1],q[0]+w[1]],[q[1]+w[0],q[1]-w[0]])
    return

# Interactive demo state: start with an empty (0,2) dataset, no labels,
# a zero classifier, and no boundary line drawn yet.
xydata=np.random.rand(0,2)
ldata=[]
w=np.zeros(2)
b=np.zeros(1)
line=None
%matplotlib notebook
fig = plt.figure()
ax = fig.add_subplot(111)
plt.xlim(0,1)
plt.ylim(0,1)
# refit and redraw on every mouse click inside the axes
cid = fig.canvas.mpl_connect('button_press_event', onclick)
plt.title('Use shift-click to add negative points.')

def classify_linear(xs, w, b=None):
    """
    function preds=classify_linear(xs,w,b)

    Make predictions with a linear classifier
    Input:
    xs : n input vectors of d dimensions (nxd)
    [could also be a single vector of d dimensions]
    w : weight vector of dimensionality d
    b : bias (scalar); None is treated as zero bias

    Output:
    preds: predictions (1xn), each entry -1 or +1
    """
    w = w.flatten()
    # accept a single d-dimensional vector as well as an (n x d) matrix
    if xs.ndim == 1:
        xs = xs.reshape(1, -1)
    # signed distance to the hyperplane decides the label
    activation = xs.dot(w)
    if b is not None:
        activation = activation + b
    predictions = np.sign(activation)
    # points exactly on the hyperplane get the positive label so that
    # predictions are always in {-1, +1}, never 0
    predictions[predictions == 0] = 1
    return predictions

# Self tests for classify_linear: predictions must only take values -1/+1,
# and the true separating hyperplane must classify its own labels perfectly.

def test_linear1():
    data = np.random.rand(50000, 20) - 0.5        # draw random data
    weights = np.random.rand(20)
    bias = -0.1                                   # with bias -0.1
    labels = classify_linear(data, weights, bias)
    # check if predictions are only -1 or 1
    return set(np.unique(labels)) == set([-1, 1])

def test_linear2():
    data = np.random.rand(1000, 2) - 0.5          # draw random data
    weights = np.array([0.5, -0.3])               # define a random hyperplane
    bias = -0.1                                   # with bias -0.1
    # assign labels according to this hyperplane (so it is linearly separable)
    labels = np.sign(data.dot(weights) + bias)
    # the original hyperplane (weights, bias) should classify all correctly
    return (all(np.sign(labels * classify_linear(data, weights, bias)) == 1.0))

runtest(test_linear1, 'test_linear1')
runtest(test_linear2, 'test_linear2')

Step by Step Solution

There are 3 Steps involved in it

1 Expert Approved Answer
Step 1: Unlock the step-by-step solution.
Question Has Been Solved by an Expert!

Get step-by-step solutions from verified subject matter experts

Step: 2 Unlock
Step: 3 Unlock

Students Have Also Explored These Related Machine Learning Questions!