Question: How do I use / include / implement a non-convex optimizer in this code? (One option is sketched after the code listing below.)

import tensorflow as tf
import numpy as np
from sklearn.preprocessing import normalize
import matplotlib.pyplot as plt
# parameter zeroone controls whether the labels are encoded as {0, 1} or {-1, +1}
def read_parse_data(zeroone=True):
    labels = []
    rows = []
    with open("ijcnn1.txt") as f:           # LIBSVM-format file: "<label> <index>:<value> ..."
        for line in f:
            xs = line.split()
            if not xs:                       # skip blank lines
                continue
            h = {}
            for s in xs[1:]:
                s = s.strip()
                if len(s) == 0:
                    continue
                k, v = s.split(":")
                h[int(k)] = float(v)
            rows.append(h)
            if float(xs[0]) > 0:             # positive class
                labels.append(1)
            elif zeroone:
                labels.append(0)
            else:
                labels.append(-1)
    # densify the sparse rows so np.dot works downstream (LIBSVM indices are 1-based)
    n_features = max(max(h) for h in rows if h)
    data = np.zeros((len(rows), n_features))
    for i, h in enumerate(rows):
        for k, v in h.items():
            data[i, k - 1] = v
    return data, np.array(labels)
def prediction(w, data):
    # sigmoid of the linear score X·w
    return 1.0 / (1.0 + np.exp(-np.dot(data, w)))

def logloss(pred, labels):
    pred = np.clip(pred, 1e-15, 1 - 1e-15)               # avoid log(0)
    Li = -(labels * np.log(pred) + (1 - labels) * np.log(1 - pred))
    return np.mean(Li)

def grad(w, data, labels):
    pred = prediction(w, data)
    return np.dot(data.T, pred - labels) / len(labels)   # gradient of the mean log loss
def wolfe(w, p, data, labels, gradf=grad, lossf=logloss, predf=prediction, alpha=1.0, c1=1e-4, c2=0.9):
    # line search for a step size satisfying the Wolfe conditions (c1, c2 are the usual textbook defaults)
    gradw = gradf(w, data, labels)
    for i in range(100):
        if lossf(predf(w + alpha * p, data), labels) > lossf(predf(w, data), labels) + c1 * alpha * np.dot(gradw, p):
            alpha /= 2      # sufficient-decrease (Armijo) condition violated: shrink the step
        elif np.dot(gradf(w + alpha * p, data, labels), p) < c2 * np.dot(gradw, p):
            alpha *= 2      # curvature condition violated: grow the step
        else:
            break           # both conditions hold
        if i == 99:
            raise Exception("wolfe doesn't finish")
    return alpha
def dfp(w, B, data, labels, predf=prediction, gradf=grad, lossf=logloss, maxiter=100, tol=1e-5):
    # quasi-Newton optimization using the DFP update of the inverse Hessian approximation
    Binv = np.linalg.inv(B)
    gradw = gradf(w, data, labels)
    losses = []
    for i in range(maxiter):
        p = -np.dot(Binv, gradw)                         # search direction
        alpha = wolfe(w, p, data, labels, gradf=gradf, lossf=lossf)
        s = alpha * p
        wnew = w + s
        gradnew = gradf(wnew, data, labels)
        if np.linalg.norm(gradnew - gradw) < tol:
            break
        y = gradnew - gradw
        gradw = gradnew
        # DFP inverse-Hessian update: Binv += s s^T / (s^T y) - (Binv y)(Binv y)^T / (y^T Binv y)
        By = np.dot(Binv, y)
        Binv = Binv + np.outer(s, s) / np.dot(s, y) - np.outer(By, By) / np.dot(y, By)
        predictions = predf(wnew, data)
        loss = lossf(predictions, labels)
        print(f"Iter: {i} Loss: {loss}")
        losses.append(loss)
        w = wnew
    return w, losses
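# A quick sanity check on the DFP formula used above (illustrative, not part of the original post):
# the update H_new = H + s s^T/(s^T y) - (H y)(H y)^T/(y^T H y) satisfies the secant equation H_new @ y = s.
_rng = np.random.default_rng(0)
_s, _y = _rng.normal(size=3), _rng.normal(size=3)
_H = np.eye(3)
_Hy = _H @ _y
_Hnew = _H + np.outer(_s, _s) / (_s @ _y) - np.outer(_Hy, _Hy) / (_y @ _Hy)
assert np.allclose(_Hnew @ _y, _s)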
features, labels = read_parse_data()
n = features.shape[1]
w = np.ones(n)      # initialize weight vector
B = np.eye(n)       # use either identity or compute Hessian in first step for BFGS
wopt, losses = dfp(w, B, features, labels, gradf=grad, lossf=logloss)
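To address the question itself: since tensorflow is already imported, one simple route is to hand the same logistic-regression objective to one of TensorFlow's stochastic first-order optimizers (Adam below), which are routinely used on non-convex losses and need no line search or Hessian approximation. This is a minimal sketch, not the only answer: it reuses features, labels and n from the script above, and the learning rate and step count are arbitrary placeholders.

X = tf.constant(features, dtype=tf.float32)
y = tf.constant(labels, dtype=tf.float32)
w_tf = tf.Variable(tf.ones([n], dtype=tf.float32))
opt = tf.keras.optimizers.Adam(learning_rate=0.01)    # placeholder hyperparameter

def tf_logloss():
    logits = tf.linalg.matvec(X, w_tf)
    # numerically stable sigmoid cross-entropy, same objective as logloss(prediction(w, data), labels)
    return tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits))

for step in range(500):                               # placeholder iteration count
    with tf.GradientTape() as tape:
        loss = tf_logloss()
    grads = tape.gradient(loss, [w_tf])
    opt.apply_gradients(zip(grads, [w_tf]))
    if step % 100 == 0:
        print(f"step {step}  loss {loss.numpy():.5f}")

The same training loop works unchanged if the linear model is swapped for a genuinely non-convex one (for example a small neural network), which is exactly the setting these optimizers are designed for; only tf_logloss and the list of trainable variables change.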
(The run printed about a dozen lines of the form "Iter: <i> Loss: <loss>"; the numeric values did not survive in the post.)
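matplotlib is imported in the post but never used; a natural follow-up (illustrative, assuming only the losses list returned by dfp above) is to plot the loss curve:

plt.plot(losses, marker="o")
plt.xlabel("iteration")
plt.ylabel("log loss")
plt.title("DFP quasi-Newton on logistic regression")
plt.show()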