Question: Hello Chegg, this is my neural network code, and I defined three functions. If I get the wanted result from the first one, lop1(), how can I skip lop2() and lop3() and end the program?

import tensorflow as tf
import numpy as np
from tensorflow import keras

# Training data (identical in all three functions, so defined once here).
xs = np.array([443707.401, 455897.072, 396833.899, 407426.699, 435646.069,
               419953.188, 436349.443, 633372.629, 572704.102, 506379.29,
               596808.359, 622705.893, 521749.843, 500965.861, 558482.399,
               672648.564, 739873.87, 459092.199, 485007.612, 579586.959,
               509713.739, 725009.687, 727394.13, 658740.26, 485686.823,
               461640.1, 502495.219, 625584.252, 680222.202, 760907.585,
               738381.47, 712415.6], dtype=float)

ys = np.array([443417.925, 455608.023, 397588.538, 407135.987, 435356.29,
               419662.842, 436059.613, 633089.564, 572418.983, 506091.937,
               596524.026, 622422.445, 521463.004, 500678.296, 558196.649,
               672366.797, 739594.302, 458803.188, 484719.43, 579302.029,
               509426.339, 724729.561, 727114.01, 658457.788, 485398.691,
               461351.171, 502207.616, 625300.699, 679940.373, 760628.252,
               738101.41, 712134.732], dtype=float)

def lop3():
    file = open('D:\\1.txt', 'a')
    file.write("------we are working in loop 3 with three hidden layers--------" + '\n')
    file.close()
    for i in range(1, 20):
        model = tf.keras.Sequential([
            keras.layers.Dense(units=i, activation="relu", input_shape=[1], name="layer1"),
            keras.layers.Dense(units=i, activation="relu", name="layer2"),
            keras.layers.Dense(units=i, activation="relu", name="layer3"),
            keras.layers.Dense(units=1, activation="linear", name="layer4"),
        ])
        # 'accuracy' is not a meaningful metric for regression, so it is dropped here.
        model.compile(loss='mse', optimizer='Adamax')

        # a = float(input("enter the wanted value to convert : "))
        # plt.plot(xs, ys)  # graph what your current data looks like
        # (xs and ys are defined once at the top of the file)
        model.fit(xs, ys, epochs=1000, batch_size=20, verbose=2)
        result = model.predict(np.array([[443707.401]]))
        p = float(result[0][0]) - 443417.925  # scalar error against the target
        print(result)
        print(p)
        file = open('D:\\1.txt', 'a')
        file.write("for three hidden layers with neurons equal " + str(i)
                   + " predicted is " + str(result) + '\n')
        file.close()
        if 10 >= p > 0:  # stop once the prediction is within (0, 10] of the target
            break
    # Note: this final write runs whether or not the tolerance was ever met.
    file = open('D:\\1.txt', 'a')
    file.write("for loop " + str(i) + " predicted is " + str(result)
               + " this is the best result" + '\n')
    file.close()
    print("end")

def lop2():
    file = open('D:\\1.txt', 'a')
    file.write("------we are working in loop 2 with two hidden layers--------" + '\n')
    file.close()
    for i in range(1, 20):
        model = tf.keras.Sequential([
            keras.layers.Dense(units=i, activation="relu", input_shape=[1], name="layer1"),
            keras.layers.Dense(units=i, activation="relu", name="layer2"),
            keras.layers.Dense(units=1, activation="linear", name="layer3"),
        ])
        model.compile(loss='mse', optimizer='Adamax')

        model.fit(xs, ys, epochs=1000, batch_size=20, verbose=2)
        result = model.predict(np.array([[443707.401]]))
        p = float(result[0][0]) - 443417.925
        print(result)
        print(p)
        file = open('D:\\1.txt', 'a')
        file.write("for two hidden layers with neurons equal " + str(i)
                   + " predicted is " + str(result) + '\n')
        file.close()
        if 10 >= p > 0:
            break
    file = open('D:\\1.txt', 'a')
    file.write("for loop " + str(i) + " predicted is " + str(result)
               + " this is the best result" + '\n')
    file.close()
    print("end")

def lop1():
    file = open('D:\\1.txt', 'a')
    file.write("------we are working in loop 1 with one hidden layer--------" + '\n')
    file.close()
    for i in range(1, 20):
        model = tf.keras.Sequential([
            keras.layers.Dense(units=i, activation="relu", input_shape=[1], name="layer1"),
            keras.layers.Dense(units=1, activation="linear", name="layer2"),
        ])
        model.compile(loss='mse', optimizer='Adamax')

        model.fit(xs, ys, epochs=1000, batch_size=20, verbose=2)
        result = model.predict(np.array([[443707.401]]))
        p = float(result[0][0]) - 443417.925
        # print(result)
        # print(p)
        file = open('D:\\1.txt', 'a')
        file.write("for one hidden layer with neuron equal " + str(i)
                   + " predicted is " + str(result) + '\n')
        file.close()
        if 10 >= p > 0:
            break
    file = open('D:\\1.txt', 'a')
    file.write("for loop " + str(i) + " predicted is " + str(result)
               + " this is the best result" + '\n')
    file.close()
    print("end")
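An aside on the structure: lop1(), lop2(), and lop3() differ only in the number of hidden layers, so they could be collapsed into one parameterized function. The sketch below is not part of the original code (the name search_with_depth is hypothetical); it also returns a success flag, which the early-exit pattern discussed after the calls relies on.

def search_with_depth(depth):
    # Hypothetical replacement for lop1/lop2/lop3: 'depth' hidden layers,
    # 1-19 neurons each; returns True once the tolerance check passes.
    for i in range(1, 20):
        hidden = [keras.layers.Dense(units=i, activation="relu",
                                     input_shape=[1], name="layer1")]
        for k in range(depth - 1):
            hidden.append(keras.layers.Dense(units=i, activation="relu",
                                             name="layer" + str(k + 2)))
        hidden.append(keras.layers.Dense(units=1, activation="linear",
                                         name="output"))
        model = tf.keras.Sequential(hidden)
        model.compile(loss='mse', optimizer='Adamax')
        model.fit(xs, ys, epochs=1000, batch_size=20, verbose=2)
        result = model.predict(np.array([[443707.401]]))
        p = float(result[0][0]) - 443417.925
        if 10 >= p > 0:  # wanted result reached
            return True
    return False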

lop1()
lop2()
lop3()
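As written, the three calls above run unconditionally. To skip lop2() and lop3() once lop1() finds the wanted result, one way (a sketch, assuming each function is changed to return True when 10 >= p > 0 is hit and False otherwise, as search_with_depth above does) is to guard each later call on the earlier one failing:

# Run the searches in order; stop as soon as one succeeds.
if not lop1():          # one hidden layer
    if not lop2():      # two hidden layers
        lop3()          # three hidden layers

Concretely, that means replacing the break inside each loop with return True and adding return False after the loop. Alternatively, import sys and call sys.exit() at the point where the wanted result is written to the file; that terminates the whole script immediately, though returning a flag keeps the functions reusable.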
