Question: Need help modifying this code to implement the XOR function.

import math
import colorama
from colorama import Fore, Style
"""
Backpropagation Learning Algorithm Homework with XOR function
Name: Emily Dogbatse
CMS430: Artificial Intelligence
"""
colorama.init(autoreset=True)
# Steps to do
# 1. activation function sigmoid (DONE)
# 2. sigmoid derivative (DONE)
# 3. forward propagation (DONE)
# 4. Training function to calculate the error for the hidden layers (DONE)
def print_header(message):
    """
    More cool ways to print for aesthetics
    """
    print(Fore.GREEN + Style.BRIGHT + "=" * 60)
    print(f"{message: ^60}")
    print(Fore.GREEN + Style.BRIGHT + "=" * 60 + Style.RESET_ALL)
def print_subheader(message):
    """
    Even more cool ways to print for aesthetics
    """
    print(Fore.YELLOW + Style.BRIGHT + "-" * 60)
    print(f"{message: ^60}")
    print(Fore.YELLOW + Style.BRIGHT + "-" * 60 + Style.RESET_ALL)
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█', print_end="\r"):
    """
    This function just prints out a cool loading bar; it doesn't have anything
    to do with the actual program itself.
    """
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    filled_length = int(length * iteration // total)
    bar = fill * filled_length + '-' * (length - filled_length)
    print(f'\r{prefix} |{bar}| {percent}% {suffix}', end=print_end)
    # print a new line on completion
    if iteration == total:
        print()
def sigmoid(x):
    return 1 / (1 + math.exp(-x))

def sigmoid_derivative(x):
    # note: x is expected to already be a sigmoid *output*, not a raw weighted sum
    return x * (1 - x)
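A common stumbling block here: because sigmoid_derivative expects the post-activation value, sigmoid_derivative(sigmoid(z)) gives the derivative of the sigmoid at z. A quick numerical sanity check, reusing the two functions just defined (the snippet and variable names are mine, for illustration only):

    z = 0.5
    a = sigmoid(z)                     # post-activation value
    analytic = sigmoid_derivative(a)   # a * (1 - a) = sigma'(z)
    h = 1e-6
    numeric = (sigmoid(z + h) - sigmoid(z - h)) / (2 * h)  # central difference
    print(abs(analytic - numeric) < 1e-9)  # True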
# Next step: forward propagation
# This step is basically adding the hidden layer
def forward_propagation(w_hiddenlayer_1, w_hiddenlayer_2, w_output, x_input):
    # hidden layer 1: bias plus weighted inputs
    hidden_1_sum = w_hiddenlayer_1[0] + w_hiddenlayer_1[1] * x_input[0] + w_hiddenlayer_1[2] * x_input[1]
    hidden1_output = sigmoid(hidden_1_sum)
    # hidden layer 2
    hidden_2_sum = w_hiddenlayer_2[0] + w_hiddenlayer_2[1] * x_input[0] + w_hiddenlayer_2[2] * x_input[1]
    hidden2_output = sigmoid(hidden_2_sum)
    # output layer
    output_sum = w_output[0] + w_output[1] * hidden1_output + w_output[2] * hidden2_output
    output = sigmoid(output_sum)
    # now return all of the outputs
    return hidden1_output, hidden2_output, output
# what this returns is the output of the hidden layers and the output layer
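As a quick smoke test of forward_propagation, here is a call using the initial weights that main() sets up below (a snippet of mine, not part of the homework):

    w_h1 = [-0.08, 1, 0.99]   # bias, weight from x1, weight from x2
    w_h2 = [-0.02, 1, 1]
    w_out = [-0.01, 1.10, 1.10]
    h1, h2, y = forward_propagation(w_h1, w_h2, w_out, [0, 1])
    print(h1, h2, y)  # three values in (0, 1); y is the untrained prediction for XOR(0, 1)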
# training function (we did this by hand)
def training(w_hiddenlayer_1, w_hiddenlayer_2, w_output, x_input, target, eta):
    # forward pass
    hidden1_output, hidden2_output, output = forward_propagation(w_hiddenlayer_1, w_hiddenlayer_2, w_output, x_input)
    # printing the results to make sure this works
    print(f"Input: {x_input}, Actual Output: {output}, Target: {target}")
    # calculate the error
    # we want to minimize this in order to classify correctly
    output_error = target - output
    # error for hidden layer 1:
    hidden1_error = output_error * w_output[1] * sigmoid_derivative(hidden1_output)
    # error for hidden layer 2:
    hidden2_error = output_error * w_output[2] * sigmoid_derivative(hidden2_output)
    # NEXT STEP: UPDATE THE OUTPUT LAYER WEIGHTS
    w_output[0] += eta * output_error
    w_output[1] += eta * output_error * hidden1_output
    w_output[2] += eta * output_error * hidden2_output
    # update the hidden-layer weights at each hidden neuron
    w_hiddenlayer_1[0] += eta * hidden1_error
    w_hiddenlayer_1[1] += eta * hidden1_error * x_input[0]
    w_hiddenlayer_1[2] += eta * hidden1_error * x_input[1]
    w_hiddenlayer_2[0] += eta * hidden2_error
    w_hiddenlayer_2[1] += eta * hidden2_error * x_input[0]
    w_hiddenlayer_2[2] += eta * hidden2_error * x_input[1]
    return w_hiddenlayer_1, w_hiddenlayer_2, w_output
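Written out as equations, the updates above are the delta rule (reconstructed here from the code, so treat the notation as mine: $t$ is the target, $y$ the output, $h_j$ the hidden activations, $x_i$ the inputs, with $h_0 = x_0 = 1$ for the bias terms):

$$
\delta_o = t - y, \qquad
\delta_j = \delta_o \, w_j \, h_j(1 - h_j)
$$
$$
w_j \leftarrow w_j + \eta\, \delta_o\, h_j, \qquad
w_{ji} \leftarrow w_{ji} + \eta\, \delta_j\, x_i
$$

Note that $\delta_o$ as coded omits a $y(1-y)$ factor; that is exactly the output delta under cross-entropy loss with a sigmoid output, whereas squared-error loss would multiply by sigmoid_derivative(output) as well. Since the class worked this by hand, either convention may be what was intended.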
# train one full epoch
def epoch_XOR(w_hidden1, w_hidden2, w_output, eta):
    # XOR function this time: targets are 0, 1, 1, 0
    w_hidden1, w_hidden2, w_output = training(w_hidden1, w_hidden2, w_output, [0, 0], 0, eta)
    w_hidden1, w_hidden2, w_output = training(w_hidden1, w_hidden2, w_output, [0, 1], 1, eta)
    w_hidden1, w_hidden2, w_output = training(w_hidden1, w_hidden2, w_output, [1, 0], 1, eta)
    w_hidden1, w_hidden2, w_output = training(w_hidden1, w_hidden2, w_output, [1, 1], 0, eta)
    return w_hidden1, w_hidden2, w_output
# driver code
def main():
    print_header("Backpropagation Learning Algorithm for XOR")
    # Initialize the weights
    w_hidden1 = [-0.08, 1, 0.99]  # w10, w11, w12
    w_hidden2 = [-0.02, 1, 1]
    w_output = [-0.01, 1.10, 1.10]
    eta = 0.1
    training_data = [([0, 0], 0), ([0, 1], 1), ([1, 0], 1), ([1, 1], 0)]
    # Introducing a stopping criterion
    max_epochs = 10000
    error_threshold = 0.001
    print_subheader("Training Neural Network...")
    print("Progress: ", end="")
(Assume the rest of main() is here.)
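Since the tail of main() is omitted above, here is one plausible sketch of how it could finish, wiring together epoch_XOR, training_data, max_epochs, error_threshold, and printProgressBar. This is an illustrative completion, not the original homework code; in particular, the per-epoch total-squared-error stopping metric is my assumption:

    # Sketch: continuation of main(), illustrative only
    for epoch in range(1, max_epochs + 1):
        w_hidden1, w_hidden2, w_output = epoch_XOR(w_hidden1, w_hidden2, w_output, eta)
        # Assumed stopping metric: total squared error over the four patterns
        total_error = 0.0
        for x_input, target in training_data:
            _, _, output = forward_propagation(w_hidden1, w_hidden2, w_output, x_input)
            total_error += (target - output) ** 2
        printProgressBar(epoch, max_epochs, prefix='Training', suffix='Complete', length=40)
        if total_error < error_threshold:
            print(f"\nConverged after {epoch} epochs (total error {total_error:.6f})")
            break
    print_subheader("Final Results")
    for x_input, target in training_data:
        _, _, output = forward_propagation(w_hidden1, w_hidden2, w_output, x_input)
        print(f"Input: {x_input}, Output: {output:.4f}, Rounded: {round(output)}, Target: {target}")

if __name__ == "__main__":
    main()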

Step by Step Solution
To modify the given code for the XOR function, we need to ensure that the neural network is configured to handle the XOR logic correctly. The XOR function has 4 possible inputs, (0, 0), (0, 1), (1, 0), and (1, 1), with …
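For reference, the truth table the network has to reproduce, which is exactly what training_data in main() encodes:

    x1  x2 | XOR(x1, x2)
    -----------------
     0   0 |     0
     0   1 |     1
     1   0 |     1
     1   1 |     0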
