Commit 7aa1fedc authored by Steven Jude Iovine's avatar Steven Jude Iovine
Browse files

Running

parent 9bf38aa6
# Some potentially useful modules
# Whether or not you use these (or others) depends on your implementation!
import random
import numpy
import math
import tensorflow
from tensorflow import keras
import matplotlib.pyplot as plt
class NeuralMMAgent(object):
    '''
    Simple fully-connected feed-forward neural-net agent trained with
    incremental (per input/output pair) backpropagation.

    Layer layout: num_in_nodes inputs, num_hid_layers hidden layers of
    num_hid_nodes each, then num_out_nodes outputs.  Weights between two
    adjacent layers are stored as ONE flat list indexed
    [from_node * n_to + to_node]; thetas/activations/errors are stored
    per layer (input layer included), matching the driver's
    set_weights([[...4...], [...2...]]) / set_thetas([[0,0],[0,0],[0]]) calls.

    NOTE: the original skeleton's methods had comment-only bodies, which is a
    SyntaxError in Python; this version is complete and runnable.
    '''

    def __init__(self, num_in_nodes, num_hid_nodes, num_hid_layers, num_out_nodes,
                 learning_rate=0.2, max_epoch=10000, max_sse=.01, momentum=0.2,
                 creation_function=None, activation_function=None, random_seed=1):
        '''
        Arguments:
            num_in_nodes -- total # of input nodes for Neural Net
            num_hid_nodes -- total # of hidden nodes for each hidden layer
                in the Neural Net
            num_hid_layers -- total # of hidden layers for Neural Net
            num_out_nodes -- total # of output nodes for Neural Net
            learning_rate -- learning rate to be used when propagating error
            max_epoch -- default cap on training epochs
            max_sse -- default target total sum-squared error
            momentum -- fraction of the previous delta folded into each update
            creation_function -- function that will be used to create the
                neural network structures (defaults to create_neural_structure)
            activation_function -- list of two functions:
                1st determines activation given a weighted summed input,
                2nd is the derivative of the 1st (defaults to sigmoid pair)
            random_seed -- used to seed this object's random attribute.
                This ensures that we can reproduce results if wanted
        '''
        assert num_in_nodes > 0 and num_hid_layers > 0 and num_hid_nodes and \
            num_out_nodes > 0, "Illegal number of input, hidden, or output layers!"
        self.learning_rate = learning_rate
        self.max_epoch = max_epoch
        self.max_sse = max_sse
        self.momentum = momentum
        # Private generator so training runs are reproducible per seed.
        self.random = random.Random(random_seed)
        # Node count of every layer, input first, output last.
        self.layer_sizes = [num_in_nodes] + [num_hid_nodes] * num_hid_layers + [num_out_nodes]
        if activation_function is None:
            activation_function = [NeuralMMAgent.sigmoid_af,
                                   NeuralMMAgent.sigmoid_af_deriv]
        self.activation_function = activation_function
        creation = (creation_function if creation_function is not None
                    else NeuralMMAgent.create_neural_structure)
        (self.weights, self.weight_deltas, self.activations, self.errors,
         self.thetas, self.theta_deltas) = creation(
            num_in_nodes, num_hid_nodes, num_hid_layers, num_out_nodes, self.random)

    def train_net(self, input_list, output_list, max_num_epoch=100000,
                  max_sse=0.1):
        '''Trains neural net using incremental learning
        (update once per input-output pair)
        Arguments:
            input_list -- 2D list of inputs
            output_list -- 2D list of outputs matching inputs
        Returns:
            list of the total SSE recorded after each epoch
        '''
        all_err = []
        for _epoch in range(max_num_epoch):
            total_err = 0.0
            for inputs, targets in zip(input_list, output_list):
                outputs = self._feed_forward(inputs)
                total_err += sum((t - o) ** 2 for t, o in zip(targets, outputs))
                self._calculate_deltas(targets)
                self._adjust_weights_thetas()
            all_err.append(total_err)
            if total_err < max_sse:
                break
        # Show us how our error has changed (best-effort: skip silently when
        # matplotlib is not installed, e.g. on a headless grader).
        try:
            import matplotlib.pyplot as plt
            plt.plot(all_err)
            plt.show()
        except ImportError:
            pass
        return all_err

    def _feed_forward(self, inputs):
        '''Propagates one input pattern forward; returns the output activations.'''
        act_fn = self.activation_function[0]
        self.activations[0] = [float(value) for value in inputs]
        for layer in range(1, len(self.layer_sizes)):
            prev_acts = self.activations[layer - 1]
            n_to = self.layer_sizes[layer]
            weights = self.weights[layer - 1]
            thetas = self.thetas[layer]
            new_acts = []
            for j in range(n_to):
                summed = thetas[j]
                for i, prev in enumerate(prev_acts):
                    summed += prev * weights[i * n_to + j]
                new_acts.append(act_fn(summed))
            self.activations[layer] = new_acts
        return self.activations[-1]

    def _calculate_deltas(self, target_output):
        '''Used to calculate all weight deltas for our neural net
        Arguments:
            target_output -- desired outputs for the pattern most recently
                fed forward (used to form the output error)
        '''
        deriv = self.activation_function[1]
        last = len(self.layer_sizes) - 1
        # Calculate error gradient for each output node...
        self.errors[last] = [(target - out) * deriv(out)
                             for target, out in zip(target_output, self.activations[last])]
        # ...& propagate error backward through the hidden layers.
        for layer in range(last - 1, 0, -1):
            n_next = self.layer_sizes[layer + 1]
            weights = self.weights[layer]
            next_err = self.errors[layer + 1]
            self.errors[layer] = [
                deriv(act) * sum(weights[j * n_next + k] * next_err[k]
                                 for k in range(n_next))
                for j, act in enumerate(self.activations[layer])]
        # Weight deltas (with momentum) going backward from output_nodes.
        for layer in range(last):
            n_to = self.layer_sizes[layer + 1]
            for i, act in enumerate(self.activations[layer]):
                for j in range(n_to):
                    idx = i * n_to + j
                    self.weight_deltas[layer][idx] = (
                        self.learning_rate * self.errors[layer + 1][j] * act
                        + self.momentum * self.weight_deltas[layer][idx])
        # Theta (bias) deltas for every non-input layer.
        for layer in range(1, last + 1):
            for j in range(self.layer_sizes[layer]):
                self.theta_deltas[layer][j] = (
                    self.learning_rate * self.errors[layer][j]
                    + self.momentum * self.theta_deltas[layer][j])

    def _adjust_weights_thetas(self):
        '''Used to apply deltas computed by _calculate_deltas.'''
        for layer_w, layer_d in zip(self.weights, self.weight_deltas):
            for idx, delta in enumerate(layer_d):
                layer_w[idx] += delta
        for layer_t, layer_d in zip(self.thetas, self.theta_deltas):
            for j, delta in enumerate(layer_d):
                layer_t[j] += delta

    @staticmethod
    def create_neural_structure(num_in, num_hid, num_hid_layers, num_out, rand_obj):
        ''' Creates the structures needed for a simple backprop neural net
        This method creates random weights [-0.5, 0.5]
        Arguments:
            num_in -- total # of input nodes for Neural Net
            num_hid -- total # of hidden nodes for each hidden layer
                in the Neural Net
            num_hid_layers -- total # of hidden layers for Neural Net
            num_out -- total # of output nodes for Neural Net
            rand_obj -- the random object that will be used to selecting
                random weights
        Outputs:
            Tuple w/ the following items
                1st - 2D list of initial weights
                2nd - 2D list for weight deltas
                3rd - 2D list for activations
                4th - 2D list for errors
                5th - 2D list of thetas for threshold
                6th - 2D list for thetas deltas
        '''
        sizes = [num_in] + [num_hid] * num_hid_layers + [num_out]
        # One flat weight list per pair of adjacent layers.
        weights = [[rand_obj.uniform(-0.5, 0.5) for _ in range(n_from * n_to)]
                   for n_from, n_to in zip(sizes, sizes[1:])]
        weight_deltas = [[0.0] * len(layer) for layer in weights]
        activations = [[0.0] * n for n in sizes]
        errors = [[0.0] * n for n in sizes]
        thetas = [[0.0] * n for n in sizes]
        theta_deltas = [[0.0] * n for n in sizes]
        return (weights, weight_deltas, activations, errors, thetas, theta_deltas)

    #-----Begin ACCESSORS-----#
    def get_weights(self):
        '''Returns the current 2D list of connection weights.'''
        return self.weights

    def set_weights(self, weights):
        '''Replaces all weights (2D list, one flat list per layer gap);
        resets the matching momentum deltas.'''
        self.weights = [list(layer) for layer in weights]
        self.weight_deltas = [[0.0] * len(layer) for layer in weights]

    def get_thetas(self):
        '''Returns the current 2D list of thresholds (one list per layer).'''
        return self.thetas

    def set_thetas(self, thetas):
        '''Replaces all thresholds; resets the matching momentum deltas.'''
        self.thetas = [list(layer) for layer in thetas]
        self.theta_deltas = [[0.0] * len(layer) for layer in thetas]
    #-----End ACCESSORS-----#

    @staticmethod
    def sigmoid_af(summed_input):
        '''Sigmoid (logistic) activation: 1 / (1 + e**-summed_input).'''
        return 1.0 / (1.0 + math.exp(-summed_input))

    @staticmethod
    def sigmoid_af_deriv(sig_output):
        '''Derivative of the sigmoid, expressed in terms of its OUTPUT value.'''
        return sig_output * (1.0 - sig_output)
# Driver: build a 2-2-1 agent, seed it with known weights/thetas, and train it
# on the XOR truth table.  (BUG FIX: the original continuation put the
# backslash at the START of the second line, which is a SyntaxError.)
test_agent = NeuralMMAgent(2, 2, 1, 1, random_seed=5, max_epoch=1000000,
                           learning_rate=0.2, momentum=0)
test_in = [[1, 0], [0, 0], [1, 1], [0, 1]]
test_out = [[1], [0], [0], [1]]
test_agent.set_weights([[-.37, .26, .1, -.24], [-.01, -.05]])
test_agent.set_thetas([[0, 0], [0, 0], [0]])
test_agent.train_net(test_in, test_out, max_sse=test_agent.max_sse,
                     max_num_epoch=test_agent.max_epoch)
'''
Class to for Neural Net Agents
'''
def __init__(self, num_in_nodes, num_hid_nodes, num_hid_layers, num_out_nodes,
             learning_rate=0.2, max_epoch=10000, max_sse=.01, momentum=0.2,
             creation_function=None, activation_function=None, random_seed=1):
    '''Configures a neural-net agent and its (empty) Keras model.

    Arguments:
        num_in_nodes -- number of input nodes (must be > 0)
        num_hid_nodes -- nodes per hidden layer (must be truthy)
        num_hid_layers -- number of hidden layers (must be > 0)
        num_out_nodes -- number of output nodes (must be > 0)
        learning_rate -- step size used when propagating error
        max_epoch -- default epoch cap for training
        max_sse -- default target sum-squared error
        momentum -- momentum coefficient for weight updates
        creation_function -- optional network-construction hook (unused here)
        activation_function -- optional [activation, derivative] pair (unused here)
        random_seed -- stored so runs can be reproduced
    '''
    assert num_in_nodes > 0 and num_hid_layers > 0 and num_hid_nodes and \
        num_out_nodes > 0, "Illegal number of input, hidden, or output layers!"
    # Stash every hyperparameter on the instance in one sweep.
    hyperparams = dict(learning_rate=learning_rate, max_epoch=max_epoch,
                       max_sse=max_sse, momentum=momentum,
                       random_seed=random_seed)
    for attr_name, attr_value in hyperparams.items():
        setattr(self, attr_name, attr_value)
    # Empty sequential model; layers are expected to be added elsewhere.
    self.model = keras.models.Sequential()
def train_net(self, input_list, output_list, max_num_epoch=100000,
max_sse=0.1):
''' Trains neural net using incremental learning
(update once per input-output pair)
Arguments:
input_list -- 2D list of inputs
output_list -- 2D list of outputs matching inputs
'''
# Some code...#
all_err.append(total_err)
if (total_err < max_sse):
"break"
# Show us how our error has changed
plt.plot(all_err)
plt.show()
def _calculate_deltas(self, scores):
    '''Used to calculate all weight deltas for our neural net.

    Arguments:
        scores -- output error (typically SSE), obtained using target
            output and actual output

    Stub: no implementation yet.
    '''
    # Calculate error gradient for each output node & propagate error
    # (calculate weight deltas going backward from output_nodes)
def _adjust_weights_thetas(self):
    '''Used to apply deltas (weight/theta updates) to the network.

    Stub: no implementation yet.
    '''
def classify_input(self, current_yaw, current_life):
    '''Maps one game observation (yaw, life) to a network decision.

    Stub: empty body.  NOTE(review): presumably this will feed the two
    readings through self.model and return its prediction -- nothing in
    the visible code grounds that; confirm before relying on it.
    '''
@staticmethod
def create_neural_structure(num_in, num_hid, num_hid_layers, num_out, rand_obj):
""" Creates the structures needed for a simple backprop neural net
This method creates random weights [-0.5, 0.5]
Arguments:
num_in -- total # of input nodes for Neural Net
num_hid -- total # of hidden nodes for each hidden layer
in the Neural Net
num_hid_layers -- total # of hidden layers for Neural Net
num_out -- total # of output nodes for Neural Net
rand_obj -- the random object that will be used to selecting
random weights
Outputs:
Tuple w/ the following items
1st - 2D list of initial weights
2nd - 2D list for weight deltas
3rd - 2D list for activations
4th - 2D list for errors
5th - 2D list of thetas for threshold
6th - 2D list for thetas deltas
"""
# -----Begin ACCESSORS-----#
# -----End ACCESSORS-----#
@staticmethod
def sigmoid_af(summed_input):
"""Sigmoid function"""
@staticmethod
def sigmoid_af_deriv(sig_output):
"""the derivative of the sigmoid function"""
# Smoke-test driver: build a 2-2-1 agent for the XOR truth table.
test_agent = NeuralMMAgent(2, 2, 1, 1, random_seed=5, max_epoch=1000000, learning_rate=0.2, momentum=0)
test_in = [[1, 0], [0, 0], [1, 1], [0, 1]]  # XOR inputs
test_out = [[1], [0], [0], [1]]  # XOR targets, one per input row
# Disabled until set_weights/set_thetas/train_net are implemented:
#test_agent.set_weights([[-.37, .26, .1, -.24], [-.01, -.05]])
#test_agent.set_thetas([[0, 0], [0, 0], [0]])
#test_agent.train_net(test_in, test_out,
#                     max_sse=test_agent.max_sse, max_num_epoch=test_agent.max_epoch)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment