# nn420-private-pine64backup/neural_network.py

from functools import reduce
from os.path import join

import numpy as np
from keras.models import Sequential
from keras.layers import Dense


# Used to format our input binary state.
def format_input(acc, elem):
    hex_elem = (elem - (elem >> 4 << 4))
    for x in range(16):
        if x == hex_elem:
            acc.append(1)
        else:
            acc.append(0)
    hex_elem = (elem >> 4) % 16
    for x in range(16):
        if x == hex_elem:
            acc.append(1)
        else:
            acc.append(0)
    return acc
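
# For example, reduce(format_input, [0x21], []) yields 32 values with a 1 at
# index 1 (the low nibble, 0x1), a 1 at index 18 (16 + the high nibble, 0x2),
# and 0 everywhere else.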

# Calculate the Manhattan distance between two points.
def man_dist(x, y):
    (a_one, a_two), (b_one, b_two) = x, y
    return abs(a_one - b_one) + abs(a_two - b_two)
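
# For example, man_dist((0, 0), (2, 3)) == 5.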

# Sum the Manhattan distances between corresponding pairs of points in two lists.
def man_dist_state(x, y):
    return sum(man_dist(a, b) for a, b in zip(x, y))
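
# For example, man_dist_state([(0, 0), (1, 1)], [(2, 3), (1, 2)]) == 5 + 1 == 6.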

# Used to format the positions we parsed from our binary input.
def format_pos(acc, elem):
    hex_elem = (elem[1] - (elem[1] >> 4 << 4))
    if hex_elem == 0:
        acc.append((hex_elem, (3, 3)))
    else:
        acc.append((hex_elem, ((15 - (elem[0] * 2)) % 4, int((15 - (elem[0] * 2)) / 4))))
    hex_elem = (elem[1] >> 4) % 16
    if hex_elem == 0:
        acc.append((hex_elem, (3, 3)))
    else:
        acc.append((hex_elem, ((15 - (elem[0] * 2 + 1)) % 4, int((15 - (elem[0] * 2 + 1)) / 4))))
    return acc
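
# Each byte of the state contributes two nibbles, so format_pos appends two
# (tile_value, coordinate) pairs per byte; the coordinate appears to be
# (column, row) on a 4x4 board, with the blank tile (0) always mapped to (3, 3).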

# The name of this function is slightly misleading: it simply generates the
# position that each puzzle piece in the current parsed state SHOULD be at.
# The list is built in the same order the pieces were parsed, so the two
# lists line up perfectly.
def generate_pos(acc, elem):
    if elem[0] == 0:
        acc.append((3, 3))
    else:
        acc.append((((elem[0] - 1) % 4), int((elem[0] - 1) / 4)))
    return acc
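
# For example, tile 1 -> (0, 0), tile 2 -> (1, 0), tile 5 -> (0, 1), and the
# blank (0) -> (3, 3).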

# Used to format our ending Manhattan distance into a format
# that can be compared with our 29 output neurons.
def format_man_dist(elem):
    acc = []
    for x in range(28, -1, -1):
        if x == elem:
            acc.append(1)
        else:
            acc.append(0)
    return acc
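
# For example, format_man_dist(0) is a 29-element list whose only 1 is at
# index 28, and format_man_dist(28) has its only 1 at index 0 (the list runs
# from distance 28 down to 0).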

# Build the target (label) vectors: every state read from the i-th states
# file is labelled with the one-hot encoding of i.
target = []

for i in range(29):
    filename = join('/pub/faculty_share/daugher/datafiles/data/', str(i) + 'states.bin')

    # Debugging to print the current file from which states are being parsed.
    #print(i)

    temp = []

    with open(filename, 'rb') as f:
        data = f.read(8)
        counter = 0

        while data and counter < 2000:
            temp.append(format_man_dist(i))
            data = f.read(8)
            counter += 1

    target.append(temp)

#print(target[28][500])
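
# At this point target[i] holds up to 2000 identical one-hot vectors, one per
# state read from the i-th states file.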

# Set up a Sequential model. Sequential is all that should be needed for
# this project, since it will only be dealing with a linear stack of layers
# of neurons.
model = Sequential()

# Add layers to the model.
model.add(Dense(units=240, activation='tanh', input_dim=240))
model.add(Dense(units=120, activation='tanh'))
model.add(Dense(units=60, activation='tanh'))
model.add(Dense(units=29, activation='sigmoid'))
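
# The 240 inputs match the 240-element vectors built below (16 nibbles x 16
# one-hot values = 256, minus the 16 entries dropped by bin_data[16:]); the 29
# outputs line up with the 29-element vectors produced by format_man_dist.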

# Configure the learning process.
model.compile(optimizer='sgd',
              loss='mean_squared_error',
              metrics=['accuracy'])

for i in range(29):
    filename = join('/pub/faculty_share/daugher/datafiles/data/', str(i) + 'states.bin')

    # Debugging to print the current file from which states are being parsed.
    print(i)

    with open(filename, 'rb') as f:
        data = f.read(8)
        counter = 0
        training = []

        while data and counter < 2000:
            bin_data = reduce(format_input, list(data), [])
            bin_data.reverse()
            bin_data = bin_data[16:]

            training.append(bin_data)

            data = f.read(8)
            counter += 1

    #print(training[0])

    # Train the network.
    model.fit(np.array(training), np.array(target[i]), epochs=8, batch_size=2000)
    #model.train_on_batch(np.array(temp), np.array(target))
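
# Note: each model.fit call above sees only the 2000 states from a single
# distance file, so the network is fitted one distance class at a time rather
# than on a shuffled mix of all classes.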

# Used for testing data.
for i in range(11, 29):
    filename = join('/pub/faculty_share/daugher/datafiles/data/', str(i) + 'states.bin')

    print(i)

    with open(filename, 'rb') as f:
        # Skip the first 2000 states in each file (the ones used for training above).
        for _ in range(2000):
            data = f.read(8)

        data = f.read(8)
        counter = 0
        testing = []
        testing_target = []

        while data and counter < 10000:
            bin_data = reduce(format_input, list(data), [])
            bin_data.reverse()
            bin_data = bin_data[16:]

            testing.append(bin_data)

            pos_data = reduce(format_pos, enumerate(list(data)), [])
            pos_data.reverse()
            pos_data = pos_data[1:]

            state_pos = []
            for p in pos_data:
                state_pos.append(p[1])

            testing_target_pos = reduce(generate_pos, pos_data, [])

            testing_target.append(format_man_dist(man_dist_state(state_pos, testing_target_pos)))

            counter += 1
            data = f.read(8)
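
    # Unlike training, where the label is the file's move count i, the test
    # label here is the state's actual Manhattan distance computed from the
    # parsed positions via man_dist_state.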

    # Evaluate accuracy.
    loss_and_metrics = model.evaluate(np.array(testing), np.array(testing_target), batch_size=1000)

    # Generating predictions:
    predictions = model.predict(np.array(testing), batch_size=1000)

    output = []

    for p in range(len(predictions)):
        if np.argmax(testing_target[p]) < 18:
            output.append(100 * ((18 - (28 - np.argmax(predictions[p]))) / (18 - np.argmax(testing_target[p]))))
        else:
            output.append(0)

    #for i in range(len(output)):
    #    print(output[i])

    print("Percentage possible improvement: ", np.array(output).mean())
    print(model.metrics_names[0], loss_and_metrics[0])
    print(model.metrics_names[1], loss_and_metrics[1])