Initial commit.

This commit is contained in:
Alex Huddleston 2017-12-01 20:27:20 +00:00
commit f825ec3c9d
5 changed files with 227 additions and 0 deletions

1
.gitignore vendored Normal file
View file

@@ -0,0 +1 @@
data/

BIN
420Project17fall.docx Normal file

Binary file not shown.

BIN
420Project17fall.pdf Normal file

Binary file not shown.

11
README.md Normal file
View file

@@ -0,0 +1,11 @@
See 420Project17fall.pdf
Use this script to download the data files, because I'm not hosting them here.
~~~~
mkdir "data"
for i in {0..28}
do
wget "http://courses.cse.tamu.edu/daugher/misc/PPP/homeworks/data/${i}states.bin" -O "data/${i}.bin"
done
~~~~

215
neural_network.ipynb Normal file
View file

@@ -0,0 +1,215 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
}
],
"source": [
"# If you're really new to python this code might be\n",
"# a bit unreadable, but I tried to make it as simple\n",
"# as possible.\n",
"\n",
"# Setting up our imported libraries.\n",
"import numpy as np\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense\n",
"\n",
"# We're going to use numpy for some easy\n",
"# array functionality with keras, and keras\n",
"# is our library for handling most of our neural network\n",
"# stuff. That being said, you need to have some sort of\n",
"# backend for keras to work with, such as the recommended\n",
"# TensorFlow, which I'm using here.\n",
"# Since keras uses those as a backend, you don't inherently\n",
"# need to import it."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"240000\n",
"240\n",
"[[ 0.28588903 0.05308564 0.99171479 ..., 0.92657084 0.09114427\n",
" 0.76495161]\n",
" [ 0.50998915 0.74032164 0.04898317 ..., 0.77742777 0.46720853\n",
" 0.01731216]\n",
" [ 0.31522802 0.11448062 0.40291163 ..., 0.87519373 0.31255597\n",
" 0.7202333 ]\n",
" ..., \n",
" [ 0.13906598 0.99536312 0.36709839 ..., 0.68740262 0.9536678\n",
" 0.53053495]\n",
" [ 0.13696298 0.91392043 0.5846018 ..., 0.84365665 0.92837426\n",
" 0.18738981]\n",
" [ 0.05775272 0.7919279 0.51444914 ..., 0.53078037 0.67684536\n",
" 0.25327729]]\n"
]
}
],
"source": [
"# Generating dummy data so I can understand how to format the input.\n",
"data = np.random.random((1000,240))\n",
"output = np.random.random((1000, 29))\n",
"\n",
"# Here's some printouts to see exactly what I'm doing here.\n",
"print(data.size)\n",
"print(data[0].size)\n",
"print(data)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/5\n",
"1000/1000 [==============================] - 4s 4ms/step - loss: 0.0986 - acc: 0.0250\n",
"Epoch 2/5\n",
"1000/1000 [==============================] - 0s 155us/step - loss: 0.0936 - acc: 0.0350\n",
"Epoch 3/5\n",
"1000/1000 [==============================] - 0s 157us/step - loss: 0.0912 - acc: 0.0380\n",
"Epoch 4/5\n",
"1000/1000 [==============================] - 0s 154us/step - loss: 0.0901 - acc: 0.0370\n",
"Epoch 5/5\n",
"1000/1000 [==============================] - 0s 162us/step - loss: 0.0894 - acc: 0.0370\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x7fe4fae027f0>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Sets up a Sequential model, Sequential is all\n",
"# that should need to be used for this project,\n",
"# considering that it will only be dealing with\n",
"# a linear stack of layers of neurons.\n",
"model = Sequential()\n",
"\n",
"# Adding layers to the model.\n",
"\n",
"# Dense is the type of layer I think, don't need\n",
"# to look into this more since this is all I should\n",
"# need to use.\n",
"\n",
"# units = the number of neurons for this layer.\n",
"\n",
"# activation = the activation function for this layer.\n",
"# our project doc says to use hyperbolic tangent, so\n",
"# I set this to tanh. Except for the output layer,\n",
"# which I set to sigmoid.\n",
"\n",
"# input_dim = the dimension of the input list,\n",
"# should only be set for the input layer.\n",
"\n",
"model.add(Dense(units=240, activation='tanh', input_dim=240))\n",
"model.add(Dense(units=120, activation='tanh'))\n",
"model.add(Dense(units=29, activation='sigmoid'))\n",
"\n",
"# Configure the learning process.\n",
"# \n",
"\n",
"# optimizer = I'm just using \n",
"# Stochastic gradient descent for this,\n",
"# remember that this uses essentially staggered\n",
"# aggregation by step to calculate gradient\n",
"# descent towards our target, which is faster\n",
"# than doing all of the calculation together.\n",
"\n",
"# loss = the loss function, currently I'm using\n",
"# mean squared error, but might change to\n",
"# mean_absolute_percentage_error, considering\n",
"# I think we're supposed to calculate cost\n",
"# based on the percentage we are away from the\n",
"# correct number of moves away from the solved state.\n",
"\n",
"# metrics = evaluation metrics...\n",
"# I think all I care about is accuracy in this case,\n",
"# if anything.\n",
"\n",
"model.compile(optimizer='sgd',\n",
" loss='mean_squared_error',\n",
" metrics=['accuracy'])\n",
"\n",
"# This is where we're configuring how we train the network:\n",
"\n",
"# data = the input sets of training data for this network,\n",
"# in my case I'm unsure what exactly that will be.\n",
"\n",
"# output = the input sets of target data for the network,\n",
"# I believe this should just be a set of the same size\n",
"# as the training data all containing the number\n",
"# of steps until being solved for each state... I think.\n",
"\n",
"# epochs = it seems like this is how many times this\n",
"# training should be run...\n",
"\n",
"# batch_size = I'm pretty sure this directly correlates\n",
"# to how many input sets we train per step.\n",
"\n",
"model.fit(data, output, epochs=5, batch_size=10)\n",
"\n",
"# Generating predictions should look like this:\n",
"\n",
"# predictions = model.predict(testing_data, batch_size=10)\n",
"\n",
"# I've commented it out since I don't have any real\n",
"# data to predict yet."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}