Removed comment code, cleaned up notebook.
This commit is contained in:
parent
f825ec3c9d
commit
2ba5366c4c
1 changed file with 26 additions and 70 deletions
@@ -14,23 +14,10 @@
}
],
"source": [
"# If you're really new to python this code might be\n",
|
||||
"# a bit unreadable, but I tried to make it as simple\n",
|
||||
"# as possible.\n",
"\n",
"# Setting up our imported libraries.\n",
"import numpy as np\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense\n",
"\n",
"# We're going to use numpy for some easy\n",
|
||||
"# array functionality with keras, and keras\n",
|
||||
"# is our library for handling most of our neural network\n",
|
||||
"# stuff. That being said, you need to have some sort of\n",
|
||||
"# backend for keras to work with, such as the recommended\n",
|
||||
"# TensorFlow, which I'm using here.\n",
|
||||
"# Since keras uses those as a backend, you don't inherently\n",
|
||||
"# need to import it."
"from keras.layers import Dense"
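"# If you want to double-check which backend keras\n",
"# picked up, something like this should work:\n",
"# from keras import backend as K\n",
"# print(K.backend())   # expect 'tensorflow'\n",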
]
},
{
@@ -61,14 +48,12 @@
}
],
"source": [
"# Generating dummy data so I can understand how to input.\n",
|
||||
"# Generating dummy data.\n",
"\n",
"data = np.random.random((1000,240))\n",
"output = np.random.random((1000, 29))\n",
"\n",
"# Here's some printouts to see exactly what I'm doing here.\n",
"print(data.size)\n",
"print(data[0].size)\n",
"print(data)"
"# Replace this with parser code later."
]
},
{
@@ -108,77 +93,48 @@
"# that should need to be used for this project,\n",
"# considering that it will only be dealing with\n",
"# a linear stack of layers of neurons.\n",
"\n",
"model = Sequential()\n",
"\n",
"# Adding layers to the model.\n",
"\n",
"# Dense is the type of layer I think, don't need\n",
|
||||
"# to look into this more since this is all I should\n",
|
||||
"# need to use.\n",
"\n",
"# units = the number of neurons for this layer.\n",
"\n",
"# activation = the activation function for this layer.\n",
"# Our project doc says to use hyperbolic tangent, so\n",
"# I set this to tanh, except for the output layer,\n",
"# which I set to sigmoid.\n",
"\n",
"# input_dim = the dimension of the input list,\n",
"# should only be set for the input layer.\n",
"\n",
"model.add(Dense(units=240, activation='tanh', input_dim=240))\n",
"model.add(Dense(units=120, activation='tanh'))\n",
"model.add(Dense(units=29, activation='sigmoid'))\n",
"\n",
"# Configure the learning process.\n",
"\n",
"# optimizer = I'm just using \n",
|
||||
"# Stomchastic gradient descent for this,\n",
|
||||
"# remember that this uses essentially staggered\n",
|
||||
"# aggregation by step to calculate gradient\n",
|
||||
"# descent towards our target, which is faster\n",
|
||||
"# than doing all of the calculation together.\n",
"\n",
"# loss = the loss function, currently I'm using\n",
"# mean squared error, but might change to\n",
"# mean_absolute_percentage_error, considering\n",
"# I think we're supposed to calculate cost\n",
"# based on how far, as a percentage, we are from\n",
"# the correct number of moves away from the solved state.\n",
"\n",
"# metrics = evaluation metrics...\n",
"# I think all I care about is accuracy in this case,\n",
"# if anything.\n",
"\n",
"model.compile(optimizer='sgd',\n",
"              loss='mean_squared_error',\n",
"              metrics=['accuracy'])\n",
"\n",
"# This is where we're configuring how we train the network:\n",
"\n",
"# data = the input sets of training data for this network,\n",
"# in my case I'm unsure what exactly that will be.\n",
"\n",
"# output = the input sets of target data for the network,\n",
|
||||
"# I believe this should just be a set of the same size\n",
|
||||
"# as the training data all containing the number\n",
|
||||
"# of steps until being solved for each state... I think.\n",
"\n",
"# epochs = it seems like this is how many times this\n",
|
||||
"# training should be run...\n",
"\n",
"# batch_size = I'm pretty sure this directly correlates\n",
|
||||
"# to how many input sets we train per step.\n",
"# Train the network.\n",
"\n",
"model.fit(data, output, epochs=5, batch_size=10)\n",
"\n",
"# Generating predictions should look like this:\n",
"\n",
"# predictions = model.predict(testing_data, batch_size=10)\n",
"# predictions = model.predict(testing_data, batch_size=10)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf23456789abcdef0\n"
]
}
],
"source": [
"with open('data/0.bin', 'rb') as f:\n",
"    data = f.read()\n",
"\n",
"# I've commented it out since I don't have any real\n",
"# data to predict yet."
"print(hex(int.from_bytes(data, byteorder='little', signed=False)))"
]
},
{