{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
}
],
"source": [
"# Setting up our imported libraries.\n",
"import numpy as np\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"240000\n",
"240\n",
"[[ 0.28588903 0.05308564 0.99171479 ..., 0.92657084 0.09114427\n",
" 0.76495161]\n",
" [ 0.50998915 0.74032164 0.04898317 ..., 0.77742777 0.46720853\n",
" 0.01731216]\n",
" [ 0.31522802 0.11448062 0.40291163 ..., 0.87519373 0.31255597\n",
" 0.7202333 ]\n",
" ..., \n",
" [ 0.13906598 0.99536312 0.36709839 ..., 0.68740262 0.9536678\n",
" 0.53053495]\n",
" [ 0.13696298 0.91392043 0.5846018 ..., 0.84365665 0.92837426\n",
" 0.18738981]\n",
" [ 0.05775272 0.7919279 0.51444914 ..., 0.53078037 0.67684536\n",
" 0.25327729]]\n"
]
}
],
"source": [
"# Generating dummy data.\n",
"\n",
"data = np.random.random((1000,240))\n",
"output = np.random.random((1000, 29))\n",
"\n",
"# Replace this with parser code later."
]
},
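{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A minimal sketch of what the parser mentioned above could look like.\n",
"# The puzzle file format is not documented in this notebook, so the\n",
"# encoding below (normalise each raw byte to [0, 1] and zero-pad to the\n",
"# 240 network inputs) is only a placeholder assumption, as are the\n",
"# 'data/<i>.bin' file layout and the number of training examples.\n",
"\n",
"def parse_puzzle(path, n_inputs=240):\n",
"    with open(path, 'rb') as f:\n",
"        raw = f.read()\n",
"    features = np.zeros(n_inputs)\n",
"    features[:min(len(raw), n_inputs)] = [b / 255.0 for b in raw[:n_inputs]]\n",
"    return features\n",
"\n",
"# Example usage once real puzzle files and labels exist:\n",
"# data = np.array([parse_puzzle('data/%d.bin' % i) for i in range(1000)])"
]
},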
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/5\n",
"1000/1000 [==============================] - 4s 4ms/step - loss: 0.0986 - acc: 0.0250\n",
"Epoch 2/5\n",
"1000/1000 [==============================] - 0s 155us/step - loss: 0.0936 - acc: 0.0350\n",
"Epoch 3/5\n",
"1000/1000 [==============================] - 0s 157us/step - loss: 0.0912 - acc: 0.0380\n",
"Epoch 4/5\n",
"1000/1000 [==============================] - 0s 154us/step - loss: 0.0901 - acc: 0.0370\n",
"Epoch 5/5\n",
"1000/1000 [==============================] - 0s 162us/step - loss: 0.0894 - acc: 0.0370\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x7fe4fae027f0>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Sets up a Sequential model, Sequential is all\n",
"# that should need to be used for this project,\n",
"# considering that it will only be dealing with\n",
"# a linear stack of layers of neurons.\n",
"\n",
"model = Sequential()\n",
"\n",
"# Adding layers to the model.\n",
"\n",
"model.add(Dense(units=240, activation='tanh', input_dim=240))\n",
"model.add(Dense(units=120, activation='tanh'))\n",
"model.add(Dense(units=29, activation='sigmoid'))\n",
"\n",
"# Configure the learning process.\n",
"\n",
"model.compile(optimizer='sgd',\n",
" loss='mean_squared_error',\n",
" metrics=['accuracy'])\n",
"\n",
"# Train the network.\n",
"\n",
"model.fit(data, output, epochs=5, batch_size=10)\n",
"\n",
"# Generating predictions should look like this:\n",
"\n",
"# predictions = model.predict(testing_data, batch_size=10)"
]
},
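{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A runnable sketch of the prediction step described in the comment\n",
"# above. There is no real test set yet, so random dummy inputs stand\n",
"# in for testing_data; the shapes simply match the 240-input network.\n",
"\n",
"testing_data = np.random.random((10, 240))\n",
"predictions = model.predict(testing_data, batch_size=10)\n",
"\n",
"# Each row holds the 29 sigmoid outputs for one puzzle.\n",
"print(predictions.shape)"
]
},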
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf23456789abcdef0\n"
]
}
],
"source": [
"with open('data/0.bin', 'rb') as f:\n",
" data = f.read()\n",
"\n",
"print(hex(int.from_bytes(data, byteorder='little', signed=False)))"
]
},
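{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A small illustration of how byteorder affects the value printed\n",
"# above. The bytes literal is reconstructed from that printed value\n",
"# (the real data/0.bin is not available here), so treat it as an\n",
"# assumption about the file's exact contents.\n",
"\n",
"raw = bytes.fromhex('f0debc9a785634f2')\n",
"\n",
"print(hex(int.from_bytes(raw, byteorder='little')))  # 0xf23456789abcdef0\n",
"print(hex(int.from_bytes(raw, byteorder='big')))     # 0xf0debc9a785634f2"
]
},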
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}