Neural Network
Create a simple feed-forward neural network in Python. You must implement the following components:
- An input layer that receives data and passes it on
- Hidden layers (at least two, so the network is at least a 2-hidden-layer network)
- An output layer
- Weights and biases between each pair of layers
- The sigmoid activation function on each hidden layer
- A feed-forward pass that propagates inputs through the network
- Training of the weights and biases via back-propagation (a minimal NumPy sketch of these pieces follows below).
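For reference, here is a minimal stand-alone NumPy sketch of those pieces (hypothetical layer sizes and names; the notebook below implements the same ideas with Keras): explicit weight and bias matrices, two sigmoid hidden layers, a forward pass, and a back-propagation update that uses the sigmoid derivative s(z)(1 - s(z)).

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

# Tiny 13 -> 8 -> 4 -> 1 regression network; sizes are arbitrary for illustration
rng = np.random.default_rng(0)
W1, b1 = rng.normal(0, 0.1, (13, 8)), np.zeros(8)
W2, b2 = rng.normal(0, 0.1, (8, 4)), np.zeros(4)
W3, b3 = rng.normal(0, 0.1, (4, 1)), np.zeros(1)

def train_step(X, y, lr=1e-2):
    """One forward + backward pass of mean-squared-error back-propagation."""
    global W1, b1, W2, b2, W3, b3
    # forward pass: the input layer just passes X on; sigmoid on the hidden layers
    a1 = sigmoid(X @ W1 + b1)
    a2 = sigmoid(a1 @ W2 + b2)
    y_pred = a2 @ W3 + b3                      # linear output for regression
    # backward pass: propagate the error gradient layer by layer
    d3 = 2.0 * (y_pred - y.reshape(-1, 1)) / len(X)
    d2 = (d3 @ W3.T) * a2 * (1 - a2)           # sigmoid'(z) = s(z) * (1 - s(z))
    d1 = (d2 @ W2.T) * a1 * (1 - a1)
    # gradient-descent updates of all weights and biases
    W3 -= lr * (a2.T @ d3); b3 -= lr * d3.sum(axis=0)
    W2 -= lr * (a1.T @ d2); b2 -= lr * d2.sum(axis=0)
    W1 -= lr * (X.T @ d1);  b1 -= lr * d1.sum(axis=0)
    return np.mean((y_pred.ravel() - y) ** 2)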
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Tiana.ipynb",
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "code",
"metadata": {
"id": "L5AWaGEqD_fx"
},
"source": [
"import numpy as np\n",
"import tensorflow as tf\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
"from keras.datasets import boston_housing\n",
"import sys\n",
"import time"
],
"execution_count": 81,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "VMkrkrGTFRb7"
},
"source": [
"# Read DATA"
]
},
{
"cell_type": "code",
"metadata": {
"id": "2CPpkcZzHR5r"
},
"source": [
"(train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()"
],
"execution_count": 57,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "I8Su1eJPHm3q",
"outputId": "60eba728-b37f-46a6-cfe4-1bdddf375a11"
},
"source": [
"print(f'Training data : {train_data.shape}')\n",
"print(f'Test data : {test_data.shape}')\n",
"print(f'Training sample : {train_data[0]}')\n",
"print(f'Training target sample : {train_targets[0]}')"
],
"execution_count": 58,
"outputs": [
{
"output_type": "stream",
"text": [
"Training data : (404, 13)\n",
"Test data : (102, 13)\n",
"Training sample : [ 1.23247 0. 8.14 0. 0.538 6.142 91.7\n",
" 3.9769 4. 307. 21. 396.9 18.72 ]\n",
"Training target sample : 15.2\n"
],
"name": "stdout"
}
]
},
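{
"cell_type": "markdown",
"metadata": {},
"source": [
"The printed sample shows features on very different scales (e.g. 0.538 vs. 396.9), so standardizing each column is a common preprocessing step for this dataset. A minimal sketch, not run in this notebook; `train_data_std`/`test_data_std` are hypothetical names:"
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Sketch: standardize each feature using statistics from the training set only\n",
"mean = train_data.mean(axis=0)\n",
"std = train_data.std(axis=0)\n",
"train_data_std = (train_data - mean) / std\n",
"test_data_std = (test_data - mean) / std"
],
"execution_count": null,
"outputs": []
},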
{
"cell_type": "markdown",
"metadata": {
"id": "_9FZvnIPH0Lw"
},
"source": [
"# Build Model"
]
},
{
"cell_type": "code",
"metadata": {
"id": "OWHDS9M4HznA"
},
"source": [
"model = tf.keras.models.Sequential()\n",
"\n",
"# Add input layer\n",
"model.add(tf.keras.layers.Dense(10, input_dim = train_data.shape[1]))\n",
"\n",
"# Add two hidden layers\n",
"model.add(tf.keras.layers.Dense(64, activation='sigmoid'))\n",
"model.add(tf.keras.layers.Dropout(0.1))\n",
"model.add(tf.keras.layers.Dense(32, activation='sigmoid'))\n",
"model.add(tf.keras.layers.Dropout(0.1))\n",
"model.add(tf.keras.layers.Dense(16, activation='sigmoid'))\n",
"model.add(tf.keras.layers.Dropout(0.1))\n",
"model.add(tf.keras.layers.Dense(8, activation='sigmoid'))\n",
"model.add(tf.keras.layers.Dropout(0.1))\n",
"# Add output layer\n",
"model.add(tf.keras.layers.Dense(1))\n",
"\n",
"# compile\n",
"#model.compile(optimizer = tf.keras.optimizers.Adam(lr=1e-3), loss = 'mse')"
],
"execution_count": 146,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "EMRRvdaZYuU3",
"outputId": "db1c285a-a978-4e10-bda9-e19995185816"
},
"source": [
"for i in range(12):\n",
" print(model.trainable_weights[i].shape)"
],
"execution_count": 136,
"outputs": [
{
"output_type": "stream",
"text": [
"(13, 10)\n",
"(10,)\n",
"(10, 64)\n",
"(64,)\n",
"(64, 32)\n",
"(32,)\n",
"(32, 16)\n",
"(16,)\n",
"(16, 8)\n",
"(8,)\n",
"(8, 1)\n",
"(1,)\n"
],
"name": "stdout"
}
]
},
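{
"cell_type": "markdown",
"metadata": {},
"source": [
"The shapes above come in (kernel, bias) pairs: each Dense layer owns a kernel of shape `(n_in, n_out)` and a bias of shape `(n_out,)`, while the Dropout layers contribute no parameters. An equivalent layer-by-layer view (a sketch, not run here):"
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Sketch: print each Dense layer's kernel and bias shapes by name\n",
"for layer in model.layers:\n",
"    if layer.weights:  # Dropout layers have no weights and are skipped\n",
"        kernel, bias = layer.get_weights()\n",
"        print(layer.name, kernel.shape, bias.shape)"
],
"execution_count": null,
"outputs": []
},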
{
"cell_type": "markdown",
"metadata": {
"id": "HlL7XJMsJFaJ"
},
"source": [
"# Run Model"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 357
},
"id": "J24adzVrJEjL",
"outputId": "ca75e583-6c65-440c-e5f5-938b33fa0ef5"
},
"source": [
"EPOCHS = 1000\n",
"history = model.fit(train_data, train_targets, validation_data = (test_data, test_targets), epochs = EPOCHS, steps_per_epoch=20, verbose = 1)"
],
"execution_count": 137,
"outputs": [
{
"output_type": "error",
"ename": "RuntimeError",
"evalue": "ignored",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mEPOCHS\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1000\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhistory\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_targets\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidation_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mtest_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_targets\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mEPOCHS\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m20\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mverbose\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m 1103\u001b[0m \u001b[0;31m# Legacy graph support is contained in `training_v1.Model`.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1104\u001b[0m \u001b[0mversion_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdisallow_legacy_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Model'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'fit'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1105\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_assert_compile_was_called\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1106\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_call_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'fit'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1107\u001b[0m \u001b[0m_disallow_inside_tf_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'fit'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36m_assert_compile_was_called\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 2691\u001b[0m \u001b[0;31m# (i.e. whether the model is built and its inputs/outputs are set).\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2692\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_is_compiled\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2693\u001b[0;31m raise RuntimeError('You must compile your model before '\n\u001b[0m\u001b[1;32m 2694\u001b[0m \u001b[0;34m'training/testing. '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2695\u001b[0m 'Use `model.compile(optimizer, loss)`.')\n",
"\u001b[0;31mRuntimeError\u001b[0m: You must compile your model before training/testing. Use `model.compile(optimizer, loss)`."
]
}
]
},
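{
"cell_type": "markdown",
"metadata": {},
"source": [
"`model.fit` fails above because the model was never compiled: the `model.compile` call in the build cell is commented out. Compiling would fix the `fit` path; a minimal sketch (the manual GradientTape loop below does not need this step):"
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Sketch: compile the model so that model.fit can run\n",
"model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3), loss='mse')"
],
"execution_count": null,
"outputs": []
},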
{
"cell_type": "markdown",
"metadata": {
"id": "djasrd4OTK9_"
},
"source": [
"# Backpropagation"
]
},
{
"cell_type": "code",
"metadata": {
"id": "cmtFGf0NZjBA"
},
"source": [
"weights = tf.random.uniform(shape=(8,1))\n",
"offsets = tf.random.uniform(shape=(1,))"
],
"execution_count": 147,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "FS6BEJlfZt-b"
},
"source": [
"model.layers[9].set_weights([weights, offsets])"
],
"execution_count": 148,
"outputs": []
},
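{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick way to confirm the re-initialization took effect (sketch):"
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Sketch: read the kernel and bias back from the output layer\n",
"kernel, bias = model.layers[9].get_weights()\n",
"print(kernel.shape, bias.shape)  # expected: (8, 1) (1,)"
],
"execution_count": null,
"outputs": []
},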
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "q_FW6rvvTKnK",
"outputId": "58e33216-bb83-4110-c909-5cb11e6664ad"
},
"source": [
"EPOCHS = 1000\n",
"BATCH_SIZE = 64\n",
"LEARNING_RATE = 1E-3\n",
"\n",
"opt = tf.keras.optimizers.RMSprop(learning_rate=LEARNING_RATE, decay=LEARNING_RATE / EPOCHS)\n",
"\n",
"# calculate number of iteratios to do\n",
"num_iters = int(train_data.shape[0] / BATCH_SIZE)\n",
"\n",
"loss_fn = loss = tf.keras.losses.mean_squared_error\n",
"losses = []\n",
"val_losses = []\n",
"for epoch in range(0, EPOCHS):\n",
"\tsys.stdout.flush()\n",
"\tepoch_start = time.time()\n",
"\tfor i in range(0, num_iters):\n",
"\t\t# determine starting and ending index of batch\n",
"\t\tstart = i * BATCH_SIZE\n",
"\t\tend = start + BATCH_SIZE\n",
"\t\t# step\n",
"\t\twith tf.GradientTape() as tape:\n",
" # predict\n",
"\t\t\tX_ = train_data[start:end]\n",
"\t\t\ty_ = train_targets[start:end]\n",
"\t\t\ty_pred = model(X_) # this returns an array of size (N, 13) where N is the size of the test dataset.\n",
" # Pick the label with the highest probability\n",
"\t\t\tloss = loss_fn(y_, y_pred) # calculate loss\n",
"\t\t\tXtest_ = test_data\n",
"\t\t\tytest_ = test_targets\n",
"\t\t\tytest_pred = model(Xtest_)\n",
"\t\t\tval_loss = np.mean(loss_fn(ytest_, ytest_pred))\n",
"\n",
" # calculate the gradients using our tape and then update the\n",
" # model weights\n",
"\t\tgrads = tape.gradient(loss, model.trainable_weights)\n",
"\t\topt.apply_gradients(zip(grads, model.trainable_weights))\n",
"\t# show timing information for the epoch\n",
" # show the current epoch number\n",
"\tprint(\"Epoch {}/{} - loss: {:.4f} - val_loss: {:.4f}\".format(\n",
"\t\tepoch + 1, EPOCHS, np.mean(loss), np.mean(val_loss)), end=\"\")\n",
"\tepoch_end = time.time()\n",
"\telapsed = (epoch_end - epoch_start)\n",
"\tprint(\"-> took {:.2f} seconds\".format(elapsed))\n",
"\tlosses.append(np.mean(loss.numpy()))\n",
"\tval_losses.append(val_loss)"
],
"execution_count": 150,
"outputs": [
{
"output_type": "stream",
"text": [
"Epoch 1/1000 - loss: 598.2261 - val_loss: 491.6591-> took 0.13 seconds\n",
"Epoch 2/1000 - loss: 590.0509 - val_loss: 484.2758-> took 0.13 seconds\n",
"Epoch 3/1000 - loss: 583.1237 - val_loss: 478.0358-> took 0.14 seconds\n",
"Epoch 4/1000 - loss: 576.7520 - val_loss: 472.2795-> took 0.13 seconds\n",
"Epoch 5/1000 - loss: 570.6929 - val_loss: 466.8140-> took 0.14 seconds\n",
"Epoch 6/1000 - loss: 564.8884 - val_loss: 461.5770-> took 0.13 seconds\n",
"Epoch 7/1000 - loss: 559.2507 - val_loss: 456.4980-> took 0.13 seconds\n",
"Epoch 8/1000 - loss: 553.7295 - val_loss: 451.5350-> took 0.13 seconds\n",
"Epoch 9/1000 - loss: 548.3182 - val_loss: 446.6741-> took 0.15 seconds\n",
"Epoch 10/1000 - loss: 543.0594 - val_loss: 441.9456-> took 0.14 seconds\n",
"Epoch 11/1000 - loss: 537.9497 - val_loss: 437.3539-> took 0.16 seconds\n",
"Epoch 12/1000 - loss: 532.9792 - val_loss: 432.8968-> took 0.15 seconds\n",
"Epoch 13/1000 - loss: 528.1459 - val_loss: 428.5631-> took 0.15 seconds\n",
"Epoch 14/1000 - loss: 523.4313 - val_loss: 424.3358-> took 0.13 seconds\n",
"Epoch 15/1000 - loss: 518.7852 - val_loss: 420.1700-> took 0.13 seconds\n",
"Epoch 16/1000 - loss: 514.2212 - val_loss: 416.0862-> took 0.15 seconds\n",
"Epoch 17/1000 - loss: 509.7957 - val_loss: 412.1268-> took 0.14 seconds\n",
"Epoch 18/1000 - loss: 505.4937 - val_loss: 408.2802-> took 0.13 seconds\n",
"Epoch 19/1000 - loss: 501.3298 - val_loss: 404.5589-> took 0.12 seconds\n",
"Epoch 20/1000 - loss: 497.2869 - val_loss: 400.9485-> took 0.14 seconds\n",
"Epoch 21/1000 - loss: 493.3611 - val_loss: 397.4438-> took 0.13 seconds\n",
"Epoch 22/1000 - loss: 489.5528 - val_loss: 394.0469-> took 0.14 seconds\n",
"Epoch 23/1000 - loss: 485.8574 - val_loss: 390.7527-> took 0.13 seconds\n",
"Epoch 24/1000 - loss: 482.2721 - val_loss: 387.5583-> took 0.13 seconds\n",
"Epoch 25/1000 - loss: 478.7928 - val_loss: 384.4598-> took 0.14 seconds\n",
"Epoch 26/1000 - loss: 475.4146 - val_loss: 381.4541-> took 0.13 seconds\n",
"Epoch 27/1000 - loss: 472.1335 - val_loss: 378.5358-> took 0.12 seconds\n",
"Epoch 28/1000 - loss: 468.9500 - val_loss: 375.7068-> took 0.13 seconds\n",
"Epoch 29/1000 - loss: 465.8613 - val_loss: 372.9629-> took 0.14 seconds\n",
"Epoch 30/1000 - loss: 462.8603 - val_loss: 370.2983-> took 0.13 seconds\n",
"Epoch 31/1000 - loss: 459.9394 - val_loss: 367.7050-> took 0.13 seconds\n",
"Epoch 32/1000 - loss: 457.0928 - val_loss: 365.1799-> took 0.15 seconds\n",
"Epoch 33/1000 - loss: 454.3147 - val_loss: 362.7173-> took 0.12 seconds\n",
"Epoch 34/1000 - loss: 451.6016 - val_loss: 360.3132-> took 0.14 seconds\n",
"Epoch 35/1000 - loss: 448.9493 - val_loss: 357.9639-> took 0.13 seconds\n",
"Epoch 36/1000 - loss: 446.3535 - val_loss: 355.6659-> took 0.13 seconds\n",
"Epoch 37/1000 - loss: 443.8101 - val_loss: 353.4152-> took 0.13 seconds\n",
"Epoch 38/1000 - loss: 441.3170 - val_loss: 351.2101-> took 0.12 seconds\n",
"Epoch 39/1000 - loss: 438.8707 - val_loss: 349.0476-> took 0.13 seconds\n",
"Epoch 40/1000 - loss: 436.4684 - val_loss: 346.9247-> took 0.13 seconds\n",
"Epoch 41/1000 - loss: 434.1071 - val_loss: 344.8392-> took 0.12 seconds\n",
"Epoch 42/1000 - loss: 431.7858 - val_loss: 342.7897-> took 0.13 seconds\n",
"Epoch 43/1000 - loss: 429.5008 - val_loss: 340.7735-> took 0.13 seconds\n",
"Epoch 44/1000 - loss: 427.2509 - val_loss: 338.7893-> took 0.14 seconds\n",
"Epoch 45/1000 - loss: 425.0349 - val_loss: 336.8357-> took 0.13 seconds\n",
"Epoch 46/1000 - loss: 422.8495 - val_loss: 334.9099-> took 0.13 seconds\n",
"Epoch 47/1000 - loss: 420.6929 - val_loss: 333.0105-> took 0.13 seconds\n",
"Epoch 48/1000 - loss: 418.5632 - val_loss: 331.1355-> took 0.13 seconds\n",
"Epoch 49/1000 - loss: 416.4581 - val_loss: 329.2831-> took 0.12 seconds\n",
"Epoch 50/1000 - loss: 414.3763 - val_loss: 327.4519-> took 0.13 seconds\n",
"Epoch 51/1000 - loss: 412.3162 - val_loss: 325.6407-> took 0.13 seconds\n",
"Epoch 52/1000 - loss: 410.2764 - val_loss: 323.8481-> took 0.13 seconds\n",
"Epoch 53/1000 - loss: 408.2556 - val_loss: 322.0730-> took 0.13 seconds\n",
"Epoch 54/1000 - loss: 406.2526 - val_loss: 320.3145-> took 0.13 seconds\n",
"Epoch 55/1000 - loss: 404.2665 - val_loss: 318.5715-> took 0.14 seconds\n",
"Epoch 56/1000 - loss: 402.2963 - val_loss: 316.8432-> took 0.13 seconds\n",
"Epoch 57/1000 - loss: 400.3410 - val_loss: 315.1287-> took 0.13 seconds\n",
"Epoch 58/1000 - loss: 398.4000 - val_loss: 313.4276-> took 0.13 seconds\n",
"Epoch 59/1000 - loss: 396.4724 - val_loss: 311.7390-> took 0.13 seconds\n",
"Epoch 60/1000 - loss: 394.5578 - val_loss: 310.0626-> took 0.12 seconds\n",
"Epoch 61/1000 - loss: 392.6555 - val_loss: 308.3977-> took 0.14 seconds\n",
"Epoch 62/1000 - loss: 390.7649 - val_loss: 306.7437-> took 0.14 seconds\n",
"Epoch 63/1000 - loss: 388.8855 - val_loss: 305.1003-> took 0.13 seconds\n",
"Epoch 64/1000 - loss: 387.0168 - val_loss: 303.4672-> took 0.13 seconds\n",
"Epoch 65/1000 - loss: 385.1586 - val_loss: 301.8438-> took 0.13 seconds\n",
"Epoch 66/1000 - loss: 383.3103 - val_loss: 300.2300-> took 0.14 seconds\n",
"Epoch 67/1000 - loss: 381.4717 - val_loss: 298.6254-> took 0.14 seconds\n",
"Epoch 68/1000 - loss: 379.6424 - val_loss: 297.0295-> took 0.13 seconds\n",
"Epoch 69/1000 - loss: 377.8222 - val_loss: 295.4425-> took 0.13 seconds\n",
"Epoch 70/1000 - loss: 376.0107 - val_loss: 293.8637-> took 0.14 seconds\n",
"Epoch 71/1000 - loss: 374.2077 - val_loss: 292.2930-> took 0.13 seconds\n",
"Epoch 72/1000 - loss: 372.4130 - val_loss: 290.7305-> took 0.15 seconds\n",
"Epoch 73/1000 - loss: 370.6264 - val_loss: 289.1757-> took 0.13 seconds\n",
"Epoch 74/1000 - loss: 368.8479 - val_loss: 287.6287-> took 0.13 seconds\n",
"Epoch 75/1000 - loss: 367.0769 - val_loss: 286.0890-> took 0.14 seconds\n",
"Epoch 76/1000 - loss: 365.3135 - val_loss: 284.5566-> took 0.16 seconds\n",
"Epoch 77/1000 - loss: 363.5576 - val_loss: 283.0315-> took 0.17 seconds\n",
"Epoch 78/1000 - loss: 361.8090 - val_loss: 281.5136-> took 0.13 seconds\n",
"Epoch 79/1000 - loss: 360.0676 - val_loss: 280.0026-> took 0.13 seconds\n",
"Epoch 80/1000 - loss: 358.3332 - val_loss: 278.4985-> took 0.13 seconds\n",
"Epoch 81/1000 - loss: 356.6059 - val_loss: 277.0013-> took 0.13 seconds\n",
"Epoch 82/1000 - loss: 354.8853 - val_loss: 275.5107-> took 0.13 seconds\n",
"Epoch 83/1000 - loss: 353.1716 - val_loss: 274.0269-> took 0.13 seconds\n",
"Epoch 84/1000 - loss: 351.4646 - val_loss: 272.5495-> took 0.12 seconds\n",
"Epoch 85/1000 - loss: 349.7642 - val_loss: 271.0786-> took 0.14 seconds\n",
"Epoch 86/1000 - loss: 348.0704 - val_loss: 269.6144-> took 0.13 seconds\n",
"Epoch 87/1000 - loss: 346.3832 - val_loss: 268.1565-> took 0.12 seconds\n",
"Epoch 88/1000 - loss: 344.7024 - val_loss: 266.7048-> took 0.13 seconds\n",
"Epoch 89/1000 - loss: 343.0279 - val_loss: 265.2596-> took 0.14 seconds\n",
"Epoch 90/1000 - loss: 341.3599 - val_loss: 263.8206-> took 0.13 seconds\n",
"Epoch 91/1000 - loss: 339.6981 - val_loss: 262.3878-> took 0.13 seconds\n",
"Epoch 92/1000 - loss: 338.0426 - val_loss: 260.9612-> took 0.14 seconds\n",
"Epoch 93/1000 - loss: 336.3933 - val_loss: 259.5408-> took 0.12 seconds\n",
"Epoch 94/1000 - loss: 334.7502 - val_loss: 258.1266-> took 0.14 seconds\n",
"Epoch 95/1000 - loss: 333.1133 - val_loss: 256.7184-> took 0.14 seconds\n",
"Epoch 96/1000 - loss: 331.4826 - val_loss: 255.3163-> took 0.14 seconds\n",
"Epoch 97/1000 - loss: 329.8579 - val_loss: 253.9201-> took 0.13 seconds\n",
"Epoch 98/1000 - loss: 328.2395 - val_loss: 252.5301-> took 0.15 seconds\n",
"Epoch 99/1000 - loss: 326.6269 - val_loss: 251.1462-> took 0.14 seconds\n",
"Epoch 100/1000 - loss: 325.0205 - val_loss: 249.7681-> took 0.13 seconds\n",
"Epoch 101/1000 - loss: 323.4201 - val_loss: 248.3961-> took 0.12 seconds\n",
"Epoch 102/1000 - loss: 321.8257 - val_loss: 247.0300-> took 0.13 seconds\n",
"Epoch 103/1000 - loss: 320.2373 - val_loss: 245.6699-> took 0.13 seconds\n",
"Epoch 104/1000 - loss: 318.6548 - val_loss: 244.3157-> took 0.12 seconds\n",
"Epoch 105/1000 - loss: 317.0784 - val_loss: 242.9673-> took 0.15 seconds\n",
"Epoch 106/1000 - loss: 315.5078 - val_loss: 241.6250-> took 0.14 seconds\n",
"Epoch 107/1000 - loss: 313.9434 - val_loss: 240.2886-> took 0.13 seconds\n",
"Epoch 108/1000 - loss: 312.3847 - val_loss: 238.9581-> took 0.13 seconds\n",
"Epoch 109/1000 - loss: 310.8320 - val_loss: 237.6333-> took 0.13 seconds\n",
"Epoch 110/1000 - loss: 309.2853 - val_loss: 236.3146-> took 0.13 seconds\n",
"Epoch 111/1000 - loss: 307.7444 - val_loss: 235.0017-> took 0.14 seconds\n",
"Epoch 112/1000 - loss: 306.2095 - val_loss: 233.6947-> took 0.13 seconds\n",
"Epoch 113/1000 - loss: 304.6804 - val_loss: 232.3934-> took 0.14 seconds\n",
"Epoch 114/1000 - loss: 303.1572 - val_loss: 231.0981-> took 0.13 seconds\n",
"Epoch 115/1000 - loss: 301.6399 - val_loss: 229.8087-> took 0.13 seconds\n",
"Epoch 116/1000 - loss: 300.1285 - val_loss: 228.5250-> took 0.12 seconds\n",
"Epoch 117/1000 - loss: 298.6230 - val_loss: 227.2473-> took 0.13 seconds\n",
"Epoch 118/1000 - loss: 297.1233 - val_loss: 225.9752-> took 0.13 seconds\n",
"Epoch 119/1000 - loss: 295.6295 - val_loss: 224.7091-> took 0.13 seconds\n",
"Epoch 120/1000 - loss: 294.1416 - val_loss: 223.4488-> took 0.14 seconds\n",
"Epoch 121/1000 - loss: 292.6595 - val_loss: 222.1944-> took 0.13 seconds\n",
"Epoch 122/1000 - loss: 291.1833 - val_loss: 220.9457-> took 0.15 seconds\n",
"Epoch 123/1000 - loss: 289.7129 - val_loss: 219.7029-> took 0.13 seconds\n",
"Epoch 124/1000 - loss: 288.2484 - val_loss: 218.4657-> took 0.12 seconds\n",
"Epoch 125/1000 - loss: 286.7898 - val_loss: 217.2346-> took 0.13 seconds\n",
"Epoch 126/1000 - loss: 285.3369 - val_loss: 216.0093-> took 0.13 seconds\n",
"Epoch 127/1000 - loss: 283.8899 - val_loss: 214.7895-> took 0.13 seconds\n",
"Epoch 128/1000 - loss: 282.4487 - val_loss: 213.5759-> took 0.12 seconds\n",
"Epoch 129/1000 - loss: 281.0135 - val_loss: 212.3678-> took 0.13 seconds\n",
"Epoch 130/1000 - loss: 279.5840 - val_loss: 211.1657-> took 0.14 seconds\n",
"Epoch 131/1000 - loss: 278.1604 - val_loss: 209.9692-> took 0.13 seconds\n",
"Epoch 132/1000 - loss: 276.7426 - val_loss: 208.7788-> took 0.14 seconds\n",
"Epoch 133/1000 - loss: 275.3306 - val_loss: 207.5940-> took 0.13 seconds\n",
"Epoch 134/1000 - loss: 273.9244 - val_loss: 206.4150-> took 0.13 seconds\n",
"Epoch 135/1000 - loss: 272.5242 - val_loss: 205.2419-> took 0.14 seconds\n",
"Epoch 136/1000 - loss: 271.1296 - val_loss: 204.0744-> took 0.13 seconds\n",
"Epoch 137/1000 - loss: 269.7411 - val_loss: 202.9128-> took 0.13 seconds\n",
"Epoch 138/1000 - loss: 268.3584 - val_loss: 201.7571-> took 0.13 seconds\n",
"Epoch 139/1000 - loss: 266.9813 - val_loss: 200.6071-> took 0.13 seconds\n",
"Epoch 140/1000 - loss: 265.6102 - val_loss: 199.4629-> took 0.13 seconds\n",
"Epoch 141/1000 - loss: 264.2449 - val_loss: 198.3245-> took 0.13 seconds\n",
"Epoch 142/1000 - loss: 262.8855 - val_loss: 197.1919-> took 0.12 seconds\n",
"Epoch 143/1000 - loss: 261.5318 - val_loss: 196.0650-> took 0.13 seconds\n",
"Epoch 144/1000 - loss: 260.1841 - val_loss: 194.9440-> took 0.13 seconds\n",
"Epoch 145/1000 - loss: 258.8420 - val_loss: 193.8288-> took 0.15 seconds\n",
"Epoch 146/1000 - loss: 257.5058 - val_loss: 192.7193-> took 0.13 seconds\n",
"Epoch 147/1000 - loss: 256.1755 - val_loss: 191.6157-> took 0.13 seconds\n",
"Epoch 148/1000 - loss: 254.8510 - val_loss: 190.5178-> took 0.13 seconds\n",
"Epoch 149/1000 - loss: 253.5323 - val_loss: 189.4258-> took 0.13 seconds\n",
"Epoch 150/1000 - loss: 252.2195 - val_loss: 188.3395-> took 0.13 seconds\n",
"Epoch 151/1000 - loss: 250.9124 - val_loss: 187.2590-> took 0.13 seconds\n",
"Epoch 152/1000 - loss: 249.6112 - val_loss: 186.1842-> took 0.12 seconds\n",
"Epoch 153/1000 - loss: 248.3159 - val_loss: 185.1152-> took 0.13 seconds\n",
"Epoch 154/1000 - loss: 247.0264 - val_loss: 184.0521-> took 0.13 seconds\n",
"Epoch 155/1000 - loss: 245.7426 - val_loss: 182.9946-> took 0.13 seconds\n",
"Epoch 156/1000 - loss: 244.4647 - val_loss: 181.9431-> took 0.13 seconds\n",
"Epoch 157/1000 - loss: 243.1927 - val_loss: 180.8972-> took 0.13 seconds\n",
"Epoch 158/1000 - loss: 241.9265 - val_loss: 179.8573-> took 0.13 seconds\n",
"Epoch 159/1000 - loss: 240.6660 - val_loss: 178.8229-> took 0.13 seconds\n",
"Epoch 160/1000 - loss: 239.4115 - val_loss: 177.7946-> took 0.12 seconds\n",
"Epoch 161/1000 - loss: 238.1627 - val_loss: 176.7718-> took 0.13 seconds\n",
"Epoch 162/1000 - loss: 236.9199 - val_loss: 175.7550-> took 0.12 seconds\n",
"Epoch 163/1000 - loss: 235.6828 - val_loss: 174.7438-> took 0.13 seconds\n",
"Epoch 164/1000 - loss: 234.4515 - val_loss: 173.7384-> took 0.13 seconds\n",
"Epoch 165/1000 - loss: 233.2261 - val_loss: 172.7389-> took 0.12 seconds\n",
"Epoch 166/1000 - loss: 232.0066 - val_loss: 171.7450-> took 0.13 seconds\n",
"Epoch 167/1000 - loss: 230.7928 - val_loss: 170.7571-> took 0.13 seconds\n",
"Epoch 168/1000 - loss: 229.5849 - val_loss: 169.7749-> took 0.13 seconds\n",
"Epoch 169/1000 - loss: 228.3828 - val_loss: 168.7984-> took 0.14 seconds\n",
"Epoch 170/1000 - loss: 227.1865 - val_loss: 167.8277-> took 0.13 seconds\n",
"Epoch 171/1000 - loss: 225.9961 - val_loss: 166.8628-> took 0.14 seconds\n",
"Epoch 172/1000 - loss: 224.8115 - val_loss: 165.9037-> took 0.13 seconds\n",
"Epoch 173/1000 - loss: 223.6327 - val_loss: 164.9503-> took 0.12 seconds\n",
"Epoch 174/1000 - loss: 222.4598 - val_loss: 164.0028-> took 0.14 seconds\n",
"Epoch 175/1000 - loss: 221.2927 - val_loss: 163.0609-> took 0.13 seconds\n",
"Epoch 176/1000 - loss: 220.1315 - val_loss: 162.1249-> took 0.13 seconds\n",
"Epoch 177/1000 - loss: 218.9760 - val_loss: 161.1947-> took 0.12 seconds\n",
"Epoch 178/1000 - loss: 217.8264 - val_loss: 160.2702-> took 0.13 seconds\n",
"Epoch 179/1000 - loss: 216.6827 - val_loss: 159.3515-> took 0.12 seconds\n",
"Epoch 180/1000 - loss: 215.5447 - val_loss: 158.4385-> took 0.13 seconds\n",
"Epoch 181/1000 - loss: 214.4126 - val_loss: 157.5314-> took 0.12 seconds\n",
"Epoch 182/1000 - loss: 213.2864 - val_loss: 156.6299-> took 0.14 seconds\n",
"Epoch 183/1000 - loss: 212.1660 - val_loss: 155.7343-> took 0.13 seconds\n",
"Epoch 184/1000 - loss: 211.0514 - val_loss: 154.8445-> took 0.13 seconds\n",
"Epoch 185/1000 - loss: 209.9426 - val_loss: 153.9603-> took 0.12 seconds\n",
"Epoch 186/1000 - loss: 208.8398 - val_loss: 153.0820-> took 0.12 seconds\n",
"Epoch 187/1000 - loss: 207.7428 - val_loss: 152.2095-> took 0.13 seconds\n",
"Epoch 188/1000 - loss: 206.6515 - val_loss: 151.3427-> took 0.13 seconds\n",
"Epoch 189/1000 - loss: 205.5661 - val_loss: 150.4817-> took 0.12 seconds\n",
"Epoch 190/1000 - loss: 204.4866 - val_loss: 149.6264-> took 0.13 seconds\n",
"Epoch 191/1000 - loss: 203.4129 - val_loss: 148.7770-> took 0.12 seconds\n",
"Epoch 192/1000 - loss: 202.3451 - val_loss: 147.9332-> took 0.13 seconds\n",
"Epoch 193/1000 - loss: 201.2831 - val_loss: 147.0953-> took 0.13 seconds\n",
"Epoch 194/1000 - loss: 200.2269 - val_loss: 146.2631-> took 0.14 seconds\n",
"Epoch 195/1000 - loss: 199.1766 - val_loss: 145.4366-> took 0.14 seconds\n",
"Epoch 196/1000 - loss: 198.1322 - val_loss: 144.6160-> took 0.13 seconds\n",
"Epoch 197/1000 - loss: 197.0936 - val_loss: 143.8011-> took 0.12 seconds\n",
"Epoch 198/1000 - loss: 196.0609 - val_loss: 142.9921-> took 0.12 seconds\n",
"Epoch 199/1000 - loss: 195.0340 - val_loss: 142.1886-> took 0.12 seconds\n",
"Epoch 200/1000 - loss: 194.0129 - val_loss: 141.3911-> took 0.13 seconds\n",
"Epoch 201/1000 - loss: 192.9977 - val_loss: 140.5992-> took 0.13 seconds\n",
"Epoch 202/1000 - loss: 191.9884 - val_loss: 139.8131-> took 0.13 seconds\n",
"Epoch 203/1000 - loss: 190.9849 - val_loss: 139.0328-> took 0.13 seconds\n",
"Epoch 204/1000 - loss: 189.9873 - val_loss: 138.2582-> took 0.12 seconds\n",
"Epoch 205/1000 - loss: 188.9955 - val_loss: 137.4894-> took 0.12 seconds\n",
"Epoch 206/1000 - loss: 188.0096 - val_loss: 136.7263-> took 0.14 seconds\n",
"Epoch 207/1000 - loss: 187.0296 - val_loss: 135.9690-> took 0.13 seconds\n",
"Epoch 208/1000 - loss: 186.0554 - val_loss: 135.2175-> took 0.13 seconds\n",
"Epoch 209/1000 - loss: 185.0871 - val_loss: 134.4717-> took 0.13 seconds\n",
"Epoch 210/1000 - loss: 184.1246 - val_loss: 133.7317-> took 0.13 seconds\n",
"Epoch 211/1000 - loss: 183.1681 - val_loss: 132.9974-> took 0.12 seconds\n",
"Epoch 212/1000 - loss: 182.2174 - val_loss: 132.2689-> took 0.13 seconds\n",
"Epoch 213/1000 - loss: 181.2725 - val_loss: 131.5461-> took 0.13 seconds\n",
"Epoch 214/1000 - loss: 180.3335 - val_loss: 130.8291-> took 0.12 seconds\n",
"Epoch 215/1000 - loss: 179.4004 - val_loss: 130.1178-> took 0.13 seconds\n",
"Epoch 216/1000 - loss: 178.4732 - val_loss: 129.4124-> took 0.12 seconds\n",
"Epoch 217/1000 - loss: 177.5518 - val_loss: 128.7126-> took 0.15 seconds\n",
"Epoch 218/1000 - loss: 176.6363 - val_loss: 128.0186-> took 0.13 seconds\n",
"Epoch 219/1000 - loss: 175.7268 - val_loss: 127.3303-> took 0.12 seconds\n",
"Epoch 220/1000 - loss: 174.8230 - val_loss: 126.6478-> took 0.13 seconds\n",
"Epoch 221/1000 - loss: 173.9252 - val_loss: 125.9710-> took 0.12 seconds\n",
"Epoch 222/1000 - loss: 173.0333 - val_loss: 125.3000-> took 0.13 seconds\n",
"Epoch 223/1000 - loss: 172.1472 - val_loss: 124.6347-> took 0.13 seconds\n",
"Epoch 224/1000 - loss: 171.2671 - val_loss: 123.9752-> took 0.13 seconds\n",
"Epoch 225/1000 - loss: 170.3928 - val_loss: 123.3214-> took 0.13 seconds\n",
"Epoch 226/1000 - loss: 169.5244 - val_loss: 122.6733-> took 0.14 seconds\n",
"Epoch 227/1000 - loss: 168.6620 - val_loss: 122.0310-> took 0.13 seconds\n",
"Epoch 228/1000 - loss: 167.8054 - val_loss: 121.3944-> took 0.13 seconds\n",
"Epoch 229/1000 - loss: 166.9547 - val_loss: 120.7636-> took 0.13 seconds\n",
"Epoch 230/1000 - loss: 166.1099 - val_loss: 120.1385-> took 0.13 seconds\n",
"Epoch 231/1000 - loss: 165.2711 - val_loss: 119.5191-> took 0.14 seconds\n",
"Epoch 232/1000 - loss: 164.4381 - val_loss: 118.9055-> took 0.13 seconds\n",
"Epoch 233/1000 - loss: 163.6111 - val_loss: 118.2976-> took 0.13 seconds\n",
"Epoch 234/1000 - loss: 162.7900 - val_loss: 117.6954-> took 0.13 seconds\n",
"Epoch 235/1000 - loss: 161.9748 - val_loss: 117.0990-> took 0.13 seconds\n",
"Epoch 236/1000 - loss: 161.1655 - val_loss: 116.5083-> took 0.12 seconds\n",
"Epoch 237/1000 - loss: 160.3622 - val_loss: 115.9233-> took 0.13 seconds\n",
"Epoch 238/1000 - loss: 159.5648 - val_loss: 115.3440-> took 0.12 seconds\n",
"Epoch 239/1000 - loss: 158.7733 - val_loss: 114.7705-> took 0.12 seconds\n",
"Epoch 240/1000 - loss: 157.9877 - val_loss: 114.2027-> took 0.15 seconds\n",
"Epoch 241/1000 - loss: 157.2081 - val_loss: 113.6406-> took 0.13 seconds\n",
"Epoch 242/1000 - loss: 156.4345 - val_loss: 113.0843-> took 0.13 seconds\n",
"Epoch 243/1000 - loss: 155.6668 - val_loss: 112.5336-> took 0.13 seconds\n",
"Epoch 244/1000 - loss: 154.9050 - val_loss: 111.9887-> took 0.12 seconds\n",
"Epoch 245/1000 - loss: 154.1492 - val_loss: 111.4495-> took 0.13 seconds\n",
"Epoch 246/1000 - loss: 153.3994 - val_loss: 110.9160-> took 0.13 seconds\n",
"Epoch 247/1000 - loss: 152.6555 - val_loss: 110.3882-> took 0.13 seconds\n",
"Epoch 248/1000 - loss: 151.9176 - val_loss: 109.8661-> took 0.13 seconds\n",
"Epoch 249/1000 - loss: 151.1857 - val_loss: 109.3498-> took 0.13 seconds\n",
"Epoch 250/1000 - loss: 150.4597 - val_loss: 108.8391-> took 0.13 seconds\n",
"Epoch 251/1000 - loss: 149.7398 - val_loss: 108.3341-> took 0.13 seconds\n",
"Epoch 252/1000 - loss: 149.0258 - val_loss: 107.8349-> took 0.12 seconds\n",
"Epoch 253/1000 - loss: 148.3178 - val_loss: 107.3413-> took 0.13 seconds\n",
"Epoch 254/1000 - loss: 147.6159 - val_loss: 106.8535-> took 0.13 seconds\n",
"Epoch 255/1000 - loss: 146.9199 - val_loss: 106.3713-> took 0.12 seconds\n",
"Epoch 256/1000 - loss: 146.2300 - val_loss: 105.8948-> took 0.14 seconds\n",
"Epoch 257/1000 - loss: 145.5460 - val_loss: 105.4240-> took 0.13 seconds\n",
"Epoch 258/1000 - loss: 144.8681 - val_loss: 104.9590-> took 0.13 seconds\n",
"Epoch 259/1000 - loss: 144.1963 - val_loss: 104.4995-> took 0.12 seconds\n",
"Epoch 260/1000 - loss: 143.5304 - val_loss: 104.0458-> took 0.12 seconds\n",
"Epoch 261/1000 - loss: 142.8707 - val_loss: 103.5977-> took 0.15 seconds\n",
"Epoch 262/1000 - loss: 142.2169 - val_loss: 103.1553-> took 0.13 seconds\n",
"Epoch 263/1000 - loss: 141.5692 - val_loss: 102.7186-> took 0.14 seconds\n",
"Epoch 264/1000 - loss: 140.9276 - val_loss: 102.2875-> took 0.13 seconds\n",
"Epoch 265/1000 - loss: 140.2921 - val_loss: 101.8622-> took 0.12 seconds\n",
"Epoch 266/1000 - loss: 139.6627 - val_loss: 101.4424-> took 0.13 seconds\n",
"Epoch 267/1000 - loss: 139.0393 - val_loss: 101.0283-> took 0.13 seconds\n",
"Epoch 268/1000 - loss: 138.4221 - val_loss: 100.6199-> took 0.12 seconds\n",
"Epoch 269/1000 - loss: 137.8109 - val_loss: 100.2170-> took 0.14 seconds\n",
"Epoch 270/1000 - loss: 137.2059 - val_loss: 99.8199-> took 0.14 seconds\n",
"Epoch 271/1000 - loss: 136.6071 - val_loss: 99.4284-> took 0.12 seconds\n",
"Epoch 272/1000 - loss: 136.0143 - val_loss: 99.0425-> took 0.13 seconds\n",
"Epoch 273/1000 - loss: 135.4277 - val_loss: 98.6622-> took 0.13 seconds\n",
"Epoch 274/1000 - loss: 134.8473 - val_loss: 98.2875-> took 0.12 seconds\n",
"Epoch 275/1000 - loss: 134.2730 - val_loss: 97.9185-> took 0.13 seconds\n",
"Epoch 276/1000 - loss: 133.7049 - val_loss: 97.5550-> took 0.12 seconds\n",
"Epoch 277/1000 - loss: 133.1431 - val_loss: 97.1972-> took 0.13 seconds\n",
"Epoch 278/1000 - loss: 132.5873 - val_loss: 96.8449-> took 0.12 seconds\n",
"Epoch 279/1000 - loss: 132.0379 - val_loss: 96.4982-> took 0.14 seconds\n",
"Epoch 280/1000 - loss: 131.4946 - val_loss: 96.1571-> took 0.13 seconds\n",
"Epoch 281/1000 - loss: 130.9576 - val_loss: 95.8216-> took 0.13 seconds\n",
"Epoch 282/1000 - loss: 130.4268 - val_loss: 95.4916-> took 0.12 seconds\n",
"Epoch 283/1000 - loss: 129.9023 - val_loss: 95.1671-> took 0.12 seconds\n",
"Epoch 284/1000 - loss: 129.3840 - val_loss: 94.8482-> took 0.12 seconds\n",
"Epoch 285/1000 - loss: 128.8721 - val_loss: 94.5348-> took 0.14 seconds\n",
"Epoch 286/1000 - loss: 128.3665 - val_loss: 94.2269-> took 0.14 seconds\n",
"Epoch 287/1000 - loss: 127.8672 - val_loss: 93.9246-> took 0.14 seconds\n",
"Epoch 288/1000 - loss: 127.3742 - val_loss: 93.6276-> took 0.13 seconds\n",
"Epoch 289/1000 - loss: 126.8875 - val_loss: 93.3362-> took 0.12 seconds\n",
"Epoch 290/1000 - loss: 126.4072 - val_loss: 93.0502-> took 0.14 seconds\n",
"Epoch 291/1000 - loss: 125.9333 - val_loss: 92.7697-> took 0.13 seconds\n",
"Epoch 292/1000 - loss: 125.4658 - val_loss: 92.4947-> took 0.13 seconds\n",
"Epoch 293/1000 - loss: 125.0046 - val_loss: 92.2249-> took 0.14 seconds\n",
"Epoch 294/1000 - loss: 124.5499 - val_loss: 91.9607-> took 0.12 seconds\n",
"Epoch 295/1000 - loss: 124.1016 - val_loss: 91.7018-> took 0.13 seconds\n",
"Epoch 296/1000 - loss: 123.6597 - val_loss: 91.4482-> took 0.13 seconds\n",
"Epoch 297/1000 - loss: 123.2244 - val_loss: 91.2000-> took 0.13 seconds\n",
"Epoch 298/1000 - loss: 122.7954 - val_loss: 90.9571-> took 0.13 seconds\n",
"Epoch 299/1000 - loss: 122.3730 - val_loss: 90.7195-> took 0.13 seconds\n",
"Epoch 300/1000 - loss: 121.9570 - val_loss: 90.4871-> took 0.14 seconds\n",
"Epoch 301/1000 - loss: 121.5476 - val_loss: 90.2599-> took 0.12 seconds\n",
"Epoch 302/1000 - loss: 121.1446 - val_loss: 90.0380-> took 0.13 seconds\n",
"Epoch 303/1000 - loss: 120.7482 - val_loss: 89.8213-> took 0.12 seconds\n",
"Epoch 304/1000 - loss: 120.3583 - val_loss: 89.6097-> took 0.13 seconds\n",
"Epoch 305/1000 - loss: 119.9750 - val_loss: 89.4033-> took 0.13 seconds\n",
"Epoch 306/1000 - loss: 119.5983 - val_loss: 89.2019-> took 0.13 seconds\n",
"Epoch 307/1000 - loss: 119.2280 - val_loss: 89.0055-> took 0.13 seconds\n",
"Epoch 308/1000 - loss: 118.8644 - val_loss: 88.8142-> took 0.14 seconds\n",
"Epoch 309/1000 - loss: 118.5074 - val_loss: 88.6278-> took 0.13 seconds\n",
"Epoch 310/1000 - loss: 118.1569 - val_loss: 88.4464-> took 0.13 seconds\n",
"Epoch 311/1000 - loss: 117.8130 - val_loss: 88.2698-> took 0.13 seconds\n",
"Epoch 312/1000 - loss: 117.4757 - val_loss: 88.0981-> took 0.13 seconds\n",
"Epoch 313/1000 - loss: 117.1450 - val_loss: 87.9312-> took 0.12 seconds\n",
"Epoch 314/1000 - loss: 116.8208 - val_loss: 87.7690-> took 0.13 seconds\n",
"Epoch 315/1000 - loss: 116.5032 - val_loss: 87.6115-> took 0.14 seconds\n",
"Epoch 316/1000 - loss: 116.1921 - val_loss: 87.4587-> took 0.15 seconds\n",
"Epoch 317/1000 - loss: 115.8876 - val_loss: 87.3103-> took 0.13 seconds\n",
"Epoch 318/1000 - loss: 115.5896 - val_loss: 87.1666-> took 0.13 seconds\n",
"Epoch 319/1000 - loss: 115.2981 - val_loss: 87.0272-> took 0.13 seconds\n",
"Epoch 320/1000 - loss: 115.0131 - val_loss: 86.8923-> took 0.13 seconds\n",
"Epoch 321/1000 - loss: 114.7345 - val_loss: 86.7616-> took 0.13 seconds\n",
"Epoch 322/1000 - loss: 114.4624 - val_loss: 86.6352-> took 0.13 seconds\n",
"Epoch 323/1000 - loss: 114.1966 - val_loss: 86.5130-> took 0.13 seconds\n",
"Epoch 324/1000 - loss: 113.9372 - val_loss: 86.3949-> took 0.13 seconds\n",
"Epoch 325/1000 - loss: 113.6841 - val_loss: 86.2809-> took 0.14 seconds\n",
"Epoch 326/1000 - loss: 113.4373 - val_loss: 86.1707-> took 0.12 seconds\n",
"Epoch 327/1000 - loss: 113.1967 - val_loss: 86.0645-> took 0.13 seconds\n",
"Epoch 328/1000 - loss: 112.9622 - val_loss: 85.9620-> took 0.12 seconds\n",
"Epoch 329/1000 - loss: 112.7338 - val_loss: 85.8632-> took 0.13 seconds\n",
"Epoch 330/1000 - loss: 112.5115 - val_loss: 85.7680-> took 0.13 seconds\n",
"Epoch 331/1000 - loss: 112.2952 - val_loss: 85.6765-> took 0.13 seconds\n",
"Epoch 332/1000 - loss: 112.0847 - val_loss: 85.5883-> took 0.13 seconds\n",
"Epoch 333/1000 - loss: 111.8801 - val_loss: 85.5035-> took 0.12 seconds\n",
"Epoch 334/1000 - loss: 111.6812 - val_loss: 85.4219-> took 0.12 seconds\n",
"Epoch 335/1000 - loss: 111.4879 - val_loss: 85.3436-> took 0.14 seconds\n",
"Epoch 336/1000 - loss: 111.3003 - val_loss: 85.2683-> took 0.13 seconds\n",
"Epoch 337/1000 - loss: 111.1181 - val_loss: 85.1960-> took 0.13 seconds\n",
"Epoch 338/1000 - loss: 110.9413 - val_loss: 85.1266-> took 0.14 seconds\n",
"Epoch 339/1000 - loss: 110.7698 - val_loss: 85.0600-> took 0.14 seconds\n",
"Epoch 340/1000 - loss: 110.6035 - val_loss: 84.9962-> took 0.13 seconds\n",
"Epoch 341/1000 - loss: 110.4422 - val_loss: 84.9349-> took 0.13 seconds\n",
"Epoch 342/1000 - loss: 110.2860 - val_loss: 84.8763-> took 0.13 seconds\n",
"Epoch 343/1000 - loss: 110.1346 - val_loss: 84.8201-> took 0.13 seconds\n",
"Epoch 344/1000 - loss: 109.9880 - val_loss: 84.7662-> took 0.13 seconds\n",
"Epoch 345/1000 - loss: 109.8461 - val_loss: 84.7146-> took 0.13 seconds\n",
"Epoch 346/1000 - loss: 109.7087 - val_loss: 84.6653-> took 0.13 seconds\n",
"Epoch 347/1000 - loss: 109.5758 - val_loss: 84.6180-> took 0.12 seconds\n",
"Epoch 348/1000 - loss: 109.4472 - val_loss: 84.5728-> took 0.13 seconds\n",
"Epoch 349/1000 - loss: 109.3228 - val_loss: 84.5295-> took 0.13 seconds\n",
"Epoch 350/1000 - loss: 109.2025 - val_loss: 84.4881-> took 0.12 seconds\n",
"Epoch 351/1000 - loss: 109.0863 - val_loss: 84.4485-> took 0.13 seconds\n",
"Epoch 352/1000 - loss: 108.9739 - val_loss: 84.4107-> took 0.12 seconds\n",
"Epoch 353/1000 - loss: 108.8653 - val_loss: 84.3745-> took 0.14 seconds\n",
"Epoch 354/1000 - loss: 108.7604 - val_loss: 84.3399-> took 0.17 seconds\n",
"Epoch 355/1000 - loss: 108.6590 - val_loss: 84.3068-> took 0.13 seconds\n",
"Epoch 356/1000 - loss: 108.5611 - val_loss: 84.2752-> took 0.12 seconds\n",
"Epoch 357/1000 - loss: 108.4665 - val_loss: 84.2449-> took 0.13 seconds\n",
"Epoch 358/1000 - loss: 108.3751 - val_loss: 84.2159-> took 0.13 seconds\n",
"Epoch 359/1000 - loss: 108.2870 - val_loss: 84.1883-> took 0.12 seconds\n",
"Epoch 360/1000 - loss: 108.2019 - val_loss: 84.1619-> took 0.13 seconds\n",
"Epoch 361/1000 - loss: 108.1197 - val_loss: 84.1366-> took 0.13 seconds\n",
"Epoch 362/1000 - loss: 108.0404 - val_loss: 84.1125-> took 0.13 seconds\n",
"Epoch 363/1000 - loss: 107.9639 - val_loss: 84.0894-> took 0.13 seconds\n",
"Epoch 364/1000 - loss: 107.8900 - val_loss: 84.0673-> took 0.13 seconds\n",
"Epoch 365/1000 - loss: 107.8186 - val_loss: 84.0462-> took 0.13 seconds\n",
"Epoch 366/1000 - loss: 107.7498 - val_loss: 84.0260-> took 0.13 seconds\n",
"Epoch 367/1000 - loss: 107.6834 - val_loss: 84.0067-> took 0.12 seconds\n",
"Epoch 368/1000 - loss: 107.6194 - val_loss: 83.9882-> took 0.13 seconds\n",
"Epoch 369/1000 - loss: 107.5575 - val_loss: 83.9706-> took 0.14 seconds\n",
"Epoch 370/1000 - loss: 107.4979 - val_loss: 83.9537-> took 0.15 seconds\n",
"Epoch 371/1000 - loss: 107.4404 - val_loss: 83.9375-> took 0.14 seconds\n",
"Epoch 372/1000 - loss: 107.3848 - val_loss: 83.9220-> took 0.13 seconds\n",
"Epoch 373/1000 - loss: 107.3313 - val_loss: 83.9072-> took 0.12 seconds\n",
"Epoch 374/1000 - loss: 107.2796 - val_loss: 83.8930-> took 0.14 seconds\n",
"Epoch 375/1000 - loss: 107.2297 - val_loss: 83.8795-> took 0.12 seconds\n",
"Epoch 376/1000 - loss: 107.1816 - val_loss: 83.8665-> took 0.12 seconds\n",
"Epoch 377/1000 - loss: 107.1352 - val_loss: 83.8540-> took 0.14 seconds\n",
"Epoch 378/1000 - loss: 107.0905 - val_loss: 83.8421-> took 0.13 seconds\n",
"Epoch 379/1000 - loss: 107.0473 - val_loss: 83.8307-> took 0.12 seconds\n",
"Epoch 380/1000 - loss: 107.0056 - val_loss: 83.8197-> took 0.12 seconds\n",
"Epoch 381/1000 - loss: 106.9654 - val_loss: 83.8093-> took 0.12 seconds\n",
"Epoch 382/1000 - loss: 106.9266 - val_loss: 83.7992-> took 0.13 seconds\n",
"Epoch 383/1000 - loss: 106.8891 - val_loss: 83.7896-> took 0.13 seconds\n",
"Epoch 384/1000 - loss: 106.8530 - val_loss: 83.7804-> took 0.14 seconds\n",
"Epoch 385/1000 - loss: 106.8182 - val_loss: 83.7715-> took 0.13 seconds\n",
"Epoch 386/1000 - loss: 106.7845 - val_loss: 83.7630-> took 0.15 seconds\n",
"Epoch 387/1000 - loss: 106.7521 - val_loss: 83.7549-> took 0.14 seconds\n",
"Epoch 388/1000 - loss: 106.7208 - val_loss: 83.7471-> took 0.12 seconds\n",
"Epoch 389/1000 - loss: 106.6906 - val_loss: 83.7396-> took 0.12 seconds\n",
"Epoch 390/1000 - loss: 106.6614 - val_loss: 83.7324-> took 0.12 seconds\n",
"Epoch 391/1000 - loss: 106.6333 - val_loss: 83.7255-> took 0.13 seconds\n",
"Epoch 392/1000 - loss: 106.6061 - val_loss: 83.7189-> took 0.15 seconds\n",
"Epoch 393/1000 - loss: 106.5799 - val_loss: 83.7125-> took 0.13 seconds\n",
"Epoch 394/1000 - loss: 106.5546 - val_loss: 83.7064-> took 0.14 seconds\n",
"Epoch 395/1000 - loss: 106.5302 - val_loss: 83.7005-> took 0.12 seconds\n",
"Epoch 396/1000 - loss: 106.5067 - val_loss: 83.6950-> took 0.12 seconds\n",
"Epoch 397/1000 - loss: 106.4839 - val_loss: 83.6895-> took 0.13 seconds\n",
"Epoch 398/1000 - loss: 106.4620 - val_loss: 83.6843-> took 0.12 seconds\n",
"Epoch 399/1000 - loss: 106.4408 - val_loss: 83.6793-> took 0.13 seconds\n",
"Epoch 400/1000 - loss: 106.4204 - val_loss: 83.6745-> took 0.16 seconds\n",
"Epoch 401/1000 - loss: 106.4007 - val_loss: 83.6699-> took 0.13 seconds\n",
"Epoch 402/1000 - loss: 106.3816 - val_loss: 83.6655-> took 0.13 seconds\n",
"Epoch 403/1000 - loss: 106.3632 - val_loss: 83.6612-> took 0.13 seconds\n",
"Epoch 404/1000 - loss: 106.3455 - val_loss: 83.6571-> took 0.12 seconds\n",
"Epoch 405/1000 - loss: 106.3283 - val_loss: 83.6532-> took 0.12 seconds\n",
"Epoch 406/1000 - loss: 106.3118 - val_loss: 83.6494-> took 0.13 seconds\n",
"Epoch 407/1000 - loss: 106.2958 - val_loss: 83.6457-> took 0.13 seconds\n",
"Epoch 408/1000 - loss: 106.2804 - val_loss: 83.6422-> took 0.14 seconds\n",
"Epoch 409/1000 - loss: 106.2655 - val_loss: 83.6388-> took 0.13 seconds\n",
"Epoch 410/1000 - loss: 106.2511 - val_loss: 83.6356-> took 0.12 seconds\n",
"Epoch 411/1000 - loss: 106.2372 - val_loss: 83.6325-> took 0.14 seconds\n",
"Epoch 412/1000 - loss: 106.2239 - val_loss: 83.6294-> took 0.16 seconds\n",
"Epoch 413/1000 - loss: 106.2109 - val_loss: 83.6265-> took 0.14 seconds\n",
"Epoch 414/1000 - loss: 106.1984 - val_loss: 83.6237-> took 0.13 seconds\n",
"Epoch 415/1000 - loss: 106.1863 - val_loss: 83.6210-> took 0.14 seconds\n",
"Epoch 416/1000 - loss: 106.1747 - val_loss: 83.6184-> took 0.14 seconds\n",
"Epoch 417/1000 - loss: 106.1634 - val_loss: 83.6160-> took 0.12 seconds\n",
"Epoch 418/1000 - loss: 106.1526 - val_loss: 83.6135-> took 0.12 seconds\n",
"Epoch 419/1000 - loss: 106.1421 - val_loss: 83.6112-> took 0.13 seconds\n",
"Epoch 420/1000 - loss: 106.1319 - val_loss: 83.6090-> took 0.12 seconds\n",
"Epoch 421/1000 - loss: 106.1222 - val_loss: 83.6068-> took 0.12 seconds\n",
"Epoch 422/1000 - loss: 106.1127 - val_loss: 83.6048-> took 0.12 seconds\n",
"Epoch 423/1000 - loss: 106.1036 - val_loss: 83.6028-> took 0.13 seconds\n",
"Epoch 424/1000 - loss: 106.0948 - val_loss: 83.6008-> took 0.12 seconds\n",
"Epoch 425/1000 - loss: 106.0863 - val_loss: 83.5990-> took 0.13 seconds\n",
"Epoch 426/1000 - loss: 106.0781 - val_loss: 83.5972-> took 0.13 seconds\n",
"Epoch 427/1000 - loss: 106.0701 - val_loss: 83.5955-> took 0.12 seconds\n",
"Epoch 428/1000 - loss: 106.0625 - val_loss: 83.5938-> took 0.14 seconds\n",
"Epoch 429/1000 - loss: 106.0551 - val_loss: 83.5922-> took 0.14 seconds\n",
"Epoch 430/1000 - loss: 106.0479 - val_loss: 83.5907-> took 0.13 seconds\n",
"Epoch 431/1000 - loss: 106.0410 - val_loss: 83.5892-> took 0.13 seconds\n",
"Epoch 432/1000 - loss: 106.0343 - val_loss: 83.5878-> took 0.13 seconds\n",
"Epoch 433/1000 - loss: 106.0279 - val_loss: 83.5864-> took 0.13 seconds\n",
"Epoch 434/1000 - loss: 106.0217 - val_loss: 83.5850-> took 0.13 seconds\n",
"Epoch 435/1000 - loss: 106.0156 - val_loss: 83.5838-> took 0.13 seconds\n",
"Epoch 436/1000 - loss: 106.0098 - val_loss: 83.5825-> took 0.14 seconds\n",
"Epoch 437/1000 - loss: 106.0042 - val_loss: 83.5813-> took 0.13 seconds\n",
"Epoch 438/1000 - loss: 105.9988 - val_loss: 83.5802-> took 0.14 seconds\n",
"Epoch 439/1000 - loss: 105.9936 - val_loss: 83.5790-> took 0.14 seconds\n",
"Epoch 440/1000 - loss: 105.9885 - val_loss: 83.5780-> took 0.14 seconds\n",
"Epoch 441/1000 - loss: 105.9836 - val_loss: 83.5770-> took 0.12 seconds\n",
"Epoch 442/1000 - loss: 105.9789 - val_loss: 83.5759-> took 0.13 seconds\n",
"Epoch 443/1000 - loss: 105.9743 - val_loss: 83.5750-> took 0.12 seconds\n",
"Epoch 444/1000 - loss: 105.9700 - val_loss: 83.5740-> took 0.14 seconds\n",
"Epoch 445/1000 - loss: 105.9657 - val_loss: 83.5731-> took 0.13 seconds\n",
"Epoch 446/1000 - loss: 105.9616 - val_loss: 83.5723-> took 0.13 seconds\n",
"Epoch 447/1000 - loss: 105.9576 - val_loss: 83.5714-> took 0.13 seconds\n",
"Epoch 448/1000 - loss: 105.9538 - val_loss: 83.5706-> took 0.13 seconds\n",
"Epoch 449/1000 - loss: 105.9501 - val_loss: 83.5699-> took 0.12 seconds\n",
"Epoch 450/1000 - loss: 105.9465 - val_loss: 83.5691-> took 0.14 seconds\n",
"Epoch 451/1000 - loss: 105.9430 - val_loss: 83.5684-> took 0.13 seconds\n",
"Epoch 452/1000 - loss: 105.9397 - val_loss: 83.5677-> took 0.13 seconds\n",
"Epoch 453/1000 - loss: 105.9365 - val_loss: 83.5670-> took 0.13 seconds\n",
"Epoch 454/1000 - loss: 105.9334 - val_loss: 83.5663-> took 0.13 seconds\n",
"Epoch 455/1000 - loss: 105.9304 - val_loss: 83.5657-> took 0.13 seconds\n",
"Epoch 456/1000 - loss: 105.9274 - val_loss: 83.5651-> took 0.12 seconds\n",
"Epoch 457/1000 - loss: 105.9246 - val_loss: 83.5645-> took 0.13 seconds\n",
"Epoch 458/1000 - loss: 105.9219 - val_loss: 83.5639-> took 0.12 seconds\n",
"Epoch 459/1000 - loss: 105.9193 - val_loss: 83.5634-> took 0.13 seconds\n",
"Epoch 460/1000 - loss: 105.9167 - val_loss: 83.5629-> took 0.13 seconds\n",
"Epoch 461/1000 - loss: 105.9143 - val_loss: 83.5624-> took 0.14 seconds\n",
"Epoch 462/1000 - loss: 105.9119 - val_loss: 83.5619-> took 0.13 seconds\n",
"Epoch 463/1000 - loss: 105.9096 - val_loss: 83.5614-> took 0.14 seconds\n",
"Epoch 464/1000 - loss: 105.9074 - val_loss: 83.5609-> took 0.12 seconds\n",
"Epoch 465/1000 - loss: 105.9053 - val_loss: 83.5605-> took 0.15 seconds\n",
"Epoch 466/1000 - loss: 105.9032 - val_loss: 83.5601-> took 0.14 seconds\n",
"Epoch 467/1000 - loss: 105.9012 - val_loss: 83.5596-> took 0.13 seconds\n",
"Epoch 468/1000 - loss: 105.8993 - val_loss: 83.5593-> took 0.13 seconds\n",
"Epoch 469/1000 - loss: 105.8974 - val_loss: 83.5589-> took 0.14 seconds\n",
"Epoch 470/1000 - loss: 105.8956 - val_loss: 83.5585-> took 0.14 seconds\n",
"Epoch 471/1000 - loss: 105.8939 - val_loss: 83.5581-> took 0.13 seconds\n",
"Epoch 472/1000 - loss: 105.8922 - val_loss: 83.5578-> took 0.13 seconds\n",
"Epoch 473/1000 - loss: 105.8906 - val_loss: 83.5574-> took 0.14 seconds\n",
"Epoch 474/1000 - loss: 105.8890 - val_loss: 83.5571-> took 0.13 seconds\n",
"Epoch 475/1000 - loss: 105.8875 - val_loss: 83.5568-> took 0.14 seconds\n",
"Epoch 476/1000 - loss: 105.8861 - val_loss: 83.5565-> took 0.14 seconds\n",
"Epoch 477/1000 - loss: 105.8846 - val_loss: 83.5562-> took 0.13 seconds\n",
"Epoch 478/1000 - loss: 105.8833 - val_loss: 83.5559-> took 0.13 seconds\n",
"Epoch 479/1000 - loss: 105.8820 - val_loss: 83.5557-> took 0.13 seconds\n",
"Epoch 480/1000 - loss: 105.8807 - val_loss: 83.5554-> took 0.12 seconds\n",
"Epoch 481/1000 - loss: 105.8794 - val_loss: 83.5551-> took 0.12 seconds\n",
"Epoch 482/1000 - loss: 105.8783 - val_loss: 83.5549-> took 0.13 seconds\n",
"Epoch 483/1000 - loss: 105.8771 - val_loss: 83.5547-> took 0.13 seconds\n",
"Epoch 484/1000 - loss: 105.8760 - val_loss: 83.5544-> took 0.13 seconds\n",
"Epoch 485/1000 - loss: 105.8749 - val_loss: 83.5542-> took 0.12 seconds\n",
"Epoch 486/1000 - loss: 105.8739 - val_loss: 83.5540-> took 0.13 seconds\n",
"Epoch 487/1000 - loss: 105.8729 - val_loss: 83.5538-> took 0.13 seconds\n",
"Epoch 488/1000 - loss: 105.8719 - val_loss: 83.5536-> took 0.12 seconds\n",
"Epoch 489/1000 - loss: 105.8710 - val_loss: 83.5534-> took 0.13 seconds\n",
"Epoch 490/1000 - loss: 105.8701 - val_loss: 83.5532-> took 0.14 seconds\n",
"Epoch 491/1000 - loss: 105.8692 - val_loss: 83.5530-> took 0.12 seconds\n",
"Epoch 492/1000 - loss: 105.8684 - val_loss: 83.5528-> took 0.14 seconds\n",
"Epoch 493/1000 - loss: 105.8676 - val_loss: 83.5527-> took 0.14 seconds\n",
"Epoch 494/1000 - loss: 105.8668 - val_loss: 83.5525-> took 0.12 seconds\n",
"Epoch 495/1000 - loss: 105.8660 - val_loss: 83.5524-> took 0.13 seconds\n",
"Epoch 496/1000 - loss: 105.8653 - val_loss: 83.5522-> took 0.13 seconds\n",
"Epoch 497/1000 - loss: 105.8646 - val_loss: 83.5521-> took 0.12 seconds\n",
"Epoch 498/1000 - loss: 105.8639 - val_loss: 83.5519-> took 0.13 seconds\n",
"Epoch 499/1000 - loss: 105.8632 - val_loss: 83.5518-> took 0.13 seconds\n",
"Epoch 500/1000 - loss: 105.8626 - val_loss: 83.5517-> took 0.14 seconds\n",
"Epoch 501/1000 - loss: 105.8619 - val_loss: 83.5515-> took 0.12 seconds\n",
"Epoch 502/1000 - loss: 105.8614 - val_loss: 83.5514-> took 0.12 seconds\n",
"Epoch 503/1000 - loss: 105.8608 - val_loss: 83.5513-> took 0.12 seconds\n",
"Epoch 504/1000 - loss: 105.8602 - val_loss: 83.5512-> took 0.13 seconds\n",
"Epoch 505/1000 - loss: 105.8597 - val_loss: 83.5510-> took 0.15 seconds\n",
"Epoch 506/1000 - loss: 105.8591 - val_loss: 83.5510-> took 0.13 seconds\n",
"Epoch 507/1000 - loss: 105.8587 - val_loss: 83.5508-> took 0.14 seconds\n",
"Epoch 508/1000 - loss: 105.8582 - val_loss: 83.5508-> took 0.12 seconds\n",
"Epoch 509/1000 - loss: 105.8577 - val_loss: 83.5507-> took 0.12 seconds\n",
"Epoch 510/1000 - loss: 105.8572 - val_loss: 83.5506-> took 0.14 seconds\n",
"Epoch 511/1000 - loss: 105.8568 - val_loss: 83.5505-> took 0.14 seconds\n",
"Epoch 512/1000 - loss: 105.8564 - val_loss: 83.5504-> took 0.13 seconds\n",
"Epoch 513/1000 - loss: 105.8560 - val_loss: 83.5503-> took 0.13 seconds\n",
"Epoch 514/1000 - loss: 105.8556 - val_loss: 83.5502-> took 0.13 seconds\n",
"Epoch 515/1000 - loss: 105.8552 - val_loss: 83.5501-> took 0.14 seconds\n",
"Epoch 516/1000 - loss: 105.8548 - val_loss: 83.5501-> took 0.13 seconds\n",
"Epoch 517/1000 - loss: 105.8545 - val_loss: 83.5500-> took 0.13 seconds\n",
"Epoch 518/1000 - loss: 105.8541 - val_loss: 83.5499-> took 0.13 seconds\n",
"Epoch 519/1000 - loss: 105.8538 - val_loss: 83.5499-> took 0.14 seconds\n",
"Epoch 520/1000 - loss: 105.8534 - val_loss: 83.5498-> took 0.14 seconds\n",
"Epoch 521/1000 - loss: 105.8531 - val_loss: 83.5497-> took 0.12 seconds\n",
"Epoch 522/1000 - loss: 105.8528 - val_loss: 83.5497-> took 0.13 seconds\n",
"Epoch 523/1000 - loss: 105.8525 - val_loss: 83.5496-> took 0.13 seconds\n",
"Epoch 524/1000 - loss: 105.8523 - val_loss: 83.5496-> took 0.12 seconds\n",
"Epoch 525/1000 - loss: 105.8520 - val_loss: 83.5495-> took 0.13 seconds\n",
"Epoch 526/1000 - loss: 105.8517 - val_loss: 83.5494-> took 0.13 seconds\n",
"Epoch 527/1000 - loss: 105.8515 - val_loss: 83.5494-> took 0.13 seconds\n",
"Epoch 528/1000 - loss: 105.8512 - val_loss: 83.5493-> took 0.13 seconds\n",
"Epoch 529/1000 - loss: 105.8510 - val_loss: 83.5493-> took 0.13 seconds\n",
"Epoch 530/1000 - loss: 105.8508 - val_loss: 83.5492-> took 0.14 seconds\n",
"Epoch 531/1000 - loss: 105.8505 - val_loss: 83.5492-> took 0.12 seconds\n",
"Epoch 532/1000 - loss: 105.8503 - val_loss: 83.5491-> took 0.13 seconds\n",
"Epoch 533/1000 - loss: 105.8501 - val_loss: 83.5491-> took 0.14 seconds\n",
"Epoch 534/1000 - loss: 105.8499 - val_loss: 83.5491-> took 0.13 seconds\n",
"Epoch 535/1000 - loss: 105.8497 - val_loss: 83.5490-> took 0.13 seconds\n",
"Epoch 536/1000 - loss: 105.8495 - val_loss: 83.5490-> took 0.13 seconds\n",
"Epoch 537/1000 - loss: 105.8494 - val_loss: 83.5489-> took 0.13 seconds\n",
"Epoch 538/1000 - loss: 105.8492 - val_loss: 83.5489-> took 0.13 seconds\n",
"Epoch 539/1000 - loss: 105.8490 - val_loss: 83.5489-> took 0.13 seconds\n",
"Epoch 540/1000 - loss: 105.8489 - val_loss: 83.5489-> took 0.14 seconds\n",
"Epoch 541/1000 - loss: 105.8487 - val_loss: 83.5488-> took 0.14 seconds\n",
"Epoch 542/1000 - loss: 105.8486 - val_loss: 83.5488-> took 0.12 seconds\n",
"Epoch 543/1000 - loss: 105.8484 - val_loss: 83.5488-> took 0.13 seconds\n",
"Epoch 544/1000 - loss: 105.8483 - val_loss: 83.5487-> took 0.12 seconds\n",
"Epoch 545/1000 - loss: 105.8481 - val_loss: 83.5487-> took 0.14 seconds\n",
"Epoch 546/1000 - loss: 105.8480 - val_loss: 83.5487-> took 0.13 seconds\n",
"Epoch 547/1000 - loss: 105.8478 - val_loss: 83.5486-> took 0.13 seconds\n",
"Epoch 548/1000 - loss: 105.8477 - val_loss: 83.5486-> took 0.12 seconds\n",
"Epoch 549/1000 - loss: 105.8475 - val_loss: 83.5486-> took 0.14 seconds\n",
"Epoch 550/1000 - loss: 105.8474 - val_loss: 83.5486-> took 0.13 seconds\n",
"Epoch 551/1000 - loss: 105.8473 - val_loss: 83.5485-> took 0.12 seconds\n",
"Epoch 552/1000 - loss: 105.8472 - val_loss: 83.5485-> took 0.13 seconds\n",
"Epoch 553/1000 - loss: 105.8472 - val_loss: 83.5485-> took 0.13 seconds\n",
"Epoch 554/1000 - loss: 105.8470 - val_loss: 83.5485-> took 0.12 seconds\n",
"Epoch 555/1000 - loss: 105.8469 - val_loss: 83.5485-> took 0.13 seconds\n",
"Epoch 556/1000 - loss: 105.8469 - val_loss: 83.5485-> took 0.13 seconds\n",
"Epoch 557/1000 - loss: 105.8468 - val_loss: 83.5484-> took 0.14 seconds\n",
"Epoch 558/1000 - loss: 105.8467 - val_loss: 83.5484-> took 0.13 seconds\n",
"Epoch 559/1000 - loss: 105.8466 - val_loss: 83.5484-> took 0.13 seconds\n",
"Epoch 560/1000 - loss: 105.8466 - val_loss: 83.5484-> took 0.13 seconds\n",
"Epoch 561/1000 - loss: 105.8465 - val_loss: 83.5484-> took 0.13 seconds\n",
"Epoch 562/1000 - loss: 105.8464 - val_loss: 83.5483-> took 0.13 seconds\n",
"Epoch 563/1000 - loss: 105.8463 - val_loss: 83.5483-> took 0.12 seconds\n",
"Epoch 564/1000 - loss: 105.8462 - val_loss: 83.5483-> took 0.13 seconds\n",
"Epoch 565/1000 - loss: 105.8461 - val_loss: 83.5483-> took 0.12 seconds\n",
"Epoch 566/1000 - loss: 105.8461 - val_loss: 83.5483-> took 0.13 seconds\n",
"Epoch 567/1000 - loss: 105.8460 - val_loss: 83.5483-> took 0.12 seconds\n",
"Epoch 568/1000 - loss: 105.8459 - val_loss: 83.5482-> took 0.13 seconds\n",
"Epoch 569/1000 - loss: 105.8458 - val_loss: 83.5482-> took 0.15 seconds\n",
"Epoch 570/1000 - loss: 105.8458 - val_loss: 83.5482-> took 0.15 seconds\n",
"Epoch 571/1000 - loss: 105.8457 - val_loss: 83.5482-> took 0.13 seconds\n",
"Epoch 572/1000 - loss: 105.8457 - val_loss: 83.5482-> took 0.13 seconds\n",
"Epoch 573/1000 - loss: 105.8456 - val_loss: 83.5482-> took 0.15 seconds\n",
"Epoch 574/1000 - loss: 105.8456 - val_loss: 83.5482-> took 0.13 seconds\n",
"Epoch 575/1000 - loss: 105.8456 - val_loss: 83.5482-> took 0.13 seconds\n",
"Epoch 576/1000 - loss: 105.8455 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 577/1000 - loss: 105.8455 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 578/1000 - loss: 105.8454 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 579/1000 - loss: 105.8454 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 580/1000 - loss: 105.8453 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 581/1000 - loss: 105.8453 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 582/1000 - loss: 105.8452 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 583/1000 - loss: 105.8452 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 584/1000 - loss: 105.8451 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 585/1000 - loss: 105.8451 - val_loss: 83.5481-> took 0.13 seconds\n",
"Epoch 586/1000 - loss: 105.8451 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 587/1000 - loss: 105.8450 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 588/1000 - loss: 105.8450 - val_loss: 83.5481-> took 0.14 seconds\n",
"Epoch 589/1000 - loss: 105.8450 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 590/1000 - loss: 105.8449 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 591/1000 - loss: 105.8449 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 592/1000 - loss: 105.8449 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 593/1000 - loss: 105.8448 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 594/1000 - loss: 105.8448 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 595/1000 - loss: 105.8448 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 596/1000 - loss: 105.8447 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 597/1000 - loss: 105.8447 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 598/1000 - loss: 105.8447 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 599/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 600/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 601/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.12 seconds\n",
"Epoch 602/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 603/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.12 seconds\n",
"Epoch 604/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 605/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 606/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.12 seconds\n",
"Epoch 607/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 608/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 609/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 610/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 611/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 612/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.12 seconds\n",
"Epoch 613/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 614/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.12 seconds\n",
"Epoch 615/1000 - loss: 105.8446 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 616/1000 - loss: 105.8445 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 617/1000 - loss: 105.8445 - val_loss: 83.5480-> took 0.15 seconds\n",
"Epoch 618/1000 - loss: 105.8445 - val_loss: 83.5480-> took 0.14 seconds\n",
"Epoch 619/1000 - loss: 105.8445 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 620/1000 - loss: 105.8445 - val_loss: 83.5480-> took 0.13 seconds\n",
"Epoch 621/1000 - loss: 105.8445 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 622/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 623/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 624/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 625/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 626/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 627/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 628/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 629/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 630/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 631/1000 - loss: 105.8444 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 632/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 633/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 634/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 635/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 636/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 637/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 638/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 639/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 640/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 641/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 642/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 643/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 644/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 645/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 646/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 647/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 648/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 649/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 650/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 651/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 652/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 653/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 654/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 655/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 656/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 657/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 658/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 659/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 660/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 661/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 662/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 663/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 664/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 665/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 666/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 667/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 668/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 669/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 670/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 671/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 672/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 673/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 674/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 675/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 676/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 677/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 678/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 679/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 680/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 681/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 682/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 683/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 684/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 685/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 686/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 687/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 688/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 689/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 690/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 691/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 692/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 693/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 694/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 695/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 696/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 697/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 698/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 699/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 700/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 701/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 702/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.16 seconds\n",
"Epoch 703/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 704/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 705/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 706/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 707/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 708/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 709/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 710/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 711/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 712/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 713/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 714/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 715/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 716/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 717/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 718/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 719/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 720/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 721/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 722/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.16 seconds\n",
"Epoch 723/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 724/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 725/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 726/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 727/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 728/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 729/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 730/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 731/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 732/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 733/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 734/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 735/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 736/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 737/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 738/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 739/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 740/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 741/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 742/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 743/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 744/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 745/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 746/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 747/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 748/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 749/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 750/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 751/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 752/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 753/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 754/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 755/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 756/1000 - loss: 105.8443 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 757/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 758/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 759/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 760/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 761/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 762/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 763/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 764/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 765/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 766/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 767/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 768/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 769/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 770/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 771/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 772/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 773/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 774/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 775/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 776/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 777/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 778/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 779/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 780/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 781/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 782/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 783/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 784/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 785/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 786/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 787/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.12 seconds\n",
"Epoch 788/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 789/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 790/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 791/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 792/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 793/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 794/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 795/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 796/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 797/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 798/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 799/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 800/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 801/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 802/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 803/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 804/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 805/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 806/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 807/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 808/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 809/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 810/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 811/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 812/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 813/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 814/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 815/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 816/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 817/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 818/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 819/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 820/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 821/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 822/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 823/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 824/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 825/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 826/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 827/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 828/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 829/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 830/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 831/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 832/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 833/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 834/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 835/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 836/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 837/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 838/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 839/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 840/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 841/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 842/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 843/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 844/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 845/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 846/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 847/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 848/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 849/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 850/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 851/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 852/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 853/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 854/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 855/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 856/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 857/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 858/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 859/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 860/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 861/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 862/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.16 seconds\n",
"Epoch 863/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 864/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 865/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 866/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 867/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 868/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 869/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 870/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 871/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 872/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 873/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 874/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 875/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 876/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 877/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 878/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 879/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 880/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 881/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 882/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 883/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 884/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 885/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 886/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 887/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 888/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 889/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 890/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 891/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 892/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 893/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 894/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 895/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 896/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 897/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 898/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 899/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 900/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 901/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 902/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 903/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 904/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 905/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 906/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 907/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 908/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 909/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 910/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 911/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 912/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 913/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 914/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 915/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 916/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 917/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 918/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 919/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 920/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 921/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 922/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 923/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 924/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.15 seconds\n",
"Epoch 925/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 926/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 927/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.13 seconds\n",
"Epoch 928/1000 - loss: 105.8439 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 929/1000 - loss: 105.8439 - val_loss: 83.5478-> took 0.13 seconds\n",
"Epoch 930/1000 - loss: 105.8439 - val_loss: 83.5478-> took 0.13 seconds\n",
"Epoch 931/1000 - loss: 105.8439 - val_loss: 83.5478-> took 0.12 seconds\n",
"Epoch 932/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 933/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 934/1000 - loss: 105.8440 - val_loss: 83.5478-> took 0.14 seconds\n",
"Epoch 935/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 936/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 937/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 938/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 939/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 940/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 941/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 942/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 943/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 944/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 945/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 946/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 947/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 948/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 949/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 950/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 951/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 952/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 953/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 954/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 955/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 956/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 957/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 958/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 959/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 960/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 961/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 962/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 963/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 964/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 965/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 966/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 967/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 968/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 969/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 970/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 971/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 972/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.15 seconds\n",
"Epoch 973/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 974/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 975/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 976/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 977/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 978/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 979/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 980/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 981/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 982/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 983/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 984/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 985/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 986/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 987/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 988/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 989/1000 - loss: 105.8442 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 990/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 991/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 992/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 993/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 994/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 995/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 996/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.13 seconds\n",
"Epoch 997/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.12 seconds\n",
"Epoch 998/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 999/1000 - loss: 105.8441 - val_loss: 83.5479-> took 0.14 seconds\n",
"Epoch 1000/1000 - loss: 105.8440 - val_loss: 83.5479-> took 0.13 seconds\n"
],
"name": "stdout"
}
]
},
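{
"cell_type": "markdown",
"metadata": {},
"source": [
"The log above is essentially flat: loss stays near 105.844 and val_loss near 83.548 from roughly epoch 560 to epoch 1000, so most of the 1000 epochs do no useful work. The next cell is a minimal early-stopping sketch for a manual loop like this one; it assumes the loop appends each epoch's validation loss to the `val_losses` list used by the plotting cell below, and `should_stop`, `patience`, and `min_delta` are illustrative names and values, not part of the original run."
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Illustrative early-stopping check for a manual training loop.\n",
"# Assumes the loop appends each epoch's validation loss to `val_losses`\n",
"# (the same list plotted below); `patience` and `min_delta` are example\n",
"# values, not part of the original notebook.\n",
"def should_stop(val_losses, patience=50, min_delta=1e-3):\n",
"    # Stop once the best validation loss seen has not improved by at\n",
"    # least min_delta within the last `patience` epochs.\n",
"    if len(val_losses) <= patience:\n",
"        return False\n",
"    best_before = min(val_losses[:-patience])\n",
"    recent_best = min(val_losses[-patience:])\n",
"    return recent_best > best_before - min_delta\n",
"\n",
"# Inside the epoch loop, after appending the epoch's validation loss:\n",
"#     if should_stop(val_losses):\n",
"#         break"
],
"execution_count": null,
"outputs": []
},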
{
"cell_type": "markdown",
"metadata": {
"id": "IVY-4U-KLcNc"
},
"source": [
"# Plot losses"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 265
},
"id": "KC_N3bDNLbYe",
"outputId": "7ed27389-584b-4d21-8167-a1732d32cff1"
},
"source": [
"plt.figure()\n",
"plt.plot(losses, label = 'Loss')\n",
"plt.plot(val_losses, label = 'Validation loss')\n",
"plt.grid(True)\n",
"plt.legend()\n",
"plt.show()"
],
"execution_count": 151,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD4CAYAAAAXUaZHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxU1fn48c8zWclCQhZCIECCbLIEwq7sIFXQgihacQEKlS7Wau3Xil9b0fqzP/yWVqWiX1eE1p+4FhFQqkgUtOwiOxogQDBsIQlZCGQ5vz/mggECmZnMJJk7z/v1mtfce+45N8/h6pOTu5wrxhiUUkrZi6OhA1BKKeV9mtyVUsqGNLkrpZQNaXJXSikb0uSulFI2FNzQAQAkJCSY1NRUj9qWlJQQGRnp3YAaOe1zYNA+B4a69Hnjxo3HjTGJNW1rFMk9NTWVDRs2eNQ2MzOTYcOGeTegRk77HBi0z4GhLn0Wkf2X2qanZZRSyoY0uSullA1pcldKKRtqFOfclVL1r7y8nJycHMrKyho6lHNiYmLYuXNnQ4dRr1zpc3h4OCkpKYSEhLi8X5eSu4jEAq8A3QADTAV2A28BqUA2cKsxJl9EBHgWGAOUAlOMMZtcjkgpVS9ycnKIjo4mNTUV5/+2Da+oqIjo6OiGDqNe1dZnYwx5eXnk5OSQlpbm8n5dPS3zLPCxMaYz0APYCcwAVhhjOgArrHWA0UAH6zMdeMHlaJRS9aasrIz4+PhGk9hVzUSE+Ph4t//CqjW5i0gMMAR4FcAYc8YYUwCMA+Zb1eYDN1rL44AFxmkNECsiyW5FpZSqF5rY/YMnx8mVkXsacAyYJyJfi8grIhIJJBljcq06h4Eka7kVcLBa+xyrzOvWZ5/g7d1n0GmLlVLqfK6ccw8GegH3GmPWisiz/HAKBgBjjBERtzKsiEzHedqGpKQkMjMz3WkOwPLscpbtK2fpJ5lEhQbOCKS4uNijfy9/pn32vpiYGIqKiny2f1ckJyeTm5t7br2ysrLBY6pvrva5rKzMrf8eXEnuOUCOMWattf4uzuR+RESSjTG51mmXo9b2Q0Drau1TrLLzGGNeAl4C6NOnj/HkCa2SLbm8uWsTV3TvzZXJTd1u76/0Kb7A4Os+79y5s1FcvKweg15QvbTw8HAyMjJc3m+tp2WMMYeBgyLSySoaCewAFgOTrbLJwAfW8mJgkjgNAAqrnb7xqhYxYQAcPtl4buVSStXN5s2bGTBgAOnp6YwfP578/HwA5syZQ5cuXUhPT+e2224D4PPPP6dnz5707NmTjIyMgBv1X46r97nfC7whIqHAXuCnOH8xvC0i04D9wK1W3WU4b4PMwnkr5E+9GnE1zaPDAThSqMldqbp4/MPt7Pj+pFf32aVlU2b+uKvb7SZNmsTf//53hg4dyqOPPsrjjz/OM888w6xZs9i3bx9hYWEUFBQAMHv2bObOncvAgQMpLi4mPDzcq33wZy7dCmmM2WyM6WOMSTfG3GiMyTfG5BljRhpjOhhjrjHGnLDqGmPMPcaYK4wx3Y0xns0I5oKkps4DqSN3peyhsLCQgoIChg4dCsDkyZP54osvAEhPT+eOO+7gn//8J8HBznHpwIEDeeCBB5gzZw4FBQXnypWfP6EaGuwgOhSOaHJXqk48GWHXt6VLl/LFF1/w4Ycf8uSTT7J161ZmzJjB9ddfz7Jlyxg4cCDLly+nc+fODR1qo+D3c8vEhzs4VKDJXSk7iImJoVmzZqxatQqAf/zjHwwdOpSqqioOHjzI8OHDeeqppygsLKS4uJg9e/bQvXt3HnroIfr27cuuXbsauAeNh1+P3AESI4SDJ0obOgyllAdKS0tJSUk5t37PPfcwf/58fvGLX1BaWkq7du2YN28elZWV3HnnnRQWFmKM4Te/+Q2xsbH88Y9/ZOXKlTgcDrp27cro0aMbsDeNi/8n9yYONh8opbLKEOQInHvdlbKDqqqq89bP3ha4Zs2ai+quXr36orK///3vPovN3/n9aZnmEUJ5pdGLqkopVY3fJ/fEJs4uHMjTUzNKKXWW/yf3COepGD3vrpRSP/D75B4XLgQ5hAOa3JVS6hy/T+7BDqFlbLgmd6WUqsbvkztA27hITe5KKVWNLZJ7m/gIsvNKGjoMpZQbhg8fzvLly88rmzt3Lr/85S8v2WbYsGFs2OCc0WTMmDHn5pip7rHHHmP27NmX/dmLFi1ix44d59YfffRRPv30U3fCr1FmZiY33HBDnffjDbZI7u0SIikoLedEyZmGDkUp5aKJEyeycOHC88ree+89Jk6c6FL7ZcuWERsb69HPvjC5/+lPf+Kaa67xaF+NlS2S+xWJUQDsPVbcwJEopVw1YcIEli5dypkzzkFZdnY2hw8fZvDgwfzyl7+kT58+dO3alZkzZ9bYPjU1lePHjwPw5JNP0rFjRwYNGsTu3bvP1Xn55Zfp27cvPXr04Oabb6a0tJSvvvqKxYsX8+CDD9KzZ0/27NnDlClTePfddwFYsWIFGRkZdO/enalTp3L69OlzP2/mzJn06tWL7t271zrVwYkTJ7jxxhtJT09nwIABbNmyBah5muLc3FyGDBlCz5496dat27npF+rC759QBWiXGAnA3mMl9EmNa+BolPJDH82Aw1u9u88W3WH0rEtujouLo1+/fnz00UeMGzeOhQsXMn78eESEJ598kri4OCorKxk5ciRbtmwhPT29xv1s3LiRhQsXsnnzZioqKujVqxe9e/cG4KabbuLuu+8G4A9/+AOvvvoq9957L2PHjuWGG25gwoQJ5+2rrKyMKVOmsGLFCjp27MikSZN44YUXuP/++wFISEhg06ZNPP/888yePZtXXnnlkv2bOXMmGRkZLFq0iM8++4xJkyaxefPmi6YpLi8v57XXXuPaa6/lkUceobKyktLSul9DtMXIPaVZBKFBDvYc15G7Uv6k+qmZhQsXnku2b7/9Nr169SIjI4Pt27efdwrlQqtWrWL8+PFERETQtGlTxo4de27btm3bGDx4MN27d+eNN95g+/btl41n9+7dpKWl0bFjR+D8KYfB+csCoHfv3mRnZ192X6tXr+auu+4CYMSIEeTl5XHy5Mkapynu27cv8+bN47HHHmPr1q1eeRuVLUbuQQ6hbXwEe4/pRVWlPHKZEbYvjRs3jt/+9rds2rSJ0tJSMjIy2LdvH7Nnz2b9+vU0a9aMKVOmUFbm2fQiU6ZMYdGiRfTo0YPXX3+9zu+kDQtzvv0tKCiIiooKj/Zx4TTF77//PkOGDOGLL75g6dKlTJkyhQceeIBJkybVKVZbjNzBeWpGz7kr5V+ioqIYPnw4U6dOPXch9eTJk0RGRhITE8ORI0f46KOPLruPIUOGsGjRIk6dOkVRUREffvjhuW1FRUUkJydTXl7OG2+8ca48Ojq6xlfyderUiezsbLKysoAfphz2xODBg8/9zMzMTBISEmjatOlF0xR/++237N+/n6SkJO6++25+9rOfsWnTJo9+ZnW2GLmD86LqZ7uOUl5ZR
UiQbX5nKWV7EydOZPz48edOz/To0YOMjAw6d+5M69atGThw4GXb9+rVi5/85Cf06NGD5s2b07dv33PbnnjiCfr3709iYiL9+/c/l9Bvu+027r77bubMmXPuQio4X0I9b948brnlFioqKujbty+/+MUvPOrXY489xtSpU0lPTyciIoL58+cD8Mwzz5w3TfGoUaNYunQpf/nLXwgJCSEqKooFCxZ49DPPY4xp8E/v3r2Np1auXGmMMeadDQdN24eWmD1Hizzel7842+dAon32vh07dvh0/544efJkQ4dQ71ztc03HC9hgLpFXbTPErX7HjFJKBTrbJPcrEqx73fWOGaWUsk9yj4kIIT4yVEfuSrnB+Ze9auw8OU62Se4AVzSP4rujOnJXyhXh4eHk5eVpgm/kjDHk5eURHh7uVjvb3C0D0LlFNO9vOoQxBhF9n6pSl5OSkkJOTg7Hjh1r6FDOKSsrczuJ+TtX+hweHn7ei8RdYbPk3pTi0/vJyT9F67iIhg5HqUYtJCSEtLS0hg7jPJmZmWRkZDR0GPXKV3221WmZTi2cj+zuPnzxwwlKKRVIbJncdx0+2cCRKKVUw7JVco8KC6Z1XBN26chdKRXgbJXcATolNdXkrpQKeLZL7lcmR7PveAll5ZUNHYpSSjUYl5K7iGSLyFYR2SwiG6yyOBH5RES+s76bWeUiInNEJEtEtohIL1924EKdWkRTWWXI0vvdlVIBzJ2R+3BjTE9jTB9rfQawwhjTAVhhrQOMBjpYn+nAC94K1hWdz11U1VMzSqnAVZfTMuOA+dbyfODGauULrEnL1gCxIpJch5/jltT4SMKCHezM1TtmlFKBS1x59FhE9gH5gAFeNMa8JCIFxphYa7sA+caYWBFZAswyxqy2tq0AHjLGbLhgn9NxjuxJSkrqfeFb0F1VXFxMVFTUeWVP/OcUQQ747/5NPNpnY1dTn+1O+xwYtM/uGT58+MZqZ1PO4+oTqoOMMYdEpDnwiYic99pvY4wREbcmqDDGvAS8BNCnTx8zbNgwd5qfk5mZyYVtM09u5631Bxk0eAjBNnxxR019tjvtc2DQPnuPS5nPGHPI+j4K/AvoBxw5e7rF+j5qVT8EtK7WPMUq8749n9Fx91y44K+PHq1jOFVeSZa+dk8pFaBqTe4iEiki0WeXgR8B24DFwGSr2mTgA2t5MTDJumtmAFBojMn1euQAx7+jZe6/oeT4ecXpKbEAbDlY6JMfq5RSjZ0rI/ckYLWIfAOsA5YaYz4GZgGjROQ74BprHWAZsBfIAl4GfuX1qM+KsWZJKzx4XnFafCTR4cFszinw2Y9WSqnGrNZz7saYvUCPGsrzgJE1lBvgHq9EV5uzyf3kIWj1w+30DoeQnhLDFk3uSqkA5d9XG2OsU/uFORdt6pESy67cIn1SVSkVkPw7uTdpRqUjtMbk3rttMyqqDN8c1NG7Uirw+HdyF6EsvDkU7L9oU5+2cYjAun0nGiAwpZRqWP6d3IFTTZIhb+9F5TERIXRKimZdtiZ3pVTgsUFybwkn9kJV1UXb+qfFsXF/PuWVF29TSik78/vkXhrREipOQdH3F23rlxZP6ZlKtn+v88wopQKL3yf3U01aOhfysi7a1jetGQBr9+bVZ0hKKdXgbJ3cm0eHc0ViJF/u0eSulAosfp/cT4fFQUgE5O2pcfvQjs1ZuzdP73dXSgUUv0/uiAPirqhx5A4wtFMipyuqWKOnZpRSAcT/kztAfLtLJvf+aXGEBTvI3H2snoNSSqmGY4/kntAJ8rOhvOyiTeEhQQxoF88X32pyV0oFDnsk9+ZXgqmC49/WuHlox0T2Hi/hQF5pPQemlFINwz7JHeDYrho3D+2UCEDmt0dr3K6UUnZjj+QedwU4guHozho3t0uIpF1iJMu3H67nwJRSqmHYI7kHh0J8+0uO3EWE0d1asGbvCfJLztRzcEopVf/skdwBEjtfcuQOcF3XZCqrDJ/uPFKPQSmlVMOwT3JvfqXzjpkzNV807daqKa1im/DxNj01o5SyP/sk98TOgLnkHTMiwrVdW7Dqu+MUn66o39iUUqqe2Se5n71j5jKnZkZ3b8GZyio+2aGjd6WUvdknucddAcHhcHjrJav0btOMVrFNWPT1xdMDK6WUndgnuQcFQ1I3OLzlklUcDmFsz5aszjrOsaLT9RicUkrVL/skd4DkdMjdAsZcssr4jFZUVhmWbNHRu1LKvmyW3HvA6ULnXTOX0DEpmiuTm7JosyZ3pZR92Su5t0h3fud+c9lq4zNa8s3BAvYdL6mHoJRSqv7ZK7k37+KchuAy590BxvZohUPgvY059RSYUkrVL3sl95Bw5/3utYzcW8SEM7RjIu9sPEhFZVU9BaeUUvXHXskdnOfdc7+57EVVgNv6teHIydP6Eg+llC3ZL7m3SIeSY1B0+QeVRnRuTkJUGAvXH6ynwJRSqv64nNxFJEhEvhaRJdZ6moisFZEsEXlLREKt8jBrPcvanuqb0C+hZU/n96GNl60WEuTglj4prNx9lCMnL36Dk1JK+TN3Ru73AdWf7X8KeNoY0x7IB6ZZ5dOAfKv8aate/UnuCY4QyFlXa9Vb+7Smssrwrl5YVUrZjEvJXURSgOuBV6x1AUYA71pV5gM3WsvjrHWs7SOt+vUjJNw5ej9Ye3JPS4hkQLs4Fq4/QFXV5c/RK6WUPwl2sd4zwO+BaGs9HigwxpydXjEHaGUttwIOAhhjKkSk0Kp/vPoORWQ6MB0gKSmJzMxMjzpQXFx8UdsraEmrg8tY9dknGEfIZdv3jKpgzd7TPP/+CroluPrP0bBq6rPdaZ8Dg/bZe2rNZiJyA3DUGLNRRIZ56wcbY14CXgLo06ePGTbMs11nZmZyUdvmhfD2BwztGAcpvS/b/uqKKt7d+xmbS2L49YS+HsVQ32rss81pnwOD9tl7XDktMxAYKyLZwEKcp2OeBWJF5OwvhxTgkLV8CGgNYG2PAfK8GHPtUvo5vw+urbVqaLCDif1as2LXUQ6eqPlFH0op5W9qTe7GmIeNMSnGmFTgNuAzY8wdwEpgglVtMvCBtbzYWsfa/pkxtdx07m1NkyGmDRxc41L12/u3wSHCP9fs93FgSilVP+pyn/tDwAMikoXznPqrVvmrQLxV/gAwo24heqjt1ZD9JVTV/gRqckwTru2axML1Bzl1prIeglNKKd9yK7kbYzKNMTdYy3uNMf2MMe2NMbcYY05b5WXWentr+15fBF6rdkOh9Dgc3e5S9UlXpVJ4qpwPv9HZIpVS/s9+T6ielTbU+b0306Xq/dPi6JQUzetfZVPfZ5GUUsrb7JvcY1pBQkfY+7lL1UWESVe3ZUfuSTbuz/dxcEop5Vv2Te7gHL3v/xIqzrhUfXxGK5qGBzPvy2zfxqWUUj5m7+TebhiUl7p0SyRARGgwt/dvy0fbcvW2SKWUX7N5ch8KQaHw7ccuN5l8dVscIjp6V0r5NXsn97BoSB0Mu5fVOr/7WckxTfhxj5a8tf4AhafKfRygUkr5hr2TO0Cn0XBiLxz/zuUm0walUXKmkrfWH/Bh
YEop5TuBkdzBOXp3UbdWMVzVLp55X2ZTrq/hU0r5Ifsn95gU59uZdi11q9nPBqeRW1jGsq25PgpMKaV8x/7JHaDLOOfLOwpcP80yvFNz2iVG8sqqffpQk1LK7wRGcu92s/N723suN3E4hGmD0th6qJC1+074KDCllPKNwEjucWmQ0he2up7cAW7ulUJ8ZCj/+/keHwWmlFK+ERjJHaDbBDiyFY7ucrlJeEgQUwelkbn7GNu/L/RhcEop5V2Bk9y7jgdxwNZ33Gp254C2RIUF83ymjt6VUv4jcJJ7dBK0Gw7fLIQq1+dsj2kSwp0D2vLR1lz2HS/xYYBKKeU9gZPcAXpNgpM5sOczt5pNG5RGSJCDF/Xcu1LKTwRWcu80BiISYOPrbjVLjA7j1j6teW9TDocLy3wTm1JKeVFgJffgUOg50TmRWNERt5pOH9KOKgMvr2qYF0sppZQ7Aiu5A2RMgqoK+OZNt5q1jotgXI+WvLnuAPklrs0Pr5RSDSXwkntiR2hzNWx4za0LqwC/GHYFpWcqef2rbN/EppRSXhJ4yR2g/3Qo2O/WPO8AHZOiGdUlide/yqaoTKcDVko1XoGZ3Dv/GJqmwJoX3G5674j2FJ4qZ76O3pVSjVhgJvegYOh3N2SvgsPb3GqanhLLyM7NeXnVPh29K6UarcBM7uC85z0kAta6P3q//5qOOnpXSjVqgZvcI+Kgx22w5R0oPuZW0+4pMVxzpXP0flJH70qpRihwkzvAgF9B5RlY96LbTe8baY3e9UXaSqlGKLCTe0IHuPIGWPcSlJ10q6lz9J7EK6t19K6UanwCO7kDDPotlBW6PSUBwP3XdKDwVDmv6+hdKdXIaHJv1RvShsJ/5kLFabeadmtljd5X7dXRu1KqUdHkDs7Re/Fht6ckAOfo/WRZBa+t3ueDwJRSyjO1JncRCReRdSLyjYhsF5HHrfI0EVkrIlki8paIhFrlYdZ6lrU91bdd8IJ2w6BlBnz5rNtTEnRrFcN1XVvwyqp9nNA5Z5RSjYQrI/fTwAhjTA+gJ3CdiAwAngKeNsa0B/KBaVb9aUC+Vf60Va9xE3GO3k/she3/crv5f13bkdIzFTy/MssHwSmllPtqTe7GqdhaDbE+BhgBvGuVzwdutJbHWetY20eKiHgtYl/p/GNo3hU+f8rt0Xv75tFM6J3CgjX7OVRwykcBKqWU68QYU3slkSBgI9AemAv8BVhjjc4RkdbAR8aYbiKyDbjOGJNjbdsD9DfGHL9gn9OB6QBJSUm9Fy5c6FEHiouLiYqK8qjthRKOfUW37U+x48rfcjRpmFtt805V8dCqU1yVHMy07mFeiedSvNlnf6F9DgzaZ/cMHz58ozGmT40bjTEuf4BYYCUwCMiqVt4a2GYtbwNSqm3bAyRcbr+9e/c2nlq5cqXHbS9SWWnM81cbM6eXMRXlbjd/4sPtJm3GEvPdkZPei6kGXu2zn9A+Bwbts3uADeYSedWtu2WMMQVWcr8KiBWRYGtTCnDIWj5kJXus7TFAnjs/p8E4HDD0IcjLgm3v1l7/Ar8a3p6I0GBmL//WB8EppZTrXLlbJlFEYq3lJsAoYCfOJD/BqjYZ+MBaXmytY23/zPoN4x863wBJ3Z3n3isr3GoaFxnK3YPb8fH2w2w+WOCjAJVSqnaujNyTgZUisgVYD3xijFkCPAQ8ICJZQDzwqlX/VSDeKn8AmOH9sH3I4YBhM5x3zmx92+3m0wanER8ZylMf7cKffqcppewluLYKxpgtQEYN5XuBfjWUlwG3eCW6htL5emiR7hy9d5vgfLG2i6LCgvn1iPY8/uEOMncfY3jn5j4MVCmlaqZPqNZEBEb8EfKzYdP8Wqtf6I7+bUmNj+DJZTupqKzyfnxKKVULTe6X0mEUtB0In/8PnC6uvX41ocEOHh5zJVlHi3lz/UEfBaiUUpemyf1SROCax6HkKKx53u3mP+qSRP+0OJ7+5FudVEwpVe80uV9O677Ou2e+nAMlx2uvX42I8McbupBfeoa5Oi2BUqqeaXKvzchHobwEVv3V7abdWsVwU0YK81Znc/BEqQ+CU0qpmmlyr01iJ+h5O6x/BQoOuN38wWs7EeQQZn28ywfBKaVUzTS5u2LYw4DAyj+73bRFTDg/H9qOpVty2ZB9wvuxKaVUDTS5uyImBfr/HL5ZCIe3ut18+pB2JMeE8+gH26ms0geblFK+p8ndVYMfgCax8PHD4OaTpxGhwTxy/ZXsyD3JG2v3+yhApZT6gSZ3VzVpBsMfgexVsHuZ282v757M1VfEM3v5bvKK3XtXq1JKuUuTuzt6/xQSOsG//wAV7r1ST0T407iulJ6p5Cm9uKqU8jFN7u4ICoZr/+ycVGzdS243b988mqmD0nh7Qw6bDuT7IECllHLS5O6uDtdA+1HOaQncfLAJ4DcjO5DUNIxHP9imF1eVUj6jyd0T1z4JZ4o9ujUyKiyY/x5zJdsOneT/rXP/vnmllHKFJndPJHaCvtNg4zw4ssPt5mN7tOSqdvH8z8e7OHqyzAcBKqUCnSZ3Tw17GMKawrIH3b41UkT4803dOV1RxczF230UoFIqkGly91REHFwzE/avhq3vuN08LSGS+0Z24KNth/n39sM+CFApFcg0uddFr8nQshcsfwTKCt1uPn1IOzq3iObRD7ZTpNMCK6W8SJN7XTiC4Pq/Qskxjy6uhgQ5mHVzOkeKyvifj3f7IEClVKDS5F5XrXpBn6nO+95zt7jdvGfrWKZcnco/1+5n436dWEwp5R2a3L1h5B+hSRws/R1Uuf/O1P/6USdaxjThofe2UlZe6YMAlVKBRpO7NzRpBqP+BDnrYPMbbjePDAvmyfHdyDpazNOffuuDAJVSgUaTu7f0mAhtrnLOO1N81O3mwzo1Z2K/Nrz0xV49PaOUqjNN7t7icMCP50B5KXz0e4928cj1V9Iqtgm/e/sbSs9UeDlApVQg0eTuTYkdYejvYfu/YNdSt5tHhQUz+5YeZOeV8tRHOnOkUspzmty9beD9kNTNeXHVg3vfB7SLZ+rANOb/Zz9fZrk/MZlSSoEmd+8LCoGxf4fiI/DJox7t4vfXdaJdYiQPvvMNhaX6cJNSyn2a3H2hVS+46h7Y+DrsW+V28/CQIJ75SU+OFp1mxvtbMG7OXaOUUprcfWXYf0OzNFh8L5wpcbt5ekosv7+uEx9tO8yb6w76IECllJ3VmtxFpLWIrBSRHSKyXUTus8rjROQTEfnO+m5mlYuIzBGRLBHZIiK9fN2JRik0AsY9B/nZ8O8/erSLnw1qx+AOCTz+4Xa+PVLk3fiUUrbmysi9AvidMaYLMAC4R0S6ADOAFcaYDsAKax1gNNDB+kwHXvB61P4idZDz9MyGVyHrU7ebOxzCX2/tQXR4ML9582t9elUp5bJak7sxJtcYs8laLgJ2Aq2AccB8q9p84EZreRywwDitAWJFJNnrkfuLEX+ExCth0T1Q6v7DSc2jw5l9Sw92HS7i/yx1/8UgSqnAJO5crBORVOALoBtwwBgTa5ULkG+MiRWRJcAsY8xqa9sK4CFjzIY
L9jUd58iepKSk3gsXLvSoA8XFxURFRXnUtr5EFe2l16b/4njCVezo+qBH+1i46wwfZ5czPT2M9KZljb7P3uYPx9nbtM+BoS59Hj58+EZjTJ8aNxpjXPoAUcBG4CZrveCC7fnW9xJgULXyFUCfy+27d+/exlMrV670uG29+vx/jJnZ1Jgt73jUvLyi0tz6v1+ZTn9YZhYsXuHl4Bo/vznOXqR9Dgx16TOwwVwir7p0t4yIhADvAW8YY963io+cPd1ifZ+dUOUQ0Lpa8xSrLLAN/C2k9IWlD0D+frebBwc5eO72XsQ0CeG5r8soPKX3vyulLs2Vu2UEeBXYaYz5W7VNi4HJ1vJk4INq5ZOsu2YGAIXGmFwvxuyfgoLhppecUwK/Nw0q3U/OidFhPH9HL46fMvzu7c1UVen970qpmrkych8I3AWMEJHN1mcMMAsYJSLfAddY6wDLgL1AFvAy8LHUI2QAAA8JSURBVCvvh+2n4trB2GchZz2sfNKjXfRuG8fEzqF8uvMoc1dmeTlApZRdBNdWwTgvjMolNo+sob4B7qljXPbV7WbY+zmsfhpSB0P7i/4JazWyTTAl4Yn89ZNv6ZAUxXXdAvdmJKVUzfQJ1YZw3Szn7ZH/+jkUHXG7uYjwf2/qTkabWO5/azNbc9yfoEwpZW+a3BtCaATc8jqcLob3fwaV7s/dHh4SxEt39SE+Moxp89eTW3jK+3EqpfyWJveG0rwzXP9X2PcFrHjco10kRofx2pS+lJ6pZNrrGyg5rS/4UEo5aXJvSBl3QJ9p8NUc2PZ+7fVr0KlFNH+/PYNdh09y38Kvqah0/wXdSin70eTe0K6bBa37wwe/hiOeTS8wvFNzHh/blU93HuXh97fqFMFKKU3uDS44FG6ZD2FR8NYdcKrAo93cdVUq943swDsbc5j1sb6iT6lAp8m9MWiaDLcugIID8J5nF1gB7r+mA3cNaMuLn+/lpS/2eDlIpZQ/0eTeWLQZAGNmQ9YnsPy/PdqFiPDY2K5cn57Mn5ftYuG6A14OUinlL2p9iEnVoz4/hbws+M9zzqdZB/zC7V0EOYS/3dqDktMVzHh/KyLwk75tfBCsUqox05F7YzPqCeh8Ayx/GHZ/5NEuwoKD+N87ezO0YyIPvbeVt9brCF6pQKPJvbFxOOCmlyG5B7w7Fb7f7NFuwkOCePEuTfBKBSpN7o1RaARMfAsi4uGNWyDPs4ujFyb4V1fv83KgSqnGSpN7YxWdBHe+B1UV8I8b4eT3Hu3mbIIf3a0FTyzZwVMf79L74JUKAJrcG7PETnDnu853r/5jvEfvYAVngn/u9l7c3r8NL2Tu4aH3tuiTrErZnCb3xq5Vb5j4JpzYB/+8GU4XebSbIIfw5I3d+M3IDry9IYe7F2ygqEzf5qSUXWly9wdpQ5yzSOZ+A/+cQFBFqUe7EREeGNWRJ27sxhffHeem57/iQJ5n+1JKNW6a3P1F5zEw4VXIWU/6lseg7KTHu7prQFsWTO3H0aLTjJ27mq/2HPdenEqpRkGTuz/pOh5umUd0URb88yYo8/wlHQPbJ7D41wNJiArjrlfXMXdllr6TVSkb0eTub7qMY0eXB+H7r2HBjR5fZAVoGx/Jv351Ndd1a8Fflu9m8rx1HCs67cVglVINRZO7HzqeeBXc+g84sh1euxYKDnq8r+jwEJ6bmMGfx3dn3b4TjJmzipW7j3oxWqVUQ9Dk7q86j4G7/uV8B+uro5yJ3kMiwu392/DBrwcS2ySEn85bzwNvbSa/5IwXA1ZK1SdN7v4sdSBMteafeW00ZK+u0+46t2jKkt8M4t4R7Vn8zfeMevpzPvzme33oSSk/pMnd3yV1hWmfOJ9oXXAjbJhXp92FBQfxux91YvGvB5Ec04R73/yaW1/8D1tzPL94q5Sqf5rc7SC2tTPBtxsKS+6Hpb+Dyro9oNSlZVMW3TOQ/3tTd/YeK+HHz63m/oVfs+dYsZeCVkr5kiZ3u2gSC7e/DQPvg/WvwIJxUHysTrsMcggT+7Vh5YPD+PnQdny8/TDX/O1zfv3/NrEz1/P77JVSvqfJ3U4cQTDqT3DTK3BoI/zvQNizss67bRoewsOjr2T1QyP4+ZArWLnrKKOfXcXNL3zF+5tyKCuv9ELwSilv0uRuR+m3wN2fQXisc8KxTx+r82kagISoMGaM7syXM0bwyJgrOVFyhgfe/oa+/+dT7lv4NR9vy+XUGU30SjUG+po9u0rqCtMz4eMZsPpp2JsJ4+Y6y+soNiKUu4e042eD0/jPnjwWbT7EJzuO8MHm7wkNdtCzdSwD2sXTN7UZXZKbEh8VVuefqZRyjyZ3OwuNgLFz4IoRzousLw6Fwb9zfoJD67x7EeHq9glc3T6Bisoq1u47wcpdR1m77wTPffYdZ2czSIwOo3OLaNrERdCqWRNaxTo/cZGhxEaEEtMkhCCH1DkepdQPak3uIvIacANw1BjTzSqLA94CUoFs4FZjTL6ICPAsMAYoBaYYYzb5JnTlsq43Qupg5yj+81mw4wMY/ZTz7hovCQ5yMLB9AgPbJwBwsqycLQcL2XX4JLsOF7H7cBHbDuWSX1rz6aGm4cHERIQQERJMeIiD8JAgmoQGER7s/M47dpp/528l2CE4RAh2CEEXfIIdgsPaDnD214UIiLUm1X6HSLV6Z8ulenm1Mmqo62vfHijn0Nr99fPDGolA7DPFvnm3gisj99eB54AF1cpmACuMMbNEZIa1/hAwGuhgffoDL1jfqqFFxsPNL0P3CbDsv2DBWOeLuEf9CeKv8PqPaxoewqAOCQzqkHBeecnpCr4vOMWhglMUlJaTX3qGgtJyCkrPUHiqnFPllZwqr6KsvJITJWcoK6/kVHklRSWV7Cg4TGWVoaLKUHX22zi/bfuc1Y5tDR1B/QuwPk/qUve/omtSa3I3xnwhIqkXFI8DhlnL84FMnMl9HLDAOB9pXCMisSKSbIzJ9VbAqo46XgtpQ2HNXFj1N5jbH/r8FAbeDzGtfP7jI8OC6ZAUTYekaLfaZWZmMmzYsEtur6oyVBpDpZXoDc5s71zGWjbVls8uXLqutflcXUO1CvXgq6++4uqrr66/H9gIBGKfv173H5/s19Nz7knVEvZhIMlabgVUn8UqxyrT5N6YhIQ7z7v3vANW/hk2vAYbX4eMO2HQbyG2TUNH6DaHQ3AghAQ1dCTeExvuoHnT8IYOo14FYp/Dgn1znk9cmTfEGrkvqXbOvcAYE1tte74xppmILAFmGWNWW+UrgIeMMRtq2Od0YDpAUlJS74ULF3rUgeLiYqKiojxq66+83eewsqO03f8uLQ6vQEwVefF9OdRqDPnN0kEax92yepwDg/bZPcOHD99ojOlT0zZPR+5Hzp5uEZFk4OwcsYeA1tXqpVhlFzHGvAS8BNCnTx9zuT+5L6e2P9ftyDd9vhUKc2D9qyRsWkDClpkQdwV0vwW63eR8WXcD0uMcGLTP3uPpsGwxMNlangx8UK18kjgNAAr1fLsfiUmBa2bCAzvgppehaUv4/CmY2w+evxpWPAH7VkGFvtBDqcbOlVsh38
R58TRBRHKAmcAs4G0RmQbsB261qi/DeRtkFs5bIX/qg5iVrwWHQfqtzk/RYeetk9v/5XwYatVsCImA1v0guSe07AnJPSC2rXP6A6VUo+DK3TITL7FpZA11DXBPXYNSjUh0C+j/c+enrBCyv4S9K+HgWvjPXKiy7lt3hDgvxMalQUxriEyAyESIiHd+QiOdvxRCmvywHBwOjmBwNI7z+krZiT6hqlwXHuN8A1TnMc71itNwdAcc3gon9sKJfZC/z/l+19ITuH7foDhH/Y5gkKAfEr4j2CpzMOB0GWxqYlWXH9pV+/ph/cLt1dYvt62R6VtaAtsiGzqMehWIfU5s/mN+uLPcezS5K88Fh0HLDOfnQlWVcCofSo7DqRNwphTKS6D8FJwpgfJSqCiDqiqoqnB+TKWzXVXlD2VVFYAhPzeX5BbJnPuFUf1GdZfXa6vbuJQcO0ZkYmJDh1GvArHPFcG+uTtIk7vyDUeQdWomofa6LtidmUlygN1FsSMzk+baZ9vLz8z0yX71ZKdSStmQJnellLIhTe5KKWVDmtyVUsqGNLkrpZQNaXJXSikb0uSulFI2pMldKaVsyKX53H0ehMgxnBOQeSIBOO7FcPyB9jkwaJ8DQ1363NYYU+MjvY0iudeFiGy41GT1dqV9Dgza58Dgqz7raRmllLIhTe5KKWVDdkjuLzV0AA1A+xwYtM+BwSd99vtz7koppS5mh5G7UkqpC2hyV0opG/Lr5C4i14nIbhHJEpEZDR2Pt4hIaxFZKSI7RGS7iNxnlceJyCci8p313cwqFxGZY/07bBGRXg3bA8+ISJCIfC0iS6z1NBFZa/XrLREJtcrDrPUsa3tqQ8btKRGJFZF3RWSXiOwUkasC4Bj/1vpvepuIvCki4XY8ziLymogcFZFt1crcPrYiMtmq/52ITHYnBr9N7iISBMwFRgNdgIki0qVho/KaCuB3xpguwADgHqtvM4AVxpgOwAprHZz/Bh2sz3TghfoP2SvuA3ZWW38KeNoY0x7IB6ZZ5dOAfKv8aaueP3oW+NgY0xnogbPvtj3GItIK+A3QxxjTDQgCbsOex/l14LoLytw6tiISB8wE+gP9gJlnfyG4xBjjlx/gKmB5tfWHgYcbOi4f9fUDYBSwG0i2ypKB3dbyi8DEavXP1fOXD5Bi/Qc/AliC843Vx4HgC483sBy4yloOtupJQ/fBzf7GAPsujNvmx7gVcBCIs47bEuBaux5nIBXY5umxBSYCL1YrP69ebR+/Hbnzw38oZ+VYZbZi/SmaAawFkowxudamw0CStWyHf4tngN8DVdZ6PFBgjKmw1qv36Vx/re2FVn1/kgYcA+ZZp6JeEZFIbHyMjTGHgNnAASAX53HbiL2Pc3XuHts6HXN/Tu62JyJRwHvA/caYk9W3GeevclvcxyoiNwBHjTEbGzqWehQM9AJeMMZkACX88Gc6YK9jDGCdUhiH8xdbSyCSi09dBIT6OLb+nNwPAa2rradYZbYgIiE4E/sbxpj3reIjIpJsbU8Gjlrl/v5vMRAYKyLZwEKcp2aeBWJFJNiqU71P5/prbY8B8uozYC/IAXKMMWut9XdxJnu7HmOAa4B9xphjxphy4H2cx97Ox7k6d49tnY65Pyf39UAH60p7KM4LM4sbOCavEBEBXgV2GmP+Vm3TYuDsFfPJOM/Fny2fZF11HwAUVvvzr9EzxjxsjEkxxqTiPI6fGWPuAFYCE6xqF/b37L/DBKu+X41wjTGHgYMi0skqGgnswKbH2HIAGCAiEdZ/42f7bNvjfAF3j+1y4Eci0sz6q+dHVplrGvqiQx0vWIwBvgX2AI80dDxe7NcgnH+ybQE2W58xOM83rgC+Az4F4qz6gvPOoT3AVpx3IzR4Pzzs+zBgibXcDlgHZAHvAGFWebi1nmVtb9fQcXvY157ABus4LwKa2f0YA48Du4BtwD+AMDseZ+BNnNcVynH+lTbNk2MLTLX6nwX81J0YdPoBpZSyIX8+LaOUUuoSNLkrpZQNaXJXSikb0uSulFI2pMldKaVsSJO7UkrZkCZ3pZSyof8PYuSzu8YHxXEAAAAASUVORK5CYII=\n",
"text/plain": [
"
"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
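{
"cell_type": "markdown",
"metadata": {},
"source": [
"Both curves in the plot are flat from the first epochs, which often happens when unscaled inputs saturate a deep stack of sigmoid layers and the gradients vanish. A common remedy is to standardize each feature with training-set statistics before training. The cell below is an illustrative sketch only, not part of the original run; the `train_data_scaled` and `test_data_scaled` names are new."
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Illustrative sketch, not part of the original run: standardize each\n",
"# feature using training-set statistics only.\n",
"mean = train_data.mean(axis=0)\n",
"std = train_data.std(axis=0)\n",
"train_data_scaled = (train_data - mean) / std\n",
"# Reuse the training-set mean/std on the test set to avoid leakage.\n",
"test_data_scaled = (test_data - mean) / std"
],
"execution_count": null,
"outputs": []
},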
{
"cell_type": "code",
"metadata": {
"id": "eg5UUcCsJPQk"
},
"source": [
""
],
"execution_count": null,
"outputs": []
}
]
}