Friday, May 17, 2019

Keras Implementation on the Pima Indians Diabetes Dataset (TensorFlow, ~80% Train / 74% Test Accuracy)


Step 1:

Import Libraries

import tensorflow as tf
import numpy as np
import pandas as pd

import matplotlib.pyplot as plt
%matplotlib inline

from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, classification_report
import scikitplot as skplt

from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import ModelCheckpoint

Step 2:

Process Data

In [14]:
def Data_Process():
    
    """
    Read the CSV, min-max normalize the feature columns,
    perform a train/test split, and return
    X_Train, X_Test, Y_Train, Y_Test.
    """
    # Column names for the Pima Indians Diabetes features and label
    columns_to_named = ["Pregnancies","Glucose","BloodPressure",
           "SkinThickness","Insulin","BMI","DiabetesPedigreeFunction",
           "Age","Class"]
    
    # Read the dataset and rename the columns
    # (note: header=0 discards the file's first row; if your copy of
    # pima-indians-diabetes.csv has no header row, use header=None instead)
    df = pd.read_csv("pima-indians-diabetes.csv", header=0, names=columns_to_named)

    col_norm = ['Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness', 'Insulin',
       'BMI', 'DiabetesPedigreeFunction', 'Age']
    
    # Min-max normalization per feature: x' = (x - min) / (max - min)
    df1_norm = df[col_norm].apply(lambda x: (x - x.min()) / (x.max() - x.min()))
    
    X_Data = df1_norm
    Y_Data = df["Class"]
    
    X_Train, X_Test, Y_Train, Y_Test = train_test_split(X_Data, Y_Data,
                                                        test_size=0.3, random_state=101)
    
    return X_Train, X_Test, Y_Train, Y_Test
In [15]:
X_Train, X_Test, Y_Train, Y_Test = Data_Process()
X_Train.shape
Out[15]:
(536, 8)
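One caveat: the lambda in Data_Process computes each feature's min and max over the full dataset before the split, so test-set statistics leak into the training features. A minimal leakage-free sketch, assuming sklearn's MinMaxScaler and splitting the raw feature columns first (variable names here are mine, not from the original post):

from sklearn.preprocessing import MinMaxScaler

# Split the raw feature columns first, then fit the scaler on train only
X_Train, X_Test, Y_Train, Y_Test = train_test_split(df[col_norm], df["Class"],
                                                    test_size=0.3, random_state=101)
scaler = MinMaxScaler()
X_Train = scaler.fit_transform(X_Train)  # per-feature min/max learned from train only
X_Test = scaler.transform(X_Test)        # test scaled with the train statistics

Note that fit_transform/transform return NumPy arrays, so the later .to_numpy() calls would no longer be needed.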

Step 3:

Create Model

In [56]:
model = Sequential()
# Input + 1st hidden layer: 12 nodes, ReLU, expects 8 input features
model.add(Dense(12, input_dim=8, kernel_initializer='uniform', activation='relu'))
# 2nd hidden layer: 10 nodes, ReLU
model.add(Dense(10, kernel_initializer='uniform', activation='relu'))
# Output layer: 1 node, sigmoid for binary classification
model.add(Dense(1, kernel_initializer='uniform', activation='sigmoid'))


model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
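ModelCheckpoint is imported in Step 1 but never used. A minimal sketch of how it could be wired in (the file name is a placeholder of my choosing, not from the original run): it saves the weights whenever validation loss improves, once passed to model.fit via callbacks.

# Hypothetical checkpoint (not in the original run): keep the best weights by val_loss
checkpoint = ModelCheckpoint('best_model.h5', monitor='val_loss',
                             save_best_only=True, verbose=1)
# then: model.fit(..., callbacks=[checkpoint])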

Train

In [57]:
history = model.fit(X_Train.to_numpy(),
                    Y_Train.to_numpy(),
                    epochs=200, batch_size=30,
                    validation_data=(X_Test.to_numpy(), Y_Test.to_numpy()),
                    verbose=2)
Train on 536 samples, validate on 231 samples
Epoch 1/200
 - 1s - loss: 0.6919 - acc: 0.6437 - val_loss: 0.6902 - val_acc: 0.6494
Epoch 2/200
 - 0s - loss: 0.6888 - acc: 0.6530 - val_loss: 0.6873 - val_acc: 0.6494
Epoch 3/200
 - 0s - loss: 0.6855 - acc: 0.6530 - val_loss: 0.6833 - val_acc: 0.6494
Epoch 4/200
 - 0s - loss: 0.6806 - acc: 0.6530 - val_loss: 0.6788 - val_acc: 0.6494
Epoch 5/200
 - 0s - loss: 0.6753 - acc: 0.6530 - val_loss: 0.6727 - val_acc: 0.6494
[Epochs 6-198 omitted: training accuracy climbs to about 0.79 while validation accuracy plateaus around 0.73-0.75 from roughly epoch 60 onward.]
Epoch 199/200
 - 0s - loss: 0.4556 - acc: 0.7854 - val_loss: 0.5253 - val_acc: 0.7403
Epoch 200/200
 - 0s - loss: 0.4551 - acc: 0.7892 - val_loss: 0.5251 - val_acc: 0.7403

Accuracy

In [59]:
# Keras < 2.3 logs 'acc'/'val_acc'; newer versions use 'accuracy'/'val_accuracy'
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('Model Accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'])
plt.show()

Loss

In [58]:
# Model Loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model Loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'])
plt.show()
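The loss curves flatten long before epoch 200, so most of the later epochs add little. A hedged sketch, not part of the original run, using Keras's EarlyStopping callback to stop once val_loss has not improved for 20 epochs (the patience value is my choice):

from keras.callbacks import EarlyStopping

# Stop training when validation loss stalls; this pairs well with the
# ModelCheckpoint sketch from Step 3, which keeps the best weights
early_stop = EarlyStopping(monitor='val_loss', patience=20, verbose=1)
history = model.fit(X_Train.to_numpy(), Y_Train.to_numpy(),
                    epochs=200, batch_size=30,
                    validation_data=(X_Test.to_numpy(), Y_Test.to_numpy()),
                    callbacks=[early_stop], verbose=2)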
In [60]:
scores = model.evaluate(X_Test.to_numpy(), Y_Test.to_numpy(), verbose=0)
print("%s: %.2f%%" % (model.metrics_names[1], scores[1]*100))
acc: 74.03%
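The confusion_matrix, classification_report, and scikitplot imports from Step 1 are never exercised above. A minimal sketch of how they could be applied to the test split (the 0.5 decision threshold is my assumption, not the post's):

# Turn predicted probabilities into 0/1 class labels at a 0.5 threshold
Y_Pred = (model.predict(X_Test.to_numpy()) > 0.5).astype(int).ravel()

print(classification_report(Y_Test, Y_Pred))
print(confusion_matrix(Y_Test, Y_Pred))

# scikit-plot renders the confusion matrix as a heatmap
skplt.metrics.plot_confusion_matrix(Y_Test, Y_Pred)
plt.show()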
