import pandas as pd
from sklearn.model_selection import train_test_split
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import EarlyStopping

df_train = pd.read_csv(
    "https://data.heatonresearch.com/data/t81-558/datasets/"+\
    "kaggle_iris_train.csv", na_values=['NA','?'])

# Encode feature vector
df_train.drop('id', axis=1, inplace=True)

num_classes = df_train['species'].nunique()

print("Number of classes: {}".format(num_classes))

# Convert to numpy - Classification
x = df_train[['sepal_l', 'sepal_w', 'petal_l', 'petal_w']].values
dummies = pd.get_dummies(df_train['species']) # Classification
species = dummies.columns
y = dummies.values
    
# Split into train/test
x_train, x_test, y_train, y_test = train_test_split(    
    x, y, test_size=0.25, random_state=45)

# Train, with early stopping
model = Sequential()
model.add(Dense(50, input_dim=x.shape[1], activation='relu')) # Hidden 1
model.add(Dense(25, activation='relu')) # Hidden 2
model.add(Dense(y.shape[1], activation='softmax')) # Output
model.compile(loss='categorical_crossentropy', optimizer='adam')
monitor = EarlyStopping(monitor='val_loss', min_delta=1e-3, 
                        patience=5, verbose=1, mode='auto',
                       restore_best_weights=True)

model.fit(x_train,y_train,validation_data=(x_test,y_test),
          callbacks=[monitor],verbose=0,epochs=1000)
Number of classes: 3
Restoring model weights from the end of the best epoch.
Epoch 00055: early stopping
<tensorflow.python.keras.callbacks.History at 0x178e5493fc8>
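Notice that early stopping halted training at epoch 55 and, because we passed restore_best_weights=True, the model was rolled back to the weights from its best validation epoch. If you want to confirm programmatically where training stopped, the callback records it; a minimal sketch:

# stopped_epoch is 0 if early stopping never triggered.
print("Training stopped at epoch: {}".format(monitor.stopped_epoch))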

Now that we've trained the neural network, we can check its log loss on the validation data.

from sklearn import metrics

# Calculate multi log loss error
pred = model.predict(x_test)
score = metrics.log_loss(y_test, pred)
print("Log loss score: {}".format(score))
Log loss score: 0.3136451941728592
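For reference, multi-class log loss is the average negative log of the probability that the model assigned to each row's true class:

$$ \mbox{log loss} = -\frac{1}{N}\sum_{i=1}^{N}\sum_{c=1}^{C} y_{i,c}\log(\hat{y}_{i,c}) $$

where $y_{i,c}$ is 1 if row $i$ belongs to class $c$ (0 otherwise) and $\hat{y}_{i,c}$ is the predicted probability for that class. Lower is better, a perfect classifier scores 0, and confident wrong answers are penalized heavily.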

Now we are ready to generate the Kaggle submission file. We will use the iris test data, which does not contain a $y$ target value. It is our job to predict these values and submit the resulting file to Kaggle.

# Generate Kaggle submit file

# Encode feature vector
df_test = pd.read_csv(
    "https://data.heatonresearch.com/data/t81-558/datasets/"+\
    "kaggle_iris_test.csv", na_values=['NA','?'])

# Convert to numpy - Classification
ids = df_test['id']
df_test.drop('id', axis=1, inplace=True)
x = df_test[['sepal_l', 'sepal_w', 'petal_l', 'petal_w']].values

# Generate predictions
pred = model.predict(x)

# Create submission data set

df_submit = pd.DataFrame(pred)
df_submit.insert(0,'id',ids)
df_submit.columns = ['id','species-0','species-1','species-2']

# Write submit file locally
df_submit.to_csv("iris_submit.csv", index=False) 

print(df_submit)
     id  species-0  species-1  species-2
0   100   0.022236   0.533230   0.444534
1   101   0.003699   0.394908   0.601393
2   102   0.004600   0.420394   0.575007
3   103   0.956168   0.040161   0.003672
4   104   0.975333   0.022761   0.001906
5   105   0.966681   0.030938   0.002381
6   106   0.992637   0.007049   0.000314
7   107   0.002810   0.358485   0.638705
8   108   0.026152   0.557480   0.416368
9   109   0.001194   0.350682   0.648124
10  110   0.000649   0.268023   0.731328
11  111   0.994907   0.004923   0.000170
12  112   0.072954   0.587299   0.339747
13  113   0.000571   0.258208   0.741221
14  114   0.977138   0.021400   0.001463
15  115   0.004665   0.449740   0.545596
16  116   0.073553   0.567955   0.358493
17  117   0.968778   0.029240   0.001982
18  118   0.983742   0.015341   0.000918
19  119   0.986016   0.013193   0.000792
20  120   0.023752   0.583601   0.392647
21  121   0.032858   0.584882   0.382260
22  122   0.004007   0.395656   0.600338
23  123   0.000885   0.240763   0.758352
24  124   0.000531   0.271212   0.728256
25  125   0.985742   0.013471   0.000787
26  126   0.001298   0.320333   0.678369
27  127   0.001753   0.342856   0.655391
28  128   0.001147   0.317827   0.681026
29  129   0.981223   0.017589   0.001188
30  130   0.036438   0.578421   0.385140
31  131   0.976528   0.021834   0.001638
32  132   0.003681   0.405441   0.590878
33  133   0.024478   0.539376   0.436146
34  134   0.012039   0.466313   0.521649
35  135   0.963704   0.033453   0.002844
36  136   0.000614   0.244336   0.755050
37  137   0.008160   0.490362   0.501478
38  138   0.976859   0.021646   0.001495
39  139   0.003789   0.317224   0.678987
40  140   0.962254   0.034885   0.002861
41  141   0.000792   0.289380   0.709828
42  142   0.000253   0.239028   0.760719
43  143   0.001390   0.298506   0.700104
44  144   0.968422   0.029224   0.002354
45  145   0.029218   0.524128   0.446654
46  146   0.130497   0.579122   0.290381
47  147   0.023003   0.499443   0.477553
48  148   0.022195   0.527769   0.450036
49  149   0.983695   0.015325   0.000980
50  150   0.942703   0.052154   0.005144
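The submission contains raw class probabilities, matching the three-column format shown above. As a quick sanity check, you can map each row back to its most likely species name using the species column index we saved from get_dummies; a minimal sketch:

import numpy as np

# Pick the highest-probability column for each row and look up its name.
predicted_species = species[np.argmax(pred, axis=1)]
print(predicted_species[0:5])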
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import EarlyStopping
from sklearn import metrics
import pandas as pd
import numpy as np

df = pd.read_csv(
    "https://data.heatonresearch.com/data/t81-558/datasets/"+\
    "kaggle_auto_train.csv", 
    na_values=['NA', '?'])

cars = df['name']

# Handle missing values by imputing the median horsepower
df['horsepower'] = df['horsepower'].fillna(df['horsepower'].median())

# Pandas to Numpy
x = df[['cylinders', 'displacement', 'horsepower', 'weight',
       'acceleration', 'year', 'origin']].values
y = df['mpg'].values # regression

# Split into train/test
x_train, x_test, y_train, y_test = train_test_split(    
    x, y, test_size=0.25, random_state=42)

# Build the neural network
model = Sequential()
model.add(Dense(25, input_dim=x.shape[1], activation='relu')) # Hidden 1
model.add(Dense(10, activation='relu')) # Hidden 2
model.add(Dense(1)) # Output
model.compile(loss='mean_squared_error', optimizer='adam')
monitor = EarlyStopping(monitor='val_loss', min_delta=1e-3, patience=5, 
                        verbose=1, mode='auto', restore_best_weights=True)
model.fit(x_train,y_train,validation_data=(x_test,y_test),
          verbose=2,callbacks=[monitor],epochs=1000)

# Predict
pred = model.predict(x_test)
Train on 261 samples, validate on 88 samples
Epoch 1/1000
261/261 - 0s - loss: 382597.1196 - val_loss: 246687.4858
Epoch 2/1000
261/261 - 0s - loss: 192257.0072 - val_loss: 98804.3558
Epoch 3/1000
261/261 - 0s - loss: 67605.7908 - val_loss: 28617.0703
...
Epoch 294/1000
261/261 - 0s - loss: 25.1895 - val_loss: 20.4868
Epoch 295/1000
Restoring model weights from the end of the best epoch.
261/261 - 0s - loss: 23.6036 - val_loss: 20.3795
Epoch 00295: early stopping
import numpy as np

# Measure RMSE; it is a common error metric for regression.
score = np.sqrt(metrics.mean_squared_error(y_test, pred))
print("Final score (RMSE): {}".format(score))
Final score (RMSE): 4.5134384517538795
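RMSE is the square root of the mean squared difference between predicted and actual values:

$$ \mbox{RMSE} = \sqrt{\frac{1}{N}\sum_{i=1}^{N}(\hat{y}_i - y_i)^2} $$

The square root puts the error back into the units of the target, so this model is off by roughly 4.5 MPG on a typical validation car.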
import pandas as pd

# Generate Kaggle submit file

# Encode feature vector
df_test = pd.read_csv(
    "https://data.heatonresearch.com/data/t81-558/datasets/"+\
    "kaggle_auto_test.csv", na_values=['NA','?'])

# Convert to numpy - regression
ids = df_test['id']
df_test.drop('id', axis=1, inplace=True)

# Handle missing values, imputing with the median from the training data
df_test['horsepower'] = df_test['horsepower'].\
    fillna(df['horsepower'].median())

x = df_test[['cylinders', 'displacement', 'horsepower', 'weight',
       'acceleration', 'year', 'origin']].values

# Generate predictions
pred = model.predict(x)

# Create submission data set

df_submit = pd.DataFrame(pred)
df_submit.insert(0,'id',ids)
df_submit.columns = ['id','mpg']

# Write submit file locally
df_submit.to_csv("auto_submit.csv", index=False) 

print(df_submit)
     id        mpg
0   350  29.112602
1   351  27.803200
2   352  27.981804
3   353  30.487831
4   354  27.227440
5   355  26.438324
6   356  27.886986
7   357  29.103935
8   358  26.447609
9   359  30.027260
10  360  30.312553
11  361  30.712151
12  362  23.952263
13  363  24.858467
14  364  23.459129
15  365  22.638985
16  366  26.032127
17  367  26.197884
18  368  28.448906
19  369  28.138954
20  370  27.352821
21  371  27.313377
22  372  26.464119
23  373  26.689583
24  374  26.546562
25  375  27.829781
26  376  27.466354
27  377  30.343369
28  378  29.985909
29  379  27.807251
30  380  28.450882
31  381  26.574844
32  382  28.199501
33  383  29.615051
34  384  29.048317
35  385  29.320534
36  386  29.582710
37  387  24.533165
38  388  24.426888
39  389  24.658607
40  390  21.805504
41  391  26.026482
42  392  24.947670
43  393  26.902489
44  394  26.575218
45  395  33.546684
46  396  24.233910
47  397  28.609993
48  398  28.913261
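Before uploading, it is worth confirming that the predictions fall in a plausible range relative to the training target. A minimal sketch, reusing the y array of training MPG values from above:

# Compare the spread of the predictions to the training target.
print("Training mpg range: {:.1f} to {:.1f}".format(y.min(), y.max()))
print("Predicted mpg range: {:.1f} to {:.1f}".format(pred.min(), pred.max()))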