DEEP LEARNING

An Artificial Neural Network compared with Linear Regression

In [1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt 
import seaborn as sns

import os
path = "C:/Users/HP/OneDrive/Documents/Python Anaconda/TensorFlow_FILES/DATA"
os.chdir(path)

df = pd.read_csv("kc_house_data.csv")

# Exploratory Analysis
df.isnull().sum() # count missing values per column
Out[1]:
id               0
date             0
price            0
bedrooms         0
bathrooms        0
sqft_living      0
sqft_lot         0
floors           0
waterfront       0
view             0
condition        0
grade            0
sqft_above       0
sqft_basement    0
yr_built         0
yr_renovated     0
zipcode          0
lat              0
long             0
sqft_living15    0
sqft_lot15       0
dtype: int64
In [2]:
plt.figure(figsize=(12,6))
plt.hist(df['price'], bins=80)
plt.xlabel("price")
plt.show()
In [3]:
plt.figure(figsize=(12,6))
counts = df["bedrooms"].value_counts()
plt.bar(counts.index, counts.values)
plt.xlabel("num_of_bedrooms")
plt.show()
In [4]:
print(df.corr()['price'].sort_values().tail(6)) # features most correlated with price
bathrooms        0.525906
sqft_living15    0.585241
sqft_above       0.605368
grade            0.667951
sqft_living      0.701917
price            1.000000
Name: price, dtype: float64
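The same information can be scanned for all feature pairs at once with a heatmap; a minimal sketch using the seaborn import from the first cell (note: on newer pandas, df.corr() may need numeric_only=True, since the raw date column is still a string at this point):

plt.figure(figsize=(12,8))
sns.heatmap(df.corr(), cmap='coolwarm') # pairwise correlations; newer pandas: df.corr(numeric_only=True)
plt.show()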
In [5]:
df.sort_values('price',ascending=False).head(10) # peek at the priciest listings (no output: not the cell's last expression)
# Outlier removal: drop the top 0.5% by price, then the top 0.5% by sqft_living
percent = 0.005
n = round(len(df)*percent) # number of rows trimmed from each tail
print(n)
without_outliers = df.sort_values('price',ascending=False).iloc[n:]
without_outliers = without_outliers.sort_values('sqft_living',ascending=False).iloc[n:]
108
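The same trim can also be written with quantile cut-offs; a minimal sketch of a roughly equivalent filter (without_outliers_q is an illustrative name, and quantile thresholds keep approximately, not exactly, the same rows as the iloc slicing above):

# Keep rows at or below the 99.5th percentile of price and of sqft_living
price_cap = df['price'].quantile(1 - percent)
sqft_cap = df['sqft_living'].quantile(1 - percent)
without_outliers_q = df[(df['price'] <= price_cap) & (df['sqft_living'] <= sqft_cap)]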
In [6]:
X = without_outliers[['sqft_living']]
y = without_outliers['price']
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.3)
from sklearn.linear_model import LinearRegression
lm = LinearRegression() # linear regression, to extract the fitted coefficient
lm.fit(X_train,y_train)
coefficient = pd.DataFrame(lm.coef_, index=X.columns, columns=['Coefficient'])

coef = round(coefficient['Coefficient'][0],2)
inter = round(lm.intercept_,2)
print('Coefficient: ', coef)
print('Intercept: ', inter)
Coefficient:  242.65
Intercept:  25413.06
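For a like-for-like comparison with the network later on, the linear model can be scored on its own held-out split; a minimal sketch with sklearn metrics (lm_pred is an illustrative name; X_test and y_test here are the single-feature split from the cell above):

from sklearn.metrics import mean_absolute_error, mean_squared_error

lm_pred = lm.predict(X_test)
print('Root mean squared error: ', round(np.sqrt(mean_squared_error(y_test, lm_pred)), 2))
print('Mean absolute error: ', round(mean_absolute_error(y_test, lm_pred), 2))
print('R^2: ', round(lm.score(X_test, y_test), 2))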
In [7]:
coefficient
Out[7]:
Coefficient
sqft_living 242.651551
In [8]:
X = np.array(without_outliers.sqft_living)
y = np.array(without_outliers.price)

plt.figure(figsize=(12,6))
plt.scatter(X, y, alpha=0.1)

# Fit line from the regression above
f = lambda x: coef*x + inter
# Plot fit
plt.plot(X,f(X),lw=2.5, c="r", label="predicted price for sqft")
plt.xlabel("sqft_living")
plt.ylabel("price")
plt.legend()
plt.show()
In [9]:
plt.figure(figsize=(12,6))
sns.scatterplot(x='long',y='lat',data=without_outliers, edgecolor=None, hue='price', palette='ch:s=-.2,r=.6') # Price map
plt.show()
In [20]:
# Feature engineering
df['date'] = pd.to_datetime(df['date']) # parse the date strings into datetimes
df['year'] = df['date'].apply(lambda date: date.year)
df['month'] = df['date'].apply(lambda date: date.month)
df = df.drop('date', axis=1) # no longer needed once year and month are extracted

df = df.drop(['id', 'zipcode', 'yr_renovated'], axis=1) # not useful as raw numeric features
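Dropping yr_renovated throws away some signal, since a renovation often raises the price. A minimal sketch of an alternative not used here, on a fresh copy of the data because the column is already gone at this point (df_alt and was_renovated are illustrative names):

# Hypothetical alternative: encode renovation as a 0/1 flag instead of dropping the year
df_alt = pd.read_csv("kc_house_data.csv")
df_alt['was_renovated'] = (df_alt['yr_renovated'] > 0).astype(int)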
In [21]:
df.head()
Out[21]:
price bedrooms bathrooms sqft_living sqft_lot floors waterfront view condition grade sqft_above sqft_basement yr_built lat long sqft_living15 sqft_lot15 year month
0 221900.0 3 1.00 1180 5650 1.0 0 0 3 7 1180 0 1955 47.5112 -122.257 1340 5650 2014 10
1 538000.0 3 2.25 2570 7242 2.0 0 0 3 7 2170 400 1951 47.7210 -122.319 1690 7639 2014 12
2 180000.0 2 1.00 770 10000 1.0 0 0 3 6 770 0 1933 47.7379 -122.233 2720 8062 2015 2
3 604000.0 4 3.00 1960 5000 1.0 0 0 5 7 1050 910 1965 47.5208 -122.393 1360 5000 2014 12
4 510000.0 3 2.00 1680 8080 1.0 0 0 3 8 1680 0 1987 47.6168 -122.045 1800 7503 2015 2
In [22]:
# Splitting
X = df.drop('price', axis=1).values # .values gives NumPy arrays, which Keras expects
y = df['price'].values
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.3)

# Scaling
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Model preparation
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
from tensorflow.keras.callbacks import EarlyStopping

print(X_train.shape) # the feature count sets the number of neurons per layer
(15117, 18)
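The 18 units per layer in the next cell mirror the 18 input features printed above; deriving the width from the data avoids hard-coding it. A minimal sketch (n_features is an illustrative name):

n_features = X_train.shape[1] # 18 for this dataset
# e.g. model.add(Dense(n_features, activation='relu'))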
In [29]:
# Model
model = Sequential()
model.add(Dense(18, activation='relu'))
model.add(Dropout(0.2))

model.add(Dense(18, activation='relu'))
model.add(Dropout(0.2))

model.add(Dense(18, activation='relu'))
model.add(Dropout(0.2))

model.add(Dense(18, activation='relu'))
model.add(Dropout(0.2))

model.add(Dense(1))
model.compile(optimizer='adam', loss='mse')

# Model fitting
early_stop = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=25)
model.fit(x=X_train, y=y_train,
          validation_data=(X_test, y_test),
          batch_size=128, epochs=168, callbacks=[early_stop]) # mini-batches add gradient noise, which can counter overfitting
Epoch 6/168
119/119 [==============================] - 0s 2ms/step - loss: 118935425024.0000 - val_loss: 101773180928.0000
Epoch 7/168
119/119 [==============================] - 0s 3ms/step - loss: 122342247287.4667 - val_loss: 100049059840.0000
...
Epoch 167/168
119/119 [==============================] - 0s 2ms/step - loss: 51152246579.2000 - val_loss: 35386683392.0000
Epoch 168/168
119/119 [==============================] - 0s 2ms/step - loss: 55019443268.2667 - val_loss: 35645005824.0000
Out[29]:
<tensorflow.python.keras.callbacks.History at 0x213eb6cebc8>
In [30]:
losses = pd.DataFrame(model.history.history)
losses.plot()
plt.show()
In [31]:
# Model errors
from sklearn.metrics import mean_squared_error, mean_absolute_error, explained_variance_score
predictions = model.predict(X_test)
print('Root mean squared error: ',round(np.sqrt(mean_squared_error(y_test, predictions)),2))
print('Mean absolute error: ',round(mean_absolute_error(y_test, predictions),2))
print('Explained variance: ',round(explained_variance_score(y_test, predictions),2))
Root mean squared error:  188798.83
Mean absolute error:  111445.38
Explained variance:  0.76
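Summary metrics aside, a predicted-vs-actual scatter shows where the network struggles (typically the expensive tail); a minimal sketch using the objects defined above:

plt.figure(figsize=(12,6))
plt.scatter(y_test, predictions, alpha=0.1)
plt.plot(y_test, y_test, c='r', lw=2) # y = x reference: a perfect model would sit on this line
plt.xlabel('actual price')
plt.ylabel('predicted price')
plt.show()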
In [59]:
sample = df.drop('price', axis=1).iloc[0:5]
sample = scaler.transform(sample.values) # already shaped (5, 18); no reshape needed
In [60]:
samplePred = pd.DataFrame(model.predict(sample)).round() # DataFrame.round(); the round() builtin no longer works on frames in newer pandas
samplePred = samplePred.rename(columns={0: 'PredictionANN'})
print(samplePred)

df.head(5)
   PredictionANN
0       293866.0
1       582203.0
2       402116.0
3       383690.0
4       430621.0
Out[60]:
price bedrooms bathrooms sqft_living sqft_lot floors waterfront view condition grade sqft_above sqft_basement yr_built lat long sqft_living15 sqft_lot15 year month
0 221900.0 3 1.00 1180 5650 1.0 0 0 3 7 1180 0 1955 47.5112 -122.257 1340 5650 2014 10
1 538000.0 3 2.25 2570 7242 2.0 0 0 3 7 2170 400 1951 47.7210 -122.319 1690 7639 2014 12
2 180000.0 2 1.00 770 10000 1.0 0 0 3 6 770 0 1933 47.7379 -122.233 2720 8062 2015 2
3 604000.0 4 3.00 1960 5000 1.0 0 0 5 7 1050 910 1965 47.5208 -122.393 1360 5000 2014 12
4 510000.0 3 2.00 1680 8080 1.0 0 0 3 8 1680 0 1987 47.6168 -122.045 1800 7503 2015 2
In [58]:
test = df[['sqft_living']].iloc[0:5].copy() # .copy() avoids a SettingWithCopyWarning
test['PredictionLIR'] = test['sqft_living']*coef + inter # full fit line, intercept included
print(pd.DataFrame(test['PredictionLIR']))
   PredictionLIR
0      311740.06
1      649023.56
2      212253.56
3      501007.06
4      433065.06
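Finally, the two models' predictions can be lined up against the actual prices for the same five houses; a minimal sketch reusing samplePred and test from the cells above (comparison is an illustrative name):

comparison = pd.DataFrame({
    'ActualPrice': df['price'].iloc[0:5].values,
    'PredictionANN': samplePred['PredictionANN'].values,
    'PredictionLIR': test['PredictionLIR'].values,
})
print(comparison)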