# Conv_example.py
import numpy as np
import pandas as pd

from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error, r2_score

from keras.models import Sequential
from keras.layers import Dense, Dropout, Conv1D, Flatten, MaxPooling1D
from keras import backend as K



df = pd.read_csv("LUCAS.csv",header=0)

input_data = df[df.columns[4:]].values
output_data = df[["SOC"]].values
# Conv1D expects inputs shaped (samples, steps, channels), so add a single
# trailing channel dimension to the 2-D spectral matrix.
input_data = np.expand_dims(input_data, axis=2)

# Hold out a third of the samples as an independent test set; the fixed
# random_state makes the split reproducible.
input_train, input_test, output_train, output_test = train_test_split(
    input_data, output_data, test_size=0.33, random_state=42)


# Number of spectral bands per sample (used to define the network input)
n_features = input_train.shape[1]


def coeff_determination(y_true, y_pred):
    """Coefficient of determination (R^2) as a Keras training metric."""
    ss_res = K.sum(K.square(y_true - y_pred))
    ss_tot = K.sum(K.square(y_true - K.mean(y_true)))
    return 1.0 - ss_res / (ss_tot + K.epsilon())
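# Note: during training Keras evaluates this metric batch by batch and
# reports a running average, so the value logged by fit() can differ
# slightly from the R^2 computed on the full arrays below.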




# Build the Keras 1-D CNN model
K.clear_session()

model = Sequential()
# Two 1-D convolutional layers extract local features from the spectra; the
# wide first kernel (27 bands) spans neighbouring wavelengths.
model.add(Conv1D(filters=64, kernel_size=27, activation='relu',
                 input_shape=(n_features, 1)))
model.add(Conv1D(filters=64, kernel_size=3, activation='relu'))
model.add(Dropout(0.5))
model.add(MaxPooling1D(pool_size=2))
model.add(Flatten())
model.add(Dense(100, activation='relu'))
# Single output unit for the SOC regression target; ReLU keeps the
# predictions non-negative.
model.add(Dense(1, activation='relu'))



model.compile(optimizer="RMSprop", loss="mean_squared_error",metrics=[coeff_determination])


model.fit(input_train,output_train,epochs=200,batch_size=32)
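# Note: a validation set could also be monitored during training via the
# validation_data argument of model.fit(); here the test split is evaluated
# explicitly after training instead.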

# Predictions for the calibration (training) and validation (test) sets
calculated_cal = model.predict(input_train)
calculated_val = model.predict(input_test)

# Goodness-of-fit statistics on both splits
rmse_train = np.sqrt(mean_squared_error(output_train, calculated_cal))
rmse_val = np.sqrt(mean_squared_error(output_test, calculated_val))
R2_train = r2_score(output_train, calculated_cal)
R2_val = r2_score(output_test, calculated_val)
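# RMSE is reported in the same units as the SOC column; R^2 is unitless.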



print("RMSE (train):", rmse_train)
print("RMSE (test): ", rmse_val)
print("R2 (train):  ", R2_train)
print("R2 (test):   ", R2_val)
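
# Optionally persist the fitted network for reuse (the file name below is
# just a placeholder), e.g.:
# model.save("conv_soc_model.h5")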