Hi,
I am a student learning deep learning, and I need help resolving an error. I have a time-series dataset with readings from 10 sensors, and I want to classify it using a CNN. I am getting an error when fitting the model.
#import the modules
import os
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout, Conv1D, MaxPooling1D
#read the path
file_path = r"C:\Users\USER\Desktop\vevi\Project\Final year project\Code\common review data\csv"
#list all the files from the directory
file_list = os.listdir(file_path)
file_list
df_concat = pd.concat([pd.read_csv(os.path.join(file_path, f)) for f in file_list], ignore_index=True)
df_concat
df_concat = df_concat.drop(columns=['S.No'])
df_concat
# build a list of per-row sequences (note: raw_sequence is not used later in the code)
raw_sequence = []
for i in range(len(df_concat)):
    raw_sequence.append(list(df_concat.iloc[i, :]))
trainx=df_concat.iloc[:,:10].to_numpy()
trainx.shape
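# reshape to (samples, timesteps, features) = (1400, 10, 1) for the Conv1D input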
trainx= trainx.reshape(1400,10,1)
trainx.shape
trainy=df_concat.iloc[:,10].to_numpy()
from keras.utils import to_categorical
trainy=to_categorical(trainy)
trainy= trainy.reshape(1400,1,1)
trainy.shape
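# note: with 5 classes, to_categorical should make trainy shape (1400, 5),
# so I am not sure the reshape to (1400, 1, 1) above is valid for that shape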
n_outputs=5
n_timesteps=10
n_features=1
verbose, epochs, batch_size = 0, 10, 32
model = Sequential()
model.add(Conv1D(filters=64, kernel_size=1, activation='relu', input_shape=(n_timesteps, n_features)))
model.add(Conv1D(filters=64, kernel_size=1, activation='relu'))
model.add(Dropout(0.5))
model.add(MaxPooling1D(pool_size=2))
model.add(Flatten())
model.add(Dense(100, activation='relu'))
model.add(Dense(n_outputs, activation='softmax'))
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# fit the network
model.fit(trainx, trainy, epochs=epochs, batch_size=batch_size, verbose=verbose)
I have included my code above. While debugging, I also tried changing the loss function to 'sparse_categorical_crossentropy', but the error was not resolved. Kindly help me solve this issue.
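For reference, here is a minimal sketch of how I understand the target shapes each loss function expects (the dummy labels and shapes below are my own assumptions, not taken from my dataset):

import numpy as np
from keras.utils import to_categorical

# dummy integer class labels in 0..4, assuming 5 classes like my data
labels = np.random.randint(0, 5, size=(1400,))

# categorical_crossentropy expects one-hot targets matching the
# softmax output, i.e. shape (1400, 5)
onehot_targets = to_categorical(labels)
print(onehot_targets.shape)   # (1400, 5)

# sparse_categorical_crossentropy expects plain integer targets,
# i.e. shape (1400,), without applying to_categorical
print(labels.shape)           # (1400,)

Is my understanding of these shapes correct, and is that where my fit call is going wrong?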