I have been trying to work out how to do classification using ImageDataGenerator and not having much luck. When I try to fit the model, the accuracy reported at the start is different each run, anywhere from .14% to 90%, but then it stays at that accuracy no matter how much training time I give it. I am not sure where I have gone wrong.
I am using the fruit dataset from Kaggle: https://www.kaggle.com/moltean/fruits
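The dataset unzips into one sub-folder per fruit class under Training and Test, which is the layout flow_from_directory expects, so the number of classes can be read straight off the folder structure (the path here just mirrors the trainDirectory used in my script below):

import os

# One sub-folder per class; flow_from_directory infers the labels from these.
# Path assumed to match trainDirectory in the script below.
print(len(os.listdir("fruits 360/Training")))

Here is the full script: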
import tensorflow as tf
from tensorflow import keras
# Importing all necessary libraries
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from keras.optimizers import SGD
# Helper libraries
import numpy as np
import matplotlib.pyplot as plt
print(tf.__version__)
trainDirectory = "fruits 360/Training"
testDirectory = "fruits 360/Test"
train_datagen = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.1,
    zoom_range=0.1,
    horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_directory(
    trainDirectory,
    target_size=(100, 100),
    batch_size=32,
    class_mode='categorical',
    shuffle=True)
validation_generator = test_datagen.flow_from_directory(
    testDirectory,
    target_size=(100, 100),
    batch_size=32,
    shuffle=True)
img_width=100
img_height=100
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)
model = Sequential()
model.add(Conv2D(32, (2, 2), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
opt = SGD(lr=0.01)
model.compile(loss=keras.losses.CategoricalCrossentropy(),
              optimizer=opt,
              metrics=['accuracy'])
model.fit(
    train_generator,
    steps_per_epoch=2000,
    epochs=3,
    validation_data=validation_generator,
    validation_steps=800)
#output
Epoch 1/3
200/200 [==============================] - 34s 172ms/step - loss: 1.1921e-07 - accuracy: 0.2281 - val_loss: 1.1921e-07 - val_accuracy: 0.0230
Epoch 2/3
200/200 [==============================] - 30s 149ms/step - loss: 1.1921e-07 - accuracy: 0.2254 - val_loss: 1.1921e-07 - val_accuracy: 0.0296
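For completeness, here is a small diagnostic sketch that can be run on the generators themselves; it is not part of the training run above, and num_classes and class_indices are standard attributes of the iterator returned by flow_from_directory:

# Diagnostic sketch: confirm what the generators are actually emitting.
print(train_generator.num_classes)              # number of class folders found
print(list(train_generator.class_indices)[:5])  # first few class-name -> index entries
x_batch, y_batch = next(train_generator)
print(x_batch.shape, y_batch.shape)             # with class_mode='categorical', labels are one-hot: (32, num_classes)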