import os
import pathlib

import PIL
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import ReduceLROnPlateau

skin_images_dir = os.path.join('..', 'HAM_3_Categories', 'data_augmented')
data_dir = pathlib.Path(skin_images_dir)
image_count = len(list(data_dir.glob('*/*.jpg')))
print(image_count)

# mel = list(data_dir.glob('mel/*'))
# image = PIL.Image.open(str(mel[1]))
# image.show()

image_size = (75, 100)   # (height, width) used by the generator and the model input
batch_size = 256
epochs = 10

datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    # validation_split=0.2  # enable this (and subset='training' below) to hold out a validation set
)

train_generator = datagen.flow_from_directory(
    skin_images_dir,
    seed=1234,
    # subset='training',  # only meaningful when validation_split is set above
    target_size=image_size,
    batch_size=batch_size,
)

# val_generator = datagen.flow_from_directory(
#     skin_images_dir,
#     seed=1234,
#     subset='validation',
#     target_size=image_size,
#     batch_size=batch_size,
# )
# image_batch, label_batch = next(val_generator)
# image_batch.shape, label_batch.shape

print(train_generator.class_indices)
labels = '\n'.join(sorted(train_generator.class_indices.keys()))
with open('ham_labels.txt', 'w') as f:
    f.write(labels)

num_classes = 3
input_shape = (75, 100, 3)

model = tf.keras.Sequential()
model.add(Conv2D(32, 3, activation='relu', padding='same', input_shape=input_shape))
model.add(Conv2D(32, 3, activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))

model.add(Conv2D(64, 3, activation='relu', padding='same'))
model.add(Conv2D(64, 3, activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))

model.add(Conv2D(128, 3, activation='relu', padding='same'))
model.add(Conv2D(128, 3, activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))

model.add(Conv2D(256, 3, activation='relu', padding='same'))
model.add(Conv2D(256, 3, activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))

model.add(Conv2D(128, 1, activation='relu', padding='same'))  # 1x1 conv to shrink channels before Flatten
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(num_classes, activation='softmax'))

# TF2 Adam uses learning_rate (lr is deprecated) and a float epsilon by default
optimizer = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, amsgrad=False)

model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Halve the learning rate whenever training accuracy plateaus for 3 epochs
learning_rate_reduction = ReduceLROnPlateau(monitor='accuracy',
                                            patience=3,
                                            verbose=1,
                                            factor=0.5,
                                            min_lr=0.00001)

model.summary()
print('Number of trainable weights = {}'.format(len(model.trainable_weights)))

history = model.fit(train_generator,
                    epochs=epochs,
                    steps_per_epoch=len(train_generator),
                    verbose=1,
                    callbacks=[learning_rate_reduction])

model.save('my_model_0211_75_100')
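
# --- Optional inference sketch (not part of the original training run). ---
# Shows how the SavedModel directory and ham_labels.txt written above could be used
# to classify a single image. The image path 'example_lesion.jpg' is hypothetical;
# substitute any JPEG of your own.
import numpy as np

loaded = tf.keras.models.load_model('my_model_0211_75_100')
class_names = open('ham_labels.txt').read().splitlines()

img = tf.keras.preprocessing.image.load_img('example_lesion.jpg', target_size=image_size)
x = tf.keras.preprocessing.image.img_to_array(img) / 255.0  # same rescaling as the training generator
x = np.expand_dims(x, axis=0)                               # shape (1, 75, 100, 3)

probs = loaded.predict(x)[0]
print(class_names[int(np.argmax(probs))], probs)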