1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
|
import os
from datetime import datetime

import tensorboard
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import BatchNormalization, Conv2D, Dense, Dropout, Flatten, MaxPool2D
from tensorflow.keras.optimizers import SGD, Adam, RMSprop
from tensorflow.keras.preprocessing.image import ImageDataGenerator

from src.sup.evaluation import *
from src.sup.support import *
from src.sup.test_set_eval import *
# End-to-end training script: builds a small CNN that classifies images of
# human organs into six categories, trains it with on-the-fly augmentation,
# evaluates it, and saves the resulting model + weights.
model_name = "end-to-end"

# Load the TensorBoard notebook extension.
#%load_ext tensorboard
# call inline plt.
# Clear any logs from previous runs
#!del -rf

# Class labels. The generator's class_indices (derived from directory names)
# is the authoritative label<->index mapping; this list drives plotting helpers.
classes = ['Heart', 'Brain', 'Eye', 'Kidney', 'Skull', 'Other']

# Expected dataset layout: <root>/train/<class>/ and <root>/validation/<class>/
root_dir = 'F:/JetBrain Project Files/Pycharm/human_organs_image_classifiaction/datasets/'
train_dir = os.path.join(root_dir, 'train/')
validation_dir = os.path.join(root_dir, 'validation/')

# Per-class directory paths (path_update / plot_sample_of_img come from the
# src.sup star imports — assumed to map a base dir over a list of names;
# TODO confirm against src/sup/support.py).
tr_heart_dir, tr_brain_dir, tr_eye_dir, tr_kidney_dir, tr_skull_dir, tr_other_dir = path_update(train_dir, classes)
vl_heart_dir, vl_brain_dir, vl_eye_dir, vl_kidney_dir, vl_skull_dir, vl_other_dir = path_update(validation_dir, classes)

# Preview a 4x4 grid of sample training images (hearts + eyes).
plot_sample_of_img(
    4, 4,
    path_update(tr_heart_dir, os.listdir(tr_heart_dir))[10:18]
    + path_update(tr_eye_dir, os.listdir(tr_eye_dir))[12:19],
)

# Training images get heavy augmentation to combat overfitting on a small set.
train_gen_tmp = ImageDataGenerator(
    rescale=1. / 255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
)
# BUG FIX: was rescale=1/225. (typo). Validation pixels must be scaled by
# exactly the same 1/255 factor as training pixels, otherwise every
# validation metric is computed on differently-scaled inputs.
validation_gen_tmp = ImageDataGenerator(rescale=1. / 255)

train_gen = train_gen_tmp.flow_from_directory(
    train_dir,
    target_size=(150, 150),
    color_mode='rgb',
    class_mode='categorical',  # one-hot labels -> categorical_crossentropy below
    batch_size=20,
    shuffle=True,
    seed=42,
)
validation_gen = validation_gen_tmp.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    color_mode='rgb',
    class_mode='categorical',
    batch_size=20,
    shuffle=True,
    seed=42,
)

# Whole batches per epoch (integer division drops the final partial batch).
STEP_SIZE_TRAIN = train_gen.n // train_gen.batch_size
STEP_SIZE_VALID = validation_gen.n // validation_gen.batch_size

# BUG FIX: the original `dict((v,k) for v,k in d.items())` unpacked into
# (v, k) and rebuilt (v, k) — an identity copy, not an inversion. Invert
# class_indices (name -> index) so clToInt_dict maps index -> class name,
# as the name and its use in prediction display require.
clToInt_dict = {v: k for k, v in train_gen.class_indices.items()}

# Simple 3-conv-block CNN: 150x150x3 input -> 6-way softmax.
model = tf.keras.models.Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
model.add(MaxPool2D(2, 2))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPool2D(2, 2))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(6, activation='softmax'))  # 6 = len(classes)

model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',  # matches class_mode='categorical'
    metrics=['accuracy'],
)

# Define the Keras TensorBoard callback with a per-run timestamped log dir.
logdir = "logs/fit/" + datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)

# BUG FIX: fit_generator is deprecated (removed in modern TF2) — Model.fit
# accepts generators directly. Also wire in tensorboard_callback, which was
# previously constructed but never used.
history = model.fit(
    train_gen,
    steps_per_epoch=STEP_SIZE_TRAIN,
    validation_data=validation_gen,
    validation_steps=STEP_SIZE_VALID,
    epochs=20,
    callbacks=[tensorboard_callback],
    verbose=1,
)

#visualize_model(model,img_path)
#call to the tensorboard
#%tensorboard --logdir logs

# Look at training performance (loss/accuracy curves).
acc_n_loss(history)

# BUG FIX: evaluate_generator is deprecated — Model.evaluate takes the
# generator directly.
model.evaluate(validation_gen, steps=STEP_SIZE_VALID)

# Held-out test-set evaluation + per-class diagnostics.
y_pred, y_test = test_eval(model, classes)
plot_confusion_metrix(y_test, y_pred, classes)
ROC_classes(6, y_test, y_pred, classes)

# BUG FIX: datetime.now() + model_name raised TypeError (datetime + str).
# Format the timestamp first so the saved artifacts get a unique name.
model_path, model_weight_path = save(model, datetime.now().strftime("%Y%m%d-%H%M%S") + model_name)
#rnd_predict(model_path,model_weight_path,img_path,clToInt_dict)
|