diff --git a/models/MyModel/Model - Architecture.jpg b/models/MyModel/Model - Architecture.jpg
new file mode 100644
index 0000000..ba24f9a
Binary files /dev/null and b/models/MyModel/Model - Architecture.jpg differ
diff --git a/models/MyModel/Model - Train and Validation Accuracy.jpg b/models/MyModel/Model - Train and Validation Accuracy.jpg
new file mode 100644
index 0000000..219eaf5
Binary files /dev/null and b/models/MyModel/Model - Train and Validation Accuracy.jpg differ
diff --git a/models/MyModel/lite.py b/models/MyModel/lite.py
new file mode 100644
index 0000000..e4d9165
--- /dev/null
+++ b/models/MyModel/lite.py
@@ -0,0 +1,67 @@
+# import the necessary packages
+from keras.models import Sequential
+from keras.layers.normalization import BatchNormalization
+from keras.layers.convolutional import Conv2D
+from keras.layers.convolutional import MaxPooling2D
+from keras.layers.core import Activation
+from keras.layers.core import Flatten
+from keras.layers.core import Dropout
+from keras.layers.core import Dense
+from keras import backend as K
+
+class Lite:
+    @staticmethod
+    def build(width, height, depth, classes):
+        # initialize the model along with the input shape to be
+        # "channels last" and the channels dimension itself
+        model = Sequential()
+        inputShape = (height, width, depth)
+        chanDim = -1
+
+        # if we are using "channels first", update the input shape
+        # and channels dimension
+        if K.image_data_format() == "channels_first":
+            inputShape = (depth, height, width)
+            chanDim = 1
+
+        # CONV => RELU => POOL
+        model.add(Conv2D(32, (3, 3), padding="same",
+            input_shape=inputShape))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization(axis=chanDim))
+        model.add(MaxPooling2D(pool_size=(3, 3)))
+        model.add(Dropout(0.25))
+
+        # (CONV => RELU) * 2 => POOL
+        model.add(Conv2D(64, (3, 3), padding="same"))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization(axis=chanDim))
+        model.add(Conv2D(64, (3, 3), padding="same"))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization(axis=chanDim))
+        model.add(MaxPooling2D(pool_size=(2, 2)))
+        model.add(Dropout(0.25))
+
+        # (CONV => RELU) * 2 => POOL
+        model.add(Conv2D(128, (3, 3), padding="same"))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization(axis=chanDim))
+        model.add(Conv2D(128, (3, 3), padding="same"))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization(axis=chanDim))
+        model.add(MaxPooling2D(pool_size=(2, 2)))
+        model.add(Dropout(0.25))
+
+        # first (and only) set of FC => RELU layers
+        model.add(Flatten())
+        model.add(Dense(1024))
+        model.add(Activation("relu"))
+        model.add(BatchNormalization())
+        model.add(Dropout(0.5))
+
+        # softmax classifier
+        model.add(Dense(classes))
+        model.add(Activation("softmax"))
+
+        # return the constructed network architecture
+        return model
\ No newline at end of file
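
Reviewer note (not part of the patch): a minimal sketch of how the new `Lite.build` factory could be exercised. It assumes a Keras 2.x install that matches the legacy import paths in `lite.py`, that the script runs alongside `lite.py`, and example values for the input size, class count, and learning rate; the training/evaluation pipeline itself is not included in this diff.

```python
# usage sketch (assumptions: Keras 2.x, script located next to lite.py,
# 64x64 RGB inputs and 5 classes chosen purely as example values)
from keras.optimizers import Adam

from lite import Lite

# build the network for the example input shape and class count
model = Lite.build(width=64, height=64, depth=3, classes=5)

# compile with categorical cross-entropy to match the softmax output layer
opt = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=opt,
    metrics=["accuracy"])

# print a per-layer summary of the architecture
model.summary()
```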