alexnet.py
from tensorflow.keras import layers, Model


def alexnet():
    """Build AlexNet (Krizhevsky et al., 2012) with the Keras functional API.

    Batch normalization is used where the original paper applied local
    response normalization.
    """
    # Input layer: 224x224 RGB images.
    x_in = layers.Input(shape=(224, 224, 3))
    # 1st convolutional layer: 96 filters, 11x11 kernel, stride 4.
    x = layers.Conv2D(filters=96,
                      kernel_size=11,
                      strides=4,
                      padding="same",
                      activation="relu")(x_in)
    x = layers.BatchNormalization()(x)
    x = layers.MaxPool2D(pool_size=3, strides=2)(x)
    # 2nd convolutional layer: 256 filters, 5x5 kernel.
    x = layers.Conv2D(filters=256,
                      kernel_size=5,
                      padding="same",
                      activation="relu")(x)
    x = layers.BatchNormalization()(x)
    x = layers.MaxPool2D(pool_size=3, strides=2)(x)
    # 3rd convolutional layer: 384 filters, 3x3 kernel.
    x = layers.Conv2D(filters=384,
                      kernel_size=3,
                      padding="same",
                      activation="relu")(x)
    # 4th convolutional layer: 384 filters, 3x3 kernel.
    x = layers.Conv2D(filters=384,
                      kernel_size=3,
                      padding="same",
                      activation="relu")(x)
    # 5th convolutional layer: 256 filters, 3x3 kernel.
    x = layers.Conv2D(filters=256,
                      kernel_size=3,
                      padding="same",
                      activation="relu")(x)
    x = layers.BatchNormalization()(x)
    x = layers.MaxPool2D(pool_size=3, strides=2)(x)
    # Fully connected layers with dropout for regularization.
    x = layers.Flatten()(x)
    x = layers.Dense(units=4096, activation="relu")(x)
    x = layers.Dropout(rate=0.5)(x)
    x = layers.Dense(units=4096, activation="relu")(x)
    x = layers.Dropout(rate=0.5)(x)
    # Output layer: softmax over the 1000 ImageNet classes.
    x_out = layers.Dense(units=1000, activation="softmax")(x)
    # Model.
    model = Model(inputs=x_in, outputs=x_out)
    return model
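
# Spatial dimensions through the network, for reference (computed from the
# layer parameters above): conv1 with stride 4 and "same" padding maps
# 224 -> 56; each 3x3/stride-2 max pool then maps 56 -> 27, 27 -> 13, and
# 13 -> 6, so the flattened feature vector has 6 * 6 * 256 = 9216 units.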
if __name__ == "__main__":
    from tensorflow.keras.utils import plot_model

    model = alexnet()
    model.summary()
    # Rendering the diagram requires the pydot and graphviz packages.
    plot_model(model, to_file="alexnet.png", show_shapes=True)
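    # A minimal smoke test (an addition, not in the original file): push a
    # random batch through the network and confirm the output is one softmax
    # vector over the 1000 classes.
    import numpy as np

    dummy = np.random.rand(1, 224, 224, 3).astype("float32")
    preds = model.predict(dummy)
    assert preds.shape == (1, 1000)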