Commit a070e2e

Author: whyboris (committed)

pre-trained works

1 parent 5935b4f commit a070e2e

File tree: 3 files changed, +185 -1 lines changed


.gitignore

Lines changed: 1 addition & 1 deletion
@@ -3,4 +3,4 @@
 __pycache__
 output
 venv
-
+*.h5

cat-dog.py

Lines changed: 106 additions & 0 deletions
@@ -0,0 +1,106 @@
from hack import hack   # hack.py is a local helper module in this repo
hack()

from keras import models
from keras import layers

# Small convnet trained from scratch on the cats-vs-dogs subset
model = models.Sequential()

model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D(2, 2))

model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))

model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))

model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))

model.add(layers.Flatten())

model.add(layers.Dropout(0.5))

model.add(layers.Dense(512, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))   # binary output: cat vs dog

model.summary()

from keras import optimizers

model.compile(optimizer=optimizers.RMSprop(lr=1e-4),
              loss='binary_crossentropy',
              metrics=['acc'])

import os

base_dir = '../catsdogssmall'
train_dir = os.path.join(base_dir, 'train')
test_dir = os.path.join(base_dir, 'test')

from keras.preprocessing.image import ImageDataGenerator

# Augment the training images; only rescale the test images
train_datagen = ImageDataGenerator(
    height_shift_range=0.2,
    horizontal_flip=True,
    rescale=1./255,
    rotation_range=40,
    shear_range=0.2,
    width_shift_range=0.2,
    zoom_range=0.2)

test_datagen = ImageDataGenerator(rescale=1./255)

train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary')

# The 'test' split is used as the validation data here
validation_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary')

# Sanity check: inspect the shape of one batch
for data_batch, labels_batch in train_generator:
    print('data batch shape:', data_batch.shape)
    print('labels batch shape:', labels_batch.shape)
    break

history = model.fit_generator(
    train_generator,
    steps_per_epoch=100,
    epochs=10,
    validation_data=validation_generator,
    validation_steps=50)

model.save('cats_and_dogs_small.h5')

# Plot the training curves
import matplotlib.pyplot as plt

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()

plt.figure()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()

plt.show()
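
A minimal usage sketch, not part of the diff above: one way the saved cats_and_dogs_small.h5 model could be reloaded to classify a single image, assuming the same Keras version. The image path is hypothetical, and class 1 is assumed to be 'dog' (flow_from_directory assigns class indices alphabetically).

from keras.models import load_model
from keras.preprocessing import image
import numpy as np

model = load_model('cats_and_dogs_small.h5')

# 'some_cat_or_dog.jpg' is a hypothetical example path
img = image.load_img('some_cat_or_dog.jpg', target_size=(150, 150))
x = image.img_to_array(img) / 255.    # same rescaling as training
x = np.expand_dims(x, axis=0)         # batch of one: (1, 150, 150, 3)

prob = model.predict(x)[0][0]         # sigmoid output
# assumes class 1 == 'dog' (alphabetical class indices)
print('dog' if prob > 0.5 else 'cat', prob)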

pre-trained.py

Lines changed: 78 additions & 0 deletions
@@ -0,0 +1,78 @@
print('Warning -- the first step of extracting features takes 5+ min!')

from hack import hack   # hack.py is a local helper module in this repo
hack()

import os
import numpy as np

from keras.preprocessing.image import ImageDataGenerator

from keras.applications import VGG16

# Pre-trained VGG16 convolutional base (ImageNet weights, no classifier head)
conv_base = VGG16(weights='imagenet',
                  include_top=False,
                  input_shape=(150, 150, 3))

conv_base.summary()

base_dir = '../catsdogssmall'

train_dir = os.path.join(base_dir, 'train')
validate_dir = os.path.join(base_dir, 'validation')
test_dir = os.path.join(base_dir, 'test')

datagen = ImageDataGenerator(rescale=1./255)

batch_size = 20

# Run each image through the frozen VGG16 base and store the resulting
# 4 x 4 x 512 feature maps together with the labels
def extract_features(directory, sample_count):
    features = np.zeros(shape=(sample_count, 4, 4, 512))
    labels = np.zeros(shape=(sample_count))
    generator = datagen.flow_from_directory(
        directory,
        target_size=(150, 150),
        batch_size=batch_size,
        class_mode='binary')
    i = 0
    for inputs_batch, labels_batch in generator:
        features_batch = conv_base.predict(inputs_batch)
        features[i * batch_size : (i + 1) * batch_size] = features_batch
        labels[i * batch_size : (i + 1) * batch_size] = labels_batch
        i += 1
        if i * batch_size >= sample_count:
            break
    return features, labels

train_features, train_labels = extract_features(train_dir, 2000)
validation_features, validation_labels = extract_features(validate_dir, 1000)
test_features, test_labels = extract_features(test_dir, 1000)

# Flatten the feature maps for the densely connected classifier
train_features = np.reshape(train_features, (2000, 4 * 4 * 512))
validation_features = np.reshape(validation_features, (1000, 4 * 4 * 512))
test_features = np.reshape(test_features, (1000, 4 * 4 * 512))

from keras import models
from keras import layers
from keras import optimizers

model = models.Sequential()
model.add(layers.Dense(256, activation='relu', input_dim=4 * 4 * 512))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(1, activation='sigmoid'))

model.compile(optimizer=optimizers.RMSprop(lr=2e-5),
              loss='binary_crossentropy',
              metrics=['acc'])

history = model.fit(train_features, train_labels,
                    epochs=30,
                    batch_size=20,
                    validation_data=(validation_features, validation_labels))
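
A follow-up sketch, not part of the diff above: the test features extracted in this script are never used; evaluating the trained classifier on them could look like this, using the same Keras API as the rest of the file.

# Evaluate on the held-out test features extracted above
test_loss, test_acc = model.evaluate(test_features, test_labels)
print('test acc:', test_acc)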
