-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathconvnet_nn.py
84 lines (70 loc) · 3.34 KB
/
convnet_nn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
import numpy as np
from keras.models import Model, Sequential
from keras.layers import Dense, Dropout, Input, merge
from sklearn import preprocessing
# from keras.utils.visualize_util import plot

# Flattened convnet feature length (17 * 26 * 256) — presumably
# height x width x channels of the extracted feature maps; confirm
# against the feature-extraction script that wrote the .npy files.
INPUT_SHAPE = (17 * 26 * 256,)
# Length of one auxiliary BSIF feature vector (rows of the .txt files below).
AUX_INPUT_SHAPE = (4096,)
# True: train from scratch and save weights to PRE_TRAINED_WEIGHTS_FILE;
# False: skip training and load previously saved weights instead.
DO_TRAINING = True
# Directory of pre-extracted convnet features (.npy files).
features_dir = '../data-livdet-2015/z_features_252x324'
# Directory of hand-crafted BSIF feature text files.
aux_features_dir = '../data-livdet-2015/z_other_features/BSIF-DigPer-2015-features'
# Weights file written after training and read when DO_TRAINING is False.
PRE_TRAINED_WEIGHTS_FILE = 'pretrain.h5'
# Earlier single-input Sequential baseline, kept for reference:
# model = Sequential()
# model.add(Dense(32, activation='relu', input_shape=INPUT_SHAPE))
# model.add(Dropout(0.5))
# model.add(Dense(1, activation='sigmoid'))
# model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])

# Two-input classifier head: the flattened convnet features and the
# auxiliary BSIF features are concatenated and passed through three
# Dense(64) layers, dropout, and a single sigmoid output unit.
main_input = Input(shape=INPUT_SHAPE)  # flattened convnet features
aux_input = Input(shape=AUX_INPUT_SHAPE)  # BSIF auxiliary features
# NOTE(review): `merge` and the input=/output= keywords below are Keras 1.x
# APIs; Keras 2 replaced them with `concatenate` and inputs=/outputs=.
x = merge([main_input, aux_input], mode='concat')
# x = main_input
x = Dense(64, activation='relu')(x)
x = Dense(64, activation='relu')(x)
x = Dense(64, activation='relu')(x)
x = Dropout(0.5)(x)
# One sigmoid unit: output in [0, 1], thresholded at 0.5 downstream.
predictions = Dense(1, activation='sigmoid')(x)
model = Model(input=[main_input, aux_input], output=predictions)
# model = Model(input=main_input, output=predictions)
model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
print('Trainable weights', model.trainable_weights)
# plot(model, to_file='model.png')
if DO_TRAINING:
    # Load pre-extracted convnet features. Label convention: 1 = fake/spoof,
    # 0 = live — the same convention used for the test labels below.
    train_fake = np.load(features_dir + '/train_fake.npy')
    train_live = np.load(features_dir + '/train_live.npy')
    train_data = np.concatenate((train_fake, train_live))
    # Use an ndarray (not a plain Python list) for labels, consistent with
    # how test_labels is built in the evaluation section.
    train_labels = np.array([1] * len(train_fake) + [0] * len(train_live))
    del train_fake, train_live  # release the large arrays before scaling
    # Standardize features in place (zero mean, unit variance per column).
    preprocessing.scale(train_data, copy=False)
    # Auxiliary BSIF features: spoof rows first, then real rows, matching the
    # fake-then-live ordering of train_data above.
    aux_train_data = np.concatenate((
        np.loadtxt(aux_features_dir + '/Data_2015_BSIF_7_12_motion_Train_Spoof_DigPerson.txt'),
        np.loadtxt(aux_features_dir + '/Data_2015_BSIF_7_12_motion_Train_Real_DigPerson.txt')))
    preprocessing.scale(aux_train_data, copy=False)
    model.fit([train_data, aux_train_data], train_labels,
              # model.fit(train_data, train_labels,
              batch_size=32,
              nb_epoch=50,  # Keras 1.x spelling (epochs= in Keras 2)
              verbose=2,
              validation_split=0.1)
    model.save_weights(PRE_TRAINED_WEIGHTS_FILE)
else:
    # Skip training and reuse previously saved weights.
    model.load_weights(PRE_TRAINED_WEIGHTS_FILE)
test_fake = np.load(features_dir + '/test_fake.npy')
test_live = np.load(features_dir + '/test_live.npy')
test_data = np.concatenate((test_fake, test_live))
test_labels = np.array([1] * len(test_fake) + [0] * len(test_live))
del test_fake, test_live
preprocessing.scale(test_data, copy=False)
aux_test_data = np.concatenate((
np.loadtxt(aux_features_dir + '/Data_2015_BSIF_7_12_motion_Test_Spoof_DigPerson.txt'),
np.loadtxt(aux_features_dir + '/Data_2015_BSIF_7_12_motion_Test_Real_DigPerson.txt')))
preprocessing.scale(aux_test_data, copy=False)
predicted = model.predict([test_data, aux_test_data])
# predicted = model.predict(test_data)
predicted = np.array([0 if x < 0.5 else 1 for x in predicted])
n_ok = np.sum(predicted == test_labels)
print 'Validation accuracy = {:.2f} ({:d}/{:d})'.format(float(n_ok) * 100 / len(predicted), n_ok, len(predicted))
fpr = float(np.sum((predicted != test_labels) & (test_labels == 0))) / np.sum(test_labels == 0)
fnr = float(np.sum((predicted != test_labels) & (test_labels == 1))) / np.sum(test_labels == 1)
ace = (fpr + fnr) / 2
print 'average classification error = {:.2f}'.format(ace * 100)