#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Wed 11 May 2016 09:39:36 CEST


"""
Simple script that trains MNIST with LENET using Tensor flow

Usage:
  train_mnist.py [--batch-size=<arg> --validation-batch-size=<arg> --iterations=<arg> --validation-interval=<arg> --use-gpu]
  train_mnist.py -h | --help

Options:
  -h --help     Show this screen.
  --batch-size=<arg>  [default: 1]
  --validation-batch-size=<arg>   [default: 128]
  --iterations=<arg>  [default: 30000]
  --validation-interval=<arg>  [default: 100]
  --use-gpu     Run the training on the GPU.
"""

from docopt import docopt
import tensorflow as tf
import numpy

from .. import util
from bob.learn.tensorflow.data import MemoryDataShuffler, TextDataShuffler
from bob.learn.tensorflow.network import Lenet, MLP
from bob.learn.tensorflow.trainers import Trainer
from bob.learn.tensorflow.loss import BaseLoss

# Fixed RNG seed so network initialization is reproducible across runs.
SEED = 10

def main():
    """Parse command-line options, load a dataset, and train a network.

    Two hard-coded switches below select the behavior:

    * ``mnist`` -- True: load MNIST into memory; False: build MOBIO file lists.
    * ``cnn``   -- True: train a LeNet CNN; False: train a small MLP.
    """
    args = docopt(__doc__, version='Mnist training with TensorFlow')

    BATCH_SIZE = int(args['--batch-size'])
    VALIDATION_BATCH_SIZE = int(args['--validation-batch-size'])
    ITERATIONS = int(args['--iterations'])
    # NOTE(review): the two options below are parsed but never forwarded to
    # the Trainer -- wire them in or drop them from the usage string.
    VALIDATION_TEST = int(args['--validation-interval'])
    USE_GPU = args['--use-gpu']

    mnist = True  # hard-coded data-source switch (see docstring)

    # Loading data
    if mnist:
        train_data, train_labels, validation_data, validation_labels = \
            util.load_mnist(data_dir="./src/bob.db.mnist/bob/db/mnist/")

        # MNIST comes back flat; reshape to NHWC images (28x28, 1 channel).
        train_data = numpy.reshape(train_data, (train_data.shape[0], 28, 28, 1))
        validation_data = numpy.reshape(validation_data, (validation_data.shape[0], 28, 28, 1))

        train_data_shuffler = MemoryDataShuffler(train_data, train_labels,
                                                 input_shape=[28, 28, 1],
                                                 batch_size=BATCH_SIZE)

        validation_data_shuffler = MemoryDataShuffler(validation_data, validation_labels,
                                                      input_shape=[28, 28, 1],
                                                      batch_size=VALIDATION_BATCH_SIZE)
    else:
        # Deferred import: bob.db.mobio is only needed on this branch.
        import bob.db.mobio
        db = bob.db.mobio.Database()

        # Preparing the train set (MOBIO "world" group)
        train_objects = db.objects(protocol="male", groups="world")
        train_labels = [o.client_id for o in train_objects]
        train_file_names = [o.make_path(
            directory="/idiap/user/tpereira/face/baselines/eigenface/preprocessed",
            extension=".hdf5")
                      for o in train_objects]

        train_data_shuffler = TextDataShuffler(train_file_names, train_labels,
                                               scale=False,
                                               input_shape=[80, 64, 1],
                                               batch_size=BATCH_SIZE)

        # Preparing the validation set (MOBIO "dev" group) -- the original
        # comment here mistakenly said "train set".
        validation_objects = db.objects(protocol="male", groups="dev")
        validation_labels = [o.client_id for o in validation_objects]
        validation_file_names = [o.make_path(
            directory="/idiap/user/tpereira/face/baselines/eigenface/preprocessed",
            extension=".hdf5")
                            for o in validation_objects]

        validation_data_shuffler = TextDataShuffler(validation_file_names, validation_labels,
                                                    input_shape=[80, 64, 1],
                                                    scale=False,
                                                    batch_size=VALIDATION_BATCH_SIZE)

    # Preparing the architecture
    cnn = True  # hard-coded architecture switch (see docstring)
    if cnn:
        architecture = Lenet(seed=SEED)
    else:
        architecture = MLP(10, hidden_layers=[15, 20])

    # Loss/trainer setup is identical for both architectures; the original
    # duplicated these three lines in each branch.
    loss = BaseLoss(tf.nn.sparse_softmax_cross_entropy_with_logits, tf.reduce_mean)
    trainer = Trainer(architecture=architecture, loss=loss, iterations=ITERATIONS)
    trainer.train(train_data_shuffler, validation_data_shuffler)