Commit 90b043a5 authored by Amir MOHAMMADI

Convert commands to click commands

parent a05da373
Pipeline #19057 failed with stage in 34 minutes and 5 seconds
@@ -11,3 +11,4 @@ src
develop-eggs
sphinx
dist
temp/
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Wed 11 May 2016 09:39:36 CEST
"""Script that computes statistics for image.
"""
Script that computes statistics for image
Usage:
compute_statistics.py <base_path> <output_file> --extension=<arg>
compute_statistics.py -h | --help
Options:
-h --help Show this screen.
--extension=<arg> [default: .hdf5]
"""
from docopt import docopt
import bob.io.base
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
import os
import logging
import click
import numpy
import bob.io.image
import bob.io.image # to be able to load images
from bob.io.base import save, load
from bob.extension.scripts.click_helper import verbosity_option
logger = logging.getLogger(__name__)
def process_images(base_path, extension, shape):
files = os.listdir(base_path)
sum_data = numpy.zeros(shape=shape)
print("Processing {0}".format(base_path))
logging.info("Processing {0}".format(base_path))
count = 0
for f in files:
path = os.path.join(base_path, f)
@@ -34,27 +31,34 @@ def process_images(base_path, extension, shape):
sum_data += s
if os.path.splitext(path)[1] == extension:
data = bob.io.base.load(path)
data = load(path)
count += 1
sum_data += data
return count, sum_data
def main():
args = docopt(__doc__, version='Mnist training with TensorFlow')
@click.command()
@click.argument('base_path')
@click.argument('output_file')
@click.option('--extension', default='.hdf5', show_default=True)
@verbosity_option()
def compute_statistics(base_path, output_file, extension, **kwargs):
"""Script that computes statistics for image.
"""
logger.debug('base_path: %s', base_path)
logger.debug('output_file: %s', output_file)
logger.debug('extension: %s', extension)
logger.debug('kwargs: %s', kwargs)
BASE_PATH = args['<base_path>']
EXTENSION = args['--extension']
OUTPUT_FILE = args['<output_file>']
#SHAPE = [3, 224, 224]
# SHAPE = [3, 224, 224]
SHAPE = [1, 64, 64]
count, sum_data = process_images(BASE_PATH, EXTENSION, SHAPE)
count, sum_data = process_images(base_path, extension, SHAPE)
means = numpy.zeros(shape=SHAPE)
for s in range(SHAPE[0]):
means[s, ...] = sum_data[s, ...] / float(count)
bob.io.base.save(means, OUTPUT_FILE)
bob.io.base.save(means[0, :, :].astype("uint8"), "xuxa.png")
save(means, output_file)
save(means[0, :, :].astype("uint8"), "xuxa.png")
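The converted command above can be exercised directly with click's test runner. The sketch below is a minimal, hypothetical usage example: the module path is taken from the old console-script entry points removed at the end of this diff, and the input/output paths are placeholders.
# Minimal sketch, assuming the module path from the old entry points;
# '/path/to/images' and 'means.hdf5' are placeholder paths.
from click.testing import CliRunner
from bob.learn.tensorflow.script.compute_statistics import compute_statistics

runner = CliRunner()
result = runner.invoke(
    compute_statistics,
    ['/path/to/images', 'means.hdf5', '--extension', '.png'])
print(result.exit_code, result.output)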
#!/usr/bin/env python
"""Evaluates networks trained with tf.train.MonitoredTrainingSession
Usage:
%(prog)s [options] <config_files>...
%(prog)s --help
%(prog)s --version
Arguments:
<config_files> The configuration files. The configuration files are loaded
in order and they need to have several objects inside
totally. See below for explanation.
Options:
-h --help show this help message and exit
--version show version and exit
The configuration files should have the following objects totally:
## Required objects:
estimator
eval_input_fn
## Optional objects:
eval_interval_secs
run_once
hooks
name
For an example configuration, please see:
bob.learn.tensorflow/bob/learn/tensorflow/examples/mnist/mnist_config.py
"""Evaluates networks using Tensorflow estimators.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
import logging
import os
import time
import six
import sys
import tensorflow as tf
from bob.extension.config import load as read_config_file
from ..utils.eval import get_global_step
def main(argv=None):
from docopt import docopt
import sys
docs = __doc__ % {'prog': os.path.basename(sys.argv[0])}
version = pkg_resources.require('bob.learn.tensorflow')[0].version
args = docopt(docs, argv=argv, version=version)
config_files = args['<config_files>']
config = read_config_file(config_files)
estimator = config.estimator
eval_input_fn = config.eval_input_fn
eval_interval_secs = getattr(config, 'eval_interval_secs', 60)
run_once = getattr(config, 'run_once', False)
hooks = getattr(config, 'hooks', None)
name = getattr(config, 'eval_name', None)
import click
from bob.extension.scripts.click_helper import (
verbosity_option, ConfigCommand, ResourceOption)
logger = logging.getLogger(__name__)
@click.command(entry_point_group='bob.learn.tensorflow.config',
cls=ConfigCommand)
@click.option('--estimator', '-e', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.estimator')
@click.option('--eval-input-fn', '-i', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.input_fn')
@click.option('--hooks', cls=ResourceOption, multiple=True,
entry_point_group='bob.learn.tensorflow.hook')
@click.option('--run-once', cls=ResourceOption, default=False,
show_default=True)
@click.option('--eval-interval-secs', cls=ResourceOption, type=click.types.INT,
default=60, show_default=True)
@click.option('--name', cls=ResourceOption)
@verbosity_option(cls=ResourceOption)
def eval(estimator, eval_input_fn, hooks, run_once, eval_interval_secs, name,
**kwargs):
"""Evaluates networks using Tensorflow estimators.
\b
Parameters
----------
estimator : tf.estimator.Estimator
The estimator that will be trained. Can be a
``bob.learn.tensorflow.estimator`` entry point or a path to a Python
file which contains a variable named `estimator`.
eval_input_fn : callable
The ``input_fn`` that will be given to
:any:`tf.estimator.Estimator.evaluate`. Can be a
``bob.learn.tensorflow.input_fn`` entry point or a path to a Python
file which contains a variable named `eval_input_fn`.
hooks : [tf.train.SessionRunHook], optional
List of SessionRunHook subclass instances. Used for callbacks inside
the training loop. Can be a ``bob.learn.tensorflow.hook`` entry point
or a path to a Python file which contains a variable named `hooks`.
run_once : bool, optional
If given, the model will be evaluated only once.
eval_interval_secs : int, optional
The seconds to wait for the next evaluation.
name : str, optional
Name of the evaluation
verbose : int, optional
Increases verbosity (see help for --verbose).
\b
[CONFIG]... Configuration files. It is possible to pass one or
several Python files (or names of
``bob.learn.tensorflow.config`` entry points or
module names) which contain the parameters listed
above as Python variables. The options through the
command-line (see below) will override the values of
configuration files.
"""
logger.debug('estimator: %s', estimator)
logger.debug('eval_input_fn: %s', eval_input_fn)
logger.debug('hooks: %s', hooks)
logger.debug('run_once: %s', run_once)
logger.debug('eval_interval_secs: %s', eval_interval_secs)
logger.debug('name: %s', name)
logger.debug('kwargs: %s', kwargs)
if name:
real_name = 'eval_' + name
@@ -108,7 +126,3 @@ def main(argv=None):
if run_once:
break
time.sleep(eval_interval_secs)
if __name__ == '__main__':
main()
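Because the new eval command is a ConfigCommand, its required objects can also come from a configuration file passed as a [CONFIG] argument. Below is a hypothetical config sketch; the model and data are placeholders, not part of this commit.
# eval_config.py -- hypothetical example; the estimator and input_fn are
# placeholders and would normally come from a real model and dataset.
import tensorflow as tf

def _model_fn(features, labels, mode):
    # placeholder model: a single dense layer over the 'data' feature
    logits = tf.layers.dense(features['data'], 2)
    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
        train_op = tf.train.GradientDescentOptimizer(0.1).minimize(
            loss, global_step=tf.train.get_or_create_global_step())
    return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)

estimator = tf.estimator.Estimator(_model_fn, model_dir='/tmp/eval_example')

def eval_input_fn():
    # placeholder evaluation data
    features = {'data': tf.constant([[0.0, 1.0]])}
    labels = tf.constant([0])
    return features, labels

run_once = True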
#!/usr/bin/env python
"""Returns predictions of networks trained with
tf.train.MonitoredTrainingSession
Usage:
%(prog)s [-v...] [-k KEY]... [options] <config_files>...
%(prog)s --help
%(prog)s --version
Arguments:
<config_files> The configuration files. The configuration
files are loaded in order and they need to
have several objects inside totally. See
below for explanation.
Options:
-h --help Show this help message and exit
--version Show version and exit
-o PATH, --output-dir PATH Name of the output file.
-k KEY, --predict-keys KEY List of `str`, name of the keys to predict.
It is used if the
`EstimatorSpec.predictions` is a `dict`. If
`predict_keys` is used then rest of the
predictions will be filtered from the
dictionary. If `None`, returns all.
--checkpoint-path=<path> Path of a specific checkpoint to predict.
If `None`, the latest checkpoint in
`model_dir` is used.
-v, --verbose Increases the output verbosity level
The configuration files should have the following objects totally:
# Required objects:
estimator
predict_input_fn
# Optional objects:
hooks
For an example configuration, please see:
bob.learn.tensorflow/bob/learn/tensorflow/examples/mnist/mnist_config.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
import os
from multiprocessing import Pool
from collections import defaultdict
import numpy as np
from bob.io.base import create_directories_safe
from bob.extension.config import load as read_config_file
from bob.bio.base.utils import save
from bob.learn.tensorflow.utils.commandline import \
get_from_config_or_commandline
from bob.core.log import setup, set_verbosity_level
logger = setup(__name__)
def save_predictions(pool, output_dir, key, pred_buffer):
outpath = os.path.join(output_dir, key + '.hdf5')
create_directories_safe(os.path.dirname(outpath))
pool.apply_async(save, (np.mean(pred_buffer[key], axis=0), outpath))
def main(argv=None):
from docopt import docopt
import sys
docs = __doc__ % {'prog': os.path.basename(sys.argv[0])}
version = pkg_resources.require('bob.learn.tensorflow')[0].version
defaults = docopt(docs, argv=[""])
args = docopt(docs, argv=argv, version=version)
config_files = args['<config_files>']
config = read_config_file(config_files)
# optional arguments
verbosity = get_from_config_or_commandline(config, 'verbose', args,
defaults)
predict_keys = get_from_config_or_commandline(config, 'predict_keys', args,
defaults)
checkpoint_path = get_from_config_or_commandline(config, 'checkpoint_path',
args, defaults)
hooks = getattr(config, 'hooks', None)
# Sets-up logging
set_verbosity_level(logger, verbosity)
# required arguments
estimator = config.estimator
predict_input_fn = config.predict_input_fn
output_dir = get_from_config_or_commandline(config, 'output_dir', args,
defaults, False)
predictions = estimator.predict(
predict_input_fn,
predict_keys=predict_keys,
hooks=hooks,
checkpoint_path=checkpoint_path,
)
pool = Pool()
try:
pred_buffer = defaultdict(list)
for i, pred in enumerate(predictions):
key = pred['key']
prob = pred.get('probabilities', pred.get('embeddings'))
pred_buffer[key].append(prob)
if i == 0:
last_key = key
if last_key == key:
continue
else:
save_predictions(pool, output_dir, last_key, pred_buffer)
last_key = key
# else below is for the for loop
else:
save_predictions(pool, output_dir, key, pred_buffer)
finally:
pool.close()
pool.join()
if __name__ == '__main__':
main()
"""The main entry for bob tf (click-based) scripts.
"""
import click
import pkg_resources
from click_plugins import with_plugins
@with_plugins(pkg_resources.iter_entry_points('bob.learn.tensorflow.cli'))
@click.group()
def tf():
"""Tensorflow-related commands."""
pass
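The tf group discovers its subcommands through the bob.learn.tensorflow.cli entry point group, so each converted command is registered there instead of as a standalone console script (compare the entry points removed from the conda recipe at the end of this diff). The setup.py fragment below is only an illustrative sketch; the command and module names are assumptions, not taken from this commit.
# Hypothetical registration sketch -- actual names may differ in setup.py.
from setuptools import setup

setup(
    name='bob.learn.tensorflow',
    entry_points={
        'bob.learn.tensorflow.cli': [
            'compute_statistics = bob.learn.tensorflow.script.compute_statistics:compute_statistics',
            'train = bob.learn.tensorflow.script.train:train',
            'eval = bob.learn.tensorflow.script.eval:eval',
        ],
    },
)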
#!/usr/bin/env python
"""Trains networks using Tensorflow estimators.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
import logging
import click
from bob.extension.scripts.click_helper import (
verbosity_option, ConfigCommand, ResourceOption)
logger = logging.getLogger(__name__)
@click.command(entry_point_group='bob.learn.tensorflow.config',
cls=ConfigCommand)
@click.option('--estimator', '-e', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.estimator')
@click.option('--train-input-fn', '-i', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.input_fn')
@click.option('--hooks', cls=ResourceOption, multiple=True,
entry_point_group='bob.learn.tensorflow.hook')
@click.option('--steps', '-s', cls=ResourceOption, type=click.types.INT)
@click.option('--max-steps', '-m', cls=ResourceOption, type=click.types.INT)
@verbosity_option(cls=ResourceOption)
def train(estimator, train_input_fn, hooks, steps, max_steps, **kwargs):
"""Trains networks using Tensorflow estimators.
\b
Parameters
----------
estimator : tf.estimator.Estimator
The estimator that will be trained. Can be a
``bob.learn.tensorflow.estimator`` entry point or a path to a Python
file which contains a variable named `estimator`.
train_input_fn : callable
The ``input_fn`` that will be given to
:any:`tf.estimator.Estimator.train`. Can be a
``bob.learn.tensorflow.input_fn`` entry point or a path to a Python
file which contains a variable named `train_input_fn`.
hooks : [tf.train.SessionRunHook], optional
List of SessionRunHook subclass instances. Used for callbacks inside
the training loop. Can be a ``bob.learn.tensorflow.hook`` entry point
or a path to a Python file which contains a variable named `hooks`.
steps : int, optional
Number of steps for which to train model. See
:any:`tf.estimator.Estimator.train`.
max_steps : int, optional
Number of total steps for which to train model. See
:any:`tf.estimator.Estimator.train`.
verbose : int, optional
Increases verbosity (see help for --verbose).
\b
[CONFIG]... Configuration files. It is possible to pass one or
several Python files (or names of
``bob.learn.tensorflow.config`` entry points or
module names) which contain the parameters listed
above as Python variables. The options through the
command-line (see below) will override the values of
configuration files.
"""
logger.debug('estimator: %s', estimator)
logger.debug('train_input_fn: %s', train_input_fn)
logger.debug('hooks: %s', hooks)
logger.debug('steps: %s', steps)
logger.debug('max_steps: %s', max_steps)
logger.debug('kwargs: %s', kwargs)
# Train
logger.info("Training a model in %s", estimator.model_dir)
estimator.train(
input_fn=train_input_fn, hooks=hooks, steps=steps, max_steps=max_steps)
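As the docstring states, command-line options override values from configuration files. A minimal sketch using click's test runner; the config file name and the module path of this new command are assumptions.
# Sketch only: 'train_config.py' (defining estimator and train_input_fn)
# and the module path are assumptions for illustration.
from click.testing import CliRunner
from bob.learn.tensorflow.script.train import train

runner = CliRunner()
# --steps on the command line overrides any 'steps' defined in the config.
result = runner.invoke(train, ['train_config.py', '--steps', '1000'])
print(result.exit_code, result.output)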
#!/usr/bin/env python
"""Trains and evaluates a network using Tensorflow estimators.
This script calls the estimator.train_and_evaluate function. Please see:
https://www.tensorflow.org/api_docs/python/tf/estimator/train_and_evaluate
https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec
https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec
for more details.
Usage:
%(prog)s [-v...] [options] <config_files>...
%(prog)s --help
%(prog)s --version
Arguments:
<config_files> The configuration files. The
configuration files are loaded in order
and they need to have several objects
inside totally. See below for
explanation.
Options:
-h --help Show this help message and exit
--version Show version and exit
-v, --verbose Increases the output verbosity level
The configuration files should have the following objects totally:
## Required objects:
estimator
train_spec
eval_spec
## Optional objects:
exit_ok_exceptions : [Exception]
A list of exceptions to exit properly if they occur. If nothing is
provided, the EarlyStopException is handled by default.
"""
from __future__ import absolute_import
from __future__ import division
@@ -42,42 +7,68 @@ from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
import tensorflow as tf
from bob.extension.config import load as read_config_file
from bob.learn.tensorflow.utils.commandline import \
get_from_config_or_commandline
from bob.learn.tensorflow.utils.hooks import EarlyStopException
from bob.core.log import setup, set_verbosity_level
logger = setup(__name__)
def main(argv=None):
from docopt import docopt
import os
import sys
docs = __doc__ % {'prog': os.path.basename(sys.argv[0])}
version = pkg_resources.require('bob.learn.tensorflow')[0].version
defaults = docopt(docs, argv=[""])
args = docopt(docs, argv=argv, version=version)
config_files = args['<config_files>']
config = read_config_file(config_files)
# optional arguments
verbosity = get_from_config_or_commandline(config, 'verbose', args,
defaults)
# Sets-up logging
set_verbosity_level(logger, verbosity)
# required objects
estimator = config.estimator
train_spec = config.train_spec
eval_spec = config.eval_spec
# optional objects
exit_ok_exceptions = getattr(config, 'exit_ok_exceptions',
(EarlyStopException,))
logger.info("Training a model in %s", estimator.model_dir)
import logging
import click
from bob.extension.scripts.click_helper import (
verbosity_option, ConfigCommand, ResourceOption)
logger = logging.getLogger(__name__)
@click.command(entry_point_group='bob.learn.tensorflow.config',
cls=ConfigCommand)
@click.option('--estimator', '-e', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.estimator')
@click.option('--train-spec', '-it', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.trainspec')
@click.option('--eval-spec', '-ie', required=True, cls=ResourceOption,
entry_point_group='bob.learn.tensorflow.evalspec')
@click.option('--exit-ok-exceptions', cls=ResourceOption, multiple=True,
default=(EarlyStopException,), show_default=True,
entry_point_group='bob.learn.tensorflow.exception')
@verbosity_option(cls=ResourceOption)
def train_and_evaluate(estimator, train_spec, eval_spec, exit_ok_exceptions,
**kwargs):
"""Trains and evaluates a network using Tensorflow estimators.
This script calls the estimator.train_and_evaluate function. Please see:
https://www.tensorflow.org/api_docs/python/tf/estimator/train_and_evaluate
https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec
https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec
for more details.
\b
Parameters
----------
estimator : tf.estimator.Estimator
The estimator that will be trained. Can be a
``bob.learn.tensorflow.estimator`` entry point or a path to a Python
file which contains a variable named `estimator`.
train_spec : tf.estimator.TrainSpec
See :any:`tf.estimator.Estimator.train_and_evaluate`.
eval_spec : tf.estimator.EvalSpec
See :any:`tf.estimator.Estimator.train_and_evaluate`.
exit_ok_exceptions : [Exception], optional
A list of exceptions to exit properly if they occur. If nothing is
provided, the EarlyStopException is handled by default.
verbose : int, optional
Increases verbosity (see help for --verbose).
\b
[CONFIG]... Configuration files. It is possible to pass one or
several Python files (or names of
``bob.learn.tensorflow.config`` entry points or
module names) which contain the parameters listed
above as Python variables. The options through the
command-line (see below) will override the values of
configuration files.
"""
logger.debug('estimator: %s', estimator)
logger.debug('train_spec: %s', train_spec)
logger.debug('eval_spec: %s', eval_spec)
logger.debug('exit_ok_exceptions: %s', exit_ok_exceptions)
logger.debug('kwargs: %s', kwargs)
# Train and evaluate
try:
@@ -85,7 +76,3 @@ def main(argv=None):
except exit_ok_exceptions as e:
logger.exception(e)
return
if __name__ == '__main__':
main()
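The --train-spec and --eval-spec resources are plain tf.estimator.TrainSpec and tf.estimator.EvalSpec objects, so a configuration file can build them directly. A hypothetical sketch follows; the datasets are placeholders and the estimator itself would be supplied separately (e.g. via --estimator).
# Hypothetical config fragment for train_and_evaluate; placeholder data only.
import tensorflow as tf

def train_input_fn():
    dataset = tf.data.Dataset.from_tensor_slices(
        ({'data': [[0.0, 1.0]]}, [0]))
    return dataset.repeat().batch(1)

def eval_input_fn():
    dataset = tf.data.Dataset.from_tensor_slices(
        ({'data': [[0.0, 1.0]]}, [0]))
    return dataset.batch(1)

train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=1000)
eval_spec = tf.estimator.EvalSpec(input_fn=eval_input_fn, throttle_secs=60)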
#!/usr/bin/env python
"""Trains networks using Tensorflow estimators.
Usage:
%(prog)s [-v...] [options] <config_files>...
%(prog)s --help
%(prog)s --version
Arguments:
<config_files> The configuration files. The
configuration files are loaded in order
and they need to have several objects
inside totally. See below for
explanation.
Options:
-h --help Show this help message and exit
--version Show version and exit
-v, --verbose Increases the output verbosity level
-s N, --steps N The number of steps to train.
-m N, --max-steps N The maximum number of steps to train.
This is a limit for global step which
continues in separate runs.
The configuration files should have the following objects totally:
## Required objects:
estimator
train_input_fn
## Optional objects:
hooks
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import pkg_resources so that bob imports work properly:
import pkg_resources
from bob.extension.config import load as read_config_file
from bob.learn.tensorflow.utils.commandline import \
get_from_config_or_commandline
from bob.core.log import setup, set_verbosity_level
logger = setup(__name__)
def main(argv=None):
from docopt import docopt
import os
import sys
docs = __doc__ % {'prog': os.path.basename(sys.argv[0])}
version = pkg_resources.require('bob.learn.tensorflow')[0].version
defaults = docopt(docs, argv=[""])
args = docopt(docs, argv=argv, version=version)
config_files = args['<config_files>']
config = read_config_file(config_files)
# optional arguments
verbosity = get_from_config_or_commandline(config, 'verbose', args,
defaults)
max_steps = get_from_config_or_commandline(config, 'max_steps', args,
defaults)
steps = get_from_config_or_commandline(config, 'steps', args, defaults)
hooks = getattr(config, 'hooks', None)
# Sets-up logging
set_verbosity_level(logger, verbosity)
# required arguments
estimator = config.estimator
train_input_fn = config.train_input_fn
# Train
logger.info("Training a model in %s", estimator.model_dir)
estimator.train(
input_fn=train_input_fn, hooks=hooks, steps=steps, max_steps=max_steps)
if __name__ == '__main__':
main()
; vim: set fileencoding=utf-8 :
; Mon 15 Aug 2016 09:48:28 CEST
; Mon Apr 16 11:41:03 CEST 2018
[buildout]
parts = scripts
@@ -10,4 +10,5 @@ newest = false
verbose = true
[scripts]
recipe = bob.buildout:scripts
\ No newline at end of file
recipe = bob.buildout:scripts
dependent-scripts = true
\ No newline at end of file
@@ -6,15 +6,6 @@ package:
version: {{ environ.get('BOB_PACKAGE_VERSION', '0.0.1') }}
build:
entry_points:
- bob_tf_compute_statistics.py = bob.learn.tensorflow.script.compute_statistics:main
- bob_tf_db_to_tfrecords = bob.learn.tensorflow.script.db_to_tfrecords:main
- bob_tf_train_generic = bob.learn.tensorflow.script.train_generic:main
- bob_tf_eval_generic = bob.learn.tensorflow.script.eval_generic:main