Commit a11d5d41 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

[sphinx] Fixed doctests

parent bc1a6059
Pipeline #29581 passed with stage
in 153 minutes and 15 seconds
......@@ -84,6 +84,27 @@ class Generator:
def dataset_using_generator(*args, **kwargs):
"""
A generator function which wraps samples so that they can
be used with ``tf.data.Dataset.from_generator``
Attributes
----------
samples : [:obj:`object`]
A list of samples to be given to ``reader`` to load the data.
reader : :obj:`object`, optional
A callable with the signature of ``data, label, key = reader(sample)``
which takes a sample and loads it.
multiple_samples : :obj:`bool`, optional
If true, it assumes that the bio database's samples actually contain
multiple samples. This is useful when, for example, you want to treat
video databases as image databases.
"""
generator = Generator(*args, **kwargs)
dataset = tf.data.Dataset.from_generator(
generator, generator.output_types, generator.output_shapes
......
......@@ -30,7 +30,10 @@ def balanced_softmax_cross_entropy_loss_weights(labels, dtype="float32"):
Examples
--------
>>> labels = array([[1, 0, 0],
>>> import numpy
>>> import tensorflow as tf
>>> from bob.learn.tensorflow.loss import balanced_softmax_cross_entropy_loss_weights
>>> labels = numpy.array([[1, 0, 0],
... [1, 0, 0],
... [0, 0, 1],
... [0, 1, 0],
......@@ -61,10 +64,11 @@ def balanced_softmax_cross_entropy_loss_weights(labels, dtype="float32"):
... [0, 1, 0],
... [1, 0, 0],
... [0, 0, 1],
... [1, 0, 0]], dtype=int32)
>>> tf.reduce_sum(labels, axis=0)
... [1, 0, 0]], dtype="int32")
>>> session = tf.Session() # Eager execution is also possible; see https://www.tensorflow.org/guide/eager
>>> session.run(tf.reduce_sum(labels, axis=0))
array([20, 5, 7], dtype=int32)
>>> balanced_softmax_cross_entropy_loss_weights(labels, dtype='float32')
>>> session.run(balanced_softmax_cross_entropy_loss_weights(labels, dtype='float32'))
array([0.53333336, 0.53333336, 1.5238096 , 2.1333334 , 1.5238096 ,
0.53333336, 0.53333336, 1.5238096 , 0.53333336, 0.53333336,
0.53333336, 0.53333336, 0.53333336, 0.53333336, 2.1333334 ,
......@@ -75,8 +79,8 @@ def balanced_softmax_cross_entropy_loss_weights(labels, dtype="float32"):
You would use it like this:
>>> weights = balanced_softmax_cross_entropy_loss_weights(labels, dtype=logits.dtype)
>>> loss = tf.losses.softmax_cross_entropy(logits=logits, labels=labels, weights=weights)
>>> #weights = balanced_softmax_cross_entropy_loss_weights(labels, dtype=logits.dtype)
>>> #loss = tf.losses.softmax_cross_entropy(logits=logits, labels=labels, weights=weights)
"""
shape = tf.cast(tf.shape(labels), dtype=dtype)
batch_size, n_classes = shape[0], shape[1]
......@@ -110,11 +114,15 @@ def balanced_sigmoid_cross_entropy_loss_weights(labels, dtype="float32"):
Examples
--------
>>> labels = array([1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0,
... 1, 1, 0, 1, 1, 1, 0, 1, 0, 1], dtype=int32)
>>> import numpy
>>> import tensorflow as tf
>>> from bob.learn.tensorflow.loss import balanced_sigmoid_cross_entropy_loss_weights
>>> labels = numpy.array([1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0,
... 1, 1, 0, 1, 1, 1, 0, 1, 0, 1], dtype="int32")
>>> sum(labels), len(labels)
20, 32
>>> balanced_sigmoid_cross_entropy_loss_weights(labels, dtype='float32')
(20, 32)
>>> session = tf.Session() # Eager execution is also possible; see https://www.tensorflow.org/guide/eager
>>> session.run(balanced_sigmoid_cross_entropy_loss_weights(labels, dtype='float32'))
array([0.8 , 0.8 , 1.3333334, 1.3333334, 1.3333334, 0.8 ,
0.8 , 1.3333334, 0.8 , 0.8 , 0.8 , 0.8 ,
0.8 , 0.8 , 1.3333334, 0.8 , 1.3333334, 0.8 ,
......@@ -124,8 +132,8 @@ def balanced_sigmoid_cross_entropy_loss_weights(labels, dtype="float32"):
You would use it like this:
>>> weights = balanced_sigmoid_cross_entropy_loss_weights(labels, dtype=logits.dtype)
>>> loss = tf.losses.sigmoid_cross_entropy(logits=logits, labels=labels, weights=weights)
>>> #weights = balanced_sigmoid_cross_entropy_loss_weights(labels, dtype=logits.dtype)
>>> #loss = tf.losses.sigmoid_cross_entropy(logits=logits, labels=labels, weights=weights)
"""
labels = tf.cast(labels, dtype='int32')
batch_size = tf.cast(tf.shape(labels)[0], dtype=dtype)
......
......@@ -27,6 +27,7 @@ Architectures
bob.learn.tensorflow.network.light_cnn9
bob.learn.tensorflow.network.dummy
bob.learn.tensorflow.network.mlp
bob.learn.tensorflow.network.mlp_with_batchnorm_and_dropout
bob.learn.tensorflow.network.inception_resnet_v2
bob.learn.tensorflow.network.inception_resnet_v1
bob.learn.tensorflow.network.inception_resnet_v2_batch_norm
......@@ -46,6 +47,7 @@ Data
bob.learn.tensorflow.dataset.triplet_image.shuffle_data_and_labels_image_augmentation
bob.learn.tensorflow.dataset.tfrecords.shuffle_data_and_labels_image_augmentation
bob.learn.tensorflow.dataset.tfrecords.shuffle_data_and_labels
bob.learn.tensorflow.dataset.generator.dataset_using_generator
bob.learn.tensorflow.utils.util.to_channels_last
bob.learn.tensorflow.utils.util.to_channels_first
......@@ -70,6 +72,8 @@ Losses
bob.learn.tensorflow.loss.linear_gram_style_loss
bob.learn.tensorflow.loss.content_loss
bob.learn.tensorflow.loss.denoising_loss
bob.learn.tensorflow.loss.balanced_softmax_cross_entropy_loss_weights
bob.learn.tensorflow.loss.balanced_sigmoid_cross_entropy_loss_weights
Please register or sign in to reply
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment