[sphinx] Fixed all warnings

parent 134ff956
Pipeline #39072 passed in 4 minutes and 7 seconds
@@ -169,57 +169,57 @@ class SGEIdiapCluster(JobQueueCluster):
Below follows a vanilla example that creates a set of jobs on all.q:
>>> from bob.pipelines.distributed.sge import SGEIdiapCluster
>>> from dask.distributed import Client
>>> cluster = SGEIdiapCluster()
>>> cluster.scale_up(10)
>>> client = Client(cluster)
>>> from bob.pipelines.distributed.sge import SGEIdiapCluster # doctest: +SKIP
>>> from dask.distributed import Client # doctest: +SKIP
>>> cluster = SGEIdiapCluster() # doctest: +SKIP
>>> cluster.scale_up(10) # doctest: +SKIP
>>> client = Client(cluster) # doctest: +SKIP
It's possible to set a resource specification yourself:
>>> Q_1DAY_IO_BIG_SPEC = {
>>> "default": {
>>> "queue": "q_1day",
>>> "memory": "8GB",
>>> "io_big": True,
>>> "resource_spec": "",
>>> "resources": "",
>>> }
>>> }
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_IO_BIG_SPEC)
>>> cluster.scale_up(10)
>>> client = Client(cluster)
... "default": {
... "queue": "q_1day",
... "memory": "8GB",
... "io_big": True,
... "resource_spec": "",
... "resources": "",
... }
... }
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_IO_BIG_SPEC) # doctest: +SKIP
>>> cluster.scale_up(10) # doctest: +SKIP
>>> client = Client(cluster) # doctest: +SKIP
More than one job spec can be set:
>>> Q_1DAY_GPU_SPEC = {
>>> "default": {
>>> "queue": "q_1day",
>>> "memory": "8GB",
>>> "io_big": True,
>>> "resource_spec": "",
>>> "resources": "",
>>> },
>>> "gpu": {
>>> "queue": "q_gpu",
>>> "memory": "12GB",
>>> "io_big": False,
>>> "resource_spec": "",
>>> "resources": {"GPU":1},
>>> },
>>> }
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_GPU_SPEC)
>>> cluster.scale_up(10)
>>> cluster.scale_up(1, sge_job_spec_key="gpu")
>>> client = Client(cluster)
... "default": {
... "queue": "q_1day",
... "memory": "8GB",
... "io_big": True,
... "resource_spec": "",
... "resources": "",
... },
... "gpu": {
... "queue": "q_gpu",
... "memory": "12GB",
... "io_big": False,
... "resource_spec": "",
... "resources": {"GPU":1},
... },
... }
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_GPU_SPEC) # doctest: +SKIP
>>> cluster.scale_up(10) # doctest: +SKIP
>>> cluster.scale_up(1, sge_job_spec_key="gpu") # doctest: +SKIP
>>> client = Client(cluster) # doctest: +SKIP
Adaptive job allocation can also be used via the `AdaptiveIdiap` extension:
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_GPU_SPEC)
>>> cluster.adapt(Adaptive=AdaptiveIdiap, minimum=2, maximum=10)
>>> client = Client(cluster)
>>> cluster = SGEIdiapCluster(sge_job_spec=Q_1DAY_GPU_SPEC) # doctest: +SKIP
>>> cluster.adapt(Adaptive=AdaptiveIdiap, minimum=2, maximum=10) # doctest: +SKIP
>>> client = Client(cluster) # doctest: +SKIP
"""
@@ -327,7 +327,7 @@ class SGEIdiapCluster(JobQueueCluster):
Number of jobs to scale
sge_job_spec_key: str
One of the specs :py:attr:`SGEIdiapCluster.sge_job_spec`
One of the specs `SGEIdiapCluster.sge_job_spec`
"""
if n_jobs == 0:
@@ -365,8 +365,8 @@ class AdaptiveIdiap(Adaptive):
"""
Custom mechanism to adaptively allocate workers based on scheduler load
This custom implementation extends the :py:meth:`Adaptive.recommendations` by looking
at the :py:meth:`distributed.scheduler.TaskState.resource_restrictions`.
This custom implementation extends the `Adaptive.recommendations` by looking
at the `distributed.scheduler.TaskState.resource_restrictions`.
The heuristic is:
@@ -455,7 +455,7 @@ class SchedulerIdiap(Scheduler):
"""
Idiap extended distributed scheduler
This scheduler extends :py:class:`Scheduler` by just adding a handler
This scheduler extends `Scheduler` by just adding a handler
that fetches, at every scheduler cycle, the resource restrictions of
a task that has status `no-worker`
"""
......
@@ -47,23 +47,25 @@ def estimator_dask_it(
--------
Vanilla example
>>> pipeline = estimator_dask_it(pipeline) # Take some pipeline and make the methods `fit` and `transform` run over dask
>>> pipeline.fit(samples).compute()
>>> from bob.pipelines.mixins import estimator_dask_it
>>> pipeline = estimator_dask_it(pipeline) # Take some pipeline and make the methods `fit` and `transform` run over dask # doctest: +SKIP
>>> pipeline.fit(samples).compute() # doctest: +SKIP
In this example we "mark" the fit method with a particular tag.
Hence, we can instruct the `dask.delayed.compute` method to place some
delayeds to be executed on particular resources
>>> pipeline = estimator_dask_it(pipeline, fit_tag=[(1, "GPU")]) # Take some pipeline and make the methods `fit` and `transform` run over dask
>>> fit = pipeline.fit(samples)
>>> fit.compute(resources=pipeline.dask_tags())
>>> from bob.pipelines.mixins import estimator_dask_it # doctest: +SKIP
>>> pipeline = estimator_dask_it(pipeline, fit_tag=[(1, "GPU")]) # Take some pipeline and make the methods `fit` and `transform` run over dask # doctest: +SKIP
>>> fit = pipeline.fit(samples) # doctest: +SKIP
>>> fit.compute(resources=pipeline.dask_tags()) # doctest: +SKIP
Tagging an estimator
>>> estimator = estimator_dask_it(estimator)
>>> transf = estimator.transform(samples)
>>> transf.compute(resources=estimator.dask_tags())
>>> from bob.pipelines.mixins import estimator_dask_it # doctest: +SKIP
>>> estimator = estimator_dask_it(estimator) # doctest: +SKIP
>>> transf = estimator.transform(samples) # doctest: +SKIP
>>> transf.compute(resources=estimator.dask_tags()) # doctest: +SKIP
"""
@@ -122,32 +124,34 @@ def mix_me_up(bases, o):
Dynamically creates a new class from :any:`object` or :any:`class`.
For instance, `mix_me_up((A, B), class_c)` is equivalent to `class ABC(A, B, C): pass`
Parameters
----------
bases: :any:`list` or :any:`tuple`
Base classes to be mixed in
o: :any:`class`, :any:`object` or :py:class:`sklearn.pipeline.Pipeline`
Base element to be extended
Example
-------
>>> my_mixed_class = mix_me_up([MixInA, MixInB], OriginalClass)
>>> mixed_object = my_mixed_class(*args)
>>> from bob.pipelines.mixins import mix_me_up # doctest: +SKIP
>>> my_mixed_class = mix_me_up([MixInA, MixInB], OriginalClass) # doctest: +SKIP
>>> mixed_object = my_mixed_class(*args) # doctest: +SKIP
It's also possible to mix up an instance:
Example
-------
>>> instance = OriginalClass()
>>> mixed_object = mix_me_up([MixInA, MixInB], instance)
>>> instance = OriginalClass() # doctest: +SKIP
>>> mixed_object = mix_me_up([MixInA, MixInB], instance) # doctest: +SKIP
It's also possible to mix up a :py:class:`sklearn.pipeline.Pipeline`.
In this case, every estimator inside of :py:meth:`sklearn.pipeline.Pipeline.steps`
In this case, every estimator inside of `sklearn.pipeline.Pipeline.steps`
will be mixed up
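For example (a hedged sketch; `MixInA`, `MyEstimatorA` and `MyEstimatorB` stand in for your own mixin and estimator classes):
>>> from sklearn.pipeline import make_pipeline # doctest: +SKIP
>>> pipeline = make_pipeline(MyEstimatorA(), MyEstimatorB()) # doctest: +SKIP
>>> mixed_pipeline = mix_me_up([MixInA], pipeline) # doctest: +SKIP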
Parameters
----------
bases: :any:`list` or :any:`tuple`
Base classes to be mixed in
o: :any:`class`, :any:`object` or :py:class:`sklearn.pipeline.Pipeline`
Base element to be extended
"""
@@ -196,7 +200,7 @@ def _make_kwargs_from_samples(samples, arg_attr_list):
class SampleMixin:
"""Mixin class to make scikit-learn estimators work in :any:`Sample`-based
"""Mixin class to make scikit-learn estimators work in :py:class:`bob.pipelines.sample.Sample`-based
pipelines.
Do not use this class except for scikit-learn estimators.
@@ -205,9 +209,9 @@ class SampleMixin:
Also implement ``predict``, ``predict_proba``, and ``score``. See:
https://scikit-learn.org/stable/developers/develop.html#apis-of-scikit-learn-objects
Attributes
Parameters
----------
fit_extra_arguments : [tuple], optional
fit_extra_arguments : [tuple]
Use this option if you want to pass extra arguments to the fit method of the
mixed instance. The format is a list of two value tuples. The first value in
tuples is the name of the argument that fit accepts, like ``y``, and the second
@@ -215,7 +219,8 @@ class SampleMixin:
passing samples to the fit method and want to pass ``subject`` attributes of
samples as the ``y`` argument to the fit method, you can provide ``[("y",
"subject")]`` as the value for this attribute.
transform_extra_arguments : [tuple], optional
transform_extra_arguments : [tuple]
Similar to ``fit_extra_arguments`` but for the transform method.
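Example
-------
A hedged sketch of the ``fit_extra_arguments`` mechanism described above (``MyEstimator`` and ``samples`` are stand-ins for your own estimator and data):
>>> from bob.pipelines.mixins import SampleMixin, mix_me_up # doctest: +SKIP
>>> MixedEstimator = mix_me_up([SampleMixin], MyEstimator) # doctest: +SKIP
>>> estimator = MixedEstimator(fit_extra_arguments=[("y", "subject")]) # doctest: +SKIP
>>> estimator.fit(samples) # doctest: +SKIP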
"""
@@ -260,7 +265,7 @@ class SampleMixin:
class CheckpointMixin:
"""Mixin class that allows :any:`Sample`-based estimators save their results into
"""Mixin class that allows :py:class:`bob.pipelines.sample.Sample`-based estimators save their results into
disk."""
def __init__(
@@ -340,7 +345,7 @@ class CheckpointMixin:
raise ValueError("Type for sample not supported %s" % type(sample))
def load(self, sample, path):
# because we are checkpointing, we return a DelayedSample
# because we are checkpointing, we return a :py:class:`bob.pipelines.sample.DelayedSample`
# instead of a normal (preloaded) sample. This allows the next
# phase to avoid loading it when unnecessary (e.g. when the next
# phase is already check-pointed)
@@ -367,7 +372,7 @@ class CheckpointMixin:
class SampleFunctionTransformer(SampleMixin, FunctionTransformer):
"""Mixin class that transforms Scikit learn FunctionTransformer (https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.FunctionTransformer.html)
work with :any:`Sample`-based pipelines.
work with :py:class:`bob.pipelines.sample.Sample`-based pipelines.
"""
pass
@@ -377,7 +382,7 @@ class CheckpointSampleFunctionTransformer(
CheckpointMixin, SampleMixin, FunctionTransformer
):
"""Mixin class that transforms Scikit learn FunctionTransformer (https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.FunctionTransformer.html)
work with :any:`Sample`-based pipelines.
work with :py:class:`bob.pipelines.sample.Sample`-based pipelines.
Furthermore, it makes the transformer checkpointable
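For instance (a hedged sketch; ``features_dir`` is an assumed checkpointing argument and may not match the actual API):
>>> import numpy # doctest: +SKIP
>>> transformer = CheckpointSampleFunctionTransformer(func=numpy.flip, features_dir="./checkpoints") # doctest: +SKIP
>>> transformed = transformer.transform(samples) # doctest: +SKIP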
"""
@@ -388,22 +393,21 @@ class CheckpointSampleFunctionTransformer(
class NonPicklableMixin:
"""Class that wraps estimators that are not picklable
Parameters
----------
callable:
Callable function that instantiates the scikit-learn estimator
Example
-------
>>> from bob.pipelines.processor import NonPicklableMixin
>>> wrapper = NonPicklableMixin(my_non_picklable_class_callable)
>>> from bob.pipelines.processor import NonPicklableMixin # doctest: +SKIP
>>> wrapper = NonPicklableMixin(my_non_picklable_class_callable) # doctest: +SKIP
Example
-------
>>> from bob.pipelines.processor import NonPicklableMixin
>>> import functools
>>> wrapper = NonPicklableMixin(functools.partial(MyNonPicklableClass, arg1, arg2))
Parameters
----------
callable: callable
Callable function that instantiates the scikit-learn estimator
>>> from bob.pipelines.processor import NonPicklableMixin # doctest: +SKIP
>>> import functools # doctest: +SKIP
>>> wrapper = NonPicklableMixin(functools.partial(MyNonPicklableClass, arg1, arg2)) # doctest: +SKIP
"""
@@ -471,22 +475,22 @@ class DaskEstimatorMixin:
class DaskBagMixin(TransformerMixin):
"""Transform an arbitrary iterator into a :py:class:`dask.bag`
"""Transform an arbitrary iterator into a `dask.bag`
Parameters
----------
npartitions: int
Number of partitions used in :py:meth:`dask.bag.npartitions`
Number of partitions used in `dask.bag.npartitions`
Example
-------
>>> transformer = DaskBagMixin()
>>> dask_bag = transformer.transform([1,2,3])
>>> dask_bag.map_partitions.....
>>> transformer = DaskBagMixin() # doctest: +SKIP
>>> dask_bag = transformer.transform([1,2,3]) # doctest: +SKIP
>>> dask_bag.map_partitions # doctest: +SKIP
"""
......
@@ -21,11 +21,11 @@ class DelayedSample:
Parameters
----------
load : function
load:
A Python function that can be called without parameters, to load the
sample in question from whatever medium
parent : :py:class:`DelayedSample`, :py:class:`Sample`, None
parent : :py:class:`bob.pipelines.sample.DelayedSample`, :py:class:`bob.pipelines.sample.Sample`, None
If passed, consider this as a parent of this sample, to copy
information
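Example
-------
A minimal sketch, assuming ``my_file.npy`` exists on disk and that the
wrapped data is exposed through a lazily-loading ``data`` attribute:
>>> import functools # doctest: +SKIP
>>> import numpy # doctest: +SKIP
>>> from bob.pipelines.sample import DelayedSample # doctest: +SKIP
>>> sample = DelayedSample(functools.partial(numpy.load, "my_file.npy")) # doctest: +SKIP
>>> sample.data # loading happens only at this point # doctest: +SKIP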
......
@@ -33,7 +33,8 @@ requirements:
- setuptools
- numpy
- dask
- dask-jobqueue
- dask-jobqueue
- distributed
- scikit-learn
test:
......
py:meth dask.distributed.Adaptive
py:class dask_jobqueue.core.JobQueueCluster
py:class distributed.deploy.adaptive.Adaptive
py:class dask_jobqueue.core.Job
py:class sklearn.preprocessing._function_transformer.FunctionTransformer
\ No newline at end of file
@@ -15,7 +15,7 @@ What is a Sample ?
------------------
A :py:class:`bob.pipelines.sample.Sample` is a simple container that wraps a datapoint.
The example below shows how this can be used to wrap a :py:class:`numpy.array`.
The example below shows how this can be used to wrap a :py:func:`numpy.array`.
.. code:: python
@@ -28,7 +28,7 @@ The example below shows how this can be used to wrap a :py:class:`numpy.array`.
Sample and metadata
-------------------
Metadata can be added as keyword arguments in :py:meth:`bob.pipelines.sample.Sample.__init__.py`, like:
Metadata can be added as keyword arguments in :py:class:`bob.pipelines.sample.Sample`, like:
.. code:: python
......