diff --git a/bob/pipelines/config/distributed/sge_demanding.py b/bob/pipelines/config/distributed/sge_demanding.py deleted file mode 100644 index c0fc113210f3b6a33ff9a78200f7e303a98ba4d9..0000000000000000000000000000000000000000 --- a/bob/pipelines/config/distributed/sge_demanding.py +++ /dev/null @@ -1,7 +0,0 @@ -from dask.distributed import Client - -from bob.pipelines.distributed.sge import SGEMultipleQueuesCluster -from bob.pipelines.distributed.sge_queues import QUEUE_DEMANDING - -cluster = SGEMultipleQueuesCluster(min_jobs=20, sge_job_spec=QUEUE_DEMANDING) -dask_client = Client(cluster) diff --git a/bob/pipelines/distributed/sge_queues.py b/bob/pipelines/distributed/sge_queues.py index 27dee9ea31c105dda6745d6c965173f4364f76b9..e8b5920b2124b287b7c81a10e85a62a8b27d7e3f 100644 --- a/bob/pipelines/distributed/sge_queues.py +++ b/bob/pipelines/distributed/sge_queues.py @@ -101,56 +101,3 @@ QUEUE_LIGHT = { This queue setup has a light arrangement. For CPU jobs, it prioritizes all.q and not io_big """ - - -QUEUE_DEMANDING = { - "default": { - "queue": "q_1day", - "memory": "8GB", - "io_big": True, - "resource_spec": "", - "max_jobs": 48, - "resources": "", - }, - "q_1day": { - "queue": "q_1day", - "memory": "4GB", - "io_big": False, - "resource_spec": "", - "max_jobs": 48, - "resources": {"q_1day": 1}, - }, - "q_1week": { - "queue": "q_1week", - "memory": "4GB", - "io_big": True, - "resource_spec": "", - "resources": {"q_1week": 1}, - }, - "q_short_gpu": { - "queue": "q_short_gpu", - "memory": "30GB", - "io_big": False, - "resource_spec": "", - "max_jobs": 45, - "resources": {"q_short_gpu": 1}, - }, - "q_gpu": { - "queue": "q_gpu", - "memory": "30GB", - "io_big": False, - "resource_spec": "", - "resources": {"q_gpu": 1}, - }, - "q_long_gpu": { - "queue": "q_long_gpu", - "memory": "30GB", - "io_big": False, - "resource_spec": "", - "resources": {"q_long_gpu": 1}, - }, -} -""" -This queue setup has a light arrangement. 
-For CPU jobs, it prioritizes all.q and not io_big -""" diff --git a/setup.py b/setup.py index ce66a0872616a56d059f9365cf557da7195ea35b..b3829f2f81c51c831dcf7ddf12f4654f8b9f7093 100644 --- a/setup.py +++ b/setup.py @@ -14,6 +14,7 @@ install_requires = load_requirements() setup( + # This is the basic information about the project. name="bob.pipelines", version=open("version.txt").read().rstrip(), description="Tools to build robust and extensible pipelines", @@ -27,13 +28,29 @@ setup( # maintainer_email='email@example.com' # you may add more keywords separating those by commas (a, b, c, ...) keywords="bob", + + # If you have a better, long description of your package, place it on the + # 'doc' directory and then hook it here long_description=open("README.rst").read(), - # leave this here, it is pretty standard + + # This line is required for any distutils based packaging. packages=find_packages(), include_package_data=True, zip_safe=False, + + # Packages that should be installed when you "install" this package. install_requires=install_requires, - # check classifiers, add and remove as you see fit + + # entry_points registers named plugins (here, dask client configurations) + entry_points = { + 'dask.client': [ + 'local-parallel = bob.pipelines.config.distributed.local_parallel', + 'sge = bob.pipelines.config.distributed.sge_default', + 'sge-light = bob.pipelines.config.distributed.sge_light', + ], + }, + + # check classifiers (important for PyPI), add and remove as you see fit. # full list here: https://pypi.org/classifiers/ # don't remove the Bob framework unless it's not a bob package classifiers=[