#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Andre Anjos <andre.anjos@idiap.ch>
# Wed 24 Aug 2011 16:13:31 CEST

"""A logging Idiap/SGE job manager
"""

__epilog__ = """ For a list of available commands:
  >>> %(prog)s --help

  For a list of options for a particular command:
  >>> %(prog)s <command> --help
"""

import os
import sys

import argparse
import logging
import string

from ..tools import make_shell, logger
from .. import local, sge
from ..models import Status

def setup(args):
  """Returns the JobManager and sets up the basic infrastructure"""

  kwargs = {'wrapper_script' : args.wrapper_script, 'debug' : args.verbose==3, 'database' : args.database}
  if args.local:
    jm = local.JobManagerLocal(**kwargs)
  else:
    jm = sge.JobManagerSGE(**kwargs)

  # set-up logging
  if args.verbose not in range(0,4):
    raise ValueError("The verbosity level %d does not exist. Please reduce the number of '--verbose' parameters in your call to a maximum of 3" % args.verbose)

  # set up the verbosity level of the logging system
  log_level = {
      0: logging.ERROR,
      1: logging.WARNING,
      2: logging.INFO,
      3: logging.DEBUG
    }[args.verbose]

  handler = logging.StreamHandler()
  handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s"))
  logger.addHandler(handler)
  logger.setLevel(log_level)

  return jm

def get_array(array):
  """Parses an array specification of the form '(first-)last(:step)' into a (first, last, step) tuple."""
  if array is None:
    return None
  start = array.find('-')
  if start == -1:
    a = 1
    b = int(array)
    c = 1
  else:
    a = int(array[0:start])
    step = array.find(':')
    if step == -1:
      b = int(array[start+1:])
      c = 1
    else:
      b = int(array[start+1:step])
      c = int(array[step+1:])

  return (a,b,c)
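
# For illustration only (not executed): the '(first-)last(:step)' specification accepted
# by 'submit --array' expands as follows with the parser above:
#   get_array('10')     -> (1, 10, 1)   # only 'last' given; 'first' and 'step' default to 1
#   get_array('2-10')   -> (2, 10, 1)   # 'first-last'
#   get_array('2-10:2') -> (2, 10, 2)   # 'first-last:step'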


def get_ids(jobs):
  """Returns a flat list of job ids, expanding ranges such as '3-6' that were given on the command line."""
  if jobs is None:
    return None
  indexes = []
  for job in jobs:
    # check if a range is specified
    separator = job.find('-')
    if separator == -1:
      index = int(job)
      indexes.append(index)
    else:
      first = int(job[0:separator])
      last = int(job[separator+1:])
      indexes.extend(range(first, last+1))
  return indexes
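
# For illustration only (not executed): job id lists given on the command line may mix
# single ids and ranges; get_ids() flattens them into plain integers, e.g.:
#   get_ids(['1', '3-5']) -> [1, 3, 4, 5]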


def submit(args):
  """Submission command"""

  # set full path to command
  if args.job[0] == '--':
    del args.job[0]
  if not os.path.isabs(args.job[0]):
    args.job[0] = os.path.abspath(args.job[0])

  jm = setup(args)
  kwargs = {
      'queue': args.qname,
      'cwd': True,
      'name': args.name,
      'env': args.env,
      'memfree': args.memory,
      'hvmem': args.memory,
      'io_big': args.io_big,
  }

  if args.array is not None:         kwargs['array'] = get_array(args.array)
  if args.log_dir is not None:       kwargs['log_dir'] = args.log_dir
  if args.dependencies is not None:  kwargs['dependencies'] = args.dependencies
  if args.parallel is not None:
    kwargs['pe_opt'] = "pe_mth %d" % args.parallel
    if args.memory is not None:
      kwargs['memfree'] = "%d%s" % (int(args.memory.rstrip(string.ascii_letters)) * args.parallel, args.memory.lstrip(string.digits))
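    # For illustration only: with the scaling above, '-m 8G -p 2' yields memfree == '16G',
    # i.e. the per-slot memory request multiplied by the number of requested slots.
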
  kwargs['dry_run'] = args.dry_run
  kwargs['stop_on_failure'] = args.stop_on_failure

  # submit the job
  job_id = jm.submit(args.job, **kwargs)


def resubmit(args):
  """Re-submits the jobs with the given ids."""
  jm = setup(args)
  if not args.keep_logs:
    jm.delete(job_ids=get_ids(args.job_ids), delete_jobs=False)

  kwargs = {
      'cwd': True
  }
  if args.qname is not None:
    kwargs['queue'] = args.qname
  if args.memory is not None:
    kwargs['memfree'] = args.memory
    kwargs['hvmem'] = args.memory
  if args.parallel is not None:
    kwargs['pe_opt'] = "pe_mth %d" % args.parallel
    if args.memory is not None:
      kwargs['memfree'] = "%d%s" % (int(args.memory.rstrip(string.ascii_letters)) * args.parallel, args.memory.lstrip(string.digits))
  if args.io_big:
    kwargs['io_big'] = True
  if args.no_io_big:
    kwargs['io_big'] = False


  jm.resubmit(get_ids(args.job_ids), args.also_success, args.running_jobs, **kwargs)


def run_scheduler(args):
  """Runs the scheduler on the local machine. To stop it, please use Ctrl-C."""
  if not args.local:
    raise ValueError("The execute command can only be used with the '--local' command line option")
  jm = setup(args)
  jm.run_scheduler(parallel_jobs=args.parallel, job_ids=get_ids(args.job_ids), sleep_time=args.sleep_time, die_when_finished=args.die_when_finished, no_log=args.no_log_files, nice=args.nice)


def list(args):
  """Lists the jobs in the given database."""
  jm = setup(args)
  jm.list(job_ids=get_ids(args.job_ids), print_array_jobs=args.print_array_jobs, print_dependencies=args.print_dependencies, status=args.status, long=args.verbose > 1 or args.long, ids_only=args.ids_only)


def communicate(args):
  """Uses qstat to get the status of the requested jobs."""
  if args.local:
    raise ValueError("The communicate command can only be used without the '--local' command line option")
  jm = setup(args)
  jm.communicate(job_ids=get_ids(args.job_ids))


def report(args):
  """Reports the results of the finished (and unfinished) jobs."""
  jm = setup(args)
  jm.report(job_ids=get_ids(args.job_ids), array_ids=get_ids(args.array_ids), unfinished=args.unfinished_also, output=not args.errors_only, error=not args.output_only)


def stop(args):
  """Stops (qdel's) the jobs with the given ids."""
  if args.local:
    raise ValueError("Stopping commands locally is not supported (please kill them yourself)")
  jm = setup(args)
  jm.stop_jobs(get_ids(args.job_ids))


def delete(args):
  """Deletes the jobs from the job manager. If the jobs are still running in the grid, they are stopped."""
  jm = setup(args)
  # first, stop the jobs if they are running in the grid
  if not args.local and 'executing' in args.status:
    stop(args)
  # then, delete them from the database
  jm.delete(job_ids=get_ids(args.job_ids), array_ids=get_ids(args.array_ids), delete_logs=not args.keep_logs, delete_log_dir=not args.keep_log_dir, status=args.status)


def run_job(args):
  """Starts the wrapper script to execute a job, interpreting the JOB_ID and SGE_TASK_ID environment variables that are set by the grid or by us."""
  jm = setup(args)
  job_id = int(os.environ['JOB_ID'])
  array_id = int(os.environ['SGE_TASK_ID']) if os.environ['SGE_TASK_ID'] != 'undefined' else None
  jm.run_job(job_id, array_id)
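
# For illustration only (not executed): when SGE runs task 3 of an array job, the wrapper
# sees e.g. JOB_ID='4567' (made-up example id) and SGE_TASK_ID='3'; for non-array jobs SGE
# sets SGE_TASK_ID to the literal string 'undefined', which run_job() maps to array_id = None.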


class AliasedSubParsersAction(argparse._SubParsersAction):
  """Hack taken from https://gist.github.com/471779 to allow aliases in
  argparse for Python 2.x (this has been implemented in Python 3.2)
  """

  class _AliasedPseudoAction(argparse.Action):
    def __init__(self, name, aliases, help):
      dest = name
      if aliases:
        dest += ' (%s)' % ','.join(aliases)
      sup = super(AliasedSubParsersAction._AliasedPseudoAction, self)
      sup.__init__(option_strings=[], dest=dest, help=help)

  def add_parser(self, name, **kwargs):
    if 'aliases' in kwargs:
      aliases = kwargs['aliases']
      del kwargs['aliases']
    else:
      aliases = []

    parser = super(AliasedSubParsersAction, self).add_parser(name, **kwargs)

    # Make the aliases work.
    for alias in aliases:
      self._name_parser_map[alias] = parser
    # Make the help text reflect them, first removing old help entry.
    if 'help' in kwargs:
      help = kwargs.pop('help')
      self._choices_actions.pop()
      pseudo_action = self._AliasedPseudoAction(name, aliases, help)
      self._choices_actions.append(pseudo_action)

    return parser
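
# For illustration only (not executed): once this action is registered via
# parser.register('action', 'parsers', AliasedSubParsersAction), as done in main() below,
# sub-commands created with add_parser('submit', aliases=['sub'], ...) can also be invoked
# by their alias, e.g. 'jman sub ...' behaves like 'jman submit ...'.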


def main(command_line_options = None):

  from ..config import __version__

  formatter = argparse.ArgumentDefaultsHelpFormatter
  parser = argparse.ArgumentParser(description=__doc__, epilog=__epilog__,
      formatter_class=formatter)
  # part of the hack to support aliases in subparsers
  parser.register('action', 'parsers', AliasedSubParsersAction)

  # general options
  parser.add_argument('-v', '--verbose', action = 'count', default = 0,
      help = "Increase the verbosity level from 0 (only error messages) to 1 (warnings), 2 (log messages), 3 (debug information) by adding the --verbose option as often as desired (e.g. '-vvv' for debug).")
  parser.add_argument('-V', '--version', action='version',
      version='GridTk version %s' % __version__)
  parser.add_argument('-d', '--database', '--db', metavar='DATABASE', default = 'submitted.sql3',
      help='Replace the default database "submitted.sql3" with one provided by you.')

  parser.add_argument('-l', '--local', action='store_true',
        help = 'Uses the local job manager instead of the SGE one.')
  cmdparser = parser.add_subparsers(title='commands', help='commands accepted by %(prog)s')

  # subcommand 'submit'
  submit_parser = cmdparser.add_parser('submit', aliases=['sub'], formatter_class=formatter, help='Submits jobs to the SGE queue or to the local job scheduler and logs them in a database.')
  submit_parser.add_argument('-q', '--queue', metavar='QNAME', dest='qname', default='all.q', choices=('all.q', 'q1d', 'q1w', 'q1m', 'q1dm', 'q1wm'), help='the name of the SGE queue to submit the job to')
  submit_parser.add_argument('-m', '--memory', help='Sets both the h_vmem and the mem_free parameters when submitting the job to the specified value, e.g. 8G to set the memory requirements to 8 gigabytes')
  submit_parser.add_argument('-p', '--parallel', '--pe_mth', type=int, help='Sets the number of slots per job (-pe pe_mth) and multiplies the mem_free parameter. E.g. to get 16 G of memory, use -m 8G -p 2.')
  submit_parser.add_argument('-n', '--name', dest='name', help='Gives the job a name')
  submit_parser.add_argument('-x', '--dependencies', type=int, default=[], metavar='ID', nargs='*', help='Set job dependencies to the list of job identifiers separated by spaces')
  submit_parser.add_argument('-k', '--stop-on-failure', action='store_true', help='Stop dependent jobs when this job finishes with an error.')
  submit_parser.add_argument('-l', '--log-dir', metavar='DIR', help='Sets the log directory. By default, "logs" is selected for the SGE. If the jobs are executed locally, the output is written to the console by default.')
  submit_parser.add_argument('-s', '--environment', metavar='KEY=VALUE', dest='env', nargs='*', default=[], help='Passes specific environment variables to the job.')
  submit_parser.add_argument('-t', '--array', '--parametric', metavar='(first-)last(:step)', help="Creates a parametric (array) job. You must specify the 'last' value, but 'first' (default=1) and 'step' (default=1) can be specified as well (when specifying 'step', 'first' has to be given, too).")
  submit_parser.add_argument('-z', '--dry-run', action='store_true', help='Do not really submit anything, just print out what would be submitted in this case')
  submit_parser.add_argument('-i', '--io-big', action='store_true', help='Sets "io_big" on the submitted jobs, limiting the machines the job may run on to those that support high-throughput I/O.')
  submit_parser.add_argument('job', metavar='command', nargs=argparse.REMAINDER, help = "The job that should be executed. Sometimes a -- is required to separate the job from other command line options.")
  submit_parser.set_defaults(func=submit)

  # subcommand 're-submit'
  resubmit_parser = cmdparser.add_parser('resubmit', aliases=['reset', 'requeue', 're'], formatter_class=formatter, help='Re-submits a list of jobs.')
  resubmit_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Re-submit only the jobs with the given ids (by default, all finished jobs are re-submitted).')
  resubmit_parser.add_argument('-q', '--queue', metavar='QNAME', dest='qname', choices=('all.q', 'q1d', 'q1w', 'q1m', 'q1dm', 'q1wm'), help='Reset the SGE queue to submit the job to')
  resubmit_parser.add_argument('-m', '--memory', help='Resets both the h_vmem and the mem_free parameters when submitting the job to the specified value, e.g. 8G to set the memory requirements to 8 gigabytes')
  resubmit_parser.add_argument('-p', '--parallel', '--pe_mth', type=int, help='Resets the number of slots per job (-pe pe_mth) and multiplies the mem_free parameter. E.g. to get 16 G of memory, use -m 8G -p 2.')
  resubmit_parser.add_argument('-i', '--io-big', action='store_true', help='Resubmits the job to the "io_big" queue.')
  resubmit_parser.add_argument('-I', '--no-io-big', action='store_true', help='Resubmits the job NOT to the "io_big" queue.')
  resubmit_parser.add_argument('-k', '--keep-logs', action='store_true', help='Do not clean the log files of the old job before re-submitting.')
  resubmit_parser.add_argument('-s', '--also-success', action='store_true', help='Also re-submit jobs that have finished successfully.')
  resubmit_parser.add_argument('-a', '--running-jobs', action='store_true', help='Re-submit even jobs that are running or waiting (use this flag with care).')
  resubmit_parser.set_defaults(func=resubmit)

  # subcommand 'stop'
  stop_parser = cmdparser.add_parser('stop', formatter_class=formatter, help='Stops the execution of jobs in the grid.')
  stop_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Stop only the jobs with the given ids (by default, all jobs are stopped).')
  stop_parser.set_defaults(func=stop)

  # subcommand 'list'
  list_parser = cmdparser.add_parser('list', aliases=['ls'], formatter_class=formatter,  help='Lists jobs stored in the database. Use the -vv option to get a long listing.')
  list_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='List only the jobs with the given ids (by default, all jobs are listed)')
  list_parser.add_argument('-a', '--print-array-jobs', action='store_true', help='Also list the array ids.')
  list_parser.add_argument('-l', '--long', action='store_true', help='Prints additional information about the submitted job.')
  list_parser.add_argument('-x', '--print-dependencies', action='store_true', help='Print the dependencies of the jobs as well.')
  list_parser.add_argument('-o', '--ids-only', action='store_true', help='Prints ONLY the job ids (so that they can be parsed by automatic scripts).')
  list_parser.add_argument('-s', '--status', nargs='+', choices = Status, default = Status, help='List only jobs that have the given statuses; by default jobs with all statuses are listed.')
  list_parser.set_defaults(func=list)

  # subcommand 'communicate'
  communicate_parser = cmdparser.add_parser('communicate', aliases = ['com'], formatter_class=formatter, help='Communicates with the grid to see if there were unexpected errors (e.g. a timeout) during the job execution.')
  communicate_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Check only the jobs with the given ids (by default, all jobs are checked)')
  communicate_parser.set_defaults(func=communicate)


  # subcommand 'report'
  report_parser = cmdparser.add_parser('report', aliases=['rep', 'r', 'explain', 'why'], formatter_class=formatter, help='Iterates through the result and error log files and prints out the logs.')
  report_parser.add_argument('-e', '--errors-only', action='store_true', help='Only report the error logs (by default, both logs are reported).')
  report_parser.add_argument('-o', '--output-only', action='store_true', help='Only report the output logs (by default, both logs are reported).')
  report_parser.add_argument('-u', '--unfinished-also', action='store_true', help='Also report unfinished jobs; this option can also be used to check the error files of jobs that finished successfully.')
  report_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Report only the jobs with the given ids (by default, all finished jobs are reported)')
  report_parser.add_argument('-a', '--array-ids', metavar='ID', nargs='+', help='Report only the jobs with the given array ids. If specified, a single job-id must be given as well.')
  report_parser.set_defaults(func=report)

  # subcommand 'delete'
  delete_parser = cmdparser.add_parser('delete', aliases=['del', 'rm', 'remove'], formatter_class=formatter, help='Removes jobs from the database; if jobs are running or are still scheduled in SGE, the jobs are also removed from the SGE queue.')
  delete_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Delete only the jobs with the given ids (by default, all jobs are deleted).')
  delete_parser.add_argument('-a', '--array-ids', metavar='ID', nargs='+', help='Delete only the jobs with the given array ids. If specified, a single job-id must be given as well. Note that the whole job including all array jobs will be removed from the SGE queue.')
  delete_parser.add_argument('-r', '--keep-logs', action='store_true', help='If set, the log files will NOT be removed.')
  delete_parser.add_argument('-R', '--keep-log-dir', action='store_true', help='When removing the logs, keep the log directory.')
  delete_parser.add_argument('-s', '--status', nargs='+', choices = Status, default = Status, help='Delete only jobs that have the given statuses; by default all jobs are deleted.')
  delete_parser.set_defaults(func=delete)

  # subcommand 'run_scheduler'
  scheduler_parser = cmdparser.add_parser('run-scheduler', aliases=['sched', 'x'], formatter_class=formatter, help='Runs the scheduler on the local machine. To stop the scheduler safely, please use Ctrl-C; only valid in combination with the \'--local\' option.')
  scheduler_parser.add_argument('-p', '--parallel', type=int, default=1, help='Select the number of parallel jobs that you want to execute locally')
  scheduler_parser.add_argument('-j', '--job-ids', metavar='ID', nargs='+', help='Select the job ids that should be run (by default, all submitted and queued jobs are run).')
  scheduler_parser.add_argument('-s', '--sleep-time', type=float, default=0.1, help='Set the sleep time between scheduler iterations, in seconds.')
  scheduler_parser.add_argument('-x', '--die-when-finished', action='store_true', help='Let the job manager die when it has finished all jobs of the database.')
  scheduler_parser.add_argument('-l', '--no-log-files', action='store_true', help='Overrides the log file setup and prints the results to the console.')
  scheduler_parser.add_argument('-n', '--nice', type=int, help='Jobs will be run with the given priority (can only be positive, i.e., to have lower priority).')
  scheduler_parser.set_defaults(func=run_scheduler)


  # subcommand 'run-job'; this should not be seen on the command line since it is actually a wrapper script
  run_parser = cmdparser.add_parser('run-job', help=argparse.SUPPRESS)
  run_parser.set_defaults(func=run_job)


  if command_line_options:
    args = parser.parse_args(command_line_options[1:])
    args.wrapper_script = command_line_options[0]
  else:
    args = parser.parse_args()
    args.wrapper_script = sys.argv[0]
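
  # For illustration only: when main() is called programmatically, the first entry of
  # command_line_options is taken as the wrapper script and the remainder is parsed as
  # arguments, e.g. main(['/path/to/jman', 'submit', '--', 'myscript.sh']); the path and
  # script name are made-up example values.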

  args.func(args)

  return 0