Commit ce9f5391 authored by Samuel GAIST

[cache] Pre-commit cleanup

parent b48fe6ea
@@ -53,16 +53,16 @@ logger = logging.getLogger(__name__)
 def get_paths(config):
-    func = lambda z: z.split('.', 1)[0]
+    def func(z):
+        return z.split(".", 1)[0]
     retval = []
     for dirname, _, files in os.walk(config.cache):
-        files = fnmatch.filter(files, '*.data') #avoid index-only files
+        files = fnmatch.filter(files, "*.data")  # avoid index-only files
         if not files:
             continue
-        d = dirname.replace(config.cache, '').strip(os.sep)
+        d = dirname.replace(config.cache, "").strip(os.sep)
         retval += list(set([os.path.join(d, func(k)) for k in files]))
     return retval
@@ -70,10 +70,16 @@ def get_paths(config)
 @click.group(cls=AliasedGroup)
 @click.pass_context
-@click.option('--start', type=click.INT, help='If set, allows the user to '
-              'print only a few bits of the file')
-@click.option('--end', type=click.INT, help='If set, allows the user to '
-              'print only a few bits of the file')
+@click.option(
+    "--start",
+    type=click.INT,
+    help="If set, allows the user to " "print only a few bits of the file",
+)
+@click.option(
+    "--end",
+    type=click.INT,
+    help="If set, allows the user to " "print only a few bits of the file",
+)
 def cache(ctx, start, end):
     """Configuration manipulation and display"""
     pass
@@ -82,117 +88,127 @@ def cache(ctx, start, end):
 @cache.command()
 @click.pass_context
 def clear(ctx):
-    '''Deletes all available cache
+    """Deletes all available cache
     To clear all available cache:
       $ %(prog)s cache clear
-    '''
+    """
     import shutil
-    if os.path.isdir(ctx.meta['config'].cache):
-        for k in os.listdir(ctx.meta['config'].cache):
-            p = os.path.join(ctx.meta['config'].cache, k)
+    if os.path.isdir(ctx.meta["config"].cache):
+        for k in os.listdir(ctx.meta["config"].cache):
+            p = os.path.join(ctx.meta["config"].cache, k)
             shutil.rmtree(p)
 @cache.command()
-@click.argument('paths', nargs=-1, type=click.Path(exists=True))
+@click.argument("paths", nargs=-1, type=click.Path(exists=True))
 @click.pass_context
-@click.option('--sizes', help='If set, also print the size in bytes for '
-              'objects in a file. This triggers the full file readout',
-              is_flag=True)
+@click.option(
+    "--sizes",
+    help="If set, also print the size in bytes for "
+    "objects in a file. This triggers the full file readout",
+    is_flag=True,
+)
 def info(ctx, paths, sizes):
-    '''Displays information about a particular cache file
+    """Displays information about a particular cache file
     To collect information about a particular cache file:
       $ %(prog)s cache info 7f/d8/8d/a11178ac27075feaba8131fe878d6e3...
-    '''
-    config = ctx.meta['config']
-    index_start = int(ctx.meta['start']) if 'start' in ctx.meta else None
-    index_end = int(ctx.meta['end']) if 'end' in ctx.meta else None
+    """
+    config = ctx.meta["config"]
+    index_start = int(ctx.meta["start"]) if "start" in ctx.meta else None
+    index_end = int(ctx.meta["end"]) if "end" in ctx.meta else None
     if not paths:
         paths = get_paths(config)
     for path in paths:
-        logger.info('path: %s', path)
-        fullpath = os.path.join(config.cache, path + '.data')
+        logger.info("path: %s", path)
+        fullpath = os.path.join(config.cache, path + ".data")
         f = CachedDataSource()
         status = f.setup(fullpath, config.path, index_start, index_end)
         if not status:
-            logger.error("cannot setup data source with `%s' and prefix `%s'",
-                         fullpath, config.path)
+            logger.error(
+                "cannot setup data source with `%s' and prefix `%s'",
+                fullpath,
+                config.path,
+            )
             return 1
-        logger.info(' dataformat: %s', f.dataformat.name)
+        logger.info(" dataformat: %s", f.dataformat.name)
         if sizes:
             counter = 0
-            logger.info(' index:')
+            logger.info(" index:")
             for data, start, end in f:
                 size = len(data.pack())
                 counter += size
                 if start == end:
-                    logger.info(' [%d] - %d bytes', start, size)
+                    logger.info(" [%d] - %d bytes", start, size)
                 else:
-                    logger.info(' [%d:%d] - %d bytes', start, end, size)
-            logger.info(' total (stripped-down) size: %d bytes', counter)
+                    logger.info(" [%d:%d] - %d bytes", start, end, size)
+            logger.info(" total (stripped-down) size: %d bytes", counter)
         else:
-            index = load_data_index(config.cache, path + '.data')
-            logger.info(' objects : %d', len(index)-1)
+            index = load_data_index(config.cache, path + ".data")
+            logger.info(" objects : %d", len(index) - 1)
 @cache.command()
-@click.argument('paths', nargs=-1)
+@click.argument("paths", nargs=-1)
 @click.pass_context
 @raise_on_error
 def view(ctx, paths):
-    '''Displays information about a particular cache file
+    """Displays information about a particular cache file
     To view a particular cache file:
       $ %(prog)s cache view 7f/d8/8d/a11178ac27075feaba8131fe878d6e3...
-    '''
-    config = ctx.meta['config']
-    index_start = int(ctx.meta['start']) if 'start' in ctx.meta else None
-    index_end = int(ctx.meta['end']) if 'end' in ctx.meta else None
+    """
+    config = ctx.meta["config"]
+    index_start = int(ctx.meta["start"]) if "start" in ctx.meta else None
+    index_end = int(ctx.meta["end"]) if "end" in ctx.meta else None
     if not paths:
         paths = get_paths(config)
     for path in paths:
-        logger.info('path: %s', path)
-        fullpath = os.path.join(config.cache, path + '.data')
+        logger.info("path: %s", path)
+        fullpath = os.path.join(config.cache, path + ".data")
         f = CachedDataSource()
         status = f.setup(fullpath, config.path, index_start, index_end)
         if not status:
-            logger.error("cannot setup data source with `%s' and prefix `%s'",
-                         fullpath, config.path)
+            logger.error(
+                "cannot setup data source with `%s' and prefix `%s'",
+                fullpath,
+                config.path,
+            )
             return 1
-        logger.info(' dataformat: %s', f.dataformat.name)
+        logger.info(" dataformat: %s", f.dataformat.name)
         for data, start, end in f:
-            logger.extra(80 * '-')
+            logger.extra(80 * "-")
             if start == end:
-                header = '[%d]: ' % start
+                header = "[%d]: " % start
             else:
-                header = '[%d:%d]: ' % (start, end)
+                header = "[%d:%d]: " % (start, end)
             json_data = data.as_dict()
            for name, value in json_data.items():
                json_data[name] = common.stringify(value)
-            json_data = simplejson.dumps(
-                json_data, indent=2,
-                cls=NumpyJSONEncoder).\
-                    replace('"BEAT_LIST_DELIMITER[', '[')\
-                    .replace(']BEAT_LIST_DELIMITER"', ']')\
-                    .replace('"...",', '...')\
-                    .replace('"BEAT_LIST_SIZE(', '(')\
-                    .replace(')BEAT_LIST_SIZE"', ')')
+            json_data = (
+                simplejson.dumps(json_data, indent=2, cls=NumpyJSONEncoder)
+                .replace('"BEAT_LIST_DELIMITER[', "[")
+                .replace(']BEAT_LIST_DELIMITER"', "]")
+                .replace('"...",', "...")
+                .replace('"BEAT_LIST_SIZE(', "(")
+                .replace(')BEAT_LIST_SIZE"', ")")
+            )
             logger.info(header + json_data)
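
For reference, the get_paths helper reworked at the top of this diff walks the cache tree, keeps only *.data files, and returns the unique relative path stems (the part before the first dot). Below is a minimal standalone sketch of that same traversal; the cache_root argument and the example path are hypothetical stand-ins for config.cache and are not part of this commit:

import fnmatch
import os


def list_cache_stems(cache_root):
    """Collect the unique relative path stems of every '*.data' file below cache_root.

    Mirrors the traversal done by get_paths() in the diff above; cache_root is
    an assumed stand-in for config.cache in this sketch.
    """
    stems = []
    for dirname, _, files in os.walk(cache_root):
        data_files = fnmatch.filter(files, "*.data")  # skip index-only files
        if not data_files:
            continue
        relative = dirname.replace(cache_root, "").strip(os.sep)
        # 'foo.0.data' and 'foo.1.data' collapse to the same stem 'foo'
        stems += list({os.path.join(relative, name.split(".", 1)[0]) for name in data_files})
    return stems


if __name__ == "__main__":
    # Example invocation; replace the argument with an actual cache directory.
    print(list_cache_stems(os.path.expanduser("~/.beat/cache")))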