Commit 6b930ac9 authored by André Anjos

Merge branch 'py3_compatibility' into '1.6.x'

Py3 compatibility

See merge request !18
parents 134601e5 b3ebf52b
Pipeline #19333 failed with stage in 15 minutes and 37 seconds
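
This merge ports the codebase to run under both Python 2 and Python 3. The recurring fixes in the diff below are: dictionary membership tests replace the removed dict.has_key(); the lazy views returned by filter(), keys(), values() and items() are wrapped in list() before indexing; "except Exception, e" becomes "except Exception as e"; print statements become print() calls; the Python-2-only Queue module gains an import fallback; and text/bytes boundaries (JSON written to binary files, ZMQ identities, PDF magic numbers) are made explicit. A minimal sketch of the most common idioms, runnable on either interpreter (the dictionary and file name are illustrative only):

    import simplejson

    d = {'outputs': {'out_data': 1}}

    # dict.has_key() is gone in Python 3; 'in' works on both interpreters
    if 'outputs' in d:
        # keys()/values()/items() return lazy views on Python 3,
        # so materialize them before indexing
        first = list(d['outputs'].keys())[0]

    # a file opened in binary mode expects bytes on Python 3,
    # so encode the serialized JSON explicitly
    with open('configuration.json', 'wb') as f:
        f.write(simplejson.dumps(d, indent=2).encode('utf-8'))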
@@ -115,10 +115,10 @@ class Host(object):
     def full_environment_name(self, name):
         try:
-            return filter(lambda x: x.startswith(name + ' ('), self.processing_environments.keys())[0]
+            return list(filter(lambda x: x.startswith(name + ' ('), self.processing_environments.keys()))[0]
         except:
             try:
-                return filter(lambda x: x.startswith(name + ' ('), self.db_environments.keys())[0]
+                return list(filter(lambda x: x.startswith(name + ' ('), self.db_environments.keys()))[0]
             except:
                 return None
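
Wrapping the filter in list() restores Python 2 indexing semantics: on Python 3, filter() returns a lazy iterator that does not support [0]. An equivalent that avoids materializing the whole list and the bare except (an alternative sketch, not what this merge uses; the names are illustrative):

    name = 'env'
    environments = {'env (1.0.0)': None}

    # next() with a default returns the first match or None without indexing
    match = next(
        (x for x in environments.keys() if x.startswith(name + ' (')), None)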
@@ -151,7 +151,7 @@ class Host(object):
     def _describe(image):
         '''Tries to run the "describe" app on the image, collect results'''

-        if Host.images_cache.has_key(image):
+        if image in Host.images_cache:
             return Host.images_cache[image]

         cmd = [
@@ -275,7 +275,7 @@ class Host(object):
             key = description['name'] + ' (' + description['version'] + ')'

-            if description.has_key('databases'):
+            if 'databases' in description:
                 if (key in db_environments) and not _must_replace(image, db_environments, key):
                     continue
@@ -327,6 +327,7 @@ class Host(object):
         place.
         """

         cmd = [
+            'docker',
             'run',
@@ -462,7 +463,7 @@ class Host(object):
         data = client.stats(container.id, decode=True, stream=False)

         # If CPU statistics can't be retrieved
-        if not data['cpu_stats'].has_key('system_cpu_usage'):
+        if 'system_cpu_usage' not in data['cpu_stats']:
             data['cpu_stats'] = dict(data['precpu_stats'])

         # If memory statistics can't be retrieved
@@ -522,7 +523,7 @@ class Host(object):
             status = self.wait(container)
             output = self.logs(container)
-        except Exception, e:
+        except Exception as e:
             return 1, None
         finally:
......
@@ -187,14 +187,14 @@ class BaseExecutor(object):
         if len(self.data['inputs']) != len(self.algorithm.input_map):
             self.errors.append("The number of inputs of the algorithm doesn't correspond")

-        if self.data.has_key('outputs') and (len(self.data['outputs']) != len(self.algorithm.output_map)):
+        if 'outputs' in self.data and (len(self.data['outputs']) != len(self.algorithm.output_map)):
             self.errors.append("The number of outputs of the algorithm doesn't correspond")

         for name in self.data['inputs'].keys():
             if name not in self.algorithm.input_map.keys():
                 self.errors.append("The input '%s' doesn't exist in the algorithm" % name)

-        if self.data.has_key('outputs'):
+        if 'outputs' in self.data:
             for name in self.data['outputs'].keys():
                 if name not in self.algorithm.output_map.keys():
                     self.errors.append("The output '%s' doesn't exist in the algorithm" % name)
@@ -361,7 +361,8 @@ class BaseExecutor(object):
         data = convert_experiment_configuration_to_container(self.data)
         with open(os.path.join(directory, 'configuration.json'), 'wb') as f:
-            simplejson.dump(data, f, indent=2)
+            json_data = simplejson.dumps(data, indent=2)
+            f.write(json_data.encode('utf-8'))

         tmp_prefix = os.path.join(directory, 'prefix')
         if not os.path.exists(tmp_prefix):
@@ -374,7 +375,8 @@ class BaseExecutor(object):
         """Exports contents useful for a backend runner to run the algorithm"""

         with open(os.path.join(directory, 'configuration.json'), 'wb') as f:
-            simplejson.dump(self.data, f, indent=2)
+            json_data = simplejson.dumps(self.data, indent=2)
+            f.write(json_data.encode('utf-8'))

         tmp_prefix = os.path.join(directory, 'prefix')
         if not os.path.exists(tmp_prefix): os.makedirs(tmp_prefix)
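
The dump-to-dumps change is needed because these configuration files are opened in binary mode: on Python 3, simplejson.dump() writes str, which a 'wb' file rejects, so the document is serialized to text first and encoded to UTF-8. An equivalent alternative (a sketch, not what this merge uses) keeps text mode throughout with io.open:

    import io
    import simplejson

    data = {'inputs': {}}

    # io.open with an encoding accepts text on Python 2 and 3 alike;
    # the default JSON output is pure ASCII, so the coercion is safe
    with io.open('configuration.json', 'w', encoding='utf-8') as f:
        f.write(u'%s' % simplejson.dumps(data, indent=2))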
......
@@ -227,7 +227,7 @@ class DockerExecutor(RemoteExecutor):
         database_paths = {}

-        if not self.data.has_key('datasets_root_path'):
+        if 'datasets_root_path' not in self.data:
             for db_name in self.databases.keys():
                 json_path = os.path.join(root_folder, db_name + '.json')
@@ -269,7 +269,7 @@ class DockerExecutor(RemoteExecutor):
         databases_container.add_volume(databases_configuration_path, '/beat/prefix')
         databases_container.add_volume(self.cache, '/beat/cache')

-        if not self.data.has_key('datasets_root_path'):
+        if 'datasets_root_path' not in self.data:
             for db_name, db_path in database_paths.items():
                 databases_container.add_volume(db_path, os.path.join('/databases', db_name))
         else:
......
@@ -85,7 +85,8 @@ def load_schema(schema_name, version=1):
                          os.path.join(schema_name, '%d.json' % version))

     with open(fname, 'rb') as f:
-        schema = simplejson.load(f)
+        data = f.read().decode("utf-8")
+        schema = simplejson.loads(data)

     basedir = os.path.realpath(os.path.dirname(fname))
     resolver = jsonschema.RefResolver('file://' + basedir + '/', schema)
......
@@ -40,7 +40,7 @@
     "description": {
         "type": "string",
-        "maxLength": "80"
+        "maxLength": 80
     },
     "language": {
......
@@ -13,7 +13,7 @@ import zmq

 prefix = pkg_resources.resource_filename(__name__, '../test/prefix')
-print prefix
+print(prefix)
@@ -25,7 +25,7 @@ class CustomDataSource(DataSource):
         self.dataformat = dataformat
         self.file = open('benchmark.data', 'rb')
         self.unpack = True

     def next(self):
         self.file.seek(0)
         packed = self.file.read()
@@ -40,7 +40,7 @@ class CustomDataSource(DataSource):
         self.current += 1
         return result

     def hasMoreData(self):
         return self.current < self.nb_data_units
@@ -63,7 +63,7 @@ def main():
     data_source = CustomDataSource(10000, dataformat)

-    print 'Nb Data units: %d' % data_source.nb_data_units
+    print('Nb Data units: %d' % data_source.nb_data_units)

     t1 = time.time()
@@ -72,7 +72,7 @@ def main():
     t2 = time.time()

-    print 'Datasource (unpack): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units)
+    print('Datasource (unpack): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units))

     data_source.reset()
@@ -88,7 +88,7 @@ def main():
     t2 = time.time()

-    print 'Datasource (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units)
+    print('Datasource (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units))

     data_source.reset()
@@ -123,7 +123,7 @@ def main():
     t2 = time.time()

-    print 'Input (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units)
+    print('Input (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units))

     data_source.reset()
@@ -169,7 +169,7 @@ def main():
     t2 = time.time()

-    print 'Remote (unpack): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units)
+    print('Remote (unpack): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units))

     data_source.reset()
@@ -186,6 +186,6 @@ def main():
     t2 = time.time()

-    print 'Remote (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units)
+    print('Remote (packed): %.3fs (%.3fms/unit)' % (t2 - t1, (t2 - t1) * 1000.0 / data_source.nb_data_units))

     data_source.reset()
@@ -54,8 +54,12 @@ import zmq
 import signal
 import simplejson
 import multiprocessing
+try:
+    import Queue
+except ImportError:
+    import queue as Queue
+
 import tempfile
-import Queue

 from docopt import docopt
 from socket import gethostname
@@ -134,7 +138,7 @@ def connect_to_scheduler(address, name):
     # Starts our 0MQ server
     context = zmq.Context()
     socket = context.socket(zmq.DEALER)
-    socket.setsockopt(zmq.IDENTITY, name)
+    socket.setsockopt_string(zmq.IDENTITY, name)

     if address.find('://') < 0:
         address = 'tcp://' + address
poller.register(socket, zmq.POLLIN)
# Tell the scheduler we are ready
socket.send('rdy')
socket.send(WorkerController.READY)
# Wait for a response from the scheduler
logger.info("Waiting for the scheduler...")
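
Both changes sit on pyzmq's text/bytes boundary: socket identities and frames are bytes on the wire, so setsockopt_string() encodes the unicode worker name, and the ready frame becomes a shared WorkerController constant rather than a bare 'rdy' literal. A condensed sketch of the portable import fallback and socket setup (the address and worker name are placeholders):

    import zmq

    try:
        import Queue            # Python 2
    except ImportError:
        import queue as Queue   # Python 3 renamed the module

    context = zmq.Context()
    socket = context.socket(zmq.DEALER)
    # setsockopt_string() encodes the text identity to bytes for the wire
    socket.setsockopt_string(zmq.IDENTITY, u'worker1')
    socket.connect('tcp://127.0.0.1:5555')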
@@ -298,7 +302,7 @@ def main(user_input=None):
             execution_process.join()

-            if result.has_key('result'):
+            if 'result' in result:
                 content = simplejson.dumps(result['result'])
                 status = WorkerController.DONE
@@ -313,7 +317,7 @@ def main(user_input=None):
                     execution_process.job_id,
                     content
                 ]
-            elif result.has_key('error'):
+            elif 'error' in result:
                 logger.error(result['error'])
                 message = [
......
@@ -233,7 +233,7 @@ def cpu_statistics(start, end):
     """

-    if not end.has_key('system_cpu_usage'):
+    if 'system_cpu_usage' not in end:
         return {
             'user': 0.0,
             'system': 0.0,
......
@@ -18,12 +18,12 @@
         "description": "The title for this plot"
     },
     "xaxis_multiplier": {
-        "default": "1.0",
+        "default": 1.0,
         "type": "float64",
         "description": "The multiplication factor for the X-axis (horizontal)"
     },
     "yaxis_multiplier": {
-        "default": "1.0",
+        "default": 1.0,
         "type": "float64",
         "description": "The multiplication factor for the Y-axis (vertical)"
     },
......
@@ -68,7 +68,11 @@ class Plotter:
         if not isinstance(self.line_attributes, (list,tuple)):
             self.line_attributes = [self.line_attributes]

-        Z = itertools.izip
+        try:
+            Z = itertools.izip
+        except AttributeError:
+            Z = zip
+
         C = itertools.cycle

         for input, attributes, label in Z(inputs, C(self.line_attributes), C(self.legend)):
@@ -98,7 +102,10 @@ class Plotter:
         if any(self.legend): ax.legend()

         # Returns the image
-        sio = six.StringIO()
+        if six.PY2:
+            sio = six.StringIO()
+        else:
+            sio = six.BytesIO()

         if self.mimetype == 'image/png':
             pyplot.savefig(sio, format='png')
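
itertools.izip disappeared in Python 3, where the built-in zip() is already lazy, hence the AttributeError fallback; likewise, rendered figures are binary payloads and need a bytes buffer under Python 3. Both idioms in a standalone sketch (using io directly, unlike the six-based branch above):

    import io
    import itertools

    try:
        izip = itertools.izip  # Python 2 only
    except AttributeError:
        izip = zip             # Python 3: zip() is already lazy

    # PNG/JPEG/PDF data is bytes, so render into a bytes buffer
    sio = io.BytesIO()
    sio.write(b'%PDF')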
......
@@ -73,8 +73,8 @@ def test_dependencies():
     nose.tools.eq_(len(a.uses), 1)
     nose.tools.eq_(len(a.libraries), 1)
-    nose.tools.eq_(a.uses.keys()[0], 'dep1')
-    nose.tools.eq_(a.uses.values()[0], 'user/dep/1')
+    nose.tools.eq_(list(a.uses.keys())[0], 'dep1')
+    nose.tools.eq_(list(a.uses.values())[0], 'user/dep/1')

     a.uses = {}
     a.uses['mod1'] = dep_name
@@ -84,8 +84,8 @@ def test_dependencies():
     nose.tools.eq_(len(a.uses), 1)
     nose.tools.eq_(len(a.libraries), 1)
-    nose.tools.eq_(a.uses.keys()[0], 'mod1')
-    nose.tools.eq_(a.uses.values()[0], 'user/dep/1')
+    nose.tools.eq_(list(a.uses.keys())[0], 'mod1')
+    nose.tools.eq_(list(a.uses.values())[0], 'user/dep/1')

     a.uses = {}
     a.write()
......
@@ -82,7 +82,7 @@ class AsyncTest(unittest.TestCase):
         try:
             self.host.start(container)
-        except Exception, e:
+        except Exception as e:
             self.assertTrue(str(e).find('Failed to create the container') >= 0)

         self.assertFalse(self.host.containers) # All containers are gone
......
@@ -37,7 +37,6 @@ logger = logging.getLogger(__name__)
 import unittest
 import simplejson
 import multiprocessing
-import Queue
 import tempfile
 import shutil
 import zmq
@@ -135,13 +134,14 @@ class TestDatabasesProvider(unittest.TestCase):
     def start_databases_provider(self, configuration):
         with open(os.path.join(self.working_dir, 'configuration.json'), 'wb') as f:
-            simplejson.dump(configuration, f, indent=4)
+            data = simplejson.dumps(configuration, indent=4)
+            f.write(data.encode('utf-8'))

         working_prefix = os.path.join(self.working_dir, 'prefix')
         if not os.path.exists(working_prefix):
             os.makedirs(working_prefix)

-        input_name, input_cfg = configuration['inputs'].items()[0]
+        input_name, input_cfg = list(configuration['inputs'].items())[0]

         database = Database(prefix, input_cfg['database'])
         database.export(working_prefix)
......
@@ -53,4 +53,5 @@ class EnvironmentTest(unittest.TestCase):
         self.assertTrue(len(package_list) > 0)

         for package in package_list:
-            self.assertListEqual(package.keys(), ['version', 'name'])
+            self.assertListEqual(sorted(list(package.keys())),
+                                 sorted(['version', 'name']))
@@ -143,7 +143,7 @@ class TestExecution(unittest.TestCase):
             return result

         assert result['status'] == 0
-        if result.has_key('statistics'):
+        if 'statistics' in result:
             assert isinstance(result['statistics'], dict)

         if executor.analysis:
@@ -277,7 +277,7 @@ class TestExecution(unittest.TestCase):
     #     import time
     #     start = time.time()
     #     assert self.execute('user/user/double/1/large', [{'out_data': 49489830}]) is None
-    #     print time.time() - start
+    #     print(time.time() - start)

     # For benchmark purposes
     # @slow
@@ -285,7 +285,7 @@ class TestExecution(unittest.TestCase):
     #     import time
     #     start = time.time()
     #     assert self.execute('user/user/double/1/large2', [{'out_data': 21513820}]) is None
-    #     print time.time() - start
+    #     print(time.time() - start)

     #----------------------------------------------------------
......
@@ -34,10 +34,11 @@ from . import prefix
 def doit(filename, error_msg):
     database = Database(prefix, filename)
     assert database.errors
     print(error_msg)

     found = False
     for msg in database.errors:
+        print(msg, error_msg)
         if msg.find(error_msg) != -1:
             found = True
             break
@@ -51,40 +52,40 @@ def test_load_invalid_database():
     doit('invalid/1', 'invalid JSON code')

 def test_load_database_without_protocols_list():
-    doit('missing_protocols/1', "'protocols' is a required property")
+    doit('missing_protocols/1', "%r is a required property" % u'protocols')

 def test_load_database_with_empty_protocols_list():
     doit('empty_protocols/1', "/protocols: [] is too short")

 def test_load_database_with_missing_protocol_name():
-    doit('missing_protocol_name/1', "/protocols/0: 'name' is a required property")
+    doit('missing_protocol_name/1', "/protocols/0: %r is a required property" % u'name')

 def test_load_database_with_mixed_protocol_names():
-    doit('mixed_protocol_names/1', "None is not of type 'string'")
+    doit('mixed_protocol_names/1', "None is not of type %r" % u'string')

 def test_load_database_with_same_protocol_names():
     doit('same_protocol_names/1', "found different protocols with the same name:")

 def test_load_database_with_missing_protocol_sets():
-    doit('missing_protocol_sets/1', "'sets' is a required property")
+    doit('missing_protocol_sets/1', "%r is a required property" % u'sets')

 def test_load_database_with_empty_protocol_sets():
     doit('empty_protocol_sets/1', 'rule: /properties/protocols/items/properties/sets/minItems')

 def test_load_database_with_missing_set_name():
-    doit('missing_set_name/1', "'name' is a required property")
+    doit('missing_set_name/1', "%r is a required property" % u'name')

 def test_load_database_with_mixed_set_names():
-    doit('mixed_set_names/1', "name: None is not of type 'string'")
+    doit('mixed_set_names/1', "name: None is not of type %r" % u'string')

 def test_load_database_with_same_set_names():
     doit('same_set_names/1', "found different sets with the same name")

 def test_load_database_with_missing_set_view():
-    doit('missing_set_view/1', "'view' is a required property")
+    doit('missing_set_view/1', "%r is a required property" % u'view')

 def test_load_database_with_missing_set_outputs_list():
-    doit('missing_set_outputs/1', "'outputs' is a required property")
+    doit('missing_set_outputs/1', "%r is a required property" % u'outputs')

 def test_load_database_with_empty_set_outputs_list():
     doit('empty_set_outputs/1', 'outputs: OrderedDict() does not have enough properties')
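
The %r-based expectations keep these tests portable: jsonschema formats missing property names with repr(), and the repr of a text string differs between interpreters. Building the expected message with the same expression reproduces whichever form the running interpreter emits:

    expected = "%r is a required property" % u'protocols'
    # Python 2: "u'protocols' is a required property"
    # Python 3: "'protocols' is a required property"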
@@ -139,8 +139,8 @@ def test_dependencies():
     nose.tools.eq_(len(l.uses), 1)
     nose.tools.eq_(len(l.libraries), 1)
-    nose.tools.eq_(l.uses.keys()[0], 'dep1')
-    nose.tools.eq_(l.uses.values()[0], 'user/dep/1')
+    nose.tools.eq_(list(l.uses.keys())[0], 'dep1')
+    nose.tools.eq_(list(l.uses.values())[0], 'user/dep/1')

     l.uses = {} #reset
     l.uses['mod1'] = l_dep.name
@@ -152,8 +152,8 @@ def test_dependencies():
     nose.tools.eq_(len(l.uses), 1)
     nose.tools.eq_(len(l.libraries), 1)
-    nose.tools.eq_(l.uses.keys()[0], 'mod1')
-    nose.tools.eq_(l.uses.values()[0], 'user/dep/1')
+    nose.tools.eq_(list(l.uses.keys())[0], 'mod1')
+    nose.tools.eq_(list(l.uses.values())[0], 'user/dep/1')

     l.uses = {} #reset
     l.write() #rewrite
......
@@ -29,6 +29,7 @@
 import imghdr
 import numpy
+import six

 import nose.tools
@@ -97,7 +98,11 @@ def test_plot_jpeg():

 def test_plot_pdf():
     fig = do_plot('application/pdf')
-    assert fig.startswith('%PDF')
+    if six.PY2:
+        assert fig.startswith('%PDF')
+    else:
+        assert fig.startswith(b'%PDF')
     #with open('test.pdf', 'wb') as f: f.write(fig)

 def test_plot_many_lines():
......
@@ -36,7 +36,10 @@ logger = logging.getLogger(__name__)
 import unittest
 import simplejson
 import multiprocessing
-import Queue
+try:
+    import Queue
+except ImportError:
+    import queue as Queue

 from time import time
 from time import sleep
@@ -51,8 +54,8 @@ from . import prefix, tmp_prefix
 #----------------------------------------------------------

-WORKER1 = 'worker1'
-WORKER2 = 'worker2'
+WORKER1 = b'worker1'
+WORKER2 = b'worker2'

 #----------------------------------------------------------
@@ -142,10 +145,10 @@ class ControllerProcess(multiprocessing.Process):
         self.queue.put('STARTED')

         def onWorkerReady(name):
-            self.queue.put('READY ' + name)
+            self.queue.put('READY ' + name.decode('utf-8'))

         def onWorkerGone(name):
-            self.queue.put('GONE ' + name)
+            self.queue.put('GONE ' + name.decode('utf-8'))

         self.controller = WorkerController(
             '127.0.0.1',
args = [
'--prefix=%s' % prefix,
'--cache=%s' % tmp_prefix,
'--name=%s' % name,
'--name=%s' % name.decode('utf-8'),
# '-vv',
self.controller.address if address is None else address,
]
......
@@ -65,7 +65,7 @@ def slow(t):
     @dec.slow
     def test_big(self):
-        print 'Big, slow test'
+        print('Big, slow test')
 """
......