Commit 1a3a5955 authored by Amir MOHAMMADI

Merge branch 'pre_commit_cleanup' into 'master'

Pre commit cleanup

See merge request !77
parents ec4b99bc b8fd9960
Pipeline #40904 passed with stages in 7 minutes and 11 seconds
[flake8]
max-line-length = 80
max-line-length = 88
select = B,C,E,F,W,T4,B9,B950
ignore = E501, W503
ignore = E501, W503, E203
[settings]
line_length=88
order_by_type=true
lines_between_types=1
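These settings track black's conventions: the maximum line length moves from 80 to black's default of 88, E203 (whitespace before ':') joins E501 and W503 on the ignore list because black's output triggers them intentionally, and E501 stays ignored in favour of the more tolerant B950 check selected above. Together with the `-sl` flag passed to the isort hook below, the intended layout is one import per line, with straight imports separated from `from ...` imports by a blank line (`lines_between_types=1`). A hypothetical module laid out this way might look like:

```python
# Illustration only -- hypothetical module; names are not from this repository.
import logging
import os

from collections import namedtuple
from functools import reduce

import simplejson as json
import six

logger = logging.getLogger(__name__)
```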
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/ambv/black
- repo: https://github.com/timothycrosley/isort
rev: 4.3.21-2
hooks:
- id: isort
args: [-sl]
- repo: https://github.com/psf/black
rev: stable
hooks:
- id: black
language_version: python3.6
exclude: beat/core/test/prefix/algorithms/user/syntax_error/1.py|beat/core/test/prefix/databases/invalid/1.py
- id: black
exclude: beat/backend/python/test/prefix/.*/syntax_error
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.0.0
hooks:
- id: check-ast
exclude: beat/backend/python/test/prefix/.*/syntax_error
- id: check-case-conflict
- id: trailing-whitespace
- id: end-of-file-fixer
- id: debug-statements
exclude: beat/core/test/prefix/algorithms/user/syntax_error/1.py|beat/core/test/prefix/databases/invalid/1.py
exclude: beat/backend/python/test/prefix/.*/syntax_error
- id: check-added-large-files
- id: check-docstring-first
- id: flake8
exclude: beat/backend/python/test/prefix/.*/(.*crash|syntax_error)
- id: check-yaml
exclude: conda/meta.yaml
- repo: https://github.com/PyCQA/bandit
rev: 'master' # Update me!
hooks:
- id: bandit
exclude: beat/editor/test|beat/core/test/prefix/algorithms/user/syntax_error/1.py|beat/core/test/prefix/databases/invalid/1.py
exclude: beat/backend/python/test/prefix/.*/syntax_error
- repo: local
hooks:
- id: sphinx-build
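The hook-specific `exclude` entries are Python regular expressions that pre-commit matches against repository paths, which is how the test fixtures containing deliberate syntax errors are kept out of `black`, `check-ast`, `debug-statements`, `flake8` and `bandit`. A quick sanity check of one pattern, with made-up paths:

```python
import re

# Pattern copied from the flake8 hook above; the two paths are examples only.
pattern = re.compile(r"beat/backend/python/test/prefix/.*/(.*crash|syntax_error)")

print(bool(pattern.search(
    "beat/backend/python/test/prefix/algorithms/user/syntax_error/1.py")))  # True
print(bool(pattern.search("beat/backend/python/hash.py")))  # False
```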
@@ -34,4 +34,5 @@
###################################################################################
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
@@ -34,4 +34,5 @@
###################################################################################
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
@@ -31,4 +31,4 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #
# #
###################################################################################
\ No newline at end of file
###################################################################################
@@ -42,20 +42,19 @@ algorithm
Validation for algorithms
"""
import logging
import os
import sys
import logging
import six
import numpy
import simplejson as json
import six
from . import dataformat
from . import library
from . import loader
from . import utils
logger = logging.getLogger(__name__)
This diff is collapsed.
@@ -42,23 +42,24 @@ data
Data I/O classes and functions
"""
import abc
import glob
import logging
import os
import re
import glob
import simplejson as json
import time
import abc
import zmq
import logging
import six
from functools import reduce
from collections import namedtuple
from functools import reduce
import simplejson as json
import six
import zmq
from .hash import hashFileContents
from .dataformat import DataFormat
from .algorithm import Algorithm
from .dataformat import DataFormat
from .exceptions import RemoteException
from .hash import hashFileContents
logger = logging.getLogger(__name__)
@@ -107,8 +108,8 @@ def mixDataIndices(list_of_data_indices):
for index in range(start, end + 1):
done = False
for l in list_of_data_indices:
for indices in l:
for data_indices in list_of_data_indices:
for indices in data_indices:
if indices[1] == index:
result.append((current_start, index))
current_start = index + 1
@@ -46,7 +46,6 @@ import six
from .data import mixDataIndices
# ----------------------------------------------------------
@@ -42,23 +42,22 @@ database
Validation of databases
"""
import itertools
import os
import sys
import six
import simplejson as json
import itertools
import numpy as np
from collections import namedtuple
import numpy as np
import simplejson as json
import six
from . import loader
from . import utils
from .protocoltemplate import ProtocolTemplate
from .dataformat import DataFormat
from .outputs import OutputList
from .exceptions import OutputError
from .outputs import OutputList
from .protocoltemplate import ProtocolTemplate
# ----------------------------------------------------------
@@ -42,17 +42,16 @@ dataformat
Validation and parsing for dataformats
"""
import re
import copy
import re
import six
import numpy
import simplejson as json
import six
from . import utils
from .baseformat import baseformat
# ----------------------------------------------------------
@@ -34,9 +34,16 @@
###################################################################################
from .database import DBExecutor
from .algorithm import AlgorithmExecutor
from .database import DBExecutor
from .loop import LoopExecutor
from .messagehandlers import MessageHandler
from .messagehandlers import LoopMessageHandler
from .messagehandlers import MessageHandler
__all__ = [
"AlgorithmExecutor",
"DBExecutor",
"LoopExecutor",
"LoopMessageHandler",
"MessageHandler",
]
@@ -44,15 +44,15 @@ A class that can setup and execute algorithm blocks on the backend
import logging
import os
import simplejson
import zmq
from .. import stats
from ..algorithm import Algorithm
from ..helpers import AccessMode
from ..helpers import create_inputs_from_configuration
from ..helpers import create_outputs_from_configuration
from ..helpers import AccessMode
from .. import stats
from .loop import LoopChannel
logger = logging.getLogger(__name__)
@@ -43,6 +43,7 @@ Execution utilities
"""
import os
import simplejson
from ..database import Database
@@ -42,21 +42,20 @@ executor
A class that can setup and execute loop algorithm blocks on the backend
"""
import json
import logging
import os
import json
import zmq
from ..algorithm import Algorithm
from ..dataformat import DataFormat
from ..exceptions import RemoteException
from ..helpers import AccessMode
from ..helpers import create_inputs_from_configuration
from ..helpers import create_outputs_from_configuration
from ..helpers import AccessMode
from ..exceptions import RemoteException
from .helpers import make_data_format
logger = logging.getLogger(__name__)
@@ -44,19 +44,16 @@ communication.
"""
import logging
import threading
import zmq
import simplejson
import threading
import zmq
from .. import baseformat
from ..dataformat import DataFormat
from ..exceptions import RemoteException
from .. import baseformat
from .helpers import make_data_format
logger = logging.getLogger(__name__)
@@ -42,13 +42,13 @@ hash
Various functions for hashing platform contributions and others
"""
import hashlib
import simplejson
import collections
import copy
import six
import hashlib
import os
import simplejson
import six
# ----------------------------------------------------------
@@ -59,9 +59,9 @@ def _sha256(s):
"""
if isinstance(s, six.string_types):
try:
s = six.u(s).encode('utf-8')
s = six.u(s).encode("utf-8")
except Exception:
s = s.encode('utf-8')
s = s.encode("utf-8")
return hashlib.sha256(s).hexdigest()
@@ -71,14 +71,14 @@ def _sha256(s):
def _stringify(dictionary):
names = sorted(dictionary.keys())
converted_dictionary = '{'
converted_dictionary = "{"
for name in names:
converted_dictionary += '"%s":%s,' % (name, str(dictionary[name]))
if len(converted_dictionary) > 1:
converted_dictionary = converted_dictionary[:-1]
converted_dictionary += '}'
converted_dictionary += "}"
return converted_dictionary
@@ -87,13 +87,13 @@ def _stringify(dictionary):
def _compact(text):
return text.replace(' ', '').replace('\n', '')
return text.replace(" ", "").replace("\n", "")
# ----------------------------------------------------------
def toPath(hash, suffix='.data'):
def toPath(hash, suffix=".data"):
""" Returns the path on disk which corresponds to the hash given.
Parameters:
@@ -159,7 +159,7 @@ def hashJSON(contents, description):
contents = copy.deepcopy(contents) # temporary copy
del contents[description]
contents = simplejson.dumps(contents, sort_keys=True)
return hashlib.sha256(contents.encode('utf-8')).hexdigest()
return hashlib.sha256(contents.encode("utf-8")).hexdigest()
# ----------------------------------------------------------
@@ -175,11 +175,14 @@ def hashJSONFile(path, description):
"""
try:
with open(path, 'rb') as f:
with open(path, "rb") as f:
# preserve order
return hashJSON(simplejson.loads(f.read().decode('utf-8'),
object_pairs_hook=collections.OrderedDict),
description)
return hashJSON(
simplejson.loads(
f.read().decode("utf-8"), object_pairs_hook=collections.OrderedDict
),
description,
)
except simplejson.JSONDecodeError:
# falls back to normal file content hashing
return hashFileContents(path)
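The reflowed call above keeps the original behaviour: parse the JSON preserving key order, drop the field named by `description`, re-serialize with sorted keys, and hash that canonical form, falling back to hashing the raw file contents when the file is not valid JSON. The net effect is that the digest depends on the JSON content rather than on how keys are ordered on disk. A standalone sketch of that property (not the library code itself):

```python
import collections
import hashlib

import simplejson


def json_hash(text, description="description"):
    # Parse preserving order, drop the free-text field, then serialize with
    # sorted keys so the digest ignores the key order used in the file.
    contents = simplejson.loads(text, object_pairs_hook=collections.OrderedDict)
    contents.pop(description, None)
    canonical = simplejson.dumps(contents, sort_keys=True)
    return hashlib.sha256(canonical.encode("utf-8")).hexdigest()


a = '{"description": "doc A", "language": "python", "splittable": true}'
b = '{"splittable": true, "language": "python", "description": "doc B"}'
assert json_hash(a) == json_hash(b)  # same content, different key order
```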
@@ -195,9 +198,9 @@ def hashFileContents(path):
str: hash
"""
with open(path, 'rb') as f:
with open(path, "rb") as f:
sha256 = hashlib.sha256()
for chunk in iter(lambda: f.read(sha256.block_size * 1000), b''):
for chunk in iter(lambda: f.read(sha256.block_size * 1000), b""):
sha256.update(chunk)
return sha256.hexdigest()
@@ -218,9 +221,9 @@ def hashDataset(database_name, protocol_name, set_name):
str: hash
"""
s = _compact("""{
s = (
_compact(
"""{
"database": "%s",
"protocol": "%s",
"set": "%s"
}""") % (database_name, protocol_name, set_name)
}"""
)
% (database_name, protocol_name, set_name)
)
return hash(s)
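Only the formatting of `hashDataset` changes here: it still builds a compacted JSON-like key from the database, protocol and set names and hashes it. Assuming the module-level `hash()` ultimately applies SHA-256 to that string (as `_sha256` above does), an equivalent standalone sketch is:

```python
import hashlib


def dataset_hash(database, protocol, set_name):
    # Illustrative reconstruction only, not the library function: _compact()
    # strips spaces and newlines from the template before the names go in.
    key = '{"database":"%s","protocol":"%s","set":"%s"}' % (
        database,
        protocol,
        set_name,
    )
    return hashlib.sha256(key.encode("utf-8")).hexdigest()


# Hypothetical names, for illustration:
print(dataset_hash("user/my_db/1", "my_protocol", "train"))
```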
@@ -42,24 +42,24 @@ helpers
This module implements various helper methods and classes
"""
import os
import errno
import logging
import os
from .algorithm import Algorithm
from .data import CachedDataSink
from .data import CachedDataSource
from .data import RemoteDataSource
from .data import CachedDataSink
from .data import getAllFilenames
from .data_loaders import DataLoaderList
from .data_loaders import DataLoader
from .inputs import InputList
from .data_loaders import DataLoaderList
from .inputs import Input
from .inputs import InputGroup
from .outputs import SynchronizationListener
from .outputs import OutputList
from .inputs import InputList
from .outputs import Output
from .outputs import OutputList
from .outputs import RemotelySyncedOutput
from .algorithm import Algorithm
from .outputs import SynchronizationListener
logger = logging.getLogger(__name__)
@@ -46,7 +46,6 @@ from functools import reduce
import six
# ----------------------------------------------------------
@@ -43,12 +43,12 @@ Validation for libraries
"""
import os
import simplejson as json
from . import loader
from . import utils
# ----------------------------------------------------------
@@ -44,10 +44,10 @@ executor. Safe in this context means that if the method raises an
exception, it will catch it and return in a suitable form to the caller.
"""
import sys
import six
import imp
import sys
import six
# ----------------------------------------------------------
@@ -79,11 +79,11 @@ def load_module(name, path, uses):
# loads used modules
for k, v in uses.items():
retval.__dict__[k] = load_module(k, v['path'], v['uses'])
retval.__dict__[k] = load_module(k, v["path"], v["uses"])
# execute the module code on the context of previously import modules
with open(path, "rb") as f:
exec(compile(f.read(), path, 'exec'), retval.__dict__)
exec(compile(f.read(), path, "exec"), retval.__dict__) # nosec
return retval
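The `# nosec` comment added to the `exec` call (and to the `subprocess` usage further down) marks a line as reviewed so that the bandit hook introduced above does not fail on it; bandit otherwise reports dynamic code execution and subprocess calls. A tiny, hypothetical illustration:

```python
import subprocess  # nosec  # bandit B404 (subprocess import): reviewed

# Fixed argument list, no shell=True; "# nosec" silences bandit's B603 report
# for this specific line only.
subprocess.call(["true"])  # nosec
```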
@@ -118,7 +118,7 @@ def run(obj, method, exc=None, *args, **kwargs):
"""
try:
if method == '__new__':
if method == "__new__":
return obj(*args, **kwargs)
return getattr(obj, method)(*args, **kwargs)
@@ -44,10 +44,8 @@ Validation of database protocol templates
import simplejson as json
from .dataformat import DataFormat
from . import utils
from .dataformat import DataFormat
# ----------------------------------------------------------
@@ -31,4 +31,4 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #
# #
###################################################################################
\ No newline at end of file
###################################################################################
@@ -56,42 +56,38 @@ options:
"""
import json
import logging
import os
import pwd
import sys
import docopt
import simplejson
import pwd
import stat
import json
import zmq
from beat.backend.python.exceptions import UserError
from beat.backend.python.execution import DBExecutor
from beat.backend.python.execution import MessageHandler
from beat.backend.python.exceptions import UserError
#----------------------------------------------------------
# ----------------------------------------------------------
def process_traceback(tb, prefix):
import traceback
databases_prefix = os.path.join(prefix, 'databases') + os.sep
databases_prefix = os.path.join(prefix, "databases") + os.sep
for first_line, line in enumerate(tb):
if line[0].startswith(databases_prefix):
break
s = ''.join(traceback.format_list(tb[first_line:]))
s = s.replace(databases_prefix, 'databases' + os.sep).strip()
s = "".join(traceback.format_list(tb[first_line:]))
s = s.replace(databases_prefix, "databases" + os.sep).strip()
return s
#----------------------------------------------------------
# ----------------------------------------------------------
def main(arguments=None):
@@ -100,112 +96,123 @@ def main(arguments=None):
if arguments is None:
arguments = sys.argv[1:]
package = __name__.rsplit('.', 2)[0]
version = package + ' v' + \
__import__('pkg_resources').require(package)[0].version
package = __name__.rsplit(".", 2)[0]
version = package + " v" + __import__("pkg_resources").require(package)[0].version
prog = os.path.basename(sys.argv[0])
args = docopt.docopt(
__doc__ % dict(prog=prog, version=version),
argv=arguments,
version=version
__doc__ % dict(prog=prog, version=version), argv=arguments, version=version
)
# Setup the logging system
formatter = logging.Formatter(fmt="[%(asctime)s - databases_provider.py - " \
"%(name)s] %(levelname)s: %(message)s",
datefmt="%d/%b/%Y %H:%M:%S")
formatter = logging.Formatter(
fmt="[%(asctime)s - databases_provider.py - "
"%(name)s] %(levelname)s: %(message)s",
datefmt="%d/%b/%Y %H:%M:%S",
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
root_logger = logging.getLogger('beat.backend.python')
root_logger = logging.getLogger("beat.backend.python")
root_logger.addHandler(handler)
if args['--debug']:
if args["--debug"]:
root_logger.setLevel(logging.DEBUG)
else:
root_logger.setLevel(logging.INFO)
logger = logging.getLogger(__name__)
# Create the message handler
message_handler = MessageHandler(args['<addr>'])
message_handler = MessageHandler(args["<addr>"])
# If necessary, change to another user (with less privileges, but has access
# to the databases)
with open(os.path.join(args['<dir>'], 'configuration.json'), 'r') as f:
with open(os.path.join(args["<dir>"], "configuration.json"), "r") as f:
cfg = simplejson.load(f)
if 'datasets_uid' in cfg:
if "datasets_uid" in cfg:
# First create the user (if it doesn't exists)
try:
user = pwd.getpwuid(cfg['datasets_uid'])
except:
import subprocess
retcode = subprocess.call(['adduser', '--uid', str(cfg['datasets_uid']),
'--no-create-home', '--disabled-password',
'--disabled-login', '--gecos', '""', '-q',
'beat-nobody'])
_ = pwd.getpwuid(cfg["datasets_uid"])
except Exception:
import subprocess # nosec
retcode = subprocess.call( # nosec
[
"adduser",
"--uid",
str(cfg["datasets_uid"]),
"--no-create-home",
"--disabled-password",
"--disabled-login",