beat / beat.backend.python / Commits / 928a2797

Commit 928a2797, authored Mar 11, 2020 by Flavio TARSETTI

Merge branch 'logger_cleanup' into 'master'

Logger cleanup

See merge request !69

Parents: 1cdad443, fb229608
Pipeline #38028 failed with stages in 90 minutes
Showing 12 changed files with 318 additions and 378 deletions
beat/backend/python/data.py  +2  -2
beat/backend/python/data_loaders.py  +56  -64
beat/backend/python/execution/database.py  +0  -6
beat/backend/python/execution/messagehandlers.py  +1  -1
beat/backend/python/inputs.py  +90  -92
beat/backend/python/outputs.py  +0  -4
beat/backend/python/scripts/execute.py  +1  -1
beat/backend/python/test/test_databases_provider.py  +0  -4
beat/backend/python/test/test_dbexecutor.py  +0  -4
beat/backend/python/test/test_helpers.py  +135  -149
beat/backend/python/test/test_loop_executor.py  +0  -4
beat/backend/python/test/test_message_handler.py  +33  -47
beat/backend/python/data.py (view file @ 928a2797)

@@ -415,7 +415,7 @@ class CachedDataSource(DataSource):
         ) = getAllFilenames(filename, start_index, end_index)

         if len(self.filenames) == 0:
-            logger.warn("No files found for %s" % filename)
+            logger.warning("No files found for %s" % filename)
             return False

         check_consistency(self.filenames, data_checksum_filenames)

@@ -429,7 +429,7 @@ class CachedDataSource(DataSource):
         try:
             f = open(current_filename, "rb")
         except Exception as e:
-            logger.warn("Could not setup `%s': %s" % (filename, e))
+            logger.warning("Could not setup `%s': %s" % (filename, e))
             return False

         # Reads the header of the current file
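The only change in this file is the rename of logger.warn to logger.warning. In Python 3, Logger.warn is a deprecated alias for Logger.warning, so the rename keeps the behaviour while silencing the deprecation warning. A minimal standalone sketch of the difference (not code from this repository):

    # Standalone example, not repository code.
    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)
    warnings.simplefilter("always", DeprecationWarning)

    # Deprecated alias: logs the message but also emits a DeprecationWarning.
    logger.warn("No files found for %s" % "some/path")

    # Preferred spelling: produces the same WARNING record, nothing else.
    logger.warning("No files found for %s" % "some/path")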
beat/backend/python/data_loaders.py (view file @ 928a2797)

@@ -42,12 +42,10 @@ data_loaders
 This module implements all the data communication related classes
 """

 import logging

 import six

 from .data import mixDataIndices

 logger = logging.getLogger(__name__)


 # ----------------------------------------------------------

@@ -90,35 +88,36 @@ class DataView(object):
     def __init__(self, data_loader, data_indices):
         self.infos = {}
         self.data_indices = data_indices
         self.nb_data_units = len(data_indices)
         self.data_index_start = data_indices[0][0]
         self.data_index_end = data_indices[-1][1]

         for input_name, infos in data_loader.infos.items():
             input_data_indices = []

             current_start = self.data_index_start

             for i in range(self.data_index_start, self.data_index_end + 1):
-                for indices in infos['data_indices']:
+                for indices in infos["data_indices"]:
                     if indices[1] == i:
-                        input_data_indices.append( (current_start, i) )
+                        input_data_indices.append((current_start, i))
                         current_start = i + 1
                         break

-            if (len(input_data_indices) == 0) or (input_data_indices[-1][1] != self.data_index_end):
-                input_data_indices.append( (current_start, self.data_index_end) )
+            if (len(input_data_indices) == 0) or (
+                input_data_indices[-1][1] != self.data_index_end
+            ):
+                input_data_indices.append((current_start, self.data_index_end))

             self.infos[input_name] = dict(
-                data_source=infos['data_source'],
+                data_source=infos["data_source"],
                 data_indices=input_data_indices,
                 data=None,
                 start_index=-1,
                 end_index=-1,
             )

     def count(self, input_name=None):
         """Returns the number of available data indexes for the given input
         name. If none given the number of available data units.

@@ -134,30 +133,30 @@ class DataView(object):
         """

         if input_name is not None:
             try:
-                return len(self.infos[input_name]['data_indices'])
-            except:
+                return len(self.infos[input_name]["data_indices"])
+            except Exception:
                 return None
         else:
             return self.nb_data_units

     def __getitem__(self, index):
         if index < 0:
             return (None, None, None)

         try:
             indices = self.data_indices[index]
-        except:
+        except Exception:
             return (None, None, None)

         result = {}

         for input_name, infos in self.infos.items():
-            if (indices[0] < infos['start_index']) or (infos['end_index'] < indices[0]):
-                (infos['data'], infos['start_index'], infos['end_index']) = \
-                    infos['data_source'].getAtDataIndex(indices[0])
+            if (indices[0] < infos["start_index"]) or (infos["end_index"] < indices[0]):
+                (infos["data"], infos["start_index"], infos["end_index"]) = infos["data_source"].getAtDataIndex(indices[0])

-            result[input_name] = infos['data']
+            result[input_name] = infos["data"]

         return (result, indices[0], indices[1])

@@ -204,35 +203,34 @@ class DataLoader(object):
     def __init__(self, channel):
         self.channel = str(channel)
         self.infos = {}
         self.mixed_data_indices = None
         self.nb_data_units = 0
         self.data_index_start = -1  # Lower index across all inputs
         self.data_index_end = -1  # Bigger index across all inputs

     def add(self, input_name, data_source):
         self.infos[input_name] = dict(
             data_source=data_source,
             data_indices=data_source.data_indices(),
             data=None,
             start_index=-1,
             end_index=-1,
         )

-        self.mixed_data_indices = mixDataIndices([ x['data_indices'] for x in self.infos.values() ])
+        self.mixed_data_indices = mixDataIndices(
+            [x["data_indices"] for x in self.infos.values()]
+        )

         self.nb_data_units = len(self.mixed_data_indices)
         self.data_index_start = self.mixed_data_indices[0][0]
         self.data_index_end = self.mixed_data_indices[-1][1]

     def input_names(self):
         """Returns the name of all inputs associated to this data loader"""
         return self.infos.keys()

     def count(self, input_name=None):
         """Returns the number of available data indexes for the given input
         name. If none given the number of available data units.

@@ -249,13 +247,12 @@ class DataLoader(object):
         if input_name is not None:
             try:
-                return len(self.infos[input_name]['data_indices'])
-            except:
+                return len(self.infos[input_name]["data_indices"])
+            except Exception:
                 return 0
         else:
             return self.nb_data_units

     def view(self, input_name, index):
         """ Returns the view associated with this data loader

@@ -272,33 +269,36 @@ class DataLoader(object):
             return None

         try:
-            indices = self.infos[input_name]['data_indices'][index]
-        except:
+            indices = self.infos[input_name]["data_indices"][index]
+        except Exception:
             return None

-        limited_data_indices = [ x for x in self.mixed_data_indices
-                                 if (indices[0] <= x[0]) and (x[1] <= indices[1]) ]
+        limited_data_indices = [
+            x
+            for x in self.mixed_data_indices
+            if (indices[0] <= x[0]) and (x[1] <= indices[1])
+        ]

         return DataView(self, limited_data_indices)

     def __getitem__(self, index):
         if index < 0:
             return (None, None, None)

         try:
             indices = self.mixed_data_indices[index]
-        except:
+        except Exception:
             return (None, None, None)

         result = {}

         for input_name, infos in self.infos.items():
-            if (indices[0] < infos['start_index']) or (infos['end_index'] < indices[0]):
-                (infos['data'], infos['start_index'], infos['end_index']) = \
-                    infos['data_source'].getAtDataIndex(indices[0])
+            if (indices[0] < infos["start_index"]) or (infos["end_index"] < indices[0]):
+                (infos["data"], infos["start_index"], infos["end_index"]) = infos["data_source"].getAtDataIndex(indices[0])

-            result[input_name] = infos['data']
+            result[input_name] = infos["data"]

         return (result, indices[0], indices[1])

@@ -354,7 +354,6 @@ class DataLoaderList(object):
         self._loaders = []
         self.main_loader = None

     def add(self, data_loader):
         """Add a data loader to the list

@@ -366,7 +365,6 @@ class DataLoaderList(object):
         self._loaders.append(data_loader)

     def __getitem__(self, name_or_index):
         try:
             if isinstance(name_or_index, six.string_types):

@@ -374,30 +372,24 @@ class DataLoaderList(object):
             elif isinstance(name_or_index, int):
                 return self._loaders[name_or_index]
-        except:
-            pass
-
-        return None
+        except Exception:
+            return None

     def __iter__(self):
         for i in range(len(self._loaders)):
             yield self._loaders[i]

     def __len__(self):
         return len(self._loaders)

     def loaderOf(self, input_name):
         """Returns the data loader matching the input name"""
         try:
-            return [ k for k in self._loaders if input_name in k.input_names() ][0]
-        except:
+            return [k for k in self._loaders if input_name in k.input_names()][0]
+        except Exception:
             return None

     def secondaries(self):
         """Returns a list of all data loaders except the main one"""
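Besides the quote-style and line-wrapping changes, every bare "except:" in this file is narrowed to "except Exception:". A bare except also traps KeyboardInterrupt and SystemExit, which can make the data loaders impossible to interrupt cleanly; catching Exception keeps the same fallback value while letting those pass through. A small illustration, independent of this repository:

    # Standalone illustration, not repository code.
    def count_narrow(infos, input_name):
        # New pattern: only ordinary errors (KeyError, TypeError, ...) trigger the fallback.
        try:
            return len(infos[input_name]["data_indices"])
        except Exception:
            return 0

    print(count_narrow({}, "a"))  # 0: unknown input name is tolerated
    print(count_narrow({"a": {"data_indices": [(0, 0), (1, 1)]}}, "a"))  # 2

With the old bare "except:", the same helper would also have swallowed a Ctrl-C (KeyboardInterrupt) arriving while the length was being computed.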
beat/backend/python/execution/database.py (view file @ 928a2797)

@@ -43,17 +43,11 @@ Execution utilities
 """

 import os
-import logging
 import simplejson

 from ..database import Database

-logger = logging.getLogger(__name__)
-

 class DBExecutor(object):
     """Executor specialised in database views
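execution/database.py drops its logging import and module-level logger, presumably because nothing in the module still uses them. The per-module logger pattern kept by the rest of the package looks like this (a generic sketch, not copied from the repository):

    import logging

    # One logger per module, named after the module, so levels and handlers
    # can be configured for a whole package such as beat.backend.python.
    logger = logging.getLogger(__name__)

    def load(path):
        # Hypothetical function, for illustration only.
        logger.debug("loading %s", path)
        with open(path, "rb") as f:
            return f.read()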
beat/backend/python/execution/messagehandlers.py (view file @ 928a2797)

@@ -336,7 +336,7 @@ class MessageHandler(threading.Thread):
                 answer = self.socket.recv()  # ack
                 logger.debug("recv: %s", answer)
                 break
-            logger.warn(
+            logger.warning(
                 '(try %d) waited %d ms for "ack" from server', this_try, timeout
             )
             this_try += 1
beat/backend/python/inputs.py (view file @ 928a2797)

(Diff collapsed: +90 -92.)
beat/backend/python/outputs.py (view file @ 928a2797)

@@ -43,12 +43,8 @@ This module implements output related classes
 """

 import six
-import logging
 import zmq

-logger = logging.getLogger(__name__)
-

 class SynchronizationListener:
     """A callback mechanism to keep Inputs and Outputs in groups and lists
beat/backend/python/scripts/execute.py (view file @ 928a2797)

@@ -95,7 +95,7 @@ def send_error(logger, socket, tp, message):
             answer = socket.recv()  # ack
             logger.debug("recv: %s", answer)
             break
-        logger.warn('(try %d) waited %d ms for "ack" from server', this_try, timeout)
+        logger.warning('(try %d) waited %d ms for "ack" from server', this_try, timeout)
         this_try += 1

     if this_try > max_tries:
         logger.error("could not send error message to server")
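Note that the retry loop in send_error passes this_try and timeout as arguments to the logging call instead of pre-formatting the message. With that style the %-interpolation only happens if the record is actually emitted, which is the usual logging idiom; a standalone demo:

    # Standalone demo, not repository code.
    import logging

    logging.basicConfig(level=logging.ERROR)  # WARNING records are filtered out here
    logger = logging.getLogger("send_error_demo")

    this_try, timeout = 2, 1000

    # Lazy: the message is never formatted because the record is dropped.
    logger.warning('(try %d) waited %d ms for "ack" from server', this_try, timeout)

    # Eager: the % formatting always runs, even though the record is dropped too.
    logger.warning('(try %d) waited %d ms for "ack" from server' % (this_try, timeout))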
beat/backend/python/test/test_databases_provider.py (view file @ 928a2797)

@@ -38,7 +38,6 @@
 import os
 import socket
-import logging
 import unittest
 import json

@@ -62,9 +61,6 @@ from .test_database import INTEGERS_DBS
 from . import prefix

-logger = logging.getLogger(__name__)
-
-
 # ----------------------------------------------------------
beat/backend/python/test/test_dbexecutor.py (view file @ 928a2797)

@@ -37,7 +37,6 @@
 # Tests for experiment execution

 import os
-import logging
 import unittest
 import zmq
 import tempfile

@@ -62,9 +61,6 @@ from . import prefix
 # ----------------------------------------------------------

-logger = logging.getLogger(__name__)
-
-
 CONFIGURATION = {
     "queue": "queue",
     "algorithm": "user/sum/1",
beat/backend/python/test/test_helpers.py (view file @ 928a2797)

(Diff collapsed: +135 -149.)
beat/backend/python/test/test_loop_executor.py (view file @ 928a2797)

@@ -37,7 +37,6 @@
 # Tests for experiment execution

 import os
-import logging
 import unittest
 import zmq
 import tempfile

@@ -63,9 +62,6 @@ from ..helpers import convert_experiment_configuration_to_container
 from . import prefix

-logger = logging.getLogger(__name__)
-
-
 # ----------------------------------------------------------
beat/backend/python/test/test_message_handler.py (view file @ 928a2797)

@@ -34,9 +34,6 @@
 ###################################################################################

-import logging
-
-logger = logging.getLogger(__name__)
 import unittest
 import zmq
 import os

@@ -46,9 +43,6 @@ import numpy as np
 from ..execution import MessageHandler
 from ..dataformat import DataFormat
 from ..inputs import Input
 from ..inputs import InputGroup
 from ..inputs import InputList
 from ..data import RemoteException
 from ..data import CachedDataSource
 from ..data import RemoteDataSource

@@ -59,21 +53,19 @@ from .mocks import CrashingDataSource
 from . import prefix

-#----------------------------------------------------------
+# ----------------------------------------------------------


 class TestMessageHandlerBase(unittest.TestCase):
     def setUp(self):
         self.filenames = []
         self.data_loader = None

     def tearDown(self):
         for filename in self.filenames:
             basename, ext = os.path.splitext(filename)
             filenames = [filename]
-            filenames += glob.glob(basename + '*')
+            filenames += glob.glob(basename + "*")
             for filename in filenames:
                 if os.path.exists(filename):
                     os.unlink(filename)

@@ -87,32 +79,34 @@ class TestMessageHandlerBase(unittest.TestCase):
         self.data_loader = None

     def create_data_loader(self, data_sources):
         self.client_context = zmq.Context()

-        self.message_handler = MessageHandler('127.0.0.1', data_sources=data_sources, context=self.client_context)
+        self.message_handler = MessageHandler(
+            "127.0.0.1", data_sources=data_sources, context=self.client_context
+        )
         self.message_handler.start()

         self.client_socket = self.client_context.socket(zmq.PAIR)
         self.client_socket.connect(self.message_handler.address)

-        self.data_loader = DataLoader('channel')
+        self.data_loader = DataLoader("channel")

         for input_name in data_sources.keys():
             data_source = RemoteDataSource()
-            data_source.setup(self.client_socket, input_name, 'user/single_integer/1', prefix)
+            data_source.setup(
+                self.client_socket, input_name, "user/single_integer/1", prefix
+            )
             self.data_loader.add(input_name, data_source)

     def writeData(self, start_index=0, end_index=10, step=1, base=0):
-        testfile = tempfile.NamedTemporaryFile(prefix=__name__, suffix='.data')
+        testfile = tempfile.NamedTemporaryFile(prefix=__name__, suffix=".data")
         testfile.close()  # preserve only the name
         filename = testfile.name

         self.filenames.append(filename)

-        dataformat = DataFormat(prefix, 'user/single_integer/1')
+        dataformat = DataFormat(prefix, "user/single_integer/1")
         self.assertTrue(dataformat.valid)

         data_sink = CachedDataSink()

@@ -140,102 +134,94 @@ class TestMessageHandlerBase(unittest.TestCase):
         return cached_file


-#----------------------------------------------------------
+# ----------------------------------------------------------


 class TestOneDataSource(TestMessageHandlerBase):
     def setUp(self):
         super(TestOneDataSource, self).setUp()

         data_sources = {}
-        data_sources['a'] = self.writeData(start_index=0, end_index=9)
+        data_sources["a"] = self.writeData(start_index=0, end_index=9)

         self.create_data_loader(data_sources)

     def test_iteration(self):
-        self.assertEqual(self.data_loader.count('a'), 10)
+        self.assertEqual(self.data_loader.count("a"), 10)

         for i in range(10):
             (result, start, end) = self.data_loader[i]
             self.assertEqual(start, i)
             self.assertEqual(end, i)
-            self.assertEqual(result['a'].value, i)
+            self.assertEqual(result["a"].value, i)


-#----------------------------------------------------------
+# ----------------------------------------------------------


 class TestSameFrequencyDataSources(TestMessageHandlerBase):
     def setUp(self):
         super(TestSameFrequencyDataSources, self).setUp()

         data_sources = {}
-        data_sources['a'] = self.writeData(start_index=0, end_index=9)
-        data_sources['b'] = self.writeData(start_index=0, end_index=9, base=10)
+        data_sources["a"] = self.writeData(start_index=0, end_index=9)
+        data_sources["b"] = self.writeData(start_index=0, end_index=9, base=10)

         self.create_data_loader(data_sources)

     def test_iteration(self):
-        self.assertEqual(self.data_loader.count('a'), 10)
-        self.assertEqual(self.data_loader.count('b'), 10)
+        self.assertEqual(self.data_loader.count("a"), 10)
+        self.assertEqual(self.data_loader.count("b"), 10)

         for i in range(10):
             (result, start, end) = self.data_loader[i]
             self.assertEqual(start, i)
             self.assertEqual(end, i)
-            self.assertEqual(result['a'].value, i)
-            self.assertEqual(result['b'].value, 10 + i)
+            self.assertEqual(result["a"].value, i)
+            self.assertEqual(result["b"].value, 10 + i)


-#----------------------------------------------------------
+# ----------------------------------------------------------


 class TestDifferentFrequenciesDataSources(TestMessageHandlerBase):
     def setUp(self):
         super(TestDifferentFrequenciesDataSources, self).setUp()

         data_sources = {}
-        data_sources['a'] = self.writeData(start_index=0, end_index=9)
-        data_sources['b'] = self.writeData(start_index=0, end_index=9, base=10, step=5)
+        data_sources["a"] = self.writeData(start_index=0, end_index=9)
+        data_sources["b"] = self.writeData(start_index=0, end_index=9, base=10, step=5)

         self.create_data_loader(data_sources)

     def test_iteration(self):
-        self.assertEqual(self.data_loader.count('a'), 10)
-        self.assertEqual(self.data_loader.count('b'), 2)
+        self.assertEqual(self.data_loader.count("a"), 10)
+        self.assertEqual(self.data_loader.count("b"), 2)

         for i in range(10):
             (result, start, end) = self.data_loader[i]
             self.assertEqual(start, i)
             self.assertEqual(end, i)
-            self.assertEqual(result['a'].value, i)
+            self.assertEqual(result["a"].value, i)

             if i < 5:
-                self.assertEqual(result['b'].value, 10)
+                self.assertEqual(result["b"].value, 10)
             else:
-                self.assertEqual(result['b'].value, 15)
+                self.assertEqual(result["b"].value, 15)


-#----------------------------------------------------------
+# ----------------------------------------------------------


 class TestCrashingDataSource(TestMessageHandlerBase):
     def setUp(self):
         super(TestCrashingDataSource, self).setUp()

         data_sources = {}
-        data_sources['a'] = CrashingDataSource()
+        data_sources["a"] = CrashingDataSource()

         self.create_data_loader(data_sources)

     def test_crash(self):
         with self.assertRaises(RemoteException):
             (result, start, end) = self.data_loader[0]
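A quick way to confirm that no deprecated logger.warn( calls survive a cleanup like this one is to scan the tree for the pattern. The helper below is a hypothetical illustration and not part of this repository:

    # Hypothetical helper, not part of beat.backend.python.
    import pathlib
    import re
    import sys

    PATTERN = re.compile(r"\blogger\.warn\(")

    def find_deprecated_warn(root="."):
        """Yield (path, line number, line) for every remaining logger.warn( call."""
        for path in pathlib.Path(root).rglob("*.py"):
            for lineno, line in enumerate(path.read_text().splitlines(), start=1):
                if PATTERN.search(line):
                    yield path, lineno, line.strip()

    if __name__ == "__main__":
        hits = list(find_deprecated_warn(sys.argv[1] if len(sys.argv) > 1 else "."))
        for path, lineno, line in hits:
            print("%s:%d: %s" % (path, lineno, line))
        sys.exit(1 if hits else 0)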