beat / beat.editor

Commit d9acbdd1 authored Sep 19, 2018 by Jaden DIEFENBAUGH

    [js][tc] move toolchain typings to its own helpers file, fix #143

parent eb1e3688
Changes: 9 changed files
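The change is mechanical across the affected components: type imports that used to resolve against the component-local ./types.js now go through the shared helpers alias. A minimal before/after sketch (the import paths are taken from the hunks below; the '@helpers' module alias itself is assumed to be configured elsewhere in the project's build setup):

// before: toolchain typings lived next to the components
import type { BlockSet, BlockCoords } from './types.js';

// after: the same typings come from the shared helpers module
import type { BlockSet, BlockCoords } from '@helpers/toolchainTypes';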
conda/js/src/components/toolchain/GraphicalEditor.jsx
@@ -24,7 +24,7 @@ import type {
   Group,
   LocationMapEntry,
   LocationMap,
-} from './types.js';
+} from '@helpers/toolchainTypes';

 type Props = {
   // the representation data of the toolchain (block locations & channel colors)
conda/js/src/components/toolchain/InsertObjectModal.jsx
@@ -19,7 +19,7 @@ import {
 import type { FlattenedDatabaseEntry } from '@store/selectors';
-import type { ConnectionType } from './types.js';
+import type { ConnectionType } from '@helpers/toolchainTypes';
 import type { Protocol, Set as ProtocolSet } from '../database/DatabaseEditor.jsx';
 import type { BeatObject } from '@helpers/beat';
 import { generateNewKey } from '@helpers';
conda/js/src/components/toolchain/ToolchainBlock.jsx
@@ -26,7 +26,7 @@ import {
 } from 'reactstrap';
 import { ContextMenuTrigger } from 'react-contextmenu';
 import cn from 'classnames';
-import type { BlockSet, BlockCoords } from './types.js';
+import type { BlockSet, BlockCoords } from '@helpers/toolchainTypes';

 export type Props = {
   name: string,
conda/js/src/components/toolchain/ToolchainConnection.jsx
@@ -3,7 +3,7 @@ import * as React from 'react';
 import { ContextMenu, MenuItem, ContextMenuTrigger } from 'react-contextmenu';
 import cn from 'classnames';
-import type { BlockCoords, ConnectionType } from './types.js';
+import type { BlockCoords, ConnectionType } from '@helpers/toolchainTypes';

 type Props = {
   fromLocMap: BlockCoords,
conda/js/src/components/toolchain/ToolchainEditor.jsx
@@ -39,7 +39,7 @@ import * as Selectors from '@store/selectors.js';
 import type { FlattenedDatabaseEntry } from '@store/selectors';
 import Block from './ToolchainBlock.jsx';
-import type { BlockType, ConnectionType, BlockSet, Group } from './types.js';
+import type { BlockType, ConnectionType, BlockSet, Group, Contents } from '@helpers/toolchainTypes';
 import { connectionToId } from './ToolchainConnection.jsx';
 import ValidSchemaBadge from '../ValidSchemaBadge.jsx';
 import CacheInput from '../CacheInput.jsx';

@@ -56,7 +56,13 @@ import RenameGroupModal from './RenameGroupModal.jsx';
 type Props = {
   // saved data for the current toolchain
-  data: BeatObject,
+  data: {
+    name: string,
+    contents: Contents,
+    extraContents: {
+      groups: Group[]
+    },
+  },
   // all toolchains
   toolchains: BeatObject[],
   // all databases

@@ -405,7 +411,7 @@ export class ToolchainEditor extends React.PureComponent<Props, State> {
     };
     const rep = {...this.props.data.contents.representation};
-    rep.connections = Object.entries(rep.connections).map(([name, rep]) => {
+    const newRepConns = Object.entries(rep.connections).map(([name, rep]) => {
       if(!name.includes(`.${ oldName }`)) return [name, rep];

@@ -440,6 +446,10 @@ export class ToolchainEditor extends React.PureComponent<Props, State> {
           to: updated[1],
         };
       }),
+      representation: {
+        ...rep,
+        connections: newRepConns,
+      },
     };
     this.setContents(newContents);
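The two trailing ToolchainEditor.jsx hunks rebuild the representation's connection map into newRepConns rather than reassigning rep.connections while iterating over it, and then write the rebuilt map back into the new contents; the regression tests added below exercise exactly this renaming behaviour. As a rough illustration of the key rewrite involved (this helper is hypothetical and not part of the commit; connection keys follow the 'fromBlock.output/toBlock.input' shape used in the tests):

// hypothetical sketch: rewrite one representation connection key after an
// endpoint on `blockName` is renamed from `oldName` to `newName`
const renameConnectionKey = (key, blockName, oldName, newName) => {
  // keys that do not mention the old endpoint name are returned unchanged,
  // mirroring the early return in the hunk above
  if(!key.includes(`.${ oldName }`)) return key;

  const rename = (end) =>
    end === `${ blockName }.${ oldName }` ? `${ blockName }.${ newName }` : end;
  const [from, to] = key.split('/');
  return `${ rename(from) }/${ rename(to) }`;
};

// e.g. renaming training_alg's output 'lda_machine' to 'lda':
// renameConnectionKey('training_alg.lda_machine/testing_alg.lda_machine', 'training_alg', 'lda_machine', 'lda')
//   -> 'training_alg.lda/testing_alg.lda_machine'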
conda/js/src/components/toolchain/ToolchainEditor.spec.jsx
@@ -16,9 +16,149 @@ import testTcs from '@test/test_tcs.json';
 import testDbs from '@test/test_dbs.json';
 import testAlgs from '@test/test_algs.json';

+const getIrisTc = (color1: string = '#008000', color2: string = '#FF0000') => {
+  return {
+    'name': 'test/iris/1',
+    'contents': {
+      'description': '',
+      'datasets': [
+        { 'name': 'training_data', 'outputs': ['measurements', 'species'] },
+        { 'name': 'testing_data', 'outputs': ['measurements', 'species'] }
+      ],
+      'blocks': [
+        { 'inputs': ['measurements', 'species'], 'name': 'training_alg', 'outputs': ['lda_machine'], 'synchronized_channel': 'training_data' },
+        { 'inputs': ['lda_machine', 'measurements'], 'name': 'testing_alg', 'outputs': ['scores'], 'synchronized_channel': 'testing_data' }
+      ],
+      'analyzers': [
+        { 'inputs': ['scores', 'species'], 'name': 'analyzer', 'synchronized_channel': 'testing_data' }
+      ],
+      'connections': [
+        { 'channel': 'testing_data', 'from': 'testing_alg.scores', 'to': 'analyzer.scores' },
+        { 'channel': 'training_data', 'from': 'training_alg.lda_machine', 'to': 'testing_alg.lda_machine' },
+        { 'channel': 'testing_data', 'from': 'testing_data.measurements', 'to': 'testing_alg.measurements' },
+        { 'channel': 'training_data', 'from': 'training_data.measurements', 'to': 'training_alg.measurements' },
+        { 'channel': 'training_data', 'from': 'training_data.species', 'to': 'training_alg.species' },
+        { 'channel': 'testing_data', 'from': 'testing_data.species', 'to': 'analyzer.species' }
+      ],
+      'representation': {
+        'blocks': {
+          'analyzer': { 'col': 46, 'height': 3, 'row': 4, 'width': 10 },
+          'testing_alg': { 'col': 32, 'height': 3, 'row': 3, 'width': 10 },
+          'testing_data': { 'col': 6, 'height': 3, 'row': 5, 'width': 10 },
+          'training_alg': { 'col': 19, 'height': 3, 'row': 0, 'width': 10 },
+          'training_data': { 'col': 6, 'height': 3, 'row': 0, 'width': 10 }
+        },
+        'channel_colors': {
+          'testing_data': color2,
+          'training_data': color1
+        },
+        'connections': {
+          'testing_alg.scores/analyzer.scores': [],
+          'testing_data.measurements/testing_alg.measurements': [],
+          'testing_data.species/analyzer.species': [],
+          'training_alg.lda_machine/testing_alg.lda_machine': [],
+          'training_data.measurements/training_alg.measurements': [],
+          'training_data.species/training_alg.species': []
+        }
+      }
+    },
+    'extraContents': {
+      'groups': []
+    }
+  };
+};
+
 chai.use(deepEqualInAnyOrder);

-describe('<ToolchainEditor />', function() {
+describe.only('<ToolchainEditor />', function() {
   // these tests might take a long time, comparatively
   this.timeout(10000);
@@ -499,143 +639,7 @@ describe('<ToolchainEditor />', function() {
       }
     });

-    expect(data).to.deep.equalInAnyOrder({
-      'name': 'test/iris/1',
-      'contents': {
-        'description': '',
-        'datasets': [
-          { 'name': 'training_data', 'outputs': ['measurements', 'species'] },
-          { 'name': 'testing_data', 'outputs': ['measurements', 'species'] }
-        ],
-        'blocks': [
-          { 'inputs': ['measurements', 'species'], 'name': 'training_alg', 'outputs': ['lda_machine'], 'synchronized_channel': 'training_data' },
-          { 'inputs': ['lda_machine', 'measurements'], 'name': 'testing_alg', 'outputs': ['scores'], 'synchronized_channel': 'testing_data' }
-        ],
-        'analyzers': [
-          { 'inputs': ['scores', 'species'], 'name': 'analyzer', 'synchronized_channel': 'testing_data' }
-        ],
-        'connections': [
-          { 'channel': 'testing_data', 'from': 'testing_alg.scores', 'to': 'analyzer.scores' },
-          { 'channel': 'training_data', 'from': 'training_alg.lda_machine', 'to': 'testing_alg.lda_machine' },
-          { 'channel': 'testing_data', 'from': 'testing_data.measurements', 'to': 'testing_alg.measurements' },
-          { 'channel': 'training_data', 'from': 'training_data.measurements', 'to': 'training_alg.measurements' },
-          { 'channel': 'training_data', 'from': 'training_data.species', 'to': 'training_alg.species' },
-          { 'channel': 'testing_data', 'from': 'testing_data.species', 'to': 'analyzer.species' }
-        ],
-        'representation': {
-          'blocks': {
-            'analyzer': { 'col': 46, 'height': 3, 'row': 4, 'width': 10 },
-            'testing_alg': { 'col': 32, 'height': 3, 'row': 3, 'width': 10 },
-            'testing_data': { 'col': 6, 'height': 3, 'row': 5, 'width': 10 },
-            'training_alg': { 'col': 19, 'height': 3, 'row': 0, 'width': 10 },
-            'training_data': { 'col': 6, 'height': 3, 'row': 0, 'width': 10 }
-          },
-          'channel_colors': {
-            'testing_data': ch2,
-            'training_data': ch1
-          },
-          'connections': {
-            'testing_alg.scores/analyzer.scores': [],
-            'testing_data.measurements/testing_alg.measurements': [],
-            'testing_data.species/analyzer.species': [],
-            'training_alg.lda_machine/testing_alg.lda_machine': [],
-            'training_data.measurements/training_alg.measurements': [],
-            'training_data.species/training_alg.species': []
-          }
-        }
-      },
-      'extraContents': {
-        'groups': []
-      }
-    });
+    expect(data).to.deep.equalInAnyOrder(getIrisTc(ch1, ch2));
     });
   });
@@ -896,4 +900,91 @@ describe('<ToolchainEditor />', function() {
});
});
});
describe
(
'
Regression Tests
'
,
()
=>
{
const
timeout
=
(
ms
)
=>
{
return
new
Promise
(
resolve
=>
setTimeout
(
resolve
,
ms
));
};
const
tcs
=
testTcs
.
map
(
tc
=>
getValidObj
(
tc
));
const
dbs
=
testDbs
.
map
(
db
=>
getValidDatabaseObj
(
db
));
const
algs
=
testAlgs
.
map
(
alg
=>
getValidAlgorithmObj
(
alg
));
const
state
=
{
...
reducer
({},
{
type
:
''
,
payload
:
{}}),
toolchain
:
tcs
,
database
:
dbs
,
algorithm
:
algs
,
};
const
sets
=
Selectors
.
flattenedDatabases
(
state
);
const
normalAlgorithms
=
Selectors
.
normalBlocks
(
state
);
const
protocols
=
Selectors
.
databaseProtocols
(
state
);
const
analyzerAlgorithms
=
Selectors
.
analyzerBlocks
(
state
);
const
saveFunc
=
sinon
.
spy
();
const
_updateFunc
=
(
obj
)
=>
{
wrapper
.
setProps
&&
wrapper
.
setProps
({
data
:
obj
});
};
const
updateFunc
=
sinon
.
spy
(
_updateFunc
);
const
tcName
=
'
test/iris/1
'
;
it
(
'
Properly changes connection names when renaming an input
'
,
()
=>
{
const
tc
=
getValidObj
(
getIrisTc
());
wrapper
=
mount
(
<
C
data
=
{
tc
}
sets
=
{
sets
}
protocols
=
{
protocols
}
toolchains
=
{
state
.
toolchain
}
databases
=
{
state
.
database
}
normalAlgorithms
=
{
normalAlgorithms
}
analyzerAlgorithms
=
{
analyzerAlgorithms
}
saveFunc
=
{
saveFunc
}
updateFunc
=
{
updateFunc
}
/>
);
// change testing_alg.lda_machine to testing_alg.lda
wrapper
.
find
(
'
rect#block_testing_alg
'
).
simulate
(
'
click
'
);
wrapper
.
update
();
expect
(
wrapper
.
find
(
'
ToolchainModal
'
).
props
().
active
).
to
.
equal
(
true
);
wrapper
.
find
(
'
.modal CacheInput[value="lda_machine"]
'
).
prop
(
'
onChange
'
)(
{
target
:
{
value
:
'
lda
'
}});
wrapper
.
update
();
expect
(
wrapper
.
find
(
'
.modal CacheInput[value="lda"]
'
).
props
().
value
).
to
.
equal
(
'
lda
'
);
const
data
=
wrapper
.
props
().
data
;
expect
(
data
.
contents
.
representation
.
connections
).
to
.
have
.
property
(
'
training_alg.lda_machine/testing_alg.lda
'
);
expect
(
data
.
contents
.
representation
.
connections
).
to
.
not
.
have
.
property
(
'
training_alg.lda_machine/testing_alg.lda_machine
'
);
});
it
(
'
Properly changes connection names when renaming an output
'
,
()
=>
{
const
tc
=
getValidObj
(
getIrisTc
());
wrapper
=
mount
(
<
C
data
=
{
tc
}
sets
=
{
sets
}
protocols
=
{
protocols
}
toolchains
=
{
state
.
toolchain
}
databases
=
{
state
.
database
}
normalAlgorithms
=
{
normalAlgorithms
}
analyzerAlgorithms
=
{
analyzerAlgorithms
}
saveFunc
=
{
saveFunc
}
updateFunc
=
{
updateFunc
}
/>
);
// change testing_alg.lda_machine to testing_alg.lda
wrapper
.
find
(
'
rect#block_training_alg
'
).
simulate
(
'
click
'
);
wrapper
.
update
();
expect
(
wrapper
.
find
(
'
ToolchainModal
'
).
props
().
active
).
to
.
equal
(
true
);
wrapper
.
find
(
'
.modal CacheInput[value="lda_machine"]
'
).
prop
(
'
onChange
'
)(
{
target
:
{
value
:
'
lda
'
}});
wrapper
.
update
();
expect
(
wrapper
.
find
(
'
.modal CacheInput[value="lda"]
'
).
props
().
value
).
to
.
equal
(
'
lda
'
);
const
data
=
wrapper
.
props
().
data
;
expect
(
data
.
contents
.
representation
.
connections
).
to
.
have
.
property
(
'
training_alg.lda/testing_alg.lda_machine
'
);
expect
(
data
.
contents
.
representation
.
connections
).
to
.
not
.
have
.
property
(
'
training_alg.lda_machine/testing_alg.lda_machine
'
);
});
});
});
conda/js/src/components/toolchain/index.js
 // @flow
 import ConnectedToolchainEditor, { ToolchainEditor } from './ToolchainEditor.jsx';
 import GraphicalEditor from './GraphicalEditor.jsx';
 export type { BlockSet } from './types.js';
 export {
   ToolchainEditor,
 ...
conda/js/src/helpers/beat.js
 // @flow
 // BEAT-specific helpers
 import { copyObj } from '.';
+import type { Contents as ToolchainContents, Group } from './toolchainTypes';

 // all the BEAT entities
 export type BeatEntity = 'database' | 'library' | 'dataformat' | 'algorithm' | 'toolchain' | 'experiment' | 'plotter' | 'plotterparameter';

@@ -11,7 +12,9 @@ export type BeatObject = {|
   // contents (whats stored in the JSON files in the beat prefix)
   contents: any,
   // extra contents that are additions, arent relevant to the web platform, and dont conform to the BEAT schemas
-  extraContents?: any,
+  extraContents?: {
+    groups: Group[]
+  },
 |};

 // format for the settings object
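With the tighter typing, an editor-side toolchain object now carries its groups under extraContents in a checked shape rather than as `any`. A hand-written illustration (not taken from the repository; BeatObject's remaining fields are elided, and the `name` field is only assumed from the editor's Props above):

// illustrative only: a toolchain-flavoured BeatObject with the newly typed extraContents
const exampleToolchain = {
  name: 'test/iris/1',
  contents: { /* toolchain contents, still typed as `any` in BeatObject */ },
  extraContents: {
    // Group[] — editor-only grouping info that does not conform to the BEAT schemas
    groups: [],
  },
};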
conda/js/src/components/toolchain/types.js → conda/js/src/helpers/toolchainTypes.js
@@ -50,3 +50,16 @@ export type LocationMapEntry = {|
 export type LocationMap = {|
   [string]: LocationMapEntry,
 |};
+
+export type Contents = {|
+  description: string,
+  datasets: DatasetBlock[],
+  blocks: NormalBlock[],
+  analyzers: AnalyzerBlock[],
+  connections: ConnectionType[],
+  representation: {
+    blocks: { [string]: BlockCoords },
+    channel_colors: { [string]: string },
+    connections: { [string]: any[] },
+  }
+|};
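Since the typings now live under helpers, the new Contents shape can be consumed from anywhere in the codebase through the same alias. A small usage sketch (the countBlocks helper is hypothetical and not part of this commit):

// @flow
// hypothetical consumer of the relocated typings
import type { Contents } from '@helpers/toolchainTypes';

// counts every block-like entry (datasets, normal blocks, analyzers) in a toolchain
const countBlocks = (contents: Contents): number =>
  contents.datasets.length + contents.blocks.length + contents.analyzers.length;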