file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
App.js | import React from "react"
import Presentation from "./Presentation"
import Icon from 'material-ui/Icon'
import IconButton from 'material-ui/IconButton'
import Grid from 'material-ui/Grid'
import Typography from 'material-ui/Typography'
import { colors } from "../themes/coinium"
require("../themes/coinium/index.css")
const FOOTER_WIDTH = 60
const MODES = {
PRESENTATION: 0,
HELP: 1
}
export default class App extends React.Component {
constructor(props) {
super(props);
this.state = {
mode: MODES.PRESENTATION
};
}
goToSlide(slideName) {
this.setState({mode: MODES.PRESENTATION}, () => {
location.hash = `/${slideName}`
})
}
renderHelp() {
const style = {
height: '100%',
backgroundColor: colors.primary
}
const creditsStyle = {
opacity: 0.8
}
return (
<Grid container direction="column" justify="center" align="center" style={style}>
<Typography type="caption" style={creditsStyle}>
Copyright 2017 Coinium, Inc
<hr />
Contact us: <a href="mailto:[email protected]">[email protected]</a>
<hr />
{"Some icons based on the work of "}
<a href="http://www.freepik.com" title="Freepik">Freepik</a>
{" from "}
<a href="https://www.flaticon.com/" title="Flaticon">www.flaticon.com</a>
{" are licensed by "}
<a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0" target="_blank">CC 3.0 BY</a>
</Typography>
</Grid>
)
}
renderCurrentPage() |
render() {
const mainStyle = {
position: 'fixed',
top: 0,
right: FOOTER_WIDTH,
bottom: 0,
left: 0,
boxShadow: '2px 0px 4px rgba(0,0,0,0.4)',
zIndex: 2,
overflow: 'hidden'
}
const navStyle = {
background: colors.secondary,
position: 'fixed',
top: 0,
right: 0,
bottom: 0,
left: 'auto',
width: FOOTER_WIDTH,
zIndex: 1
}
const onHelpClick = () => {
const mode = this.state.mode == MODES.HELP
? MODES.PRESENTATION
: MODES.HELP
this.setState({mode})
}
return (
<Grid container className="App">
<Grid item style={mainStyle}>
{this.renderCurrentPage()}
</Grid>
<Grid item container direction="column"
justify="space-between" align="center" spacing={0}
style={navStyle}>
<Grid>
<IconButton onClick={this.goToSlide.bind(this, "home")}>
<Icon color="contrast">home</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "problem")}>
<Icon color="contrast">info_outline</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "team")}>
<Icon color="contrast">people</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "mobile")}>
<Icon color="contrast">phone_iphone</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "signup")}>
<Icon color="contrast">insert_drive_file</Icon>
</IconButton>
</Grid>
<Grid>
<IconButton onClick={onHelpClick}>
<Icon color="contrast">help_outline</Icon>
</IconButton>
</Grid>
</Grid>
</Grid>
);
}
} | {
switch (this.state.mode) {
case MODES.PRESENTATION:
return <Presentation />
case MODES.HELP:
return this.renderHelp()
default:
return (
<Typography>Please reload</Typography>
)
}
} | identifier_body |
App.js | import React from "react"
import Presentation from "./Presentation"
import Icon from 'material-ui/Icon'
import IconButton from 'material-ui/IconButton'
import Grid from 'material-ui/Grid'
import Typography from 'material-ui/Typography'
import { colors } from "../themes/coinium"
require("../themes/coinium/index.css")
const FOOTER_WIDTH = 60
const MODES = {
PRESENTATION: 0,
HELP: 1
}
export default class App extends React.Component {
constructor(props) {
super(props);
this.state = {
mode: MODES.PRESENTATION
};
}
goToSlide(slideName) {
this.setState({mode: MODES.PRESENTATION}, () => {
location.hash = `/${slideName}`
})
}
renderHelp() {
const style = {
height: '100%',
backgroundColor: colors.primary
}
const creditsStyle = {
opacity: 0.8
}
return (
<Grid container direction="column" justify="center" align="center" style={style}>
<Typography type="caption" style={creditsStyle}>
Copyright 2017 Coinium, Inc
<hr />
Contact us: <a href="mailto:[email protected]">[email protected]</a>
<hr />
{"Some icons based on the work of "}
<a href="http://www.freepik.com" title="Freepik">Freepik</a>
{" from "}
<a href="https://www.flaticon.com/" title="Flaticon">www.flaticon.com</a>
{" are licensed by "}
<a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0" target="_blank">CC 3.0 BY</a>
</Typography>
</Grid>
)
}
renderCurrentPage() {
switch (this.state.mode) {
case MODES.PRESENTATION:
return <Presentation />
case MODES.HELP:
return this.renderHelp()
default:
return (
<Typography>Please reload</Typography>
)
}
}
render() {
const mainStyle = {
position: 'fixed',
top: 0,
right: FOOTER_WIDTH,
bottom: 0,
left: 0,
boxShadow: '2px 0px 4px rgba(0,0,0,0.4)', | const navStyle = {
background: colors.secondary,
position: 'fixed',
top: 0,
right: 0,
bottom: 0,
left: 'auto',
width: FOOTER_WIDTH,
zIndex: 1
}
const onHelpClick = () => {
const mode = this.state.mode == MODES.HELP
? MODES.PRESENTATION
: MODES.HELP
this.setState({mode})
}
return (
<Grid container className="App">
<Grid item style={mainStyle}>
{this.renderCurrentPage()}
</Grid>
<Grid item container direction="column"
justify="space-between" align="center" spacing={0}
style={navStyle}>
<Grid>
<IconButton onClick={this.goToSlide.bind(this, "home")}>
<Icon color="contrast">home</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "problem")}>
<Icon color="contrast">info_outline</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "team")}>
<Icon color="contrast">people</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "mobile")}>
<Icon color="contrast">phone_iphone</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "signup")}>
<Icon color="contrast">insert_drive_file</Icon>
</IconButton>
</Grid>
<Grid>
<IconButton onClick={onHelpClick}>
<Icon color="contrast">help_outline</Icon>
</IconButton>
</Grid>
</Grid>
</Grid>
);
}
} | zIndex: 2,
overflow: 'hidden'
}
| random_line_split |
App.js | import React from "react"
import Presentation from "./Presentation"
import Icon from 'material-ui/Icon'
import IconButton from 'material-ui/IconButton'
import Grid from 'material-ui/Grid'
import Typography from 'material-ui/Typography'
import { colors } from "../themes/coinium"
require("../themes/coinium/index.css")
const FOOTER_WIDTH = 60
const MODES = {
PRESENTATION: 0,
HELP: 1
}
export default class App extends React.Component {
| (props) {
super(props);
this.state = {
mode: MODES.PRESENTATION
};
}
goToSlide(slideName) {
this.setState({mode: MODES.PRESENTATION}, () => {
location.hash = `/${slideName}`
})
}
// Full-screen help/credits overlay: a centered caption panel on the primary
// theme color listing copyright, contact address, and icon attribution.
renderHelp() {
const style = {
height: '100%',
backgroundColor: colors.primary
}
// Slightly translucent so the credits read as secondary content.
const creditsStyle = {
opacity: 0.8
}
return (
<Grid container direction="column" justify="center" align="center" style={style}>
<Typography type="caption" style={creditsStyle}>
Copyright 2017 Coinium, Inc
<hr />
Contact us: <a href="mailto:[email protected]">[email protected]</a>
<hr />
{"Some icons based on the work of "}
<a href="http://www.freepik.com" title="Freepik">Freepik</a>
{" from "}
<a href="https://www.flaticon.com/" title="Flaticon">www.flaticon.com</a>
{" are licensed by "}
<a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0" target="_blank">CC 3.0 BY</a>
</Typography>
</Grid>
)
}
renderCurrentPage() {
switch (this.state.mode) {
case MODES.PRESENTATION:
return <Presentation />
case MODES.HELP:
return this.renderHelp()
default:
return (
<Typography>Please reload</Typography>
)
}
}
// Render the app shell: the main content pane (presentation or help) plus a
// fixed icon navigation rail along the right edge of the viewport.
render() {
// Main pane fills the viewport except for the nav rail on the right; the
// shadow and higher z-index make it read as sitting above the rail.
const mainStyle = {
position: 'fixed',
top: 0,
right: FOOTER_WIDTH,
bottom: 0,
left: 0,
boxShadow: '2px 0px 4px rgba(0,0,0,0.4)',
zIndex: 2,
overflow: 'hidden'
}
// Vertical nav rail pinned to the right edge, FOOTER_WIDTH px wide.
const navStyle = {
background: colors.secondary,
position: 'fixed',
top: 0,
right: 0,
bottom: 0,
left: 'auto',
width: FOOTER_WIDTH,
zIndex: 1
}
// Toggle between help and presentation modes when the help icon is clicked.
// NOTE(review): loose '==' works here because MODES values are numbers
// (PRESENTATION: 0, HELP: 1), but '===' would be more conventional.
const onHelpClick = () => {
const mode = this.state.mode == MODES.HELP
? MODES.PRESENTATION
: MODES.HELP
this.setState({mode})
}
return (
<Grid container className="App">
<Grid item style={mainStyle}>
{this.renderCurrentPage()}
</Grid>
<Grid item container direction="column"
justify="space-between" align="center" spacing={0}
style={navStyle}>
<Grid>
<IconButton onClick={this.goToSlide.bind(this, "home")}>
<Icon color="contrast">home</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "problem")}>
<Icon color="contrast">info_outline</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "team")}>
<Icon color="contrast">people</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "mobile")}>
<Icon color="contrast">phone_iphone</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "signup")}>
<Icon color="contrast">insert_drive_file</Icon>
</IconButton>
</Grid>
<Grid>
<IconButton onClick={onHelpClick}>
<Icon color="contrast">help_outline</Icon>
</IconButton>
</Grid>
</Grid>
</Grid>
);
}
} | constructor | identifier_name |
cellbase-manager.js | /*
* Copyright (c) 2012 Francisco Salavert (ICM-CIPF)
* Copyright (c) 2012 Ruben Sanchez (ICM-CIPF)
* Copyright (c) 2012 Ignacio Medina (ICM-CIPF)
*
* This file is part of JS Common Libs.
*
* JS Common Libs is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* JS Common Libs is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JS Common Libs. If not, see <http://www.gnu.org/licenses/>.
*/
var CellBaseManager = {
host: 'https://www.ebi.ac.uk/cellbase/webservices/rest',
version: 'v3',
get: function (args) { | var success = args.success;
var error = args.error;
var async = (_.isUndefined(args.async) || _.isNull(args.async) ) ? true : args.async;
var urlConfig = _.omit(args, ['success', 'error', 'async']);
var url = CellBaseManager.url(urlConfig);
if(typeof url === 'undefined'){
return;
}
console.log(url);
var d;
$.ajax({
type: "GET",
url: url,
dataType: 'json',//still firefox 20 does not auto serialize JSON, You can force it to always do the parsing by adding dataType: 'json' to your call.
async: async,
success: function (data, textStatus, jqXHR) {
if($.isPlainObject(data) || $.isArray(data)){
// data.params = args.params;
// data.resource = args.resource;
// data.category = args.category;
// data.subCategory = args.subCategory;
if (_.isFunction(success)) success(data);
d = data;
}else{
console.log('Cellbase returned a non json object or list, please check the url.');
console.log(url);
console.log(data)
}
},
error: function (jqXHR, textStatus, errorThrown) {
console.log("CellBaseManager: Ajax call returned : " + errorThrown + '\t' + textStatus + '\t' + jqXHR.statusText + " END");
if (_.isFunction(error)) error(jqXHR, textStatus, errorThrown);
}
});
return d;
},
// Build a CellBase REST URL of the form
//   {host}/{version}/{species}/{category}/{subCategory}[/{query}]/{resource}?...params
// from the given config object. Host and version fall back to the manager
// defaults; `of=json` is always merged into the query parameters.
// NOTE(review): this mutates the caller's args object (deletes host/version
// and adds `of` to args.params via the shared reference) — confirm callers
// do not reuse the same args object across requests.
url: function (args) {
if (!$.isPlainObject(args)) args = {};
if (!$.isPlainObject(args.params)) args.params = {};
var version = this.version;
if(typeof args.version !== 'undefined' && args.version != null){
version = args.version
}
var host = this.host;
if (typeof args.host !== 'undefined' && args.host != null) {
host = args.host;
}
// Remove overrides so _.extend below cannot clobber the chosen values.
delete args.host;
delete args.version;
var config = {
host: host,
version: version
};
var params = {
of: 'json'
};
_.extend(config, args);
_.extend(config.params, params);
var query = '';
if(typeof config.query !== 'undefined' && config.query != null){
// Arrays are serialised as a comma-separated list (Array.toString()).
if ($.isArray(config.query)) {
config.query = config.query.toString();
}
query = '/' + config.query;
}
//species can be the species code (String) or an object with a text attribute
if ($.isPlainObject(config.species)) {
config.species = Utils.getSpeciesCode(config.species.text);
}
var url = config.host + '/' + config.version + '/' + config.species + '/' + config.category + '/' + config.subCategory + query + '/' + config.resource;
url = Utils.addQueryParamtersToUrl(config.params, url);
return url;
}
}; | random_line_split |
|
cellbase-manager.js | /*
* Copyright (c) 2012 Francisco Salavert (ICM-CIPF)
* Copyright (c) 2012 Ruben Sanchez (ICM-CIPF)
* Copyright (c) 2012 Ignacio Medina (ICM-CIPF)
*
* This file is part of JS Common Libs.
*
* JS Common Libs is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* JS Common Libs is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JS Common Libs. If not, see <http://www.gnu.org/licenses/>.
*/
var CellBaseManager = {
host: 'https://www.ebi.ac.uk/cellbase/webservices/rest',
version: 'v3',
// Perform a GET request against the CellBase REST API.
//
// args: plain object; recognised control keys:
//   success(data) - callback invoked with the parsed JSON payload
//   error(jqXHR, textStatus, errorThrown) - callback on transport failure
//   async         - defaults to true; pass false for a blocking request
// All remaining keys are forwarded to CellBaseManager.url() to build the URL.
//
// Returns the parsed payload ONLY for synchronous calls (async: false);
// when async is true the function returns before the response arrives, so
// `d` is still undefined — callers must rely on the success callback.
get: function (args) {
var success = args.success;
var error = args.error;
// Treat missing/null async as "true" (asynchronous by default).
var async = (_.isUndefined(args.async) || _.isNull(args.async) ) ? true : args.async;
// Everything except the control keys describes the resource to fetch.
var urlConfig = _.omit(args, ['success', 'error', 'async']);
var url = CellBaseManager.url(urlConfig);
if(typeof url === 'undefined'){
return;
}
console.log(url);
var d;
$.ajax({
type: "GET",
url: url,
dataType: 'json',// force jQuery to parse the response as JSON (old Firefox did not auto-parse)
async: async,
success: function (data, textStatus, jqXHR) {
if($.isPlainObject(data) || $.isArray(data)){
// data.params = args.params;
// data.resource = args.resource;
// data.category = args.category;
// data.subCategory = args.subCategory;
if (_.isFunction(success)) success(data);
d = data;
}else{
console.log('Cellbase returned a non json object or list, please check the url.');
console.log(url);
console.log(data)
}
},
error: function (jqXHR, textStatus, errorThrown) {
console.log("CellBaseManager: Ajax call returned : " + errorThrown + '\t' + textStatus + '\t' + jqXHR.statusText + " END");
if (_.isFunction(error)) error(jqXHR, textStatus, errorThrown);
}
});
return d;
},
url: function (args) {
if (!$.isPlainObject(args)) args = {};
if (!$.isPlainObject(args.params)) args.params = {};
var version = this.version;
if(typeof args.version !== 'undefined' && args.version != null){
version = args.version
}
var host = this.host;
if (typeof args.host !== 'undefined' && args.host != null) {
host = args.host;
}
delete args.host;
delete args.version;
var config = {
host: host,
version: version
};
var params = {
of: 'json'
};
_.extend(config, args);
_.extend(config.params, params);
var query = '';
if(typeof config.query !== 'undefined' && config.query != null){
if ($.isArray(config.query)) {
config.query = config.query.toString();
}
query = '/' + config.query;
}
//species can be the species code(String) or an object with text attribute
if ($.isPlainObject(config.species)) |
var url = config.host + '/' + config.version + '/' + config.species + '/' + config.category + '/' + config.subCategory + query + '/' + config.resource;
url = Utils.addQueryParamtersToUrl(config.params, url);
return url;
}
}; | {
config.species = Utils.getSpeciesCode(config.species.text);
} | conditional_block |
test_flord_g_ctdbp_p_dcl_recovered_driver.py | import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler | _author__ = 'jeff roy'
log = get_logger()
class DriverTest(unittest.TestCase):
def test_one(self):
source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
self.assertEquals(particle_data_handler._failure, False)
if __name__ == '__main__':
test = DriverTest('test_one')
test.test_one() | from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH
from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_recovered_driver import parse
| random_line_split |
test_flord_g_ctdbp_p_dcl_recovered_driver.py | import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH
from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_recovered_driver import parse
_author__ = 'jeff roy'
log = get_logger()
class | (unittest.TestCase):
def test_one(self):
    """Parse one recovered flord_g/ctdbp_p DCL file and verify no parser failure.

    Reads a fixture .DAT file from RESOURCE_PATH, runs it through the
    driver's parse() entry point, and asserts that the handler recorded
    no failure. Sample contents are only logged, not asserted.
    """
    source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')

    particle_data_handler = ParticleDataHandler()
    # parse() returns the (mutated) handler; keep the returned reference.
    particle_data_handler = parse(None, source_file_path, particle_data_handler)

    log.debug("SAMPLES: %s", particle_data_handler._samples)
    log.debug("FAILURE: %s", particle_data_handler._failure)

    # assertEquals is a deprecated unittest alias; assertFalse states the
    # intent of "no failure flag set" directly.
    self.assertFalse(particle_data_handler._failure)
if __name__ == '__main__':
test = DriverTest('test_one')
test.test_one()
| DriverTest | identifier_name |
test_flord_g_ctdbp_p_dcl_recovered_driver.py | import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH
from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_recovered_driver import parse
_author__ = 'jeff roy'
log = get_logger()
class DriverTest(unittest.TestCase):
def test_one(self):
source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
self.assertEquals(particle_data_handler._failure, False)
if __name__ == '__main__':
| test = DriverTest('test_one')
test.test_one() | conditional_block |
|
test_flord_g_ctdbp_p_dcl_recovered_driver.py | import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH
from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_recovered_driver import parse
_author__ = 'jeff roy'
log = get_logger()
class DriverTest(unittest.TestCase):
def test_one(self):
|
if __name__ == '__main__':
test = DriverTest('test_one')
test.test_one()
| source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
self.assertEquals(particle_data_handler._failure, False) | identifier_body |
tags.js | import { CONSTANT_TAG, DirtyableTag } from 'glimmer-reference';
import { meta as metaFor } from './meta';
import require from 'require';
import { isProxy } from './is_proxy';
let hasViews = () => false;
export function setHasViews(fn) {
hasViews = fn;
}
function makeTag() {
return new DirtyableTag();
}
export function tagForProperty(object, propertyKey, _meta) {
if (isProxy(object)) {
return tagFor(object, _meta);
}
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
let tags = meta.writableTags();
let tag = tags[propertyKey];
if (tag) { return tag; }
return tags[propertyKey] = makeTag();
} else {
return CONSTANT_TAG;
}
}
export function tagFor(object, _meta) |
export function markObjectAsDirty(meta, propertyKey) {
let objectTag = meta && meta.readableTag();
if (objectTag) {
objectTag.dirty();
}
let tags = meta && meta.readableTags();
let propertyTag = tags && tags[propertyKey];
if (propertyTag) {
propertyTag.dirty();
}
if (objectTag || propertyTag) {
ensureRunloop();
}
}
let run;
function K() {}
function ensureRunloop() {
if (!run) {
run = require('ember-metal/run_loop').default;
}
if (hasViews() && !run.backburner.currentInstance) {
run.schedule('actions', K);
}
}
| {
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
return meta.writableTag(makeTag);
} else {
return CONSTANT_TAG;
}
} | identifier_body |
tags.js | import { CONSTANT_TAG, DirtyableTag } from 'glimmer-reference';
import { meta as metaFor } from './meta';
import require from 'require';
import { isProxy } from './is_proxy';
let hasViews = () => false;
export function setHasViews(fn) {
hasViews = fn;
}
function makeTag() {
return new DirtyableTag();
}
export function tagForProperty(object, propertyKey, _meta) {
if (isProxy(object)) {
return tagFor(object, _meta);
}
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
let tags = meta.writableTags();
let tag = tags[propertyKey]; | }
}
export function tagFor(object, _meta) {
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
return meta.writableTag(makeTag);
} else {
return CONSTANT_TAG;
}
}
export function markObjectAsDirty(meta, propertyKey) {
let objectTag = meta && meta.readableTag();
if (objectTag) {
objectTag.dirty();
}
let tags = meta && meta.readableTags();
let propertyTag = tags && tags[propertyKey];
if (propertyTag) {
propertyTag.dirty();
}
if (objectTag || propertyTag) {
ensureRunloop();
}
}
let run;
function K() {}
function ensureRunloop() {
if (!run) {
run = require('ember-metal/run_loop').default;
}
if (hasViews() && !run.backburner.currentInstance) {
run.schedule('actions', K);
}
} | if (tag) { return tag; }
return tags[propertyKey] = makeTag();
} else {
return CONSTANT_TAG; | random_line_split |
tags.js | import { CONSTANT_TAG, DirtyableTag } from 'glimmer-reference';
import { meta as metaFor } from './meta';
import require from 'require';
import { isProxy } from './is_proxy';
let hasViews = () => false;
export function | (fn) {
hasViews = fn;
}
function makeTag() {
return new DirtyableTag();
}
/**
 * Return the dirtyable tag tracking `propertyKey` on `object`, creating and
 * caching one in the object's meta if it does not exist yet. Proxies are
 * tracked through their object-level tag; non-object values are constant.
 */
export function tagForProperty(object, propertyKey, _meta) {
  // Proxies delegate all property tracking to the whole-object tag.
  if (isProxy(object)) {
    return tagFor(object, _meta);
  }

  const trackable = object !== null && typeof object === 'object';
  if (!trackable) {
    return CONSTANT_TAG;
  }

  const meta = _meta || metaFor(object);
  const tags = meta.writableTags();

  const existing = tags[propertyKey];
  if (existing) {
    return existing;
  }

  const created = makeTag();
  tags[propertyKey] = created;
  return created;
}
/**
 * Return the object-level tag for `object`, lazily created via `makeTag`.
 * Primitives and null carry no mutable state, so they share CONSTANT_TAG.
 */
export function tagFor(object, _meta) {
  const trackable = object !== null && typeof object === 'object';
  if (!trackable) {
    return CONSTANT_TAG;
  }
  return (_meta || metaFor(object)).writableTag(makeTag);
}
/**
 * Dirty both the object-level tag and the per-property tag stored on `meta`
 * (when present), and schedule a runloop flush if anything was dirtied.
 * A missing/null `meta` is a no-op.
 */
export function markObjectAsDirty(meta, propertyKey) {
  const objectTag = meta ? meta.readableTag() : undefined;
  const tags = meta ? meta.readableTags() : undefined;
  const propertyTag = tags ? tags[propertyKey] : undefined;

  if (objectTag) {
    objectTag.dirty();
  }
  if (propertyTag) {
    propertyTag.dirty();
  }

  // Only pay for runloop scheduling when at least one tag actually changed.
  if (objectTag || propertyTag) {
    ensureRunloop();
  }
}
let run;
function K() {}
// Lazily schedule a no-op 'actions' job so a runloop flush happens after
// tags are dirtied — but only when views exist and no runloop is already
// open (otherwise the current loop will flush anyway).
function ensureRunloop() {
// Lazy require avoids a circular import with ember-metal/run_loop.
if (!run) {
run = require('ember-metal/run_loop').default;
}
// K is a no-op; scheduling it just forces the 'actions' queue to flush.
if (hasViews() && !run.backburner.currentInstance) {
run.schedule('actions', K);
}
}
| setHasViews | identifier_name |
tags.js | import { CONSTANT_TAG, DirtyableTag } from 'glimmer-reference';
import { meta as metaFor } from './meta';
import require from 'require';
import { isProxy } from './is_proxy';
let hasViews = () => false;
export function setHasViews(fn) {
hasViews = fn;
}
function makeTag() {
return new DirtyableTag();
}
export function tagForProperty(object, propertyKey, _meta) {
if (isProxy(object)) {
return tagFor(object, _meta);
}
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
let tags = meta.writableTags();
let tag = tags[propertyKey];
if (tag) { return tag; }
return tags[propertyKey] = makeTag();
} else {
return CONSTANT_TAG;
}
}
export function tagFor(object, _meta) {
if (typeof object === 'object' && object) {
let meta = _meta || metaFor(object);
return meta.writableTag(makeTag);
} else {
return CONSTANT_TAG;
}
}
export function markObjectAsDirty(meta, propertyKey) {
let objectTag = meta && meta.readableTag();
if (objectTag) {
objectTag.dirty();
}
let tags = meta && meta.readableTags();
let propertyTag = tags && tags[propertyKey];
if (propertyTag) {
propertyTag.dirty();
}
if (objectTag || propertyTag) {
ensureRunloop();
}
}
let run;
function K() {}
function ensureRunloop() {
if (!run) |
if (hasViews() && !run.backburner.currentInstance) {
run.schedule('actions', K);
}
}
| {
run = require('ember-metal/run_loop').default;
} | conditional_block |
float.rs | use crate::msgpack::encode::*;
#[test]
fn pass_pack_f32() |
/// Packing +infinity as f64 must emit the msgpack float-64 marker (0xcb)
/// followed by the 8 big-endian IEEE-754 bytes of positive infinity.
#[test]
fn pass_pack_f64() {
    use std::f64;

    let mut buf = [0x00u8; 9];
    write_f64(&mut &mut buf[..], f64::INFINITY).ok().unwrap();

    let expected = [0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
    assert_eq!(expected, buf);
}
| {
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00];
write_f32(&mut &mut buf[..], 3.4028234e38_f32).ok().unwrap();
assert_eq!([0xca, 0x7f, 0x7f, 0xff, 0xff], buf);
} | identifier_body |
float.rs | use crate::msgpack::encode::*;
#[test]
fn pass_pack_f32() {
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00]; | write_f32(&mut &mut buf[..], 3.4028234e38_f32).ok().unwrap();
assert_eq!([0xca, 0x7f, 0x7f, 0xff, 0xff], buf);
}
#[test]
fn pass_pack_f64() {
use std::f64;
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
write_f64(&mut &mut buf[..], f64::INFINITY).ok().unwrap();
assert_eq!([0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], buf);
} | random_line_split |
|
float.rs | use crate::msgpack::encode::*;
/// Packing f32::MAX (3.4028234e38) must emit the msgpack float-32 marker
/// (0xca) followed by the 4 big-endian IEEE-754 bytes 0x7f7fffff.
#[test]
fn pass_pack_f32() {
    let mut buf = [0x00u8; 5];
    write_f32(&mut &mut buf[..], 3.4028234e38_f32).ok().unwrap();

    let expected = [0xca, 0x7f, 0x7f, 0xff, 0xff];
    assert_eq!(expected, buf);
}
#[test]
fn | () {
use std::f64;
let mut buf = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
write_f64(&mut &mut buf[..], f64::INFINITY).ok().unwrap();
assert_eq!([0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], buf);
}
| pass_pack_f64 | identifier_name |
functions.js | var slidedelay = $('#bannerslider').attr('data-delay');
var pauseonhover = $('#bannerslider').attr('data-pause');
var fadedelay = 2000;
$(document).ready(function() {
if ($('#bannerslider').hasClass("slide") && $('#bannerslider').hasClass("carousel")) | else if ($('#bannerslider').hasClass("slide") && $('#imageContainer').children().length>1) {
$('#imageContainer').children(':first-child').addClass("showbanner");
setTimeout(nextSlide, slidedelay);
}
});
/**
 * Advance the banner rotation: fade out the image currently marked
 * "showbanner", fade in the next one (wrapping to the first after the
 * last), and schedule the following transition after `slidedelay` ms.
 */
function nextSlide() {
    var images = $('#imageContainer').children();
    $(images).each(function (i) {
        if ($(this).hasClass("showbanner")) {
            $(this).fadeOut(fadedelay).removeClass("showbanner");
            // Wrap around to the first image after the last one.
            var nextIndex = (i === images.length - 1) ? 0 : i + 1;
            $(images[nextIndex]).fadeIn(fadedelay).addClass("showbanner");
            setTimeout(nextSlide, slidedelay);
            return false; // stop .each(): only one banner is visible at a time
        }
    });
}
| {
$('#bannerslider').carousel({
interval: slidedelay,
pause: '"' + pauseonhover + '"'
});
} | conditional_block |
functions.js | var slidedelay = $('#bannerslider').attr('data-delay');
var pauseonhover = $('#bannerslider').attr('data-pause');
var fadedelay = 2000;
$(document).ready(function() {
if ($('#bannerslider').hasClass("slide") && $('#bannerslider').hasClass("carousel")) {
$('#bannerslider').carousel({
interval: slidedelay,
pause: '"' + pauseonhover + '"'
});
} else if ($('#bannerslider').hasClass("slide") && $('#imageContainer').children().length>1) {
$('#imageContainer').children(':first-child').addClass("showbanner");
setTimeout(nextSlide, slidedelay);
}
});
function nextSlide() {
var images = $('#imageContainer').children();
$(images).each( function(i) {
if ($(this).hasClass("showbanner")) {
$(this).fadeOut(fadedelay).removeClass("showbanner"); | return false
}
});
} | var nextIndex = (i == (images.length - 1)) ? 0 : i+1;
$(images[nextIndex]).fadeIn(fadedelay).addClass("showbanner");
setTimeout(nextSlide, slidedelay); | random_line_split |
functions.js | var slidedelay = $('#bannerslider').attr('data-delay');
var pauseonhover = $('#bannerslider').attr('data-pause');
var fadedelay = 2000;
$(document).ready(function() {
if ($('#bannerslider').hasClass("slide") && $('#bannerslider').hasClass("carousel")) {
$('#bannerslider').carousel({
interval: slidedelay,
pause: '"' + pauseonhover + '"'
});
} else if ($('#bannerslider').hasClass("slide") && $('#imageContainer').children().length>1) {
$('#imageContainer').children(':first-child').addClass("showbanner");
setTimeout(nextSlide, slidedelay);
}
});
function nextSlide() | {
var images = $('#imageContainer').children();
$(images).each( function(i) {
if ($(this).hasClass("showbanner")) {
$(this).fadeOut(fadedelay).removeClass("showbanner");
var nextIndex = (i == (images.length - 1)) ? 0 : i+1;
$(images[nextIndex]).fadeIn(fadedelay).addClass("showbanner");
setTimeout(nextSlide, slidedelay);
return false
}
});
} | identifier_body |
|
functions.js | var slidedelay = $('#bannerslider').attr('data-delay');
var pauseonhover = $('#bannerslider').attr('data-pause');
var fadedelay = 2000;
$(document).ready(function() {
if ($('#bannerslider').hasClass("slide") && $('#bannerslider').hasClass("carousel")) {
$('#bannerslider').carousel({
interval: slidedelay,
pause: '"' + pauseonhover + '"'
});
} else if ($('#bannerslider').hasClass("slide") && $('#imageContainer').children().length>1) {
$('#imageContainer').children(':first-child').addClass("showbanner");
setTimeout(nextSlide, slidedelay);
}
});
function | () {
var images = $('#imageContainer').children();
$(images).each( function(i) {
if ($(this).hasClass("showbanner")) {
$(this).fadeOut(fadedelay).removeClass("showbanner");
var nextIndex = (i == (images.length - 1)) ? 0 : i+1;
$(images[nextIndex]).fadeIn(fadedelay).addClass("showbanner");
setTimeout(nextSlide, slidedelay);
return false
}
});
}
| nextSlide | identifier_name |
white_world.js | /*var bmd, map, layer, marker, currentTile;
var score, scoreTextValue, nBlackTextValue, textStyle_Key, textStyle_Value;
var cursors;
var player;
var jumpButton, jumpTimer;
var background, colorBackground, backgroundDelay, changeBackground, screenDelay;
var index;
var floors;
var timer;
var A,S,D,F;
var obstacles;
var velocityUp;*/
var laserRojoHGroup;
var laserRojoVGroup;
var laserRojoH;
var laserRojoV;
var laserDelay;
var nLaserH;
var White_World = {
preload : function() {
game.load.spritesheet('camaleonWalk', 'assets/images/Camaleon.png', 31, 27);
game.load.image('floor', 'assets/images/spikess.png');
game.load.image('backgroundWhite', 'assets/images/backgroundWhite.png');
game.load.spritesheet('laserRojoHorizontal', 'assets/images/laser_horizontal.png',800,32);
},
create : function() {
game.physics.startSystem(Phaser.Physics.ARCADE);
game.physics.arcade.gravity.y = 450;
// Create our Timer
timer = game.time.create(false);
music_mundo2 = game.add.audio('music_mundo2', 1, true);
music_mundo2.play();
sfx_laser = game.add.audio('sfx_laser2');
sfx_laser.addMarker('laser', 2.5, 2);
background = game.add.tileSprite(0, 0, 800, 600, "backgroundWhite");
background.fixedToCamera = true;
jumpTimer = 0;
currentTile = 0;
score = 0;
laserDelay = 4;
nLaserH = 3;
//Paleta de colores
map = game.add.tilemap();
bmd = game.add.bitmapData(32 * 2, 32 * 1);
var color = Phaser.Color.createColor(64,64,64);//Black
bmd.rect(0*32, 0, 32, 32, color.rgba);
color = Phaser.Color.createColor(255,255,255); //White
bmd.rect(1*32, 0, 64, 32, color.rgba);
// Add a Tileset image to the map
map.addTilesetImage('tiles', bmd);
// Creates a new blank layer and sets the map dimensions.
// In this case the map is 40x30 tiles in size and the tiles are 32x32 pixels in size.
layer = map.create('level1', 2000, 30, 32, 32); //Intentar Corregir el 2000
// Populate some tiles for our player to start on with color Blue
for (var i = 0; i < 20; i++){
i < 10 ? map.putTile(0, i, 10, layer) : map.putTile(0, i, 10, layer);
}
//Se setea Blanco con collider debido al background inicial Azul, y color Negro siempre tiene Collider;
map.setCollision([0], true);
// Create our tile selector at the top of the screen
this.createTileSelector();
// Add Text to top of game.
textStyle_Key = { font: "bold 14px sans-serif", fill: "#46c0f9", align: "center" };
textStyle_Value = { font: "bold 18px sans-serif", fill: "#46c0f9", align: "center" };
// Score.
game.add.text(30, 40, "Distance", textStyle_Key).fixedToCamera = true;
scoreTextValue = game.add.text(100, 38, score.toString(), textStyle_Value);
scoreTextValue.fixedToCamera = true;
// Letras con que se activa cada Tile
game.add.text(12, 10, "A", textStyle_Key).fixedToCamera = true;
game.add.text(44, 10, "S", textStyle_Key).fixedToCamera = true;
this.createFloor();
this.laserRojoCreate();
// Crea Player
this.createPlayer();
cursors = game.input.keyboard.createCursorKeys();
jumpButton = game.input.keyboard.addKey(Phaser.Keyboard.SPACEBAR);
A = game.input.keyboard.addKey(Phaser.Keyboard.A);
S = game.input.keyboard.addKey(Phaser.Keyboard.S);
game.input.addMoveCallback(this.updateMarker, this);
timer.start();
},
update : function() {
game.world.setBounds(player.xChange, 0, game.width + player.xChange, game.world.height);
game.physics.arcade.collide(player, layer);
this.addScore();
this.playerMove();
if(timer.seconds > laserDelay){
if(laserRojoHGroup.exists){
laserRojoHGroup.forEach(function(laserRojoH) {
laserRojoH.kill();
});
}
this.laserRojoHorizontal();
}
laserRojoHGroup.forEach(function(laserRojo) {
if(laserRojo.frame == 14){
laserRojo.kill();
}
});
game.physics.arcade.overlap(laserRojoHGroup, player, this.playerLaserCollision, null, this); | },
playerCollision : function(){
this.gameOver();
},
playerLaserCollision : function(pj, laser){
if(laser.frame == 10){
this.gameOver();
}
},
addScore : function(){
score = Math.floor(player.x / 50) + GlobalScore;
scoreTextValue.text = score.toString();
},
laserRojoCreate : function() {
laserRojoHGroup = game.add.group();
laserRojoHGroup.enableBody = true;
laserRojoHGroup.createMultiple(10, 'laserRojoHorizontal', 0, false);
},
laserRojoHorizontal: function() {
var posX = 0;
var posY = Math.floor(player.y / 32);
for(var i = 0; i < nLaserH; i++){
var laserRojoH = laserRojoHGroup.getFirstDead(true, posX, posY*32 + i*64);
laserRojoH.body.immovable = true;
laserRojoH.body.allowGravity = false;
laserRojoH.fixedToCamera = true;
laserRojoH.body.setSize(800,20,0,6);
laserRojoH.animations.add('laserRojo', [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14], 8, false);
laserRojoH.play('laserRojo');
sfx_laser.play('laser');
}
for(var i = 0; i < (nLaserH - 1); i++){
var laserRojoH = laserRojoHGroup.getFirstDead(true, posX, posY*32 - (i+1)*64);
laserRojoH.body.immovable = true;
laserRojoH.body.allowGravity = false;
laserRojoH.fixedToCamera = true;
laserRojoH.body.setSize(800,20,0,6);
laserRojoH.animations.add('laserRojo', [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14], 8, false);
laserRojoH.play('laserRojo');
}
laserDelay = timer.seconds + 4;
},
createFloor : function(){
floors = game.add.group();
floors.enableBody = true;
var floor = floors.create(0, game.world.height - 32, 'floor');
//floor.scale.x = game.world.width;
//floor.scale.setTo(0.25, 0.25);
floor.fixedToCamera = true;
floor.body.immovable = true;
floor.body.allowGravity = false;
},
createPlayer : function() {
player = game.add.sprite(96, game.world.centerY - 30, 'camaleonWalk');
player.xOrig = player.x;
player.xChange = 0;
//Asegurarse de setear al jugador al inicio del mapa
game.world.setBounds(player.xChange, 0, game.width, game.world.height);
game.physics.arcade.enable(player);
player.body.collideWorldBounds = true;
player.body.setSize(32,16,0,11);
player.animations.add('camaleonWalkBlack', [24,25,26,27], 20, true);
player.animations.add('camaleonJumpBlack', [28,29,30,31,32,33,34,35], 12, true);
player.animations.add('camaleonWalkBlue', [12,13,14,15], 20, true);
player.animations.add('camaleonJumpBlue', [16,17,18,19,20,21,22,23], 12, true);
},
playerMove : function(){
player.body.velocity.x = 150 + velocityUp;
if(player.body.onFloor()){
player.play('camaleonWalkBlack');
}
else{
player.play('camaleonJumpBlack');
}
//Si player está en el suelo su caja de colision es player.body.setSize(32,16,0,11);
if(player.body.onFloor()){
player.body.setSize(32,16,0,11);
}
//Si player está en el aire su colsion es player.body.setSize(32,32,0,0)
else{
player.body.setSize(21,16,6,9);
}
player.xChange = Math.max(Math.abs(player.x - player.xOrig), player.xChange);
if(A.isDown){
currentTileMarker.x = 0;
currentTileMarker.y = 0;
currentTile = 0;
}
else if(S.isDown){
currentTileMarker.x = 32;
currentTileMarker.y = 0;
currentTile = 1;
}
if (jumpButton.isDown && player.body.onFloor() && game.time.now > jumpTimer)
{
player.body.velocity.y = -220;
jumpTimer = game.time.now + 750;
}
},
pickTile: function(sprite, pointer) {
var x = game.math.snapToFloor(pointer.x, 32, 0);
var y = game.math.snapToFloor(pointer.y, 32, 0);
currentTileMarker.x = x;
currentTileMarker.y = y;
x /= 32;
y /= 32;
currentTile = x + (y * 25);
},
updateMarker : function() {
marker.x = layer.getTileX(game.input.activePointer.worldX) * 32;
marker.y = layer.getTileY(game.input.activePointer.worldY) * 32;
if (game.input.mousePointer.isDown && marker.y > 32 && marker.y < (game.world.height - 32))
{
//Acá utilizar sprite en vez de currenTile
map.putTile(currentTile, layer.getTileX(marker.x), layer.getTileY(marker.y), layer);
}
},
createTileSelector : function() {
// Our tile selection window
var tileSelector = game.add.group();
var tileSelectorBackground = game.make.graphics();
tileSelectorBackground.beginFill(0x000000, 0.8);
tileSelectorBackground.drawRect(0, 0, 64, 33);
tileSelectorBackground.endFill();
tileSelector.add(tileSelectorBackground);
var tileStrip = tileSelector.create(1, 1, bmd);
tileStrip.inputEnabled = true;
//tileStrip.events.onInputDown.add(this.pickTile, this); //Permite cambiar color haciando click en la paleta de colores
// Our painting marker (El marcador negro)
marker = game.add.graphics();
marker.lineStyle(2, 0x000000, 1);
marker.drawRect(0, 0, 32, 32);
// Our current tile marker
currentTileMarker = game.add.graphics();
currentTileMarker.lineStyle(1, 0xffffff, 1);
currentTileMarker.drawRect(0, 0, 32, 32);
tileSelector.add(currentTileMarker);
tileSelector.fixedToCamera = true;
},
gameOver : function(){
//TGS.Analytics.logGameEvent('end');
sfx_colision.play('colision');
music_mundo2.stop();
game.world.setBounds(0, 0, game.width, game.height);
GlobalScore = score;
game.state.start('Game_Over');
},
render : function(){
laserRojoHGroup.forEach(function(laserRojo) {
//game.debug.body(laserRojo);
});
}
} | game.physics.arcade.overlap(obstacles, player, this.playerCollision, null, this);
game.physics.arcade.overlap(player, floors, this.gameOver, null, this); | random_line_split |
white_world.js | /*var bmd, map, layer, marker, currentTile;
var score, scoreTextValue, nBlackTextValue, textStyle_Key, textStyle_Value;
var cursors;
var player;
var jumpButton, jumpTimer;
var background, colorBackground, backgroundDelay, changeBackground, screenDelay;
var index;
var floors;
var timer;
var A,S,D,F;
var obstacles;
var velocityUp;*/
var laserRojoHGroup;
var laserRojoVGroup;
var laserRojoH;
var laserRojoV;
var laserDelay;
var nLaserH;
var White_World = {
preload : function() {
game.load.spritesheet('camaleonWalk', 'assets/images/Camaleon.png', 31, 27);
game.load.image('floor', 'assets/images/spikess.png');
game.load.image('backgroundWhite', 'assets/images/backgroundWhite.png');
game.load.spritesheet('laserRojoHorizontal', 'assets/images/laser_horizontal.png',800,32);
},
create : function() {
game.physics.startSystem(Phaser.Physics.ARCADE);
game.physics.arcade.gravity.y = 450;
// Create our Timer
timer = game.time.create(false);
music_mundo2 = game.add.audio('music_mundo2', 1, true);
music_mundo2.play();
sfx_laser = game.add.audio('sfx_laser2');
sfx_laser.addMarker('laser', 2.5, 2);
background = game.add.tileSprite(0, 0, 800, 600, "backgroundWhite");
background.fixedToCamera = true;
jumpTimer = 0;
currentTile = 0;
score = 0;
laserDelay = 4;
nLaserH = 3;
//Paleta de colores
map = game.add.tilemap();
bmd = game.add.bitmapData(32 * 2, 32 * 1);
var color = Phaser.Color.createColor(64,64,64);//Black
bmd.rect(0*32, 0, 32, 32, color.rgba);
color = Phaser.Color.createColor(255,255,255); //White
bmd.rect(1*32, 0, 64, 32, color.rgba);
// Add a Tileset image to the map
map.addTilesetImage('tiles', bmd);
// Creates a new blank layer and sets the map dimensions.
// In this case the map is 40x30 tiles in size and the tiles are 32x32 pixels in size.
layer = map.create('level1', 2000, 30, 32, 32); //Intentar Corregir el 2000
// Populate some tiles for our player to start on with color Blue
for (var i = 0; i < 20; i++){
i < 10 ? map.putTile(0, i, 10, layer) : map.putTile(0, i, 10, layer);
}
//Se setea Blanco con collider debido al background inicial Azul, y color Negro siempre tiene Collider;
map.setCollision([0], true);
// Create our tile selector at the top of the screen
this.createTileSelector();
// Add Text to top of game.
textStyle_Key = { font: "bold 14px sans-serif", fill: "#46c0f9", align: "center" };
textStyle_Value = { font: "bold 18px sans-serif", fill: "#46c0f9", align: "center" };
// Score.
game.add.text(30, 40, "Distance", textStyle_Key).fixedToCamera = true;
scoreTextValue = game.add.text(100, 38, score.toString(), textStyle_Value);
scoreTextValue.fixedToCamera = true;
// Letras con que se activa cada Tile
game.add.text(12, 10, "A", textStyle_Key).fixedToCamera = true;
game.add.text(44, 10, "S", textStyle_Key).fixedToCamera = true;
this.createFloor();
this.laserRojoCreate();
// Crea Player
this.createPlayer();
cursors = game.input.keyboard.createCursorKeys();
jumpButton = game.input.keyboard.addKey(Phaser.Keyboard.SPACEBAR);
A = game.input.keyboard.addKey(Phaser.Keyboard.A);
S = game.input.keyboard.addKey(Phaser.Keyboard.S);
game.input.addMoveCallback(this.updateMarker, this);
timer.start();
},
update : function() {
game.world.setBounds(player.xChange, 0, game.width + player.xChange, game.world.height);
game.physics.arcade.collide(player, layer);
this.addScore();
this.playerMove();
if(timer.seconds > laserDelay){
if(laserRojoHGroup.exists) |
this.laserRojoHorizontal();
}
laserRojoHGroup.forEach(function(laserRojo) {
if(laserRojo.frame == 14){
laserRojo.kill();
}
});
game.physics.arcade.overlap(laserRojoHGroup, player, this.playerLaserCollision, null, this);
game.physics.arcade.overlap(obstacles, player, this.playerCollision, null, this);
game.physics.arcade.overlap(player, floors, this.gameOver, null, this);
},
playerCollision : function(){
this.gameOver();
},
playerLaserCollision : function(pj, laser){
if(laser.frame == 10){
this.gameOver();
}
},
addScore : function(){
score = Math.floor(player.x / 50) + GlobalScore;
scoreTextValue.text = score.toString();
},
laserRojoCreate : function() {
laserRojoHGroup = game.add.group();
laserRojoHGroup.enableBody = true;
laserRojoHGroup.createMultiple(10, 'laserRojoHorizontal', 0, false);
},
laserRojoHorizontal: function() {
var posX = 0;
var posY = Math.floor(player.y / 32);
for(var i = 0; i < nLaserH; i++){
var laserRojoH = laserRojoHGroup.getFirstDead(true, posX, posY*32 + i*64);
laserRojoH.body.immovable = true;
laserRojoH.body.allowGravity = false;
laserRojoH.fixedToCamera = true;
laserRojoH.body.setSize(800,20,0,6);
laserRojoH.animations.add('laserRojo', [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14], 8, false);
laserRojoH.play('laserRojo');
sfx_laser.play('laser');
}
for(var i = 0; i < (nLaserH - 1); i++){
var laserRojoH = laserRojoHGroup.getFirstDead(true, posX, posY*32 - (i+1)*64);
laserRojoH.body.immovable = true;
laserRojoH.body.allowGravity = false;
laserRojoH.fixedToCamera = true;
laserRojoH.body.setSize(800,20,0,6);
laserRojoH.animations.add('laserRojo', [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14], 8, false);
laserRojoH.play('laserRojo');
}
laserDelay = timer.seconds + 4;
},
createFloor : function(){
floors = game.add.group();
floors.enableBody = true;
var floor = floors.create(0, game.world.height - 32, 'floor');
//floor.scale.x = game.world.width;
//floor.scale.setTo(0.25, 0.25);
floor.fixedToCamera = true;
floor.body.immovable = true;
floor.body.allowGravity = false;
},
createPlayer : function() {
player = game.add.sprite(96, game.world.centerY - 30, 'camaleonWalk');
player.xOrig = player.x;
player.xChange = 0;
//Asegurarse de setear al jugador al inicio del mapa
game.world.setBounds(player.xChange, 0, game.width, game.world.height);
game.physics.arcade.enable(player);
player.body.collideWorldBounds = true;
player.body.setSize(32,16,0,11);
player.animations.add('camaleonWalkBlack', [24,25,26,27], 20, true);
player.animations.add('camaleonJumpBlack', [28,29,30,31,32,33,34,35], 12, true);
player.animations.add('camaleonWalkBlue', [12,13,14,15], 20, true);
player.animations.add('camaleonJumpBlue', [16,17,18,19,20,21,22,23], 12, true);
},
playerMove : function(){
player.body.velocity.x = 150 + velocityUp;
if(player.body.onFloor()){
player.play('camaleonWalkBlack');
}
else{
player.play('camaleonJumpBlack');
}
//Si player está en el suelo su caja de colision es player.body.setSize(32,16,0,11);
if(player.body.onFloor()){
player.body.setSize(32,16,0,11);
}
//Si player está en el aire su colsion es player.body.setSize(32,32,0,0)
else{
player.body.setSize(21,16,6,9);
}
player.xChange = Math.max(Math.abs(player.x - player.xOrig), player.xChange);
if(A.isDown){
currentTileMarker.x = 0;
currentTileMarker.y = 0;
currentTile = 0;
}
else if(S.isDown){
currentTileMarker.x = 32;
currentTileMarker.y = 0;
currentTile = 1;
}
if (jumpButton.isDown && player.body.onFloor() && game.time.now > jumpTimer)
{
player.body.velocity.y = -220;
jumpTimer = game.time.now + 750;
}
},
pickTile: function(sprite, pointer) {
var x = game.math.snapToFloor(pointer.x, 32, 0);
var y = game.math.snapToFloor(pointer.y, 32, 0);
currentTileMarker.x = x;
currentTileMarker.y = y;
x /= 32;
y /= 32;
currentTile = x + (y * 25);
},
updateMarker : function() {
marker.x = layer.getTileX(game.input.activePointer.worldX) * 32;
marker.y = layer.getTileY(game.input.activePointer.worldY) * 32;
if (game.input.mousePointer.isDown && marker.y > 32 && marker.y < (game.world.height - 32))
{
//Acá utilizar sprite en vez de currenTile
map.putTile(currentTile, layer.getTileX(marker.x), layer.getTileY(marker.y), layer);
}
},
createTileSelector : function() {
// Our tile selection window
var tileSelector = game.add.group();
var tileSelectorBackground = game.make.graphics();
tileSelectorBackground.beginFill(0x000000, 0.8);
tileSelectorBackground.drawRect(0, 0, 64, 33);
tileSelectorBackground.endFill();
tileSelector.add(tileSelectorBackground);
var tileStrip = tileSelector.create(1, 1, bmd);
tileStrip.inputEnabled = true;
//tileStrip.events.onInputDown.add(this.pickTile, this); //Permite cambiar color haciando click en la paleta de colores
// Our painting marker (El marcador negro)
marker = game.add.graphics();
marker.lineStyle(2, 0x000000, 1);
marker.drawRect(0, 0, 32, 32);
// Our current tile marker
currentTileMarker = game.add.graphics();
currentTileMarker.lineStyle(1, 0xffffff, 1);
currentTileMarker.drawRect(0, 0, 32, 32);
tileSelector.add(currentTileMarker);
tileSelector.fixedToCamera = true;
},
gameOver : function(){
//TGS.Analytics.logGameEvent('end');
sfx_colision.play('colision');
music_mundo2.stop();
game.world.setBounds(0, 0, game.width, game.height);
GlobalScore = score;
game.state.start('Game_Over');
},
render : function(){
laserRojoHGroup.forEach(function(laserRojo) {
//game.debug.body(laserRojo);
});
}
} | {
laserRojoHGroup.forEach(function(laserRojoH) {
laserRojoH.kill();
});
} | conditional_block |
loggerExtended.js | /**
* Aru
* Logger Extension
* Copyright (C) 2018 - Present, PyroclasticMayhem
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
// Import dependencies
const Akalogger = require('akalogger');
module.exports = class AruLog extends Akalogger {
cmdUsage (cmdName, msg, args) {
if (!msg.channel.guild) {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}`);
} else {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}`);
}
}
cmdUsageError (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
cmdUsageWarn (cmdName, msg, args, err) |
};
| {
if (!msg.channel.guild) {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
} | identifier_body |
loggerExtended.js | /**
* Aru
* Logger Extension
* Copyright (C) 2018 - Present, PyroclasticMayhem
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
// Import dependencies
const Akalogger = require('akalogger');
| module.exports = class AruLog extends Akalogger {
cmdUsage (cmdName, msg, args) {
if (!msg.channel.guild) {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}`);
} else {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}`);
}
}
cmdUsageError (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
cmdUsageWarn (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
}; | random_line_split |
|
loggerExtended.js | /**
* Aru
* Logger Extension
* Copyright (C) 2018 - Present, PyroclasticMayhem
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
// Import dependencies
const Akalogger = require('akalogger');
module.exports = class AruLog extends Akalogger {
| (cmdName, msg, args) {
if (!msg.channel.guild) {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}`);
} else {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}`);
}
}
cmdUsageError (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
cmdUsageWarn (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
};
| cmdUsage | identifier_name |
loggerExtended.js | /**
* Aru
* Logger Extension
* Copyright (C) 2018 - Present, PyroclasticMayhem
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
// Import dependencies
const Akalogger = require('akalogger');
module.exports = class AruLog extends Akalogger {
cmdUsage (cmdName, msg, args) {
if (!msg.channel.guild) {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}`);
} else |
}
cmdUsageError (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.error(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
cmdUsageWarn (cmdName, msg, args, err) {
if (!msg.channel.guild) {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in private messages with args ${args}: ${err}`);
} else {
this.warn(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}: ${err}`);
}
}
};
| {
this.info(`${cmdName} used by ${msg.author.username}#${msg.author.discriminator} in ${msg.channel.guild.name}#${msg.channel.name} with args ${args}`);
} | conditional_block |
ansiprint.py | #!/usr/bin/env python
'''Print message using ANSI terminal codes'''
__author__ = "Miki Tebeka <[email protected]>"
from sys import stdout, stderr
# Format
bright = 1
dim = 2
underline = 4
blink = 5
reverse = 7
hidden = 8
# Forground
black = 30
red = 31
green = 32
yellow = 33
blue = 34
magenta = 35
cyan = 36
white = 37
# Background
on_black = 40
on_red = 41
on_green = 42
on_yellow = 43
on_blue = 44
on_magenta = 45
on_cyan = 46
on_white = 47
def ansiformat(msg, *args):
'''Format msg according to args.
See http://www.termsys.demon.co.uk/vtansi.htm for more details/
'''
return "\033[%sm%s\033[0m" % (";".join(["%s" % f for f in args]), msg)
def ansiprint(msg, *args, **kw):
|
if __name__ == "__main__":
from sys import argv, exit
from os.path import basename
h = {
"bright" : bright,
"dim" : dim,
"underline" : underline,
"blink" : blink,
"reverse" : reverse,
"hidden" : hidden,
"black" : black,
"red" : red,
"green" : green,
"yellow" : yellow,
"blue" : blue,
"magenta" : magenta,
"cyan" : cyan,
"white" : white,
"on_black" : on_black,
"on_red" : on_red,
"on_green" : on_green,
"on_yellow" : on_yellow,
"on_blue" : on_blue,
"on_magenta" : on_magenta,
"on_cyan" : on_cyan,
"on_white" : on_white
}
eg = "e.g. ansiprint hello red on_green underline -> %s" % \
ansiformat("hello", red, on_green, underline)
if len(argv) < 2:
print >> stderr, "usage: %s message [format ...]" % basename(argv[0])
print >> stderr, eg
exit(1)
for i in argv[2:]:
if i not in h:
ansiprint("%s: Unknown format\n" % i, red, bright, stderr=True)
print >> stderr, "Formats can be:",
msg = ", ".join([ansiformat(f, h[f]) for f in h.keys()])
print msg
print >> stderr, eg
exit(1)
ansiprint(argv[1], *[h[i] for i in argv[2:]])
print
| '''Print formatted message.
Should work on ANSI compatible terminal.
'''
if kw.get("stderr", 0):
outfo = stderr
else:
outfo = stdout
outfo.write(ansiformat(msg, *args))
outfo.flush() | identifier_body |
ansiprint.py | #!/usr/bin/env python
'''Print message using ANSI terminal codes'''
__author__ = "Miki Tebeka <[email protected]>"
from sys import stdout, stderr
# Format
bright = 1
dim = 2
underline = 4
blink = 5
reverse = 7
hidden = 8
| green = 32
yellow = 33
blue = 34
magenta = 35
cyan = 36
white = 37
# Background
on_black = 40
on_red = 41
on_green = 42
on_yellow = 43
on_blue = 44
on_magenta = 45
on_cyan = 46
on_white = 47
def ansiformat(msg, *args):
'''Format msg according to args.
See http://www.termsys.demon.co.uk/vtansi.htm for more details/
'''
return "\033[%sm%s\033[0m" % (";".join(["%s" % f for f in args]), msg)
def ansiprint(msg, *args, **kw):
'''Print formatted message.
Should work on ANSI compatible terminal.
'''
if kw.get("stderr", 0):
outfo = stderr
else:
outfo = stdout
outfo.write(ansiformat(msg, *args))
outfo.flush()
if __name__ == "__main__":
from sys import argv, exit
from os.path import basename
h = {
"bright" : bright,
"dim" : dim,
"underline" : underline,
"blink" : blink,
"reverse" : reverse,
"hidden" : hidden,
"black" : black,
"red" : red,
"green" : green,
"yellow" : yellow,
"blue" : blue,
"magenta" : magenta,
"cyan" : cyan,
"white" : white,
"on_black" : on_black,
"on_red" : on_red,
"on_green" : on_green,
"on_yellow" : on_yellow,
"on_blue" : on_blue,
"on_magenta" : on_magenta,
"on_cyan" : on_cyan,
"on_white" : on_white
}
eg = "e.g. ansiprint hello red on_green underline -> %s" % \
ansiformat("hello", red, on_green, underline)
if len(argv) < 2:
print >> stderr, "usage: %s message [format ...]" % basename(argv[0])
print >> stderr, eg
exit(1)
for i in argv[2:]:
if i not in h:
ansiprint("%s: Unknown format\n" % i, red, bright, stderr=True)
print >> stderr, "Formats can be:",
msg = ", ".join([ansiformat(f, h[f]) for f in h.keys()])
print msg
print >> stderr, eg
exit(1)
ansiprint(argv[1], *[h[i] for i in argv[2:]])
print | # Forground
black = 30
red = 31 | random_line_split |
ansiprint.py | #!/usr/bin/env python
'''Print message using ANSI terminal codes'''
__author__ = "Miki Tebeka <[email protected]>"
from sys import stdout, stderr
# Format
bright = 1
dim = 2
underline = 4
blink = 5
reverse = 7
hidden = 8
# Forground
black = 30
red = 31
green = 32
yellow = 33
blue = 34
magenta = 35
cyan = 36
white = 37
# Background
on_black = 40
on_red = 41
on_green = 42
on_yellow = 43
on_blue = 44
on_magenta = 45
on_cyan = 46
on_white = 47
def ansiformat(msg, *args):
'''Format msg according to args.
See http://www.termsys.demon.co.uk/vtansi.htm for more details/
'''
return "\033[%sm%s\033[0m" % (";".join(["%s" % f for f in args]), msg)
def ansiprint(msg, *args, **kw):
'''Print formatted message.
Should work on ANSI compatible terminal.
'''
if kw.get("stderr", 0):
|
else:
outfo = stdout
outfo.write(ansiformat(msg, *args))
outfo.flush()
if __name__ == "__main__":
from sys import argv, exit
from os.path import basename
h = {
"bright" : bright,
"dim" : dim,
"underline" : underline,
"blink" : blink,
"reverse" : reverse,
"hidden" : hidden,
"black" : black,
"red" : red,
"green" : green,
"yellow" : yellow,
"blue" : blue,
"magenta" : magenta,
"cyan" : cyan,
"white" : white,
"on_black" : on_black,
"on_red" : on_red,
"on_green" : on_green,
"on_yellow" : on_yellow,
"on_blue" : on_blue,
"on_magenta" : on_magenta,
"on_cyan" : on_cyan,
"on_white" : on_white
}
eg = "e.g. ansiprint hello red on_green underline -> %s" % \
ansiformat("hello", red, on_green, underline)
if len(argv) < 2:
print >> stderr, "usage: %s message [format ...]" % basename(argv[0])
print >> stderr, eg
exit(1)
for i in argv[2:]:
if i not in h:
ansiprint("%s: Unknown format\n" % i, red, bright, stderr=True)
print >> stderr, "Formats can be:",
msg = ", ".join([ansiformat(f, h[f]) for f in h.keys()])
print msg
print >> stderr, eg
exit(1)
ansiprint(argv[1], *[h[i] for i in argv[2:]])
print
| outfo = stderr | conditional_block |
ansiprint.py | #!/usr/bin/env python
'''Print message using ANSI terminal codes'''
__author__ = "Miki Tebeka <[email protected]>"
from sys import stdout, stderr
# Format
bright = 1
dim = 2
underline = 4
blink = 5
reverse = 7
hidden = 8
# Forground
black = 30
red = 31
green = 32
yellow = 33
blue = 34
magenta = 35
cyan = 36
white = 37
# Background
on_black = 40
on_red = 41
on_green = 42
on_yellow = 43
on_blue = 44
on_magenta = 45
on_cyan = 46
on_white = 47
def ansiformat(msg, *args):
'''Format msg according to args.
See http://www.termsys.demon.co.uk/vtansi.htm for more details/
'''
return "\033[%sm%s\033[0m" % (";".join(["%s" % f for f in args]), msg)
def | (msg, *args, **kw):
'''Print formatted message.
Should work on ANSI compatible terminal.
'''
if kw.get("stderr", 0):
outfo = stderr
else:
outfo = stdout
outfo.write(ansiformat(msg, *args))
outfo.flush()
if __name__ == "__main__":
from sys import argv, exit
from os.path import basename
h = {
"bright" : bright,
"dim" : dim,
"underline" : underline,
"blink" : blink,
"reverse" : reverse,
"hidden" : hidden,
"black" : black,
"red" : red,
"green" : green,
"yellow" : yellow,
"blue" : blue,
"magenta" : magenta,
"cyan" : cyan,
"white" : white,
"on_black" : on_black,
"on_red" : on_red,
"on_green" : on_green,
"on_yellow" : on_yellow,
"on_blue" : on_blue,
"on_magenta" : on_magenta,
"on_cyan" : on_cyan,
"on_white" : on_white
}
eg = "e.g. ansiprint hello red on_green underline -> %s" % \
ansiformat("hello", red, on_green, underline)
if len(argv) < 2:
print >> stderr, "usage: %s message [format ...]" % basename(argv[0])
print >> stderr, eg
exit(1)
for i in argv[2:]:
if i not in h:
ansiprint("%s: Unknown format\n" % i, red, bright, stderr=True)
print >> stderr, "Formats can be:",
msg = ", ".join([ansiformat(f, h[f]) for f in h.keys()])
print msg
print >> stderr, eg
exit(1)
ansiprint(argv[1], *[h[i] for i in argv[2:]])
print
| ansiprint | identifier_name |
emailer.ts | import * as SendGrid from 'sendgrid';
import {EmailTemplate} from 'email-templates';
/**
* Static class email module that provides simple Plug and Play functions to send custom templated emails using the SendGrid API.
*/
export class | {
/**
* Assign the API key required by SendGrid to send emails.
* @param {string} key - The SendGrid API key.
*/
public static initKey(key: string) {
this.sendGrid = SendGrid(key);
}
/**
* Send an HTML formatted, custom templated email using the SendGrid API.
* @param {MailOptions} opts - Template email's delivery fields and content.
* @param {Function} callback - The callback function that contains an error if an error occured with sending an email.
*/
public static send(options: MailOptions, callback: (error: Error) => void) {
new EmailTemplate(options.template).render(options.content, (err: Error, result: EmailTemplateResults) => {
// If an error occured rendering the template
if (err) {
return callback(err);
}
// Create a SendGrid request which contains the email's content.
const request = this.sendGrid.emptyRequest({
method: 'POST',
path: '/v3/mail/send',
body: {
from: {
name: options.from.name,
email: options.from.email,
},
personalizations: [{
to: [{
name: options.to.name,
email: options.to.email,
}],
subject: options.subject,
}],
content: [{
type: 'text/html',
value: result.html,
}],
},
});
// Send email via custom request using the SendGrid API
this.sendGrid.API(request, (sendGridError: Error) => {
return callback(sendGridError);
});
});
}
// Private static fields
private static sendGrid = null;
}
| Emailer | identifier_name |
emailer.ts | import * as SendGrid from 'sendgrid';
import {EmailTemplate} from 'email-templates';
/**
* Static class email module that provides simple Plug and Play functions to send custom templated emails using the SendGrid API.
*/
export class Emailer {
/**
* Assign the API key required by SendGrid to send emails.
* @param {string} key - The SendGrid API key.
*/
public static initKey(key: string) {
this.sendGrid = SendGrid(key);
}
/**
* Send an HTML formatted, custom templated email using the SendGrid API.
* @param {MailOptions} opts - Template email's delivery fields and content.
* @param {Function} callback - The callback function that contains an error if an error occured with sending an email.
*/
public static send(options: MailOptions, callback: (error: Error) => void) |
// Private static fields
private static sendGrid = null;
}
| {
new EmailTemplate(options.template).render(options.content, (err: Error, result: EmailTemplateResults) => {
// If an error occured rendering the template
if (err) {
return callback(err);
}
// Create a SendGrid request which contains the email's content.
const request = this.sendGrid.emptyRequest({
method: 'POST',
path: '/v3/mail/send',
body: {
from: {
name: options.from.name,
email: options.from.email,
},
personalizations: [{
to: [{
name: options.to.name,
email: options.to.email,
}],
subject: options.subject,
}],
content: [{
type: 'text/html',
value: result.html,
}],
},
});
// Send email via custom request using the SendGrid API
this.sendGrid.API(request, (sendGridError: Error) => {
return callback(sendGridError);
});
});
} | identifier_body |
emailer.ts | import * as SendGrid from 'sendgrid';
import {EmailTemplate} from 'email-templates';
/**
* Static class email module that provides simple Plug and Play functions to send custom templated emails using the SendGrid API.
*/
export class Emailer {
/**
* Assign the API key required by SendGrid to send emails.
* @param {string} key - The SendGrid API key.
*/
public static initKey(key: string) {
this.sendGrid = SendGrid(key);
}
/**
* Send an HTML formatted, custom templated email using the SendGrid API.
* @param {MailOptions} opts - Template email's delivery fields and content.
* @param {Function} callback - The callback function that contains an error if an error occured with sending an email.
*/
public static send(options: MailOptions, callback: (error: Error) => void) {
new EmailTemplate(options.template).render(options.content, (err: Error, result: EmailTemplateResults) => {
// If an error occured rendering the template
if (err) |
// Create a SendGrid request which contains the email's content.
const request = this.sendGrid.emptyRequest({
method: 'POST',
path: '/v3/mail/send',
body: {
from: {
name: options.from.name,
email: options.from.email,
},
personalizations: [{
to: [{
name: options.to.name,
email: options.to.email,
}],
subject: options.subject,
}],
content: [{
type: 'text/html',
value: result.html,
}],
},
});
// Send email via custom request using the SendGrid API
this.sendGrid.API(request, (sendGridError: Error) => {
return callback(sendGridError);
});
});
}
// Private static fields
private static sendGrid = null;
}
| {
return callback(err);
} | conditional_block |
emailer.ts | import * as SendGrid from 'sendgrid';
import {EmailTemplate} from 'email-templates';
/**
* Static class email module that provides simple Plug and Play functions to send custom templated emails using the SendGrid API.
*/
export class Emailer {
/**
* Assign the API key required by SendGrid to send emails.
* @param {string} key - The SendGrid API key.
*/
public static initKey(key: string) {
this.sendGrid = SendGrid(key);
}
/**
* Send an HTML formatted, custom templated email using the SendGrid API.
* @param {MailOptions} opts - Template email's delivery fields and content.
* @param {Function} callback - The callback function that contains an error if an error occured with sending an email.
*/
public static send(options: MailOptions, callback: (error: Error) => void) {
new EmailTemplate(options.template).render(options.content, (err: Error, result: EmailTemplateResults) => {
// If an error occured rendering the template
if (err) {
return callback(err);
}
// Create a SendGrid request which contains the email's content.
const request = this.sendGrid.emptyRequest({
method: 'POST',
path: '/v3/mail/send',
body: {
from: { | to: [{
name: options.to.name,
email: options.to.email,
}],
subject: options.subject,
}],
content: [{
type: 'text/html',
value: result.html,
}],
},
});
// Send email via custom request using the SendGrid API
this.sendGrid.API(request, (sendGridError: Error) => {
return callback(sendGridError);
});
});
}
// Private static fields
private static sendGrid = null;
} | name: options.from.name,
email: options.from.email,
},
personalizations: [{ | random_line_split |
session.js | 'use strict';
var util = require('util')
, tls = require('tls')
, crypto = require('crypto')
, EventEmitter = require('events').EventEmitter
, Connection = require('node-xmpp-core').Connection
, JID = require('node-xmpp-core').JID
, SRV = require('node-xmpp-core').SRV
, BOSHConnection = require('./bosh')
, WebSockets = require('./websockets')
function Session(opts) {
EventEmitter.call(this)
this.setOptions(opts)
if (opts.websocket && opts.websocket.url) {
this._setupWebsocketConnection(opts)
} else if (opts.bosh && opts.bosh.url) {
this._setupBoshConnection(opts)
} else {
this._setupSocketConnection(opts)
}
}
util.inherits(Session, EventEmitter)
Session.prototype._setupSocketConnection = function(opts) {
var params = {
xmlns: { '': opts.xmlns },
streamAttrs: {
version: '1.0',
to: this.jid.domain
}
}
for (var key in opts)
if (!(key in params))
params[key] = opts[key]
this.connection = new Connection(params)
this._addConnectionListeners()
if (opts.host) {
this._socketConnectionToHost(opts)
} else if (!SRV) {
throw 'Cannot load SRV'
} else {
this._performSrvLookup(opts)
}
}
Session.prototype._socketConnectionToHost = function(opts) {
if (opts.legacySSL) {
this.connection.allowTLS = false
this.connection.connect({
socket:function () {
return tls.connect(
opts.port || 5223,
opts.host,
opts.credentials || {}
)
}
})
} else {
if (opts.credentials) {
this.connection.credentials = crypto
.createCredentials(opts.credentials)
}
if (opts.disallowTLS) this.connection.allowTLS = false
this.connection.listen({
socket:function () {
// wait for connect event listeners
process.nextTick(function () {
this.socket.connect(opts.port || 5222, opts.host)
}.bind(this))
var socket = opts.socket
opts.socket = null
return socket // maybe create new socket
}
})
}
}
Session.prototype._performSrvLookup = function(opts) {
if (opts.legacySSL) {
throw 'LegacySSL mode does not support DNS lookups'
}
if (opts.credentials)
this.connection.credentials = crypto.createCredentials(opts.credentials)
if (opts.disallowTLS)
this.connection.allowTLS = false
this.connection.listen({socket:SRV.connect({
socket: opts.socket,
services: ['_xmpp-client._tcp'],
domain: this.jid.domain,
defaultPort: 5222
})})
}
Session.prototype._setupBoshConnection = function(opts) {
this.connection = new BOSHConnection({ | })
this._addConnectionListeners()
}
Session.prototype._setupWebsocketConnection = function(opts) {
this.connection = new WebSockets.WSConnection({
jid: this.jid,
websocket: opts.websocket
})
this._addConnectionListeners()
this.connection.on('connected', function() {
// Clients start <stream:stream>, servers reply
if (this.connection.startStream)
this.connection.startStream()
this.state = 5;
this.emit ('online', { jid: this.jid });
}.bind(this))
}
Session.prototype.setOptions = function(opts) {
/* jshint camelcase: false */
this.jid = (typeof opts.jid === 'string') ? new JID(opts.jid) : opts.jid
this.password = opts.password
this.preferredSaslMechanism = opts.preferredSaslMechanism
this.availableSaslMechanisms = []
this.api_key = opts.api_key
this.access_token = opts.access_token
this.oauth2_token = opts.oauth2_token
this.oauth2_auth = opts.oauth2_auth
this.register = opts.register
this.wait = opts.wait || '10'
if (typeof opts.actAs === 'string') {
this.actAs = new JID(opts.actAs)
} else {
this.actAs = opts.actAs
}
}
Session.prototype._addConnectionListeners = function (con) {
con = con || this.connection
con.on('stanza', this.onStanza.bind(this))
con.on('drain', this.emit.bind(this, 'drain'))
con.on('end', this.emit.bind(this, 'end'))
con.on('close', this.emit.bind(this, 'close'))
con.on('error', this.emit.bind(this, 'error'))
con.on('connect', this.emit.bind(this, 'connect'))
con.on('reconnect', this.emit.bind(this, 'reconnect'))
con.on('disconnect', this.emit.bind(this, 'disconnect'))
if (con.startStream) {
con.on('connect', function () {
// Clients start <stream:stream>, servers reply
con.startStream()
})
this.on('auth', function () {
con.startStream()
})
}
}
Session.prototype.pause = function() {
if (this.connection && this.connection.pause)
this.connection.pause()
}
Session.prototype.resume = function() {
if (this.connection && this.connection.resume)
this.connection.resume()
}
Session.prototype.send = function(stanza) {
return this.connection ? this.connection.send(stanza) : false
}
Session.prototype.end = function() {
if (this.connection)
this.connection.end()
}
Session.prototype.onStanza = function() {}
module.exports = Session | jid: this.jid,
bosh: opts.bosh,
wait: this.wait | random_line_split |
session.js | 'use strict';
var util = require('util')
, tls = require('tls')
, crypto = require('crypto')
, EventEmitter = require('events').EventEmitter
, Connection = require('node-xmpp-core').Connection
, JID = require('node-xmpp-core').JID
, SRV = require('node-xmpp-core').SRV
, BOSHConnection = require('./bosh')
, WebSockets = require('./websockets')
function | (opts) {
EventEmitter.call(this)
this.setOptions(opts)
if (opts.websocket && opts.websocket.url) {
this._setupWebsocketConnection(opts)
} else if (opts.bosh && opts.bosh.url) {
this._setupBoshConnection(opts)
} else {
this._setupSocketConnection(opts)
}
}
util.inherits(Session, EventEmitter)
Session.prototype._setupSocketConnection = function(opts) {
var params = {
xmlns: { '': opts.xmlns },
streamAttrs: {
version: '1.0',
to: this.jid.domain
}
}
for (var key in opts)
if (!(key in params))
params[key] = opts[key]
this.connection = new Connection(params)
this._addConnectionListeners()
if (opts.host) {
this._socketConnectionToHost(opts)
} else if (!SRV) {
throw 'Cannot load SRV'
} else {
this._performSrvLookup(opts)
}
}
Session.prototype._socketConnectionToHost = function(opts) {
if (opts.legacySSL) {
this.connection.allowTLS = false
this.connection.connect({
socket:function () {
return tls.connect(
opts.port || 5223,
opts.host,
opts.credentials || {}
)
}
})
} else {
if (opts.credentials) {
this.connection.credentials = crypto
.createCredentials(opts.credentials)
}
if (opts.disallowTLS) this.connection.allowTLS = false
this.connection.listen({
socket:function () {
// wait for connect event listeners
process.nextTick(function () {
this.socket.connect(opts.port || 5222, opts.host)
}.bind(this))
var socket = opts.socket
opts.socket = null
return socket // maybe create new socket
}
})
}
}
Session.prototype._performSrvLookup = function(opts) {
if (opts.legacySSL) {
throw 'LegacySSL mode does not support DNS lookups'
}
if (opts.credentials)
this.connection.credentials = crypto.createCredentials(opts.credentials)
if (opts.disallowTLS)
this.connection.allowTLS = false
this.connection.listen({socket:SRV.connect({
socket: opts.socket,
services: ['_xmpp-client._tcp'],
domain: this.jid.domain,
defaultPort: 5222
})})
}
Session.prototype._setupBoshConnection = function(opts) {
this.connection = new BOSHConnection({
jid: this.jid,
bosh: opts.bosh,
wait: this.wait
})
this._addConnectionListeners()
}
Session.prototype._setupWebsocketConnection = function(opts) {
this.connection = new WebSockets.WSConnection({
jid: this.jid,
websocket: opts.websocket
})
this._addConnectionListeners()
this.connection.on('connected', function() {
// Clients start <stream:stream>, servers reply
if (this.connection.startStream)
this.connection.startStream()
this.state = 5;
this.emit ('online', { jid: this.jid });
}.bind(this))
}
Session.prototype.setOptions = function(opts) {
/* jshint camelcase: false */
this.jid = (typeof opts.jid === 'string') ? new JID(opts.jid) : opts.jid
this.password = opts.password
this.preferredSaslMechanism = opts.preferredSaslMechanism
this.availableSaslMechanisms = []
this.api_key = opts.api_key
this.access_token = opts.access_token
this.oauth2_token = opts.oauth2_token
this.oauth2_auth = opts.oauth2_auth
this.register = opts.register
this.wait = opts.wait || '10'
if (typeof opts.actAs === 'string') {
this.actAs = new JID(opts.actAs)
} else {
this.actAs = opts.actAs
}
}
Session.prototype._addConnectionListeners = function (con) {
con = con || this.connection
con.on('stanza', this.onStanza.bind(this))
con.on('drain', this.emit.bind(this, 'drain'))
con.on('end', this.emit.bind(this, 'end'))
con.on('close', this.emit.bind(this, 'close'))
con.on('error', this.emit.bind(this, 'error'))
con.on('connect', this.emit.bind(this, 'connect'))
con.on('reconnect', this.emit.bind(this, 'reconnect'))
con.on('disconnect', this.emit.bind(this, 'disconnect'))
if (con.startStream) {
con.on('connect', function () {
// Clients start <stream:stream>, servers reply
con.startStream()
})
this.on('auth', function () {
con.startStream()
})
}
}
Session.prototype.pause = function() {
if (this.connection && this.connection.pause)
this.connection.pause()
}
Session.prototype.resume = function() {
if (this.connection && this.connection.resume)
this.connection.resume()
}
Session.prototype.send = function(stanza) {
return this.connection ? this.connection.send(stanza) : false
}
Session.prototype.end = function() {
if (this.connection)
this.connection.end()
}
Session.prototype.onStanza = function() {}
module.exports = Session
| Session | identifier_name |
session.js | 'use strict';
var util = require('util')
, tls = require('tls')
, crypto = require('crypto')
, EventEmitter = require('events').EventEmitter
, Connection = require('node-xmpp-core').Connection
, JID = require('node-xmpp-core').JID
, SRV = require('node-xmpp-core').SRV
, BOSHConnection = require('./bosh')
, WebSockets = require('./websockets')
function Session(opts) {
EventEmitter.call(this)
this.setOptions(opts)
if (opts.websocket && opts.websocket.url) {
this._setupWebsocketConnection(opts)
} else if (opts.bosh && opts.bosh.url) {
this._setupBoshConnection(opts)
} else {
this._setupSocketConnection(opts)
}
}
util.inherits(Session, EventEmitter)
Session.prototype._setupSocketConnection = function(opts) {
var params = {
xmlns: { '': opts.xmlns },
streamAttrs: {
version: '1.0',
to: this.jid.domain
}
}
for (var key in opts)
if (!(key in params))
params[key] = opts[key]
this.connection = new Connection(params)
this._addConnectionListeners()
if (opts.host) {
this._socketConnectionToHost(opts)
} else if (!SRV) {
throw 'Cannot load SRV'
} else {
this._performSrvLookup(opts)
}
}
Session.prototype._socketConnectionToHost = function(opts) {
if (opts.legacySSL) {
this.connection.allowTLS = false
this.connection.connect({
socket:function () {
return tls.connect(
opts.port || 5223,
opts.host,
opts.credentials || {}
)
}
})
} else {
if (opts.credentials) {
this.connection.credentials = crypto
.createCredentials(opts.credentials)
}
if (opts.disallowTLS) this.connection.allowTLS = false
this.connection.listen({
socket:function () {
// wait for connect event listeners
process.nextTick(function () {
this.socket.connect(opts.port || 5222, opts.host)
}.bind(this))
var socket = opts.socket
opts.socket = null
return socket // maybe create new socket
}
})
}
}
Session.prototype._performSrvLookup = function(opts) {
if (opts.legacySSL) {
throw 'LegacySSL mode does not support DNS lookups'
}
if (opts.credentials)
this.connection.credentials = crypto.createCredentials(opts.credentials)
if (opts.disallowTLS)
this.connection.allowTLS = false
this.connection.listen({socket:SRV.connect({
socket: opts.socket,
services: ['_xmpp-client._tcp'],
domain: this.jid.domain,
defaultPort: 5222
})})
}
Session.prototype._setupBoshConnection = function(opts) {
this.connection = new BOSHConnection({
jid: this.jid,
bosh: opts.bosh,
wait: this.wait
})
this._addConnectionListeners()
}
Session.prototype._setupWebsocketConnection = function(opts) {
this.connection = new WebSockets.WSConnection({
jid: this.jid,
websocket: opts.websocket
})
this._addConnectionListeners()
this.connection.on('connected', function() {
// Clients start <stream:stream>, servers reply
if (this.connection.startStream)
this.connection.startStream()
this.state = 5;
this.emit ('online', { jid: this.jid });
}.bind(this))
}
Session.prototype.setOptions = function(opts) {
/* jshint camelcase: false */
this.jid = (typeof opts.jid === 'string') ? new JID(opts.jid) : opts.jid
this.password = opts.password
this.preferredSaslMechanism = opts.preferredSaslMechanism
this.availableSaslMechanisms = []
this.api_key = opts.api_key
this.access_token = opts.access_token
this.oauth2_token = opts.oauth2_token
this.oauth2_auth = opts.oauth2_auth
this.register = opts.register
this.wait = opts.wait || '10'
if (typeof opts.actAs === 'string') {
this.actAs = new JID(opts.actAs)
} else {
this.actAs = opts.actAs
}
}
Session.prototype._addConnectionListeners = function (con) {
con = con || this.connection
con.on('stanza', this.onStanza.bind(this))
con.on('drain', this.emit.bind(this, 'drain'))
con.on('end', this.emit.bind(this, 'end'))
con.on('close', this.emit.bind(this, 'close'))
con.on('error', this.emit.bind(this, 'error'))
con.on('connect', this.emit.bind(this, 'connect'))
con.on('reconnect', this.emit.bind(this, 'reconnect'))
con.on('disconnect', this.emit.bind(this, 'disconnect'))
if (con.startStream) |
}
Session.prototype.pause = function() {
if (this.connection && this.connection.pause)
this.connection.pause()
}
Session.prototype.resume = function() {
if (this.connection && this.connection.resume)
this.connection.resume()
}
Session.prototype.send = function(stanza) {
return this.connection ? this.connection.send(stanza) : false
}
Session.prototype.end = function() {
if (this.connection)
this.connection.end()
}
Session.prototype.onStanza = function() {}
module.exports = Session
| {
con.on('connect', function () {
// Clients start <stream:stream>, servers reply
con.startStream()
})
this.on('auth', function () {
con.startStream()
})
} | conditional_block |
session.js | 'use strict';
var util = require('util')
, tls = require('tls')
, crypto = require('crypto')
, EventEmitter = require('events').EventEmitter
, Connection = require('node-xmpp-core').Connection
, JID = require('node-xmpp-core').JID
, SRV = require('node-xmpp-core').SRV
, BOSHConnection = require('./bosh')
, WebSockets = require('./websockets')
function Session(opts) |
util.inherits(Session, EventEmitter)
Session.prototype._setupSocketConnection = function(opts) {
var params = {
xmlns: { '': opts.xmlns },
streamAttrs: {
version: '1.0',
to: this.jid.domain
}
}
for (var key in opts)
if (!(key in params))
params[key] = opts[key]
this.connection = new Connection(params)
this._addConnectionListeners()
if (opts.host) {
this._socketConnectionToHost(opts)
} else if (!SRV) {
throw 'Cannot load SRV'
} else {
this._performSrvLookup(opts)
}
}
Session.prototype._socketConnectionToHost = function(opts) {
if (opts.legacySSL) {
this.connection.allowTLS = false
this.connection.connect({
socket:function () {
return tls.connect(
opts.port || 5223,
opts.host,
opts.credentials || {}
)
}
})
} else {
if (opts.credentials) {
this.connection.credentials = crypto
.createCredentials(opts.credentials)
}
if (opts.disallowTLS) this.connection.allowTLS = false
this.connection.listen({
socket:function () {
// wait for connect event listeners
process.nextTick(function () {
this.socket.connect(opts.port || 5222, opts.host)
}.bind(this))
var socket = opts.socket
opts.socket = null
return socket // maybe create new socket
}
})
}
}
Session.prototype._performSrvLookup = function(opts) {
if (opts.legacySSL) {
throw 'LegacySSL mode does not support DNS lookups'
}
if (opts.credentials)
this.connection.credentials = crypto.createCredentials(opts.credentials)
if (opts.disallowTLS)
this.connection.allowTLS = false
this.connection.listen({socket:SRV.connect({
socket: opts.socket,
services: ['_xmpp-client._tcp'],
domain: this.jid.domain,
defaultPort: 5222
})})
}
Session.prototype._setupBoshConnection = function(opts) {
this.connection = new BOSHConnection({
jid: this.jid,
bosh: opts.bosh,
wait: this.wait
})
this._addConnectionListeners()
}
Session.prototype._setupWebsocketConnection = function(opts) {
this.connection = new WebSockets.WSConnection({
jid: this.jid,
websocket: opts.websocket
})
this._addConnectionListeners()
this.connection.on('connected', function() {
// Clients start <stream:stream>, servers reply
if (this.connection.startStream)
this.connection.startStream()
this.state = 5;
this.emit ('online', { jid: this.jid });
}.bind(this))
}
Session.prototype.setOptions = function(opts) {
/* jshint camelcase: false */
this.jid = (typeof opts.jid === 'string') ? new JID(opts.jid) : opts.jid
this.password = opts.password
this.preferredSaslMechanism = opts.preferredSaslMechanism
this.availableSaslMechanisms = []
this.api_key = opts.api_key
this.access_token = opts.access_token
this.oauth2_token = opts.oauth2_token
this.oauth2_auth = opts.oauth2_auth
this.register = opts.register
this.wait = opts.wait || '10'
if (typeof opts.actAs === 'string') {
this.actAs = new JID(opts.actAs)
} else {
this.actAs = opts.actAs
}
}
Session.prototype._addConnectionListeners = function (con) {
con = con || this.connection
con.on('stanza', this.onStanza.bind(this))
con.on('drain', this.emit.bind(this, 'drain'))
con.on('end', this.emit.bind(this, 'end'))
con.on('close', this.emit.bind(this, 'close'))
con.on('error', this.emit.bind(this, 'error'))
con.on('connect', this.emit.bind(this, 'connect'))
con.on('reconnect', this.emit.bind(this, 'reconnect'))
con.on('disconnect', this.emit.bind(this, 'disconnect'))
if (con.startStream) {
con.on('connect', function () {
// Clients start <stream:stream>, servers reply
con.startStream()
})
this.on('auth', function () {
con.startStream()
})
}
}
Session.prototype.pause = function() {
if (this.connection && this.connection.pause)
this.connection.pause()
}
Session.prototype.resume = function() {
if (this.connection && this.connection.resume)
this.connection.resume()
}
Session.prototype.send = function(stanza) {
return this.connection ? this.connection.send(stanza) : false
}
Session.prototype.end = function() {
if (this.connection)
this.connection.end()
}
Session.prototype.onStanza = function() {}
module.exports = Session
| {
EventEmitter.call(this)
this.setOptions(opts)
if (opts.websocket && opts.websocket.url) {
this._setupWebsocketConnection(opts)
} else if (opts.bosh && opts.bosh.url) {
this._setupBoshConnection(opts)
} else {
this._setupSocketConnection(opts)
}
} | identifier_body |
script.js | jQuery(document).ready(function ($) {
var options = {
$AutoPlay: true, //[Optional] Whether to auto play, to enable slideshow, this option must be set to true, default value is false
$AutoPlaySteps: 1, //[Optional] Steps to go for each navigation request (this options applys only when slideshow disabled), the default value is 1
$AutoPlayInterval: 4000, //[Optional] Interval (in milliseconds) to go for next slide since the previous stopped if the slider is auto playing, default value is 3000
$PauseOnHover: 1, //[Optional] Whether to pause when mouse over if a slider is auto playing, 0 no pause, 1 pause for desktop, 2 pause for touch device, 3 pause for desktop and touch device, 4 freeze for desktop, 8 freeze for touch device, 12 freeze for desktop and touch device, default value is 1
$ArrowKeyNavigation: true, //[Optional] Allows keyboard (arrow key) navigation or not, default value is false
$SlideDuration: 500, //[Optional] Specifies default duration (swipe) for slide in milliseconds, default value is 500
$MinDragOffsetToSlide: 20, //[Optional] Minimum drag offset to trigger slide , default value is 20
//$SlideWidth: 600, //[Optional] Width of every slide in pixels, default value is width of 'slides' container
//$SlideHeight: 300, //[Optional] Height of every slide in pixels, default value is height of 'slides' container
$SlideSpacing: 5, //[Optional] Space between each slide in pixels, default value is 0
$DisplayPieces: 1, //[Optional] Number of pieces to display (the slideshow would be disabled if the value is set to greater than 1), the default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park slide (this options applys only when slideshow disabled), default value is 0.
$UISearchMode: 1, //[Optional] The way (0 parellel, 1 recursive, default value is 1) to search UI components (slides container, loading screen, navigator container, arrow navigator container, thumbnail navigator container etc).
$PlayOrientation: 1, //[Optional] Orientation to play slide (for auto play, navigation), 1 horizental, 2 vertical, 5 horizental reverse, 6 vertical reverse, default value is 1
$DragOrientation: 3, //[Optional] Orientation to drag slide, 0 no drag, 1 horizental, 2 vertical, 3 either, default value is 1 (Note that the $DragOrientation should be the same as $PlayOrientation when $DisplayPieces is greater than 1, or parking position is not 0)
$ThumbnailNavigatorOptions: {
$Class: $JssorThumbnailNavigator$, //[Required] Class to create thumbnail navigator instance
$ChanceToShow: 2, //[Required] 0 Never, 1 Mouse Over, 2 Always
$ActionMode: 1, //[Optional] 0 None, 1 act by click, 2 act by mouse hover, 3 both, default value is 1
$AutoCenter: 3, //[Optional] Auto center thumbnail items in the thumbnail navigator container, 0 None, 1 Horizontal, 2 Vertical, 3 Both, default value is 3
| $DisplayPieces: 5, //[Optional] Number of pieces to display, default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park thumbnail
$Orientation: 1, //[Optional] Orientation to arrange thumbnails, 1 horizental, 2 vertical, default value is 1
$DisableDrag: true //[Optional] Disable drag or not, default value is false
}
};
var jssor_slider1 = new $JssorSlider$("slider1_container", options);
//responsive code begin
//you can remove responsive code if you don't want the slider scales while window resizes
function ScaleSlider() {
var parentWidth = jssor_slider1.$Elmt.parentNode.clientWidth;
if (parentWidth) {
var sliderWidth = parentWidth;
//keep the slider width no more than 600
sliderWidth = Math.min(sliderWidth, 600);
jssor_slider1.$ScaleWidth(sliderWidth);
}
else
window.setTimeout(ScaleSlider, 30);
}
ScaleSlider();
$(window).bind("load", ScaleSlider);
$(window).bind("resize", ScaleSlider);
$(window).bind("orientationchange", ScaleSlider);
//responsive code end
}); | $Lanes: 1, //[Optional] Specify lanes to arrange thumbnails, default value is 1
$SpacingX: 1, //[Optional] Horizontal space between each thumbnail in pixel, default value is 0
$SpacingY: 0, //[Optional] Vertical space between each thumbnail in pixel, default value is 0
| random_line_split |
script.js | jQuery(document).ready(function ($) {
var options = {
$AutoPlay: true, //[Optional] Whether to auto play, to enable slideshow, this option must be set to true, default value is false
$AutoPlaySteps: 1, //[Optional] Steps to go for each navigation request (this options applys only when slideshow disabled), the default value is 1
$AutoPlayInterval: 4000, //[Optional] Interval (in milliseconds) to go for next slide since the previous stopped if the slider is auto playing, default value is 3000
$PauseOnHover: 1, //[Optional] Whether to pause when mouse over if a slider is auto playing, 0 no pause, 1 pause for desktop, 2 pause for touch device, 3 pause for desktop and touch device, 4 freeze for desktop, 8 freeze for touch device, 12 freeze for desktop and touch device, default value is 1
$ArrowKeyNavigation: true, //[Optional] Allows keyboard (arrow key) navigation or not, default value is false
$SlideDuration: 500, //[Optional] Specifies default duration (swipe) for slide in milliseconds, default value is 500
$MinDragOffsetToSlide: 20, //[Optional] Minimum drag offset to trigger slide , default value is 20
//$SlideWidth: 600, //[Optional] Width of every slide in pixels, default value is width of 'slides' container
//$SlideHeight: 300, //[Optional] Height of every slide in pixels, default value is height of 'slides' container
$SlideSpacing: 5, //[Optional] Space between each slide in pixels, default value is 0
$DisplayPieces: 1, //[Optional] Number of pieces to display (the slideshow would be disabled if the value is set to greater than 1), the default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park slide (this options applys only when slideshow disabled), default value is 0.
$UISearchMode: 1, //[Optional] The way (0 parellel, 1 recursive, default value is 1) to search UI components (slides container, loading screen, navigator container, arrow navigator container, thumbnail navigator container etc).
$PlayOrientation: 1, //[Optional] Orientation to play slide (for auto play, navigation), 1 horizental, 2 vertical, 5 horizental reverse, 6 vertical reverse, default value is 1
$DragOrientation: 3, //[Optional] Orientation to drag slide, 0 no drag, 1 horizental, 2 vertical, 3 either, default value is 1 (Note that the $DragOrientation should be the same as $PlayOrientation when $DisplayPieces is greater than 1, or parking position is not 0)
$ThumbnailNavigatorOptions: {
$Class: $JssorThumbnailNavigator$, //[Required] Class to create thumbnail navigator instance
$ChanceToShow: 2, //[Required] 0 Never, 1 Mouse Over, 2 Always
$ActionMode: 1, //[Optional] 0 None, 1 act by click, 2 act by mouse hover, 3 both, default value is 1
$AutoCenter: 3, //[Optional] Auto center thumbnail items in the thumbnail navigator container, 0 None, 1 Horizontal, 2 Vertical, 3 Both, default value is 3
$Lanes: 1, //[Optional] Specify lanes to arrange thumbnails, default value is 1
$SpacingX: 1, //[Optional] Horizontal space between each thumbnail in pixel, default value is 0
$SpacingY: 0, //[Optional] Vertical space between each thumbnail in pixel, default value is 0
$DisplayPieces: 5, //[Optional] Number of pieces to display, default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park thumbnail
$Orientation: 1, //[Optional] Orientation to arrange thumbnails, 1 horizental, 2 vertical, default value is 1
$DisableDrag: true //[Optional] Disable drag or not, default value is false
}
};
var jssor_slider1 = new $JssorSlider$("slider1_container", options);
//responsive code begin
//you can remove responsive code if you don't want the slider scales while window resizes
function ScaleSlider() |
ScaleSlider();
$(window).bind("load", ScaleSlider);
$(window).bind("resize", ScaleSlider);
$(window).bind("orientationchange", ScaleSlider);
//responsive code end
}); | {
var parentWidth = jssor_slider1.$Elmt.parentNode.clientWidth;
if (parentWidth) {
var sliderWidth = parentWidth;
//keep the slider width no more than 600
sliderWidth = Math.min(sliderWidth, 600);
jssor_slider1.$ScaleWidth(sliderWidth);
}
else
window.setTimeout(ScaleSlider, 30);
} | identifier_body |
script.js | jQuery(document).ready(function ($) {
var options = {
$AutoPlay: true, //[Optional] Whether to auto play, to enable slideshow, this option must be set to true, default value is false
$AutoPlaySteps: 1, //[Optional] Steps to go for each navigation request (this options applys only when slideshow disabled), the default value is 1
$AutoPlayInterval: 4000, //[Optional] Interval (in milliseconds) to go for next slide since the previous stopped if the slider is auto playing, default value is 3000
$PauseOnHover: 1, //[Optional] Whether to pause when mouse over if a slider is auto playing, 0 no pause, 1 pause for desktop, 2 pause for touch device, 3 pause for desktop and touch device, 4 freeze for desktop, 8 freeze for touch device, 12 freeze for desktop and touch device, default value is 1
$ArrowKeyNavigation: true, //[Optional] Allows keyboard (arrow key) navigation or not, default value is false
$SlideDuration: 500, //[Optional] Specifies default duration (swipe) for slide in milliseconds, default value is 500
$MinDragOffsetToSlide: 20, //[Optional] Minimum drag offset to trigger slide , default value is 20
//$SlideWidth: 600, //[Optional] Width of every slide in pixels, default value is width of 'slides' container
//$SlideHeight: 300, //[Optional] Height of every slide in pixels, default value is height of 'slides' container
$SlideSpacing: 5, //[Optional] Space between each slide in pixels, default value is 0
$DisplayPieces: 1, //[Optional] Number of pieces to display (the slideshow would be disabled if the value is set to greater than 1), the default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park slide (this options applys only when slideshow disabled), default value is 0.
$UISearchMode: 1, //[Optional] The way (0 parellel, 1 recursive, default value is 1) to search UI components (slides container, loading screen, navigator container, arrow navigator container, thumbnail navigator container etc).
$PlayOrientation: 1, //[Optional] Orientation to play slide (for auto play, navigation), 1 horizental, 2 vertical, 5 horizental reverse, 6 vertical reverse, default value is 1
$DragOrientation: 3, //[Optional] Orientation to drag slide, 0 no drag, 1 horizental, 2 vertical, 3 either, default value is 1 (Note that the $DragOrientation should be the same as $PlayOrientation when $DisplayPieces is greater than 1, or parking position is not 0)
$ThumbnailNavigatorOptions: {
$Class: $JssorThumbnailNavigator$, //[Required] Class to create thumbnail navigator instance
$ChanceToShow: 2, //[Required] 0 Never, 1 Mouse Over, 2 Always
$ActionMode: 1, //[Optional] 0 None, 1 act by click, 2 act by mouse hover, 3 both, default value is 1
$AutoCenter: 3, //[Optional] Auto center thumbnail items in the thumbnail navigator container, 0 None, 1 Horizontal, 2 Vertical, 3 Both, default value is 3
$Lanes: 1, //[Optional] Specify lanes to arrange thumbnails, default value is 1
$SpacingX: 1, //[Optional] Horizontal space between each thumbnail in pixel, default value is 0
$SpacingY: 0, //[Optional] Vertical space between each thumbnail in pixel, default value is 0
$DisplayPieces: 5, //[Optional] Number of pieces to display, default value is 1
$ParkingPosition: 0, //[Optional] The offset position to park thumbnail
$Orientation: 1, //[Optional] Orientation to arrange thumbnails, 1 horizental, 2 vertical, default value is 1
$DisableDrag: true //[Optional] Disable drag or not, default value is false
}
};
var jssor_slider1 = new $JssorSlider$("slider1_container", options);
//responsive code begin
//you can remove responsive code if you don't want the slider scales while window resizes
function | () {
var parentWidth = jssor_slider1.$Elmt.parentNode.clientWidth;
if (parentWidth) {
var sliderWidth = parentWidth;
//keep the slider width no more than 600
sliderWidth = Math.min(sliderWidth, 600);
jssor_slider1.$ScaleWidth(sliderWidth);
}
else
window.setTimeout(ScaleSlider, 30);
}
ScaleSlider();
$(window).bind("load", ScaleSlider);
$(window).bind("resize", ScaleSlider);
$(window).bind("orientationchange", ScaleSlider);
//responsive code end
}); | ScaleSlider | identifier_name |
lib.rs | //! Get information concerning the build target.
macro_rules! return_cfg {
($i:ident : $s:expr) => ( if cfg!($i = $s) { return $s; } );
($i:ident : $s:expr, $($t:expr),+) => ( return_cfg!($i: $s); return_cfg!($i: $($t),+) );
}
/// Collection of functions to give information on the build target.
pub struct Target;
impl Target {
/// Architecture; given by `target_arch`.
pub fn arch() -> &'static str {
return_cfg!(target_arch: "x86", "x86_64", "mips", "powerpc", "arm", "aarch64");
"unknown"
}
/// Endianness; given by `target_endian`.
pub fn endian() -> &'static str {
return_cfg!(target_endian: "little", "big");
""
}
/// Toolchain environment; given by `target_environment`.
pub fn env() -> &'static str {
return_cfg!(target_env: "musl", "msvc", "gnu");
""
}
/// OS familt; given by `target_family`.
pub fn family() -> &'static str {
return_cfg!(target_family: "unix", "windows");
"unknown"
}
/// Operating system; given by `target_os`.
pub fn os() -> &'static str |
/// Pointer width; given by `target_pointer_width`.
pub fn pointer_width() -> &'static str {
return_cfg!(target_pointer_width: "32", "64");
"unknown"
}
// TODO: enable once it's not experimental API.
// pub fn vendor() -> &'static str {
// return_cfg!(target_vendor: "apple", "pc");
// "unknown"
// }
}
| {
return_cfg!(target_os: "windows", "macos", "ios", "linux", "android", "freebsd", "dragonfly", "bitrig", "openbsd", "netbsd");
"unknown"
} | identifier_body |
lib.rs |
macro_rules! return_cfg {
($i:ident : $s:expr) => ( if cfg!($i = $s) { return $s; } );
($i:ident : $s:expr, $($t:expr),+) => ( return_cfg!($i: $s); return_cfg!($i: $($t),+) );
}
/// Collection of functions to give information on the build target.
pub struct Target;
impl Target {
/// Architecture; given by `target_arch`.
pub fn arch() -> &'static str {
return_cfg!(target_arch: "x86", "x86_64", "mips", "powerpc", "arm", "aarch64");
"unknown"
}
/// Endianness; given by `target_endian`.
pub fn endian() -> &'static str {
return_cfg!(target_endian: "little", "big");
""
}
/// Toolchain environment; given by `target_environment`.
pub fn env() -> &'static str {
return_cfg!(target_env: "musl", "msvc", "gnu");
""
}
/// OS familt; given by `target_family`.
pub fn family() -> &'static str {
return_cfg!(target_family: "unix", "windows");
"unknown"
}
/// Operating system; given by `target_os`.
pub fn os() -> &'static str {
return_cfg!(target_os: "windows", "macos", "ios", "linux", "android", "freebsd", "dragonfly", "bitrig", "openbsd", "netbsd");
"unknown"
}
/// Pointer width; given by `target_pointer_width`.
pub fn pointer_width() -> &'static str {
return_cfg!(target_pointer_width: "32", "64");
"unknown"
}
// TODO: enable once it's not experimental API.
// pub fn vendor() -> &'static str {
// return_cfg!(target_vendor: "apple", "pc");
// "unknown"
// }
} | //! Get information concerning the build target. | random_line_split |
|
lib.rs | //! Get information concerning the build target.
macro_rules! return_cfg {
($i:ident : $s:expr) => ( if cfg!($i = $s) { return $s; } );
($i:ident : $s:expr, $($t:expr),+) => ( return_cfg!($i: $s); return_cfg!($i: $($t),+) );
}
/// Collection of functions to give information on the build target.
pub struct Target;
impl Target {
/// Architecture; given by `target_arch`.
pub fn arch() -> &'static str {
return_cfg!(target_arch: "x86", "x86_64", "mips", "powerpc", "arm", "aarch64");
"unknown"
}
/// Endianness; given by `target_endian`.
pub fn endian() -> &'static str {
return_cfg!(target_endian: "little", "big");
""
}
/// Toolchain environment; given by `target_environment`.
pub fn env() -> &'static str {
return_cfg!(target_env: "musl", "msvc", "gnu");
""
}
/// OS familt; given by `target_family`.
pub fn family() -> &'static str {
return_cfg!(target_family: "unix", "windows");
"unknown"
}
/// Operating system; given by `target_os`.
pub fn os() -> &'static str {
return_cfg!(target_os: "windows", "macos", "ios", "linux", "android", "freebsd", "dragonfly", "bitrig", "openbsd", "netbsd");
"unknown"
}
/// Pointer width; given by `target_pointer_width`.
pub fn | () -> &'static str {
return_cfg!(target_pointer_width: "32", "64");
"unknown"
}
// TODO: enable once it's not experimental API.
// pub fn vendor() -> &'static str {
// return_cfg!(target_vendor: "apple", "pc");
// "unknown"
// }
}
| pointer_width | identifier_name |
plot_simulate_evoked_data.py | """
==============================
Generate simulated evoked data
==============================
"""
# Author: Daniel Strohmeier <[email protected]>
# Alexandre Gramfort <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne import (read_proj, read_forward_solution, read_cov, read_label,
pick_types_forward, pick_types)
from mne.io import Raw, read_info
from mne.datasets import sample
from mne.time_frequency import fit_iir_model_raw
from mne.viz import plot_sparse_source_estimates
from mne.simulation import simulate_sparse_stc, simulate_evoked
print(__doc__)
###############################################################################
# Load real data as templates
data_path = sample.data_path()
raw = Raw(data_path + '/MEG/sample/sample_audvis_raw.fif')
proj = read_proj(data_path + '/MEG/sample/sample_audvis_ecg_proj.fif')
raw.info['projs'] += proj
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # mark bad channels
fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
ave_fname = data_path + '/MEG/sample/sample_audvis-no-filter-ave.fif'
cov_fname = data_path + '/MEG/sample/sample_audvis-cov.fif'
fwd = read_forward_solution(fwd_fname, force_fixed=True, surf_ori=True)
fwd = pick_types_forward(fwd, meg=True, eeg=True, exclude=raw.info['bads'])
cov = read_cov(cov_fname)
info = read_info(ave_fname)
label_names = ['Aud-lh', 'Aud-rh']
labels = [read_label(data_path + '/MEG/sample/labels/%s.label' % ln)
for ln in label_names]
###############################################################################
# Generate source time courses from 2 dipoles and the correspond evoked data
times = np.arange(300, dtype=np.float) / raw.info['sfreq'] - 0.1
rng = np.random.RandomState(42)
def | (times):
"""Function to generate random source time courses"""
return (1e-9 * np.sin(30. * times) *
np.exp(- (times - 0.15 + 0.05 * rng.randn(1)) ** 2 / 0.01))
stc = simulate_sparse_stc(fwd['src'], n_dipoles=2, times=times,
random_state=42, labels=labels, data_fun=data_fun)
###############################################################################
# Generate noisy evoked data
picks = pick_types(raw.info, meg=True, exclude='bads')
iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
snr = 6. # dB
evoked = simulate_evoked(fwd, stc, info, cov, snr, iir_filter=iir_filter)
###############################################################################
# Plot
plot_sparse_source_estimates(fwd['src'], stc, bgcolor=(1, 1, 1),
opacity=0.5, high_resolution=True)
plt.figure()
plt.psd(evoked.data[0])
evoked.plot()
| data_fun | identifier_name |
plot_simulate_evoked_data.py | """
==============================
Generate simulated evoked data
==============================
"""
# Author: Daniel Strohmeier <[email protected]>
# Alexandre Gramfort <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne import (read_proj, read_forward_solution, read_cov, read_label,
pick_types_forward, pick_types)
from mne.io import Raw, read_info
from mne.datasets import sample
from mne.time_frequency import fit_iir_model_raw
from mne.viz import plot_sparse_source_estimates
from mne.simulation import simulate_sparse_stc, simulate_evoked
print(__doc__)
###############################################################################
# Load real data as templates
data_path = sample.data_path()
raw = Raw(data_path + '/MEG/sample/sample_audvis_raw.fif')
proj = read_proj(data_path + '/MEG/sample/sample_audvis_ecg_proj.fif')
raw.info['projs'] += proj
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # mark bad channels
fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
ave_fname = data_path + '/MEG/sample/sample_audvis-no-filter-ave.fif'
cov_fname = data_path + '/MEG/sample/sample_audvis-cov.fif'
fwd = read_forward_solution(fwd_fname, force_fixed=True, surf_ori=True)
fwd = pick_types_forward(fwd, meg=True, eeg=True, exclude=raw.info['bads'])
cov = read_cov(cov_fname)
info = read_info(ave_fname)
label_names = ['Aud-lh', 'Aud-rh']
labels = [read_label(data_path + '/MEG/sample/labels/%s.label' % ln)
for ln in label_names]
###############################################################################
# Generate source time courses from 2 dipoles and the correspond evoked data
times = np.arange(300, dtype=np.float) / raw.info['sfreq'] - 0.1
rng = np.random.RandomState(42)
def data_fun(times):
|
stc = simulate_sparse_stc(fwd['src'], n_dipoles=2, times=times,
random_state=42, labels=labels, data_fun=data_fun)
###############################################################################
# Generate noisy evoked data
picks = pick_types(raw.info, meg=True, exclude='bads')
iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
snr = 6. # dB
evoked = simulate_evoked(fwd, stc, info, cov, snr, iir_filter=iir_filter)
###############################################################################
# Plot
plot_sparse_source_estimates(fwd['src'], stc, bgcolor=(1, 1, 1),
opacity=0.5, high_resolution=True)
plt.figure()
plt.psd(evoked.data[0])
evoked.plot()
| """Function to generate random source time courses"""
return (1e-9 * np.sin(30. * times) *
np.exp(- (times - 0.15 + 0.05 * rng.randn(1)) ** 2 / 0.01)) | identifier_body |
plot_simulate_evoked_data.py | """
==============================
Generate simulated evoked data
============================== | # License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne import (read_proj, read_forward_solution, read_cov, read_label,
pick_types_forward, pick_types)
from mne.io import Raw, read_info
from mne.datasets import sample
from mne.time_frequency import fit_iir_model_raw
from mne.viz import plot_sparse_source_estimates
from mne.simulation import simulate_sparse_stc, simulate_evoked
print(__doc__)
###############################################################################
# Load real data as templates
data_path = sample.data_path()
raw = Raw(data_path + '/MEG/sample/sample_audvis_raw.fif')
proj = read_proj(data_path + '/MEG/sample/sample_audvis_ecg_proj.fif')
raw.info['projs'] += proj
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # mark bad channels
fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
ave_fname = data_path + '/MEG/sample/sample_audvis-no-filter-ave.fif'
cov_fname = data_path + '/MEG/sample/sample_audvis-cov.fif'
fwd = read_forward_solution(fwd_fname, force_fixed=True, surf_ori=True)
fwd = pick_types_forward(fwd, meg=True, eeg=True, exclude=raw.info['bads'])
cov = read_cov(cov_fname)
info = read_info(ave_fname)
label_names = ['Aud-lh', 'Aud-rh']
labels = [read_label(data_path + '/MEG/sample/labels/%s.label' % ln)
for ln in label_names]
###############################################################################
# Generate source time courses from 2 dipoles and the correspond evoked data
times = np.arange(300, dtype=np.float) / raw.info['sfreq'] - 0.1
rng = np.random.RandomState(42)
def data_fun(times):
"""Function to generate random source time courses"""
return (1e-9 * np.sin(30. * times) *
np.exp(- (times - 0.15 + 0.05 * rng.randn(1)) ** 2 / 0.01))
stc = simulate_sparse_stc(fwd['src'], n_dipoles=2, times=times,
random_state=42, labels=labels, data_fun=data_fun)
###############################################################################
# Generate noisy evoked data
picks = pick_types(raw.info, meg=True, exclude='bads')
iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
snr = 6. # dB
evoked = simulate_evoked(fwd, stc, info, cov, snr, iir_filter=iir_filter)
###############################################################################
# Plot
plot_sparse_source_estimates(fwd['src'], stc, bgcolor=(1, 1, 1),
opacity=0.5, high_resolution=True)
plt.figure()
plt.psd(evoked.data[0])
evoked.plot() |
"""
# Author: Daniel Strohmeier <[email protected]>
# Alexandre Gramfort <[email protected]>
# | random_line_split |
pipe.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
|
if __name__ == '__main__':
p = pipe("pipefile", "none")
| def __init__(self, pipefile, queue, actions):
"""
Reads from a pipe
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
try:
os.mkfifo(self.pipefile)
logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)
return True
except:
logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip()) | identifier_body |
pipe.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
def | (self, pipefile, queue, actions):
"""
Reads from a pipe
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
try:
os.mkfifo(self.pipefile)
logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)
return True
except:
logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip())
if __name__ == '__main__':
p = pipe("pipefile", "none")
| __init__ | identifier_name |
pipe.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
def __init__(self, pipefile, queue, actions):
"""
Reads from a pipe
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
try:
os.mkfifo(self.pipefile)
logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)
return True
except:
logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip())
if __name__ == '__main__':
| p = pipe("pipefile", "none") | conditional_block |
|
pipe.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. | # GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
def __init__(self, pipefile, queue, actions):
"""
Reads from a pipe
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
try:
os.mkfifo(self.pipefile)
logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)
return True
except:
logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip())
if __name__ == '__main__':
p = pipe("pipefile", "none") | #
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | random_line_split |
Purple_opacity.js | // <![CDATA[
(function($){
jQuery(function($){
$(".round_colorA>.Purple_li_board").hover(
function () {
$(this).stop(true,true).animate({
backgroundColor: "#d790d0"
}, 800 )
},
function () {
$(this).stop(true,true).animate({
backgroundColor: "#f6f6f6"
}, 500 )
}
);
$(".round_colorB>.Purple_li_board").hover(
function () {
$(this).stop(true,true).animate({
backgroundColor: "#d790d0"
}, 800 )
},
function () {
$(this).stop(true,true).animate({
backgroundColor: "#eee"
}, 500 )
}
); | function () {
$(this).stop(true,true).animate({
backgroundColor: "#d790d0"
}, 800 )
},
function () {
$(this).stop(true,true).animate({
backgroundColor: "#dadada"
}, 500 )
}
);
});
})(jQuery);
// ]]> |
$(".round_colorC>.Purple_li_board").hover( | random_line_split |
command_issuelink.js | /*
Create an issue link
*/
module.exports = function (task, jira, grunt, callback) {
'use strict';
// ## Create an issue link between two issues ##
// ### Takes ###
//
// * link: a link object | // * callback: for when it’s done
//
// ### Returns ###
// * error: string if there was an issue, null if success
//
// [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id288232)
/* {
* 'type': {
* 'name': 'requirement'
* },
* 'inwardIssue': {
* 'key': 'SYS-2080'
* },
* 'outwardIssue': {
* 'key': 'SYS-2081'
* },
* 'comment': {
* 'body': 'Linked related issue!',
* 'visibility': {
* 'type': 'GROUP',
* 'value': 'jira-users'
* }
* }
* }
*/
var link = task.getValue('link');
jira.issueLink(link, callback);
}; | random_line_split |
|
WebsocketClient.ts | import {AbstractCoder, Coder} from "../Crypto";
import {Connection} from "./Interface";
import * as R from "ramda";
import * as E from "../Errors";
import {Message, Request, Response, Callback} from "./Interface";
import {appStateActions} from "../../reducers/appStateReducer";
export {SeashellWebsocket}
enum OnCloseCode {
Normal = 1000,
Abnormal = 1006,
Unknown = 4000,
PingTimedOut = 4001
};
const RESPONSE_TIMEOUT = 5000;
class SeashellWebsocket {
public connection?: Connection;
private coder: AbstractCoder;
private websocket?: WebSocket;
private lastMsgID: number;
public requests: {[index: number]: Request<any>};
private closes: () => void;
private failures: () => void;
public debug: boolean; // toggle console.log for tests
private pingLoop: any;
private callbacks: {[index: number]: Callback};
private lastPong: number = 0;
private lastCB: number = 0;
constructor(debug?: boolean) {
this.debug = debug || false;
this.callbacks = [];
}
// Connects and authenticates the socket, sets up the disconnection monitor
// Pass a new Connection object to overwrite the previously held one
// It must be safe to call this function consecutively many times
public async connect(cnn: Connection): Promise<void> {
if (cnn.offline) {
// if offline, set the offline connection and exit
this.connection = cnn;
return;
}
const firstTime: () => boolean = () => ! this.connection;
console.log("Connecting to websocket...");
this.lastMsgID = 0;
this.requests = {};
this.requests[-1] = new Request({id: -1}); // server challenge
this.requests[-2] = new Request({id: -2}); // reply challenge
this.requests[-3] = new Request({id: -3});
this.requests[-4] = new Request({id: -4});
this.requests[-5] = new Request({id: -5});
this.requests[-3].callback = this.gen_cb("io");
this.requests[-4].callback = this.gen_cb("test");
this.requests[-5].callback = this.gen_cb("changes");
// if there's an existing websocket,
// if it's connecting or open: do nothing
// if it's closing or closed: schedule to open a new connection
if (this.websocket) {
const websocket = this.websocket;
switch (websocket.readyState) {
case websocket.CONNECTING: {
console.log("Socket is already connecting. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.OPEN: {
console.log("Socket is already connected. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.CLOSING: {
console.log("Existing websocket is closing. Wait to reopen new connection.");
const promise = new Promise<void>((accept, reject) => {
// wait for a graceful shutdown then reconnect
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
return promise;
}
case websocket.CLOSED: {
console.log("Existing websocket is closed. Reopening new connection.");
// pass through to continue connection
}
}
}
// continue connection
let connected: any;
let failed: any;
const rtv = new Promise<void>((resolve, reject) => {
connected = resolve;
failed = reject;
});
this.coder = new Coder(cnn.key as number[]);
try {
this.websocket = new WebSocket(cnn.wsURI);
this.websocket.onerror = (err) => {
firstTime() && failed(err);
};
} catch (err) {
console.error(`Could not create WebSocket connection to ${cnn.wsURI}:\n${err}`);
firstTime() && failed();
return;
}
// Websocket.onclose should race against authentication
this.websocket.onclose = (evt: CloseEvent) => {
this.invoke_cb("disconnected");
console.warn("Websocket lost connection.");
clearInterval(this.pingLoop);
for (const i in this.requests) {
if (parseInt(i) >= -2) {
if (evt.code === OnCloseCode.Unknown) {
this.requests[i].reject(new E.WebsocketError(evt.reason));
} else {
this.requests[i].reject(new E.RequestAborted("RequestAborted: Websocket disconnected."));
}
}
if (parseInt(i) >= 0) {
delete this.requests[i];
}
}
// exited abnormally,
// could be internet disruption, handshake timeout, connection refused
// if this.connection exists, then we have successfully connected automatically reconnect after 3s
if (evt.code === OnCloseCode.Abnormal && firstTime()) {
firstTime() && failed();
return;
}
// all other onclose codes:
console.warn("Reconnect in 5 seconds...");
setTimeout(() => {
// when user logs out,
// this.disconnect must clear this.connection
if (this.connection) {
// ignore a failure, we will attempt to reconnect anyway
this.connect(cnn).catch((err) => { });
} else {
console.warn("Gave up reconnection. User probably logged out.");
}
}, RESPONSE_TIMEOUT);
};
this.websocket.onopen = () => {
let timeoutCount = 0;
if (this.pingLoop) {
clearInterval(this.pingLoop);
}
this.pingLoop = setInterval(async () => {
timeoutCount++;
if (timeoutCount >= 3) {
console.warn(`Ping timed out. Server is not responsive. [${timeoutCount - 2}]`);
if (this.websocket) {// Always reachable
console.warn("Closing connection to restart...");
this.websocket.close(OnCloseCode.PingTimedOut); // force reconnect
}
}
await this.ping();
timeoutCount = 0;
}, RESPONSE_TIMEOUT);
};
this.websocket.onmessage = async (message: MessageEvent) => {
if (message.data instanceof Blob) {
const readerT = new FileReader();
readerT.onloadend = async () => {
await this.resolveRequest(readerT.result);
};
readerT.readAsText(message.data);
} else {
const u8arr = new Uint8Array(message.data);
const str: string = R.reduce((str, byte) => str + String.fromCharCode(byte), "", Array.from(u8arr));
await this.resolveRequest(str);
}
};
this.debug && console.log("Waiting for server response -- setting timeout for %d seconds...", RESPONSE_TIMEOUT);
// if the server doesn't response in 5s
// the default chrome's handshake timeout is too long
let responseTimeout = setTimeout(() => {
if (this.websocket) {
this.websocket.close();
// will close with 1006
// fall back to websocket.onclose()
}
}, RESPONSE_TIMEOUT);
let serverChallenge: any;
try {
serverChallenge = await this.requests[-1].received;
} catch (err) {
console.warn("Invalid server response -- timeout cleared.");
clearTimeout(responseTimeout);
throw err;
}
clearTimeout(responseTimeout);
try {
const result = await this.coder.answer(serverChallenge);
const response = [result.iv,
result.encrypted,
result.authTag,
result.nonce];
this.requests[-2].message = {
id: -2,
type: "clientAuth",
response: response
};
this.debug && console.log("Authenticating websocket...");
// Authentication should race against websocket.onclose
await this.sendRequest(this.requests[-2]);
this.invoke_cb("connected");
} catch (err) {
if (err instanceof E.RequestError) {
firstTime() && failed();
return;
} else {
throw err;
}
}
// connection done
this.connection = cnn;
connected();
return rtv;
}
private resolveRequest(responseText: string): void {
const response = <Response>(JSON.parse(responseText));
// Assume the response holds the message and response.id holds the
// message identifier that corresponds with it
// response.result will hold the result if the API call succeeded,
// error message otherwise.
const request = this.requests[response.id];
const time = new Date();
const diff = request.time ? time.getTime() - request.time : -1;
if (response.success) {
if (request.message.type === "ping") {
this.lastPong = Date.now();
} else {
this.debug && console.log(`Request ${response.id} succeeded after ${diff} ms`, response);
}
} else {
this.debug && console.warn(`Request ${response.id} failed after ${diff} ms`, request.message, response);
}
if (response.id >= 0) {
delete this.requests[response.id];
} else if (this.requests[response.id].callback) {
this.requests[response.id].callback(response.result);
}
if (response.success) {
request.resolve(response.result);
} else if (! this.isConnected()) {
// test if is not authenticated
// better have the backend reject with "unauthenticated" error,
// instead of the current "invalid message"
request.reject(new E.LoginRequired());
} else {
const diff = request.time ? time.getTime() - request.time : -1;
request.reject(new E.RequestError(`Request ${request.message.id} failed with response: ${response.result}`,
request,
response));
}
}
public disconnect(): void {
this.connection = undefined;
if (this.websocket) {
this.websocket.close(OnCloseCode.Normal);
clearInterval(this.pingLoop);
}
this.websocket = undefined;
}
public register_callback(type: string, cb: (message?: any) => any, now?: boolean): number {
this.callbacks[this.lastCB++] = new Callback(type, cb, now || false);
if (type === "disconnected" && ! this.isConnected() && now) {
cb();
} else if (type === "connected" && this.isConnected() && now) {
cb();
}
return this.lastCB - 1;
}
public unregister_callback(key: number) {
delete this.callbacks[key];
}
public async invoke_cb(type: string, message?: any): Promise<Array<any>> {
return (<any>Object).values(this.callbacks).filter(
(x: Callback) => { return x && x.type === type; }).map(
async (x: Callback) => { return x.cb(message); });
}
// Helper function to invoke the I/O callback.
private gen_cb = (type: string) => {
return async (message: any) => {
return this.invoke_cb(type, message);
};
}
private sendRequest<T>(request: Request<T>): Promise<T> {
// If user's computer goes to sleep the server doesn't receive response for 5 minutes,
// the next request should throw LoginRequired since the server will die of inactivity.
// This timeout should be <= the timeout on the server side.
// if (! R.contains(request.message.type, ["clientAuth", "authenticate", "ping"]) &&
// Date.now() - this.lastPong >= 10 * 1000) {
// throw new E.LoginRequired("You've been offline for a while. We'd like to confirm who you are.");
// }
if (! this.isConnected()) {
throw new E.NoInternet();
}
const msg = request.message;
const msgID = msg.id;
this.requests[msgID] = request;
const blob = JSON.stringify(msg);
if (msg.type !== "ping") {
this.debug && console.log(`Request ${msgID} was sent`, msg);
}
if (this.websocket) {
this.websocket.send(blob);
} else {
throw new E.LoginRequired();
}
return request.received;
}
/** Sends a message along the connection, ensuring that
* the server and client are properly authenticated.
*
* If the socket has not been properly authenticated,
* sends the message after the socket has been properly
* authenticated/set up. */
public sendMessage<T>(message: Message): Promise<T> {
const msgID = this.lastMsgID++;
message.id = msgID;
return this.sendRequest<T>(new Request<T>(message));
}
public isConnected(): boolean {
return this.websocket !== undefined &&
this.websocket.readyState === this.websocket.OPEN;
}
public async ping(): Promise<void> {
await this.sendMessage({
type: "ping"
});
}
public getUsername(): string |
}
| {
if (!this.connection) {
throw new E.WebsocketError("Trying to access username when the connection is not set.");
}
return this.connection.username;
} | identifier_body |
WebsocketClient.ts | import {AbstractCoder, Coder} from "../Crypto";
import {Connection} from "./Interface";
import * as R from "ramda";
import * as E from "../Errors";
import {Message, Request, Response, Callback} from "./Interface";
import {appStateActions} from "../../reducers/appStateReducer";
export {SeashellWebsocket}
enum OnCloseCode {
Normal = 1000,
Abnormal = 1006,
Unknown = 4000,
PingTimedOut = 4001
};
const RESPONSE_TIMEOUT = 5000;
class SeashellWebsocket {
public connection?: Connection;
private coder: AbstractCoder;
private websocket?: WebSocket;
private lastMsgID: number;
public requests: {[index: number]: Request<any>};
private closes: () => void;
private failures: () => void;
public debug: boolean; // toggle console.log for tests
private pingLoop: any;
private callbacks: {[index: number]: Callback};
private lastPong: number = 0;
private lastCB: number = 0;
constructor(debug?: boolean) {
this.debug = debug || false;
this.callbacks = [];
}
// Connects and authenticates the socket, sets up the disconnection monitor
// Pass a new Connection object to overwrite the previously held one
// It must be safe to call this function consecutively many times
public async connect(cnn: Connection): Promise<void> {
if (cnn.offline) {
// if offline, set the offline connection and exit
this.connection = cnn;
return;
}
const firstTime: () => boolean = () => ! this.connection;
console.log("Connecting to websocket...");
this.lastMsgID = 0;
this.requests = {};
this.requests[-1] = new Request({id: -1}); // server challenge
this.requests[-2] = new Request({id: -2}); // reply challenge
this.requests[-3] = new Request({id: -3});
this.requests[-4] = new Request({id: -4});
this.requests[-5] = new Request({id: -5});
this.requests[-3].callback = this.gen_cb("io");
this.requests[-4].callback = this.gen_cb("test");
this.requests[-5].callback = this.gen_cb("changes");
// if there's an existing websocket,
// if it's connecting or open: do nothing
// if it's closing or closed: schedule to open a new connection
if (this.websocket) {
const websocket = this.websocket;
switch (websocket.readyState) {
case websocket.CONNECTING: {
console.log("Socket is already connecting. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.OPEN: {
console.log("Socket is already connected. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.CLOSING: {
console.log("Existing websocket is closing. Wait to reopen new connection.");
const promise = new Promise<void>((accept, reject) => {
// wait for a graceful shutdown then reconnect
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
return promise;
}
case websocket.CLOSED: {
console.log("Existing websocket is closed. Reopening new connection.");
// pass through to continue connection
}
}
}
// continue connection
let connected: any;
let failed: any;
const rtv = new Promise<void>((resolve, reject) => {
connected = resolve;
failed = reject;
});
this.coder = new Coder(cnn.key as number[]);
try {
this.websocket = new WebSocket(cnn.wsURI);
this.websocket.onerror = (err) => {
firstTime() && failed(err);
};
} catch (err) {
console.error(`Could not create WebSocket connection to ${cnn.wsURI}:\n${err}`);
firstTime() && failed();
return;
}
// Websocket.onclose should race against authentication
this.websocket.onclose = (evt: CloseEvent) => {
this.invoke_cb("disconnected");
console.warn("Websocket lost connection.");
clearInterval(this.pingLoop);
for (const i in this.requests) {
if (parseInt(i) >= -2) {
if (evt.code === OnCloseCode.Unknown) {
this.requests[i].reject(new E.WebsocketError(evt.reason));
} else {
this.requests[i].reject(new E.RequestAborted("RequestAborted: Websocket disconnected."));
}
}
if (parseInt(i) >= 0) {
delete this.requests[i];
}
}
// exited abnormally,
// could be internet disruption, handshake timeout, connection refused
// if this.connection exists, then we have successfully connected automatically reconnect after 3s
if (evt.code === OnCloseCode.Abnormal && firstTime()) {
firstTime() && failed();
return;
}
// all other onclose codes:
console.warn("Reconnect in 5 seconds...");
setTimeout(() => {
// when user logs out,
// this.disconnect must clear this.connection
if (this.connection) {
// ignore a failure, we will attempt to reconnect anyway
this.connect(cnn).catch((err) => { });
} else {
console.warn("Gave up reconnection. User probably logged out.");
}
}, RESPONSE_TIMEOUT);
};
this.websocket.onopen = () => {
let timeoutCount = 0;
if (this.pingLoop) {
clearInterval(this.pingLoop);
}
this.pingLoop = setInterval(async () => {
timeoutCount++;
if (timeoutCount >= 3) {
console.warn(`Ping timed out. Server is not responsive. [${timeoutCount - 2}]`);
if (this.websocket) {// Always reachable
console.warn("Closing connection to restart...");
this.websocket.close(OnCloseCode.PingTimedOut); // force reconnect
}
}
await this.ping();
timeoutCount = 0;
}, RESPONSE_TIMEOUT);
};
this.websocket.onmessage = async (message: MessageEvent) => {
if (message.data instanceof Blob) {
const readerT = new FileReader();
readerT.onloadend = async () => {
await this.resolveRequest(readerT.result);
};
readerT.readAsText(message.data);
} else {
const u8arr = new Uint8Array(message.data);
const str: string = R.reduce((str, byte) => str + String.fromCharCode(byte), "", Array.from(u8arr));
await this.resolveRequest(str);
}
};
this.debug && console.log("Waiting for server response -- setting timeout for %d seconds...", RESPONSE_TIMEOUT);
// if the server doesn't response in 5s
// the default chrome's handshake timeout is too long
let responseTimeout = setTimeout(() => {
if (this.websocket) {
this.websocket.close();
// will close with 1006
// fall back to websocket.onclose()
}
}, RESPONSE_TIMEOUT);
let serverChallenge: any;
try {
serverChallenge = await this.requests[-1].received;
} catch (err) {
console.warn("Invalid server response -- timeout cleared.");
clearTimeout(responseTimeout);
throw err;
}
clearTimeout(responseTimeout);
try {
const result = await this.coder.answer(serverChallenge);
const response = [result.iv,
result.encrypted,
result.authTag,
result.nonce];
this.requests[-2].message = {
id: -2,
type: "clientAuth",
response: response
};
this.debug && console.log("Authenticating websocket...");
// Authentication should race against websocket.onclose
await this.sendRequest(this.requests[-2]);
this.invoke_cb("connected");
} catch (err) {
if (err instanceof E.RequestError) {
firstTime() && failed();
return;
} else {
throw err;
}
}
// connection done
this.connection = cnn;
connected();
return rtv;
}
private resolveRequest(responseText: string): void {
const response = <Response>(JSON.parse(responseText));
// Assume the response holds the message and response.id holds the
// message identifier that corresponds with it
// response.result will hold the result if the API call succeeded,
// error message otherwise.
const request = this.requests[response.id];
const time = new Date();
const diff = request.time ? time.getTime() - request.time : -1;
if (response.success) {
if (request.message.type === "ping") {
this.lastPong = Date.now();
} else {
this.debug && console.log(`Request ${response.id} succeeded after ${diff} ms`, response);
}
} else {
this.debug && console.warn(`Request ${response.id} failed after ${diff} ms`, request.message, response);
}
if (response.id >= 0) {
delete this.requests[response.id];
} else if (this.requests[response.id].callback) {
this.requests[response.id].callback(response.result);
}
if (response.success) {
request.resolve(response.result);
} else if (! this.isConnected()) {
// test if is not authenticated
// better have the backend reject with "unauthenticated" error,
// instead of the current "invalid message"
request.reject(new E.LoginRequired());
} else {
const diff = request.time ? time.getTime() - request.time : -1;
request.reject(new E.RequestError(`Request ${request.message.id} failed with response: ${response.result}`,
request,
response));
}
}
public disconnect(): void {
this.connection = undefined;
if (this.websocket) {
this.websocket.close(OnCloseCode.Normal);
clearInterval(this.pingLoop);
}
this.websocket = undefined;
}
public register_callback(type: string, cb: (message?: any) => any, now?: boolean): number {
this.callbacks[this.lastCB++] = new Callback(type, cb, now || false);
if (type === "disconnected" && ! this.isConnected() && now) {
cb();
} else if (type === "connected" && this.isConnected() && now) {
cb();
}
return this.lastCB - 1;
}
public unregister_callback(key: number) {
delete this.callbacks[key];
}
public async invoke_cb(type: string, message?: any): Promise<Array<any>> {
return (<any>Object).values(this.callbacks).filter(
(x: Callback) => { return x && x.type === type; }).map(
async (x: Callback) => { return x.cb(message); });
}
// Helper function to invoke the I/O callback.
private gen_cb = (type: string) => {
return async (message: any) => {
return this.invoke_cb(type, message);
};
}
private sendRequest<T>(request: Request<T>): Promise<T> {
// If user's computer goes to sleep the server doesn't receive response for 5 minutes,
// the next request should throw LoginRequired since the server will die of inactivity.
// This timeout should be <= the timeout on the server side.
// if (! R.contains(request.message.type, ["clientAuth", "authenticate", "ping"]) &&
// Date.now() - this.lastPong >= 10 * 1000) {
// throw new E.LoginRequired("You've been offline for a while. We'd like to confirm who you are.");
// }
if (! this.isConnected()) {
throw new E.NoInternet();
}
const msg = request.message;
const msgID = msg.id;
this.requests[msgID] = request;
const blob = JSON.stringify(msg);
if (msg.type !== "ping") {
this.debug && console.log(`Request ${msgID} was sent`, msg);
}
if (this.websocket) {
this.websocket.send(blob);
} else {
throw new E.LoginRequired();
}
return request.received;
}
/** Sends a message along the connection, ensuring that
* the server and client are properly authenticated.
*
* If the socket has not been properly authenticated,
* sends the message after the socket has been properly
* authenticated/set up. */
public | <T>(message: Message): Promise<T> {
const msgID = this.lastMsgID++;
message.id = msgID;
return this.sendRequest<T>(new Request<T>(message));
}
public isConnected(): boolean {
return this.websocket !== undefined &&
this.websocket.readyState === this.websocket.OPEN;
}
public async ping(): Promise<void> {
await this.sendMessage({
type: "ping"
});
}
public getUsername(): string {
if (!this.connection) {
throw new E.WebsocketError("Trying to access username when the connection is not set.");
}
return this.connection.username;
}
}
| sendMessage | identifier_name |
WebsocketClient.ts | import {AbstractCoder, Coder} from "../Crypto";
import {Connection} from "./Interface";
import * as R from "ramda";
import * as E from "../Errors";
import {Message, Request, Response, Callback} from "./Interface";
import {appStateActions} from "../../reducers/appStateReducer";
export {SeashellWebsocket}
enum OnCloseCode {
Normal = 1000,
Abnormal = 1006,
Unknown = 4000,
PingTimedOut = 4001
};
const RESPONSE_TIMEOUT = 5000;
class SeashellWebsocket {
public connection?: Connection;
private coder: AbstractCoder;
private websocket?: WebSocket;
private lastMsgID: number;
public requests: {[index: number]: Request<any>};
private closes: () => void;
private failures: () => void;
public debug: boolean; // toggle console.log for tests
private pingLoop: any;
private callbacks: {[index: number]: Callback};
private lastPong: number = 0;
private lastCB: number = 0;
constructor(debug?: boolean) {
this.debug = debug || false;
this.callbacks = [];
}
// Connects and authenticates the socket, sets up the disconnection monitor
// Pass a new Connection object to overwrite the previously held one
// It must be safe to call this function consecutively many times
public async connect(cnn: Connection): Promise<void> {
if (cnn.offline) {
// if offline, set the offline connection and exit
this.connection = cnn;
return;
}
const firstTime: () => boolean = () => ! this.connection;
console.log("Connecting to websocket...");
this.lastMsgID = 0;
this.requests = {};
this.requests[-1] = new Request({id: -1}); // server challenge
this.requests[-2] = new Request({id: -2}); // reply challenge
this.requests[-3] = new Request({id: -3});
this.requests[-4] = new Request({id: -4});
this.requests[-5] = new Request({id: -5});
this.requests[-3].callback = this.gen_cb("io");
this.requests[-4].callback = this.gen_cb("test");
this.requests[-5].callback = this.gen_cb("changes");
// if there's an existing websocket,
// if it's connecting or open: do nothing
// if it's closing or closed: schedule to open a new connection
if (this.websocket) {
const websocket = this.websocket;
switch (websocket.readyState) {
case websocket.CONNECTING: {
console.log("Socket is already connecting. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.OPEN: {
console.log("Socket is already connected. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.CLOSING: {
console.log("Existing websocket is closing. Wait to reopen new connection.");
const promise = new Promise<void>((accept, reject) => {
// wait for a graceful shutdown then reconnect
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
return promise;
}
case websocket.CLOSED: {
console.log("Existing websocket is closed. Reopening new connection.");
// pass through to continue connection
}
}
}
// continue connection
let connected: any;
let failed: any;
const rtv = new Promise<void>((resolve, reject) => {
connected = resolve;
failed = reject;
});
this.coder = new Coder(cnn.key as number[]);
try {
this.websocket = new WebSocket(cnn.wsURI);
this.websocket.onerror = (err) => {
firstTime() && failed(err);
};
} catch (err) {
console.error(`Could not create WebSocket connection to ${cnn.wsURI}:\n${err}`);
firstTime() && failed();
return;
}
// Websocket.onclose should race against authentication
this.websocket.onclose = (evt: CloseEvent) => {
this.invoke_cb("disconnected");
console.warn("Websocket lost connection.");
clearInterval(this.pingLoop);
for (const i in this.requests) {
if (parseInt(i) >= -2) {
if (evt.code === OnCloseCode.Unknown) {
this.requests[i].reject(new E.WebsocketError(evt.reason));
} else {
this.requests[i].reject(new E.RequestAborted("RequestAborted: Websocket disconnected."));
}
}
if (parseInt(i) >= 0) {
delete this.requests[i];
}
}
// exited abnormally,
// could be internet disruption, handshake timeout, connection refused
// if this.connection exists, then we have successfully connected automatically reconnect after 3s
if (evt.code === OnCloseCode.Abnormal && firstTime()) {
firstTime() && failed();
return;
}
// all other onclose codes:
console.warn("Reconnect in 5 seconds...");
setTimeout(() => {
// when user logs out,
// this.disconnect must clear this.connection
if (this.connection) {
// ignore a failure, we will attempt to reconnect anyway
this.connect(cnn).catch((err) => { });
} else {
console.warn("Gave up reconnection. User probably logged out.");
}
}, RESPONSE_TIMEOUT);
};
this.websocket.onopen = () => {
let timeoutCount = 0;
if (this.pingLoop) {
clearInterval(this.pingLoop);
}
this.pingLoop = setInterval(async () => {
timeoutCount++;
if (timeoutCount >= 3) {
console.warn(`Ping timed out. Server is not responsive. [${timeoutCount - 2}]`);
if (this.websocket) {// Always reachable
console.warn("Closing connection to restart...");
this.websocket.close(OnCloseCode.PingTimedOut); // force reconnect
}
}
await this.ping();
timeoutCount = 0;
}, RESPONSE_TIMEOUT);
}; | const readerT = new FileReader();
readerT.onloadend = async () => {
await this.resolveRequest(readerT.result);
};
readerT.readAsText(message.data);
} else {
const u8arr = new Uint8Array(message.data);
const str: string = R.reduce((str, byte) => str + String.fromCharCode(byte), "", Array.from(u8arr));
await this.resolveRequest(str);
}
};
this.debug && console.log("Waiting for server response -- setting timeout for %d seconds...", RESPONSE_TIMEOUT);
// if the server doesn't response in 5s
// the default chrome's handshake timeout is too long
let responseTimeout = setTimeout(() => {
if (this.websocket) {
this.websocket.close();
// will close with 1006
// fall back to websocket.onclose()
}
}, RESPONSE_TIMEOUT);
let serverChallenge: any;
try {
serverChallenge = await this.requests[-1].received;
} catch (err) {
console.warn("Invalid server response -- timeout cleared.");
clearTimeout(responseTimeout);
throw err;
}
clearTimeout(responseTimeout);
try {
const result = await this.coder.answer(serverChallenge);
const response = [result.iv,
result.encrypted,
result.authTag,
result.nonce];
this.requests[-2].message = {
id: -2,
type: "clientAuth",
response: response
};
this.debug && console.log("Authenticating websocket...");
// Authentication should race against websocket.onclose
await this.sendRequest(this.requests[-2]);
this.invoke_cb("connected");
} catch (err) {
if (err instanceof E.RequestError) {
firstTime() && failed();
return;
} else {
throw err;
}
}
// connection done
this.connection = cnn;
connected();
return rtv;
}
private resolveRequest(responseText: string): void {
const response = <Response>(JSON.parse(responseText));
// Assume the response holds the message and response.id holds the
// message identifier that corresponds with it
// response.result will hold the result if the API call succeeded,
// error message otherwise.
const request = this.requests[response.id];
const time = new Date();
const diff = request.time ? time.getTime() - request.time : -1;
if (response.success) {
if (request.message.type === "ping") {
this.lastPong = Date.now();
} else {
this.debug && console.log(`Request ${response.id} succeeded after ${diff} ms`, response);
}
} else {
this.debug && console.warn(`Request ${response.id} failed after ${diff} ms`, request.message, response);
}
if (response.id >= 0) {
delete this.requests[response.id];
} else if (this.requests[response.id].callback) {
this.requests[response.id].callback(response.result);
}
if (response.success) {
request.resolve(response.result);
} else if (! this.isConnected()) {
// test if is not authenticated
// better have the backend reject with "unauthenticated" error,
// instead of the current "invalid message"
request.reject(new E.LoginRequired());
} else {
const diff = request.time ? time.getTime() - request.time : -1;
request.reject(new E.RequestError(`Request ${request.message.id} failed with response: ${response.result}`,
request,
response));
}
}
public disconnect(): void {
this.connection = undefined;
if (this.websocket) {
this.websocket.close(OnCloseCode.Normal);
clearInterval(this.pingLoop);
}
this.websocket = undefined;
}
public register_callback(type: string, cb: (message?: any) => any, now?: boolean): number {
this.callbacks[this.lastCB++] = new Callback(type, cb, now || false);
if (type === "disconnected" && ! this.isConnected() && now) {
cb();
} else if (type === "connected" && this.isConnected() && now) {
cb();
}
return this.lastCB - 1;
}
public unregister_callback(key: number) {
delete this.callbacks[key];
}
public async invoke_cb(type: string, message?: any): Promise<Array<any>> {
return (<any>Object).values(this.callbacks).filter(
(x: Callback) => { return x && x.type === type; }).map(
async (x: Callback) => { return x.cb(message); });
}
// Helper function to invoke the I/O callback.
private gen_cb = (type: string) => {
return async (message: any) => {
return this.invoke_cb(type, message);
};
}
private sendRequest<T>(request: Request<T>): Promise<T> {
// If user's computer goes to sleep the server doesn't receive response for 5 minutes,
// the next request should throw LoginRequired since the server will die of inactivity.
// This timeout should be <= the timeout on the server side.
// if (! R.contains(request.message.type, ["clientAuth", "authenticate", "ping"]) &&
// Date.now() - this.lastPong >= 10 * 1000) {
// throw new E.LoginRequired("You've been offline for a while. We'd like to confirm who you are.");
// }
if (! this.isConnected()) {
throw new E.NoInternet();
}
const msg = request.message;
const msgID = msg.id;
this.requests[msgID] = request;
const blob = JSON.stringify(msg);
if (msg.type !== "ping") {
this.debug && console.log(`Request ${msgID} was sent`, msg);
}
if (this.websocket) {
this.websocket.send(blob);
} else {
throw new E.LoginRequired();
}
return request.received;
}
/** Sends a message along the connection, ensuring that
* the server and client are properly authenticated.
*
* If the socket has not been properly authenticated,
* sends the message after the socket has been properly
* authenticated/set up. */
public sendMessage<T>(message: Message): Promise<T> {
const msgID = this.lastMsgID++;
message.id = msgID;
return this.sendRequest<T>(new Request<T>(message));
}
public isConnected(): boolean {
return this.websocket !== undefined &&
this.websocket.readyState === this.websocket.OPEN;
}
public async ping(): Promise<void> {
await this.sendMessage({
type: "ping"
});
}
public getUsername(): string {
if (!this.connection) {
throw new E.WebsocketError("Trying to access username when the connection is not set.");
}
return this.connection.username;
}
} |
this.websocket.onmessage = async (message: MessageEvent) => {
if (message.data instanceof Blob) { | random_line_split |
WebsocketClient.ts | import {AbstractCoder, Coder} from "../Crypto";
import {Connection} from "./Interface";
import * as R from "ramda";
import * as E from "../Errors";
import {Message, Request, Response, Callback} from "./Interface";
import {appStateActions} from "../../reducers/appStateReducer";
export {SeashellWebsocket}
enum OnCloseCode {
Normal = 1000,
Abnormal = 1006,
Unknown = 4000,
PingTimedOut = 4001
};
const RESPONSE_TIMEOUT = 5000;
class SeashellWebsocket {
public connection?: Connection;
private coder: AbstractCoder;
private websocket?: WebSocket;
private lastMsgID: number;
public requests: {[index: number]: Request<any>};
private closes: () => void;
private failures: () => void;
public debug: boolean; // toggle console.log for tests
private pingLoop: any;
private callbacks: {[index: number]: Callback};
private lastPong: number = 0;
private lastCB: number = 0;
constructor(debug?: boolean) {
this.debug = debug || false;
this.callbacks = [];
}
// Connects and authenticates the socket, sets up the disconnection monitor
// Pass a new Connection object to overwrite the previously held one
// It must be safe to call this function consecutively many times
public async connect(cnn: Connection): Promise<void> {
if (cnn.offline) {
// if offline, set the offline connection and exit
this.connection = cnn;
return;
}
const firstTime: () => boolean = () => ! this.connection;
console.log("Connecting to websocket...");
this.lastMsgID = 0;
this.requests = {};
this.requests[-1] = new Request({id: -1}); // server challenge
this.requests[-2] = new Request({id: -2}); // reply challenge
this.requests[-3] = new Request({id: -3});
this.requests[-4] = new Request({id: -4});
this.requests[-5] = new Request({id: -5});
this.requests[-3].callback = this.gen_cb("io");
this.requests[-4].callback = this.gen_cb("test");
this.requests[-5].callback = this.gen_cb("changes");
// if there's an existing websocket,
// if it's connecting or open: do nothing
// if it's closing or closed: schedule to open a new connection
if (this.websocket) {
const websocket = this.websocket;
switch (websocket.readyState) {
case websocket.CONNECTING: {
console.log("Socket is already connecting. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.OPEN: {
console.log("Socket is already connected. Closing and reconnecting.");
const promise = new Promise<void>((accept, reject) => {
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
websocket.close();
return promise;
}
case websocket.CLOSING: {
console.log("Existing websocket is closing. Wait to reopen new connection.");
const promise = new Promise<void>((accept, reject) => {
// wait for a graceful shutdown then reconnect
websocket.onclose = () => {
this.connect(cnn).then(accept).catch(reject);
};
});
return promise;
}
case websocket.CLOSED: {
console.log("Existing websocket is closed. Reopening new connection.");
// pass through to continue connection
}
}
}
// continue connection
let connected: any;
let failed: any;
const rtv = new Promise<void>((resolve, reject) => {
connected = resolve;
failed = reject;
});
this.coder = new Coder(cnn.key as number[]);
try {
this.websocket = new WebSocket(cnn.wsURI);
this.websocket.onerror = (err) => {
firstTime() && failed(err);
};
} catch (err) {
console.error(`Could not create WebSocket connection to ${cnn.wsURI}:\n${err}`);
firstTime() && failed();
return;
}
// Websocket.onclose should race against authentication
this.websocket.onclose = (evt: CloseEvent) => {
this.invoke_cb("disconnected");
console.warn("Websocket lost connection.");
clearInterval(this.pingLoop);
for (const i in this.requests) {
if (parseInt(i) >= -2) {
if (evt.code === OnCloseCode.Unknown) {
this.requests[i].reject(new E.WebsocketError(evt.reason));
} else {
this.requests[i].reject(new E.RequestAborted("RequestAborted: Websocket disconnected."));
}
}
if (parseInt(i) >= 0) {
delete this.requests[i];
}
}
// exited abnormally,
// could be internet disruption, handshake timeout, connection refused
// if this.connection exists, then we have successfully connected automatically reconnect after 3s
if (evt.code === OnCloseCode.Abnormal && firstTime()) {
firstTime() && failed();
return;
}
// all other onclose codes:
console.warn("Reconnect in 5 seconds...");
setTimeout(() => {
// when user logs out,
// this.disconnect must clear this.connection
if (this.connection) {
// ignore a failure, we will attempt to reconnect anyway
this.connect(cnn).catch((err) => { });
} else {
console.warn("Gave up reconnection. User probably logged out.");
}
}, RESPONSE_TIMEOUT);
};
this.websocket.onopen = () => {
let timeoutCount = 0;
if (this.pingLoop) {
clearInterval(this.pingLoop);
}
this.pingLoop = setInterval(async () => {
timeoutCount++;
if (timeoutCount >= 3) {
console.warn(`Ping timed out. Server is not responsive. [${timeoutCount - 2}]`);
if (this.websocket) {// Always reachable
console.warn("Closing connection to restart...");
this.websocket.close(OnCloseCode.PingTimedOut); // force reconnect
}
}
await this.ping();
timeoutCount = 0;
}, RESPONSE_TIMEOUT);
};
this.websocket.onmessage = async (message: MessageEvent) => {
if (message.data instanceof Blob) {
const readerT = new FileReader();
readerT.onloadend = async () => {
await this.resolveRequest(readerT.result);
};
readerT.readAsText(message.data);
} else {
const u8arr = new Uint8Array(message.data);
const str: string = R.reduce((str, byte) => str + String.fromCharCode(byte), "", Array.from(u8arr));
await this.resolveRequest(str);
}
};
this.debug && console.log("Waiting for server response -- setting timeout for %d seconds...", RESPONSE_TIMEOUT);
// if the server doesn't response in 5s
// the default chrome's handshake timeout is too long
let responseTimeout = setTimeout(() => {
if (this.websocket) {
this.websocket.close();
// will close with 1006
// fall back to websocket.onclose()
}
}, RESPONSE_TIMEOUT);
let serverChallenge: any;
try {
serverChallenge = await this.requests[-1].received;
} catch (err) {
console.warn("Invalid server response -- timeout cleared.");
clearTimeout(responseTimeout);
throw err;
}
clearTimeout(responseTimeout);
try {
const result = await this.coder.answer(serverChallenge);
const response = [result.iv,
result.encrypted,
result.authTag,
result.nonce];
this.requests[-2].message = {
id: -2,
type: "clientAuth",
response: response
};
this.debug && console.log("Authenticating websocket...");
// Authentication should race against websocket.onclose
await this.sendRequest(this.requests[-2]);
this.invoke_cb("connected");
} catch (err) {
if (err instanceof E.RequestError) {
firstTime() && failed();
return;
} else {
throw err;
}
}
// connection done
this.connection = cnn;
connected();
return rtv;
}
private resolveRequest(responseText: string): void {
const response = <Response>(JSON.parse(responseText));
// Assume the response holds the message and response.id holds the
// message identifier that corresponds with it
// response.result will hold the result if the API call succeeded,
// error message otherwise.
const request = this.requests[response.id];
const time = new Date();
const diff = request.time ? time.getTime() - request.time : -1;
if (response.success) {
if (request.message.type === "ping") {
this.lastPong = Date.now();
} else {
this.debug && console.log(`Request ${response.id} succeeded after ${diff} ms`, response);
}
} else {
this.debug && console.warn(`Request ${response.id} failed after ${diff} ms`, request.message, response);
}
if (response.id >= 0) {
delete this.requests[response.id];
} else if (this.requests[response.id].callback) {
this.requests[response.id].callback(response.result);
}
if (response.success) | else if (! this.isConnected()) {
// test if is not authenticated
// better have the backend reject with "unauthenticated" error,
// instead of the current "invalid message"
request.reject(new E.LoginRequired());
} else {
const diff = request.time ? time.getTime() - request.time : -1;
request.reject(new E.RequestError(`Request ${request.message.id} failed with response: ${response.result}`,
request,
response));
}
}
public disconnect(): void {
this.connection = undefined;
if (this.websocket) {
this.websocket.close(OnCloseCode.Normal);
clearInterval(this.pingLoop);
}
this.websocket = undefined;
}
public register_callback(type: string, cb: (message?: any) => any, now?: boolean): number {
this.callbacks[this.lastCB++] = new Callback(type, cb, now || false);
if (type === "disconnected" && ! this.isConnected() && now) {
cb();
} else if (type === "connected" && this.isConnected() && now) {
cb();
}
return this.lastCB - 1;
}
public unregister_callback(key: number) {
delete this.callbacks[key];
}
public async invoke_cb(type: string, message?: any): Promise<Array<any>> {
return (<any>Object).values(this.callbacks).filter(
(x: Callback) => { return x && x.type === type; }).map(
async (x: Callback) => { return x.cb(message); });
}
// Helper function to invoke the I/O callback.
private gen_cb = (type: string) => {
return async (message: any) => {
return this.invoke_cb(type, message);
};
}
private sendRequest<T>(request: Request<T>): Promise<T> {
// If user's computer goes to sleep the server doesn't receive response for 5 minutes,
// the next request should throw LoginRequired since the server will die of inactivity.
// This timeout should be <= the timeout on the server side.
// if (! R.contains(request.message.type, ["clientAuth", "authenticate", "ping"]) &&
// Date.now() - this.lastPong >= 10 * 1000) {
// throw new E.LoginRequired("You've been offline for a while. We'd like to confirm who you are.");
// }
if (! this.isConnected()) {
throw new E.NoInternet();
}
const msg = request.message;
const msgID = msg.id;
this.requests[msgID] = request;
const blob = JSON.stringify(msg);
if (msg.type !== "ping") {
this.debug && console.log(`Request ${msgID} was sent`, msg);
}
if (this.websocket) {
this.websocket.send(blob);
} else {
throw new E.LoginRequired();
}
return request.received;
}
/** Sends a message along the connection, ensuring that
* the server and client are properly authenticated.
*
* If the socket has not been properly authenticated,
* sends the message after the socket has been properly
* authenticated/set up. */
public sendMessage<T>(message: Message): Promise<T> {
const msgID = this.lastMsgID++;
message.id = msgID;
return this.sendRequest<T>(new Request<T>(message));
}
public isConnected(): boolean {
return this.websocket !== undefined &&
this.websocket.readyState === this.websocket.OPEN;
}
public async ping(): Promise<void> {
await this.sendMessage({
type: "ping"
});
}
public getUsername(): string {
if (!this.connection) {
throw new E.WebsocketError("Trying to access username when the connection is not set.");
}
return this.connection.username;
}
}
| {
request.resolve(response.result);
} | conditional_block |
fs.rs | use std::env;
use std::fs;
#[derive(Debug, Copy, Clone)]
pub enum LlamaFile {
SdCardImg,
NandImg,
NandCid,
AesKeyDb,
Otp,
Boot9,
Boot11,
}
#[cfg(not(target_os = "windows"))]
fn make_filepath(filename: &str) -> String {
format!("{}/.config/llama/{}", env::var("HOME").unwrap(), filename)
}
#[cfg(target_os = "windows")]
fn make_filepath(filename: &str) -> String {
format!("{}/llama/{}", env::var("APPDATA").unwrap(), filename)
}
fn get_path(lf: LlamaFile) -> String {
let filename = match lf {
LlamaFile::SdCardImg => "sd.fat",
LlamaFile::NandImg => "nand.bin",
LlamaFile::NandCid => "nand-cid.bin",
LlamaFile::AesKeyDb => "aeskeydb.bin",
LlamaFile::Otp => "otp.bin",
LlamaFile::Boot9 => "boot9.bin",
LlamaFile::Boot11 => "boot11.bin",
};
make_filepath(filename)
}
pub fn open_file(lf: LlamaFile) -> Result<fs::File, String> {
let path = get_path(lf);
let res = fs::OpenOptions::new().read(true).write(true).open(path.as_str());
match res {
Ok(file) => Ok(file),
Err(_) => Err(format!("Could not open file `{}`", path))
}
}
pub fn create_file<F>(lf: LlamaFile, initializer: F) -> Result<fs::File, String>
where F: FnOnce(&mut fs::File) {
let path = get_path(lf);
let res = fs::OpenOptions::new()
.read(true).write(true)
.create(true).truncate(true)
.open(path.as_str());
let mut file = match res {
Ok(file) => file,
Err(x) => return Err(format!("Could not create file `{}`; {:?}", path, x))
};
initializer(&mut file); | } | Ok(file) | random_line_split |
fs.rs | use std::env;
use std::fs;
#[derive(Debug, Copy, Clone)]
pub enum | {
SdCardImg,
NandImg,
NandCid,
AesKeyDb,
Otp,
Boot9,
Boot11,
}
#[cfg(not(target_os = "windows"))]
fn make_filepath(filename: &str) -> String {
format!("{}/.config/llama/{}", env::var("HOME").unwrap(), filename)
}
#[cfg(target_os = "windows")]
fn make_filepath(filename: &str) -> String {
format!("{}/llama/{}", env::var("APPDATA").unwrap(), filename)
}
fn get_path(lf: LlamaFile) -> String {
let filename = match lf {
LlamaFile::SdCardImg => "sd.fat",
LlamaFile::NandImg => "nand.bin",
LlamaFile::NandCid => "nand-cid.bin",
LlamaFile::AesKeyDb => "aeskeydb.bin",
LlamaFile::Otp => "otp.bin",
LlamaFile::Boot9 => "boot9.bin",
LlamaFile::Boot11 => "boot11.bin",
};
make_filepath(filename)
}
pub fn open_file(lf: LlamaFile) -> Result<fs::File, String> {
let path = get_path(lf);
let res = fs::OpenOptions::new().read(true).write(true).open(path.as_str());
match res {
Ok(file) => Ok(file),
Err(_) => Err(format!("Could not open file `{}`", path))
}
}
pub fn create_file<F>(lf: LlamaFile, initializer: F) -> Result<fs::File, String>
where F: FnOnce(&mut fs::File) {
let path = get_path(lf);
let res = fs::OpenOptions::new()
.read(true).write(true)
.create(true).truncate(true)
.open(path.as_str());
let mut file = match res {
Ok(file) => file,
Err(x) => return Err(format!("Could not create file `{}`; {:?}", path, x))
};
initializer(&mut file);
Ok(file)
}
| LlamaFile | identifier_name |
estimate-item-size.ts | /**
* Estimates the number of Write Capacity Units that will be consumed when writing this item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateWriteCapacityUnits(item: TDynamoDBItem): number {
return Math.ceil(estimateItemSize(item) / 1024);
}
| * Estimates the number of Read Capacity Units that will be consumed when reading this item from DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateReadCapacityUnits(item: TDynamoDBItem): number {
return Math.ceil(estimateItemSize(item) / 4096);
}
/**
* Estimates the size of a DynamoDB item in bytes.
*
* For practical purposes, this is useful for estimating the amount of capacity units that will
* be consumed when reading or writing an item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number} the estimated number of bytes the item will require for storage in DynamoDB
*/
export function estimateItemSize(item: TDynamoDBItem): number {
let totalBytes = 0;
for (let key in item) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(item[key], key);
/* tslint:enable:forin */
}
return totalBytes;
}
/**
* Estimates the size of a DynamoDB AttributeValue in bytes.
*
* @param {AttributeValue} value
* @param {string} name
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
function estimateAttributeValueSize(value: AttributeValue, name?: string): number {
let totalBytes = 0;
// add the size of the attribute name
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
if (name) {
totalBytes += name.length;
}
let attributeKey = Object.keys(value)[0];
switch (attributeKey) {
case 'NULL':
case 'BOOL':
// 1 byte to store a null or boolean value
totalBytes += 1;
break;
case 'N':
case 'S':
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += value[attributeKey].length;
break;
case 'NS':
case 'SS':
// sum of sizes of each element in the set
let eSet = value[attributeKey];
for (let e of eSet) {
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += e.length;
}
break;
case 'L':
// overhead required for a DynamoDB List
totalBytes += 3;
// sum of the sizes of all AttributeValue elements in the list
let list = value[attributeKey];
for (let v of list) {
totalBytes += estimateAttributeValueSize(v);
}
break;
case 'M':
// overhead required for a DynamoDB Map
totalBytes += 3;
// sum of sizes of each element in the map
let map = value[attributeKey];
for (let key in map) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(map[key], key);
/* tslint:enable:forin */
}
break;
case 'B':
throw new Error('NotYetImplementedException: DynamoDB Binary data type is not yet supported');
case 'BS':
throw new Error('NotYetImplementedException: DynamoDB BinarySet data type is not yet supported');
default:
throw new Error('ValidationException: Invalid attributeKey "' + attributeKey + '"');
}
return totalBytes;
} | /** | random_line_split |
estimate-item-size.ts | /**
* Estimates the number of Write Capacity Units that will be consumed when writing this item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateWriteCapacityUnits(item: TDynamoDBItem): number {
return Math.ceil(estimateItemSize(item) / 1024);
}
/**
* Estimates the number of Read Capacity Units that will be consumed when reading this item from DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateReadCapacityUnits(item: TDynamoDBItem): number |
/**
* Estimates the size of a DynamoDB item in bytes.
*
* For practical purposes, this is useful for estimating the amount of capacity units that will
* be consumed when reading or writing an item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number} the estimated number of bytes the item will require for storage in DynamoDB
*/
export function estimateItemSize(item: TDynamoDBItem): number {
let totalBytes = 0;
for (let key in item) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(item[key], key);
/* tslint:enable:forin */
}
return totalBytes;
}
/**
* Estimates the size of a DynamoDB AttributeValue in bytes.
*
* @param {AttributeValue} value
* @param {string} name
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
function estimateAttributeValueSize(value: AttributeValue, name?: string): number {
let totalBytes = 0;
// add the size of the attribute name
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
if (name) {
totalBytes += name.length;
}
let attributeKey = Object.keys(value)[0];
switch (attributeKey) {
case 'NULL':
case 'BOOL':
// 1 byte to store a null or boolean value
totalBytes += 1;
break;
case 'N':
case 'S':
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += value[attributeKey].length;
break;
case 'NS':
case 'SS':
// sum of sizes of each element in the set
let eSet = value[attributeKey];
for (let e of eSet) {
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += e.length;
}
break;
case 'L':
// overhead required for a DynamoDB List
totalBytes += 3;
// sum of the sizes of all AttributeValue elements in the list
let list = value[attributeKey];
for (let v of list) {
totalBytes += estimateAttributeValueSize(v);
}
break;
case 'M':
// overhead required for a DynamoDB Map
totalBytes += 3;
// sum of sizes of each element in the map
let map = value[attributeKey];
for (let key in map) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(map[key], key);
/* tslint:enable:forin */
}
break;
case 'B':
throw new Error('NotYetImplementedException: DynamoDB Binary data type is not yet supported');
case 'BS':
throw new Error('NotYetImplementedException: DynamoDB BinarySet data type is not yet supported');
default:
throw new Error('ValidationException: Invalid attributeKey "' + attributeKey + '"');
}
return totalBytes;
} | {
return Math.ceil(estimateItemSize(item) / 4096);
} | identifier_body |
estimate-item-size.ts | /**
* Estimates the number of Write Capacity Units that will be consumed when writing this item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateWriteCapacityUnits(item: TDynamoDBItem): number {
return Math.ceil(estimateItemSize(item) / 1024);
}
/**
* Estimates the number of Read Capacity Units that will be consumed when reading this item from DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
export function estimateReadCapacityUnits(item: TDynamoDBItem): number {
return Math.ceil(estimateItemSize(item) / 4096);
}
/**
* Estimates the size of a DynamoDB item in bytes.
*
* For practical purposes, this is useful for estimating the amount of capacity units that will
* be consumed when reading or writing an item to DynamoDB.
*
* @param {TDynamoDBItem} item
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number} the estimated number of bytes the item will require for storage in DynamoDB
*/
export function | (item: TDynamoDBItem): number {
let totalBytes = 0;
for (let key in item) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(item[key], key);
/* tslint:enable:forin */
}
return totalBytes;
}
/**
* Estimates the size of a DynamoDB AttributeValue in bytes.
*
* @param {AttributeValue} value
* @param {string} name
* @see http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ItemSizeCalculations
* @returns {number}
*/
function estimateAttributeValueSize(value: AttributeValue, name?: string): number {
let totalBytes = 0;
// add the size of the attribute name
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
if (name) {
totalBytes += name.length;
}
let attributeKey = Object.keys(value)[0];
switch (attributeKey) {
case 'NULL':
case 'BOOL':
// 1 byte to store a null or boolean value
totalBytes += 1;
break;
case 'N':
case 'S':
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += value[attributeKey].length;
break;
case 'NS':
case 'SS':
// sum of sizes of each element in the set
let eSet = value[attributeKey];
for (let e of eSet) {
// assume the number is stored in string format
// assume strings are ~1 byte per character (accurate for alphanumeric English UTF-8 text)
totalBytes += e.length;
}
break;
case 'L':
// overhead required for a DynamoDB List
totalBytes += 3;
// sum of the sizes of all AttributeValue elements in the list
let list = value[attributeKey];
for (let v of list) {
totalBytes += estimateAttributeValueSize(v);
}
break;
case 'M':
// overhead required for a DynamoDB Map
totalBytes += 3;
// sum of sizes of each element in the map
let map = value[attributeKey];
for (let key in map) {
/* tslint:disable:forin */
// noinspection JSUnfilteredForInLoop
totalBytes += estimateAttributeValueSize(map[key], key);
/* tslint:enable:forin */
}
break;
case 'B':
throw new Error('NotYetImplementedException: DynamoDB Binary data type is not yet supported');
case 'BS':
throw new Error('NotYetImplementedException: DynamoDB BinarySet data type is not yet supported');
default:
throw new Error('ValidationException: Invalid attributeKey "' + attributeKey + '"');
}
return totalBytes;
} | estimateItemSize | identifier_name |
SiteExplorerRounded.tsx | /*
* Copyright (C) 2007-2022 Crafter Software Corporation. All Rights Reserved.
* | * it under the terms of the GNU General Public License version 3 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import React from 'react';
import { createSvgIcon } from '@mui/material/utils';
export default createSvgIcon(
<>
<path d="M20 6H12L10.59 4.59C10.21 4.21 9.7 4 9.17 4H4C2.9 4 2.01 4.9 2.01 6L2 18C2 19.1 2.9 20 4 20H20C21.1 20 22 19.1 22 18V8C22 6.9 21.1 6 20 6ZM19 18H5C4.45 18 4 17.55 4 17V9C4 8.45 4.45 8 5 8H19C19.55 8 20 8.45 20 9V17C20 17.55 19.55 18 19 18Z" />
<path d="M13.9223 14.1946H13.5406L13.4053 14.0641C13.9851 13.3876 14.2847 12.4647 14.1204 11.4837C13.8933 10.1404 12.7723 9.06766 11.4193 8.90337C9.37527 8.6521 7.65503 10.3723 7.9063 12.4163C8.07059 13.7693 9.14333 14.8904 10.4867 15.1175C11.4676 15.2818 12.3905 14.9822 13.067 14.4023L13.1975 14.5376V14.9194L15.2511 16.973C15.4493 17.1711 15.773 17.1711 15.9711 16.973C16.1692 16.7749 16.1692 16.4512 15.9711 16.253L13.9223 14.1946ZM11.023 14.1946C9.81982 14.1946 8.84856 13.2233 8.84856 12.0201C8.84856 10.8169 9.81982 9.84564 11.023 9.84564C12.2262 9.84564 13.1975 10.8169 13.1975 12.0201C13.1975 13.2233 12.2262 14.1946 11.023 14.1946Z" />
</>,
'SiteExplorerRounded'
); | * This program is free software: you can redistribute it and/or modify | random_line_split |
information_theory.py | """
.. todo::
WRITEME
"""
import theano.tensor as T
from theano.gof.op import get_debug_values
from theano.gof.op import debug_assert
import numpy as np
from theano.tensor.xlogx import xlogx
from pylearn2.utils import contains_nan, isfinite
def | (P):
"""
.. todo::
WRITEME properly
If P[i,j] represents the probability of some binary random variable X[i,j]
being 1, then rval[i] gives the entropy of the random vector X[i,:]
"""
for Pv in get_debug_values(P):
assert Pv.min() >= 0.0
assert Pv.max() <= 1.0
oneMinusP = 1. - P
PlogP = xlogx(P)
omPlogOmP = xlogx(oneMinusP)
term1 = - T.sum(PlogP, axis=1)
assert len(term1.type.broadcastable) == 1
term2 = - T.sum(omPlogOmP, axis=1)
assert len(term2.type.broadcastable) == 1
rval = term1 + term2
debug_vals = get_debug_values(PlogP, omPlogOmP, term1, term2, rval)
for plp, olo, t1, t2, rv in debug_vals:
debug_assert(isfinite(plp))
debug_assert(isfinite(olo))
debug_assert(not contains_nan(t1))
debug_assert(not contains_nan(t2))
debug_assert(not contains_nan(rv))
return rval
| entropy_binary_vector | identifier_name |
information_theory.py | """
.. todo::
WRITEME
"""
import theano.tensor as T
from theano.gof.op import get_debug_values
from theano.gof.op import debug_assert
import numpy as np
from theano.tensor.xlogx import xlogx
from pylearn2.utils import contains_nan, isfinite
def entropy_binary_vector(P):
| """
.. todo::
WRITEME properly
If P[i,j] represents the probability of some binary random variable X[i,j]
being 1, then rval[i] gives the entropy of the random vector X[i,:]
"""
for Pv in get_debug_values(P):
assert Pv.min() >= 0.0
assert Pv.max() <= 1.0
oneMinusP = 1. - P
PlogP = xlogx(P)
omPlogOmP = xlogx(oneMinusP)
term1 = - T.sum(PlogP, axis=1)
assert len(term1.type.broadcastable) == 1
term2 = - T.sum(omPlogOmP, axis=1)
assert len(term2.type.broadcastable) == 1
rval = term1 + term2
debug_vals = get_debug_values(PlogP, omPlogOmP, term1, term2, rval)
for plp, olo, t1, t2, rv in debug_vals:
debug_assert(isfinite(plp))
debug_assert(isfinite(olo))
debug_assert(not contains_nan(t1))
debug_assert(not contains_nan(t2))
debug_assert(not contains_nan(rv))
return rval | identifier_body |
|
information_theory.py | """
.. todo::
WRITEME
"""
import theano.tensor as T
from theano.gof.op import get_debug_values
from theano.gof.op import debug_assert
import numpy as np
from theano.tensor.xlogx import xlogx
from pylearn2.utils import contains_nan, isfinite
def entropy_binary_vector(P):
"""
.. todo::
WRITEME properly
If P[i,j] represents the probability of some binary random variable X[i,j]
being 1, then rval[i] gives the entropy of the random vector X[i,:]
"""
for Pv in get_debug_values(P):
assert Pv.min() >= 0.0
assert Pv.max() <= 1.0
oneMinusP = 1. - P
PlogP = xlogx(P)
omPlogOmP = xlogx(oneMinusP)
term1 = - T.sum(PlogP, axis=1)
assert len(term1.type.broadcastable) == 1
term2 = - T.sum(omPlogOmP, axis=1)
assert len(term2.type.broadcastable) == 1
rval = term1 + term2
debug_vals = get_debug_values(PlogP, omPlogOmP, term1, term2, rval)
for plp, olo, t1, t2, rv in debug_vals:
debug_assert(isfinite(plp))
debug_assert(isfinite(olo))
debug_assert(not contains_nan(t1))
debug_assert(not contains_nan(t2))
debug_assert(not contains_nan(rv))
| return rval | random_line_split |
|
information_theory.py | """
.. todo::
WRITEME
"""
import theano.tensor as T
from theano.gof.op import get_debug_values
from theano.gof.op import debug_assert
import numpy as np
from theano.tensor.xlogx import xlogx
from pylearn2.utils import contains_nan, isfinite
def entropy_binary_vector(P):
"""
.. todo::
WRITEME properly
If P[i,j] represents the probability of some binary random variable X[i,j]
being 1, then rval[i] gives the entropy of the random vector X[i,:]
"""
for Pv in get_debug_values(P):
assert Pv.min() >= 0.0
assert Pv.max() <= 1.0
oneMinusP = 1. - P
PlogP = xlogx(P)
omPlogOmP = xlogx(oneMinusP)
term1 = - T.sum(PlogP, axis=1)
assert len(term1.type.broadcastable) == 1
term2 = - T.sum(omPlogOmP, axis=1)
assert len(term2.type.broadcastable) == 1
rval = term1 + term2
debug_vals = get_debug_values(PlogP, omPlogOmP, term1, term2, rval)
for plp, olo, t1, t2, rv in debug_vals:
|
return rval
| debug_assert(isfinite(plp))
debug_assert(isfinite(olo))
debug_assert(not contains_nan(t1))
debug_assert(not contains_nan(t2))
debug_assert(not contains_nan(rv)) | conditional_block |
cookieeditor.js | /**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Displays and edits the value of a cookie.
* Intended only for debugging.
*/
goog.provide('goog.ui.CookieEditor');
goog.require('goog.asserts');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.events.EventType');
goog.require('goog.net.Cookies');
goog.require('goog.string');
goog.require('goog.style');
goog.require('goog.ui.Component');
goog.requireType('goog.events.Event');
/**
* Displays and edits the value of a cookie.
* @final
* @unrestricted
*/
goog.ui.CookieEditor = class extends goog.ui.Component {
/**
* @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
*/
constructor(opt_domHelper) {
'use strict';
super(opt_domHelper);
}
/**
* Sets the cookie which this component will edit.
* @param {string} cookieKey Cookie key.
*/
selectCookie(cookieKey) {
'use strict';
goog.asserts.assert(goog.net.Cookies.getInstance().isValidName(cookieKey));
this.cookieKey_ = cookieKey;
if (this.textAreaElem_) {
this.textAreaElem_.value =
goog.net.Cookies.getInstance().get(cookieKey) || '';
}
}
/** @override */
canDecorate() {
'use strict';
return false;
}
/** @override */
createDom() {
'use strict';
// Debug-only, so we don't need i18n.
this.clearButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Clear');
this.updateButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Update');
var value =
this.cookieKey_ && goog.net.Cookies.getInstance().get(this.cookieKey_);
this.textAreaElem_ = goog.dom.createDom(
goog.dom.TagName.TEXTAREA, /* attibutes */ null, value || '');
this.valueWarningElem_ = goog.dom.createDom(
goog.dom.TagName.SPAN,
/* attibutes */ {'style': 'display:none;color:red'},
'Invalid cookie value.');
this.setElementInternal(goog.dom.createDom(
goog.dom.TagName.DIV,
/* attibutes */ null, this.valueWarningElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.textAreaElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.clearButtonElem_,
this.updateButtonElem_));
}
/** @override */
enterDocument() |
/**
* Handles user clicking clear button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleClear_(e) {
'use strict';
if (this.cookieKey_) {
goog.net.Cookies.getInstance().remove(this.cookieKey_);
}
this.textAreaElem_.value = '';
}
/**
* Handles user clicking update button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleUpdate_(e) {
'use strict';
if (this.cookieKey_) {
var value = this.textAreaElem_.value;
if (value) {
// Strip line breaks.
value = goog.string.stripNewlines(value);
}
if (goog.net.Cookies.getInstance().isValidValue(value)) {
goog.net.Cookies.getInstance().set(this.cookieKey_, value);
goog.style.setElementShown(this.valueWarningElem_, false);
} else {
goog.style.setElementShown(this.valueWarningElem_, true);
}
}
}
/** @override */
disposeInternal() {
'use strict';
this.clearButtonElem_ = null;
this.cookieKey_ = null;
this.textAreaElem_ = null;
this.updateButtonElem_ = null;
this.valueWarningElem_ = null;
}
};
/**
* Cookie key.
* @type {?string}
* @private
*/
goog.ui.CookieEditor.prototype.cookieKey_;
/**
* Text area.
* @type {HTMLTextAreaElement}
* @private
*/
goog.ui.CookieEditor.prototype.textAreaElem_;
/**
* Clear button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.clearButtonElem_;
/**
* Invalid value warning text.
* @type {HTMLSpanElement}
* @private
*/
goog.ui.CookieEditor.prototype.valueWarningElem_;
/**
* Update button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.updateButtonElem_;
// TODO(user): add combobox for user to select different cookies
| {
'use strict';
super.enterDocument();
this.getHandler().listen(
this.clearButtonElem_, goog.events.EventType.CLICK, this.handleClear_);
this.getHandler().listen(
this.updateButtonElem_, goog.events.EventType.CLICK,
this.handleUpdate_);
} | identifier_body |
cookieeditor.js | /**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Displays and edits the value of a cookie.
* Intended only for debugging.
*/
goog.provide('goog.ui.CookieEditor');
goog.require('goog.asserts');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.events.EventType');
goog.require('goog.net.Cookies');
goog.require('goog.string');
goog.require('goog.style');
goog.require('goog.ui.Component');
goog.requireType('goog.events.Event');
/**
* Displays and edits the value of a cookie.
* @final
* @unrestricted
*/
goog.ui.CookieEditor = class extends goog.ui.Component {
/**
* @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
*/
constructor(opt_domHelper) {
'use strict';
super(opt_domHelper);
}
/**
* Sets the cookie which this component will edit.
* @param {string} cookieKey Cookie key.
*/
selectCookie(cookieKey) {
'use strict';
goog.asserts.assert(goog.net.Cookies.getInstance().isValidName(cookieKey));
this.cookieKey_ = cookieKey;
if (this.textAreaElem_) {
this.textAreaElem_.value =
goog.net.Cookies.getInstance().get(cookieKey) || '';
}
}
/** @override */
canDecorate() {
'use strict';
return false;
}
/** @override */
createDom() {
'use strict';
// Debug-only, so we don't need i18n.
this.clearButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Clear');
this.updateButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Update');
var value =
this.cookieKey_ && goog.net.Cookies.getInstance().get(this.cookieKey_);
this.textAreaElem_ = goog.dom.createDom(
goog.dom.TagName.TEXTAREA, /* attibutes */ null, value || '');
this.valueWarningElem_ = goog.dom.createDom(
goog.dom.TagName.SPAN,
/* attibutes */ {'style': 'display:none;color:red'},
'Invalid cookie value.');
this.setElementInternal(goog.dom.createDom(
goog.dom.TagName.DIV,
/* attibutes */ null, this.valueWarningElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.textAreaElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.clearButtonElem_,
this.updateButtonElem_));
}
/** @override */
enterDocument() {
'use strict';
super.enterDocument();
this.getHandler().listen(
this.clearButtonElem_, goog.events.EventType.CLICK, this.handleClear_);
this.getHandler().listen(
this.updateButtonElem_, goog.events.EventType.CLICK,
this.handleUpdate_);
}
/**
* Handles user clicking clear button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleClear_(e) {
'use strict';
if (this.cookieKey_) {
goog.net.Cookies.getInstance().remove(this.cookieKey_);
}
this.textAreaElem_.value = '';
}
/**
* Handles user clicking update button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleUpdate_(e) {
'use strict';
if (this.cookieKey_) {
var value = this.textAreaElem_.value;
if (value) {
// Strip line breaks.
value = goog.string.stripNewlines(value);
}
if (goog.net.Cookies.getInstance().isValidValue(value)) | else {
goog.style.setElementShown(this.valueWarningElem_, true);
}
}
}
/** @override */
disposeInternal() {
'use strict';
this.clearButtonElem_ = null;
this.cookieKey_ = null;
this.textAreaElem_ = null;
this.updateButtonElem_ = null;
this.valueWarningElem_ = null;
}
};
/**
* Cookie key.
* @type {?string}
* @private
*/
goog.ui.CookieEditor.prototype.cookieKey_;
/**
* Text area.
* @type {HTMLTextAreaElement}
* @private
*/
goog.ui.CookieEditor.prototype.textAreaElem_;
/**
* Clear button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.clearButtonElem_;
/**
* Invalid value warning text.
* @type {HTMLSpanElement}
* @private
*/
goog.ui.CookieEditor.prototype.valueWarningElem_;
/**
* Update button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.updateButtonElem_;
// TODO(user): add combobox for user to select different cookies
| {
goog.net.Cookies.getInstance().set(this.cookieKey_, value);
goog.style.setElementShown(this.valueWarningElem_, false);
} | conditional_block |
cookieeditor.js | /**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Displays and edits the value of a cookie.
* Intended only for debugging.
*/
goog.provide('goog.ui.CookieEditor');
goog.require('goog.asserts');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.events.EventType');
goog.require('goog.net.Cookies');
goog.require('goog.string');
goog.require('goog.style');
goog.require('goog.ui.Component');
goog.requireType('goog.events.Event');
/**
* Displays and edits the value of a cookie.
* @final
* @unrestricted
*/
goog.ui.CookieEditor = class extends goog.ui.Component {
/**
* @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
*/
constructor(opt_domHelper) {
'use strict';
super(opt_domHelper);
}
/**
* Sets the cookie which this component will edit.
* @param {string} cookieKey Cookie key.
*/
selectCookie(cookieKey) {
'use strict';
goog.asserts.assert(goog.net.Cookies.getInstance().isValidName(cookieKey));
this.cookieKey_ = cookieKey;
if (this.textAreaElem_) {
this.textAreaElem_.value =
goog.net.Cookies.getInstance().get(cookieKey) || '';
}
}
/** @override */
canDecorate() {
'use strict';
return false;
}
/** @override */
createDom() {
'use strict';
// Debug-only, so we don't need i18n.
this.clearButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Clear');
this.updateButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Update');
var value =
this.cookieKey_ && goog.net.Cookies.getInstance().get(this.cookieKey_);
this.textAreaElem_ = goog.dom.createDom(
goog.dom.TagName.TEXTAREA, /* attibutes */ null, value || '');
this.valueWarningElem_ = goog.dom.createDom(
goog.dom.TagName.SPAN,
/* attibutes */ {'style': 'display:none;color:red'},
'Invalid cookie value.');
this.setElementInternal(goog.dom.createDom(
goog.dom.TagName.DIV,
/* attibutes */ null, this.valueWarningElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.textAreaElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.clearButtonElem_,
this.updateButtonElem_));
}
/** @override */
enterDocument() {
'use strict';
super.enterDocument();
this.getHandler().listen(
this.clearButtonElem_, goog.events.EventType.CLICK, this.handleClear_);
this.getHandler().listen(
this.updateButtonElem_, goog.events.EventType.CLICK,
this.handleUpdate_);
}
/**
* Handles user clicking clear button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleClear_(e) {
'use strict';
if (this.cookieKey_) {
goog.net.Cookies.getInstance().remove(this.cookieKey_);
}
this.textAreaElem_.value = '';
}
/**
* Handles user clicking update button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleUpdate_(e) {
'use strict';
if (this.cookieKey_) {
var value = this.textAreaElem_.value;
if (value) {
// Strip line breaks.
value = goog.string.stripNewlines(value);
}
if (goog.net.Cookies.getInstance().isValidValue(value)) {
goog.net.Cookies.getInstance().set(this.cookieKey_, value);
goog.style.setElementShown(this.valueWarningElem_, false);
} else {
goog.style.setElementShown(this.valueWarningElem_, true);
}
}
}
/** @override */
disposeInternal() {
'use strict';
this.clearButtonElem_ = null;
this.cookieKey_ = null;
this.textAreaElem_ = null;
this.updateButtonElem_ = null;
this.valueWarningElem_ = null;
}
};
/**
* Cookie key.
* @type {?string}
* @private
*/
goog.ui.CookieEditor.prototype.cookieKey_;
/**
* Text area.
* @type {HTMLTextAreaElement}
* @private
*/
goog.ui.CookieEditor.prototype.textAreaElem_;
/**
* Clear button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.clearButtonElem_;
/**
* Invalid value warning text.
* @type {HTMLSpanElement}
* @private
*/
goog.ui.CookieEditor.prototype.valueWarningElem_;
/** | * @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.updateButtonElem_;
// TODO(user): add combobox for user to select different cookies | * Update button. | random_line_split |
cookieeditor.js | /**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Displays and edits the value of a cookie.
* Intended only for debugging.
*/
goog.provide('goog.ui.CookieEditor');
goog.require('goog.asserts');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.events.EventType');
goog.require('goog.net.Cookies');
goog.require('goog.string');
goog.require('goog.style');
goog.require('goog.ui.Component');
goog.requireType('goog.events.Event');
/**
* Displays and edits the value of a cookie.
* @final
* @unrestricted
*/
goog.ui.CookieEditor = class extends goog.ui.Component {
/**
* @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
*/
constructor(opt_domHelper) {
'use strict';
super(opt_domHelper);
}
/**
* Sets the cookie which this component will edit.
* @param {string} cookieKey Cookie key.
*/
selectCookie(cookieKey) {
'use strict';
goog.asserts.assert(goog.net.Cookies.getInstance().isValidName(cookieKey));
this.cookieKey_ = cookieKey;
if (this.textAreaElem_) {
this.textAreaElem_.value =
goog.net.Cookies.getInstance().get(cookieKey) || '';
}
}
/** @override */
canDecorate() {
'use strict';
return false;
}
/** @override */
createDom() {
'use strict';
// Debug-only, so we don't need i18n.
this.clearButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Clear');
this.updateButtonElem_ = goog.dom.createDom(
goog.dom.TagName.BUTTON, /* attributes */ null, 'Update');
var value =
this.cookieKey_ && goog.net.Cookies.getInstance().get(this.cookieKey_);
this.textAreaElem_ = goog.dom.createDom(
goog.dom.TagName.TEXTAREA, /* attibutes */ null, value || '');
this.valueWarningElem_ = goog.dom.createDom(
goog.dom.TagName.SPAN,
/* attibutes */ {'style': 'display:none;color:red'},
'Invalid cookie value.');
this.setElementInternal(goog.dom.createDom(
goog.dom.TagName.DIV,
/* attibutes */ null, this.valueWarningElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.textAreaElem_,
goog.dom.createDom(goog.dom.TagName.BR), this.clearButtonElem_,
this.updateButtonElem_));
}
/** @override */
enterDocument() {
'use strict';
super.enterDocument();
this.getHandler().listen(
this.clearButtonElem_, goog.events.EventType.CLICK, this.handleClear_);
this.getHandler().listen(
this.updateButtonElem_, goog.events.EventType.CLICK,
this.handleUpdate_);
}
/**
* Handles user clicking clear button.
* @param {!goog.events.Event} e The click event.
* @private
*/
handleClear_(e) {
'use strict';
if (this.cookieKey_) {
goog.net.Cookies.getInstance().remove(this.cookieKey_);
}
this.textAreaElem_.value = '';
}
/**
* Handles user clicking update button.
* @param {!goog.events.Event} e The click event.
* @private
*/
| (e) {
'use strict';
if (this.cookieKey_) {
var value = this.textAreaElem_.value;
if (value) {
// Strip line breaks.
value = goog.string.stripNewlines(value);
}
if (goog.net.Cookies.getInstance().isValidValue(value)) {
goog.net.Cookies.getInstance().set(this.cookieKey_, value);
goog.style.setElementShown(this.valueWarningElem_, false);
} else {
goog.style.setElementShown(this.valueWarningElem_, true);
}
}
}
/** @override */
disposeInternal() {
'use strict';
this.clearButtonElem_ = null;
this.cookieKey_ = null;
this.textAreaElem_ = null;
this.updateButtonElem_ = null;
this.valueWarningElem_ = null;
}
};
/**
* Cookie key.
* @type {?string}
* @private
*/
goog.ui.CookieEditor.prototype.cookieKey_;
/**
* Text area.
* @type {HTMLTextAreaElement}
* @private
*/
goog.ui.CookieEditor.prototype.textAreaElem_;
/**
* Clear button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.clearButtonElem_;
/**
* Invalid value warning text.
* @type {HTMLSpanElement}
* @private
*/
goog.ui.CookieEditor.prototype.valueWarningElem_;
/**
* Update button.
* @type {HTMLButtonElement}
* @private
*/
goog.ui.CookieEditor.prototype.updateButtonElem_;
// TODO(user): add combobox for user to select different cookies
| handleUpdate_ | identifier_name |
ActiveScan.ts | import { ZapScanBase } from './ZapScanBase';
import { ScanResult } from './../interfaces/types/ScanResult';
import { ZapActiveScanOptions } from './../interfaces/types/ZapScan';
import { ZapScanType } from '../enums/Enums';
import { TaskInput } from './TaskInput';
export class ActiveScan extends ZapScanBase {
zapScanType: ZapScanType = ZapScanType.Active;
private _scanOptions: ZapActiveScanOptions;
constructor(taskInputs: TaskInput) {
super(taskInputs);
/* Set Scan Type for Logging */
this.scanType = 'Active Scan';
/* Active Scan Options */
this._scanOptions = {
apikey: this.taskInputs.ZapApiKey,
url: this.taskInputs.TargetUrl,
contextId: this.taskInputs.ContextId,
method: this.taskInputs.Method,
inScopeOnly: String(this.taskInputs.InScopeOnly),
recurse: String(this.taskInputs.Recurse),
scanPolicyName: this.taskInputs.ScanPolicyName,
postData: this.taskInputs.PostData,
zapapiformat: 'JSON',
formMethod: 'GET' | // tslint:disable-next-line:no-http-string
uri: `http://${this.taskInputs.ZapApiUrl}/JSON/ascan/action/scan/`,
qs: this._scanOptions
};
}
ExecuteScan(): Promise<ScanResult> {
return super.ExecuteScan();
}
} | };
/* Scan Request Options */
this.requestOptions = { | random_line_split |
ActiveScan.ts | import { ZapScanBase } from './ZapScanBase';
import { ScanResult } from './../interfaces/types/ScanResult';
import { ZapActiveScanOptions } from './../interfaces/types/ZapScan';
import { ZapScanType } from '../enums/Enums';
import { TaskInput } from './TaskInput';
export class ActiveScan extends ZapScanBase {
zapScanType: ZapScanType = ZapScanType.Active;
private _scanOptions: ZapActiveScanOptions;
constructor(taskInputs: TaskInput) {
super(taskInputs);
/* Set Scan Type for Logging */
this.scanType = 'Active Scan';
/* Active Scan Options */
this._scanOptions = {
apikey: this.taskInputs.ZapApiKey,
url: this.taskInputs.TargetUrl,
contextId: this.taskInputs.ContextId,
method: this.taskInputs.Method,
inScopeOnly: String(this.taskInputs.InScopeOnly),
recurse: String(this.taskInputs.Recurse),
scanPolicyName: this.taskInputs.ScanPolicyName,
postData: this.taskInputs.PostData,
zapapiformat: 'JSON',
formMethod: 'GET'
};
/* Scan Request Options */
this.requestOptions = {
// tslint:disable-next-line:no-http-string
uri: `http://${this.taskInputs.ZapApiUrl}/JSON/ascan/action/scan/`,
qs: this._scanOptions
};
}
| (): Promise<ScanResult> {
return super.ExecuteScan();
}
} | ExecuteScan | identifier_name |
ActiveScan.ts | import { ZapScanBase } from './ZapScanBase';
import { ScanResult } from './../interfaces/types/ScanResult';
import { ZapActiveScanOptions } from './../interfaces/types/ZapScan';
import { ZapScanType } from '../enums/Enums';
import { TaskInput } from './TaskInput';
export class ActiveScan extends ZapScanBase {
zapScanType: ZapScanType = ZapScanType.Active;
private _scanOptions: ZapActiveScanOptions;
constructor(taskInputs: TaskInput) |
ExecuteScan(): Promise<ScanResult> {
return super.ExecuteScan();
}
} | {
super(taskInputs);
/* Set Scan Type for Logging */
this.scanType = 'Active Scan';
/* Active Scan Options */
this._scanOptions = {
apikey: this.taskInputs.ZapApiKey,
url: this.taskInputs.TargetUrl,
contextId: this.taskInputs.ContextId,
method: this.taskInputs.Method,
inScopeOnly: String(this.taskInputs.InScopeOnly),
recurse: String(this.taskInputs.Recurse),
scanPolicyName: this.taskInputs.ScanPolicyName,
postData: this.taskInputs.PostData,
zapapiformat: 'JSON',
formMethod: 'GET'
};
/* Scan Request Options */
this.requestOptions = {
// tslint:disable-next-line:no-http-string
uri: `http://${this.taskInputs.ZapApiUrl}/JSON/ascan/action/scan/`,
qs: this._scanOptions
};
} | identifier_body |
swift.py | '''
Add in /edx/app/edxapp/edx-platform/lms/envs/aws.py:
ORA2_SWIFT_URL = AUTH_TOKENS["ORA2_SWIFT_URL"]
ORA2_SWIFT_KEY = AUTH_TOKENS["ORA2_SWIFT_KEY"]
Add in /edx/app/edxapp/lms.auth.json
"ORA2_SWIFT_URL": "https://EXAMPLE",
"ORA2_SWIFT_KEY": "EXAMPLE",
ORA2_SWIFT_KEY should correspond to Meta Temp-Url-Key configure in swift. Run
'swift stat -v' to get it.
'''
import logging
import urlparse
import requests
import swiftclient
from django.conf import settings
from ..exceptions import FileUploadInternalError
from .base import BaseBackend
logger = logging.getLogger("openassessment.fileupload.api")
# prefix paths with current version, in case we need to roll it at some point
SWIFT_BACKEND_VERSION = 1
class Backend(BaseBackend):
"""
Upload openassessment student files to swift
"""
def get_upload_url(self, key, content_type):
|
def get_download_url(self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='GET',
seconds=self.DOWNLOAD_URL_TIMEOUT
)
download_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.get(download_url)
return download_url if response.status_code == 200 else ""
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating a download URL."
)
raise FileUploadInternalError(ex)
def remove_file(self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='%s/%s/%s' % (url.path, bucket_name, key_name),
key=key,
method='DELETE',
seconds=self.DOWNLOAD_URL_TIMEOUT)
remove_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.delete(remove_url)
return response.status_code == 204
except Exception as ex:
logger.exception(
u"An internal exception occurred while removing object on swift storage."
)
raise FileUploadInternalError(ex)
def get_settings():
"""
Returns the swift key and a parsed url.
Both are generated from django settings.
"""
url = getattr(settings, 'ORA2_SWIFT_URL', None)
key = getattr(settings, 'ORA2_SWIFT_KEY', None)
url = urlparse.urlparse(url)
return key, url
| bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='PUT',
seconds=self.UPLOAD_URL_TIMEOUT
)
return '%s://%s%s' % (url.scheme, url.netloc, temp_url)
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating an upload URL."
)
raise FileUploadInternalError(ex) | identifier_body |
swift.py | '''
Add in /edx/app/edxapp/edx-platform/lms/envs/aws.py:
ORA2_SWIFT_URL = AUTH_TOKENS["ORA2_SWIFT_URL"]
ORA2_SWIFT_KEY = AUTH_TOKENS["ORA2_SWIFT_KEY"]
Add in /edx/app/edxapp/lms.auth.json
"ORA2_SWIFT_URL": "https://EXAMPLE",
"ORA2_SWIFT_KEY": "EXAMPLE",
ORA2_SWIFT_KEY should correspond to Meta Temp-Url-Key configure in swift. Run
'swift stat -v' to get it.
'''
import logging
import urlparse
import requests
import swiftclient
from django.conf import settings
from ..exceptions import FileUploadInternalError
from .base import BaseBackend
logger = logging.getLogger("openassessment.fileupload.api")
# prefix paths with current version, in case we need to roll it at some point
SWIFT_BACKEND_VERSION = 1
class Backend(BaseBackend):
"""
Upload openassessment student files to swift
"""
def get_upload_url(self, key, content_type):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='PUT',
seconds=self.UPLOAD_URL_TIMEOUT
)
return '%s://%s%s' % (url.scheme, url.netloc, temp_url)
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating an upload URL."
)
raise FileUploadInternalError(ex)
def get_download_url(self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='GET',
seconds=self.DOWNLOAD_URL_TIMEOUT
)
download_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.get(download_url)
return download_url if response.status_code == 200 else ""
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating a download URL."
)
raise FileUploadInternalError(ex)
def | (self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='%s/%s/%s' % (url.path, bucket_name, key_name),
key=key,
method='DELETE',
seconds=self.DOWNLOAD_URL_TIMEOUT)
remove_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.delete(remove_url)
return response.status_code == 204
except Exception as ex:
logger.exception(
u"An internal exception occurred while removing object on swift storage."
)
raise FileUploadInternalError(ex)
def get_settings():
"""
Returns the swift key and a parsed url.
Both are generated from django settings.
"""
url = getattr(settings, 'ORA2_SWIFT_URL', None)
key = getattr(settings, 'ORA2_SWIFT_KEY', None)
url = urlparse.urlparse(url)
return key, url
| remove_file | identifier_name |
swift.py | '''
Add in /edx/app/edxapp/edx-platform/lms/envs/aws.py:
ORA2_SWIFT_URL = AUTH_TOKENS["ORA2_SWIFT_URL"]
ORA2_SWIFT_KEY = AUTH_TOKENS["ORA2_SWIFT_KEY"]
Add in /edx/app/edxapp/lms.auth.json
"ORA2_SWIFT_URL": "https://EXAMPLE",
"ORA2_SWIFT_KEY": "EXAMPLE",
ORA2_SWIFT_KEY should correspond to Meta Temp-Url-Key configure in swift. Run
'swift stat -v' to get it.
'''
import logging
import urlparse
import requests
import swiftclient
from django.conf import settings
from ..exceptions import FileUploadInternalError
from .base import BaseBackend
logger = logging.getLogger("openassessment.fileupload.api")
# prefix paths with current version, in case we need to roll it at some point
SWIFT_BACKEND_VERSION = 1
class Backend(BaseBackend):
"""
Upload openassessment student files to swift
"""
def get_upload_url(self, key, content_type):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='PUT',
seconds=self.UPLOAD_URL_TIMEOUT
)
return '%s://%s%s' % (url.scheme, url.netloc, temp_url)
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating an upload URL."
)
raise FileUploadInternalError(ex)
def get_download_url(self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='/v%s%s/%s/%s' % (SWIFT_BACKEND_VERSION, url.path, bucket_name, key_name),
key=key,
method='GET',
seconds=self.DOWNLOAD_URL_TIMEOUT
)
download_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.get(download_url)
return download_url if response.status_code == 200 else ""
except Exception as ex:
logger.exception(
u"An internal exception occurred while generating a download URL."
)
raise FileUploadInternalError(ex)
def remove_file(self, key):
bucket_name, key_name = self._retrieve_parameters(key)
key, url = get_settings()
try:
temp_url = swiftclient.utils.generate_temp_url(
path='%s/%s/%s' % (url.path, bucket_name, key_name),
key=key,
method='DELETE',
seconds=self.DOWNLOAD_URL_TIMEOUT)
remove_url = '%s://%s%s' % (url.scheme, url.netloc, temp_url)
response = requests.delete(remove_url)
return response.status_code == 204
except Exception as ex:
logger.exception(
u"An internal exception occurred while removing object on swift storage."
)
raise FileUploadInternalError(ex)
def get_settings():
"""
Returns the swift key and a parsed url.
Both are generated from django settings.
"""
url = getattr(settings, 'ORA2_SWIFT_URL', None) | return key, url | key = getattr(settings, 'ORA2_SWIFT_KEY', None)
url = urlparse.urlparse(url) | random_line_split |
dayzed-tests.tsx | import * as React from 'react';
import { render } from 'react-dom';
import Dayzed, { DateObj } from 'dayzed';
interface State {
selectedDate: Date;
monthOffset: number;
}
class App extends React.Component<{}, State> {
state = {
selectedDate: new Date(),
monthOffset: 0, | };
handleSetDate = (dateObj: DateObj) => {
this.setState({ selectedDate: dateObj.date });
}
render() {
return (
<Dayzed
selected={this.state.selectedDate}
offset={this.state.monthOffset}
onDateSelected={this.handleSetDate}
>
{({ calendars, ...rp }) => calendars.map(cal => (
<div>
Calendar:
{cal.weeks.map(week => (
<div>
Week:
{week.map(day => day && (
<span {...rp.getDateProps({ dateObj: day })}>Day({day.date.getDate()}):</span>
))}
</div>
))}
</div>
))}
</Dayzed>
);
}
}
render(<App />, document.body); | random_line_split |
|
app.js | 'use strict';
// Declare app level module which depends on views, and components
angular.module('scenarioEditor', [
'ngRoute',
'scenarioEditor.charView',
'scenarioEditor.lineView',
'scenarioEditor.convoView',
'scenarioEditor.version'
])
.config(['$routeProvider', function($routeProvider) {
$routeProvider.otherwise({redirectTo: '/charView'});
}])
.service('charService', function () {
var charData = [];
var charId = 0;
var currChar = 0;
return {
chars:function () {
return charData;
},
addChar:function () {
charId++;
charData.push({'id':charId,'name':'','states':[] });
},
deleteChar:function (character) {
charData.splice(charData.indexOf(character),1);
},
editChar:function (character) {
currChar = character.id;
},
getCurrChar:function () {
return currChar;
},
addStateToChar:function (character,id) {
charData[charData.indexOf(character)].states.push({'id':id,'name':'','convoId':0});
},
getStatesLength:function (character) {
return charData[charData.indexOf(character)].states.length;
}
};
})
.service('convoService', function () {
var convoData = [
{'id':0,'name':'Conversation 0'}
];
var currConversation = 0;
return {
conversations:function () {
return convoData;
},
addConversation:function () {
currConversation++;
convoData.push({'id':currConversation,'name':'Conversation '+currConversation});
},
editConversation:function (convo) {
//TODO: Make this work
},
deleteConversation:function (convo) {
convoData.splice(convoData.indexOf(convo),1);
}
};
})
.service('lineService', function () {
var lineData = [
{'id':0,'character':'','text':''}
];
var currLine = 0;
return {
lines:function () {
return lineData;
},
addLine:function () {
currLine++;
lineData.push({'id':currLine,'character':'',text:''});
},
deleteLine:function (character) {
lineData.splice(lineData.indexOf(character),1);
}
};
});
var scenarioEditor = angular.module('scenarioEditor');
scenarioEditor.controller('EditorCtrl', ['$scope', '$http', 'convoService', 'charService', 'lineService',
function ($scope,$http,convoService,charService,lineService) {
// ABSTRACTION LAYER
$scope.getChars = function () {
return charService.chars();
};
$scope.getConvos = function () {
return convoService.conversations();
};
$scope.getLines = function () {
return lineService.lines();
};
// CHECK FOR CHANGES
$scope.$watch('getChars()', function() { $scope.msg = '*'; $scope.dlVisible = false; }, true);
$scope.$watch('getConvos()', function() { $scope.msg = '*'; $scope.dlVisible = false; }, true);
$scope.$watch('getLines()', function() { $scope.msg = '*'; $scope.dlVisible = false; }, true);
// SAVE JSON FILE
$scope.dlVisible = false;
$scope.save = function() {
$scope.dataObj = {
characters : $scope.getChars(),
conversations : $scope.getConvos()
};
$http.post('postHandler.php', angular.toJson($scope.dataObj)).then(function(data) {
$scope.msg = 'Data saved.';
$scope.dlVisible = true;
});
| }
]); | $scope.msg2 = 'Data sent: '+ $scope.jsonData;
}; | random_line_split |
RelayDefaultNetworkLayer.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule RelayDefaultNetworkLayer
* @typechecks
* @flow
*/
'use strict';
var Promise = require('Promise');
import type RelayMutationRequest from 'RelayMutationRequest';
import type RelayQueryRequest from 'RelayQueryRequest';
var fetchWithRetries = require('fetchWithRetries');
import type {InitWithRetries} from 'fetchWithRetries';
type GraphQLError = {
message: string;
locations: Array<GraphQLErrorLocation>;
};
type GraphQLErrorLocation = {
column: number;
line: number;
};
class RelayDefaultNetworkLayer {
_uri: string;
_init: $FlowIssue; // InitWithRetries
constructor(uri: string, init?: ?InitWithRetries) {
this._uri = uri;
this._init = {...init};
// Bind instance methods to facilitate reuse when creating custom network
// layers.
var self: any = this;
self.sendMutation = this.sendMutation.bind(this);
self.sendQueries = this.sendQueries.bind(this);
self.supports = this.supports.bind(this);
}
sendMutation(request: RelayMutationRequest): Promise {
return this._sendMutation(request).then(
result => result.json()
).then(payload => {
if (payload.hasOwnProperty('errors')) {
var error = new Error(
'Server request for mutation `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else {
request.resolve({response: payload.data});
}
}).catch(
error => request.reject(error)
);
}
sendQueries(requests: Array<RelayQueryRequest>): Promise {
return Promise.all(requests.map(request => (
this._sendQuery(request).then(
result => result.json()
).then(payload => {
if (payload.hasOwnProperty('errors')) {
var error = new Error(
'Server request for query `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else if (!payload.hasOwnProperty('data')) {
request.reject(new Error(
'Server response was missing for query `' + request.getDebugName() +
'`.'
));
} else {
request.resolve({response: payload.data});
}
}).catch(
error => request.reject(error)
)
)));
}
supports(...options: Array<string>): boolean {
// Does not support the only defined option, "defer".
return false;
}
/**
* Sends a POST request with optional files.
*/
_sendMutation(request: RelayMutationRequest): Promise {
var init;
var files = request.getFiles();
if (files) {
if (!global.FormData) {
throw new Error('Uploading files without `FormData` not supported.');
}
var formData = new FormData();
formData.append('query', request.getQueryString());
formData.append('variables', JSON.stringify(request.getVariables()));
for (var filename in files) {
if (files.hasOwnProperty(filename)) {
formData.append(filename, files[filename]);
}
}
init = {
...this._init,
body: formData,
method: 'POST',
};
} else {
init = {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
};
}
return fetch(this._uri, init).then(throwOnServerError);
}
/**
* Sends a POST request and retries if the request fails or times out.
*/
_sendQuery(request: RelayQueryRequest): Promise {
return fetchWithRetries(this._uri, {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
});
}
}
/**
* Rejects HTTP responses with a status code that is not >= 200 and < 300.
* This is done to follow the internal behavior of `fetchWithRetries`.
*/
function throwOnServerError(response: any): any {
if (response.status >= 200 && response.status < 300) {
return response;
} else {
throw response;
}
}
/**
* Formats an error response from GraphQL server request.
*/
function formatRequestErrors(
request: RelayMutationRequest | RelayQueryRequest,
errors: Array<GraphQLError>
): string {
var CONTEXT_BEFORE = 20;
var CONTEXT_LENGTH = 60;
var queryLines = request.getQueryString().split('\n');
return errors.map(({locations, message}, ii) => {
var prefix = (ii + 1) + '. ';
var indent = ' '.repeat(prefix.length);
//custom errors thrown in graphql-server may not have locations
var locationMessage = locations ?
('\n' + locations.map(({column, line}) => {
var queryLine = queryLines[line - 1];
var offset = Math.min(column - 1, CONTEXT_BEFORE); | return [
queryLine.substr(column - 1 - offset, CONTEXT_LENGTH),
' '.repeat(offset) + '^^^'
].map(messageLine => indent + messageLine).join('\n');
}).join('\n')) :
'';
return prefix + message + locationMessage;
}).join('\n');
}
module.exports = RelayDefaultNetworkLayer; | random_line_split |
|
RelayDefaultNetworkLayer.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule RelayDefaultNetworkLayer
* @typechecks
* @flow
*/
'use strict';
var Promise = require('Promise');
import type RelayMutationRequest from 'RelayMutationRequest';
import type RelayQueryRequest from 'RelayQueryRequest';
var fetchWithRetries = require('fetchWithRetries');
import type {InitWithRetries} from 'fetchWithRetries';
type GraphQLError = {
message: string;
locations: Array<GraphQLErrorLocation>;
};
type GraphQLErrorLocation = {
column: number;
line: number;
};
class RelayDefaultNetworkLayer {
_uri: string;
_init: $FlowIssue; // InitWithRetries
constructor(uri: string, init?: ?InitWithRetries) {
this._uri = uri;
this._init = {...init};
// Bind instance methods to facilitate reuse when creating custom network
// layers.
var self: any = this;
self.sendMutation = this.sendMutation.bind(this);
self.sendQueries = this.sendQueries.bind(this);
self.supports = this.supports.bind(this);
}
sendMutation(request: RelayMutationRequest): Promise {
return this._sendMutation(request).then(
result => result.json()
).then(payload => {
if (payload.hasOwnProperty('errors')) {
var error = new Error(
'Server request for mutation `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else {
request.resolve({response: payload.data});
}
}).catch(
error => request.reject(error)
);
}
sendQueries(requests: Array<RelayQueryRequest>): Promise {
return Promise.all(requests.map(request => (
this._sendQuery(request).then(
result => result.json()
).then(payload => {
if (payload.hasOwnProperty('errors')) {
var error = new Error(
'Server request for query `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else if (!payload.hasOwnProperty('data')) {
request.reject(new Error(
'Server response was missing for query `' + request.getDebugName() +
'`.'
));
} else |
}).catch(
error => request.reject(error)
)
)));
}
supports(...options: Array<string>): boolean {
// Does not support the only defined option, "defer".
return false;
}
/**
* Sends a POST request with optional files.
*/
_sendMutation(request: RelayMutationRequest): Promise {
var init;
var files = request.getFiles();
if (files) {
if (!global.FormData) {
throw new Error('Uploading files without `FormData` not supported.');
}
var formData = new FormData();
formData.append('query', request.getQueryString());
formData.append('variables', JSON.stringify(request.getVariables()));
for (var filename in files) {
if (files.hasOwnProperty(filename)) {
formData.append(filename, files[filename]);
}
}
init = {
...this._init,
body: formData,
method: 'POST',
};
} else {
init = {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
};
}
return fetch(this._uri, init).then(throwOnServerError);
}
/**
* Sends a POST request and retries if the request fails or times out.
*/
_sendQuery(request: RelayQueryRequest): Promise {
return fetchWithRetries(this._uri, {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
});
}
}
/**
* Rejects HTTP responses with a status code that is not >= 200 and < 300.
* This is done to follow the internal behavior of `fetchWithRetries`.
*/
function throwOnServerError(response: any): any {
if (response.status >= 200 && response.status < 300) {
return response;
} else {
throw response;
}
}
/**
* Formats an error response from GraphQL server request.
*/
function formatRequestErrors(
  request: RelayMutationRequest | RelayQueryRequest,
  errors: Array<GraphQLError>
): string {
  // Amount of query-text context kept before the error column, and the
  // total width of the quoted snippet.
  var CONTEXT_BEFORE = 20;
  var CONTEXT_LENGTH = 60;
  var queryLines = request.getQueryString().split('\n');
  return errors.map(({locations, message}, ii) => {
    var prefix = (ii + 1) + '. ';
    var indent = ' '.repeat(prefix.length);
    // Custom errors thrown in graphql-server may not have locations.
    var locationMessage = locations ?
      ('\n' + locations.map(({column, line}) => {
        var queryLine = queryLines[line - 1];
        // Clamp the leading context so the `^^^` marker lines up with the
        // error column even near the start of a line.
        var offset = Math.min(column - 1, CONTEXT_BEFORE);
        return [
          queryLine.substr(column - 1 - offset, CONTEXT_LENGTH),
          ' '.repeat(offset) + '^^^'
        ].map(messageLine => indent + messageLine).join('\n');
      }).join('\n')) :
      '';
    return prefix + message + locationMessage;
  }).join('\n');
}
module.exports = RelayDefaultNetworkLayer;
| {
request.resolve({response: payload.data});
} | conditional_block |
SANSDiagnosticPageTest.py | # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import stresstesting
import mantid
from sans.state.data import get_data_builder
from sans.common.enums import (DetectorType, SANSFacility, IntegralEnum)
from sans.user_file.state_director import StateDirectorISIS
from sans.common.constants import EMPTY_NAME
from sans.common.general_functions import create_unmanaged_algorithm
from sans.gui_logic.models.diagnostics_page_model import run_integral
# -----------------------------------------------
# Tests for the SANSDiagnosticPage
# -----------------------------------------------
class SANSDiagnosticPageTest(unittest.TestCase):
    """System tests for the SANS diagnostic-page integral calculation
    (``run_integral``), comparing its output against stored reference
    workspaces."""

    def _compare_workspace(self, workspace, reference_file_name):
        """Asserts that ``workspace`` matches the reference workspace stored
        in ``reference_file_name``.

        The output workspace is saved to disk and reloaded before comparison
        so that it undergoes the same serialisation round trip as the
        reference data; the temporary file is removed afterwards.
        """
        # Load the reference file
        load_name = "LoadNexusProcessed"
        load_options = {"Filename": reference_file_name,
                        "OutputWorkspace": EMPTY_NAME}
        load_alg = create_unmanaged_algorithm(load_name, **load_options)
        load_alg.execute()
        reference_workspace = load_alg.getProperty("OutputWorkspace").value
        # Save the workspace out and reload it again. This equalizes it with the reference workspace
        f_name = os.path.join(mantid.config.getString('defaultsave.directory'),
                              'SANS_temp_single_core_reduction_testout.nxs')
        save_name = "SaveNexus"
        save_options = {"Filename": f_name,
                        "InputWorkspace": workspace}
        save_alg = create_unmanaged_algorithm(save_name, **save_options)
        save_alg.execute()
        load_alg.setProperty("Filename", f_name)
        load_alg.setProperty("OutputWorkspace", EMPTY_NAME)
        load_alg.execute()
        ws = load_alg.getProperty("OutputWorkspace").value
        # Compare reference file with the output_workspace
        # We need to disable the instrument comparison, it takes way too long
        # We need to disable the sample -- since the sample has been modified (more logs are being written)
        # operation how many entries can be found in the sample logs
        compare_name = "CompareWorkspaces"
        compare_options = {"Workspace1": ws,
                           "Workspace2": reference_workspace,
                           "Tolerance": 1e-6,
                           "CheckInstrument": False,
                           "CheckSample": False,
                           "ToleranceRelErr": True,
                           "CheckAllData": True,
                           "CheckMasking": True,
                           "CheckType": True,
                           "CheckAxes": True,
                           "CheckSpectraMap": True}
        compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
        compare_alg.setChild(False)
        compare_alg.execute()
        result = compare_alg.getProperty("Result").value
        self.assertTrue(result)
        # Remove file
        if os.path.exists(f_name):
            os.remove(f_name)

    def test_that_produces_correct_workspace_for_SANS2D(self):
        """Runs a horizontal integral over the SANS2D LAB detector and checks
        the result against a stored reference workspace."""
        # Arrange
        # Build the data information
        data_builder = get_data_builder(SANSFacility.ISIS)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_state = data_builder.build()
        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_state)
        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY BEGIN -- Remove when appropriate
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        user_file_director.set_compatibility_builder_use_compatibility_mode(True)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY END
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # Construct the final state
        state = user_file_director.construct()
        # Act
        output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
        # Evaluate it up to a defined point
        reference_file_name = "SANS2D_ws_diagnostic_reference.nxs"
        self._compare_workspace(output_workspaces[0], reference_file_name)

    def test_that_produces_correct_workspace_multiperiod_LARMOR(self):
        """Runs a horizontal integral over a multi-period LARMOR run and
        checks the result against a stored reference workspace."""
        # Arrange
        # Build the data information
        data_builder = get_data_builder(SANSFacility.ISIS)
        data_builder.set_sample_scatter("LARMOR00013065")
        data_builder.set_calibration("80tubeCalibration_1-05-2015_r3157-3160.nxs")
        data_state = data_builder.build()
        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_state)
        user_file_director.set_user_file("USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt")
        # Construct the final state
        state = user_file_director.construct()
        # Act
        output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
        # Evaluate it up to a defined point
        reference_file_name = "LARMOR_ws_diagnostic_reference.nxs"
        self._compare_workspace(output_workspaces[0], reference_file_name)
class SANSDiagnosticPageRunnerTest(stresstesting.MantidStressTest):
    """Stress-test harness that runs the SANSDiagnosticPageTest unit suite."""
    def __init__(self):
        stresstesting.MantidStressTest.__init__(self)
        # Set to True only when the embedded unittest suite passes.
        self._success = False

    def runTest(self):
        suite = unittest.TestSuite()
        suite.addTest(unittest.makeSuite(SANSDiagnosticPageTest, 'test'))
        runner = unittest.TextTestRunner()
        res = runner.run(suite)
        if res.wasSuccessful():
            self._success = True

    def requiredMemoryMB(self):
        return 2000

    def validate(self):
        # NOTE(repair): the body of validate() was missing (extraction
        # artifact); restored per the intact copies of this class, which
        # report whether the embedded suite succeeded.
        return self._success
if __name__ == '__main__':
unittest.main()
| return self._success | identifier_body |
SANSDiagnosticPageTest.py | # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import stresstesting
import mantid
from sans.state.data import get_data_builder
from sans.common.enums import (DetectorType, SANSFacility, IntegralEnum)
from sans.user_file.state_director import StateDirectorISIS
from sans.common.constants import EMPTY_NAME
from sans.common.general_functions import create_unmanaged_algorithm
from sans.gui_logic.models.diagnostics_page_model import run_integral
# -----------------------------------------------
# Tests for the SANSDiagnosticPage
# -----------------------------------------------
class SANSDiagnosticPageTest(unittest.TestCase):
def _compare_workspace(self, workspace, reference_file_name):
# Load the reference file
load_name = "LoadNexusProcessed"
load_options = {"Filename": reference_file_name,
"OutputWorkspace": EMPTY_NAME}
load_alg = create_unmanaged_algorithm(load_name, **load_options)
load_alg.execute()
reference_workspace = load_alg.getProperty("OutputWorkspace").value
# Save the workspace out and reload it again. This equalizes it with the reference workspace
f_name = os.path.join(mantid.config.getString('defaultsave.directory'),
'SANS_temp_single_core_reduction_testout.nxs')
save_name = "SaveNexus"
save_options = {"Filename": f_name,
"InputWorkspace": workspace}
save_alg = create_unmanaged_algorithm(save_name, **save_options)
save_alg.execute()
load_alg.setProperty("Filename", f_name)
load_alg.setProperty("OutputWorkspace", EMPTY_NAME)
load_alg.execute()
ws = load_alg.getProperty("OutputWorkspace").value
# Compare reference file with the output_workspace
# We need to disable the instrument comparison, it takes way too long
# We need to disable the sample -- since the sample has been modified (more logs are being written)
# operation how many entries can be found in the sample logs
compare_name = "CompareWorkspaces"
compare_options = {"Workspace1": ws,
"Workspace2": reference_workspace,
"Tolerance": 1e-6,
"CheckInstrument": False,
"CheckSample": False,
"ToleranceRelErr": True,
"CheckAllData": True,
"CheckMasking": True,
"CheckType": True,
"CheckAxes": True,
"CheckSpectraMap": True}
compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
compare_alg.setChild(False)
compare_alg.execute()
result = compare_alg.getProperty("Result").value
self.assertTrue(result)
# Remove file
if os.path.exists(f_name):
os.remove(f_name)
def test_that_produces_correct_workspace_for_SANS2D(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("SANS2D00034484")
data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY BEGIN -- Remove when appropriate
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Since we are dealing with event based data but we want to compare it with histogram data from the
# old reduction system we need to enable the compatibility mode
user_file_director.set_compatibility_builder_use_compatibility_mode(True)
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY END
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
# Evaluate it up to a defined point
reference_file_name = "SANS2D_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name)
def test_that_produces_correct_workspace_multiperiod_LARMOR(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("LARMOR00013065")
data_builder.set_calibration("80tubeCalibration_1-05-2015_r3157-3160.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt")
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state) |
class SANSDiagnosticPageRunnerTest(stresstesting.MantidStressTest):
    """Stress-test harness that runs the SANSDiagnosticPageTest unit suite."""
    def __init__(self):
        stresstesting.MantidStressTest.__init__(self)
        # Set to True only when the embedded unittest suite passes.
        self._success = False
    def runTest(self):
        # Run the unittest suite and record its overall outcome.
        suite = unittest.TestSuite()
        suite.addTest(unittest.makeSuite(SANSDiagnosticPageTest, 'test'))
        runner = unittest.TextTestRunner()
        res = runner.run(suite)
        if res.wasSuccessful():
            self._success = True
    def requiredMemoryMB(self):
        return 2000
    def validate(self):
        # Reports the outcome recorded by runTest().
        return self._success
if __name__ == '__main__':
unittest.main() |
# Evaluate it up to a defined point
reference_file_name = "LARMOR_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name) | random_line_split |
SANSDiagnosticPageTest.py | # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import stresstesting
import mantid
from sans.state.data import get_data_builder
from sans.common.enums import (DetectorType, SANSFacility, IntegralEnum)
from sans.user_file.state_director import StateDirectorISIS
from sans.common.constants import EMPTY_NAME
from sans.common.general_functions import create_unmanaged_algorithm
from sans.gui_logic.models.diagnostics_page_model import run_integral
# -----------------------------------------------
# Tests for the SANSDiagnosticPage
# -----------------------------------------------
class SANSDiagnosticPageTest(unittest.TestCase):
def _compare_workspace(self, workspace, reference_file_name):
# Load the reference file
load_name = "LoadNexusProcessed"
load_options = {"Filename": reference_file_name,
"OutputWorkspace": EMPTY_NAME}
load_alg = create_unmanaged_algorithm(load_name, **load_options)
load_alg.execute()
reference_workspace = load_alg.getProperty("OutputWorkspace").value
# Save the workspace out and reload it again. This equalizes it with the reference workspace
f_name = os.path.join(mantid.config.getString('defaultsave.directory'),
'SANS_temp_single_core_reduction_testout.nxs')
save_name = "SaveNexus"
save_options = {"Filename": f_name,
"InputWorkspace": workspace}
save_alg = create_unmanaged_algorithm(save_name, **save_options)
save_alg.execute()
load_alg.setProperty("Filename", f_name)
load_alg.setProperty("OutputWorkspace", EMPTY_NAME)
load_alg.execute()
ws = load_alg.getProperty("OutputWorkspace").value
# Compare reference file with the output_workspace
# We need to disable the instrument comparison, it takes way too long
# We need to disable the sample -- since the sample has been modified (more logs are being written)
# operation how many entries can be found in the sample logs
compare_name = "CompareWorkspaces"
compare_options = {"Workspace1": ws,
"Workspace2": reference_workspace,
"Tolerance": 1e-6,
"CheckInstrument": False,
"CheckSample": False,
"ToleranceRelErr": True,
"CheckAllData": True,
"CheckMasking": True,
"CheckType": True,
"CheckAxes": True,
"CheckSpectraMap": True}
compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
compare_alg.setChild(False)
compare_alg.execute()
result = compare_alg.getProperty("Result").value
self.assertTrue(result)
# Remove file
if os.path.exists(f_name):
|
def test_that_produces_correct_workspace_for_SANS2D(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("SANS2D00034484")
data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY BEGIN -- Remove when appropriate
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Since we are dealing with event based data but we want to compare it with histogram data from the
# old reduction system we need to enable the compatibility mode
user_file_director.set_compatibility_builder_use_compatibility_mode(True)
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY END
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
# Evaluate it up to a defined point
reference_file_name = "SANS2D_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name)
def test_that_produces_correct_workspace_multiperiod_LARMOR(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("LARMOR00013065")
data_builder.set_calibration("80tubeCalibration_1-05-2015_r3157-3160.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt")
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
# Evaluate it up to a defined point
reference_file_name = "LARMOR_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name)
class SANSDiagnosticPageRunnerTest(stresstesting.MantidStressTest):
def __init__(self):
stresstesting.MantidStressTest.__init__(self)
self._success = False
def runTest(self):
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(SANSDiagnosticPageTest, 'test'))
runner = unittest.TextTestRunner()
res = runner.run(suite)
if res.wasSuccessful():
self._success = True
def requiredMemoryMB(self):
return 2000
def validate(self):
return self._success
if __name__ == '__main__':
unittest.main()
| os.remove(f_name) | conditional_block |
SANSDiagnosticPageTest.py | # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import stresstesting
import mantid
from sans.state.data import get_data_builder
from sans.common.enums import (DetectorType, SANSFacility, IntegralEnum)
from sans.user_file.state_director import StateDirectorISIS
from sans.common.constants import EMPTY_NAME
from sans.common.general_functions import create_unmanaged_algorithm
from sans.gui_logic.models.diagnostics_page_model import run_integral
# -----------------------------------------------
# Tests for the SANSDiagnosticPage
# -----------------------------------------------
class SANSDiagnosticPageTest(unittest.TestCase):
def _compare_workspace(self, workspace, reference_file_name):
# Load the reference file
load_name = "LoadNexusProcessed"
load_options = {"Filename": reference_file_name,
"OutputWorkspace": EMPTY_NAME}
load_alg = create_unmanaged_algorithm(load_name, **load_options)
load_alg.execute()
reference_workspace = load_alg.getProperty("OutputWorkspace").value
# Save the workspace out and reload it again. This equalizes it with the reference workspace
f_name = os.path.join(mantid.config.getString('defaultsave.directory'),
'SANS_temp_single_core_reduction_testout.nxs')
save_name = "SaveNexus"
save_options = {"Filename": f_name,
"InputWorkspace": workspace}
save_alg = create_unmanaged_algorithm(save_name, **save_options)
save_alg.execute()
load_alg.setProperty("Filename", f_name)
load_alg.setProperty("OutputWorkspace", EMPTY_NAME)
load_alg.execute()
ws = load_alg.getProperty("OutputWorkspace").value
# Compare reference file with the output_workspace
# We need to disable the instrument comparison, it takes way too long
# We need to disable the sample -- since the sample has been modified (more logs are being written)
# operation how many entries can be found in the sample logs
compare_name = "CompareWorkspaces"
compare_options = {"Workspace1": ws,
"Workspace2": reference_workspace,
"Tolerance": 1e-6,
"CheckInstrument": False,
"CheckSample": False,
"ToleranceRelErr": True,
"CheckAllData": True,
"CheckMasking": True,
"CheckType": True,
"CheckAxes": True,
"CheckSpectraMap": True}
compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
compare_alg.setChild(False)
compare_alg.execute()
result = compare_alg.getProperty("Result").value
self.assertTrue(result)
# Remove file
if os.path.exists(f_name):
os.remove(f_name)
def test_that_produces_correct_workspace_for_SANS2D(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("SANS2D00034484")
data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY BEGIN -- Remove when appropriate
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Since we are dealing with event based data but we want to compare it with histogram data from the
# old reduction system we need to enable the compatibility mode
user_file_director.set_compatibility_builder_use_compatibility_mode(True)
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# COMPATIBILITY END
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
# Evaluate it up to a defined point
reference_file_name = "SANS2D_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name)
def test_that_produces_correct_workspace_multiperiod_LARMOR(self):
# Arrange
# Build the data information
data_builder = get_data_builder(SANSFacility.ISIS)
data_builder.set_sample_scatter("LARMOR00013065")
data_builder.set_calibration("80tubeCalibration_1-05-2015_r3157-3160.nxs")
data_state = data_builder.build()
# Get the rest of the state from the user file
user_file_director = StateDirectorISIS(data_state)
user_file_director.set_user_file("USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt")
# Construct the final state
state = user_file_director.construct()
# Act
output_workspaces = run_integral('', True, IntegralEnum.Horizontal, DetectorType.LAB, state)
# Evaluate it up to a defined point
reference_file_name = "LARMOR_ws_diagnostic_reference.nxs"
self._compare_workspace(output_workspaces[0], reference_file_name)
class SANSDiagnosticPageRunnerTest(stresstesting.MantidStressTest):
    """Stress-test harness that runs the SANSDiagnosticPageTest unit suite."""
    def __init__(self):
        stresstesting.MantidStressTest.__init__(self)
        # Set to True only when the embedded unittest suite passes.
        self._success = False

    def runTest(self):
        # NOTE(repair): the method identifier was missing (`def | (self):`
        # extraction artifact); restored as `runTest` per the intact copies
        # of this class earlier in the file.
        suite = unittest.TestSuite()
        suite.addTest(unittest.makeSuite(SANSDiagnosticPageTest, 'test'))
        runner = unittest.TextTestRunner()
        res = runner.run(suite)
        if res.wasSuccessful():
            self._success = True

    def requiredMemoryMB(self):
        return 2000

    def validate(self):
        return self._success
if __name__ == '__main__':
unittest.main()
| runTest | identifier_name |
fat_type.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Loaded representation for runtime types.
use diem_types::{account_address::AccountAddress, vm_status::StatusCode};
use move_core_types::{
identifier::Identifier,
language_storage::{StructTag, TypeTag},
value::{MoveStructLayout, MoveTypeLayout},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryInto;
use vm::{
errors::{PartialVMError, PartialVMResult},
file_format::AbilitySet,
};
/// Newtype that (de)serializes an `AbilitySet` as its underlying `u8` bitset.
#[derive(Debug, Clone, Copy)]
pub(crate) struct WrappedAbilitySet(pub AbilitySet);
impl Serialize for WrappedAbilitySet {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Serialize the raw ability bit flags.
        self.0.into_u8().serialize(serializer)
    }
}
impl<'de> Deserialize<'de> for WrappedAbilitySet {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let byte = u8::deserialize(deserializer)?;
        // Reject byte patterns that do not encode a valid ability set.
        Ok(WrappedAbilitySet(AbilitySet::from_u8(byte).ok_or_else(
            || serde::de::Error::custom(format!("Invalid ability set: {:X}", byte)),
        )?))
    }
}
/// VM representation of a struct type in Move.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct FatStructType {
    pub address: AccountAddress,      // account that published the module
    pub module: Identifier,           // defining module name
    pub name: Identifier,             // struct name
    pub abilities: WrappedAbilitySet, // ability set of the struct
    pub ty_args: Vec<FatType>,        // instantiated type arguments
    pub layout: Vec<FatType>,         // field types, in declaration order
}
/// VM runtime representation of a Move type.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) enum FatType {
    Bool,
    U8,
    U64,
    U128,
    Address,
    Signer,
    Vector(Box<FatType>),
    Struct(Box<FatStructType>),
    Reference(Box<FatType>),
    MutableReference(Box<FatType>),
    // Unsubstituted type parameter, by index (see `FatType::subst`).
    TyParam(usize),
}
impl FatStructType {
    /// Substitutes type parameters appearing in `ty_args`/`layout` with the
    /// given actual types, returning a fully instantiated copy.
    ///
    /// NOTE(repair): the struct literal below had lost its `name`,
    /// `abilities` and `ty_args:` lines to an extraction artifact
    /// (`module: self.module.clone(), |`); restored per the intact duplicate
    /// of this impl later in the file.
    pub fn subst(&self, ty_args: &[FatType]) -> PartialVMResult<FatStructType> {
        Ok(Self {
            address: self.address,
            module: self.module.clone(),
            name: self.name.clone(),
            abilities: self.abilities,
            ty_args: self
                .ty_args
                .iter()
                .map(|ty| ty.subst(ty_args))
                .collect::<PartialVMResult<_>>()?,
            layout: self
                .layout
                .iter()
                .map(|ty| ty.subst(ty_args))
                .collect::<PartialVMResult<_>>()?,
        })
    }

    /// Builds the language-storage `StructTag` for this struct type; fails if
    /// any type argument cannot be converted to a `TypeTag`.
    pub fn struct_tag(&self) -> PartialVMResult<StructTag> {
        let ty_args = self
            .ty_args
            .iter()
            .map(|ty| ty.type_tag())
            .collect::<PartialVMResult<Vec<_>>>()?;
        Ok(StructTag {
            address: self.address,
            module: self.module.clone(),
            name: self.name.clone(),
            type_params: ty_args,
        })
    }
}
impl FatType {
    /// Substitutes `TyParam(i)` with `ty_args[i]`, recursing through vectors,
    /// references and struct types. Fails with an invariant violation when an
    /// index is out of bounds.
    pub fn subst(&self, ty_args: &[FatType]) -> PartialVMResult<FatType> {
        use FatType::*;
        let res = match self {
            TyParam(idx) => match ty_args.get(*idx) {
                Some(ty) => ty.clone(),
                None => {
                    return Err(
                        PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
                            .with_message(format!(
                                "fat type substitution failed: index out of bounds -- len {} got {}",
                                ty_args.len(),
                                idx
                            )),
                    );
                }
            },
            Bool => Bool,
            U8 => U8,
            U64 => U64,
            U128 => U128,
            Address => Address,
            Signer => Signer,
            Vector(ty) => Vector(Box::new(ty.subst(ty_args)?)),
            Reference(ty) => Reference(Box::new(ty.subst(ty_args)?)),
            MutableReference(ty) => MutableReference(Box::new(ty.subst(ty_args)?)),
            Struct(struct_ty) => Struct(Box::new(struct_ty.subst(ty_args)?)),
        };
        Ok(res)
    }

    /// Derives the `TypeTag` for this type. References and unbound type
    /// parameters have no tag and produce an invariant-violation error.
    pub fn type_tag(&self) -> PartialVMResult<TypeTag> {
        use FatType::*;
        let res = match self {
            Bool => TypeTag::Bool,
            U8 => TypeTag::U8,
            U64 => TypeTag::U64,
            U128 => TypeTag::U128,
            Address => TypeTag::Address,
            Signer => TypeTag::Signer,
            Vector(ty) => TypeTag::Vector(Box::new(ty.type_tag()?)),
            Struct(struct_ty) => TypeTag::Struct(struct_ty.struct_tag()?),
            Reference(_) | MutableReference(_) | TyParam(_) => {
                return Err(
                    PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
                        .with_message(format!("cannot derive type tag for {:?}", self)),
                )
            }
        };
        Ok(res)
    }
}
impl TryInto<MoveStructLayout> for &FatStructType {
    type Error = PartialVMError;
    /// Converts the struct's field types into a serializable
    /// `MoveStructLayout`; fails if any field type has no layout.
    fn try_into(self) -> Result<MoveStructLayout, Self::Error> {
        Ok(MoveStructLayout::new(
            self.layout
                .iter()
                .map(|ty| ty.try_into())
                .collect::<PartialVMResult<Vec<_>>>()?,
        ))
    }
}
impl TryInto<MoveTypeLayout> for &FatType {
type Error = PartialVMError;
fn try_into(self) -> Result<MoveTypeLayout, Self::Error> {
Ok(match self {
FatType::Address => MoveTypeLayout::Address,
FatType::U8 => MoveTypeLayout::U8,
FatType::U64 => MoveTypeLayout::U64,
FatType::U128 => MoveTypeLayout::U128,
FatType::Bool => MoveTypeLayout::Bool,
FatType::Vector(v) => MoveTypeLayout::Vector(Box::new(v.as_ref().try_into()?)),
FatType::Struct(s) => MoveTypeLayout::Struct(MoveStructLayout::new(
s.layout
.iter()
.map(|ty| ty.try_into())
.collect::<PartialVMResult<Vec<_>>>()?,
)),
FatType::Signer => MoveTypeLayout::Signer,
_ => return Err(PartialVMError::new(StatusCode::ABORT_TYPE_MISMATCH_ERROR)),
})
}
} | name: self.name.clone(),
abilities: self.abilities,
ty_args: self | random_line_split |
fat_type.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Loaded representation for runtime types.
use diem_types::{account_address::AccountAddress, vm_status::StatusCode};
use move_core_types::{
identifier::Identifier,
language_storage::{StructTag, TypeTag},
value::{MoveStructLayout, MoveTypeLayout},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryInto;
use vm::{
errors::{PartialVMError, PartialVMResult},
file_format::AbilitySet,
};
#[derive(Debug, Clone, Copy)]
pub(crate) struct WrappedAbilitySet(pub AbilitySet);
impl Serialize for WrappedAbilitySet {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.0.into_u8().serialize(serializer)
}
}
impl<'de> Deserialize<'de> for WrappedAbilitySet {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let byte = u8::deserialize(deserializer)?;
Ok(WrappedAbilitySet(AbilitySet::from_u8(byte).ok_or_else(
|| serde::de::Error::custom(format!("Invalid ability set: {:X}", byte)),
)?))
}
}
/// VM representation of a struct type in Move.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct FatStructType {
pub address: AccountAddress,
pub module: Identifier,
pub name: Identifier,
pub abilities: WrappedAbilitySet,
pub ty_args: Vec<FatType>,
pub layout: Vec<FatType>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) enum FatType {
Bool,
U8,
U64,
U128,
Address,
Signer,
Vector(Box<FatType>),
Struct(Box<FatStructType>),
Reference(Box<FatType>),
MutableReference(Box<FatType>),
TyParam(usize),
}
impl FatStructType {
pub fn subst(&self, ty_args: &[FatType]) -> PartialVMResult<FatStructType> {
Ok(Self {
address: self.address,
module: self.module.clone(),
name: self.name.clone(),
abilities: self.abilities,
ty_args: self
.ty_args
.iter()
.map(|ty| ty.subst(ty_args))
.collect::<PartialVMResult<_>>()?,
layout: self
.layout
.iter()
.map(|ty| ty.subst(ty_args))
.collect::<PartialVMResult<_>>()?,
})
}
pub fn struct_tag(&self) -> PartialVMResult<StructTag> {
let ty_args = self
.ty_args
.iter()
.map(|ty| ty.type_tag())
.collect::<PartialVMResult<Vec<_>>>()?;
Ok(StructTag {
address: self.address,
module: self.module.clone(),
name: self.name.clone(),
type_params: ty_args,
})
}
}
impl FatType {
pub fn subst(&self, ty_args: &[FatType]) -> PartialVMResult<FatType> {
use FatType::*;
let res = match self {
TyParam(idx) => match ty_args.get(*idx) {
Some(ty) => ty.clone(),
None => {
return Err(
PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
.with_message(format!(
"fat type substitution failed: index out of bounds -- len {} got {}",
ty_args.len(),
idx
)),
);
}
},
Bool => Bool,
U8 => U8,
U64 => U64,
U128 => U128,
Address => Address,
Signer => Signer,
Vector(ty) => Vector(Box::new(ty.subst(ty_args)?)),
Reference(ty) => Reference(Box::new(ty.subst(ty_args)?)),
MutableReference(ty) => MutableReference(Box::new(ty.subst(ty_args)?)),
Struct(struct_ty) => Struct(Box::new(struct_ty.subst(ty_args)?)),
};
Ok(res)
}
pub fn type_tag(&self) -> PartialVMResult<TypeTag> {
use FatType::*;
let res = match self {
Bool => TypeTag::Bool,
U8 => TypeTag::U8,
U64 => TypeTag::U64,
U128 => TypeTag::U128,
Address => TypeTag::Address,
Signer => TypeTag::Signer,
Vector(ty) => TypeTag::Vector(Box::new(ty.type_tag()?)),
Struct(struct_ty) => TypeTag::Struct(struct_ty.struct_tag()?),
Reference(_) | MutableReference(_) | TyParam(_) => {
return Err(
PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
.with_message(format!("cannot derive type tag for {:?}", self)),
)
}
};
Ok(res)
}
}
impl TryInto<MoveStructLayout> for &FatStructType {
type Error = PartialVMError;
fn try_into(self) -> Result<MoveStructLayout, Self::Error> {
Ok(MoveStructLayout::new(
self.layout
.iter()
.map(|ty| ty.try_into())
.collect::<PartialVMResult<Vec<_>>>()?,
))
}
}
impl TryInto<MoveTypeLayout> for &FatType {
type Error = PartialVMError;
fn try_into(self) -> Result<MoveTypeLayout, Self::Error> |
}
| {
Ok(match self {
FatType::Address => MoveTypeLayout::Address,
FatType::U8 => MoveTypeLayout::U8,
FatType::U64 => MoveTypeLayout::U64,
FatType::U128 => MoveTypeLayout::U128,
FatType::Bool => MoveTypeLayout::Bool,
FatType::Vector(v) => MoveTypeLayout::Vector(Box::new(v.as_ref().try_into()?)),
FatType::Struct(s) => MoveTypeLayout::Struct(MoveStructLayout::new(
s.layout
.iter()
.map(|ty| ty.try_into())
.collect::<PartialVMResult<Vec<_>>>()?,
)),
FatType::Signer => MoveTypeLayout::Signer,
_ => return Err(PartialVMError::new(StatusCode::ABORT_TYPE_MISMATCH_ERROR)),
})
} | identifier_body |
fat_type.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Loaded representation for runtime types.
use diem_types::{account_address::AccountAddress, vm_status::StatusCode};
use move_core_types::{
identifier::Identifier,
language_storage::{StructTag, TypeTag},
value::{MoveStructLayout, MoveTypeLayout},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryInto;
use vm::{
errors::{PartialVMError, PartialVMResult},
file_format::AbilitySet,
};
#[derive(Debug, Clone, Copy)]
pub(crate) struct WrappedAbilitySet(pub AbilitySet);
impl Serialize for WrappedAbilitySet {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.0.into_u8().serialize(serializer)
}
}
impl<'de> Deserialize<'de> for WrappedAbilitySet {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let byte = u8::deserialize(deserializer)?;
Ok(WrappedAbilitySet(AbilitySet::from_u8(byte).ok_or_else(
|| serde::de::Error::custom(format!("Invalid ability set: {:X}", byte)),
)?))
}
}
/// VM representation of a struct type in Move.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct FatStructType {
pub address: AccountAddress,
pub module: Identifier,
pub name: Identifier,
pub abilities: WrappedAbilitySet,
pub ty_args: Vec<FatType>,
pub layout: Vec<FatType>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) enum FatType {
Bool,
U8,
U64,
U128,
Address,
Signer,
Vector(Box<FatType>),
Struct(Box<FatStructType>),
Reference(Box<FatType>),
MutableReference(Box<FatType>),
TyParam(usize),
}
impl FatStructType {
pub fn | (&self, ty_args: &[FatType]) -> PartialVMResult<FatStructType> {
Ok(Self {
address: self.address,
module: self.module.clone(),
name: self.name.clone(),
abilities: self.abilities,
ty_args: self
.ty_args
.iter()
.map(|ty| ty.subst(ty_args))
.collect::<PartialVMResult<_>>()?,
layout: self
.layout
.iter()
.map(|ty| ty.subst(ty_args))
.collect::<PartialVMResult<_>>()?,
})
}
pub fn struct_tag(&self) -> PartialVMResult<StructTag> {
let ty_args = self
.ty_args
.iter()
.map(|ty| ty.type_tag())
.collect::<PartialVMResult<Vec<_>>>()?;
Ok(StructTag {
address: self.address,
module: self.module.clone(),
name: self.name.clone(),
type_params: ty_args,
})
}
}
impl FatType {
pub fn subst(&self, ty_args: &[FatType]) -> PartialVMResult<FatType> {
use FatType::*;
let res = match self {
TyParam(idx) => match ty_args.get(*idx) {
Some(ty) => ty.clone(),
None => {
return Err(
PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
.with_message(format!(
"fat type substitution failed: index out of bounds -- len {} got {}",
ty_args.len(),
idx
)),
);
}
},
Bool => Bool,
U8 => U8,
U64 => U64,
U128 => U128,
Address => Address,
Signer => Signer,
Vector(ty) => Vector(Box::new(ty.subst(ty_args)?)),
Reference(ty) => Reference(Box::new(ty.subst(ty_args)?)),
MutableReference(ty) => MutableReference(Box::new(ty.subst(ty_args)?)),
Struct(struct_ty) => Struct(Box::new(struct_ty.subst(ty_args)?)),
};
Ok(res)
}
pub fn type_tag(&self) -> PartialVMResult<TypeTag> {
use FatType::*;
let res = match self {
Bool => TypeTag::Bool,
U8 => TypeTag::U8,
U64 => TypeTag::U64,
U128 => TypeTag::U128,
Address => TypeTag::Address,
Signer => TypeTag::Signer,
Vector(ty) => TypeTag::Vector(Box::new(ty.type_tag()?)),
Struct(struct_ty) => TypeTag::Struct(struct_ty.struct_tag()?),
Reference(_) | MutableReference(_) | TyParam(_) => {
return Err(
PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)
.with_message(format!("cannot derive type tag for {:?}", self)),
)
}
};
Ok(res)
}
}
impl TryInto<MoveStructLayout> for &FatStructType {
type Error = PartialVMError;
fn try_into(self) -> Result<MoveStructLayout, Self::Error> {
Ok(MoveStructLayout::new(
self.layout
.iter()
.map(|ty| ty.try_into())
.collect::<PartialVMResult<Vec<_>>>()?,
))
}
}
impl TryInto<MoveTypeLayout> for &FatType {
type Error = PartialVMError;
fn try_into(self) -> Result<MoveTypeLayout, Self::Error> {
Ok(match self {
FatType::Address => MoveTypeLayout::Address,
FatType::U8 => MoveTypeLayout::U8,
FatType::U64 => MoveTypeLayout::U64,
FatType::U128 => MoveTypeLayout::U128,
FatType::Bool => MoveTypeLayout::Bool,
FatType::Vector(v) => MoveTypeLayout::Vector(Box::new(v.as_ref().try_into()?)),
FatType::Struct(s) => MoveTypeLayout::Struct(MoveStructLayout::new(
s.layout
.iter()
.map(|ty| ty.try_into())
.collect::<PartialVMResult<Vec<_>>>()?,
)),
FatType::Signer => MoveTypeLayout::Signer,
_ => return Err(PartialVMError::new(StatusCode::ABORT_TYPE_MISMATCH_ERROR)),
})
}
}
| subst | identifier_name |
job_queue.rs | use std::collections::HashSet;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::sync::TaskPool;
use std::sync::mpsc::{channel, Sender, Receiver};
use term::color::YELLOW;
use core::{Package, PackageId, Resolve, PackageSet};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, Dependency, profile};
use super::job::Job;
/// A management structure of the entire dependency graph to compile.
///
/// This structure is backed by the `DependencyQueue` type and manages the
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
queue: DependencyQueue<(&'a PackageId, Stage),
(&'a Package, Vec<(Job, Freshness)>)>,
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
packages: &'a PackageSet,
active: u32,
pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
}
/// A helper structure for metadata about the state of a building package.
struct PendingBuild {
/// Number of jobs currently active
amt: u32,
/// Current freshness state of this package. Any dirty target within a
/// package will cause the entire package to become dirty.
fresh: Freshness,
}
/// Current stage of compilation for an individual package.
///
/// This is the second layer of keys on the dependency queue to track the state
/// of where a particular package is in the compilation pipeline. Each of these
/// stages has a network of dependencies among them, outlined by the
/// `Dependency` implementation found below.
///
/// Each build step for a package is registered with one of these stages, and
/// each stage has a vector of work to perform in parallel.
#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show, Copy)]
pub enum Stage {
Start,
BuildCustomBuild,
RunCustomBuild,
Libraries,
Binaries,
LibraryTests,
BinaryTests,
}
type Message = (PackageId, Stage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
config: &Config) -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
JobQueue {
pool: TaskPool::new(config.jobs() as usize),
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
resolve: resolve,
packages: packages,
active: 0,
pending: HashMap::new(),
state: HashMap::new(),
ignored: HashSet::new(),
printed: HashSet::new(),
}
}
pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
jobs: Vec<(Job, Freshness)>) {
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
match self.state.entry(pkg.get_package_id()) {
Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
Vacant(entry) => { entry.insert(fresh); }
};
// Add the package to the dependency graph
self.queue.enqueue(&(self.resolve, self.packages), Fresh,
(pkg.get_package_id(), stage),
(pkg, jobs));
}
pub fn ignore(&mut self, pkg: &'a Package) {
self.ignored.insert(pkg.get_package_id());
}
/// Execute all jobs necessary to build the dependency graph.
///
/// This function will spawn off `config.jobs()` workers to build all of the
/// necessary dependencies, in order. Freshness is propagated as far as
/// possible along each dependency chain.
pub fn execute(&mut self, config: &Config) -> CargoResult<()> {
let _p = profile::start("executing the job graph");
// Iteratively execute the dependency graph. Each turn of this loop will
// schedule as much work as possible and then wait for one job to finish,
// possibly scheduling more work afterwards.
while self.queue.len() > 0 {
loop {
match self.queue.dequeue() {
Some((fresh, (_, stage), (pkg, jobs))) => {
info!("start: {} {:?}", pkg, stage);
try!(self.run(pkg, stage, fresh, jobs, config));
}
None => break,
}
}
// Now that all possible work has been scheduled, wait for a piece
// of work to finish. If any package fails to build then we stop
// scheduling work as quickly as possibly.
let (id, stage, fresh, result) = self.rx.recv().unwrap();
info!(" end: {} {:?}", id, stage);
let id = *self.state.keys().find(|&k| *k == &id).unwrap();
self.active -= 1;
match result {
Ok(()) => {
let state = &mut self.pending[(id, stage)];
state.amt -= 1;
state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
self.queue.finish(&(id, stage), state.fresh);
}
}
Err(e) => {
if self.active > 0 {
try!(config.shell().say(
"Build failed, waiting for other \
jobs to finish...", YELLOW)); | }
return Err(e)
}
}
}
log!(5, "rustc jobs completed");
Ok(())
}
/// Execute a stage of compilation for a package.
///
/// The input freshness is from `dequeue()` and indicates the combined
/// freshness of all upstream dependencies. This function will schedule all
/// work in `jobs` to be executed.
fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs as u32};
let id = pkg.get_package_id().clone();
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
self.active += amt;
self.pending.insert((pkg.get_package_id(), stage), PendingBuild {
amt: amt,
fresh: fresh,
});
let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
let id = id.clone();
let (desc_tx, desc_rx) = channel();
self.pool.execute(move|| {
my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap();
});
// only the first message of each job is processed
match desc_rx.recv() {
Ok(msg) => running.push(msg),
Err(..) => {}
}
}
// If no work was scheduled, make sure that a message is actually send
// on this channel.
if njobs == 0 {
self.tx.send((id, stage, fresh, Ok(()))).unwrap();
}
// Print out some nice progress information
//
// This isn't super trivial becuase we don't want to print loads and
// loads of information to the console, but we also want to produce a
// faithful representation of what's happening. This is somewhat nuanced
// as a package can start compiling *very* early on because of custom
// build commands and such.
//
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
let print = !self.ignored.contains(&pkg.get_package_id());
let print = print && !self.printed.contains(&pkg.get_package_id());
if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
self.printed.insert(pkg.get_package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
c.status("Fresh", pkg)
})),
Dirty => try!(config.shell().status("Compiling", pkg))
}
}
for msg in running.iter() {
try!(config.shell().verbose(|c| c.status("Running", msg)));
}
Ok(())
}
}
impl<'a> Dependency for (&'a PackageId, Stage) {
type Context = (&'a Resolve, &'a PackageSet);
fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
-> Vec<(&'a PackageId, Stage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// a pair of the package being built and the stage that it's at.
//
// Each stage here lists dependencies on the previous stages except for
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
.filter(|dep| *dep != id)
.map(|dep| {
(dep, pkg.get_dependencies().iter().find(|d| {
d.get_name() == dep.get_name()
}).unwrap())
});
match stage {
Stage::Start => Vec::new(),
// Building the build command itself starts off pretty easily,we
// just need to depend on all of the library stages of our own build
// dependencies (making them available to us).
Stage::BuildCustomBuild => {
let mut base = vec![(id, Stage::Start)];
base.extend(deps.filter(|&(_, dep)| dep.is_build())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// When running a custom build command, we need to be sure that our
// own custom build command is actually built, and then we need to
// wait for all our dependencies to finish their custom build
// commands themselves (as they may provide input to us).
Stage::RunCustomBuild => {
let mut base = vec![(id, Stage::BuildCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::RunCustomBuild)));
base
}
// Building a library depends on our own custom build command plus
// all our transitive dependencies.
Stage::Libraries => {
let mut base = vec![(id, Stage::RunCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// Binaries only depend on libraries being available. Note that they
// do not depend on dev-dependencies.
Stage::Binaries => vec![(id, Stage::Libraries)],
// Tests depend on all dependencies (including dev-dependencies) in
// addition to the library stage for this package. Note, however,
// that library tests only need to depend the custom build command
// being run, not the libraries themselves.
Stage::BinaryTests | Stage::LibraryTests => {
let mut base = if stage == Stage::BinaryTests {
vec![(id, Stage::Libraries)]
} else {
vec![(id, Stage::RunCustomBuild)]
};
base.extend(deps.map(|(id, _)| (id, Stage::Libraries)));
base
}
}
}
} | for _ in self.rx.iter().take(self.active as usize) {} | random_line_split |
job_queue.rs | use std::collections::HashSet;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::sync::TaskPool;
use std::sync::mpsc::{channel, Sender, Receiver};
use term::color::YELLOW;
use core::{Package, PackageId, Resolve, PackageSet};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, Dependency, profile};
use super::job::Job;
/// A management structure of the entire dependency graph to compile.
///
/// This structure is backed by the `DependencyQueue` type and manages the
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
queue: DependencyQueue<(&'a PackageId, Stage),
(&'a Package, Vec<(Job, Freshness)>)>,
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
packages: &'a PackageSet,
active: u32,
pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
}
/// A helper structure for metadata about the state of a building package.
struct PendingBuild {
/// Number of jobs currently active
amt: u32,
/// Current freshness state of this package. Any dirty target within a
/// package will cause the entire package to become dirty.
fresh: Freshness,
}
/// Current stage of compilation for an individual package.
///
/// This is the second layer of keys on the dependency queue to track the state
/// of where a particular package is in the compilation pipeline. Each of these
/// stages has a network of dependencies among them, outlined by the
/// `Dependency` implementation found below.
///
/// Each build step for a package is registered with one of these stages, and
/// each stage has a vector of work to perform in parallel.
#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show, Copy)]
pub enum Stage {
Start,
BuildCustomBuild,
RunCustomBuild,
Libraries,
Binaries,
LibraryTests,
BinaryTests,
}
type Message = (PackageId, Stage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
config: &Config) -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
JobQueue {
pool: TaskPool::new(config.jobs() as usize),
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
resolve: resolve,
packages: packages,
active: 0,
pending: HashMap::new(),
state: HashMap::new(),
ignored: HashSet::new(),
printed: HashSet::new(),
}
}
pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
jobs: Vec<(Job, Freshness)>) {
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
match self.state.entry(pkg.get_package_id()) {
Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
Vacant(entry) => { entry.insert(fresh); }
};
// Add the package to the dependency graph
self.queue.enqueue(&(self.resolve, self.packages), Fresh,
(pkg.get_package_id(), stage),
(pkg, jobs));
}
pub fn ignore(&mut self, pkg: &'a Package) {
self.ignored.insert(pkg.get_package_id());
}
/// Execute all jobs necessary to build the dependency graph.
///
/// This function will spawn off `config.jobs()` workers to build all of the
/// necessary dependencies, in order. Freshness is propagated as far as
/// possible along each dependency chain.
pub fn execute(&mut self, config: &Config) -> CargoResult<()> {
let _p = profile::start("executing the job graph");
// Iteratively execute the dependency graph. Each turn of this loop will
// schedule as much work as possible and then wait for one job to finish,
// possibly scheduling more work afterwards.
while self.queue.len() > 0 {
loop {
match self.queue.dequeue() {
Some((fresh, (_, stage), (pkg, jobs))) => {
info!("start: {} {:?}", pkg, stage);
try!(self.run(pkg, stage, fresh, jobs, config));
}
None => break,
}
}
// Now that all possible work has been scheduled, wait for a piece
// of work to finish. If any package fails to build then we stop
// scheduling work as quickly as possibly.
let (id, stage, fresh, result) = self.rx.recv().unwrap();
info!(" end: {} {:?}", id, stage);
let id = *self.state.keys().find(|&k| *k == &id).unwrap();
self.active -= 1;
match result {
Ok(()) => {
let state = &mut self.pending[(id, stage)];
state.amt -= 1;
state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
self.queue.finish(&(id, stage), state.fresh);
}
}
Err(e) => {
if self.active > 0 {
try!(config.shell().say(
"Build failed, waiting for other \
jobs to finish...", YELLOW));
for _ in self.rx.iter().take(self.active as usize) {}
}
return Err(e)
}
}
}
log!(5, "rustc jobs completed");
Ok(())
}
/// Execute a stage of compilation for a package.
///
/// The input freshness is from `dequeue()` and indicates the combined
/// freshness of all upstream dependencies. This function will schedule all
/// work in `jobs` to be executed.
fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs as u32};
let id = pkg.get_package_id().clone();
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
self.active += amt;
self.pending.insert((pkg.get_package_id(), stage), PendingBuild {
amt: amt,
fresh: fresh,
});
let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
let id = id.clone();
let (desc_tx, desc_rx) = channel();
self.pool.execute(move|| {
my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap();
});
// only the first message of each job is processed
match desc_rx.recv() {
Ok(msg) => running.push(msg),
Err(..) => {}
}
}
// If no work was scheduled, make sure that a message is actually send
// on this channel.
if njobs == 0 {
self.tx.send((id, stage, fresh, Ok(()))).unwrap();
}
// Print out some nice progress information
//
// This isn't super trivial becuase we don't want to print loads and
// loads of information to the console, but we also want to produce a
// faithful representation of what's happening. This is somewhat nuanced
// as a package can start compiling *very* early on because of custom
// build commands and such.
//
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
let print = !self.ignored.contains(&pkg.get_package_id());
let print = print && !self.printed.contains(&pkg.get_package_id());
if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
self.printed.insert(pkg.get_package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
c.status("Fresh", pkg)
})),
Dirty => try!(config.shell().status("Compiling", pkg))
}
}
for msg in running.iter() {
try!(config.shell().verbose(|c| c.status("Running", msg)));
}
Ok(())
}
}
impl<'a> Dependency for (&'a PackageId, Stage) {
type Context = (&'a Resolve, &'a PackageSet);
fn | (&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
-> Vec<(&'a PackageId, Stage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// a pair of the package being built and the stage that it's at.
//
// Each stage here lists dependencies on the previous stages except for
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
.filter(|dep| *dep != id)
.map(|dep| {
(dep, pkg.get_dependencies().iter().find(|d| {
d.get_name() == dep.get_name()
}).unwrap())
});
match stage {
Stage::Start => Vec::new(),
// Building the build command itself starts off pretty easily,we
// just need to depend on all of the library stages of our own build
// dependencies (making them available to us).
Stage::BuildCustomBuild => {
let mut base = vec![(id, Stage::Start)];
base.extend(deps.filter(|&(_, dep)| dep.is_build())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// When running a custom build command, we need to be sure that our
// own custom build command is actually built, and then we need to
// wait for all our dependencies to finish their custom build
// commands themselves (as they may provide input to us).
Stage::RunCustomBuild => {
let mut base = vec![(id, Stage::BuildCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::RunCustomBuild)));
base
}
// Building a library depends on our own custom build command plus
// all our transitive dependencies.
Stage::Libraries => {
let mut base = vec![(id, Stage::RunCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// Binaries only depend on libraries being available. Note that they
// do not depend on dev-dependencies.
Stage::Binaries => vec![(id, Stage::Libraries)],
// Tests depend on all dependencies (including dev-dependencies) in
// addition to the library stage for this package. Note, however,
// that library tests only need to depend the custom build command
// being run, not the libraries themselves.
Stage::BinaryTests | Stage::LibraryTests => {
let mut base = if stage == Stage::BinaryTests {
vec![(id, Stage::Libraries)]
} else {
vec![(id, Stage::RunCustomBuild)]
};
base.extend(deps.map(|(id, _)| (id, Stage::Libraries)));
base
}
}
}
}
| dependencies | identifier_name |
job_queue.rs | use std::collections::HashSet;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::sync::TaskPool;
use std::sync::mpsc::{channel, Sender, Receiver};
use term::color::YELLOW;
use core::{Package, PackageId, Resolve, PackageSet};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, Dependency, profile};
use super::job::Job;
/// A management structure of the entire dependency graph to compile.
///
/// This structure is backed by the `DependencyQueue` type and manages the
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
queue: DependencyQueue<(&'a PackageId, Stage),
(&'a Package, Vec<(Job, Freshness)>)>,
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
packages: &'a PackageSet,
active: u32,
pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
}
/// A helper structure for metadata about the state of a building package.
struct PendingBuild {
/// Number of jobs currently active
amt: u32,
/// Current freshness state of this package. Any dirty target within a
/// package will cause the entire package to become dirty.
fresh: Freshness,
}
/// Current stage of compilation for an individual package.
///
/// This is the second layer of keys on the dependency queue to track the state
/// of where a particular package is in the compilation pipeline. Each of these
/// stages has a network of dependencies among them, outlined by the
/// `Dependency` implementation found below.
///
/// Each build step for a package is registered with one of these stages, and
/// each stage has a vector of work to perform in parallel.
#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show, Copy)]
pub enum Stage {
Start,
BuildCustomBuild,
RunCustomBuild,
Libraries,
Binaries,
LibraryTests,
BinaryTests,
}
type Message = (PackageId, Stage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
config: &Config) -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
JobQueue {
pool: TaskPool::new(config.jobs() as usize),
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
resolve: resolve,
packages: packages,
active: 0,
pending: HashMap::new(),
state: HashMap::new(),
ignored: HashSet::new(),
printed: HashSet::new(),
}
}
pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
jobs: Vec<(Job, Freshness)>) {
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
match self.state.entry(pkg.get_package_id()) {
Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
Vacant(entry) => { entry.insert(fresh); }
};
// Add the package to the dependency graph
self.queue.enqueue(&(self.resolve, self.packages), Fresh,
(pkg.get_package_id(), stage),
(pkg, jobs));
}
pub fn ignore(&mut self, pkg: &'a Package) |
/// Execute all jobs necessary to build the dependency graph.
///
/// This function will spawn off `config.jobs()` workers to build all of the
/// necessary dependencies, in order. Freshness is propagated as far as
/// possible along each dependency chain.
pub fn execute(&mut self, config: &Config) -> CargoResult<()> {
let _p = profile::start("executing the job graph");
// Iteratively execute the dependency graph. Each turn of this loop will
// schedule as much work as possible and then wait for one job to finish,
// possibly scheduling more work afterwards.
while self.queue.len() > 0 {
loop {
match self.queue.dequeue() {
Some((fresh, (_, stage), (pkg, jobs))) => {
info!("start: {} {:?}", pkg, stage);
try!(self.run(pkg, stage, fresh, jobs, config));
}
None => break,
}
}
// Now that all possible work has been scheduled, wait for a piece
// of work to finish. If any package fails to build then we stop
// scheduling work as quickly as possibly.
let (id, stage, fresh, result) = self.rx.recv().unwrap();
info!(" end: {} {:?}", id, stage);
let id = *self.state.keys().find(|&k| *k == &id).unwrap();
self.active -= 1;
match result {
Ok(()) => {
let state = &mut self.pending[(id, stage)];
state.amt -= 1;
state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
self.queue.finish(&(id, stage), state.fresh);
}
}
Err(e) => {
if self.active > 0 {
try!(config.shell().say(
"Build failed, waiting for other \
jobs to finish...", YELLOW));
for _ in self.rx.iter().take(self.active as usize) {}
}
return Err(e)
}
}
}
log!(5, "rustc jobs completed");
Ok(())
}
/// Execute a stage of compilation for a package.
///
/// The input freshness is from `dequeue()` and indicates the combined
/// freshness of all upstream dependencies. This function will schedule all
/// work in `jobs` to be executed.
fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs as u32};
let id = pkg.get_package_id().clone();
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
self.active += amt;
self.pending.insert((pkg.get_package_id(), stage), PendingBuild {
amt: amt,
fresh: fresh,
});
let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
let id = id.clone();
let (desc_tx, desc_rx) = channel();
self.pool.execute(move|| {
my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap();
});
// only the first message of each job is processed
match desc_rx.recv() {
Ok(msg) => running.push(msg),
Err(..) => {}
}
}
// If no work was scheduled, make sure that a message is actually send
// on this channel.
if njobs == 0 {
self.tx.send((id, stage, fresh, Ok(()))).unwrap();
}
// Print out some nice progress information
//
// This isn't super trivial becuase we don't want to print loads and
// loads of information to the console, but we also want to produce a
// faithful representation of what's happening. This is somewhat nuanced
// as a package can start compiling *very* early on because of custom
// build commands and such.
//
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
let print = !self.ignored.contains(&pkg.get_package_id());
let print = print && !self.printed.contains(&pkg.get_package_id());
if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
self.printed.insert(pkg.get_package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
c.status("Fresh", pkg)
})),
Dirty => try!(config.shell().status("Compiling", pkg))
}
}
for msg in running.iter() {
try!(config.shell().verbose(|c| c.status("Running", msg)));
}
Ok(())
}
}
impl<'a> Dependency for (&'a PackageId, Stage) {
type Context = (&'a Resolve, &'a PackageSet);
fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
-> Vec<(&'a PackageId, Stage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// a pair of the package being built and the stage that it's at.
//
// Each stage here lists dependencies on the previous stages except for
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
.filter(|dep| *dep != id)
.map(|dep| {
(dep, pkg.get_dependencies().iter().find(|d| {
d.get_name() == dep.get_name()
}).unwrap())
});
match stage {
Stage::Start => Vec::new(),
// Building the build command itself starts off pretty easily,we
// just need to depend on all of the library stages of our own build
// dependencies (making them available to us).
Stage::BuildCustomBuild => {
let mut base = vec![(id, Stage::Start)];
base.extend(deps.filter(|&(_, dep)| dep.is_build())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// When running a custom build command, we need to be sure that our
// own custom build command is actually built, and then we need to
// wait for all our dependencies to finish their custom build
// commands themselves (as they may provide input to us).
Stage::RunCustomBuild => {
let mut base = vec![(id, Stage::BuildCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::RunCustomBuild)));
base
}
// Building a library depends on our own custom build command plus
// all our transitive dependencies.
Stage::Libraries => {
let mut base = vec![(id, Stage::RunCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// Binaries only depend on libraries being available. Note that they
// do not depend on dev-dependencies.
Stage::Binaries => vec![(id, Stage::Libraries)],
// Tests depend on all dependencies (including dev-dependencies) in
// addition to the library stage for this package. Note, however,
// that library tests only need to depend the custom build command
// being run, not the libraries themselves.
Stage::BinaryTests | Stage::LibraryTests => {
let mut base = if stage == Stage::BinaryTests {
vec![(id, Stage::Libraries)]
} else {
vec![(id, Stage::RunCustomBuild)]
};
base.extend(deps.map(|(id, _)| (id, Stage::Libraries)));
base
}
}
}
}
| {
self.ignored.insert(pkg.get_package_id());
} | identifier_body |
job_queue.rs | use std::collections::HashSet;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::sync::TaskPool;
use std::sync::mpsc::{channel, Sender, Receiver};
use term::color::YELLOW;
use core::{Package, PackageId, Resolve, PackageSet};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, Dependency, profile};
use super::job::Job;
/// A management structure of the entire dependency graph to compile.
///
/// This structure is backed by the `DependencyQueue` type and manages the
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
queue: DependencyQueue<(&'a PackageId, Stage),
(&'a Package, Vec<(Job, Freshness)>)>,
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
packages: &'a PackageSet,
active: u32,
pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
}
/// A helper structure for metadata about the state of a building package.
struct PendingBuild {
/// Number of jobs currently active
amt: u32,
/// Current freshness state of this package. Any dirty target within a
/// package will cause the entire package to become dirty.
fresh: Freshness,
}
/// Current stage of compilation for an individual package.
///
/// This is the second layer of keys on the dependency queue to track the state
/// of where a particular package is in the compilation pipeline. Each of these
/// stages has a network of dependencies among them, outlined by the
/// `Dependency` implementation found below.
///
/// Each build step for a package is registered with one of these stages, and
/// each stage has a vector of work to perform in parallel.
#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show, Copy)]
pub enum Stage {
Start,
BuildCustomBuild,
RunCustomBuild,
Libraries,
Binaries,
LibraryTests,
BinaryTests,
}
type Message = (PackageId, Stage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
config: &Config) -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
JobQueue {
pool: TaskPool::new(config.jobs() as usize),
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
resolve: resolve,
packages: packages,
active: 0,
pending: HashMap::new(),
state: HashMap::new(),
ignored: HashSet::new(),
printed: HashSet::new(),
}
}
pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
jobs: Vec<(Job, Freshness)>) {
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
match self.state.entry(pkg.get_package_id()) {
Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
Vacant(entry) => |
};
// Add the package to the dependency graph
self.queue.enqueue(&(self.resolve, self.packages), Fresh,
(pkg.get_package_id(), stage),
(pkg, jobs));
}
pub fn ignore(&mut self, pkg: &'a Package) {
self.ignored.insert(pkg.get_package_id());
}
/// Execute all jobs necessary to build the dependency graph.
///
/// This function will spawn off `config.jobs()` workers to build all of the
/// necessary dependencies, in order. Freshness is propagated as far as
/// possible along each dependency chain.
pub fn execute(&mut self, config: &Config) -> CargoResult<()> {
let _p = profile::start("executing the job graph");
// Iteratively execute the dependency graph. Each turn of this loop will
// schedule as much work as possible and then wait for one job to finish,
// possibly scheduling more work afterwards.
while self.queue.len() > 0 {
loop {
match self.queue.dequeue() {
Some((fresh, (_, stage), (pkg, jobs))) => {
info!("start: {} {:?}", pkg, stage);
try!(self.run(pkg, stage, fresh, jobs, config));
}
None => break,
}
}
// Now that all possible work has been scheduled, wait for a piece
// of work to finish. If any package fails to build then we stop
// scheduling work as quickly as possibly.
let (id, stage, fresh, result) = self.rx.recv().unwrap();
info!(" end: {} {:?}", id, stage);
let id = *self.state.keys().find(|&k| *k == &id).unwrap();
self.active -= 1;
match result {
Ok(()) => {
let state = &mut self.pending[(id, stage)];
state.amt -= 1;
state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
self.queue.finish(&(id, stage), state.fresh);
}
}
Err(e) => {
if self.active > 0 {
try!(config.shell().say(
"Build failed, waiting for other \
jobs to finish...", YELLOW));
for _ in self.rx.iter().take(self.active as usize) {}
}
return Err(e)
}
}
}
log!(5, "rustc jobs completed");
Ok(())
}
/// Execute a stage of compilation for a package.
///
/// The input freshness is from `dequeue()` and indicates the combined
/// freshness of all upstream dependencies. This function will schedule all
/// work in `jobs` to be executed.
fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs as u32};
let id = pkg.get_package_id().clone();
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
self.active += amt;
self.pending.insert((pkg.get_package_id(), stage), PendingBuild {
amt: amt,
fresh: fresh,
});
let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
let id = id.clone();
let (desc_tx, desc_rx) = channel();
self.pool.execute(move|| {
my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap();
});
// only the first message of each job is processed
match desc_rx.recv() {
Ok(msg) => running.push(msg),
Err(..) => {}
}
}
// If no work was scheduled, make sure that a message is actually send
// on this channel.
if njobs == 0 {
self.tx.send((id, stage, fresh, Ok(()))).unwrap();
}
// Print out some nice progress information
//
// This isn't super trivial becuase we don't want to print loads and
// loads of information to the console, but we also want to produce a
// faithful representation of what's happening. This is somewhat nuanced
// as a package can start compiling *very* early on because of custom
// build commands and such.
//
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
let print = !self.ignored.contains(&pkg.get_package_id());
let print = print && !self.printed.contains(&pkg.get_package_id());
if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
self.printed.insert(pkg.get_package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
c.status("Fresh", pkg)
})),
Dirty => try!(config.shell().status("Compiling", pkg))
}
}
for msg in running.iter() {
try!(config.shell().verbose(|c| c.status("Running", msg)));
}
Ok(())
}
}
impl<'a> Dependency for (&'a PackageId, Stage) {
type Context = (&'a Resolve, &'a PackageSet);
fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
-> Vec<(&'a PackageId, Stage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// a pair of the package being built and the stage that it's at.
//
// Each stage here lists dependencies on the previous stages except for
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
.filter(|dep| *dep != id)
.map(|dep| {
(dep, pkg.get_dependencies().iter().find(|d| {
d.get_name() == dep.get_name()
}).unwrap())
});
match stage {
Stage::Start => Vec::new(),
// Building the build command itself starts off pretty easily,we
// just need to depend on all of the library stages of our own build
// dependencies (making them available to us).
Stage::BuildCustomBuild => {
let mut base = vec![(id, Stage::Start)];
base.extend(deps.filter(|&(_, dep)| dep.is_build())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// When running a custom build command, we need to be sure that our
// own custom build command is actually built, and then we need to
// wait for all our dependencies to finish their custom build
// commands themselves (as they may provide input to us).
Stage::RunCustomBuild => {
let mut base = vec![(id, Stage::BuildCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::RunCustomBuild)));
base
}
// Building a library depends on our own custom build command plus
// all our transitive dependencies.
Stage::Libraries => {
let mut base = vec![(id, Stage::RunCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
.map(|(id, _)| (id, Stage::Libraries)));
base
}
// Binaries only depend on libraries being available. Note that they
// do not depend on dev-dependencies.
Stage::Binaries => vec![(id, Stage::Libraries)],
// Tests depend on all dependencies (including dev-dependencies) in
// addition to the library stage for this package. Note, however,
// that library tests only need to depend the custom build command
// being run, not the libraries themselves.
Stage::BinaryTests | Stage::LibraryTests => {
let mut base = if stage == Stage::BinaryTests {
vec![(id, Stage::Libraries)]
} else {
vec![(id, Stage::RunCustomBuild)]
};
base.extend(deps.map(|(id, _)| (id, Stage::Libraries)));
base
}
}
}
}
| { entry.insert(fresh); } | conditional_block |
models.py | from django.core.exceptions import ValidationError
from django.db import models
class ActionLog(models.Model):
ACTIONS_TYPES = (
# A translation has been created.
("translation:created", "Translation created"),
# A translation has been deleted.
("translation:deleted", "Translation deleted"),
# A translation has been approved.
("translation:approved", "Translation approved"),
# A translation has been unapproved.
("translation:unapproved", "Translation unapproved"),
# A translation has been rejected.
("translation:rejected", "Translation rejected"),
# A translation has been unrejected.
("translation:unrejected", "Translation unrejected"),
# A comment has been added.
("comment:added", "Comment added"),
)
action_type = models.CharField(max_length=50, choices=ACTIONS_TYPES)
created_at = models.DateTimeField(auto_now_add=True)
performed_by = models.ForeignKey(
"auth.User", models.SET_NULL, related_name="actions", null=True
)
# Used to track on what translation related actions apply.
translation = models.ForeignKey(
"base.Translation", models.CASCADE, blank=True, null=True,
)
# Used when a translation has been deleted or a team comment has been added.
entity = models.ForeignKey("base.Entity", models.CASCADE, blank=True, null=True,)
locale = models.ForeignKey("base.Locale", models.CASCADE, blank=True, null=True,)
def validate_action_type_choice(self):
valid_types = [t[0] for t in self.ACTIONS_TYPES]
if self.action_type not in valid_types:
|
def validate_foreign_keys_per_action(self):
if self.action_type == "translation:deleted" and (
self.translation or not self.entity or not self.locale
):
raise ValidationError(
'For action type "translation:deleted", `entity` and `locale` are required'
)
if self.action_type == "comment:added" and not (
(self.translation and not self.locale and not self.entity)
or (not self.translation and self.locale and self.entity)
):
raise ValidationError(
'For action type "comment:added", either `translation` or `entity` and `locale` are required'
)
if (
self.action_type != "translation:deleted"
and self.action_type != "comment:added"
) and (not self.translation or self.entity or self.locale):
raise ValidationError(
'For action type "{}", only `translation` is accepted'.format(
self.action_type
)
)
def save(self, *args, **kwargs):
self.validate_action_type_choice()
self.validate_foreign_keys_per_action()
super(ActionLog, self).save(*args, **kwargs)
| raise ValidationError(
'Action type "{}" is not one of the permitted values: {}'.format(
self.action_type, ", ".join(valid_types)
)
) | conditional_block |
models.py | from django.core.exceptions import ValidationError
from django.db import models
class ActionLog(models.Model):
ACTIONS_TYPES = (
# A translation has been created.
("translation:created", "Translation created"),
# A translation has been deleted.
("translation:deleted", "Translation deleted"),
# A translation has been approved.
("translation:approved", "Translation approved"),
# A translation has been unapproved.
("translation:unapproved", "Translation unapproved"),
# A translation has been rejected.
("translation:rejected", "Translation rejected"),
# A translation has been unrejected.
("translation:unrejected", "Translation unrejected"),
# A comment has been added.
("comment:added", "Comment added"),
)
action_type = models.CharField(max_length=50, choices=ACTIONS_TYPES)
created_at = models.DateTimeField(auto_now_add=True)
performed_by = models.ForeignKey(
"auth.User", models.SET_NULL, related_name="actions", null=True
)
# Used to track on what translation related actions apply.
translation = models.ForeignKey(
"base.Translation", models.CASCADE, blank=True, null=True,
)
# Used when a translation has been deleted or a team comment has been added.
entity = models.ForeignKey("base.Entity", models.CASCADE, blank=True, null=True,)
locale = models.ForeignKey("base.Locale", models.CASCADE, blank=True, null=True,)
def validate_action_type_choice(self):
valid_types = [t[0] for t in self.ACTIONS_TYPES]
if self.action_type not in valid_types:
raise ValidationError(
'Action type "{}" is not one of the permitted values: {}'.format(
self.action_type, ", ".join(valid_types)
)
)
def validate_foreign_keys_per_action(self):
if self.action_type == "translation:deleted" and (
self.translation or not self.entity or not self.locale
):
raise ValidationError(
'For action type "translation:deleted", `entity` and `locale` are required'
)
if self.action_type == "comment:added" and not (
(self.translation and not self.locale and not self.entity)
or (not self.translation and self.locale and self.entity)
):
raise ValidationError(
'For action type "comment:added", either `translation` or `entity` and `locale` are required'
)
if (
self.action_type != "translation:deleted"
and self.action_type != "comment:added"
) and (not self.translation or self.entity or self.locale):
raise ValidationError(
'For action type "{}", only `translation` is accepted'.format(
self.action_type
) | def save(self, *args, **kwargs):
self.validate_action_type_choice()
self.validate_foreign_keys_per_action()
super(ActionLog, self).save(*args, **kwargs) | )
| random_line_split |
models.py | from django.core.exceptions import ValidationError
from django.db import models
class ActionLog(models.Model):
ACTIONS_TYPES = (
# A translation has been created.
("translation:created", "Translation created"),
# A translation has been deleted.
("translation:deleted", "Translation deleted"),
# A translation has been approved.
("translation:approved", "Translation approved"),
# A translation has been unapproved.
("translation:unapproved", "Translation unapproved"),
# A translation has been rejected.
("translation:rejected", "Translation rejected"),
# A translation has been unrejected.
("translation:unrejected", "Translation unrejected"),
# A comment has been added.
("comment:added", "Comment added"),
)
action_type = models.CharField(max_length=50, choices=ACTIONS_TYPES)
created_at = models.DateTimeField(auto_now_add=True)
performed_by = models.ForeignKey(
"auth.User", models.SET_NULL, related_name="actions", null=True
)
# Used to track on what translation related actions apply.
translation = models.ForeignKey(
"base.Translation", models.CASCADE, blank=True, null=True,
)
# Used when a translation has been deleted or a team comment has been added.
entity = models.ForeignKey("base.Entity", models.CASCADE, blank=True, null=True,)
locale = models.ForeignKey("base.Locale", models.CASCADE, blank=True, null=True,)
def validate_action_type_choice(self):
valid_types = [t[0] for t in self.ACTIONS_TYPES]
if self.action_type not in valid_types:
raise ValidationError(
'Action type "{}" is not one of the permitted values: {}'.format(
self.action_type, ", ".join(valid_types)
)
)
def | (self):
if self.action_type == "translation:deleted" and (
self.translation or not self.entity or not self.locale
):
raise ValidationError(
'For action type "translation:deleted", `entity` and `locale` are required'
)
if self.action_type == "comment:added" and not (
(self.translation and not self.locale and not self.entity)
or (not self.translation and self.locale and self.entity)
):
raise ValidationError(
'For action type "comment:added", either `translation` or `entity` and `locale` are required'
)
if (
self.action_type != "translation:deleted"
and self.action_type != "comment:added"
) and (not self.translation or self.entity or self.locale):
raise ValidationError(
'For action type "{}", only `translation` is accepted'.format(
self.action_type
)
)
def save(self, *args, **kwargs):
self.validate_action_type_choice()
self.validate_foreign_keys_per_action()
super(ActionLog, self).save(*args, **kwargs)
| validate_foreign_keys_per_action | identifier_name |
models.py | from django.core.exceptions import ValidationError
from django.db import models
class ActionLog(models.Model):
ACTIONS_TYPES = (
# A translation has been created.
("translation:created", "Translation created"),
# A translation has been deleted.
("translation:deleted", "Translation deleted"),
# A translation has been approved.
("translation:approved", "Translation approved"),
# A translation has been unapproved.
("translation:unapproved", "Translation unapproved"),
# A translation has been rejected.
("translation:rejected", "Translation rejected"),
# A translation has been unrejected.
("translation:unrejected", "Translation unrejected"),
# A comment has been added.
("comment:added", "Comment added"),
)
action_type = models.CharField(max_length=50, choices=ACTIONS_TYPES)
created_at = models.DateTimeField(auto_now_add=True)
performed_by = models.ForeignKey(
"auth.User", models.SET_NULL, related_name="actions", null=True
)
# Used to track on what translation related actions apply.
translation = models.ForeignKey(
"base.Translation", models.CASCADE, blank=True, null=True,
)
# Used when a translation has been deleted or a team comment has been added.
entity = models.ForeignKey("base.Entity", models.CASCADE, blank=True, null=True,)
locale = models.ForeignKey("base.Locale", models.CASCADE, blank=True, null=True,)
def validate_action_type_choice(self):
valid_types = [t[0] for t in self.ACTIONS_TYPES]
if self.action_type not in valid_types:
raise ValidationError(
'Action type "{}" is not one of the permitted values: {}'.format(
self.action_type, ", ".join(valid_types)
)
)
def validate_foreign_keys_per_action(self):
if self.action_type == "translation:deleted" and (
self.translation or not self.entity or not self.locale
):
raise ValidationError(
'For action type "translation:deleted", `entity` and `locale` are required'
)
if self.action_type == "comment:added" and not (
(self.translation and not self.locale and not self.entity)
or (not self.translation and self.locale and self.entity)
):
raise ValidationError(
'For action type "comment:added", either `translation` or `entity` and `locale` are required'
)
if (
self.action_type != "translation:deleted"
and self.action_type != "comment:added"
) and (not self.translation or self.entity or self.locale):
raise ValidationError(
'For action type "{}", only `translation` is accepted'.format(
self.action_type
)
)
def save(self, *args, **kwargs):
| self.validate_action_type_choice()
self.validate_foreign_keys_per_action()
super(ActionLog, self).save(*args, **kwargs) | identifier_body |
|
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
# | # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class BppSuite(CMakePackage):
"""BppSuite is a suite of ready-to-use programs for phylogenetic and
sequence analysis."""
homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite"
url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz"
version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
depends_on('[email protected]:', type='build')
depends_on('texinfo', type='build')
depends_on('bpp-core')
depends_on('bpp-seq')
depends_on('bpp-phyl') | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but | random_line_split |
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class BppSuite(CMakePackage):
| """BppSuite is a suite of ready-to-use programs for phylogenetic and
sequence analysis."""
homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite"
url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz"
version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
depends_on('[email protected]:', type='build')
depends_on('texinfo', type='build')
depends_on('bpp-core')
depends_on('bpp-seq')
depends_on('bpp-phyl') | identifier_body |
|
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class | (CMakePackage):
"""BppSuite is a suite of ready-to-use programs for phylogenetic and
sequence analysis."""
homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite"
url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz"
version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
depends_on('[email protected]:', type='build')
depends_on('texinfo', type='build')
depends_on('bpp-core')
depends_on('bpp-seq')
depends_on('bpp-phyl')
| BppSuite | identifier_name |
virtual_interface.py | # Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
class VirtualInterface(base.NovaPersistentObject, base.NovaObject,
                       base.NovaObjectDictCompat):
    """Versioned object representing a virtual network interface (VIF)
    attached to an instance.
    """

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.IntegerField(),
        'address': fields.StringField(nullable=True),
        'network_id': fields.IntegerField(),
        'instance_uuid': fields.UUIDField(),
        'uuid': fields.UUIDField(),
    }

    @staticmethod
    def _from_db_object(context, vif, db_vif):
        # Copy every declared field from the DB row onto the object,
        # then clear change tracking so the object reads as clean.
        for field in vif.fields:
            vif[field] = db_vif[field]
        vif._context = context
        vif.obj_reset_changes()
        return vif

    @base.remotable_classmethod
    def get_by_id(cls, context, vif_id):
        """Return the VIF with the given integer id, or None if not found."""
        db_vif = db.virtual_interface_get(context, vif_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_uuid(cls, context, vif_uuid):
        """Return the VIF with the given uuid, or None if not found."""
        db_vif = db.virtual_interface_get_by_uuid(context, vif_uuid)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_address(cls, context, address):
        """Return the VIF with the given MAC address, or None if not found."""
        db_vif = db.virtual_interface_get_by_address(context, address)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_instance_and_network(cls, context, instance_uuid, network_id):
        """Return the VIF joining an instance to a network, or None."""
        db_vif = db.virtual_interface_get_by_instance_and_network(context,
                instance_uuid, network_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable
    def create(self):
        """Persist this VIF; raises ObjectActionError if already created."""
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        updates = self.obj_get_changes()
        db_vif = db.virtual_interface_create(self._context, updates)
        self._from_db_object(self._context, self, db_vif)

    @base.remotable_classmethod
    def delete_by_instance_uuid(cls, context, instance_uuid):
        """Delete all VIFs belonging to the given instance."""
        db.virtual_interface_delete_by_instance(context, instance_uuid)
class VirtualInterfaceList(base.ObjectListBase, base.NovaObject):
    """Versioned list object holding VirtualInterface items."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'objects': fields.ListOfObjectsField('VirtualInterface'),
    }

    child_versions = {
        '1.0': '1.0',
    }

    @base.remotable_classmethod
    def get_all(cls, context):
        """Return every virtual interface known to the database."""
        rows = db.virtual_interface_get_all(context)
        return base.obj_make_list(context, cls(context),
                                  objects.VirtualInterface, rows)

    @base.remotable_classmethod
    def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False):
        """Return the virtual interfaces attached to the given instance."""
        rows = db.virtual_interface_get_by_instance(
            context, instance_uuid, use_slave=use_slave)
        return base.obj_make_list(context, cls(context),
                                  objects.VirtualInterface, rows)
| for field in vif.fields:
vif[field] = db_vif[field]
vif._context = context
vif.obj_reset_changes()
return vif | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.