file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
evaluateXPathToStrings.ts | import IDomFacade from './domFacade/IDomFacade';
import evaluateXPath, { EvaluableExpression } from './evaluateXPath';
import { Options } from './types/Options';
/**
* Evaluates an XPath on the given contextNode. Returns the string result as if the XPath is wrapped in string(...).
*
* @public
*
* @param selector - The selector to execute. Supports XPath 3.1.
* @param contextItem - The node from which to run the XPath.
* @param domFacade - The domFacade (or DomFacade like interface) for retrieving relations.
* @param variables - Extra variables (name to value). Values can be number, string, boolean, nodes or object literals and arrays.
* @param options - Extra options for evaluating this XPath.
*
* @returns The string result.
*/
export default function evaluateXPathToStrings(
selector: EvaluableExpression,
contextItem?: any | null,
domFacade?: IDomFacade | null,
variables?: { [s: string]: any } | null,
options?: Options | null
): string[] | {
return evaluateXPath(
selector,
contextItem,
domFacade,
variables,
evaluateXPath.STRINGS_TYPE,
options
);
} | identifier_body |
|
evaluateXPathToStrings.ts | import IDomFacade from './domFacade/IDomFacade';
import evaluateXPath, { EvaluableExpression } from './evaluateXPath';
import { Options } from './types/Options';
/**
* Evaluates an XPath on the given contextNode. Returns the string result as if the XPath is wrapped in string(...).
*
* @public
*
* @param selector - The selector to execute. Supports XPath 3.1.
* @param contextItem - The node from which to run the XPath.
* @param domFacade - The domFacade (or DomFacade like interface) for retrieving relations.
* @param variables - Extra variables (name to value). Values can be number, string, boolean, nodes or object literals and arrays.
* @param options - Extra options for evaluating this XPath.
*
* @returns The string result.
*/
export default function | (
selector: EvaluableExpression,
contextItem?: any | null,
domFacade?: IDomFacade | null,
variables?: { [s: string]: any } | null,
options?: Options | null
): string[] {
return evaluateXPath(
selector,
contextItem,
domFacade,
variables,
evaluateXPath.STRINGS_TYPE,
options
);
}
| evaluateXPathToStrings | identifier_name |
CreateMapView.js | import React, { Component } from 'react';
import { GoogleMapLoader, GoogleMap, Marker, SearchBox } from 'react-google-maps';
class CreateMapView extends Component {
constructor(props) {
super(props);
this.state = {
coordinates: '',
};
this.handlePlacesChanged = this.handlePlacesChanged.bind(this);
}
handlePlacesChanged() {
const places = this.refs.searchBox.getPlaces();
const address = places[0].formatted_address;
const lat = places[0].geometry.location.lat().toString();
const lng = places[0].geometry.location.lng().toString();
const coordinates = lat.concat(',').concat(lng);
this.props.addMarker(coordinates);
this.props.onLocationChange(address);
}
render() {
return (
<div className="ui card">
<GoogleMapLoader
options={{ mapTypeControl: false }}
containerElement={
<div
{...this.props}
style={{
height: '250px',
width: '500px',
}}
/>
}
googleMapElement={
<GoogleMap
zoom={this.props.zoom}
center={this.props.center}
onClick={this.props.addMarker.bind(this)}
options={{ disableDefaultUI: true }}
>
<SearchBox
className="searchBox"
ref="searchBox"
bounds={this.props.bounds}
controlPosition={google.maps.ControlPosition.TOP_CENTER}
placeholder="Enter spread location (e.g. a park)"
onPlacesChanged={this.handlePlacesChanged.bind(this)}
/>
{this.props.markers.map((marker) => {
return (
<Marker
{...marker}
/>
);
})}
</GoogleMap>
}
/>
</div>
);
| }
export default CreateMapView; | }
| random_line_split |
CreateMapView.js | import React, { Component } from 'react';
import { GoogleMapLoader, GoogleMap, Marker, SearchBox } from 'react-google-maps';
class CreateMapView extends Component {
constructor(props) {
super(props);
this.state = {
coordinates: '',
};
this.handlePlacesChanged = this.handlePlacesChanged.bind(this);
}
handlePlacesChanged() {
const places = this.refs.searchBox.getPlaces();
const address = places[0].formatted_address;
const lat = places[0].geometry.location.lat().toString();
const lng = places[0].geometry.location.lng().toString();
const coordinates = lat.concat(',').concat(lng);
this.props.addMarker(coordinates);
this.props.onLocationChange(address);
}
render() |
}
export default CreateMapView;
| {
return (
<div className="ui card">
<GoogleMapLoader
options={{ mapTypeControl: false }}
containerElement={
<div
{...this.props}
style={{
height: '250px',
width: '500px',
}}
/>
}
googleMapElement={
<GoogleMap
zoom={this.props.zoom}
center={this.props.center}
onClick={this.props.addMarker.bind(this)}
options={{ disableDefaultUI: true }}
>
<SearchBox
className="searchBox"
ref="searchBox"
bounds={this.props.bounds}
controlPosition={google.maps.ControlPosition.TOP_CENTER}
placeholder="Enter spread location (e.g. a park)"
onPlacesChanged={this.handlePlacesChanged.bind(this)}
/>
{this.props.markers.map((marker) => {
return (
<Marker
{...marker}
/>
);
})}
</GoogleMap>
}
/>
</div>
);
} | identifier_body |
CreateMapView.js | import React, { Component } from 'react';
import { GoogleMapLoader, GoogleMap, Marker, SearchBox } from 'react-google-maps';
class CreateMapView extends Component {
constructor(props) {
super(props);
this.state = {
coordinates: '',
};
this.handlePlacesChanged = this.handlePlacesChanged.bind(this);
}
| () {
const places = this.refs.searchBox.getPlaces();
const address = places[0].formatted_address;
const lat = places[0].geometry.location.lat().toString();
const lng = places[0].geometry.location.lng().toString();
const coordinates = lat.concat(',').concat(lng);
this.props.addMarker(coordinates);
this.props.onLocationChange(address);
}
render() {
return (
<div className="ui card">
<GoogleMapLoader
options={{ mapTypeControl: false }}
containerElement={
<div
{...this.props}
style={{
height: '250px',
width: '500px',
}}
/>
}
googleMapElement={
<GoogleMap
zoom={this.props.zoom}
center={this.props.center}
onClick={this.props.addMarker.bind(this)}
options={{ disableDefaultUI: true }}
>
<SearchBox
className="searchBox"
ref="searchBox"
bounds={this.props.bounds}
controlPosition={google.maps.ControlPosition.TOP_CENTER}
placeholder="Enter spread location (e.g. a park)"
onPlacesChanged={this.handlePlacesChanged.bind(this)}
/>
{this.props.markers.map((marker) => {
return (
<Marker
{...marker}
/>
);
})}
</GoogleMap>
}
/>
</div>
);
}
}
export default CreateMapView;
| handlePlacesChanged | identifier_name |
search.js | var search = (function () {
var exports = {};
function narrow_or_search_for_term(search_string) {
var search_query_box = $("#search_query");
ui_util.change_tab_to('#home');
var operators = Filter.parse(search_string);
narrow.activate(operators, {trigger: 'search'});
// It's sort of annoying that this is not in a position to
// blur the search box, because it means that Esc won't
// unnarrow, it'll leave the searchbox.
// Narrowing will have already put some operators in the search box,
// so leave the current text in.
search_query_box.blur();
return search_query_box.val();
}
function update_buttons_with_focus(focused) {
var search_query = $('#search_query');
// Show buttons iff the search input is focused, or has non-empty contents,
// or we are narrowed.
if (focused
|| search_query.val()
|| narrow_state.active()) {
$('.search_button').prop('disabled', false);
} else {
$('.search_button').attr('disabled', 'disabled');
}
}
exports.update_button_visibility = function () {
update_buttons_with_focus($('#search_query').is(':focus'));
};
exports.initialize = function () {
// Data storage for the typeahead.
// This maps a search string to an object with a "description" field.
// (It's a bit of legacy that we have an object with only one important
// field. There's also a "search_string" field on each element that actually
// just represents the key of the hash, so it's redundant.)
var search_object = {};
$("#search_query").typeahead({
source: function (query) {
var suggestions = search_suggestion.get_suggestions(query);
// Update our global search_object hash
search_object = suggestions.lookup_table;
return suggestions.strings;
},
fixed: true,
items: 12,
helpOnEmptyStrings: true,
naturalSearch: true,
highlighter: function (item) {
var obj = search_object[item];
return obj.description;
},
matcher: function () {
return true;
},
updater: narrow_or_search_for_term,
sorter: function (items) {
return items;
},
});
$("#searchbox_form").keydown(function (e) {
exports.update_button_visibility();
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// Don't submit the form so that the typeahead can instead
// handle our Enter keypress. Any searching that needs
// to be done will be handled in the keyup.
e.preventDefault();
return false;
}
}).keyup(function (e) {
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// We just pressed enter and the box had focus, which
// means we didn't use the typeahead at all. In that
// case, we should act as though we're searching by
// operators. (The reason the other actions don't call
// this codepath is that they first all blur the box to
// indicate that they've done what they need to do)
narrow.activate(Filter.parse(search_query_box.val()), {trigger: 'search'});
search_query_box.blur();
update_buttons_with_focus(false);
}
});
// Some of these functions don't actually need to be exported,
// but the code was moved here from elsewhere, and it would be
// more work to re-order everything and make them private.
$('#search_exit').on('click', exports.clear_search);
var query = $('#search_query');
query.on('focus', exports.focus_search)
.on('blur' , function () {
// The search query box is a visual cue as to
// whether search or narrowing is active. If
// the user blurs the search box, then we should
// update the search string to reflect the currect
// narrow (or lack of narrow).
//
// But we can't do this right away, because
// selecting something in the typeahead menu causes
// the box to lose focus a moment before.
//
// The workaround is to check 100ms later -- long
// enough for the search to have gone through, but
// short enough that the user won't notice (though
// really it would be OK if they did).
setTimeout(function () {
var search_string = narrow_state.search_string();
query.val(search_string);
exports.update_button_visibility();
}, 100);
});
};
exports.focus_search = function () {
// The search bar is not focused yet, but will be.
update_buttons_with_focus(true);
};
exports.initiate_search = function () {
$('#search_query').select();
};
exports.clear_search = function () {
narrow.deactivate();
$('#search_query').blur();
exports.update_button_visibility();
};
return exports;
}());
if (typeof module !== 'undefined') | {
module.exports = search;
} | conditional_block |
|
search.js | var search = (function () {
var exports = {};
function narrow_or_search_for_term(search_string) |
function update_buttons_with_focus(focused) {
var search_query = $('#search_query');
// Show buttons iff the search input is focused, or has non-empty contents,
// or we are narrowed.
if (focused
|| search_query.val()
|| narrow_state.active()) {
$('.search_button').prop('disabled', false);
} else {
$('.search_button').attr('disabled', 'disabled');
}
}
exports.update_button_visibility = function () {
update_buttons_with_focus($('#search_query').is(':focus'));
};
exports.initialize = function () {
// Data storage for the typeahead.
// This maps a search string to an object with a "description" field.
// (It's a bit of legacy that we have an object with only one important
// field. There's also a "search_string" field on each element that actually
// just represents the key of the hash, so it's redundant.)
var search_object = {};
$("#search_query").typeahead({
source: function (query) {
var suggestions = search_suggestion.get_suggestions(query);
// Update our global search_object hash
search_object = suggestions.lookup_table;
return suggestions.strings;
},
fixed: true,
items: 12,
helpOnEmptyStrings: true,
naturalSearch: true,
highlighter: function (item) {
var obj = search_object[item];
return obj.description;
},
matcher: function () {
return true;
},
updater: narrow_or_search_for_term,
sorter: function (items) {
return items;
},
});
$("#searchbox_form").keydown(function (e) {
exports.update_button_visibility();
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// Don't submit the form so that the typeahead can instead
// handle our Enter keypress. Any searching that needs
// to be done will be handled in the keyup.
e.preventDefault();
return false;
}
}).keyup(function (e) {
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// We just pressed enter and the box had focus, which
// means we didn't use the typeahead at all. In that
// case, we should act as though we're searching by
// operators. (The reason the other actions don't call
// this codepath is that they first all blur the box to
// indicate that they've done what they need to do)
narrow.activate(Filter.parse(search_query_box.val()), {trigger: 'search'});
search_query_box.blur();
update_buttons_with_focus(false);
}
});
// Some of these functions don't actually need to be exported,
// but the code was moved here from elsewhere, and it would be
// more work to re-order everything and make them private.
$('#search_exit').on('click', exports.clear_search);
var query = $('#search_query');
query.on('focus', exports.focus_search)
.on('blur' , function () {
// The search query box is a visual cue as to
// whether search or narrowing is active. If
// the user blurs the search box, then we should
// update the search string to reflect the currect
// narrow (or lack of narrow).
//
// But we can't do this right away, because
// selecting something in the typeahead menu causes
// the box to lose focus a moment before.
//
// The workaround is to check 100ms later -- long
// enough for the search to have gone through, but
// short enough that the user won't notice (though
// really it would be OK if they did).
setTimeout(function () {
var search_string = narrow_state.search_string();
query.val(search_string);
exports.update_button_visibility();
}, 100);
});
};
exports.focus_search = function () {
// The search bar is not focused yet, but will be.
update_buttons_with_focus(true);
};
exports.initiate_search = function () {
$('#search_query').select();
};
exports.clear_search = function () {
narrow.deactivate();
$('#search_query').blur();
exports.update_button_visibility();
};
return exports;
}());
if (typeof module !== 'undefined') {
module.exports = search;
}
| {
var search_query_box = $("#search_query");
ui_util.change_tab_to('#home');
var operators = Filter.parse(search_string);
narrow.activate(operators, {trigger: 'search'});
// It's sort of annoying that this is not in a position to
// blur the search box, because it means that Esc won't
// unnarrow, it'll leave the searchbox.
// Narrowing will have already put some operators in the search box,
// so leave the current text in.
search_query_box.blur();
return search_query_box.val();
} | identifier_body |
search.js | var search = (function () {
var exports = {};
function narrow_or_search_for_term(search_string) {
var search_query_box = $("#search_query");
ui_util.change_tab_to('#home');
var operators = Filter.parse(search_string);
narrow.activate(operators, {trigger: 'search'});
// It's sort of annoying that this is not in a position to
// blur the search box, because it means that Esc won't
// unnarrow, it'll leave the searchbox.
// Narrowing will have already put some operators in the search box,
// so leave the current text in.
search_query_box.blur();
return search_query_box.val();
}
function update_buttons_with_focus(focused) {
var search_query = $('#search_query');
// Show buttons iff the search input is focused, or has non-empty contents,
// or we are narrowed.
if (focused
|| search_query.val()
|| narrow_state.active()) {
$('.search_button').prop('disabled', false);
} else {
$('.search_button').attr('disabled', 'disabled');
}
}
exports.update_button_visibility = function () {
update_buttons_with_focus($('#search_query').is(':focus'));
};
exports.initialize = function () {
// Data storage for the typeahead.
// This maps a search string to an object with a "description" field.
// (It's a bit of legacy that we have an object with only one important
// field. There's also a "search_string" field on each element that actually
// just represents the key of the hash, so it's redundant.)
var search_object = {};
$("#search_query").typeahead({
source: function (query) {
var suggestions = search_suggestion.get_suggestions(query);
// Update our global search_object hash
search_object = suggestions.lookup_table;
return suggestions.strings;
},
fixed: true,
items: 12,
helpOnEmptyStrings: true,
naturalSearch: true,
highlighter: function (item) {
var obj = search_object[item];
return obj.description;
},
matcher: function () {
return true;
},
updater: narrow_or_search_for_term,
sorter: function (items) {
return items;
},
});
$("#searchbox_form").keydown(function (e) {
exports.update_button_visibility();
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// Don't submit the form so that the typeahead can instead
// handle our Enter keypress. Any searching that needs
// to be done will be handled in the keyup.
e.preventDefault();
return false;
}
}).keyup(function (e) {
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// We just pressed enter and the box had focus, which
// means we didn't use the typeahead at all. In that
// case, we should act as though we're searching by
// operators. (The reason the other actions don't call
// this codepath is that they first all blur the box to
// indicate that they've done what they need to do)
narrow.activate(Filter.parse(search_query_box.val()), {trigger: 'search'});
search_query_box.blur();
update_buttons_with_focus(false);
}
});
// Some of these functions don't actually need to be exported,
// but the code was moved here from elsewhere, and it would be
// more work to re-order everything and make them private.
$('#search_exit').on('click', exports.clear_search);
var query = $('#search_query');
query.on('focus', exports.focus_search)
.on('blur' , function () {
// The search query box is a visual cue as to
// whether search or narrowing is active. If
// the user blurs the search box, then we should
// update the search string to reflect the currect
// narrow (or lack of narrow).
//
// But we can't do this right away, because
// selecting something in the typeahead menu causes
// the box to lose focus a moment before.
//
// The workaround is to check 100ms later -- long
// enough for the search to have gone through, but
// short enough that the user won't notice (though
// really it would be OK if they did).
setTimeout(function () {
var search_string = narrow_state.search_string();
query.val(search_string);
exports.update_button_visibility();
}, 100);
});
};
exports.focus_search = function () {
// The search bar is not focused yet, but will be.
update_buttons_with_focus(true);
};
| $('#search_query').select();
};
exports.clear_search = function () {
narrow.deactivate();
$('#search_query').blur();
exports.update_button_visibility();
};
return exports;
}());
if (typeof module !== 'undefined') {
module.exports = search;
} | exports.initiate_search = function () { | random_line_split |
search.js | var search = (function () {
var exports = {};
function | (search_string) {
var search_query_box = $("#search_query");
ui_util.change_tab_to('#home');
var operators = Filter.parse(search_string);
narrow.activate(operators, {trigger: 'search'});
// It's sort of annoying that this is not in a position to
// blur the search box, because it means that Esc won't
// unnarrow, it'll leave the searchbox.
// Narrowing will have already put some operators in the search box,
// so leave the current text in.
search_query_box.blur();
return search_query_box.val();
}
function update_buttons_with_focus(focused) {
var search_query = $('#search_query');
// Show buttons iff the search input is focused, or has non-empty contents,
// or we are narrowed.
if (focused
|| search_query.val()
|| narrow_state.active()) {
$('.search_button').prop('disabled', false);
} else {
$('.search_button').attr('disabled', 'disabled');
}
}
exports.update_button_visibility = function () {
update_buttons_with_focus($('#search_query').is(':focus'));
};
exports.initialize = function () {
// Data storage for the typeahead.
// This maps a search string to an object with a "description" field.
// (It's a bit of legacy that we have an object with only one important
// field. There's also a "search_string" field on each element that actually
// just represents the key of the hash, so it's redundant.)
var search_object = {};
$("#search_query").typeahead({
source: function (query) {
var suggestions = search_suggestion.get_suggestions(query);
// Update our global search_object hash
search_object = suggestions.lookup_table;
return suggestions.strings;
},
fixed: true,
items: 12,
helpOnEmptyStrings: true,
naturalSearch: true,
highlighter: function (item) {
var obj = search_object[item];
return obj.description;
},
matcher: function () {
return true;
},
updater: narrow_or_search_for_term,
sorter: function (items) {
return items;
},
});
$("#searchbox_form").keydown(function (e) {
exports.update_button_visibility();
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// Don't submit the form so that the typeahead can instead
// handle our Enter keypress. Any searching that needs
// to be done will be handled in the keyup.
e.preventDefault();
return false;
}
}).keyup(function (e) {
var code = e.which;
var search_query_box = $("#search_query");
if (code === 13 && search_query_box.is(":focus")) {
// We just pressed enter and the box had focus, which
// means we didn't use the typeahead at all. In that
// case, we should act as though we're searching by
// operators. (The reason the other actions don't call
// this codepath is that they first all blur the box to
// indicate that they've done what they need to do)
narrow.activate(Filter.parse(search_query_box.val()), {trigger: 'search'});
search_query_box.blur();
update_buttons_with_focus(false);
}
});
// Some of these functions don't actually need to be exported,
// but the code was moved here from elsewhere, and it would be
// more work to re-order everything and make them private.
$('#search_exit').on('click', exports.clear_search);
var query = $('#search_query');
query.on('focus', exports.focus_search)
.on('blur' , function () {
// The search query box is a visual cue as to
// whether search or narrowing is active. If
// the user blurs the search box, then we should
// update the search string to reflect the currect
// narrow (or lack of narrow).
//
// But we can't do this right away, because
// selecting something in the typeahead menu causes
// the box to lose focus a moment before.
//
// The workaround is to check 100ms later -- long
// enough for the search to have gone through, but
// short enough that the user won't notice (though
// really it would be OK if they did).
setTimeout(function () {
var search_string = narrow_state.search_string();
query.val(search_string);
exports.update_button_visibility();
}, 100);
});
};
exports.focus_search = function () {
// The search bar is not focused yet, but will be.
update_buttons_with_focus(true);
};
exports.initiate_search = function () {
$('#search_query').select();
};
exports.clear_search = function () {
narrow.deactivate();
$('#search_query').blur();
exports.update_button_visibility();
};
return exports;
}());
if (typeof module !== 'undefined') {
module.exports = search;
}
| narrow_or_search_for_term | identifier_name |
setSecret.py | # Copyright (C) 2010-2012 Red Hat, Inc.
# This work is licensed under the GNU GPLv2 or later.
# Test secret series command, check the set secret value, get secret value
import base64
from libvirttestapi.src import sharedmod
from xml.dom import minidom
from libvirt import libvirtError
required_params = ('secretUUID', 'value',)
optional_params = {}
def check_setSecret(value, secretobj, logger):
"""check whether the secret value is set correctly
"""
secretvalue = secretobj.value(0)
original_data = base64.decodestring(secretvalue).decode()
if original_data == value:
|
else:
logger.info("Set secret value failed")
return 1
def setSecret(params):
"""set a secret value
"""
logger = params['logger']
secretUUID = params['secretUUID']
value = params['value']
data = base64.encodestring(value.encode()).decode('ascii')
try:
conn = sharedmod.libvirtobj['conn']
secretobj = conn.secretLookupByUUIDString(secretUUID)
private = minidom.parseString(secretobj.XMLDesc(0)).\
getElementsByTagName('secret')[0].getAttribute('private')
secretobj.setValue(data, 0)
"""if private is no, the value of secret can be get; if the private is
yes, can't get the value of the secret.
"""
if private == 'no':
logger.info("the value of secret %s is %s" % (secretUUID,
secretobj.value(0).decode()))
ret = check_setSecret(value, secretobj, logger)
return ret
else:
logger.info("the value of secret %s is %s" % (secretUUID, data))
logger.info("can not check the value via libvirt since secret %s "
"is private" % secretUUID)
return 0
except libvirtError as e:
logger.error("libvirt call failed: " + e.get_error_message())
return 1
| logger.info("Set secret value successfully")
return 0 | conditional_block |
setSecret.py | # Copyright (C) 2010-2012 Red Hat, Inc.
# This work is licensed under the GNU GPLv2 or later.
# Test secret series command, check the set secret value, get secret value
import base64
from libvirttestapi.src import sharedmod
from xml.dom import minidom
from libvirt import libvirtError
required_params = ('secretUUID', 'value',)
optional_params = {}
def check_setSecret(value, secretobj, logger):
|
def setSecret(params):
"""set a secret value
"""
logger = params['logger']
secretUUID = params['secretUUID']
value = params['value']
data = base64.encodestring(value.encode()).decode('ascii')
try:
conn = sharedmod.libvirtobj['conn']
secretobj = conn.secretLookupByUUIDString(secretUUID)
private = minidom.parseString(secretobj.XMLDesc(0)).\
getElementsByTagName('secret')[0].getAttribute('private')
secretobj.setValue(data, 0)
"""if private is no, the value of secret can be get; if the private is
yes, can't get the value of the secret.
"""
if private == 'no':
logger.info("the value of secret %s is %s" % (secretUUID,
secretobj.value(0).decode()))
ret = check_setSecret(value, secretobj, logger)
return ret
else:
logger.info("the value of secret %s is %s" % (secretUUID, data))
logger.info("can not check the value via libvirt since secret %s "
"is private" % secretUUID)
return 0
except libvirtError as e:
logger.error("libvirt call failed: " + e.get_error_message())
return 1
| """check whether the secret value is set correctly
"""
secretvalue = secretobj.value(0)
original_data = base64.decodestring(secretvalue).decode()
if original_data == value:
logger.info("Set secret value successfully")
return 0
else:
logger.info("Set secret value failed")
return 1 | identifier_body |
setSecret.py | # Copyright (C) 2010-2012 Red Hat, Inc.
# This work is licensed under the GNU GPLv2 or later.
# Test secret series command, check the set secret value, get secret value
import base64
from libvirttestapi.src import sharedmod
from xml.dom import minidom
from libvirt import libvirtError
required_params = ('secretUUID', 'value',)
optional_params = {}
def check_setSecret(value, secretobj, logger):
"""check whether the secret value is set correctly
"""
secretvalue = secretobj.value(0)
original_data = base64.decodestring(secretvalue).decode()
if original_data == value:
logger.info("Set secret value successfully")
return 0
else:
logger.info("Set secret value failed")
return 1
def setSecret(params):
"""set a secret value
"""
logger = params['logger']
secretUUID = params['secretUUID']
value = params['value']
data = base64.encodestring(value.encode()).decode('ascii')
try:
conn = sharedmod.libvirtobj['conn']
secretobj = conn.secretLookupByUUIDString(secretUUID) | private = minidom.parseString(secretobj.XMLDesc(0)).\
getElementsByTagName('secret')[0].getAttribute('private')
secretobj.setValue(data, 0)
"""if private is no, the value of secret can be get; if the private is
yes, can't get the value of the secret.
"""
if private == 'no':
logger.info("the value of secret %s is %s" % (secretUUID,
secretobj.value(0).decode()))
ret = check_setSecret(value, secretobj, logger)
return ret
else:
logger.info("the value of secret %s is %s" % (secretUUID, data))
logger.info("can not check the value via libvirt since secret %s "
"is private" % secretUUID)
return 0
except libvirtError as e:
logger.error("libvirt call failed: " + e.get_error_message())
return 1 | random_line_split |
|
setSecret.py | # Copyright (C) 2010-2012 Red Hat, Inc.
# This work is licensed under the GNU GPLv2 or later.
# Test secret series command, check the set secret value, get secret value
import base64
from libvirttestapi.src import sharedmod
from xml.dom import minidom
from libvirt import libvirtError
required_params = ('secretUUID', 'value',)
optional_params = {}
def check_setSecret(value, secretobj, logger):
"""check whether the secret value is set correctly
"""
secretvalue = secretobj.value(0)
original_data = base64.decodestring(secretvalue).decode()
if original_data == value:
logger.info("Set secret value successfully")
return 0
else:
logger.info("Set secret value failed")
return 1
def | (params):
"""set a secret value
"""
logger = params['logger']
secretUUID = params['secretUUID']
value = params['value']
data = base64.encodestring(value.encode()).decode('ascii')
try:
conn = sharedmod.libvirtobj['conn']
secretobj = conn.secretLookupByUUIDString(secretUUID)
private = minidom.parseString(secretobj.XMLDesc(0)).\
getElementsByTagName('secret')[0].getAttribute('private')
secretobj.setValue(data, 0)
"""if private is no, the value of secret can be get; if the private is
yes, can't get the value of the secret.
"""
if private == 'no':
logger.info("the value of secret %s is %s" % (secretUUID,
secretobj.value(0).decode()))
ret = check_setSecret(value, secretobj, logger)
return ret
else:
logger.info("the value of secret %s is %s" % (secretUUID, data))
logger.info("can not check the value via libvirt since secret %s "
"is private" % secretUUID)
return 0
except libvirtError as e:
logger.error("libvirt call failed: " + e.get_error_message())
return 1
| setSecret | identifier_name |
bind-by-move-neither-can-live-while-the-other-survives-4.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct X { x: (), }
impl Drop for X {
fn finalize(&self) {
error!("destructor runs"); |
fn main() {
let x = Some((X { x: () }, X { x: () }));
match x {
Some((_y, ref _z)) => { }, //~ ERROR cannot bind by-move and by-ref in the same pattern
None => fail!()
}
} | }
} | random_line_split |
bind-by-move-neither-can-live-while-the-other-survives-4.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct X { x: (), }
impl Drop for X {
fn finalize(&self) {
error!("destructor runs");
}
}
fn main() | {
let x = Some((X { x: () }, X { x: () }));
match x {
Some((_y, ref _z)) => { }, //~ ERROR cannot bind by-move and by-ref in the same pattern
None => fail!()
}
} | identifier_body |
|
bind-by-move-neither-can-live-while-the-other-survives-4.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct X { x: (), }
impl Drop for X {
fn | (&self) {
error!("destructor runs");
}
}
fn main() {
let x = Some((X { x: () }, X { x: () }));
match x {
Some((_y, ref _z)) => { }, //~ ERROR cannot bind by-move and by-ref in the same pattern
None => fail!()
}
}
| finalize | identifier_name |
bind-by-move-neither-can-live-while-the-other-survives-4.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct X { x: (), }
impl Drop for X {
fn finalize(&self) {
error!("destructor runs");
}
}
fn main() {
let x = Some((X { x: () }, X { x: () }));
match x {
Some((_y, ref _z)) => | , //~ ERROR cannot bind by-move and by-ref in the same pattern
None => fail!()
}
}
| { } | conditional_block |
views.py | from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
| from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form) | random_line_split |
|
views.py | from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
| model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form) | identifier_body |
|
views.py | from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def | (self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form)
| form_valid | identifier_name |
index.tsx | import React, { Fragment, ReactElement, ReactNode } from 'react';
type Node = Element | Text;
type Mark =
| 'bold'
| 'italic'
| 'underline'
| 'strikethrough'
| 'code'
| 'superscript'
| 'subscript'
| 'keyboard';
type Element = {
children: Node[];
[key: string]: unknown;
};
type Text = {
text: string;
[key: string]: unknown;
};
type Component<Props> = (props: Props) => ReactElement | null;
type OnlyChildrenComponent = Component<{ children: ReactNode }> | keyof JSX.IntrinsicElements;
type MarkRenderers = { [Key in Mark]: OnlyChildrenComponent };
interface Renderers {
inline: {
link: Component<{ children: ReactNode; href: string }> | 'a';
relationship: Component<{
relationship: string;
data: { id: string; label: string | undefined; data: Record<string, any> | undefined } | null;
}>;
} & MarkRenderers;
block: {
block: OnlyChildrenComponent;
paragraph: Component<{ children: ReactNode; textAlign: 'center' | 'end' | undefined }>;
blockquote: OnlyChildrenComponent;
code: Component<{ children: string }> | keyof JSX.IntrinsicElements;
layout: Component<{ layout: [number, ...number[]]; children: ReactElement[] }>;
divider: Component<{}> | keyof JSX.IntrinsicElements;
heading: Component<{
level: 1 | 2 | 3 | 4 | 5 | 6;
children: ReactNode;
textAlign: 'center' | 'end' | undefined;
}>;
list: Component<{ type: 'ordered' | 'unordered'; children: ReactElement[] }>;
};
}
export const defaultRenderers: Renderers = {
inline: {
bold: 'strong',
code: 'code',
keyboard: 'kbd',
strikethrough: 's',
italic: 'em',
link: 'a',
subscript: 'sub',
superscript: 'sup',
underline: 'u',
relationship: ({ data }) => {
return <span>{data?.label || data?.id}</span>;
},
},
block: {
block: 'div',
blockquote: 'blockquote',
paragraph: ({ children, textAlign }) => {
return <p style={{ textAlign }}>{children}</p>;
},
divider: 'hr',
heading: ({ level, children, textAlign }) => {
let Heading = `h${level}` as 'h1';
return <Heading style={{ textAlign }} children={children} />;
},
code: 'pre',
list: ({ children, type }) => {
const List = type === 'ordered' ? 'ol' : 'ul';
return (
<List>
{children.map((x, i) => (
<li key={i}>{x}</li>
))}
</List>
);
},
layout: ({ children, layout }) => {
return (
<div
style={{
display: 'grid',
gridTemplateColumns: layout.map(x => `${x}fr`).join(' '),
}}
>
{children.map((element, i) => (
<div key={i}>{element}</div>
))}
</div>
);
},
},
};
function DocumentNode({
node: _node,
componentBlocks,
renderers,
}: {
node: Element | Text;
renderers: Renderers;
// TODO: allow inferring from the component blocks
componentBlocks: Record<string, Component<any>>;
}): ReactElement {
if (typeof _node.text === 'string') {
let child = <Fragment>{_node.text}</Fragment>;
(Object.keys(renderers.inline) as (keyof typeof renderers.inline)[]).forEach(markName => {
if (markName !== 'link' && markName !== 'relationship' && _node[markName]) {
const Mark = renderers.inline[markName];
child = <Mark>{child}</Mark>;
}
});
return child;
}
const node = _node as Element;
const children = node.children.map((x, i) => (
<DocumentNode node={x} componentBlocks={componentBlocks} renderers={renderers} key={i} />
));
switch (node.type as string) {
case 'blockquote': {
return <renderers.block.blockquote children={children} />;
}
case 'paragraph': {
return <renderers.block.paragraph textAlign={node.textAlign as any} children={children} />;
} | case 'code': {
if (
node.children.length === 1 &&
node.children[0] &&
typeof node.children[0].text === 'string'
) {
return <renderers.block.code>{node.children[0].text}</renderers.block.code>;
}
break;
}
case 'layout': {
return <renderers.block.layout layout={node.layout as any} children={children} />;
}
case 'divider': {
return <renderers.block.divider />;
}
case 'heading': {
return (
<renderers.block.heading
textAlign={node.textAlign as any}
level={node.level as any}
children={children}
/>
);
}
case 'component-block': {
const Comp = componentBlocks[node.component as string];
if (Comp) {
const props = createComponentBlockProps(node, children);
return (
<renderers.block.block>
<Comp {...props} />
</renderers.block.block>
);
}
break;
}
case 'ordered-list':
case 'unordered-list': {
return (
<renderers.block.list
children={children}
type={node.type === 'ordered-list' ? 'ordered' : 'unordered'}
/>
);
}
case 'relationship': {
const data = node.data as any;
return (
<renderers.inline.relationship
relationship={node.relationship as string}
data={data ? { id: data.id, label: data.label, data: data.data } : null}
/>
);
}
case 'link': {
return <renderers.inline.link href={node.href as string}>{children}</renderers.inline.link>;
}
}
return <Fragment>{children}</Fragment>;
}
function set(obj: Record<string, any>, propPath: (string | number)[], value: any) {
if (propPath.length === 1) {
obj[propPath[0]] = value;
} else {
let firstElement = propPath.shift()!;
set(obj[firstElement], propPath, value);
}
}
function createComponentBlockProps(node: Element, children: ReactElement[]) {
const formProps = JSON.parse(JSON.stringify(node.props));
node.children.forEach((child, i) => {
if (child.propPath) {
const propPath = [...(child.propPath as any)];
set(formProps, propPath, children[i]);
}
});
return formProps;
}
export type DocumentRendererProps<
ComponentBlocks extends Record<string, Component<any>> = Record<string, Component<any>>
> = {
document: Element[];
renderers?: { inline?: Partial<Renderers['inline']>; block?: Partial<Renderers['block']> };
componentBlocks?: ComponentBlocks;
};
export function DocumentRenderer<ComponentBlocks extends Record<string, Component<any>>>(
props: DocumentRendererProps<ComponentBlocks>
) {
const renderers = {
inline: { ...defaultRenderers.inline, ...props.renderers?.inline },
block: { ...defaultRenderers.block, ...props.renderers?.block },
};
const componentBlocks = props.componentBlocks || {};
return (
<Fragment>
{props.document.map((x, i) => (
<DocumentNode node={x} componentBlocks={componentBlocks} renderers={renderers} key={i} />
))}
</Fragment>
);
} | random_line_split |
|
index.tsx | import React, { Fragment, ReactElement, ReactNode } from 'react';
type Node = Element | Text;
type Mark =
| 'bold'
| 'italic'
| 'underline'
| 'strikethrough'
| 'code'
| 'superscript'
| 'subscript'
| 'keyboard';
type Element = {
children: Node[];
[key: string]: unknown;
};
type Text = {
text: string;
[key: string]: unknown;
};
type Component<Props> = (props: Props) => ReactElement | null;
type OnlyChildrenComponent = Component<{ children: ReactNode }> | keyof JSX.IntrinsicElements;
type MarkRenderers = { [Key in Mark]: OnlyChildrenComponent };
interface Renderers {
inline: {
link: Component<{ children: ReactNode; href: string }> | 'a';
relationship: Component<{
relationship: string;
data: { id: string; label: string | undefined; data: Record<string, any> | undefined } | null;
}>;
} & MarkRenderers;
block: {
block: OnlyChildrenComponent;
paragraph: Component<{ children: ReactNode; textAlign: 'center' | 'end' | undefined }>;
blockquote: OnlyChildrenComponent;
code: Component<{ children: string }> | keyof JSX.IntrinsicElements;
layout: Component<{ layout: [number, ...number[]]; children: ReactElement[] }>;
divider: Component<{}> | keyof JSX.IntrinsicElements;
heading: Component<{
level: 1 | 2 | 3 | 4 | 5 | 6;
children: ReactNode;
textAlign: 'center' | 'end' | undefined;
}>;
list: Component<{ type: 'ordered' | 'unordered'; children: ReactElement[] }>;
};
}
export const defaultRenderers: Renderers = {
inline: {
bold: 'strong',
code: 'code',
keyboard: 'kbd',
strikethrough: 's',
italic: 'em',
link: 'a',
subscript: 'sub',
superscript: 'sup',
underline: 'u',
relationship: ({ data }) => {
return <span>{data?.label || data?.id}</span>;
},
},
block: {
block: 'div',
blockquote: 'blockquote',
paragraph: ({ children, textAlign }) => {
return <p style={{ textAlign }}>{children}</p>;
},
divider: 'hr',
heading: ({ level, children, textAlign }) => {
let Heading = `h${level}` as 'h1';
return <Heading style={{ textAlign }} children={children} />;
},
code: 'pre',
list: ({ children, type }) => {
const List = type === 'ordered' ? 'ol' : 'ul';
return (
<List>
{children.map((x, i) => (
<li key={i}>{x}</li>
))}
</List>
);
},
layout: ({ children, layout }) => {
return (
<div
style={{
display: 'grid',
gridTemplateColumns: layout.map(x => `${x}fr`).join(' '),
}}
>
{children.map((element, i) => (
<div key={i}>{element}</div>
))}
</div>
);
},
},
};
function DocumentNode({
node: _node,
componentBlocks,
renderers,
}: {
node: Element | Text;
renderers: Renderers;
// TODO: allow inferring from the component blocks
componentBlocks: Record<string, Component<any>>;
}): ReactElement {
if (typeof _node.text === 'string') {
let child = <Fragment>{_node.text}</Fragment>;
(Object.keys(renderers.inline) as (keyof typeof renderers.inline)[]).forEach(markName => {
if (markName !== 'link' && markName !== 'relationship' && _node[markName]) {
const Mark = renderers.inline[markName];
child = <Mark>{child}</Mark>;
}
});
return child;
}
const node = _node as Element;
const children = node.children.map((x, i) => (
<DocumentNode node={x} componentBlocks={componentBlocks} renderers={renderers} key={i} />
));
switch (node.type as string) {
case 'blockquote': {
return <renderers.block.blockquote children={children} />;
}
case 'paragraph': {
return <renderers.block.paragraph textAlign={node.textAlign as any} children={children} />;
}
case 'code': {
if (
node.children.length === 1 &&
node.children[0] &&
typeof node.children[0].text === 'string'
) {
return <renderers.block.code>{node.children[0].text}</renderers.block.code>;
}
break;
}
case 'layout': {
return <renderers.block.layout layout={node.layout as any} children={children} />;
}
case 'divider': {
return <renderers.block.divider />;
}
case 'heading': {
return (
<renderers.block.heading
textAlign={node.textAlign as any}
level={node.level as any}
children={children}
/>
);
}
case 'component-block': {
const Comp = componentBlocks[node.component as string];
if (Comp) {
const props = createComponentBlockProps(node, children);
return (
<renderers.block.block>
<Comp {...props} />
</renderers.block.block>
);
}
break;
}
case 'ordered-list':
case 'unordered-list': {
return (
<renderers.block.list
children={children}
type={node.type === 'ordered-list' ? 'ordered' : 'unordered'}
/>
);
}
case 'relationship': {
const data = node.data as any;
return (
<renderers.inline.relationship
relationship={node.relationship as string}
data={data ? { id: data.id, label: data.label, data: data.data } : null}
/>
);
}
case 'link': {
return <renderers.inline.link href={node.href as string}>{children}</renderers.inline.link>;
}
}
return <Fragment>{children}</Fragment>;
}
function set(obj: Record<string, any>, propPath: (string | number)[], value: any) {
if (propPath.length === 1) | else {
let firstElement = propPath.shift()!;
set(obj[firstElement], propPath, value);
}
}
function createComponentBlockProps(node: Element, children: ReactElement[]) {
const formProps = JSON.parse(JSON.stringify(node.props));
node.children.forEach((child, i) => {
if (child.propPath) {
const propPath = [...(child.propPath as any)];
set(formProps, propPath, children[i]);
}
});
return formProps;
}
export type DocumentRendererProps<
ComponentBlocks extends Record<string, Component<any>> = Record<string, Component<any>>
> = {
document: Element[];
renderers?: { inline?: Partial<Renderers['inline']>; block?: Partial<Renderers['block']> };
componentBlocks?: ComponentBlocks;
};
export function DocumentRenderer<ComponentBlocks extends Record<string, Component<any>>>(
props: DocumentRendererProps<ComponentBlocks>
) {
const renderers = {
inline: { ...defaultRenderers.inline, ...props.renderers?.inline },
block: { ...defaultRenderers.block, ...props.renderers?.block },
};
const componentBlocks = props.componentBlocks || {};
return (
<Fragment>
{props.document.map((x, i) => (
<DocumentNode node={x} componentBlocks={componentBlocks} renderers={renderers} key={i} />
))}
</Fragment>
);
}
| {
obj[propPath[0]] = value;
} | conditional_block |
events_receiver.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <[email protected]>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import zmq
import json
import logging as log
from zmq.eventloop import ioloop, zmqstream
from laniakea.msgstream import verify_event_message, event_message_is_valid_and_signed
class EventsReceiver:
'''
Lighthouse module handling event stream submissions,
registering them and publishing them to the world.
'''
def __init__(self, endpoint, pub_queue):
from glob import glob
from laniakea.localconfig import LocalConfig
from laniakea.msgstream import keyfile_read_verify_key
self._socket = None
self._ctx = zmq.Context.instance()
self._pub_queue = pub_queue
self._endpoint = endpoint
self._trusted_keys = {}
# TODO: Implement auto-reloading of valid keys list if directory changes
for keyfname in glob(os.path.join(LocalConfig().trusted_curve_keys_dir, '*')):
signer_id, verify_key = keyfile_read_verify_key(keyfname)
if signer_id and verify_key:
self._trusted_keys[signer_id] = verify_key
def | (self, socket, msg):
data = str(msg[1], 'utf-8', 'replace')
try:
event = json.loads(data)
except json.JSONDecodeError as e:
# we ignore invalid requests
log.info('Received invalid JSON message from sender: %s (%s)', data if len(data) > 1 else msg, str(e))
return
# check if the message is actually valid and can be processed
if not event_message_is_valid_and_signed(event):
# we currently just silently ignore invalid submissions
return
signatures = event.get('signatures')
signature_checked = False
for signer in signatures.keys():
key = self._trusted_keys.get(signer)
if not key:
continue
try:
verify_event_message(signer, event, key, assume_valid=True)
except Exception as e:
log.info('Invalid signature on event ({}): {}'.format(str(e), str(event)))
return
# if we are here, we verified a signature without issues, which means
# the message is legit and we can sign it ourselves and publish it
signature_checked = True
if not signature_checked:
log.info('Unable to verify signature on event: {}'.format(str(event)))
return
# now publish the event to the world
self._pub_queue.put([bytes(event['tag'], 'utf-8'),
bytes(data, 'utf-8')])
def run(self):
if self._socket:
log.warning('Tried to run an already running event receiver again.')
return
self._socket = self._ctx.socket(zmq.ROUTER)
self._socket.bind(self._endpoint)
server_stream = zmqstream.ZMQStream(self._socket)
server_stream.on_recv_stream(self._event_message_received)
ioloop.IOLoop.instance().start()
| _event_message_received | identifier_name |
events_receiver.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <[email protected]>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import zmq
import json
import logging as log
from zmq.eventloop import ioloop, zmqstream
from laniakea.msgstream import verify_event_message, event_message_is_valid_and_signed
class EventsReceiver:
| '''
Lighthouse module handling event stream submissions,
registering them and publishing them to the world.
'''
def __init__(self, endpoint, pub_queue):
from glob import glob
from laniakea.localconfig import LocalConfig
from laniakea.msgstream import keyfile_read_verify_key
self._socket = None
self._ctx = zmq.Context.instance()
self._pub_queue = pub_queue
self._endpoint = endpoint
self._trusted_keys = {}
# TODO: Implement auto-reloading of valid keys list if directory changes
for keyfname in glob(os.path.join(LocalConfig().trusted_curve_keys_dir, '*')):
signer_id, verify_key = keyfile_read_verify_key(keyfname)
if signer_id and verify_key:
self._trusted_keys[signer_id] = verify_key
def _event_message_received(self, socket, msg):
data = str(msg[1], 'utf-8', 'replace')
try:
event = json.loads(data)
except json.JSONDecodeError as e:
# we ignore invalid requests
log.info('Received invalid JSON message from sender: %s (%s)', data if len(data) > 1 else msg, str(e))
return
# check if the message is actually valid and can be processed
if not event_message_is_valid_and_signed(event):
# we currently just silently ignore invalid submissions
return
signatures = event.get('signatures')
signature_checked = False
for signer in signatures.keys():
key = self._trusted_keys.get(signer)
if not key:
continue
try:
verify_event_message(signer, event, key, assume_valid=True)
except Exception as e:
log.info('Invalid signature on event ({}): {}'.format(str(e), str(event)))
return
# if we are here, we verified a signature without issues, which means
# the message is legit and we can sign it ourselves and publish it
signature_checked = True
if not signature_checked:
log.info('Unable to verify signature on event: {}'.format(str(event)))
return
# now publish the event to the world
self._pub_queue.put([bytes(event['tag'], 'utf-8'),
bytes(data, 'utf-8')])
def run(self):
if self._socket:
log.warning('Tried to run an already running event receiver again.')
return
self._socket = self._ctx.socket(zmq.ROUTER)
self._socket.bind(self._endpoint)
server_stream = zmqstream.ZMQStream(self._socket)
server_stream.on_recv_stream(self._event_message_received)
ioloop.IOLoop.instance().start() | identifier_body |
|
events_receiver.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <[email protected]>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import zmq
import json
import logging as log
from zmq.eventloop import ioloop, zmqstream
from laniakea.msgstream import verify_event_message, event_message_is_valid_and_signed
class EventsReceiver:
'''
Lighthouse module handling event stream submissions,
registering them and publishing them to the world.
'''
def __init__(self, endpoint, pub_queue):
from glob import glob
from laniakea.localconfig import LocalConfig
from laniakea.msgstream import keyfile_read_verify_key
self._socket = None
self._ctx = zmq.Context.instance()
self._pub_queue = pub_queue
self._endpoint = endpoint
self._trusted_keys = {}
# TODO: Implement auto-reloading of valid keys list if directory changes
for keyfname in glob(os.path.join(LocalConfig().trusted_curve_keys_dir, '*')):
signer_id, verify_key = keyfile_read_verify_key(keyfname)
if signer_id and verify_key:
self._trusted_keys[signer_id] = verify_key
def _event_message_received(self, socket, msg):
data = str(msg[1], 'utf-8', 'replace')
try:
event = json.loads(data)
except json.JSONDecodeError as e:
# we ignore invalid requests
log.info('Received invalid JSON message from sender: %s (%s)', data if len(data) > 1 else msg, str(e))
return
# check if the message is actually valid and can be processed
if not event_message_is_valid_and_signed(event):
# we currently just silently ignore invalid submissions
return
signatures = event.get('signatures')
signature_checked = False
for signer in signatures.keys():
key = self._trusted_keys.get(signer)
if not key:
continue
try:
verify_event_message(signer, event, key, assume_valid=True)
except Exception as e:
log.info('Invalid signature on event ({}): {}'.format(str(e), str(event)))
return
# if we are here, we verified a signature without issues, which means
# the message is legit and we can sign it ourselves and publish it
signature_checked = True
if not signature_checked:
|
# now publish the event to the world
self._pub_queue.put([bytes(event['tag'], 'utf-8'),
bytes(data, 'utf-8')])
def run(self):
if self._socket:
log.warning('Tried to run an already running event receiver again.')
return
self._socket = self._ctx.socket(zmq.ROUTER)
self._socket.bind(self._endpoint)
server_stream = zmqstream.ZMQStream(self._socket)
server_stream.on_recv_stream(self._event_message_received)
ioloop.IOLoop.instance().start()
| log.info('Unable to verify signature on event: {}'.format(str(event)))
return | conditional_block |
events_receiver.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <[email protected]>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import zmq
import json
import logging as log
from zmq.eventloop import ioloop, zmqstream
from laniakea.msgstream import verify_event_message, event_message_is_valid_and_signed
class EventsReceiver:
'''
Lighthouse module handling event stream submissions,
registering them and publishing them to the world.
'''
def __init__(self, endpoint, pub_queue):
from glob import glob
from laniakea.localconfig import LocalConfig
from laniakea.msgstream import keyfile_read_verify_key
self._socket = None
self._ctx = zmq.Context.instance()
self._pub_queue = pub_queue
self._endpoint = endpoint
self._trusted_keys = {}
| for keyfname in glob(os.path.join(LocalConfig().trusted_curve_keys_dir, '*')):
signer_id, verify_key = keyfile_read_verify_key(keyfname)
if signer_id and verify_key:
self._trusted_keys[signer_id] = verify_key
def _event_message_received(self, socket, msg):
data = str(msg[1], 'utf-8', 'replace')
try:
event = json.loads(data)
except json.JSONDecodeError as e:
# we ignore invalid requests
log.info('Received invalid JSON message from sender: %s (%s)', data if len(data) > 1 else msg, str(e))
return
# check if the message is actually valid and can be processed
if not event_message_is_valid_and_signed(event):
# we currently just silently ignore invalid submissions
return
signatures = event.get('signatures')
signature_checked = False
for signer in signatures.keys():
key = self._trusted_keys.get(signer)
if not key:
continue
try:
verify_event_message(signer, event, key, assume_valid=True)
except Exception as e:
log.info('Invalid signature on event ({}): {}'.format(str(e), str(event)))
return
# if we are here, we verified a signature without issues, which means
# the message is legit and we can sign it ourselves and publish it
signature_checked = True
if not signature_checked:
log.info('Unable to verify signature on event: {}'.format(str(event)))
return
# now publish the event to the world
self._pub_queue.put([bytes(event['tag'], 'utf-8'),
bytes(data, 'utf-8')])
def run(self):
if self._socket:
log.warning('Tried to run an already running event receiver again.')
return
self._socket = self._ctx.socket(zmq.ROUTER)
self._socket.bind(self._endpoint)
server_stream = zmqstream.ZMQStream(self._socket)
server_stream.on_recv_stream(self._event_message_received)
ioloop.IOLoop.instance().start() | # TODO: Implement auto-reloading of valid keys list if directory changes | random_line_split |
TodoChart.js | var colors = ["#FF6384", "#4BC0C0", "#FFCE56", "#E7E9ED", "#36A2EB", "#F38630", "#E0E4CC", "#69D2E7", "#F7464A", "#E2EAE9", "#D4CCC5", "#949FB1", "#4D5360"];
function getChartColors(len) {
if (len >= colors.length) {
| return colors.slice(0, len);
}
function getChartColor(i) {
if (i >= colors.length) {
return colors[i % colors.length];
}
return colors[i];
}
function getChartColors2(len) {
if (len >= colors.length) {
var newColor = new Array();
for (var i = 0; i < len; i++) {
newColor[i] = getChartColor(i);
}
return newColor;
}
return colors;
}
| return colors;
}
| conditional_block |
TodoChart.js | var colors = ["#FF6384", "#4BC0C0", "#FFCE56", "#E7E9ED", "#36A2EB", "#F38630", "#E0E4CC", "#69D2E7", "#F7464A", "#E2EAE9", "#D4CCC5", "#949FB1", "#4D5360"];
function getChartColors(len) {
if (len >= colors.length) {
return colors;
}
return colors.slice(0, len);
}
function getChartColor(i) {
if (i >= colors.length) {
return colors[i % colors.length];
}
return colors[i];
}
function ge | en) {
if (len >= colors.length) {
var newColor = new Array();
for (var i = 0; i < len; i++) {
newColor[i] = getChartColor(i);
}
return newColor;
}
return colors;
}
| tChartColors2(l | identifier_name |
TodoChart.js | var colors = ["#FF6384", "#4BC0C0", "#FFCE56", "#E7E9ED", "#36A2EB", "#F38630", "#E0E4CC", "#69D2E7", "#F7464A", "#E2EAE9", "#D4CCC5", "#949FB1", "#4D5360"];
function getChartColors(len) {
if (len >= colors.length) {
return colors;
}
return colors.slice(0, len);
}
function getChartColor(i) {
| function getChartColors2(len) {
if (len >= colors.length) {
var newColor = new Array();
for (var i = 0; i < len; i++) {
newColor[i] = getChartColor(i);
}
return newColor;
}
return colors;
}
| if (i >= colors.length) {
return colors[i % colors.length];
}
return colors[i];
}
| identifier_body |
TodoChart.js | var colors = ["#FF6384", "#4BC0C0", "#FFCE56", "#E7E9ED", "#36A2EB", "#F38630", "#E0E4CC", "#69D2E7", "#F7464A", "#E2EAE9", "#D4CCC5", "#949FB1", "#4D5360"];
function getChartColors(len) {
if (len >= colors.length) {
return colors;
}
return colors.slice(0, len);
}
function getChartColor(i) {
if (i >= colors.length) {
return colors[i % colors.length];
}
return colors[i];
}
function getChartColors2(len) {
if (len >= colors.length) {
var newColor = new Array();
for (var i = 0; i < len; i++) {
newColor[i] = getChartColor(i);
}
return newColor;
}
return colors; | } | random_line_split |
|
datepicker-zh-TW.js | /* Chinese initialisation for the jQuery UI date picker plugin. */
/* Written by Ressol ([email protected]). */
(function (factory) {
// AMD. Register as an anonymous module.
module.exports = factory(require('../datepicker'));;
}(function (datepicker) {
datepicker.regional['zh-TW'] = {
closeText: '\u95DC\u9589',
prevText: '<\u4E0A\u6708',
nextText: '\u4E0B\u6708>',
currentText: '\u4ECA\u5929',
monthNames: [
'\u4E00\u6708',
'\u4E8C\u6708',
'\u4E09\u6708',
'\u56DB\u6708',
'\u4E94\u6708',
'\u516D\u6708',
'\u4E03\u6708',
'\u516B\u6708',
'\u4E5D\u6708',
'\u5341\u6708', | '\u5341\u4E8C\u6708'
],
monthNamesShort: [
'\u4E00\u6708',
'\u4E8C\u6708',
'\u4E09\u6708',
'\u56DB\u6708',
'\u4E94\u6708',
'\u516D\u6708',
'\u4E03\u6708',
'\u516B\u6708',
'\u4E5D\u6708',
'\u5341\u6708',
'\u5341\u4E00\u6708',
'\u5341\u4E8C\u6708'
],
dayNames: [
'\u661F\u671F\u65E5',
'\u661F\u671F\u4E00',
'\u661F\u671F\u4E8C',
'\u661F\u671F\u4E09',
'\u661F\u671F\u56DB',
'\u661F\u671F\u4E94',
'\u661F\u671F\u516D'
],
dayNamesShort: [
'\u5468\u65E5',
'\u5468\u4E00',
'\u5468\u4E8C',
'\u5468\u4E09',
'\u5468\u56DB',
'\u5468\u4E94',
'\u5468\u516D'
],
dayNamesMin: [
'\u65E5',
'\u4E00',
'\u4E8C',
'\u4E09',
'\u56DB',
'\u4E94',
'\u516D'
],
weekHeader: '\u5468',
dateFormat: 'yy/mm/dd',
firstDay: 1,
isRTL: false,
showMonthAfterYear: true,
yearSuffix: '\u5E74'
};
datepicker.setDefaults(datepicker.regional['zh-TW']);
return datepicker.regional['zh-TW'];
})); | '\u5341\u4E00\u6708', | random_line_split |
ckan_search.py | import os
import time
import mechanize
CKAN = os.environ.get('CKAN', 'http://data.england.nhs.uk/')
class Transaction(object):
def __init__(self):
self.custom_timers = {}
def run(self):
# create a Browser instance
br = mechanize.Browser()
# don't bother with robots.txt
br.set_handle_robots(False)
# add a custom header so CKAN allows our requests
br.addheaders = [('User-agent', 'Mozilla/5.0 Compatible')]
# start the timer
start_timer = time.time()
# submit the request
br.open(CKAN)
# stop the timer
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Load_Front_Page'] = latency
# think-time
time.sleep(2)
# select first (zero-based) form on page
br.select_form(nr=0)
# set form field
br.form['q'] = 'england'
start_timer = time.time()
br.submit()
assert 'datasets found for' in br.response().read(), 'Search not performed'
# verify responses are valid
assert (br.response().code == 200), 'Bad HTTP Response'
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Search'] = latency
# think-time
time.sleep(2)
if __name__ == '__main__':
| trans = Transaction()
trans.run()
for timer in trans.custom_timers:
print '%s: %.5f secs' % (timer, trans.custom_timers[timer]) | conditional_block |
|
ckan_search.py | import os
import time
import mechanize
CKAN = os.environ.get('CKAN', 'http://data.england.nhs.uk/')
class Transaction(object):
def | (self):
self.custom_timers = {}
def run(self):
# create a Browser instance
br = mechanize.Browser()
# don't bother with robots.txt
br.set_handle_robots(False)
# add a custom header so CKAN allows our requests
br.addheaders = [('User-agent', 'Mozilla/5.0 Compatible')]
# start the timer
start_timer = time.time()
# submit the request
br.open(CKAN)
# stop the timer
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Load_Front_Page'] = latency
# think-time
time.sleep(2)
# select first (zero-based) form on page
br.select_form(nr=0)
# set form field
br.form['q'] = 'england'
start_timer = time.time()
br.submit()
assert 'datasets found for' in br.response().read(), 'Search not performed'
# verify responses are valid
assert (br.response().code == 200), 'Bad HTTP Response'
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Search'] = latency
# think-time
time.sleep(2)
if __name__ == '__main__':
trans = Transaction()
trans.run()
for timer in trans.custom_timers:
print '%s: %.5f secs' % (timer, trans.custom_timers[timer])
| __init__ | identifier_name |
ckan_search.py | import os
import time
import mechanize
CKAN = os.environ.get('CKAN', 'http://data.england.nhs.uk/')
class Transaction(object):
def __init__(self):
self.custom_timers = {}
def run(self):
# create a Browser instance
br = mechanize.Browser()
# don't bother with robots.txt
br.set_handle_robots(False)
# add a custom header so CKAN allows our requests
br.addheaders = [('User-agent', 'Mozilla/5.0 Compatible')]
# start the timer
start_timer = time.time()
# submit the request
br.open(CKAN)
# stop the timer
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Load_Front_Page'] = latency
# think-time
time.sleep(2)
# select first (zero-based) form on page
br.select_form(nr=0)
# set form field | br.submit()
assert 'datasets found for' in br.response().read(), 'Search not performed'
# verify responses are valid
assert (br.response().code == 200), 'Bad HTTP Response'
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Search'] = latency
# think-time
time.sleep(2)
if __name__ == '__main__':
trans = Transaction()
trans.run()
for timer in trans.custom_timers:
print '%s: %.5f secs' % (timer, trans.custom_timers[timer]) | br.form['q'] = 'england'
start_timer = time.time() | random_line_split |
ckan_search.py | import os
import time
import mechanize
CKAN = os.environ.get('CKAN', 'http://data.england.nhs.uk/')
class Transaction(object):
def __init__(self):
|
def run(self):
# create a Browser instance
br = mechanize.Browser()
# don't bother with robots.txt
br.set_handle_robots(False)
# add a custom header so CKAN allows our requests
br.addheaders = [('User-agent', 'Mozilla/5.0 Compatible')]
# start the timer
start_timer = time.time()
# submit the request
br.open(CKAN)
# stop the timer
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Load_Front_Page'] = latency
# think-time
time.sleep(2)
# select first (zero-based) form on page
br.select_form(nr=0)
# set form field
br.form['q'] = 'england'
start_timer = time.time()
br.submit()
assert 'datasets found for' in br.response().read(), 'Search not performed'
# verify responses are valid
assert (br.response().code == 200), 'Bad HTTP Response'
latency = time.time() - start_timer
# store the custom timer
self.custom_timers['Search'] = latency
# think-time
time.sleep(2)
if __name__ == '__main__':
trans = Transaction()
trans.run()
for timer in trans.custom_timers:
print '%s: %.5f secs' % (timer, trans.custom_timers[timer])
| self.custom_timers = {} | identifier_body |
filter_scheduler.py | # Copyright (c) 2011 Intel Corporation
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating shares.
You can customize this scheduler by specifying your own share Filters and
Weighing Functions.
"""
import operator
from manila import exception
from manila.openstack.common import importutils
from manila.openstack.common import log as logging
from manila.scheduler import driver
from manila.scheduler import scheduler_options
from oslo.config import cfg
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class FilterScheduler(driver.Scheduler):
"""Scheduler that can be used for filtering and weighing."""
def __init__(self, *args, **kwargs):
super(FilterScheduler, self).__init__(*args, **kwargs)
self.cost_function_cache = None
self.options = scheduler_options.SchedulerOptions()
self.max_attempts = self._max_attempts()
def schedule(self, context, topic, method, *args, **kwargs):
"""The schedule() contract requires we return the one
best-suited host for this request.
"""
self._schedule(context, topic, *args, **kwargs)
def _get_configuration_options(self):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def _post_select_populate_filter_properties(self, filter_properties,
host_state):
"""Add additional information to the filter properties after a host has
been selected by the scheduling process.
"""
# Add a retry entry for the selected volume backend:
self._add_retry_host(filter_properties, host_state.host)
def | (self, filter_properties, host):
"""Add a retry entry for the selected volume backend. In the event that
the request gets re-scheduled, this entry will signal that the given
backend has already been tried.
"""
retry = filter_properties.get('retry', None)
if not retry:
return
hosts = retry['hosts']
hosts.append(host)
def _max_attempts(self):
max_attempts = CONF.scheduler_max_attempts
if max_attempts < 1:
msg = _("Invalid value for 'scheduler_max_attempts', "
"must be >=1")
raise exception.InvalidParameterValue(err=msg)
return max_attempts
def schedule_create_share(self, context, request_spec, filter_properties):
weighed_host = self._schedule_share(context,
request_spec,
filter_properties)
if not weighed_host:
raise exception.NoValidHost(reason="")
host = weighed_host.obj.host
share_id = request_spec['share_id']
snapshot_id = request_spec['snapshot_id']
updated_share = driver.share_update_db(context, share_id, host)
self._post_select_populate_filter_properties(filter_properties,
weighed_host.obj)
# context is not serializable
filter_properties.pop('context', None)
self.share_rpcapi.create_share(context, updated_share, host,
request_spec=request_spec,
filter_properties=filter_properties,
snapshot_id=snapshot_id)
def _schedule_share(self, context, request_spec, filter_properties=None):
"""Returns a list of hosts that meet the required specs,
ordered by their fitness.
"""
elevated = context.elevated()
share_properties = request_spec['share_properties']
# Since Manila is using mixed filters from Oslo and it's own, which
# takes 'resource_XX' and 'volume_XX' as input respectively, copying
# 'volume_XX' to 'resource_XX' will make both filters happy.
resource_properties = share_properties.copy()
share_type = request_spec.get("share_type", {})
resource_type = request_spec.get("share_type", {})
request_spec.update({'resource_properties': resource_properties})
config_options = self._get_configuration_options()
if filter_properties is None:
filter_properties = {}
self._populate_retry_share(filter_properties, resource_properties)
filter_properties.update({'context': context,
'request_spec': request_spec,
'config_options': config_options,
'share_type': share_type,
'resource_type': resource_type
})
self.populate_filter_properties_share(request_spec, filter_properties)
# Find our local list of acceptable hosts by filtering and
# weighing our options. we virtually consume resources on
# it so subsequent selections can adjust accordingly.
# Note: remember, we are using an iterator here. So only
# traverse this list once.
hosts = self.host_manager.get_all_host_states_share(elevated)
# Filter local hosts based on requirements ...
hosts = self.host_manager.get_filtered_hosts(hosts,
filter_properties)
if not hosts:
return None
LOG.debug(_("Filtered share %(hosts)s") % locals())
# weighted_host = WeightedHost() ... the best
# host for the job.
weighed_hosts = self.host_manager.get_weighed_hosts(hosts,
filter_properties)
best_host = weighed_hosts[0]
LOG.debug(_("Choosing for share: %(best_host)s") % locals())
#NOTE(rushiagr): updating the available space parameters at same place
best_host.obj.consume_from_volume(share_properties)
return best_host
def _populate_retry_share(self, filter_properties, properties):
"""Populate filter properties with history of retries for this
request. If maximum retries is exceeded, raise NoValidHost.
"""
max_attempts = self.max_attempts
retry = filter_properties.pop('retry', {})
if max_attempts == 1:
# re-scheduling is disabled.
return
# retry is enabled, update attempt count:
if retry:
retry['num_attempts'] += 1
else:
retry = {
'num_attempts': 1,
'hosts': [] # list of share service hosts tried
}
filter_properties['retry'] = retry
share_id = properties.get('share_id')
self._log_share_error(share_id, retry)
if retry['num_attempts'] > max_attempts:
msg = _("Exceeded max scheduling attempts %(max_attempts)d for "
"share %(share_id)s") % locals()
raise exception.NoValidHost(reason=msg)
def _log_share_error(self, share_id, retry):
"""If the request contained an exception from a previous share
create operation, log it to aid debugging.
"""
exc = retry.pop('exc', None) # string-ified exception from share
if not exc:
return # no exception info from a previous attempt, skip
hosts = retry.get('hosts', None)
if not hosts:
return # no previously attempted hosts, skip
last_host = hosts[-1]
msg = _("Error scheduling %(share_id)s from last share-service: "
"%(last_host)s : %(exc)s") % locals()
LOG.error(msg)
def populate_filter_properties_share(self, request_spec,
filter_properties):
"""Stuff things into filter_properties. Can be overridden in a
subclass to add more data.
"""
shr = request_spec['share_properties']
filter_properties['size'] = shr['size']
filter_properties['availability_zone'] = shr.get('availability_zone')
filter_properties['user_id'] = shr.get('user_id')
filter_properties['metadata'] = shr.get('metadata')
| _add_retry_host | identifier_name |
filter_scheduler.py | # Copyright (c) 2011 Intel Corporation
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating shares.
You can customize this scheduler by specifying your own share Filters and
Weighing Functions.
"""
import operator
from manila import exception
from manila.openstack.common import importutils
from manila.openstack.common import log as logging
from manila.scheduler import driver
from manila.scheduler import scheduler_options
from oslo.config import cfg
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class FilterScheduler(driver.Scheduler):
"""Scheduler that can be used for filtering and weighing."""
def __init__(self, *args, **kwargs):
super(FilterScheduler, self).__init__(*args, **kwargs)
self.cost_function_cache = None
self.options = scheduler_options.SchedulerOptions()
self.max_attempts = self._max_attempts()
def schedule(self, context, topic, method, *args, **kwargs):
"""The schedule() contract requires we return the one
best-suited host for this request.
"""
self._schedule(context, topic, *args, **kwargs)
def _get_configuration_options(self):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def _post_select_populate_filter_properties(self, filter_properties,
host_state):
"""Add additional information to the filter properties after a host has
been selected by the scheduling process.
"""
# Add a retry entry for the selected volume backend:
self._add_retry_host(filter_properties, host_state.host)
def _add_retry_host(self, filter_properties, host):
"""Add a retry entry for the selected volume backend. In the event that
the request gets re-scheduled, this entry will signal that the given
backend has already been tried.
"""
retry = filter_properties.get('retry', None)
if not retry:
return
hosts = retry['hosts']
hosts.append(host)
def _max_attempts(self):
max_attempts = CONF.scheduler_max_attempts
if max_attempts < 1:
msg = _("Invalid value for 'scheduler_max_attempts', "
"must be >=1") |
def schedule_create_share(self, context, request_spec, filter_properties):
weighed_host = self._schedule_share(context,
request_spec,
filter_properties)
if not weighed_host:
raise exception.NoValidHost(reason="")
host = weighed_host.obj.host
share_id = request_spec['share_id']
snapshot_id = request_spec['snapshot_id']
updated_share = driver.share_update_db(context, share_id, host)
self._post_select_populate_filter_properties(filter_properties,
weighed_host.obj)
# context is not serializable
filter_properties.pop('context', None)
self.share_rpcapi.create_share(context, updated_share, host,
request_spec=request_spec,
filter_properties=filter_properties,
snapshot_id=snapshot_id)
def _schedule_share(self, context, request_spec, filter_properties=None):
"""Returns a list of hosts that meet the required specs,
ordered by their fitness.
"""
elevated = context.elevated()
share_properties = request_spec['share_properties']
# Since Manila is using mixed filters from Oslo and it's own, which
# takes 'resource_XX' and 'volume_XX' as input respectively, copying
# 'volume_XX' to 'resource_XX' will make both filters happy.
resource_properties = share_properties.copy()
share_type = request_spec.get("share_type", {})
resource_type = request_spec.get("share_type", {})
request_spec.update({'resource_properties': resource_properties})
config_options = self._get_configuration_options()
if filter_properties is None:
filter_properties = {}
self._populate_retry_share(filter_properties, resource_properties)
filter_properties.update({'context': context,
'request_spec': request_spec,
'config_options': config_options,
'share_type': share_type,
'resource_type': resource_type
})
self.populate_filter_properties_share(request_spec, filter_properties)
# Find our local list of acceptable hosts by filtering and
# weighing our options. we virtually consume resources on
# it so subsequent selections can adjust accordingly.
# Note: remember, we are using an iterator here. So only
# traverse this list once.
hosts = self.host_manager.get_all_host_states_share(elevated)
# Filter local hosts based on requirements ...
hosts = self.host_manager.get_filtered_hosts(hosts,
filter_properties)
if not hosts:
return None
LOG.debug(_("Filtered share %(hosts)s") % locals())
# weighted_host = WeightedHost() ... the best
# host for the job.
weighed_hosts = self.host_manager.get_weighed_hosts(hosts,
filter_properties)
best_host = weighed_hosts[0]
LOG.debug(_("Choosing for share: %(best_host)s") % locals())
#NOTE(rushiagr): updating the available space parameters at same place
best_host.obj.consume_from_volume(share_properties)
return best_host
def _populate_retry_share(self, filter_properties, properties):
"""Populate filter properties with history of retries for this
request. If maximum retries is exceeded, raise NoValidHost.
"""
max_attempts = self.max_attempts
retry = filter_properties.pop('retry', {})
if max_attempts == 1:
# re-scheduling is disabled.
return
# retry is enabled, update attempt count:
if retry:
retry['num_attempts'] += 1
else:
retry = {
'num_attempts': 1,
'hosts': [] # list of share service hosts tried
}
filter_properties['retry'] = retry
share_id = properties.get('share_id')
self._log_share_error(share_id, retry)
if retry['num_attempts'] > max_attempts:
msg = _("Exceeded max scheduling attempts %(max_attempts)d for "
"share %(share_id)s") % locals()
raise exception.NoValidHost(reason=msg)
def _log_share_error(self, share_id, retry):
"""If the request contained an exception from a previous share
create operation, log it to aid debugging.
"""
exc = retry.pop('exc', None) # string-ified exception from share
if not exc:
return # no exception info from a previous attempt, skip
hosts = retry.get('hosts', None)
if not hosts:
return # no previously attempted hosts, skip
last_host = hosts[-1]
msg = _("Error scheduling %(share_id)s from last share-service: "
"%(last_host)s : %(exc)s") % locals()
LOG.error(msg)
def populate_filter_properties_share(self, request_spec,
filter_properties):
"""Stuff things into filter_properties. Can be overridden in a
subclass to add more data.
"""
shr = request_spec['share_properties']
filter_properties['size'] = shr['size']
filter_properties['availability_zone'] = shr.get('availability_zone')
filter_properties['user_id'] = shr.get('user_id')
filter_properties['metadata'] = shr.get('metadata') | raise exception.InvalidParameterValue(err=msg)
return max_attempts | random_line_split |
filter_scheduler.py | # Copyright (c) 2011 Intel Corporation
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating shares.
You can customize this scheduler by specifying your own share Filters and
Weighing Functions.
"""
import operator
from manila import exception
from manila.openstack.common import importutils
from manila.openstack.common import log as logging
from manila.scheduler import driver
from manila.scheduler import scheduler_options
from oslo.config import cfg
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class FilterScheduler(driver.Scheduler):
"""Scheduler that can be used for filtering and weighing."""
def __init__(self, *args, **kwargs):
super(FilterScheduler, self).__init__(*args, **kwargs)
self.cost_function_cache = None
self.options = scheduler_options.SchedulerOptions()
self.max_attempts = self._max_attempts()
def schedule(self, context, topic, method, *args, **kwargs):
"""The schedule() contract requires we return the one
best-suited host for this request.
"""
self._schedule(context, topic, *args, **kwargs)
def _get_configuration_options(self):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def _post_select_populate_filter_properties(self, filter_properties,
host_state):
|
def _add_retry_host(self, filter_properties, host):
"""Add a retry entry for the selected volume backend. In the event that
the request gets re-scheduled, this entry will signal that the given
backend has already been tried.
"""
retry = filter_properties.get('retry', None)
if not retry:
return
hosts = retry['hosts']
hosts.append(host)
def _max_attempts(self):
max_attempts = CONF.scheduler_max_attempts
if max_attempts < 1:
msg = _("Invalid value for 'scheduler_max_attempts', "
"must be >=1")
raise exception.InvalidParameterValue(err=msg)
return max_attempts
def schedule_create_share(self, context, request_spec, filter_properties):
weighed_host = self._schedule_share(context,
request_spec,
filter_properties)
if not weighed_host:
raise exception.NoValidHost(reason="")
host = weighed_host.obj.host
share_id = request_spec['share_id']
snapshot_id = request_spec['snapshot_id']
updated_share = driver.share_update_db(context, share_id, host)
self._post_select_populate_filter_properties(filter_properties,
weighed_host.obj)
# context is not serializable
filter_properties.pop('context', None)
self.share_rpcapi.create_share(context, updated_share, host,
request_spec=request_spec,
filter_properties=filter_properties,
snapshot_id=snapshot_id)
def _schedule_share(self, context, request_spec, filter_properties=None):
"""Returns a list of hosts that meet the required specs,
ordered by their fitness.
"""
elevated = context.elevated()
share_properties = request_spec['share_properties']
# Since Manila is using mixed filters from Oslo and it's own, which
# takes 'resource_XX' and 'volume_XX' as input respectively, copying
# 'volume_XX' to 'resource_XX' will make both filters happy.
resource_properties = share_properties.copy()
share_type = request_spec.get("share_type", {})
resource_type = request_spec.get("share_type", {})
request_spec.update({'resource_properties': resource_properties})
config_options = self._get_configuration_options()
if filter_properties is None:
filter_properties = {}
self._populate_retry_share(filter_properties, resource_properties)
filter_properties.update({'context': context,
'request_spec': request_spec,
'config_options': config_options,
'share_type': share_type,
'resource_type': resource_type
})
self.populate_filter_properties_share(request_spec, filter_properties)
# Find our local list of acceptable hosts by filtering and
# weighing our options. we virtually consume resources on
# it so subsequent selections can adjust accordingly.
# Note: remember, we are using an iterator here. So only
# traverse this list once.
hosts = self.host_manager.get_all_host_states_share(elevated)
# Filter local hosts based on requirements ...
hosts = self.host_manager.get_filtered_hosts(hosts,
filter_properties)
if not hosts:
return None
LOG.debug(_("Filtered share %(hosts)s") % locals())
# weighted_host = WeightedHost() ... the best
# host for the job.
weighed_hosts = self.host_manager.get_weighed_hosts(hosts,
filter_properties)
best_host = weighed_hosts[0]
LOG.debug(_("Choosing for share: %(best_host)s") % locals())
#NOTE(rushiagr): updating the available space parameters at same place
best_host.obj.consume_from_volume(share_properties)
return best_host
def _populate_retry_share(self, filter_properties, properties):
"""Populate filter properties with history of retries for this
request. If maximum retries is exceeded, raise NoValidHost.
"""
max_attempts = self.max_attempts
retry = filter_properties.pop('retry', {})
if max_attempts == 1:
# re-scheduling is disabled.
return
# retry is enabled, update attempt count:
if retry:
retry['num_attempts'] += 1
else:
retry = {
'num_attempts': 1,
'hosts': [] # list of share service hosts tried
}
filter_properties['retry'] = retry
share_id = properties.get('share_id')
self._log_share_error(share_id, retry)
if retry['num_attempts'] > max_attempts:
msg = _("Exceeded max scheduling attempts %(max_attempts)d for "
"share %(share_id)s") % locals()
raise exception.NoValidHost(reason=msg)
def _log_share_error(self, share_id, retry):
"""If the request contained an exception from a previous share
create operation, log it to aid debugging.
"""
exc = retry.pop('exc', None) # string-ified exception from share
if not exc:
return # no exception info from a previous attempt, skip
hosts = retry.get('hosts', None)
if not hosts:
return # no previously attempted hosts, skip
last_host = hosts[-1]
msg = _("Error scheduling %(share_id)s from last share-service: "
"%(last_host)s : %(exc)s") % locals()
LOG.error(msg)
def populate_filter_properties_share(self, request_spec,
filter_properties):
"""Stuff things into filter_properties. Can be overridden in a
subclass to add more data.
"""
shr = request_spec['share_properties']
filter_properties['size'] = shr['size']
filter_properties['availability_zone'] = shr.get('availability_zone')
filter_properties['user_id'] = shr.get('user_id')
filter_properties['metadata'] = shr.get('metadata')
| """Add additional information to the filter properties after a host has
been selected by the scheduling process.
"""
# Add a retry entry for the selected volume backend:
self._add_retry_host(filter_properties, host_state.host) | identifier_body |
filter_scheduler.py | # Copyright (c) 2011 Intel Corporation
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating shares.
You can customize this scheduler by specifying your own share Filters and
Weighing Functions.
"""
import operator
from manila import exception
from manila.openstack.common import importutils
from manila.openstack.common import log as logging
from manila.scheduler import driver
from manila.scheduler import scheduler_options
from oslo.config import cfg
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class FilterScheduler(driver.Scheduler):
"""Scheduler that can be used for filtering and weighing."""
def __init__(self, *args, **kwargs):
super(FilterScheduler, self).__init__(*args, **kwargs)
self.cost_function_cache = None
self.options = scheduler_options.SchedulerOptions()
self.max_attempts = self._max_attempts()
def schedule(self, context, topic, method, *args, **kwargs):
"""The schedule() contract requires we return the one
best-suited host for this request.
"""
self._schedule(context, topic, *args, **kwargs)
def _get_configuration_options(self):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def _post_select_populate_filter_properties(self, filter_properties,
host_state):
"""Add additional information to the filter properties after a host has
been selected by the scheduling process.
"""
# Add a retry entry for the selected volume backend:
self._add_retry_host(filter_properties, host_state.host)
def _add_retry_host(self, filter_properties, host):
"""Add a retry entry for the selected volume backend. In the event that
the request gets re-scheduled, this entry will signal that the given
backend has already been tried.
"""
retry = filter_properties.get('retry', None)
if not retry:
return
hosts = retry['hosts']
hosts.append(host)
def _max_attempts(self):
max_attempts = CONF.scheduler_max_attempts
if max_attempts < 1:
msg = _("Invalid value for 'scheduler_max_attempts', "
"must be >=1")
raise exception.InvalidParameterValue(err=msg)
return max_attempts
def schedule_create_share(self, context, request_spec, filter_properties):
weighed_host = self._schedule_share(context,
request_spec,
filter_properties)
if not weighed_host:
raise exception.NoValidHost(reason="")
host = weighed_host.obj.host
share_id = request_spec['share_id']
snapshot_id = request_spec['snapshot_id']
updated_share = driver.share_update_db(context, share_id, host)
self._post_select_populate_filter_properties(filter_properties,
weighed_host.obj)
# context is not serializable
filter_properties.pop('context', None)
self.share_rpcapi.create_share(context, updated_share, host,
request_spec=request_spec,
filter_properties=filter_properties,
snapshot_id=snapshot_id)
def _schedule_share(self, context, request_spec, filter_properties=None):
"""Returns a list of hosts that meet the required specs,
ordered by their fitness.
"""
elevated = context.elevated()
share_properties = request_spec['share_properties']
# Since Manila is using mixed filters from Oslo and it's own, which
# takes 'resource_XX' and 'volume_XX' as input respectively, copying
# 'volume_XX' to 'resource_XX' will make both filters happy.
resource_properties = share_properties.copy()
share_type = request_spec.get("share_type", {})
resource_type = request_spec.get("share_type", {})
request_spec.update({'resource_properties': resource_properties})
config_options = self._get_configuration_options()
if filter_properties is None:
filter_properties = {}
self._populate_retry_share(filter_properties, resource_properties)
filter_properties.update({'context': context,
'request_spec': request_spec,
'config_options': config_options,
'share_type': share_type,
'resource_type': resource_type
})
self.populate_filter_properties_share(request_spec, filter_properties)
# Find our local list of acceptable hosts by filtering and
# weighing our options. we virtually consume resources on
# it so subsequent selections can adjust accordingly.
# Note: remember, we are using an iterator here. So only
# traverse this list once.
hosts = self.host_manager.get_all_host_states_share(elevated)
# Filter local hosts based on requirements ...
hosts = self.host_manager.get_filtered_hosts(hosts,
filter_properties)
if not hosts:
return None
LOG.debug(_("Filtered share %(hosts)s") % locals())
# weighted_host = WeightedHost() ... the best
# host for the job.
weighed_hosts = self.host_manager.get_weighed_hosts(hosts,
filter_properties)
best_host = weighed_hosts[0]
LOG.debug(_("Choosing for share: %(best_host)s") % locals())
#NOTE(rushiagr): updating the available space parameters at same place
best_host.obj.consume_from_volume(share_properties)
return best_host
def _populate_retry_share(self, filter_properties, properties):
"""Populate filter properties with history of retries for this
request. If maximum retries is exceeded, raise NoValidHost.
"""
max_attempts = self.max_attempts
retry = filter_properties.pop('retry', {})
if max_attempts == 1:
# re-scheduling is disabled.
return
# retry is enabled, update attempt count:
if retry:
retry['num_attempts'] += 1
else:
|
filter_properties['retry'] = retry
share_id = properties.get('share_id')
self._log_share_error(share_id, retry)
if retry['num_attempts'] > max_attempts:
msg = _("Exceeded max scheduling attempts %(max_attempts)d for "
"share %(share_id)s") % locals()
raise exception.NoValidHost(reason=msg)
def _log_share_error(self, share_id, retry):
"""If the request contained an exception from a previous share
create operation, log it to aid debugging.
"""
exc = retry.pop('exc', None) # string-ified exception from share
if not exc:
return # no exception info from a previous attempt, skip
hosts = retry.get('hosts', None)
if not hosts:
return # no previously attempted hosts, skip
last_host = hosts[-1]
msg = _("Error scheduling %(share_id)s from last share-service: "
"%(last_host)s : %(exc)s") % locals()
LOG.error(msg)
def populate_filter_properties_share(self, request_spec,
filter_properties):
"""Stuff things into filter_properties. Can be overridden in a
subclass to add more data.
"""
shr = request_spec['share_properties']
filter_properties['size'] = shr['size']
filter_properties['availability_zone'] = shr.get('availability_zone')
filter_properties['user_id'] = shr.get('user_id')
filter_properties['metadata'] = shr.get('metadata')
| retry = {
'num_attempts': 1,
'hosts': [] # list of share service hosts tried
} | conditional_block |
aws.js | AwsApplicationModel = AwsDeploy.Model.extend({
idAttribute: "application_name"
});
AwsApplicationCollection = AwsDeploy.Collection.extend({
model: AwsApplicationModel,
url: "/aws/apps"
});
AwsEnvironmentModel = AwsDeploy.Model.extend({
idAttribute: "environment_id"
});
AwsEnvironmentCollection = AwsDeploy.Collection.extend({
model: AwsEnvironmentModel,
url: function () {
return "/aws/apps/" + this.application_name + "/environments";
}
});
AwsApplicationVersionModel = AwsDeploy.Model.extend({
idAttribute: "version_label"
});
AwsApplicationVersionCollection = AwsDeploy.Collection.extend({
model: AwsApplicationVersionModel,
url: function () {
return "/aws/apps/" + this.application_name + "/versions";
} | });
AwsS3BucketCollection = AwsDeploy.Collection.extend({
model: AwsS3BucketModel,
url: function () {
return "/aws/s3/buckets";
}
}); | });
AwsS3BucketModel = AwsDeploy.Model.extend({
idAttribute: "bucket_name" | random_line_split |
headphonemon.py | # Copyright 2015 Christoph Reiter
# 2017 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
from quodlibet.qltk import Icons
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import fcntl
import subprocess
from gi.repository import GLib, GObject
from quodlibet import _
from quodlibet.plugins.events import EventPlugin
from quodlibet.util import print_d
from quodlibet import app
def get_headphone_status():
"""Returns if headphones are connected."""
# No idea if this is a good way... but works here :)
try:
data = subprocess.check_output(["pactl", "list", "sinks"])
except OSError:
return False | for line in data.splitlines():
if line.strip() == b"Active Port: analog-output-headphones":
return True
else:
return False
class HeadphoneAction(object):
DISCONNECTED = 0
CONNECTED = 1
class HeadphoneMonitor(GObject.Object):
"""Monitors the headphone connection state advertised by pulseaudio.
After start() is called will emit a signal in case headphones
get connected or disconnected.
If pulseaudio isn't active this will work but always return
a disconnected status.
The changed signal will never be emitted with the same status multiple
times.
"""
__gsignals__ = {
'action': (GObject.SignalFlags.RUN_LAST, None, (object,)),
}
def __init__(self):
super(HeadphoneMonitor, self).__init__()
self._subscribe_id = None
self._process = None
self._status = None
def is_connected(self):
"""Returns whether headphones are currently connected"""
if self._status is None:
raise Exception("call start() first")
return self._status
def _emit(self):
self.emit("action",
HeadphoneAction.CONNECTED if self._status else
HeadphoneAction.DISCONNECTED)
def _update_status(self):
assert self._status is not None
new_status = get_headphone_status()
if new_status != self._status:
self._status = new_status
self._emit()
return
def start(self):
"""Start the monitoring process.
Once this gets called the "changed" signal will be emitted.
"""
NULL = open(os.devnull, 'wb')
try:
self._process = subprocess.Popen(
["pactl", "subscribe"], stdout=subprocess.PIPE, stderr=NULL)
except OSError:
self._status = False
return
f = self._process.stdout
fcntl.fcntl(f, fcntl.F_SETFL, os.O_NONBLOCK)
def can_read_cb(fd, flags):
if flags & (GLib.IOCondition.HUP | GLib.IOCondition.ERR):
f.close()
self._subscribe_id = None
return False
data = f.read()
if not data:
f.close()
self._subscribe_id = None
return False
# querying the status also results in client events which would
# lead us into an endless loop. Instead just something if there
# is a sink event
if b" on sink " in data:
self._update_status()
return True
self._status = get_headphone_status()
self._subscribe_id = GLib.io_add_watch(
f, GLib.PRIORITY_HIGH,
GLib.IOCondition.IN | GLib.IOCondition.ERR | GLib.IOCondition.HUP,
can_read_cb)
def stop(self):
"""Stop the monitoring process.
After this returns no signal will be emitted.
Can be called multiple times.
start() can be called to start monitoring again after this returns.
"""
if self._subscribe_id is not None:
GLib.source_remove(self._subscribe_id)
self._subscribe_id = None
if self._process is not None:
self._process.terminate()
self._process.wait()
self._process = None
self._status = None
class HeadphoneMonitorPlugin(EventPlugin):
PLUGIN_ID = "HeadphoneMonitor"
PLUGIN_NAME = _("Pause on Headphone Unplug")
PLUGIN_DESC = _("Pauses in case headphones get unplugged and unpauses in "
"case they get plugged in again.")
PLUGIN_ICON = Icons.MEDIA_PLAYBACK_PAUSE
def enabled(self):
self._was_paused = False
self._do_act = False
self._mon = HeadphoneMonitor()
self._mon.connect("action", self._changed)
self._mon.start()
def _changed(self, mon, action):
if action == HeadphoneAction.DISCONNECTED:
print_d("Headphones disconnected")
if self._do_act:
do_act = self._do_act
self._was_paused = app.player.paused
app.player.paused = True
self._do_act = do_act
elif action == HeadphoneAction.CONNECTED:
print_d("Headphones connected")
if self._do_act:
do_act = self._do_act
app.player.paused = self._was_paused
self._do_act = do_act
def disabled(self):
self._mon.stop()
del self._mon
def plugin_on_paused(self):
self._do_act = False
def plugin_on_unpaused(self):
self._do_act = self._mon.is_connected() | random_line_split |
|
headphonemon.py | # Copyright 2015 Christoph Reiter
# 2017 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
from quodlibet.qltk import Icons
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import fcntl
import subprocess
from gi.repository import GLib, GObject
from quodlibet import _
from quodlibet.plugins.events import EventPlugin
from quodlibet.util import print_d
from quodlibet import app
def get_headphone_status():
"""Returns if headphones are connected."""
# No idea if this is a good way... but works here :)
try:
data = subprocess.check_output(["pactl", "list", "sinks"])
except OSError:
return False
for line in data.splitlines():
if line.strip() == b"Active Port: analog-output-headphones":
return True
else:
return False
class HeadphoneAction(object):
DISCONNECTED = 0
CONNECTED = 1
class HeadphoneMonitor(GObject.Object):
"""Monitors the headphone connection state advertised by pulseaudio.
After start() is called will emit a signal in case headphones
get connected or disconnected.
If pulseaudio isn't active this will work but always return
a disconnected status.
The changed signal will never be emitted with the same status multiple
times.
"""
__gsignals__ = {
'action': (GObject.SignalFlags.RUN_LAST, None, (object,)),
}
def __init__(self):
super(HeadphoneMonitor, self).__init__()
self._subscribe_id = None
self._process = None
self._status = None
def is_connected(self):
"""Returns whether headphones are currently connected"""
if self._status is None:
raise Exception("call start() first")
return self._status
def _emit(self):
self.emit("action",
HeadphoneAction.CONNECTED if self._status else
HeadphoneAction.DISCONNECTED)
def _update_status(self):
assert self._status is not None
new_status = get_headphone_status()
if new_status != self._status:
|
def start(self):
"""Start the monitoring process.
Once this gets called the "changed" signal will be emitted.
"""
NULL = open(os.devnull, 'wb')
try:
self._process = subprocess.Popen(
["pactl", "subscribe"], stdout=subprocess.PIPE, stderr=NULL)
except OSError:
self._status = False
return
f = self._process.stdout
fcntl.fcntl(f, fcntl.F_SETFL, os.O_NONBLOCK)
def can_read_cb(fd, flags):
if flags & (GLib.IOCondition.HUP | GLib.IOCondition.ERR):
f.close()
self._subscribe_id = None
return False
data = f.read()
if not data:
f.close()
self._subscribe_id = None
return False
# querying the status also results in client events which would
# lead us into an endless loop. Instead just something if there
# is a sink event
if b" on sink " in data:
self._update_status()
return True
self._status = get_headphone_status()
self._subscribe_id = GLib.io_add_watch(
f, GLib.PRIORITY_HIGH,
GLib.IOCondition.IN | GLib.IOCondition.ERR | GLib.IOCondition.HUP,
can_read_cb)
def stop(self):
"""Stop the monitoring process.
After this returns no signal will be emitted.
Can be called multiple times.
start() can be called to start monitoring again after this returns.
"""
if self._subscribe_id is not None:
GLib.source_remove(self._subscribe_id)
self._subscribe_id = None
if self._process is not None:
self._process.terminate()
self._process.wait()
self._process = None
self._status = None
class HeadphoneMonitorPlugin(EventPlugin):
PLUGIN_ID = "HeadphoneMonitor"
PLUGIN_NAME = _("Pause on Headphone Unplug")
PLUGIN_DESC = _("Pauses in case headphones get unplugged and unpauses in "
"case they get plugged in again.")
PLUGIN_ICON = Icons.MEDIA_PLAYBACK_PAUSE
def enabled(self):
self._was_paused = False
self._do_act = False
self._mon = HeadphoneMonitor()
self._mon.connect("action", self._changed)
self._mon.start()
def _changed(self, mon, action):
if action == HeadphoneAction.DISCONNECTED:
print_d("Headphones disconnected")
if self._do_act:
do_act = self._do_act
self._was_paused = app.player.paused
app.player.paused = True
self._do_act = do_act
elif action == HeadphoneAction.CONNECTED:
print_d("Headphones connected")
if self._do_act:
do_act = self._do_act
app.player.paused = self._was_paused
self._do_act = do_act
def disabled(self):
self._mon.stop()
del self._mon
def plugin_on_paused(self):
self._do_act = False
def plugin_on_unpaused(self):
self._do_act = self._mon.is_connected()
| self._status = new_status
self._emit()
return | conditional_block |
headphonemon.py | # Copyright 2015 Christoph Reiter
# 2017 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
from quodlibet.qltk import Icons
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import fcntl
import subprocess
from gi.repository import GLib, GObject
from quodlibet import _
from quodlibet.plugins.events import EventPlugin
from quodlibet.util import print_d
from quodlibet import app
def get_headphone_status():
"""Returns if headphones are connected."""
# No idea if this is a good way... but works here :)
try:
data = subprocess.check_output(["pactl", "list", "sinks"])
except OSError:
return False
for line in data.splitlines():
if line.strip() == b"Active Port: analog-output-headphones":
return True
else:
return False
class HeadphoneAction(object):
DISCONNECTED = 0
CONNECTED = 1
class HeadphoneMonitor(GObject.Object):
"""Monitors the headphone connection state advertised by pulseaudio.
After start() is called will emit a signal in case headphones
get connected or disconnected.
If pulseaudio isn't active this will work but always return
a disconnected status.
The changed signal will never be emitted with the same status multiple
times.
"""
__gsignals__ = {
'action': (GObject.SignalFlags.RUN_LAST, None, (object,)),
}
def __init__(self):
super(HeadphoneMonitor, self).__init__()
self._subscribe_id = None
self._process = None
self._status = None
def is_connected(self):
"""Returns whether headphones are currently connected"""
if self._status is None:
raise Exception("call start() first")
return self._status
def _emit(self):
self.emit("action",
HeadphoneAction.CONNECTED if self._status else
HeadphoneAction.DISCONNECTED)
def _update_status(self):
assert self._status is not None
new_status = get_headphone_status()
if new_status != self._status:
self._status = new_status
self._emit()
return
def start(self):
"""Start the monitoring process.
Once this gets called the "changed" signal will be emitted.
"""
NULL = open(os.devnull, 'wb')
try:
self._process = subprocess.Popen(
["pactl", "subscribe"], stdout=subprocess.PIPE, stderr=NULL)
except OSError:
self._status = False
return
f = self._process.stdout
fcntl.fcntl(f, fcntl.F_SETFL, os.O_NONBLOCK)
def can_read_cb(fd, flags):
if flags & (GLib.IOCondition.HUP | GLib.IOCondition.ERR):
f.close()
self._subscribe_id = None
return False
data = f.read()
if not data:
f.close()
self._subscribe_id = None
return False
# querying the status also results in client events which would
# lead us into an endless loop. Instead just something if there
# is a sink event
if b" on sink " in data:
self._update_status()
return True
self._status = get_headphone_status()
self._subscribe_id = GLib.io_add_watch(
f, GLib.PRIORITY_HIGH,
GLib.IOCondition.IN | GLib.IOCondition.ERR | GLib.IOCondition.HUP,
can_read_cb)
def stop(self):
"""Stop the monitoring process.
After this returns no signal will be emitted.
Can be called multiple times.
start() can be called to start monitoring again after this returns.
"""
if self._subscribe_id is not None:
GLib.source_remove(self._subscribe_id)
self._subscribe_id = None
if self._process is not None:
self._process.terminate()
self._process.wait()
self._process = None
self._status = None
class | (EventPlugin):
PLUGIN_ID = "HeadphoneMonitor"
PLUGIN_NAME = _("Pause on Headphone Unplug")
PLUGIN_DESC = _("Pauses in case headphones get unplugged and unpauses in "
"case they get plugged in again.")
PLUGIN_ICON = Icons.MEDIA_PLAYBACK_PAUSE
def enabled(self):
self._was_paused = False
self._do_act = False
self._mon = HeadphoneMonitor()
self._mon.connect("action", self._changed)
self._mon.start()
def _changed(self, mon, action):
if action == HeadphoneAction.DISCONNECTED:
print_d("Headphones disconnected")
if self._do_act:
do_act = self._do_act
self._was_paused = app.player.paused
app.player.paused = True
self._do_act = do_act
elif action == HeadphoneAction.CONNECTED:
print_d("Headphones connected")
if self._do_act:
do_act = self._do_act
app.player.paused = self._was_paused
self._do_act = do_act
def disabled(self):
self._mon.stop()
del self._mon
def plugin_on_paused(self):
self._do_act = False
def plugin_on_unpaused(self):
self._do_act = self._mon.is_connected()
| HeadphoneMonitorPlugin | identifier_name |
headphonemon.py | # Copyright 2015 Christoph Reiter
# 2017 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
from quodlibet.qltk import Icons
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import fcntl
import subprocess
from gi.repository import GLib, GObject
from quodlibet import _
from quodlibet.plugins.events import EventPlugin
from quodlibet.util import print_d
from quodlibet import app
def get_headphone_status():
"""Returns if headphones are connected."""
# No idea if this is a good way... but works here :)
try:
data = subprocess.check_output(["pactl", "list", "sinks"])
except OSError:
return False
for line in data.splitlines():
if line.strip() == b"Active Port: analog-output-headphones":
return True
else:
return False
class HeadphoneAction(object):
DISCONNECTED = 0
CONNECTED = 1
class HeadphoneMonitor(GObject.Object):
"""Monitors the headphone connection state advertised by pulseaudio.
After start() is called will emit a signal in case headphones
get connected or disconnected.
If pulseaudio isn't active this will work but always return
a disconnected status.
The changed signal will never be emitted with the same status multiple
times.
"""
__gsignals__ = {
'action': (GObject.SignalFlags.RUN_LAST, None, (object,)),
}
def __init__(self):
super(HeadphoneMonitor, self).__init__()
self._subscribe_id = None
self._process = None
self._status = None
def is_connected(self):
"""Returns whether headphones are currently connected"""
if self._status is None:
raise Exception("call start() first")
return self._status
def _emit(self):
self.emit("action",
HeadphoneAction.CONNECTED if self._status else
HeadphoneAction.DISCONNECTED)
def _update_status(self):
assert self._status is not None
new_status = get_headphone_status()
if new_status != self._status:
self._status = new_status
self._emit()
return
def start(self):
"""Start the monitoring process.
Once this gets called the "changed" signal will be emitted.
"""
NULL = open(os.devnull, 'wb')
try:
self._process = subprocess.Popen(
["pactl", "subscribe"], stdout=subprocess.PIPE, stderr=NULL)
except OSError:
self._status = False
return
f = self._process.stdout
fcntl.fcntl(f, fcntl.F_SETFL, os.O_NONBLOCK)
def can_read_cb(fd, flags):
if flags & (GLib.IOCondition.HUP | GLib.IOCondition.ERR):
f.close()
self._subscribe_id = None
return False
data = f.read()
if not data:
f.close()
self._subscribe_id = None
return False
# querying the status also results in client events which would
# lead us into an endless loop. Instead just something if there
# is a sink event
if b" on sink " in data:
self._update_status()
return True
self._status = get_headphone_status()
self._subscribe_id = GLib.io_add_watch(
f, GLib.PRIORITY_HIGH,
GLib.IOCondition.IN | GLib.IOCondition.ERR | GLib.IOCondition.HUP,
can_read_cb)
def stop(self):
"""Stop the monitoring process.
After this returns no signal will be emitted.
Can be called multiple times.
start() can be called to start monitoring again after this returns.
"""
if self._subscribe_id is not None:
GLib.source_remove(self._subscribe_id)
self._subscribe_id = None
if self._process is not None:
self._process.terminate()
self._process.wait()
self._process = None
self._status = None
class HeadphoneMonitorPlugin(EventPlugin):
| PLUGIN_ID = "HeadphoneMonitor"
PLUGIN_NAME = _("Pause on Headphone Unplug")
PLUGIN_DESC = _("Pauses in case headphones get unplugged and unpauses in "
"case they get plugged in again.")
PLUGIN_ICON = Icons.MEDIA_PLAYBACK_PAUSE
def enabled(self):
self._was_paused = False
self._do_act = False
self._mon = HeadphoneMonitor()
self._mon.connect("action", self._changed)
self._mon.start()
def _changed(self, mon, action):
if action == HeadphoneAction.DISCONNECTED:
print_d("Headphones disconnected")
if self._do_act:
do_act = self._do_act
self._was_paused = app.player.paused
app.player.paused = True
self._do_act = do_act
elif action == HeadphoneAction.CONNECTED:
print_d("Headphones connected")
if self._do_act:
do_act = self._do_act
app.player.paused = self._was_paused
self._do_act = do_act
def disabled(self):
self._mon.stop()
del self._mon
def plugin_on_paused(self):
self._do_act = False
def plugin_on_unpaused(self):
self._do_act = self._mon.is_connected() | identifier_body |
|
settings_menu.ts | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview
* 'settings-menu' shows a menu with a hardcoded set of pages and subpages.
*/
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_icons_css.m.js';
import 'chrome://resources/cr_elements/cr_menu_selector/cr_menu_selector.js';
import 'chrome://resources/cr_elements/cr_nav_menu_item_style.js';
import 'chrome://resources/cr_elements/icons.m.js';
import 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js';
import 'chrome://resources/polymer/v3_0/iron-icon/iron-icon.js';
import 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import 'chrome://resources/polymer/v3_0/paper-ripple/paper-ripple.js';
import '../icons.js';
import '../settings_shared_css.js';
import {assert} from 'chrome://resources/js/assert.m.js';
import {IronSelectorElement} from 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import {html, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {loadTimeData} from '../i18n_setup.js';
import {PageVisibility} from '../page_visibility.js';
import {Route, RouteObserverMixin, RouteObserverMixinInterface, Router} from '../router.js';
export interface SettingsMenuElement {
$: {
topMenu: IronSelectorElement,
subMenu: IronSelectorElement,
};
}
const SettingsMenuElementBase = RouteObserverMixin(PolymerElement) as
{new (): PolymerElement & RouteObserverMixinInterface};
export class SettingsMenuElement extends SettingsMenuElementBase {
static get is() {
return 'settings-menu';
}
static get template() {
return html`{__html_template__}`;
}
static get properties() {
return {
advancedOpened: {
type: Boolean,
value: false,
notify: true,
},
/**
* Dictionary defining page visibility.
*/
pageVisibility: Object,
enableLandingPageRedesign_: {
type: Boolean,
value: () => loadTimeData.getBoolean('enableLandingPageRedesign'),
},
};
}
advancedOpened: boolean;
pageVisibility: PageVisibility;
private enableLandingPageRedesign_: boolean;
currentRouteChanged(newRoute: Route) {
// Focus the initially selected path.
const anchors = this.shadowRoot!.querySelectorAll('a');
for (let i = 0; i < anchors.length; ++i) {
const anchorRoute = Router.getInstance().getRouteForPath(
anchors[i].getAttribute('href')!); | this.setSelectedUrl_(anchors[i].href);
return;
}
}
this.setSelectedUrl_(''); // Nothing is selected.
}
focusFirstItem() {
const firstFocusableItem = this.shadowRoot!.querySelector<HTMLElement>(
'[role=menuitem]:not([hidden])');
if (firstFocusableItem) {
firstFocusableItem.focus();
}
}
private onAdvancedButtonToggle_() {
this.advancedOpened = !this.advancedOpened;
}
/**
* Prevent clicks on sidebar items from navigating. These are only links for
* accessibility purposes, taps are handled separately by <iron-selector>.
*/
private onLinkClick_(event: Event) {
if ((event.target as HTMLElement).matches('a:not(#extensionsLink)')) {
event.preventDefault();
}
}
/**
* Keeps both menus in sync. |url| needs to come from |element.href| because
* |iron-list| uses the entire url. Using |getAttribute| will not work.
*/
private setSelectedUrl_(url: string) {
this.$.topMenu.selected = this.$.subMenu.selected = url;
}
private onSelectorActivate_(event: CustomEvent<{selected: string}>) {
this.setSelectedUrl_(event.detail.selected);
const path = new URL(event.detail.selected).pathname;
const route = Router.getInstance().getRouteForPath(path);
assert(route, 'settings-menu has an entry with an invalid route.');
Router.getInstance().navigateTo(
route!, /* dynamicParams */ undefined, /* removeSearch */ true);
}
/**
* @param opened Whether the menu is expanded.
* @return Which icon to use.
*/
private arrowState_(opened: boolean): string {
return opened ? 'cr:arrow-drop-up' : 'cr:arrow-drop-down';
}
private onExtensionsLinkClick_() {
chrome.metricsPrivate.recordUserAction(
'SettingsMenu_ExtensionsLinkClicked');
}
private boolToString_(bool: boolean): string {
return bool.toString();
}
}
customElements.define(SettingsMenuElement.is, SettingsMenuElement); | if (anchorRoute && anchorRoute.contains(newRoute)) { | random_line_split |
settings_menu.ts | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview
* 'settings-menu' shows a menu with a hardcoded set of pages and subpages.
*/
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_icons_css.m.js';
import 'chrome://resources/cr_elements/cr_menu_selector/cr_menu_selector.js';
import 'chrome://resources/cr_elements/cr_nav_menu_item_style.js';
import 'chrome://resources/cr_elements/icons.m.js';
import 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js';
import 'chrome://resources/polymer/v3_0/iron-icon/iron-icon.js';
import 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import 'chrome://resources/polymer/v3_0/paper-ripple/paper-ripple.js';
import '../icons.js';
import '../settings_shared_css.js';
import {assert} from 'chrome://resources/js/assert.m.js';
import {IronSelectorElement} from 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import {html, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {loadTimeData} from '../i18n_setup.js';
import {PageVisibility} from '../page_visibility.js';
import {Route, RouteObserverMixin, RouteObserverMixinInterface, Router} from '../router.js';
export interface SettingsMenuElement {
$: {
topMenu: IronSelectorElement,
subMenu: IronSelectorElement,
};
}
const SettingsMenuElementBase = RouteObserverMixin(PolymerElement) as
{new (): PolymerElement & RouteObserverMixinInterface};
export class SettingsMenuElement extends SettingsMenuElementBase {
static get is() {
return 'settings-menu';
}
static get template() {
return html`{__html_template__}`;
}
static get properties() {
return {
advancedOpened: {
type: Boolean,
value: false,
notify: true,
},
/**
* Dictionary defining page visibility.
*/
pageVisibility: Object,
enableLandingPageRedesign_: {
type: Boolean,
value: () => loadTimeData.getBoolean('enableLandingPageRedesign'),
},
};
}
advancedOpened: boolean;
pageVisibility: PageVisibility;
private enableLandingPageRedesign_: boolean;
currentRouteChanged(newRoute: Route) {
// Focus the initially selected path.
const anchors = this.shadowRoot!.querySelectorAll('a');
for (let i = 0; i < anchors.length; ++i) |
this.setSelectedUrl_(''); // Nothing is selected.
}
focusFirstItem() {
const firstFocusableItem = this.shadowRoot!.querySelector<HTMLElement>(
'[role=menuitem]:not([hidden])');
if (firstFocusableItem) {
firstFocusableItem.focus();
}
}
private onAdvancedButtonToggle_() {
this.advancedOpened = !this.advancedOpened;
}
/**
* Prevent clicks on sidebar items from navigating. These are only links for
* accessibility purposes, taps are handled separately by <iron-selector>.
*/
private onLinkClick_(event: Event) {
if ((event.target as HTMLElement).matches('a:not(#extensionsLink)')) {
event.preventDefault();
}
}
/**
* Keeps both menus in sync. |url| needs to come from |element.href| because
* |iron-list| uses the entire url. Using |getAttribute| will not work.
*/
private setSelectedUrl_(url: string) {
this.$.topMenu.selected = this.$.subMenu.selected = url;
}
private onSelectorActivate_(event: CustomEvent<{selected: string}>) {
this.setSelectedUrl_(event.detail.selected);
const path = new URL(event.detail.selected).pathname;
const route = Router.getInstance().getRouteForPath(path);
assert(route, 'settings-menu has an entry with an invalid route.');
Router.getInstance().navigateTo(
route!, /* dynamicParams */ undefined, /* removeSearch */ true);
}
/**
* @param opened Whether the menu is expanded.
* @return Which icon to use.
*/
private arrowState_(opened: boolean): string {
return opened ? 'cr:arrow-drop-up' : 'cr:arrow-drop-down';
}
private onExtensionsLinkClick_() {
chrome.metricsPrivate.recordUserAction(
'SettingsMenu_ExtensionsLinkClicked');
}
private boolToString_(bool: boolean): string {
return bool.toString();
}
}
customElements.define(SettingsMenuElement.is, SettingsMenuElement);
| {
const anchorRoute = Router.getInstance().getRouteForPath(
anchors[i].getAttribute('href')!);
if (anchorRoute && anchorRoute.contains(newRoute)) {
this.setSelectedUrl_(anchors[i].href);
return;
}
} | conditional_block |
settings_menu.ts | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview
* 'settings-menu' shows a menu with a hardcoded set of pages and subpages.
*/
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_icons_css.m.js';
import 'chrome://resources/cr_elements/cr_menu_selector/cr_menu_selector.js';
import 'chrome://resources/cr_elements/cr_nav_menu_item_style.js';
import 'chrome://resources/cr_elements/icons.m.js';
import 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js';
import 'chrome://resources/polymer/v3_0/iron-icon/iron-icon.js';
import 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import 'chrome://resources/polymer/v3_0/paper-ripple/paper-ripple.js';
import '../icons.js';
import '../settings_shared_css.js';
import {assert} from 'chrome://resources/js/assert.m.js';
import {IronSelectorElement} from 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import {html, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {loadTimeData} from '../i18n_setup.js';
import {PageVisibility} from '../page_visibility.js';
import {Route, RouteObserverMixin, RouteObserverMixinInterface, Router} from '../router.js';
export interface SettingsMenuElement {
$: {
topMenu: IronSelectorElement,
subMenu: IronSelectorElement,
};
}
const SettingsMenuElementBase = RouteObserverMixin(PolymerElement) as
{new (): PolymerElement & RouteObserverMixinInterface};
export class SettingsMenuElement extends SettingsMenuElementBase {
static get is() {
return 'settings-menu';
}
static get template() {
return html`{__html_template__}`;
}
static get properties() {
return {
advancedOpened: {
type: Boolean,
value: false,
notify: true,
},
/**
* Dictionary defining page visibility.
*/
pageVisibility: Object,
enableLandingPageRedesign_: {
type: Boolean,
value: () => loadTimeData.getBoolean('enableLandingPageRedesign'),
},
};
}
advancedOpened: boolean;
pageVisibility: PageVisibility;
private enableLandingPageRedesign_: boolean;
currentRouteChanged(newRoute: Route) {
// Focus the initially selected path.
const anchors = this.shadowRoot!.querySelectorAll('a');
for (let i = 0; i < anchors.length; ++i) {
const anchorRoute = Router.getInstance().getRouteForPath(
anchors[i].getAttribute('href')!);
if (anchorRoute && anchorRoute.contains(newRoute)) {
this.setSelectedUrl_(anchors[i].href);
return;
}
}
this.setSelectedUrl_(''); // Nothing is selected.
}
focusFirstItem() {
const firstFocusableItem = this.shadowRoot!.querySelector<HTMLElement>(
'[role=menuitem]:not([hidden])');
if (firstFocusableItem) {
firstFocusableItem.focus();
}
}
private onAdvancedButtonToggle_() {
this.advancedOpened = !this.advancedOpened;
}
/**
* Prevent clicks on sidebar items from navigating. These are only links for
* accessibility purposes, taps are handled separately by <iron-selector>.
*/
private onLinkClick_(event: Event) {
if ((event.target as HTMLElement).matches('a:not(#extensionsLink)')) {
event.preventDefault();
}
}
/**
* Keeps both menus in sync. |url| needs to come from |element.href| because
* |iron-list| uses the entire url. Using |getAttribute| will not work.
*/
private setSelectedUrl_(url: string) {
this.$.topMenu.selected = this.$.subMenu.selected = url;
}
private onSelectorActivate_(event: CustomEvent<{selected: string}>) {
this.setSelectedUrl_(event.detail.selected);
const path = new URL(event.detail.selected).pathname;
const route = Router.getInstance().getRouteForPath(path);
assert(route, 'settings-menu has an entry with an invalid route.');
Router.getInstance().navigateTo(
route!, /* dynamicParams */ undefined, /* removeSearch */ true);
}
/**
* @param opened Whether the menu is expanded.
* @return Which icon to use.
*/
private arrowState_(opened: boolean): string {
return opened ? 'cr:arrow-drop-up' : 'cr:arrow-drop-down';
}
private onExtensionsLinkClick_() {
chrome.metricsPrivate.recordUserAction(
'SettingsMenu_ExtensionsLinkClicked');
}
private boolToString_(bool: boolean): string |
}
customElements.define(SettingsMenuElement.is, SettingsMenuElement);
| {
return bool.toString();
} | identifier_body |
settings_menu.ts | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview
* 'settings-menu' shows a menu with a hardcoded set of pages and subpages.
*/
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_icons_css.m.js';
import 'chrome://resources/cr_elements/cr_menu_selector/cr_menu_selector.js';
import 'chrome://resources/cr_elements/cr_nav_menu_item_style.js';
import 'chrome://resources/cr_elements/icons.m.js';
import 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js';
import 'chrome://resources/polymer/v3_0/iron-icon/iron-icon.js';
import 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import 'chrome://resources/polymer/v3_0/paper-ripple/paper-ripple.js';
import '../icons.js';
import '../settings_shared_css.js';
import {assert} from 'chrome://resources/js/assert.m.js';
import {IronSelectorElement} from 'chrome://resources/polymer/v3_0/iron-selector/iron-selector.js';
import {html, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {loadTimeData} from '../i18n_setup.js';
import {PageVisibility} from '../page_visibility.js';
import {Route, RouteObserverMixin, RouteObserverMixinInterface, Router} from '../router.js';
export interface SettingsMenuElement {
$: {
topMenu: IronSelectorElement,
subMenu: IronSelectorElement,
};
}
const SettingsMenuElementBase = RouteObserverMixin(PolymerElement) as
{new (): PolymerElement & RouteObserverMixinInterface};
export class SettingsMenuElement extends SettingsMenuElementBase {
static get is() {
return 'settings-menu';
}
static get template() {
return html`{__html_template__}`;
}
static get properties() {
return {
advancedOpened: {
type: Boolean,
value: false,
notify: true,
},
/**
* Dictionary defining page visibility.
*/
pageVisibility: Object,
enableLandingPageRedesign_: {
type: Boolean,
value: () => loadTimeData.getBoolean('enableLandingPageRedesign'),
},
};
}
advancedOpened: boolean;
pageVisibility: PageVisibility;
private enableLandingPageRedesign_: boolean;
currentRouteChanged(newRoute: Route) {
// Focus the initially selected path.
const anchors = this.shadowRoot!.querySelectorAll('a');
for (let i = 0; i < anchors.length; ++i) {
const anchorRoute = Router.getInstance().getRouteForPath(
anchors[i].getAttribute('href')!);
if (anchorRoute && anchorRoute.contains(newRoute)) {
this.setSelectedUrl_(anchors[i].href);
return;
}
}
this.setSelectedUrl_(''); // Nothing is selected.
}
focusFirstItem() {
const firstFocusableItem = this.shadowRoot!.querySelector<HTMLElement>(
'[role=menuitem]:not([hidden])');
if (firstFocusableItem) {
firstFocusableItem.focus();
}
}
private onAdvancedButtonToggle_() {
this.advancedOpened = !this.advancedOpened;
}
/**
* Prevent clicks on sidebar items from navigating. These are only links for
* accessibility purposes, taps are handled separately by <iron-selector>.
*/
private | (event: Event) {
if ((event.target as HTMLElement).matches('a:not(#extensionsLink)')) {
event.preventDefault();
}
}
/**
* Keeps both menus in sync. |url| needs to come from |element.href| because
* |iron-list| uses the entire url. Using |getAttribute| will not work.
*/
private setSelectedUrl_(url: string) {
this.$.topMenu.selected = this.$.subMenu.selected = url;
}
private onSelectorActivate_(event: CustomEvent<{selected: string}>) {
this.setSelectedUrl_(event.detail.selected);
const path = new URL(event.detail.selected).pathname;
const route = Router.getInstance().getRouteForPath(path);
assert(route, 'settings-menu has an entry with an invalid route.');
Router.getInstance().navigateTo(
route!, /* dynamicParams */ undefined, /* removeSearch */ true);
}
/**
* @param opened Whether the menu is expanded.
* @return Which icon to use.
*/
private arrowState_(opened: boolean): string {
return opened ? 'cr:arrow-drop-up' : 'cr:arrow-drop-down';
}
private onExtensionsLinkClick_() {
chrome.metricsPrivate.recordUserAction(
'SettingsMenu_ExtensionsLinkClicked');
}
private boolToString_(bool: boolean): string {
return bool.toString();
}
}
customElements.define(SettingsMenuElement.is, SettingsMenuElement);
| onLinkClick_ | identifier_name |
coverage.py | #!/usr/bin/env python3
import os, sys, glob, pickle, subprocess
sys.path.insert(0, os.path.dirname(__file__))
from clang import cindex
sys.path = sys.path[1:]
def configure_libclang():
llvm_libdirs = ['/usr/lib/llvm-3.2/lib', '/usr/lib64/llvm']
try:
libdir = subprocess.check_output(['llvm-config', '--libdir']).decode('utf-8').strip()
llvm_libdirs.insert(0, libdir)
except OSError:
pass
for d in llvm_libdirs:
if not os.path.exists(d):
continue
files = glob.glob(os.path.join(d, 'libclang.so*'))
if len(files) != 0:
cindex.Config.set_library_file(files[0])
return
class Call:
def __init__(self, cursor, decl):
self.ident = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
self.decl_filename = decl.location.file.name.decode('utf-8')
class Definition:
def __init__(self, cursor):
self.ident = cursor.spelling.decode('utf-8')
self.display = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
def process_diagnostics(tu):
diagnostics = tu.diagnostics
haserr = False
for d in diagnostics:
sys.stderr.write('{0}\n'.format(d.format.decode('utf-8')))
if d.severity > cindex.Diagnostic.Warning:
haserr = True
if haserr:
sys.exit(1)
def walk_cursors(tu, files):
proc = list(tu.cursor.get_children())
while len(proc) > 0:
cursor = proc[0]
proc = proc[1:]
if cursor.location.file is None:
continue
fname = cursor.location.file.name.decode('utf-8')
if fname in files:
yield cursor
proc += list(cursor.get_children())
def newer(a, b):
try:
return os.stat(a).st_mtime > os.stat(b).st_mtime
except:
return True
def scan_libgit2_glib(cflags, files, git2dir):
files = [os.path.abspath(f) for f in files]
dname = os.path.dirname(__file__)
allcalls = {}
l = 0
if not os.getenv('SILENT'):
sys.stderr.write('\n')
i = 0
for f in files:
if not os.getenv('SILENT'):
name = os.path.basename(f)
if len(name) > l:
l = len(name)
perc = int((i / len(files)) * 100)
sys.stderr.write('[{0: >3}%] Processing ... {1}{2}\r'.format(perc, name, ' ' * (l - len(name))))
i += 1
astf = os.path.join(dname, '.' + os.path.basename(f) + '.cache')
if not newer(f, astf):
with open(astf, 'rb') as fo:
calls = pickle.load(fo)
else:
tu = cindex.TranslationUnit.from_source(f, cflags)
process_diagnostics(tu)
calls = {}
for cursor in walk_cursors(tu, files):
if cursor.kind == cindex.CursorKind.CALL_EXPR or \
cursor.kind == cindex.CursorKind.DECL_REF_EXPR:
cdecl = cursor.get_referenced()
if cdecl.kind != cindex.CursorKind.FUNCTION_DECL:
continue
if (not cdecl is None) and (not cdecl.location.file is None):
fdefname = cdecl.location.file.name.decode('utf-8')
if fdefname.startswith(git2dir):
call = Call(cursor, cdecl)
if call.ident in calls:
calls[call.ident].append(call)
else:
calls[call.ident] = [call]
with open(astf, 'wb') as fo:
pickle.dump(calls, fo)
for k in calls:
if k in allcalls:
allcalls[k] += calls[k]
else:
allcalls[k] = list(calls[k])
if not os.getenv('SILENT'):
sys.stderr.write('\r[100%] Processing ... done{0}\n'.format(' ' * (l - 4)))
return allcalls
def scan_libgit2(cflags, git2dir):
tu = cindex.TranslationUnit.from_source(git2dir + '.h', cflags)
process_diagnostics(tu)
headers = glob.glob(os.path.join(git2dir, '*.h'))
defs = {}
objapi = ['lookup', 'lookup_prefix', 'free', 'id', 'owner']
objderiv = ['commit', 'tree', 'tag', 'blob']
ignore = set()
for deriv in objderiv:
for api in objapi:
ignore.add('git_' + deriv + '_' + api)
for cursor in walk_cursors(tu, headers):
if cursor.kind == cindex.CursorKind.FUNCTION_DECL:
deff = Definition(cursor)
if not deff.ident in ignore:
defs[deff.ident] = deff
return defs
configure_libclang()
pos = sys.argv.index('--')
cflags = sys.argv[1:pos]
files = sys.argv[pos+1:]
incdir = os.getenv('LIBGIT2_INCLUDE_DIR')
defs = scan_libgit2(cflags, incdir)
calls = scan_libgit2_glib(cflags, files, incdir)
notused = {}
perfile = {}
nperfile = {}
for d in defs:
o = defs[d]
if not d in calls:
notused[d] = defs[d]
if not o.filename in nperfile:
nperfile[o.filename] = [o]
else:
nperfile[o.filename].append(o)
if not o.filename in perfile:
perfile[o.filename] = [o]
else:
perfile[o.filename].append(o)
ss = [notused[f] for f in notused]
ss.sort(key=lambda x: '{0} {1}'.format(os.path.basename(x.filename), x.ident))
lastf = None
keys = list(perfile.keys())
keys.sort()
for filename in keys:
b = os.path.basename(filename)
f = perfile[filename] |
if filename in nperfile:
n_nperfile = len(nperfile[filename])
else:
n_nperfile = 0
perc = int(((n_perfile - n_nperfile) / n_perfile) * 100)
print('\n File {0}, coverage {1}% ({2} out of {3}):'.format(b, perc, n_perfile - n_nperfile, n_perfile))
cp = list(f)
cp.sort(key=lambda x: "{0} {1}".format(not x.ident in calls, x.ident))
for d in cp:
if d.ident in calls:
print(' \033[32m✓ {0}\033[0m'.format(d.display))
else:
print(' \033[31m✗ {0}\033[0m'.format(d.display))
perc = int(((len(defs) - len(notused)) / len(defs)) * 100)
print('\nTotal coverage: {0}% ({1} functions out of {2} are being called)\n'.format(perc, len(defs) - len(notused), len(defs)))
# vi:ts=4:et |
n_perfile = len(f) | random_line_split |
coverage.py | #!/usr/bin/env python3
import os, sys, glob, pickle, subprocess
sys.path.insert(0, os.path.dirname(__file__))
from clang import cindex
sys.path = sys.path[1:]
def configure_libclang():
llvm_libdirs = ['/usr/lib/llvm-3.2/lib', '/usr/lib64/llvm']
try:
libdir = subprocess.check_output(['llvm-config', '--libdir']).decode('utf-8').strip()
llvm_libdirs.insert(0, libdir)
except OSError:
pass
for d in llvm_libdirs:
if not os.path.exists(d):
continue
files = glob.glob(os.path.join(d, 'libclang.so*'))
if len(files) != 0:
cindex.Config.set_library_file(files[0])
return
class | :
def __init__(self, cursor, decl):
self.ident = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
self.decl_filename = decl.location.file.name.decode('utf-8')
class Definition:
def __init__(self, cursor):
self.ident = cursor.spelling.decode('utf-8')
self.display = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
def process_diagnostics(tu):
diagnostics = tu.diagnostics
haserr = False
for d in diagnostics:
sys.stderr.write('{0}\n'.format(d.format.decode('utf-8')))
if d.severity > cindex.Diagnostic.Warning:
haserr = True
if haserr:
sys.exit(1)
def walk_cursors(tu, files):
proc = list(tu.cursor.get_children())
while len(proc) > 0:
cursor = proc[0]
proc = proc[1:]
if cursor.location.file is None:
continue
fname = cursor.location.file.name.decode('utf-8')
if fname in files:
yield cursor
proc += list(cursor.get_children())
def newer(a, b):
try:
return os.stat(a).st_mtime > os.stat(b).st_mtime
except:
return True
def scan_libgit2_glib(cflags, files, git2dir):
files = [os.path.abspath(f) for f in files]
dname = os.path.dirname(__file__)
allcalls = {}
l = 0
if not os.getenv('SILENT'):
sys.stderr.write('\n')
i = 0
for f in files:
if not os.getenv('SILENT'):
name = os.path.basename(f)
if len(name) > l:
l = len(name)
perc = int((i / len(files)) * 100)
sys.stderr.write('[{0: >3}%] Processing ... {1}{2}\r'.format(perc, name, ' ' * (l - len(name))))
i += 1
astf = os.path.join(dname, '.' + os.path.basename(f) + '.cache')
if not newer(f, astf):
with open(astf, 'rb') as fo:
calls = pickle.load(fo)
else:
tu = cindex.TranslationUnit.from_source(f, cflags)
process_diagnostics(tu)
calls = {}
for cursor in walk_cursors(tu, files):
if cursor.kind == cindex.CursorKind.CALL_EXPR or \
cursor.kind == cindex.CursorKind.DECL_REF_EXPR:
cdecl = cursor.get_referenced()
if cdecl.kind != cindex.CursorKind.FUNCTION_DECL:
continue
if (not cdecl is None) and (not cdecl.location.file is None):
fdefname = cdecl.location.file.name.decode('utf-8')
if fdefname.startswith(git2dir):
call = Call(cursor, cdecl)
if call.ident in calls:
calls[call.ident].append(call)
else:
calls[call.ident] = [call]
with open(astf, 'wb') as fo:
pickle.dump(calls, fo)
for k in calls:
if k in allcalls:
allcalls[k] += calls[k]
else:
allcalls[k] = list(calls[k])
if not os.getenv('SILENT'):
sys.stderr.write('\r[100%] Processing ... done{0}\n'.format(' ' * (l - 4)))
return allcalls
def scan_libgit2(cflags, git2dir):
tu = cindex.TranslationUnit.from_source(git2dir + '.h', cflags)
process_diagnostics(tu)
headers = glob.glob(os.path.join(git2dir, '*.h'))
defs = {}
objapi = ['lookup', 'lookup_prefix', 'free', 'id', 'owner']
objderiv = ['commit', 'tree', 'tag', 'blob']
ignore = set()
for deriv in objderiv:
for api in objapi:
ignore.add('git_' + deriv + '_' + api)
for cursor in walk_cursors(tu, headers):
if cursor.kind == cindex.CursorKind.FUNCTION_DECL:
deff = Definition(cursor)
if not deff.ident in ignore:
defs[deff.ident] = deff
return defs
configure_libclang()
pos = sys.argv.index('--')
cflags = sys.argv[1:pos]
files = sys.argv[pos+1:]
incdir = os.getenv('LIBGIT2_INCLUDE_DIR')
defs = scan_libgit2(cflags, incdir)
calls = scan_libgit2_glib(cflags, files, incdir)
notused = {}
perfile = {}
nperfile = {}
for d in defs:
o = defs[d]
if not d in calls:
notused[d] = defs[d]
if not o.filename in nperfile:
nperfile[o.filename] = [o]
else:
nperfile[o.filename].append(o)
if not o.filename in perfile:
perfile[o.filename] = [o]
else:
perfile[o.filename].append(o)
ss = [notused[f] for f in notused]
ss.sort(key=lambda x: '{0} {1}'.format(os.path.basename(x.filename), x.ident))
lastf = None
keys = list(perfile.keys())
keys.sort()
for filename in keys:
b = os.path.basename(filename)
f = perfile[filename]
n_perfile = len(f)
if filename in nperfile:
n_nperfile = len(nperfile[filename])
else:
n_nperfile = 0
perc = int(((n_perfile - n_nperfile) / n_perfile) * 100)
print('\n File {0}, coverage {1}% ({2} out of {3}):'.format(b, perc, n_perfile - n_nperfile, n_perfile))
cp = list(f)
cp.sort(key=lambda x: "{0} {1}".format(not x.ident in calls, x.ident))
for d in cp:
if d.ident in calls:
print(' \033[32m✓ {0}\033[0m'.format(d.display))
else:
print(' \033[31m✗ {0}\033[0m'.format(d.display))
perc = int(((len(defs) - len(notused)) / len(defs)) * 100)
print('\nTotal coverage: {0}% ({1} functions out of {2} are being called)\n'.format(perc, len(defs) - len(notused), len(defs)))
# vi:ts=4:et
| Call | identifier_name |
coverage.py | #!/usr/bin/env python3
import os, sys, glob, pickle, subprocess
sys.path.insert(0, os.path.dirname(__file__))
from clang import cindex
sys.path = sys.path[1:]
def configure_libclang():
llvm_libdirs = ['/usr/lib/llvm-3.2/lib', '/usr/lib64/llvm']
try:
libdir = subprocess.check_output(['llvm-config', '--libdir']).decode('utf-8').strip()
llvm_libdirs.insert(0, libdir)
except OSError:
pass
for d in llvm_libdirs:
if not os.path.exists(d):
continue
files = glob.glob(os.path.join(d, 'libclang.so*'))
if len(files) != 0:
cindex.Config.set_library_file(files[0])
return
class Call:
def __init__(self, cursor, decl):
self.ident = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
self.decl_filename = decl.location.file.name.decode('utf-8')
class Definition:
def __init__(self, cursor):
self.ident = cursor.spelling.decode('utf-8')
self.display = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
def process_diagnostics(tu):
diagnostics = tu.diagnostics
haserr = False
for d in diagnostics:
sys.stderr.write('{0}\n'.format(d.format.decode('utf-8')))
if d.severity > cindex.Diagnostic.Warning:
haserr = True
if haserr:
sys.exit(1)
def walk_cursors(tu, files):
proc = list(tu.cursor.get_children())
while len(proc) > 0:
cursor = proc[0]
proc = proc[1:]
if cursor.location.file is None:
continue
fname = cursor.location.file.name.decode('utf-8')
if fname in files:
yield cursor
proc += list(cursor.get_children())
def newer(a, b):
try:
return os.stat(a).st_mtime > os.stat(b).st_mtime
except:
return True
def scan_libgit2_glib(cflags, files, git2dir):
files = [os.path.abspath(f) for f in files]
dname = os.path.dirname(__file__)
allcalls = {}
l = 0
if not os.getenv('SILENT'):
sys.stderr.write('\n')
i = 0
for f in files:
if not os.getenv('SILENT'):
name = os.path.basename(f)
if len(name) > l:
l = len(name)
perc = int((i / len(files)) * 100)
sys.stderr.write('[{0: >3}%] Processing ... {1}{2}\r'.format(perc, name, ' ' * (l - len(name))))
i += 1
astf = os.path.join(dname, '.' + os.path.basename(f) + '.cache')
if not newer(f, astf):
with open(astf, 'rb') as fo:
calls = pickle.load(fo)
else:
tu = cindex.TranslationUnit.from_source(f, cflags)
process_diagnostics(tu)
calls = {}
for cursor in walk_cursors(tu, files):
if cursor.kind == cindex.CursorKind.CALL_EXPR or \
cursor.kind == cindex.CursorKind.DECL_REF_EXPR:
cdecl = cursor.get_referenced()
if cdecl.kind != cindex.CursorKind.FUNCTION_DECL:
continue
if (not cdecl is None) and (not cdecl.location.file is None):
fdefname = cdecl.location.file.name.decode('utf-8')
if fdefname.startswith(git2dir):
call = Call(cursor, cdecl)
if call.ident in calls:
calls[call.ident].append(call)
else:
calls[call.ident] = [call]
with open(astf, 'wb') as fo:
pickle.dump(calls, fo)
for k in calls:
if k in allcalls:
allcalls[k] += calls[k]
else:
allcalls[k] = list(calls[k])
if not os.getenv('SILENT'):
sys.stderr.write('\r[100%] Processing ... done{0}\n'.format(' ' * (l - 4)))
return allcalls
def scan_libgit2(cflags, git2dir):
|
configure_libclang()
pos = sys.argv.index('--')
cflags = sys.argv[1:pos]
files = sys.argv[pos+1:]
incdir = os.getenv('LIBGIT2_INCLUDE_DIR')
defs = scan_libgit2(cflags, incdir)
calls = scan_libgit2_glib(cflags, files, incdir)
notused = {}
perfile = {}
nperfile = {}
for d in defs:
o = defs[d]
if not d in calls:
notused[d] = defs[d]
if not o.filename in nperfile:
nperfile[o.filename] = [o]
else:
nperfile[o.filename].append(o)
if not o.filename in perfile:
perfile[o.filename] = [o]
else:
perfile[o.filename].append(o)
ss = [notused[f] for f in notused]
ss.sort(key=lambda x: '{0} {1}'.format(os.path.basename(x.filename), x.ident))
lastf = None
keys = list(perfile.keys())
keys.sort()
for filename in keys:
b = os.path.basename(filename)
f = perfile[filename]
n_perfile = len(f)
if filename in nperfile:
n_nperfile = len(nperfile[filename])
else:
n_nperfile = 0
perc = int(((n_perfile - n_nperfile) / n_perfile) * 100)
print('\n File {0}, coverage {1}% ({2} out of {3}):'.format(b, perc, n_perfile - n_nperfile, n_perfile))
cp = list(f)
cp.sort(key=lambda x: "{0} {1}".format(not x.ident in calls, x.ident))
for d in cp:
if d.ident in calls:
print(' \033[32m✓ {0}\033[0m'.format(d.display))
else:
print(' \033[31m✗ {0}\033[0m'.format(d.display))
perc = int(((len(defs) - len(notused)) / len(defs)) * 100)
print('\nTotal coverage: {0}% ({1} functions out of {2} are being called)\n'.format(perc, len(defs) - len(notused), len(defs)))
# vi:ts=4:et
| tu = cindex.TranslationUnit.from_source(git2dir + '.h', cflags)
process_diagnostics(tu)
headers = glob.glob(os.path.join(git2dir, '*.h'))
defs = {}
objapi = ['lookup', 'lookup_prefix', 'free', 'id', 'owner']
objderiv = ['commit', 'tree', 'tag', 'blob']
ignore = set()
for deriv in objderiv:
for api in objapi:
ignore.add('git_' + deriv + '_' + api)
for cursor in walk_cursors(tu, headers):
if cursor.kind == cindex.CursorKind.FUNCTION_DECL:
deff = Definition(cursor)
if not deff.ident in ignore:
defs[deff.ident] = deff
return defs | identifier_body |
coverage.py | #!/usr/bin/env python3
import os, sys, glob, pickle, subprocess
sys.path.insert(0, os.path.dirname(__file__))
from clang import cindex
sys.path = sys.path[1:]
def configure_libclang():
llvm_libdirs = ['/usr/lib/llvm-3.2/lib', '/usr/lib64/llvm']
try:
libdir = subprocess.check_output(['llvm-config', '--libdir']).decode('utf-8').strip()
llvm_libdirs.insert(0, libdir)
except OSError:
pass
for d in llvm_libdirs:
if not os.path.exists(d):
continue
files = glob.glob(os.path.join(d, 'libclang.so*'))
if len(files) != 0:
cindex.Config.set_library_file(files[0])
return
class Call:
def __init__(self, cursor, decl):
self.ident = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
self.decl_filename = decl.location.file.name.decode('utf-8')
class Definition:
def __init__(self, cursor):
self.ident = cursor.spelling.decode('utf-8')
self.display = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
def process_diagnostics(tu):
diagnostics = tu.diagnostics
haserr = False
for d in diagnostics:
sys.stderr.write('{0}\n'.format(d.format.decode('utf-8')))
if d.severity > cindex.Diagnostic.Warning:
haserr = True
if haserr:
sys.exit(1)
def walk_cursors(tu, files):
proc = list(tu.cursor.get_children())
while len(proc) > 0:
cursor = proc[0]
proc = proc[1:]
if cursor.location.file is None:
continue
fname = cursor.location.file.name.decode('utf-8')
if fname in files:
|
def newer(a, b):
try:
return os.stat(a).st_mtime > os.stat(b).st_mtime
except:
return True
def scan_libgit2_glib(cflags, files, git2dir):
files = [os.path.abspath(f) for f in files]
dname = os.path.dirname(__file__)
allcalls = {}
l = 0
if not os.getenv('SILENT'):
sys.stderr.write('\n')
i = 0
for f in files:
if not os.getenv('SILENT'):
name = os.path.basename(f)
if len(name) > l:
l = len(name)
perc = int((i / len(files)) * 100)
sys.stderr.write('[{0: >3}%] Processing ... {1}{2}\r'.format(perc, name, ' ' * (l - len(name))))
i += 1
astf = os.path.join(dname, '.' + os.path.basename(f) + '.cache')
if not newer(f, astf):
with open(astf, 'rb') as fo:
calls = pickle.load(fo)
else:
tu = cindex.TranslationUnit.from_source(f, cflags)
process_diagnostics(tu)
calls = {}
for cursor in walk_cursors(tu, files):
if cursor.kind == cindex.CursorKind.CALL_EXPR or \
cursor.kind == cindex.CursorKind.DECL_REF_EXPR:
cdecl = cursor.get_referenced()
if cdecl.kind != cindex.CursorKind.FUNCTION_DECL:
continue
if (not cdecl is None) and (not cdecl.location.file is None):
fdefname = cdecl.location.file.name.decode('utf-8')
if fdefname.startswith(git2dir):
call = Call(cursor, cdecl)
if call.ident in calls:
calls[call.ident].append(call)
else:
calls[call.ident] = [call]
with open(astf, 'wb') as fo:
pickle.dump(calls, fo)
for k in calls:
if k in allcalls:
allcalls[k] += calls[k]
else:
allcalls[k] = list(calls[k])
if not os.getenv('SILENT'):
sys.stderr.write('\r[100%] Processing ... done{0}\n'.format(' ' * (l - 4)))
return allcalls
def scan_libgit2(cflags, git2dir):
tu = cindex.TranslationUnit.from_source(git2dir + '.h', cflags)
process_diagnostics(tu)
headers = glob.glob(os.path.join(git2dir, '*.h'))
defs = {}
objapi = ['lookup', 'lookup_prefix', 'free', 'id', 'owner']
objderiv = ['commit', 'tree', 'tag', 'blob']
ignore = set()
for deriv in objderiv:
for api in objapi:
ignore.add('git_' + deriv + '_' + api)
for cursor in walk_cursors(tu, headers):
if cursor.kind == cindex.CursorKind.FUNCTION_DECL:
deff = Definition(cursor)
if not deff.ident in ignore:
defs[deff.ident] = deff
return defs
configure_libclang()
pos = sys.argv.index('--')
cflags = sys.argv[1:pos]
files = sys.argv[pos+1:]
incdir = os.getenv('LIBGIT2_INCLUDE_DIR')
defs = scan_libgit2(cflags, incdir)
calls = scan_libgit2_glib(cflags, files, incdir)
notused = {}
perfile = {}
nperfile = {}
for d in defs:
o = defs[d]
if not d in calls:
notused[d] = defs[d]
if not o.filename in nperfile:
nperfile[o.filename] = [o]
else:
nperfile[o.filename].append(o)
if not o.filename in perfile:
perfile[o.filename] = [o]
else:
perfile[o.filename].append(o)
ss = [notused[f] for f in notused]
ss.sort(key=lambda x: '{0} {1}'.format(os.path.basename(x.filename), x.ident))
lastf = None
keys = list(perfile.keys())
keys.sort()
for filename in keys:
b = os.path.basename(filename)
f = perfile[filename]
n_perfile = len(f)
if filename in nperfile:
n_nperfile = len(nperfile[filename])
else:
n_nperfile = 0
perc = int(((n_perfile - n_nperfile) / n_perfile) * 100)
print('\n File {0}, coverage {1}% ({2} out of {3}):'.format(b, perc, n_perfile - n_nperfile, n_perfile))
cp = list(f)
cp.sort(key=lambda x: "{0} {1}".format(not x.ident in calls, x.ident))
for d in cp:
if d.ident in calls:
print(' \033[32m✓ {0}\033[0m'.format(d.display))
else:
print(' \033[31m✗ {0}\033[0m'.format(d.display))
perc = int(((len(defs) - len(notused)) / len(defs)) * 100)
print('\nTotal coverage: {0}% ({1} functions out of {2} are being called)\n'.format(perc, len(defs) - len(notused), len(defs)))
# vi:ts=4:et
| yield cursor
proc += list(cursor.get_children()) | conditional_block |
transforms.js | import Ember from 'ember';
const {
String: { htmlSafe }
} = Ember;
export function pageTransform(width) {
let transform = `transform: translate3d(${width}%, 0px, 0px)`;
return htmlSafe(`-webkit-${transform}; -ms-${transform}; ${transform}`);
}
export function pageTransformOffset(offset, columns) {
let width = 1.0 / columns * 100.0;
let widthStr = -(width + offset).toFixed(14);
return pageTransform(widthStr);
}
let SLIDER_TRANSFORM_COMMON = 'transition-duration: 400ms; transition-timing-function: cubic-bezier(0.5, 0, 0.1, 0); transition-delay: 0ms;';
export function | (width) {
return htmlSafe(`transform: translate3d(${width}px, 0px, 0px); ${SLIDER_TRANSFORM_COMMON}`);
}
export function sliderTransformHover(width) {
return htmlSafe(`transform: scale(2) translate3d(${width}px, 0px, 0px); z-index: 4; ${SLIDER_TRANSFORM_COMMON}`);
}
| sliderTransform | identifier_name |
transforms.js | import Ember from 'ember';
const {
String: { htmlSafe }
} = Ember;
export function pageTransform(width) {
let transform = `transform: translate3d(${width}%, 0px, 0px)`;
return htmlSafe(`-webkit-${transform}; -ms-${transform}; ${transform}`);
}
export function pageTransformOffset(offset, columns) {
let width = 1.0 / columns * 100.0;
let widthStr = -(width + offset).toFixed(14);
return pageTransform(widthStr);
}
let SLIDER_TRANSFORM_COMMON = 'transition-duration: 400ms; transition-timing-function: cubic-bezier(0.5, 0, 0.1, 0); transition-delay: 0ms;';
export function sliderTransform(width) {
return htmlSafe(`transform: translate3d(${width}px, 0px, 0px); ${SLIDER_TRANSFORM_COMMON}`);
} |
export function sliderTransformHover(width) {
return htmlSafe(`transform: scale(2) translate3d(${width}px, 0px, 0px); z-index: 4; ${SLIDER_TRANSFORM_COMMON}`);
} | random_line_split |
|
transforms.js | import Ember from 'ember';
const {
String: { htmlSafe }
} = Ember;
export function pageTransform(width) |
export function pageTransformOffset(offset, columns) {
let width = 1.0 / columns * 100.0;
let widthStr = -(width + offset).toFixed(14);
return pageTransform(widthStr);
}
let SLIDER_TRANSFORM_COMMON = 'transition-duration: 400ms; transition-timing-function: cubic-bezier(0.5, 0, 0.1, 0); transition-delay: 0ms;';
export function sliderTransform(width) {
return htmlSafe(`transform: translate3d(${width}px, 0px, 0px); ${SLIDER_TRANSFORM_COMMON}`);
}
export function sliderTransformHover(width) {
return htmlSafe(`transform: scale(2) translate3d(${width}px, 0px, 0px); z-index: 4; ${SLIDER_TRANSFORM_COMMON}`);
}
| {
let transform = `transform: translate3d(${width}%, 0px, 0px)`;
return htmlSafe(`-webkit-${transform}; -ms-${transform}; ${transform}`);
} | identifier_body |
policy_base.js | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './strings.m.js';
import 'chrome://resources/js/action_link.js';
// <if expr="is_ios">
import 'chrome://resources/js/ios/web_ui.js';
// </if>
import {addSingletonGetter, addWebUIListener} from 'chrome://resources/js/cr.m.js';
import {define as crUiDefine} from 'chrome://resources/js/cr/ui.m.js';
import {FocusOutlineManager} from 'chrome://resources/js/cr/ui/focus_outline_manager.m.js';
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
import {$} from 'chrome://resources/js/util.m.js';
/**
* @typedef {{
* [id: string]: {
* name: string,
* policyNames: !Array<string>,
* }}
*/
let PolicyNamesResponse;
/**
* @typedef {!Array<{
* name: string,
* id: ?String,
* policies: {[name: string]: policy.Policy},
* precedenceOrder: ?Array<string>,
* }>}
*/
let PolicyValuesResponse;
/**
* @typedef {{
* level: string,
* scope: string,
* source: string,
* value: any,
* }}
*/
let Conflict;
/**
* @typedef {{
* ignored?: boolean,
* name: string,
* level: string,
* link: ?string,
* scope: string,
* source: string,
* error: string,
* warning: string,
* info: string,
* value: any,
* deprecated: ?boolean,
* future: ?boolean,
* allSourcesMerged: ?boolean,
* conflicts: ?Array<!Conflict>,
* superseded: ?Array<!Conflict>,
* }}
*/
let Policy;
/**
* @typedef {{
* id: ?string,
* isExtension?: boolean,
* name: string,
* policies: !Array<!Policy>,
* precedenceOrder: ?Array<string>,
* }}
*/
let PolicyTableModel;
/**
* A box that shows the status of cloud policy for a device, machine or user.
* @constructor
* @extends {HTMLFieldSetElement}
*/
const StatusBox = crUiDefine(function() {
const node = $('status-box-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
StatusBox.prototype = {
// Set up the prototype chain.
__proto__: HTMLFieldSetElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {},
/**
* Sets the text of a particular named label element in the status box
* and updates the visibility if needed.
* @param {string} labelName The name of the label element that is being
* updated.
* @param {string} labelValue The new text content for the label.
* @param {boolean=} needsToBeShown True if we want to show the label
* False otherwise.
*/
setLabelAndShow_(labelName, labelValue, needsToBeShown = true) {
const labelElement = this.querySelector(labelName);
labelElement.textContent = labelValue ? ' ' + labelValue : '';
if (needsToBeShown) {
labelElement.parentElement.hidden = false;
}
},
/**
* Populate the box with the given cloud policy status.
* @param {string} scope The policy scope, either "device", "machine",
* "user", or "updater".
* @param {Object} status Dictionary with information about the status.
*/
initialize(scope, status) {
const notSpecifiedString = loadTimeData.getString('notSpecified');
// Set appropriate box legend based on status key
this.querySelector('.legend').textContent =
loadTimeData.getString(status.boxLegendKey);
if (scope === 'device') {
// Populate the device naming information.
// Populate the asset identifier.
this.setLabelAndShow_('.asset-id', status.assetId || notSpecifiedString);
// Populate the device location.
this.setLabelAndShow_('.location', status.location || notSpecifiedString);
// Populate the directory API ID.
this.setLabelAndShow_(
'.directory-api-id', status.directoryApiId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
// For off-hours policy, indicate if it's active or not.
if (status.isOffHoursActive != null) {
this.setLabelAndShow_(
'.is-offhours-active',
loadTimeData.getString(
status.isOffHoursActive ? 'offHoursActive' :
'offHoursNotActive'));
}
} else if (scope === 'machine') {
this.setLabelAndShow_('.machine-enrollment-device-id', status.deviceId);
this.setLabelAndShow_(
'.machine-enrollment-token', status.enrollmentToken);
if (status.machine) {
this.setLabelAndShow_('.machine-enrollment-name', status.machine);
}
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
} else if (scope === 'updater') {
if (status.version) {
this.setLabelAndShow_('.version', status.version);
}
if (status.domain) {
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
}
} else {
// Populate the topmost item with the username.
this.setLabelAndShow_('.username', status.username);
// Populate the user gaia id.
this.setLabelAndShow_('.gaia-id', status.gaiaId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
if (status.isAffiliated != null) {
this.setLabelAndShow_(
'.is-affiliated',
loadTimeData.getString(
status.isAffiliated ? 'isAffiliatedYes' : 'isAffiliatedNo'));
}
}
if (status.enterpriseDomainManager) {
this.setLabelAndShow_('.managed-by', status.enterpriseDomainManager);
}
if (status.timeSinceLastRefresh) {
this.setLabelAndShow_(
'.time-since-last-refresh', status.timeSinceLastRefresh);
}
if (scope !== 'updater') {
this.setLabelAndShow_('.refresh-interval', status.refreshInterval);
this.setLabelAndShow_('.status', status.status);
this.setLabelAndShow_(
'.policy-push',
loadTimeData.getString(
status.policiesPushAvailable ? 'policiesPushOn' :
'policiesPushOff'));
}
if (status.lastCloudReportSentTimestamp) {
this.setLabelAndShow_(
'.last-cloud-report-sent-timestamp',
status.lastCloudReportSentTimestamp + ' (' +
status.timeSinceLastCloudReportSent + ')');
}
},
};
/**
* A single policy conflict's entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyConflict = crUiDefine(function() {
const node = $('policy-conflict-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyConflict.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
decorate() {},
/**
* @param {Conflict} conflict
* @param {string} row_label
*/
initialize(conflict, row_label) {
this.querySelector('.scope').textContent = loadTimeData.getString(
conflict.scope === 'user' ? 'scopeUser' : 'scopeDevice');
this.querySelector('.level').textContent = loadTimeData.getString(
conflict.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
this.querySelector('.source').textContent =
loadTimeData.getString(conflict.source);
this.querySelector('.value.row .value').textContent = conflict.value;
this.querySelector('.name').textContent = loadTimeData.getString(row_label);
}
};
/**
* A single policy's row entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyRow = crUiDefine(function() {
const node = $('policy-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyRow.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
const toggle = this.querySelector('.policy.row .toggle');
toggle.addEventListener('click', this.toggleExpanded_.bind(this));
const copy = this.querySelector('.copy-value');
copy.addEventListener('click', this.copyValue_.bind(this));
},
/** @param {Policy} policy */
initialize(policy) {
/** @type {Policy} */
this.policy = policy;
/** @private {boolean} */
this.unset_ = policy.value === undefined;
/** @private {boolean} */
this.hasErrors_ = !!policy.error;
/** @private {boolean} */
this.hasWarnings_ = !!policy.warning;
/** @private {boolean} */
this.hasInfos_ = !!policy.info;
/** @private {boolean} */
this.hasConflicts_ = !!policy.conflicts;
/** @private {boolean} */
this.hasSuperseded_ = !!policy.superseded;
/** @private {boolean} */
this.isMergedValue_ = !!policy.allSourcesMerged;
/** @private {boolean} */
this.deprecated_ = !!policy.deprecated;
/** @private {boolean} */
this.future_ = !!policy.future;
// Populate the name column.
const nameDisplay = this.querySelector('.name .link span');
nameDisplay.textContent = policy.name;
if (policy.link) {
const link = this.querySelector('.name .link');
link.href = policy.link;
link.title = loadTimeData.getStringF('policyLearnMore', policy.name);
} else {
this.classList.add('no-help-link');
}
// Populate the remaining columns with policy scope, level and value if a
// value has been set. Otherwise, leave them blank.
if (!this.unset_) {
const scopeDisplay = this.querySelector('.scope');
scopeDisplay.textContent = loadTimeData.getString(
policy.scope === 'user' ? 'scopeUser' : 'scopeDevice');
const levelDisplay = this.querySelector('.level');
levelDisplay.textContent = loadTimeData.getString(
policy.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
const sourceDisplay = this.querySelector('.source');
sourceDisplay.textContent = loadTimeData.getString(policy.source);
// Reduces load on the DOM for long values;
const truncatedValue =
(policy.value && policy.value.toString().length > 256) ?
`${policy.value.toString().substr(0, 256)}\u2026` :
policy.value;
const valueDisplay = this.querySelector('.value');
valueDisplay.textContent = truncatedValue;
const copyLink = this.querySelector('.copy .link');
copyLink.title = loadTimeData.getStringF('policyCopyValue', policy.name);
const valueRowContentDisplay = this.querySelector('.value.row .value');
valueRowContentDisplay.textContent = policy.value;
const errorRowContentDisplay = this.querySelector('.errors.row .value');
errorRowContentDisplay.textContent = policy.error;
const warningRowContentDisplay =
this.querySelector('.warnings.row .value');
warningRowContentDisplay.textContent = policy.warning;
const infoRowContentDisplay = this.querySelector('.infos.row .value');
infoRowContentDisplay.textContent = policy.info;
const messagesDisplay = this.querySelector('.messages');
const errorsNotice =
this.hasErrors_ ? loadTimeData.getString('error') : '';
const deprecationNotice =
this.deprecated_ ? loadTimeData.getString('deprecated') : '';
const futureNotice = this.future_ ? loadTimeData.getString('future') : '';
const warningsNotice =
this.hasWarnings_ ? loadTimeData.getString('warning') : '';
const conflictsNotice = this.hasConflicts_ && !this.isMergedValue_ ?
loadTimeData.getString('conflict') :
'';
const ignoredNotice =
this.policy.ignored ? loadTimeData.getString('ignored') : '';
let notice =
[
errorsNotice, deprecationNotice, futureNotice, warningsNotice,
ignoredNotice, conflictsNotice
].filter(x => !!x)
.join(', ') ||
loadTimeData.getString('ok');
const supersededNotice = this.hasSuperseded_ && !this.isMergedValue_ ?
loadTimeData.getString('superseding') :
'';
if (supersededNotice) {
// Include superseded notice regardless of other notices
notice += `, ${supersededNotice}`;
}
messagesDisplay.textContent = notice;
if (policy.conflicts) {
policy.conflicts.forEach(conflict => {
const row = new PolicyConflict;
row.initialize(conflict, 'conflictValue');
this.appendChild(row);
});
}
if (policy.superseded) {
policy.superseded.forEach(superseded => {
const row = new PolicyConflict;
row.initialize(superseded, 'supersededValue');
this.appendChild(row);
});
}
} else {
const messagesDisplay = this.querySelector('.messages');
messagesDisplay.textContent = loadTimeData.getString('unset');
}
},
/**
* Copies the policy's value to the clipboard.
* @private
*/
copyValue_() {
const policyValueDisplay = this.querySelector('.value.row .value');
// Select the text that will be copied.
const selection = window.getSelection();
const range = window.document.createRange();
range.selectNodeContents(policyValueDisplay);
selection.removeAllRanges();
selection.addRange(range);
// Copy the policy value to the clipboard.
navigator.clipboard.writeText(policyValueDisplay.innerText).catch(error => {
console.error('Unable to copy policy value to clipboard:', error);
});
},
/**
* Toggle the visibility of an additional row containing the complete text.
* @private
*/
toggleExpanded_() {
const warningRowDisplay = this.querySelector('.warnings.row');
const errorRowDisplay = this.querySelector('.errors.row');
const infoRowDisplay = this.querySelector('.infos.row');
const valueRowDisplay = this.querySelector('.value.row');
valueRowDisplay.hidden = !valueRowDisplay.hidden;
if (valueRowDisplay.hidden) {
this.classList.remove('expanded');
} else {
this.classList.add('expanded');
}
this.querySelector('.show-more').hidden = !valueRowDisplay.hidden;
this.querySelector('.show-less').hidden = valueRowDisplay.hidden;
if (this.hasWarnings_) {
warningRowDisplay.hidden = !warningRowDisplay.hidden;
}
if (this.hasErrors_) {
errorRowDisplay.hidden = !errorRowDisplay.hidden;
}
if (this.hasInfos_) {
infoRowDisplay.hidden = !infoRowDisplay.hidden;
}
this.querySelectorAll('.policy-conflict-data')
.forEach(row => row.hidden = !row.hidden);
this.querySelectorAll('.policy-superseded-data')
.forEach(row => row.hidden = !row.hidden);
},
};
/**
* A row describing the current policy precedence.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyPrecedenceRow = crUiDefine(function() {
const node = $('policy-precedence-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyPrecedenceRow.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
decorate() {},
/**
* @param {Array<String>} precedenceOrder Array containing ordered strings
* which represent the order of policy precedence.
*/
initialize(precedenceOrder) {
this.querySelector('.precedence.row > .value').textContent =
precedenceOrder.join(' > ');
}
};
/**
* A table of policies and their values.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyTable = crUiDefine(function() {
const node = $('policy-table-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyTable.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
this.policies_ = {};
this.filterPattern_ = '';
},
/** @param {PolicyTableModel} dataModel */
update(dataModel) {
// Clear policies
const mainContent = this.querySelector('.main');
const policies = this.querySelectorAll('.policy-data');
this.querySelector('.header').textContent = dataModel.name;
this.querySelector('.id').textContent = dataModel.id;
this.querySelector('.id').hidden = !dataModel.id;
policies.forEach(row => mainContent.removeChild(row));
dataModel.policies
.sort((a, b) => {
if ((a.value !== undefined && b.value !== undefined) ||
a.value === b.value) {
if (a.link !== undefined && b.link !== undefined) {
// Sorting the policies in ascending alpha order.
return a.name > b.name ? 1 : -1;
}
// Sorting so unknown policies are last.
return a.link !== undefined ? -1 : 1;
}
// Sorting so unset values are last.
return a.value !== undefined ? -1 : 1;
})
.forEach(policy => {
const policyRow = new PolicyRow;
policyRow.initialize(policy);
mainContent.appendChild(policyRow);
});
this.filter();
// Show the current policy precedence order in the Policy Precedence table.
if (dataModel.name === 'Policy Precedence') {
// Clear previous precedence row.
const precedenceRowOld = this.querySelectorAll('.policy-precedence-data');
precedenceRowOld.forEach(row => mainContent.removeChild(row));
const precedenceRow = new PolicyPrecedenceRow;
precedenceRow.initialize(dataModel.precedenceOrder);
mainContent.appendChild(precedenceRow);
}
},
/**
* Set the filter pattern. Only policies whose name contains |pattern| are
* shown in the policy table. The filter is case insensitive. It can be
* disabled by setting |pattern| to an empty string.
* @param {string} pattern The filter pattern.
*/
setFilterPattern(pattern) {
this.filterPattern_ = pattern.toLowerCase();
this.filter();
},
/**
* Filter policies. Only policies whose name contains the filter pattern are
* shown in the table. Furthermore, policies whose value is not currently
* set are only shown if the corresponding checkbox is checked.
*/
filter() {
const showUnset = $('show-unset').checked;
const policies = this.querySelectorAll('.policy-data');
for (let i = 0; i < policies.length; i++) {
const policyDisplay = policies[i];
policyDisplay.hidden =
policyDisplay.policy.value === undefined && !showUnset ||
policyDisplay.policy.name.toLowerCase().indexOf(
this.filterPattern_) === -1;
}
this.querySelector('.no-policy').hidden =
!!this.querySelector('.policy-data:not([hidden])');
},
};
/**
* A singleton object that handles communication between browser and WebUI.
*/
export class Page {
constructor() {
/** @type {?Element} */
this.mainSection = null;
/** @type {{[id: string]: PolicyTable}} */
this.policyTables = {};
}
/**
* Main initialization function. Called by the browser on page load.
*/
initialize() {
FocusOutlineManager.forDocument(document);
this.mainSection = $('main-section');
// Place the initial focus on the filter input field.
$('filter').focus();
$('filter').onsearch = () => {
for (const policyTable in this.policyTables) {
this.policyTables[policyTable].setFilterPattern($('filter').value);
}
};
$('reload-policies').onclick = () => {
$('reload-policies').disabled = true;
$('screen-reader-message').textContent =
loadTimeData.getString('loadingPolicies');
chrome.send('reloadPolicies');
};
const exportButton = $('export-policies');
const hideExportButton = loadTimeData.valueExists('hideExportButton') &&
loadTimeData.getBoolean('hideExportButton');
if (hideExportButton) {
exportButton.style.display = 'none';
} else {
exportButton.onclick = () => {
chrome.send('exportPoliciesJSON');
};
}
$('copy-policies').onclick = () => {
chrome.send('copyPoliciesJSON');
};
$('show-unset').onchange = () => {
for (const policyTable in this.policyTables) {
this.policyTables[policyTable].filter();
}
};
chrome.send('listenPoliciesUpdates');
addWebUIListener('status-updated', status => this.setStatus(status));
addWebUIListener(
'policies-updated',
(names, values) => this.onPoliciesReceived_(names, values));
addWebUIListener('download-json', json => this.downloadJson(json));
}
/**
* @param {PolicyNamesResponse} policyNames
* @param {PolicyValuesResponse} policyValues
* @private
*/
onPoliciesReceived_(policyNames, policyValues) |
/**
* Triggers the download of the policies as a JSON file.
* @param {String} json The policies as a JSON string.
*/
downloadJson(json) {
const blob = new Blob([json], {type: 'application/json'});
const blobUrl = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = blobUrl;
link.download = 'policies.json';
document.body.appendChild(link);
link.dispatchEvent(new MouseEvent(
'click', {bubbles: true, cancelable: true, view: window}));
document.body.removeChild(link);
}
/** @param {PolicyTableModel} dataModel */
createOrUpdatePolicyTable(dataModel) {
const id = `${dataModel.name}-${dataModel.id}`;
if (!this.policyTables[id]) {
this.policyTables[id] = new PolicyTable;
this.mainSection.appendChild(this.policyTables[id]);
}
this.policyTables[id].update(dataModel);
}
/**
* Update the status section of the page to show the current cloud policy
* status.
* @param {Object} status Dictionary containing the current policy status.
*/
setStatus(status) {
// Remove any existing status boxes.
const container = $('status-box-container');
while (container.firstChild) {
container.removeChild(container.firstChild);
}
// Hide the status section.
const section = $('status-section');
section.hidden = true;
// Add a status box for each scope that has a cloud policy status.
for (const scope in status) {
const box = new StatusBox;
box.initialize(scope, status[scope]);
container.appendChild(box);
// Show the status section.
section.hidden = false;
}
}
/**
* Re-enable the reload policies button when the previous request to reload
* policies values has completed.
*/
reloadPoliciesDone() {
$('reload-policies').disabled = false;
$('screen-reader-message').textContent =
loadTimeData.getString('loadPoliciesDone');
}
}
// Make Page a singleton.
addSingletonGetter(Page);
| {
/** @type {Array<!PolicyTableModel>} */
const policyGroups = policyValues.map(value => {
const knownPolicyNames =
policyNames[value.id] ? policyNames[value.id].policyNames : [];
const knownPolicyNamesSet = new Set(knownPolicyNames);
const receivedPolicyNames = Object.keys(value.policies);
const allPolicyNames =
Array.from(new Set([...knownPolicyNames, ...receivedPolicyNames]));
const policies = allPolicyNames.map(
name => Object.assign(
{
name,
link: [
policyNames.chrome.policyNames,
policyNames.precedence?.policyNames
].includes(knownPolicyNames) &&
knownPolicyNamesSet.has(name) ?
`https://chromeenterprise.google/policies/?policy=${name}` :
undefined,
},
value.policies[name]));
return {
name: value.forSigninScreen ?
`${value.name} [${loadTimeData.getString('signinProfile')}]` :
value.name,
id: value.isExtension ? value.id : null,
policies,
...(value.precedenceOrder && {precedenceOrder: value.precedenceOrder}),
};
});
policyGroups.forEach(group => this.createOrUpdatePolicyTable(group));
this.reloadPoliciesDone();
} | identifier_body |
policy_base.js | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './strings.m.js';
import 'chrome://resources/js/action_link.js';
// <if expr="is_ios">
import 'chrome://resources/js/ios/web_ui.js';
// </if>
import {addSingletonGetter, addWebUIListener} from 'chrome://resources/js/cr.m.js';
import {define as crUiDefine} from 'chrome://resources/js/cr/ui.m.js';
import {FocusOutlineManager} from 'chrome://resources/js/cr/ui/focus_outline_manager.m.js';
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
import {$} from 'chrome://resources/js/util.m.js';
/**
* @typedef {{
* [id: string]: {
* name: string,
* policyNames: !Array<string>,
* }}
*/
let PolicyNamesResponse;
/**
* @typedef {!Array<{
* name: string,
* id: ?String,
* policies: {[name: string]: policy.Policy},
* precedenceOrder: ?Array<string>,
* }>}
*/
let PolicyValuesResponse;
/**
* @typedef {{
* level: string,
* scope: string,
* source: string,
* value: any,
* }}
*/
let Conflict;
/**
* @typedef {{
* ignored?: boolean,
* name: string,
* level: string,
* link: ?string,
* scope: string,
* source: string,
* error: string,
* warning: string,
* info: string,
* value: any,
* deprecated: ?boolean,
* future: ?boolean,
* allSourcesMerged: ?boolean,
* conflicts: ?Array<!Conflict>,
* superseded: ?Array<!Conflict>,
* }}
*/
let Policy;
/**
* @typedef {{
* id: ?string,
* isExtension?: boolean,
* name: string,
* policies: !Array<!Policy>,
* precedenceOrder: ?Array<string>,
* }}
*/
let PolicyTableModel;
/**
* A box that shows the status of cloud policy for a device, machine or user.
* @constructor
* @extends {HTMLFieldSetElement}
*/
const StatusBox = crUiDefine(function() {
const node = $('status-box-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
StatusBox.prototype = {
// Set up the prototype chain.
__proto__: HTMLFieldSetElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {},
/**
* Sets the text of a particular named label element in the status box
* and updates the visibility if needed.
* @param {string} labelName The name of the label element that is being
* updated.
* @param {string} labelValue The new text content for the label.
* @param {boolean=} needsToBeShown True if we want to show the label
* False otherwise.
*/
setLabelAndShow_(labelName, labelValue, needsToBeShown = true) {
const labelElement = this.querySelector(labelName);
labelElement.textContent = labelValue ? ' ' + labelValue : '';
if (needsToBeShown) {
labelElement.parentElement.hidden = false;
}
},
/**
* Populate the box with the given cloud policy status.
* @param {string} scope The policy scope, either "device", "machine",
* "user", or "updater".
* @param {Object} status Dictionary with information about the status.
*/
initialize(scope, status) {
const notSpecifiedString = loadTimeData.getString('notSpecified');
// Set appropriate box legend based on status key
this.querySelector('.legend').textContent =
loadTimeData.getString(status.boxLegendKey);
if (scope === 'device') {
// Populate the device naming information.
// Populate the asset identifier.
this.setLabelAndShow_('.asset-id', status.assetId || notSpecifiedString);
// Populate the device location.
this.setLabelAndShow_('.location', status.location || notSpecifiedString);
// Populate the directory API ID.
this.setLabelAndShow_(
'.directory-api-id', status.directoryApiId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
// For off-hours policy, indicate if it's active or not.
if (status.isOffHoursActive != null) {
this.setLabelAndShow_(
'.is-offhours-active',
loadTimeData.getString(
status.isOffHoursActive ? 'offHoursActive' :
'offHoursNotActive'));
}
} else if (scope === 'machine') {
this.setLabelAndShow_('.machine-enrollment-device-id', status.deviceId);
this.setLabelAndShow_(
'.machine-enrollment-token', status.enrollmentToken);
if (status.machine) {
this.setLabelAndShow_('.machine-enrollment-name', status.machine);
}
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
} else if (scope === 'updater') {
if (status.version) {
this.setLabelAndShow_('.version', status.version);
}
if (status.domain) {
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
}
} else {
// Populate the topmost item with the username.
this.setLabelAndShow_('.username', status.username);
// Populate the user gaia id.
this.setLabelAndShow_('.gaia-id', status.gaiaId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
if (status.isAffiliated != null) {
this.setLabelAndShow_(
'.is-affiliated',
loadTimeData.getString(
status.isAffiliated ? 'isAffiliatedYes' : 'isAffiliatedNo'));
}
}
if (status.enterpriseDomainManager) {
this.setLabelAndShow_('.managed-by', status.enterpriseDomainManager);
}
if (status.timeSinceLastRefresh) {
this.setLabelAndShow_(
'.time-since-last-refresh', status.timeSinceLastRefresh);
}
if (scope !== 'updater') {
this.setLabelAndShow_('.refresh-interval', status.refreshInterval);
this.setLabelAndShow_('.status', status.status);
this.setLabelAndShow_(
'.policy-push',
loadTimeData.getString(
status.policiesPushAvailable ? 'policiesPushOn' :
'policiesPushOff'));
}
if (status.lastCloudReportSentTimestamp) {
this.setLabelAndShow_(
'.last-cloud-report-sent-timestamp',
status.lastCloudReportSentTimestamp + ' (' +
status.timeSinceLastCloudReportSent + ')');
}
},
};
/**
* A single policy conflict's entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyConflict = crUiDefine(function() {
const node = $('policy-conflict-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyConflict.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
decorate() {},
/**
* @param {Conflict} conflict
* @param {string} row_label
*/
initialize(conflict, row_label) {
this.querySelector('.scope').textContent = loadTimeData.getString(
conflict.scope === 'user' ? 'scopeUser' : 'scopeDevice');
this.querySelector('.level').textContent = loadTimeData.getString(
conflict.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
this.querySelector('.source').textContent =
loadTimeData.getString(conflict.source);
this.querySelector('.value.row .value').textContent = conflict.value;
this.querySelector('.name').textContent = loadTimeData.getString(row_label);
}
};
/**
* A single policy's row entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyRow = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyRow.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
const toggle = this.querySelector('.policy.row .toggle');
toggle.addEventListener('click', this.toggleExpanded_.bind(this));
const copy = this.querySelector('.copy-value');
copy.addEventListener('click', this.copyValue_.bind(this));
},
/** @param {Policy} policy */
initialize(policy) {
/** @type {Policy} */
this.policy = policy;
/** @private {boolean} */
this.unset_ = policy.value === undefined;
/** @private {boolean} */
this.hasErrors_ = !!policy.error;
/** @private {boolean} */
this.hasWarnings_ = !!policy.warning;
/** @private {boolean} */
this.hasInfos_ = !!policy.info;
/** @private {boolean} */
this.hasConflicts_ = !!policy.conflicts;
/** @private {boolean} */
this.hasSuperseded_ = !!policy.superseded;
/** @private {boolean} */
this.isMergedValue_ = !!policy.allSourcesMerged;
/** @private {boolean} */
this.deprecated_ = !!policy.deprecated;
/** @private {boolean} */
this.future_ = !!policy.future;
// Populate the name column.
const nameDisplay = this.querySelector('.name .link span');
nameDisplay.textContent = policy.name;
if (policy.link) {
const link = this.querySelector('.name .link');
link.href = policy.link;
link.title = loadTimeData.getStringF('policyLearnMore', policy.name);
} else {
this.classList.add('no-help-link');
}
// Populate the remaining columns with policy scope, level and value if a
// value has been set. Otherwise, leave them blank.
if (!this.unset_) {
const scopeDisplay = this.querySelector('.scope');
scopeDisplay.textContent = loadTimeData.getString(
policy.scope === 'user' ? 'scopeUser' : 'scopeDevice');
const levelDisplay = this.querySelector('.level');
levelDisplay.textContent = loadTimeData.getString(
policy.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
const sourceDisplay = this.querySelector('.source');
sourceDisplay.textContent = loadTimeData.getString(policy.source);
// Reduces load on the DOM for long values;
const truncatedValue =
(policy.value && policy.value.toString().length > 256) ?
`${policy.value.toString().substr(0, 256)}\u2026` :
policy.value;
const valueDisplay = this.querySelector('.value');
valueDisplay.textContent = truncatedValue;
const copyLink = this.querySelector('.copy .link');
copyLink.title = loadTimeData.getStringF('policyCopyValue', policy.name);
const valueRowContentDisplay = this.querySelector('.value.row .value');
valueRowContentDisplay.textContent = policy.value;
const errorRowContentDisplay = this.querySelector('.errors.row .value');
errorRowContentDisplay.textContent = policy.error;
const warningRowContentDisplay =
this.querySelector('.warnings.row .value');
warningRowContentDisplay.textContent = policy.warning;
const infoRowContentDisplay = this.querySelector('.infos.row .value');
infoRowContentDisplay.textContent = policy.info;
const messagesDisplay = this.querySelector('.messages');
const errorsNotice =
this.hasErrors_ ? loadTimeData.getString('error') : '';
const deprecationNotice =
this.deprecated_ ? loadTimeData.getString('deprecated') : '';
const futureNotice = this.future_ ? loadTimeData.getString('future') : '';
const warningsNotice =
this.hasWarnings_ ? loadTimeData.getString('warning') : '';
const conflictsNotice = this.hasConflicts_ && !this.isMergedValue_ ?
loadTimeData.getString('conflict') :
'';
const ignoredNotice =
this.policy.ignored ? loadTimeData.getString('ignored') : '';
let notice =
[
errorsNotice, deprecationNotice, futureNotice, warningsNotice,
ignoredNotice, conflictsNotice
].filter(x => !!x)
.join(', ') ||
loadTimeData.getString('ok');
const supersededNotice = this.hasSuperseded_ && !this.isMergedValue_ ?
loadTimeData.getString('superseding') :
'';
if (supersededNotice) |
messagesDisplay.textContent = notice;
if (policy.conflicts) {
policy.conflicts.forEach(conflict => {
const row = new PolicyConflict;
row.initialize(conflict, 'conflictValue');
this.appendChild(row);
});
}
if (policy.superseded) {
policy.superseded.forEach(superseded => {
const row = new PolicyConflict;
row.initialize(superseded, 'supersededValue');
this.appendChild(row);
});
}
} else {
const messagesDisplay = this.querySelector('.messages');
messagesDisplay.textContent = loadTimeData.getString('unset');
}
},
/**
* Copies the policy's value to the clipboard.
* @private
*/
copyValue_() {
const policyValueDisplay = this.querySelector('.value.row .value');
// Select the text that will be copied.
const selection = window.getSelection();
const range = window.document.createRange();
range.selectNodeContents(policyValueDisplay);
selection.removeAllRanges();
selection.addRange(range);
// Copy the policy value to the clipboard.
navigator.clipboard.writeText(policyValueDisplay.innerText).catch(error => {
console.error('Unable to copy policy value to clipboard:', error);
});
},
/**
* Toggle the visibility of an additional row containing the complete text.
* @private
*/
toggleExpanded_() {
const warningRowDisplay = this.querySelector('.warnings.row');
const errorRowDisplay = this.querySelector('.errors.row');
const infoRowDisplay = this.querySelector('.infos.row');
const valueRowDisplay = this.querySelector('.value.row');
valueRowDisplay.hidden = !valueRowDisplay.hidden;
if (valueRowDisplay.hidden) {
this.classList.remove('expanded');
} else {
this.classList.add('expanded');
}
this.querySelector('.show-more').hidden = !valueRowDisplay.hidden;
this.querySelector('.show-less').hidden = valueRowDisplay.hidden;
if (this.hasWarnings_) {
warningRowDisplay.hidden = !warningRowDisplay.hidden;
}
if (this.hasErrors_) {
errorRowDisplay.hidden = !errorRowDisplay.hidden;
}
if (this.hasInfos_) {
infoRowDisplay.hidden = !infoRowDisplay.hidden;
}
this.querySelectorAll('.policy-conflict-data')
.forEach(row => row.hidden = !row.hidden);
this.querySelectorAll('.policy-superseded-data')
.forEach(row => row.hidden = !row.hidden);
},
};
/**
* A row describing the current policy precedence.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyPrecedenceRow = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-precedence-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyPrecedenceRow.prototype = {
  // Inherit the HTMLDivElement interface.
  __proto__: HTMLDivElement.prototype,
  decorate() {},
  /**
   * Renders the precedence order as "A > B > C".
   * @param {Array<String>} precedenceOrder Ordered strings representing the
   *     order of policy precedence.
   */
  initialize(precedenceOrder) {
    const joined = precedenceOrder.join(' > ');
    this.querySelector('.precedence.row > .value').textContent = joined;
  }
};
/**
* A table of policies and their values.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyTable = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-table-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyTable.prototype = {
  // Set up the prototype chain.
  __proto__: HTMLDivElement.prototype,
  /**
   * Initialization function for the cr.ui framework.
   * NOTE(review): |policies_| is initialized here but not read in this
   * object's visible methods — possibly vestigial; confirm before removing.
   */
  decorate() {
    this.policies_ = {};
    this.filterPattern_ = '';
  },
  /**
   * Re-renders the table from scratch for the given model: updates the
   * header/id, removes all existing policy rows, then appends one PolicyRow
   * per policy in display order.
   * @param {PolicyTableModel} dataModel
   */
  update(dataModel) {
    // Clear policies
    const mainContent = this.querySelector('.main');
    const policies = this.querySelectorAll('.policy-data');
    this.querySelector('.header').textContent = dataModel.name;
    this.querySelector('.id').textContent = dataModel.id;
    this.querySelector('.id').hidden = !dataModel.id;
    policies.forEach(row => mainContent.removeChild(row));
    // Display order: set policies first, then (within each half) known
    // policies alphabetically before unknown ones.
    dataModel.policies
        .sort((a, b) => {
          if ((a.value !== undefined && b.value !== undefined) ||
              a.value === b.value) {
            if (a.link !== undefined && b.link !== undefined) {
              // Sorting the policies in ascending alpha order.
              return a.name > b.name ? 1 : -1;
            }
            // Sorting so unknown policies are last.
            return a.link !== undefined ? -1 : 1;
          }
          // Sorting so unset values are last.
          return a.value !== undefined ? -1 : 1;
        })
        .forEach(policy => {
          const policyRow = new PolicyRow;
          policyRow.initialize(policy);
          mainContent.appendChild(policyRow);
        });
    this.filter();
    // Show the current policy precedence order in the Policy Precedence table.
    if (dataModel.name === 'Policy Precedence') {
      // Clear previous precedence row.
      const precedenceRowOld = this.querySelectorAll('.policy-precedence-data');
      precedenceRowOld.forEach(row => mainContent.removeChild(row));
      const precedenceRow = new PolicyPrecedenceRow;
      precedenceRow.initialize(dataModel.precedenceOrder);
      mainContent.appendChild(precedenceRow);
    }
  },
  /**
   * Set the filter pattern. Only policies whose name contains |pattern| are
   * shown in the policy table. The filter is case insensitive. It can be
   * disabled by setting |pattern| to an empty string.
   * @param {string} pattern The filter pattern.
   */
  setFilterPattern(pattern) {
    this.filterPattern_ = pattern.toLowerCase();
    this.filter();
  },
  /**
   * Filter policies. Only policies whose name contains the filter pattern are
   * shown in the table. Furthermore, policies whose value is not currently
   * set are only shown if the corresponding checkbox is checked.
   */
  filter() {
    const showUnset = $('show-unset').checked;
    const policies = this.querySelectorAll('.policy-data');
    for (let i = 0; i < policies.length; i++) {
      const policyDisplay = policies[i];
      // Hide when (unset and "show unset" is off) OR the name does not match
      // the filter; && binds tighter than || here.
      policyDisplay.hidden =
          policyDisplay.policy.value === undefined && !showUnset ||
          policyDisplay.policy.name.toLowerCase().indexOf(
              this.filterPattern_) === -1;
    }
    // Show the "no policies" placeholder only when every row is hidden.
    this.querySelector('.no-policy').hidden =
        !!this.querySelector('.policy-data:not([hidden])');
  },
};
/**
* A singleton object that handles communication between browser and WebUI.
*/
export class Page {
  constructor() {
    /** @type {?Element} Container that policy tables are appended to. */
    this.mainSection = null;
    /** @type {{[id: string]: PolicyTable}} Tables keyed by "<name>-<id>". */
    this.policyTables = {};
  }
  /**
   * Main initialization function. Called by the browser on page load.
   * Wires up all controls and subscribes to WebUI events.
   */
  initialize() {
    FocusOutlineManager.forDocument(document);
    this.mainSection = $('main-section');
    // Place the initial focus on the filter input field.
    $('filter').focus();
    // Re-filter every table as the user types in the search box.
    $('filter').onsearch = () => {
      for (const policyTable in this.policyTables) {
        this.policyTables[policyTable].setFilterPattern($('filter').value);
      }
    };
    // The button stays disabled until reloadPoliciesDone() re-enables it.
    $('reload-policies').onclick = () => {
      $('reload-policies').disabled = true;
      $('screen-reader-message').textContent =
          loadTimeData.getString('loadingPolicies');
      chrome.send('reloadPolicies');
    };
    const exportButton = $('export-policies');
    const hideExportButton = loadTimeData.valueExists('hideExportButton') &&
        loadTimeData.getBoolean('hideExportButton');
    if (hideExportButton) {
      exportButton.style.display = 'none';
    } else {
      exportButton.onclick = () => {
        chrome.send('exportPoliciesJSON');
      };
    }
    $('copy-policies').onclick = () => {
      chrome.send('copyPoliciesJSON');
    };
    $('show-unset').onchange = () => {
      for (const policyTable in this.policyTables) {
        this.policyTables[policyTable].filter();
      }
    };
    // Subscribe to pushed updates from the browser.
    chrome.send('listenPoliciesUpdates');
    addWebUIListener('status-updated', status => this.setStatus(status));
    addWebUIListener(
        'policies-updated',
        (names, values) => this.onPoliciesReceived_(names, values));
    addWebUIListener('download-json', json => this.downloadJson(json));
  }
  /**
   * Handles the 'policies-updated' event: merges known and received policy
   * names into table models and renders one table per group.
   * @param {PolicyNamesResponse} policyNames
   * @param {PolicyValuesResponse} policyValues
   * @private
   */
  onPoliciesReceived_(policyNames, policyValues) {
    /** @type {Array<!PolicyTableModel>} */
    const policyGroups = policyValues.map(value => {
      const knownPolicyNames =
          policyNames[value.id] ? policyNames[value.id].policyNames : [];
      const knownPolicyNamesSet = new Set(knownPolicyNames);
      const receivedPolicyNames = Object.keys(value.policies);
      const allPolicyNames =
          Array.from(new Set([...knownPolicyNames, ...receivedPolicyNames]));
      // The .includes() below is an identity check against the policyNames
      // arrays themselves, so documentation links are generated only for the
      // Chrome and precedence groups' known policies.
      const policies = allPolicyNames.map(
          name => Object.assign(
              {
                name,
                link: [
                  policyNames.chrome.policyNames,
                  policyNames.precedence?.policyNames
                ].includes(knownPolicyNames) &&
                        knownPolicyNamesSet.has(name) ?
                    `https://chromeenterprise.google/policies/?policy=${name}` :
                    undefined,
              },
              value.policies[name]));
      return {
        name: value.forSigninScreen ?
            `${value.name} [${loadTimeData.getString('signinProfile')}]` :
            value.name,
        id: value.isExtension ? value.id : null,
        policies,
        ...(value.precedenceOrder && {precedenceOrder: value.precedenceOrder}),
      };
    });
    policyGroups.forEach(group => this.createOrUpdatePolicyTable(group));
    this.reloadPoliciesDone();
  }
  /**
   * Triggers the download of the policies as a JSON file.
   * @param {String} json The policies as a JSON string.
   */
  downloadJson(json) {
    const blob = new Blob([json], {type: 'application/json'});
    const blobUrl = URL.createObjectURL(blob);
    // Create a temporary <a download> element and click it programmatically.
    const link = document.createElement('a');
    link.href = blobUrl;
    link.download = 'policies.json';
    document.body.appendChild(link);
    link.dispatchEvent(new MouseEvent(
        'click', {bubbles: true, cancelable: true, view: window}));
    document.body.removeChild(link);
    // Release the object URL so the Blob it pins can be garbage collected;
    // otherwise every export leaks the full JSON payload until page unload.
    URL.revokeObjectURL(blobUrl);
  }
  /**
   * Looks up the table for |dataModel| (creating and attaching it on first
   * sight) and re-renders it with the new data.
   * @param {PolicyTableModel} dataModel
   */
  createOrUpdatePolicyTable(dataModel) {
    const id = `${dataModel.name}-${dataModel.id}`;
    if (!this.policyTables[id]) {
      this.policyTables[id] = new PolicyTable;
      this.mainSection.appendChild(this.policyTables[id]);
    }
    this.policyTables[id].update(dataModel);
  }
  /**
   * Update the status section of the page to show the current cloud policy
   * status.
   * @param {Object} status Dictionary containing the current policy status.
   */
  setStatus(status) {
    // Remove any existing status boxes.
    const container = $('status-box-container');
    while (container.firstChild) {
      container.removeChild(container.firstChild);
    }
    // Hide the status section.
    const section = $('status-section');
    section.hidden = true;
    // Add a status box for each scope that has a cloud policy status.
    for (const scope in status) {
      const box = new StatusBox;
      box.initialize(scope, status[scope]);
      container.appendChild(box);
      // Show the status section.
      section.hidden = false;
    }
  }
  /**
   * Re-enable the reload policies button when the previous request to reload
   * policies values has completed.
   */
  reloadPoliciesDone() {
    $('reload-policies').disabled = false;
    $('screen-reader-message').textContent =
        loadTimeData.getString('loadPoliciesDone');
  }
}
// Make Page a singleton.
addSingletonGetter(Page);
| {
// Include superseded notice regardless of other notices
notice += `, ${supersededNotice}`;
} | conditional_block |
policy_base.js | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './strings.m.js';
import 'chrome://resources/js/action_link.js';
// <if expr="is_ios">
import 'chrome://resources/js/ios/web_ui.js';
// </if>
import {addSingletonGetter, addWebUIListener} from 'chrome://resources/js/cr.m.js';
import {define as crUiDefine} from 'chrome://resources/js/cr/ui.m.js';
import {FocusOutlineManager} from 'chrome://resources/js/cr/ui/focus_outline_manager.m.js';
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
import {$} from 'chrome://resources/js/util.m.js';
/**
 * @typedef {{
 *   [id: string]: {
 *     name: string,
 *     policyNames: !Array<string>,
 *   },
 * }}
*/
let PolicyNamesResponse;
/**
* @typedef {!Array<{
* name: string,
* id: ?String,
* policies: {[name: string]: policy.Policy},
* precedenceOrder: ?Array<string>,
* }>}
*/
let PolicyValuesResponse;
/**
* @typedef {{
* level: string,
* scope: string,
* source: string,
* value: any,
* }}
*/
let Conflict;
/**
* @typedef {{
* ignored?: boolean,
* name: string,
* level: string,
* link: ?string,
* scope: string,
* source: string,
* error: string,
* warning: string,
* info: string,
* value: any,
* deprecated: ?boolean,
* future: ?boolean,
* allSourcesMerged: ?boolean,
* conflicts: ?Array<!Conflict>,
* superseded: ?Array<!Conflict>,
* }}
*/
let Policy;
/**
* @typedef {{
* id: ?string,
* isExtension?: boolean,
* name: string,
* policies: !Array<!Policy>,
* precedenceOrder: ?Array<string>,
* }}
*/
let PolicyTableModel;
/**
* A box that shows the status of cloud policy for a device, machine or user.
* @constructor
* @extends {HTMLFieldSetElement}
*/
const StatusBox = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('status-box-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
StatusBox.prototype = {
  // Set up the prototype chain.
  __proto__: HTMLFieldSetElement.prototype,
  /**
   * Initialization function for the cr.ui framework. Intentionally empty:
   * all per-instance setup happens in initialize().
   */
  decorate() {},
  /**
   * Sets the text of a particular named label element in the status box
   * and updates the visibility if needed.
   * @param {string} labelName Selector of the label element being updated.
   * @param {string} labelValue The new text content for the label.
   * @param {boolean=} needsToBeShown True if the label's row should be made
   *     visible; false leaves its current visibility untouched.
   */
  setLabelAndShow_(labelName, labelValue, needsToBeShown = true) {
    const labelElement = this.querySelector(labelName);
    // A leading space separates the value from the label's static caption.
    labelElement.textContent = labelValue ? ' ' + labelValue : '';
    if (needsToBeShown) {
      labelElement.parentElement.hidden = false;
    }
  },
  /**
   * Populate the box with the given cloud policy status.
   * @param {string} scope The policy scope, either "device", "machine",
   *     "user", or "updater".
   * @param {Object} status Dictionary with information about the status.
   */
  initialize(scope, status) {
    const notSpecifiedString = loadTimeData.getString('notSpecified');
    // Set appropriate box legend based on status key
    this.querySelector('.legend').textContent =
        loadTimeData.getString(status.boxLegendKey);
    if (scope === 'device') {
      // Populate the device naming information.
      // Populate the asset identifier.
      this.setLabelAndShow_('.asset-id', status.assetId || notSpecifiedString);
      // Populate the device location.
      this.setLabelAndShow_('.location', status.location || notSpecifiedString);
      // Populate the directory API ID.
      this.setLabelAndShow_(
          '.directory-api-id', status.directoryApiId || notSpecifiedString);
      this.setLabelAndShow_('.client-id', status.clientId);
      // For off-hours policy, indicate if it's active or not.
      if (status.isOffHoursActive != null) {
        this.setLabelAndShow_(
            '.is-offhours-active',
            loadTimeData.getString(
                status.isOffHoursActive ? 'offHoursActive' :
                                          'offHoursNotActive'));
      }
    } else if (scope === 'machine') {
      this.setLabelAndShow_('.machine-enrollment-device-id', status.deviceId);
      this.setLabelAndShow_(
          '.machine-enrollment-token', status.enrollmentToken);
      if (status.machine) {
        this.setLabelAndShow_('.machine-enrollment-name', status.machine);
      }
      this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
    } else if (scope === 'updater') {
      if (status.version) {
        this.setLabelAndShow_('.version', status.version);
      }
      if (status.domain) {
        this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
      }
    } else {
      // Populate the topmost item with the username.
      this.setLabelAndShow_('.username', status.username);
      // Populate the user gaia id.
      this.setLabelAndShow_('.gaia-id', status.gaiaId || notSpecifiedString);
      this.setLabelAndShow_('.client-id', status.clientId);
      if (status.isAffiliated != null) {
        this.setLabelAndShow_(
            '.is-affiliated',
            loadTimeData.getString(
                status.isAffiliated ? 'isAffiliatedYes' : 'isAffiliatedNo'));
      }
    }
    // Fields below are shared by several scopes; each is shown only when the
    // browser reported a value for it.
    if (status.enterpriseDomainManager) {
      this.setLabelAndShow_('.managed-by', status.enterpriseDomainManager);
    }
    if (status.timeSinceLastRefresh) {
      this.setLabelAndShow_(
          '.time-since-last-refresh', status.timeSinceLastRefresh);
    }
    if (scope !== 'updater') {
      this.setLabelAndShow_('.refresh-interval', status.refreshInterval);
      this.setLabelAndShow_('.status', status.status);
      this.setLabelAndShow_(
          '.policy-push',
          loadTimeData.getString(
              status.policiesPushAvailable ? 'policiesPushOn' :
                                             'policiesPushOff'));
    }
    if (status.lastCloudReportSentTimestamp) {
      this.setLabelAndShow_(
          '.last-cloud-report-sent-timestamp',
          status.lastCloudReportSentTimestamp + ' (' +
              status.timeSinceLastCloudReportSent + ')');
    }
  },
};
/**
* A single policy conflict's entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyConflict = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-conflict-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyConflict.prototype = {
  // Inherit the HTMLDivElement interface.
  __proto__: HTMLDivElement.prototype,
  decorate() {},
  /**
   * Fills in this row with the conflicting policy's metadata and value.
   * @param {Conflict} conflict Description of the conflicting policy source.
   * @param {string} row_label Message key used as the row's caption.
   */
  initialize(conflict, row_label) {
    // Arrow function keeps |this| bound to the row element.
    const setText = (selector, text) => {
      this.querySelector(selector).textContent = text;
    };
    const scopeKey = conflict.scope === 'user' ? 'scopeUser' : 'scopeDevice';
    const levelKey = conflict.level === 'recommended' ? 'levelRecommended' :
                                                        'levelMandatory';
    setText('.scope', loadTimeData.getString(scopeKey));
    setText('.level', loadTimeData.getString(levelKey));
    setText('.source', loadTimeData.getString(conflict.source));
    setText('.value.row .value', conflict.value);
    setText('.name', loadTimeData.getString(row_label));
  }
};
/**
* A single policy's row entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyRow = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyRow.prototype = {
  // Set up the prototype chain.
  __proto__: HTMLDivElement.prototype,
  /**
   * Initialization function for the cr.ui framework: wires up the expand
   * toggle and the copy-to-clipboard affordance.
   */
  decorate() {
    const toggle = this.querySelector('.policy.row .toggle');
    toggle.addEventListener('click', this.toggleExpanded_.bind(this));
    const copy = this.querySelector('.copy-value');
    copy.addEventListener('click', this.copyValue_.bind(this));
  },
  /**
   * Populates the row (name, scope, level, source, value and status
   * messages) from one policy's state.
   * @param {Policy} policy
   */
  initialize(policy) {
    /** @type {Policy} */
    this.policy = policy;
    /** @private {boolean} */
    this.unset_ = policy.value === undefined;
    /** @private {boolean} */
    this.hasErrors_ = !!policy.error;
    /** @private {boolean} */
    this.hasWarnings_ = !!policy.warning;
    /** @private {boolean} */
    this.hasInfos_ = !!policy.info;
    /** @private {boolean} */
    this.hasConflicts_ = !!policy.conflicts;
    /** @private {boolean} */
    this.hasSuperseded_ = !!policy.superseded;
    /** @private {boolean} */
    this.isMergedValue_ = !!policy.allSourcesMerged;
    /** @private {boolean} */
    this.deprecated_ = !!policy.deprecated;
    /** @private {boolean} */
    this.future_ = !!policy.future;
    // Populate the name column.
    const nameDisplay = this.querySelector('.name .link span');
    nameDisplay.textContent = policy.name;
    if (policy.link) {
      const link = this.querySelector('.name .link');
      link.href = policy.link;
      link.title = loadTimeData.getStringF('policyLearnMore', policy.name);
    } else {
      this.classList.add('no-help-link');
    }
    // Populate the remaining columns with policy scope, level and value if a
    // value has been set. Otherwise, leave them blank.
    if (!this.unset_) {
      const scopeDisplay = this.querySelector('.scope');
      scopeDisplay.textContent = loadTimeData.getString(
          policy.scope === 'user' ? 'scopeUser' : 'scopeDevice');
      const levelDisplay = this.querySelector('.level');
      levelDisplay.textContent = loadTimeData.getString(
          policy.level === 'recommended' ? 'levelRecommended' :
                                           'levelMandatory');
      const sourceDisplay = this.querySelector('.source');
      sourceDisplay.textContent = loadTimeData.getString(policy.source);
      // Reduces load on the DOM for long values; the collapsed cell shows at
      // most 256 characters followed by an ellipsis.
      const truncatedValue =
          (policy.value && policy.value.toString().length > 256) ?
          `${policy.value.toString().substr(0, 256)}\u2026` :
          policy.value;
      const valueDisplay = this.querySelector('.value');
      valueDisplay.textContent = truncatedValue;
      const copyLink = this.querySelector('.copy .link');
      copyLink.title = loadTimeData.getStringF('policyCopyValue', policy.name);
      // The expandable row carries the full, untruncated value.
      const valueRowContentDisplay = this.querySelector('.value.row .value');
      valueRowContentDisplay.textContent = policy.value;
      const errorRowContentDisplay = this.querySelector('.errors.row .value');
      errorRowContentDisplay.textContent = policy.error;
      const warningRowContentDisplay =
          this.querySelector('.warnings.row .value');
      warningRowContentDisplay.textContent = policy.warning;
      const infoRowContentDisplay = this.querySelector('.infos.row .value');
      infoRowContentDisplay.textContent = policy.info;
      // Assemble the status summary shown in the "messages" column.
      const messagesDisplay = this.querySelector('.messages');
      const errorsNotice =
          this.hasErrors_ ? loadTimeData.getString('error') : '';
      const deprecationNotice =
          this.deprecated_ ? loadTimeData.getString('deprecated') : '';
      const futureNotice = this.future_ ? loadTimeData.getString('future') : '';
      const warningsNotice =
          this.hasWarnings_ ? loadTimeData.getString('warning') : '';
      const conflictsNotice = this.hasConflicts_ && !this.isMergedValue_ ?
          loadTimeData.getString('conflict') :
          '';
      const ignoredNotice =
          this.policy.ignored ? loadTimeData.getString('ignored') : '';
      // Falls back to the "ok" string when no notice applies.
      let notice =
          [
            errorsNotice, deprecationNotice, futureNotice, warningsNotice,
            ignoredNotice, conflictsNotice
          ].filter(x => !!x)
              .join(', ') ||
          loadTimeData.getString('ok');
      const supersededNotice = this.hasSuperseded_ && !this.isMergedValue_ ?
          loadTimeData.getString('superseding') :
          '';
      if (supersededNotice) {
        // Include superseded notice regardless of other notices
        notice += `, ${supersededNotice}`;
      }
      messagesDisplay.textContent = notice;
      // Append one child row per conflicting / superseded policy source.
      if (policy.conflicts) {
        policy.conflicts.forEach(conflict => {
          const row = new PolicyConflict;
          row.initialize(conflict, 'conflictValue');
          this.appendChild(row);
        });
      }
      if (policy.superseded) {
        policy.superseded.forEach(superseded => {
          const row = new PolicyConflict;
          row.initialize(superseded, 'supersededValue');
          this.appendChild(row);
        });
      }
    } else {
      const messagesDisplay = this.querySelector('.messages');
      messagesDisplay.textContent = loadTimeData.getString('unset');
    }
  },
  /**
   * Copies the policy's value to the clipboard.
   * @private
   */
  copyValue_() {
    const policyValueDisplay = this.querySelector('.value.row .value');
    // Select the text that will be copied.
    const selection = window.getSelection();
    const range = window.document.createRange();
    range.selectNodeContents(policyValueDisplay);
    selection.removeAllRanges();
    selection.addRange(range);
    // Copy the policy value to the clipboard.
    navigator.clipboard.writeText(policyValueDisplay.innerText).catch(error => {
      console.error('Unable to copy policy value to clipboard:', error);
    });
  },
  /**
   * Toggle the visibility of an additional row containing the complete text.
   * @private
   */
  toggleExpanded_() {
    const warningRowDisplay = this.querySelector('.warnings.row');
    const errorRowDisplay = this.querySelector('.errors.row');
    const infoRowDisplay = this.querySelector('.infos.row');
    const valueRowDisplay = this.querySelector('.value.row');
    valueRowDisplay.hidden = !valueRowDisplay.hidden;
    if (valueRowDisplay.hidden) {
      this.classList.remove('expanded');
    } else {
      this.classList.add('expanded');
    }
    // "Show more" is offered while collapsed, "show less" while expanded.
    this.querySelector('.show-more').hidden = !valueRowDisplay.hidden;
    this.querySelector('.show-less').hidden = valueRowDisplay.hidden;
    if (this.hasWarnings_) {
      warningRowDisplay.hidden = !warningRowDisplay.hidden;
    }
    if (this.hasErrors_) {
      errorRowDisplay.hidden = !errorRowDisplay.hidden;
    }
    if (this.hasInfos_) {
      infoRowDisplay.hidden = !infoRowDisplay.hidden;
    }
    this.querySelectorAll('.policy-conflict-data')
        .forEach(row => row.hidden = !row.hidden);
    this.querySelectorAll('.policy-superseded-data')
        .forEach(row => row.hidden = !row.hidden);
  },
};
/**
* A row describing the current policy precedence.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyPrecedenceRow = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-precedence-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyPrecedenceRow.prototype = {
  // Inherit the HTMLDivElement interface.
  __proto__: HTMLDivElement.prototype,
  decorate() {},
  /**
   * Renders the precedence order as "A > B > C".
   * @param {Array<String>} precedenceOrder Ordered strings representing the
   *     order of policy precedence.
   */
  initialize(precedenceOrder) {
    const joined = precedenceOrder.join(' > ');
    this.querySelector('.precedence.row > .value').textContent = joined;
  }
};
/**
* A table of policies and their values.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyTable = crUiDefine(function() {
  // Deep-clone the hidden template and strip its id so the copy is unique.
  const clone = $('policy-table-template').cloneNode(/* deep= */ true);
  clone.removeAttribute('id');
  return clone;
});
PolicyTable.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
this.policies_ = {};
this.filterPattern_ = '';
},
/** @param {PolicyTableModel} dataModel */
update(dataModel) {
// Clear policies
const mainContent = this.querySelector('.main');
const policies = this.querySelectorAll('.policy-data');
this.querySelector('.header').textContent = dataModel.name;
this.querySelector('.id').textContent = dataModel.id;
this.querySelector('.id').hidden = !dataModel.id;
policies.forEach(row => mainContent.removeChild(row));
dataModel.policies
.sort((a, b) => {
if ((a.value !== undefined && b.value !== undefined) ||
a.value === b.value) {
if (a.link !== undefined && b.link !== undefined) {
// Sorting the policies in ascending alpha order.
return a.name > b.name ? 1 : -1;
}
// Sorting so unknown policies are last.
return a.link !== undefined ? -1 : 1;
}
// Sorting so unset values are last.
return a.value !== undefined ? -1 : 1;
})
.forEach(policy => {
const policyRow = new PolicyRow;
policyRow.initialize(policy);
mainContent.appendChild(policyRow);
});
this.filter();
// Show the current policy precedence order in the Policy Precedence table.
if (dataModel.name === 'Policy Precedence') {
// Clear previous precedence row.
const precedenceRowOld = this.querySelectorAll('.policy-precedence-data');
precedenceRowOld.forEach(row => mainContent.removeChild(row));
const precedenceRow = new PolicyPrecedenceRow;
precedenceRow.initialize(dataModel.precedenceOrder);
mainContent.appendChild(precedenceRow);
}
},
/**
* Set the filter pattern. Only policies whose name contains |pattern| are
* shown in the policy table. The filter is case insensitive. It can be
* disabled by setting |pattern| to an empty string.
* @param {string} pattern The filter pattern.
*/
setFilterPattern(pattern) {
this.filterPattern_ = pattern.toLowerCase();
this.filter();
},
/**
* Filter policies. Only policies whose name contains the filter pattern are
* shown in the table. Furthermore, policies whose value is not currently
* set are only shown if the corresponding checkbox is checked.
*/
| () {
const showUnset = $('show-unset').checked;
const policies = this.querySelectorAll('.policy-data');
for (let i = 0; i < policies.length; i++) {
const policyDisplay = policies[i];
policyDisplay.hidden =
policyDisplay.policy.value === undefined && !showUnset ||
policyDisplay.policy.name.toLowerCase().indexOf(
this.filterPattern_) === -1;
}
this.querySelector('.no-policy').hidden =
!!this.querySelector('.policy-data:not([hidden])');
},
};
/**
* A singleton object that handles communication between browser and WebUI.
*/
export class Page {
  constructor() {
    /** @type {?Element} Container that policy tables are appended to. */
    this.mainSection = null;
    /** @type {{[id: string]: PolicyTable}} Tables keyed by "<name>-<id>". */
    this.policyTables = {};
  }
  /**
   * Main initialization function. Called by the browser on page load.
   * Wires up all controls and subscribes to WebUI events.
   */
  initialize() {
    FocusOutlineManager.forDocument(document);
    this.mainSection = $('main-section');
    // Place the initial focus on the filter input field.
    $('filter').focus();
    // Re-filter every table as the user types in the search box.
    $('filter').onsearch = () => {
      for (const policyTable in this.policyTables) {
        this.policyTables[policyTable].setFilterPattern($('filter').value);
      }
    };
    // The button stays disabled until reloadPoliciesDone() re-enables it.
    $('reload-policies').onclick = () => {
      $('reload-policies').disabled = true;
      $('screen-reader-message').textContent =
          loadTimeData.getString('loadingPolicies');
      chrome.send('reloadPolicies');
    };
    const exportButton = $('export-policies');
    const hideExportButton = loadTimeData.valueExists('hideExportButton') &&
        loadTimeData.getBoolean('hideExportButton');
    if (hideExportButton) {
      exportButton.style.display = 'none';
    } else {
      exportButton.onclick = () => {
        chrome.send('exportPoliciesJSON');
      };
    }
    $('copy-policies').onclick = () => {
      chrome.send('copyPoliciesJSON');
    };
    $('show-unset').onchange = () => {
      for (const policyTable in this.policyTables) {
        this.policyTables[policyTable].filter();
      }
    };
    // Subscribe to pushed updates from the browser.
    chrome.send('listenPoliciesUpdates');
    addWebUIListener('status-updated', status => this.setStatus(status));
    addWebUIListener(
        'policies-updated',
        (names, values) => this.onPoliciesReceived_(names, values));
    addWebUIListener('download-json', json => this.downloadJson(json));
  }
  /**
   * Handles the 'policies-updated' WebUI event: merges known and received
   * policy names into table models and renders one table per group.
   * @param {PolicyNamesResponse} policyNames
   * @param {PolicyValuesResponse} policyValues
   * @private
   */
  onPoliciesReceived_(policyNames, policyValues) {
    /** @type {Array<!PolicyTableModel>} */
    const policyGroups = policyValues.map(value => {
      const knownPolicyNames =
          policyNames[value.id] ? policyNames[value.id].policyNames : [];
      const knownPolicyNamesSet = new Set(knownPolicyNames);
      const receivedPolicyNames = Object.keys(value.policies);
      const allPolicyNames =
          Array.from(new Set([...knownPolicyNames, ...receivedPolicyNames]));
      // The .includes() below is an identity check against the policyNames
      // arrays themselves, so documentation links are generated only for the
      // Chrome and precedence groups' known policies.
      const policies = allPolicyNames.map(
          name => Object.assign(
              {
                name,
                link: [
                  policyNames.chrome.policyNames,
                  policyNames.precedence?.policyNames
                ].includes(knownPolicyNames) &&
                        knownPolicyNamesSet.has(name) ?
                    `https://chromeenterprise.google/policies/?policy=${name}` :
                    undefined,
              },
              value.policies[name]));
      return {
        name: value.forSigninScreen ?
            `${value.name} [${loadTimeData.getString('signinProfile')}]` :
            value.name,
        id: value.isExtension ? value.id : null,
        policies,
        ...(value.precedenceOrder && {precedenceOrder: value.precedenceOrder}),
      };
    });
    policyGroups.forEach(group => this.createOrUpdatePolicyTable(group));
    this.reloadPoliciesDone();
  }
/**
* Triggers the download of the policies as a JSON file.
* @param {String} json The policies as a JSON string.
*/
downloadJson(json) {
const blob = new Blob([json], {type: 'application/json'});
const blobUrl = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = blobUrl;
link.download = 'policies.json';
document.body.appendChild(link);
link.dispatchEvent(new MouseEvent(
'click', {bubbles: true, cancelable: true, view: window}));
document.body.removeChild(link);
}
/** @param {PolicyTableModel} dataModel */
createOrUpdatePolicyTable(dataModel) {
const id = `${dataModel.name}-${dataModel.id}`;
if (!this.policyTables[id]) {
this.policyTables[id] = new PolicyTable;
this.mainSection.appendChild(this.policyTables[id]);
}
this.policyTables[id].update(dataModel);
}
/**
* Update the status section of the page to show the current cloud policy
* status.
* @param {Object} status Dictionary containing the current policy status.
*/
setStatus(status) {
// Remove any existing status boxes.
const container = $('status-box-container');
while (container.firstChild) {
container.removeChild(container.firstChild);
}
// Hide the status section.
const section = $('status-section');
section.hidden = true;
// Add a status box for each scope that has a cloud policy status.
for (const scope in status) {
const box = new StatusBox;
box.initialize(scope, status[scope]);
container.appendChild(box);
// Show the status section.
section.hidden = false;
}
}
/**
* Re-enable the reload policies button when the previous request to reload
* policies values has completed.
*/
reloadPoliciesDone() {
$('reload-policies').disabled = false;
$('screen-reader-message').textContent =
loadTimeData.getString('loadPoliciesDone');
}
}
// Make Page a singleton.
addSingletonGetter(Page);
| filter | identifier_name |
policy_base.js | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './strings.m.js';
import 'chrome://resources/js/action_link.js';
// <if expr="is_ios">
import 'chrome://resources/js/ios/web_ui.js';
// </if>
import {addSingletonGetter, addWebUIListener} from 'chrome://resources/js/cr.m.js';
import {define as crUiDefine} from 'chrome://resources/js/cr/ui.m.js';
import {FocusOutlineManager} from 'chrome://resources/js/cr/ui/focus_outline_manager.m.js';
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
import {$} from 'chrome://resources/js/util.m.js';
/**
* @typedef {{
* [id: string]: {
* name: string,
* policyNames: !Array<string>,
* }}
*/
let PolicyNamesResponse;
/**
* @typedef {!Array<{
* name: string,
* id: ?String,
* policies: {[name: string]: policy.Policy},
* precedenceOrder: ?Array<string>,
* }>}
*/
let PolicyValuesResponse;
/**
* @typedef {{
* level: string,
* scope: string,
* source: string,
* value: any,
* }}
*/
let Conflict;
/**
* @typedef {{
* ignored?: boolean,
* name: string,
* level: string,
* link: ?string,
* scope: string,
* source: string,
* error: string,
* warning: string,
* info: string,
* value: any,
* deprecated: ?boolean,
* future: ?boolean,
* allSourcesMerged: ?boolean,
* conflicts: ?Array<!Conflict>,
* superseded: ?Array<!Conflict>,
* }}
*/
let Policy;
/**
* @typedef {{
* id: ?string,
* isExtension?: boolean,
* name: string,
* policies: !Array<!Policy>,
* precedenceOrder: ?Array<string>,
* }}
*/
let PolicyTableModel;
/**
* A box that shows the status of cloud policy for a device, machine or user.
* @constructor
* @extends {HTMLFieldSetElement}
*/
const StatusBox = crUiDefine(function() {
const node = $('status-box-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
StatusBox.prototype = {
// Set up the prototype chain.
__proto__: HTMLFieldSetElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {},
/**
* Sets the text of a particular named label element in the status box
* and updates the visibility if needed.
* @param {string} labelName The name of the label element that is being
* updated.
* @param {string} labelValue The new text content for the label.
* @param {boolean=} needsToBeShown True if we want to show the label
* False otherwise.
*/
setLabelAndShow_(labelName, labelValue, needsToBeShown = true) {
const labelElement = this.querySelector(labelName);
labelElement.textContent = labelValue ? ' ' + labelValue : '';
if (needsToBeShown) {
labelElement.parentElement.hidden = false;
}
},
/**
* Populate the box with the given cloud policy status.
* @param {string} scope The policy scope, either "device", "machine",
* "user", or "updater".
* @param {Object} status Dictionary with information about the status.
*/
initialize(scope, status) {
const notSpecifiedString = loadTimeData.getString('notSpecified');
// Set appropriate box legend based on status key
this.querySelector('.legend').textContent =
loadTimeData.getString(status.boxLegendKey);
if (scope === 'device') {
// Populate the device naming information.
// Populate the asset identifier.
this.setLabelAndShow_('.asset-id', status.assetId || notSpecifiedString);
// Populate the device location.
this.setLabelAndShow_('.location', status.location || notSpecifiedString);
// Populate the directory API ID.
this.setLabelAndShow_(
'.directory-api-id', status.directoryApiId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
// For off-hours policy, indicate if it's active or not.
if (status.isOffHoursActive != null) {
this.setLabelAndShow_(
'.is-offhours-active',
loadTimeData.getString(
status.isOffHoursActive ? 'offHoursActive' :
'offHoursNotActive'));
}
} else if (scope === 'machine') {
this.setLabelAndShow_('.machine-enrollment-device-id', status.deviceId);
this.setLabelAndShow_(
'.machine-enrollment-token', status.enrollmentToken);
if (status.machine) {
this.setLabelAndShow_('.machine-enrollment-name', status.machine);
}
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
} else if (scope === 'updater') {
if (status.version) {
this.setLabelAndShow_('.version', status.version);
}
if (status.domain) {
this.setLabelAndShow_('.machine-enrollment-domain', status.domain);
}
} else {
// Populate the topmost item with the username.
this.setLabelAndShow_('.username', status.username);
// Populate the user gaia id.
this.setLabelAndShow_('.gaia-id', status.gaiaId || notSpecifiedString);
this.setLabelAndShow_('.client-id', status.clientId);
if (status.isAffiliated != null) {
this.setLabelAndShow_(
'.is-affiliated',
loadTimeData.getString(
status.isAffiliated ? 'isAffiliatedYes' : 'isAffiliatedNo'));
}
}
if (status.enterpriseDomainManager) {
this.setLabelAndShow_('.managed-by', status.enterpriseDomainManager);
}
if (status.timeSinceLastRefresh) {
this.setLabelAndShow_(
'.time-since-last-refresh', status.timeSinceLastRefresh);
}
if (scope !== 'updater') {
this.setLabelAndShow_('.refresh-interval', status.refreshInterval);
this.setLabelAndShow_('.status', status.status);
this.setLabelAndShow_(
'.policy-push',
loadTimeData.getString(
status.policiesPushAvailable ? 'policiesPushOn' :
'policiesPushOff'));
}
if (status.lastCloudReportSentTimestamp) {
this.setLabelAndShow_(
'.last-cloud-report-sent-timestamp',
status.lastCloudReportSentTimestamp + ' (' +
status.timeSinceLastCloudReportSent + ')');
}
},
};
/**
* A single policy conflict's entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyConflict = crUiDefine(function() {
const node = $('policy-conflict-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyConflict.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
decorate() {},
/**
* @param {Conflict} conflict
* @param {string} row_label
*/
initialize(conflict, row_label) {
this.querySelector('.scope').textContent = loadTimeData.getString(
conflict.scope === 'user' ? 'scopeUser' : 'scopeDevice');
this.querySelector('.level').textContent = loadTimeData.getString(
conflict.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
this.querySelector('.source').textContent =
loadTimeData.getString(conflict.source);
this.querySelector('.value.row .value').textContent = conflict.value;
this.querySelector('.name').textContent = loadTimeData.getString(row_label);
}
};
/**
* A single policy's row entry in the policy table.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyRow = crUiDefine(function() {
const node = $('policy-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyRow.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
const toggle = this.querySelector('.policy.row .toggle');
toggle.addEventListener('click', this.toggleExpanded_.bind(this));
const copy = this.querySelector('.copy-value');
copy.addEventListener('click', this.copyValue_.bind(this));
},
/** @param {Policy} policy */
initialize(policy) {
/** @type {Policy} */
this.policy = policy;
/** @private {boolean} */
this.unset_ = policy.value === undefined;
/** @private {boolean} */
this.hasErrors_ = !!policy.error;
/** @private {boolean} */
this.hasWarnings_ = !!policy.warning;
/** @private {boolean} */
this.hasInfos_ = !!policy.info;
/** @private {boolean} */
this.hasConflicts_ = !!policy.conflicts;
/** @private {boolean} */
this.hasSuperseded_ = !!policy.superseded;
/** @private {boolean} */
this.isMergedValue_ = !!policy.allSourcesMerged;
/** @private {boolean} */
this.deprecated_ = !!policy.deprecated;
/** @private {boolean} */
this.future_ = !!policy.future;
// Populate the name column.
const nameDisplay = this.querySelector('.name .link span');
nameDisplay.textContent = policy.name;
if (policy.link) {
const link = this.querySelector('.name .link');
link.href = policy.link;
link.title = loadTimeData.getStringF('policyLearnMore', policy.name);
} else {
this.classList.add('no-help-link');
}
// Populate the remaining columns with policy scope, level and value if a
// value has been set. Otherwise, leave them blank.
if (!this.unset_) {
const scopeDisplay = this.querySelector('.scope');
scopeDisplay.textContent = loadTimeData.getString(
policy.scope === 'user' ? 'scopeUser' : 'scopeDevice');
const levelDisplay = this.querySelector('.level');
levelDisplay.textContent = loadTimeData.getString(
policy.level === 'recommended' ? 'levelRecommended' :
'levelMandatory');
const sourceDisplay = this.querySelector('.source');
sourceDisplay.textContent = loadTimeData.getString(policy.source);
// Reduces load on the DOM for long values;
const truncatedValue =
(policy.value && policy.value.toString().length > 256) ?
`${policy.value.toString().substr(0, 256)}\u2026` :
policy.value;
const valueDisplay = this.querySelector('.value');
valueDisplay.textContent = truncatedValue;
const copyLink = this.querySelector('.copy .link');
copyLink.title = loadTimeData.getStringF('policyCopyValue', policy.name);
const valueRowContentDisplay = this.querySelector('.value.row .value');
valueRowContentDisplay.textContent = policy.value;
const errorRowContentDisplay = this.querySelector('.errors.row .value');
errorRowContentDisplay.textContent = policy.error;
const warningRowContentDisplay =
this.querySelector('.warnings.row .value');
warningRowContentDisplay.textContent = policy.warning;
const infoRowContentDisplay = this.querySelector('.infos.row .value');
infoRowContentDisplay.textContent = policy.info;
const messagesDisplay = this.querySelector('.messages');
const errorsNotice =
this.hasErrors_ ? loadTimeData.getString('error') : '';
const deprecationNotice =
this.deprecated_ ? loadTimeData.getString('deprecated') : '';
const futureNotice = this.future_ ? loadTimeData.getString('future') : '';
const warningsNotice =
this.hasWarnings_ ? loadTimeData.getString('warning') : '';
const conflictsNotice = this.hasConflicts_ && !this.isMergedValue_ ?
loadTimeData.getString('conflict') :
'';
const ignoredNotice =
this.policy.ignored ? loadTimeData.getString('ignored') : '';
let notice =
[
errorsNotice, deprecationNotice, futureNotice, warningsNotice,
ignoredNotice, conflictsNotice
].filter(x => !!x)
.join(', ') ||
loadTimeData.getString('ok');
const supersededNotice = this.hasSuperseded_ && !this.isMergedValue_ ?
loadTimeData.getString('superseding') :
'';
if (supersededNotice) {
// Include superseded notice regardless of other notices
notice += `, ${supersededNotice}`;
}
messagesDisplay.textContent = notice;
if (policy.conflicts) {
policy.conflicts.forEach(conflict => {
const row = new PolicyConflict;
row.initialize(conflict, 'conflictValue');
this.appendChild(row);
});
}
if (policy.superseded) {
policy.superseded.forEach(superseded => {
const row = new PolicyConflict;
row.initialize(superseded, 'supersededValue');
this.appendChild(row);
});
}
} else {
const messagesDisplay = this.querySelector('.messages');
messagesDisplay.textContent = loadTimeData.getString('unset');
}
},
/**
* Copies the policy's value to the clipboard.
* @private
*/
copyValue_() {
const policyValueDisplay = this.querySelector('.value.row .value');
// Select the text that will be copied.
const selection = window.getSelection();
const range = window.document.createRange();
range.selectNodeContents(policyValueDisplay);
selection.removeAllRanges();
selection.addRange(range);
// Copy the policy value to the clipboard.
navigator.clipboard.writeText(policyValueDisplay.innerText).catch(error => {
console.error('Unable to copy policy value to clipboard:', error);
});
},
/**
* Toggle the visibility of an additional row containing the complete text.
* @private
*/
toggleExpanded_() {
const warningRowDisplay = this.querySelector('.warnings.row');
const errorRowDisplay = this.querySelector('.errors.row');
const infoRowDisplay = this.querySelector('.infos.row');
const valueRowDisplay = this.querySelector('.value.row');
valueRowDisplay.hidden = !valueRowDisplay.hidden;
if (valueRowDisplay.hidden) {
this.classList.remove('expanded');
} else {
this.classList.add('expanded');
}
this.querySelector('.show-more').hidden = !valueRowDisplay.hidden;
this.querySelector('.show-less').hidden = valueRowDisplay.hidden;
if (this.hasWarnings_) {
warningRowDisplay.hidden = !warningRowDisplay.hidden;
}
if (this.hasErrors_) {
errorRowDisplay.hidden = !errorRowDisplay.hidden;
}
if (this.hasInfos_) {
infoRowDisplay.hidden = !infoRowDisplay.hidden;
}
this.querySelectorAll('.policy-conflict-data')
.forEach(row => row.hidden = !row.hidden);
this.querySelectorAll('.policy-superseded-data')
.forEach(row => row.hidden = !row.hidden);
},
};
/**
* A row describing the current policy precedence.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyPrecedenceRow = crUiDefine(function() {
const node = $('policy-precedence-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyPrecedenceRow.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
decorate() {},
/**
* @param {Array<String>} precedenceOrder Array containing ordered strings
* which represent the order of policy precedence.
*/
initialize(precedenceOrder) {
this.querySelector('.precedence.row > .value').textContent =
precedenceOrder.join(' > ');
}
};
/**
* A table of policies and their values.
* @constructor
* @extends {HTMLDivElement}
*/
const PolicyTable = crUiDefine(function() {
const node = $('policy-table-template').cloneNode(true);
node.removeAttribute('id');
return node;
});
PolicyTable.prototype = {
// Set up the prototype chain.
__proto__: HTMLDivElement.prototype,
/**
* Initialization function for the cr.ui framework.
*/
decorate() {
this.policies_ = {}; | // Clear policies
const mainContent = this.querySelector('.main');
const policies = this.querySelectorAll('.policy-data');
this.querySelector('.header').textContent = dataModel.name;
this.querySelector('.id').textContent = dataModel.id;
this.querySelector('.id').hidden = !dataModel.id;
policies.forEach(row => mainContent.removeChild(row));
dataModel.policies
.sort((a, b) => {
if ((a.value !== undefined && b.value !== undefined) ||
a.value === b.value) {
if (a.link !== undefined && b.link !== undefined) {
// Sorting the policies in ascending alpha order.
return a.name > b.name ? 1 : -1;
}
// Sorting so unknown policies are last.
return a.link !== undefined ? -1 : 1;
}
// Sorting so unset values are last.
return a.value !== undefined ? -1 : 1;
})
.forEach(policy => {
const policyRow = new PolicyRow;
policyRow.initialize(policy);
mainContent.appendChild(policyRow);
});
this.filter();
// Show the current policy precedence order in the Policy Precedence table.
if (dataModel.name === 'Policy Precedence') {
// Clear previous precedence row.
const precedenceRowOld = this.querySelectorAll('.policy-precedence-data');
precedenceRowOld.forEach(row => mainContent.removeChild(row));
const precedenceRow = new PolicyPrecedenceRow;
precedenceRow.initialize(dataModel.precedenceOrder);
mainContent.appendChild(precedenceRow);
}
},
/**
* Set the filter pattern. Only policies whose name contains |pattern| are
* shown in the policy table. The filter is case insensitive. It can be
* disabled by setting |pattern| to an empty string.
* @param {string} pattern The filter pattern.
*/
setFilterPattern(pattern) {
this.filterPattern_ = pattern.toLowerCase();
this.filter();
},
/**
* Filter policies. Only policies whose name contains the filter pattern are
* shown in the table. Furthermore, policies whose value is not currently
* set are only shown if the corresponding checkbox is checked.
*/
filter() {
const showUnset = $('show-unset').checked;
const policies = this.querySelectorAll('.policy-data');
for (let i = 0; i < policies.length; i++) {
const policyDisplay = policies[i];
policyDisplay.hidden =
policyDisplay.policy.value === undefined && !showUnset ||
policyDisplay.policy.name.toLowerCase().indexOf(
this.filterPattern_) === -1;
}
this.querySelector('.no-policy').hidden =
!!this.querySelector('.policy-data:not([hidden])');
},
};
/**
* A singleton object that handles communication between browser and WebUI.
*/
export class Page {
constructor() {
/** @type {?Element} */
this.mainSection = null;
/** @type {{[id: string]: PolicyTable}} */
this.policyTables = {};
}
/**
* Main initialization function. Called by the browser on page load.
*/
initialize() {
FocusOutlineManager.forDocument(document);
this.mainSection = $('main-section');
// Place the initial focus on the filter input field.
$('filter').focus();
$('filter').onsearch = () => {
for (const policyTable in this.policyTables) {
this.policyTables[policyTable].setFilterPattern($('filter').value);
}
};
$('reload-policies').onclick = () => {
$('reload-policies').disabled = true;
$('screen-reader-message').textContent =
loadTimeData.getString('loadingPolicies');
chrome.send('reloadPolicies');
};
const exportButton = $('export-policies');
const hideExportButton = loadTimeData.valueExists('hideExportButton') &&
loadTimeData.getBoolean('hideExportButton');
if (hideExportButton) {
exportButton.style.display = 'none';
} else {
exportButton.onclick = () => {
chrome.send('exportPoliciesJSON');
};
}
$('copy-policies').onclick = () => {
chrome.send('copyPoliciesJSON');
};
$('show-unset').onchange = () => {
for (const policyTable in this.policyTables) {
this.policyTables[policyTable].filter();
}
};
chrome.send('listenPoliciesUpdates');
addWebUIListener('status-updated', status => this.setStatus(status));
addWebUIListener(
'policies-updated',
(names, values) => this.onPoliciesReceived_(names, values));
addWebUIListener('download-json', json => this.downloadJson(json));
}
/**
* @param {PolicyNamesResponse} policyNames
* @param {PolicyValuesResponse} policyValues
* @private
*/
onPoliciesReceived_(policyNames, policyValues) {
/** @type {Array<!PolicyTableModel>} */
const policyGroups = policyValues.map(value => {
const knownPolicyNames =
policyNames[value.id] ? policyNames[value.id].policyNames : [];
const knownPolicyNamesSet = new Set(knownPolicyNames);
const receivedPolicyNames = Object.keys(value.policies);
const allPolicyNames =
Array.from(new Set([...knownPolicyNames, ...receivedPolicyNames]));
const policies = allPolicyNames.map(
name => Object.assign(
{
name,
link: [
policyNames.chrome.policyNames,
policyNames.precedence?.policyNames
].includes(knownPolicyNames) &&
knownPolicyNamesSet.has(name) ?
`https://chromeenterprise.google/policies/?policy=${name}` :
undefined,
},
value.policies[name]));
return {
name: value.forSigninScreen ?
`${value.name} [${loadTimeData.getString('signinProfile')}]` :
value.name,
id: value.isExtension ? value.id : null,
policies,
...(value.precedenceOrder && {precedenceOrder: value.precedenceOrder}),
};
});
policyGroups.forEach(group => this.createOrUpdatePolicyTable(group));
this.reloadPoliciesDone();
}
/**
* Triggers the download of the policies as a JSON file.
* @param {String} json The policies as a JSON string.
*/
downloadJson(json) {
const blob = new Blob([json], {type: 'application/json'});
const blobUrl = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = blobUrl;
link.download = 'policies.json';
document.body.appendChild(link);
link.dispatchEvent(new MouseEvent(
'click', {bubbles: true, cancelable: true, view: window}));
document.body.removeChild(link);
}
/** @param {PolicyTableModel} dataModel */
createOrUpdatePolicyTable(dataModel) {
const id = `${dataModel.name}-${dataModel.id}`;
if (!this.policyTables[id]) {
this.policyTables[id] = new PolicyTable;
this.mainSection.appendChild(this.policyTables[id]);
}
this.policyTables[id].update(dataModel);
}
/**
* Update the status section of the page to show the current cloud policy
* status.
* @param {Object} status Dictionary containing the current policy status.
*/
setStatus(status) {
// Remove any existing status boxes.
const container = $('status-box-container');
while (container.firstChild) {
container.removeChild(container.firstChild);
}
// Hide the status section.
const section = $('status-section');
section.hidden = true;
// Add a status box for each scope that has a cloud policy status.
for (const scope in status) {
const box = new StatusBox;
box.initialize(scope, status[scope]);
container.appendChild(box);
// Show the status section.
section.hidden = false;
}
}
/**
* Re-enable the reload policies button when the previous request to reload
* policies values has completed.
*/
reloadPoliciesDone() {
$('reload-policies').disabled = false;
$('screen-reader-message').textContent =
loadTimeData.getString('loadPoliciesDone');
}
}
// Make Page a singleton.
addSingletonGetter(Page); | this.filterPattern_ = '';
},
/** @param {PolicyTableModel} dataModel */
update(dataModel) { | random_line_split |
lib.rs | //! Binding Rust with Python, both ways!
//!
//! This library will generate and handle type conversions between Python and
//! Rust. To use Python from Rust refer to the
//! [library wiki](https://github.com/iduartgomez/rustypy/wiki), more general examples
//! and information on how to use Rust in Python can also be found there.
//!
//! Checkout the [PyTypes](../rustypy/pytypes/index.html) module documentation for more information
//! on how to write foreign functions that are compliant with Python as well as using the custom
//! types that will ease type conversion.
#![crate_type = "cdylib"]
extern crate cpython;
extern crate libc;
extern crate syn;
extern crate walkdir;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::ptr;
use libc::size_t;
mod macros;
pub mod pytypes;
// re-export
pub use self::pytypes::pybool::PyBool;
pub use self::pytypes::pydict::PyDict;
pub use self::pytypes::pylist::PyList;
pub use self::pytypes::pystring::PyString;
pub use self::pytypes::pytuple::PyTuple;
pub use self::pytypes::PyArg;
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn parse_src(
path: *mut PyString,
krate_data: &mut KrateData,
) -> *mut PyString {
let path = PyString::from_ptr_to_string(path);
let path: &Path = path.as_ref();
let dir = if let Some(parent) = path.parent() {
parent
} else {
// unlikely this happens, but just in case
return PyString::from("crate in root directory not allowed".to_string()).into_raw();
};
for entry in walkdir::WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
if let Some(ext) = e.path().extension() {
ext == "rs"
} else {
false
}
})
{
if let Err(err) = parse_file(krate_data, entry.path()) {
return err;
}
}
ptr::null_mut::<PyString>()
}
fn parse_file(krate_data: &mut KrateData, path: &Path) -> Result<(), *mut PyString> {
let mut f = match File::open(path) {
Ok(file) => file,
Err(_) => {
return Err(
PyString::from(format!("path not found: {}", path.to_str().unwrap())).into_raw(),
)
}
};
let mut src = String::new();
if f.read_to_string(&mut src).is_err() {
return Err(PyString::from(format!(
"failed to read the source file: {}",
path.to_str().unwrap()
))
.into_raw());
}
match syn::parse_file(&src) {
Ok(krate) => {
syn::visit::visit_file(krate_data, &krate);
krate_data.collect_values();
if krate_data.collected.is_empty() {
return Err(PyString::from("zero function calls parsed".to_string()).into_raw());
}
}
Err(err) => return Err(PyString::from(format!("{}", err)).into_raw()),
};
Ok(())
}
#[doc(hidden)]
pub struct KrateData {
functions: Vec<FnDef>,
collected: Vec<String>,
prefixes: Vec<String>,
}
impl KrateData {
fn new(prefixes: Vec<String>) -> KrateData {
KrateData {
functions: vec![],
collected: vec![],
prefixes,
}
}
fn collect_values(&mut self) {
let mut add = true;
for v in self.functions.drain(..) {
let FnDef {
name: mut fndef,
args,
output,
} = v;
let original_name = fndef.clone();
if !args.is_empty() {
fndef.push_str("::");
args.iter().fold(&mut fndef, |acc, arg| {
if let Ok(repr) = type_repr(arg, None) {
acc.push_str(&repr);
acc.push(';');
} else {
eprintln!(
"could not generate bindings for fn `{}`; unacceptable parameters
",
original_name
);
add = false;
}
acc | } else {
// function w/o arguments
fndef.push_str("::();");
}
if add {
match output {
syn::ReturnType::Default => fndef.push_str("type(void)"),
syn::ReturnType::Type(_, ty) => {
if let Ok(ty) = type_repr(&ty, None) {
fndef.push_str(&ty)
} else {
continue;
}
}
}
self.collected.push(fndef);
} else {
add = true
}
}
}
fn add_fn(&mut self, name: String, fn_decl: &syn::ItemFn) {
for prefix in &self.prefixes {
if name.starts_with(prefix) {
let syn::ItemFn { sig, .. } = fn_decl.clone();
let mut args = vec![];
for arg in sig.inputs {
match arg {
syn::FnArg::Typed(pat_ty) => args.push(*pat_ty.ty),
_ => continue,
}
}
self.functions.push(FnDef {
name,
args,
output: sig.output,
});
break;
}
}
}
fn iter_krate(&self, idx: usize) -> Option<&str> {
if self.collected.len() >= (idx + 1) {
Some(&self.collected[idx])
} else {
None
}
}
}
fn type_repr(ty: &syn::Type, r: Option<&str>) -> Result<String, ()> {
let mut repr = String::new();
match ty {
syn::Type::Path(path) => {
let syn::TypePath { path, .. } = path;
if let Some(ty) = path.segments.last() {
if let Some(r) = r {
Ok(format!("type({} {})", r, ty.ident))
} else {
Ok(format!("type({})", ty.ident))
}
} else {
Err(())
}
}
syn::Type::Ptr(ty) => {
let syn::TypePtr {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "*mut",
_ => "*const",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
syn::Type::Reference(ty) => {
let syn::TypeReference {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "&mut",
_ => "&",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
_ => Err(()),
}
}
impl<'ast> syn::visit::Visit<'ast> for KrateData {
fn visit_item(&mut self, item: &syn::Item) {
match item {
syn::Item::Fn(fn_decl, ..) => {
if let syn::Visibility::Public(_) = fn_decl.vis {
let name = format!("{}", fn_decl.sig.ident);
self.add_fn(name, &*fn_decl)
}
}
syn::Item::Mod(mod_item) if mod_item.content.is_some() => {
for item in &mod_item.content.as_ref().unwrap().1 {
self.visit_item(item);
}
}
_ => {}
}
}
}
struct FnDef {
name: String,
output: syn::ReturnType,
args: Vec<syn::Type>,
}
// C FFI for KrateData objects:
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_new(ptr: *mut PyList) -> *mut KrateData {
let p = PyList::from_ptr(ptr);
let p: Vec<String> = PyList::into(p);
Box::into_raw(Box::new(KrateData::new(p)))
}
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_free(ptr: *mut KrateData) {
if ptr.is_null() {
return;
}
Box::from_raw(ptr);
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn krate_data_len(krate: &KrateData) -> size_t {
krate.collected.len()
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn krate_data_iter(krate: &KrateData, idx: size_t) -> *mut PyString {
match krate.iter_krate(idx as usize) {
Some(val) => PyString::from(val).into_raw(),
None => PyString::from("NO_IDX_ERROR").into_raw(),
}
}
#[cfg(test)]
mod parsing_tests {
use super::*;
#[test]
#[ignore]
fn parse_lib() {
let path = std::env::home_dir()
.unwrap()
.join("workspace/sources/rustypy_debug/rust_code/src/lib.rs");
// let path_ori: std::path::PathBuf = std::env::current_dir().unwrap();
// let path: std::path::PathBuf = path_ori
// .parent()
// .unwrap()
// .parent()
// .unwrap()
// .join("tests/rs_test_lib/lib.rs");
// the entry point to the library:
let entry_point = PyString::from(path.to_str().unwrap().to_string()).into_raw();
let mut krate_data = KrateData::new(vec!["python_bind_".to_string()]);
unsafe {
let response = parse_src(entry_point, &mut krate_data);
let response: String = PyString::from_ptr_to_string(response);
assert!(!response.is_empty());
}
}
} | }); | random_line_split |
lib.rs | //! Binding Rust with Python, both ways!
//!
//! This library will generate and handle type conversions between Python and
//! Rust. To use Python from Rust refer to the
//! [library wiki](https://github.com/iduartgomez/rustypy/wiki), more general examples
//! and information on how to use Rust in Python can also be found there.
//!
//! Checkout the [PyTypes](../rustypy/pytypes/index.html) module documentation for more information
//! on how to write foreign functions that are compliant with Python as well as using the custom
//! types that will ease type conversion.
#![crate_type = "cdylib"]
extern crate cpython;
extern crate libc;
extern crate syn;
extern crate walkdir;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::ptr;
use libc::size_t;
mod macros;
pub mod pytypes;
// re-export
pub use self::pytypes::pybool::PyBool;
pub use self::pytypes::pydict::PyDict;
pub use self::pytypes::pylist::PyList;
pub use self::pytypes::pystring::PyString;
pub use self::pytypes::pytuple::PyTuple;
pub use self::pytypes::PyArg;
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn parse_src(
path: *mut PyString,
krate_data: &mut KrateData,
) -> *mut PyString {
let path = PyString::from_ptr_to_string(path);
let path: &Path = path.as_ref();
let dir = if let Some(parent) = path.parent() {
parent
} else {
// unlikely this happens, but just in case
return PyString::from("crate in root directory not allowed".to_string()).into_raw();
};
for entry in walkdir::WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
if let Some(ext) = e.path().extension() {
ext == "rs"
} else {
false
}
})
{
if let Err(err) = parse_file(krate_data, entry.path()) {
return err;
}
}
ptr::null_mut::<PyString>()
}
fn parse_file(krate_data: &mut KrateData, path: &Path) -> Result<(), *mut PyString> {
let mut f = match File::open(path) {
Ok(file) => file,
Err(_) => {
return Err(
PyString::from(format!("path not found: {}", path.to_str().unwrap())).into_raw(),
)
}
};
let mut src = String::new();
if f.read_to_string(&mut src).is_err() {
return Err(PyString::from(format!(
"failed to read the source file: {}",
path.to_str().unwrap()
))
.into_raw());
}
match syn::parse_file(&src) {
Ok(krate) => {
syn::visit::visit_file(krate_data, &krate);
krate_data.collect_values();
if krate_data.collected.is_empty() {
return Err(PyString::from("zero function calls parsed".to_string()).into_raw());
}
}
Err(err) => return Err(PyString::from(format!("{}", err)).into_raw()),
};
Ok(())
}
#[doc(hidden)]
pub struct KrateData {
functions: Vec<FnDef>,
collected: Vec<String>,
prefixes: Vec<String>,
}
impl KrateData {
fn new(prefixes: Vec<String>) -> KrateData {
KrateData {
functions: vec![],
collected: vec![],
prefixes,
}
}
fn collect_values(&mut self) {
let mut add = true;
for v in self.functions.drain(..) {
let FnDef {
name: mut fndef,
args,
output,
} = v;
let original_name = fndef.clone();
if !args.is_empty() {
fndef.push_str("::");
args.iter().fold(&mut fndef, |acc, arg| {
if let Ok(repr) = type_repr(arg, None) {
acc.push_str(&repr);
acc.push(';');
} else {
eprintln!(
"could not generate bindings for fn `{}`; unacceptable parameters
",
original_name
);
add = false;
}
acc
});
} else {
// function w/o arguments
fndef.push_str("::();");
}
if add {
match output {
syn::ReturnType::Default => fndef.push_str("type(void)"),
syn::ReturnType::Type(_, ty) => {
if let Ok(ty) = type_repr(&ty, None) {
fndef.push_str(&ty)
} else {
continue;
}
}
}
self.collected.push(fndef);
} else {
add = true
}
}
}
fn add_fn(&mut self, name: String, fn_decl: &syn::ItemFn) {
for prefix in &self.prefixes {
if name.starts_with(prefix) {
let syn::ItemFn { sig, .. } = fn_decl.clone();
let mut args = vec![];
for arg in sig.inputs {
match arg {
syn::FnArg::Typed(pat_ty) => args.push(*pat_ty.ty),
_ => continue,
}
}
self.functions.push(FnDef {
name,
args,
output: sig.output,
});
break;
}
}
}
fn iter_krate(&self, idx: usize) -> Option<&str> {
if self.collected.len() >= (idx + 1) {
Some(&self.collected[idx])
} else {
None
}
}
}
fn type_repr(ty: &syn::Type, r: Option<&str>) -> Result<String, ()> {
let mut repr = String::new();
match ty {
syn::Type::Path(path) => {
let syn::TypePath { path, .. } = path;
if let Some(ty) = path.segments.last() {
if let Some(r) = r {
Ok(format!("type({} {})", r, ty.ident))
} else {
Ok(format!("type({})", ty.ident))
}
} else {
Err(())
}
}
syn::Type::Ptr(ty) => {
let syn::TypePtr {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "*mut",
_ => "*const",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
syn::Type::Reference(ty) => {
let syn::TypeReference {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "&mut",
_ => "&",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
_ => Err(()),
}
}
impl<'ast> syn::visit::Visit<'ast> for KrateData {
fn visit_item(&mut self, item: &syn::Item) {
match item {
syn::Item::Fn(fn_decl, ..) => {
if let syn::Visibility::Public(_) = fn_decl.vis {
let name = format!("{}", fn_decl.sig.ident);
self.add_fn(name, &*fn_decl)
}
}
syn::Item::Mod(mod_item) if mod_item.content.is_some() => {
for item in &mod_item.content.as_ref().unwrap().1 {
self.visit_item(item);
}
}
_ => {}
}
}
}
struct FnDef {
name: String,
output: syn::ReturnType,
args: Vec<syn::Type>,
}
// C FFI for KrateData objects:
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_new(ptr: *mut PyList) -> *mut KrateData {
let p = PyList::from_ptr(ptr);
let p: Vec<String> = PyList::into(p);
Box::into_raw(Box::new(KrateData::new(p)))
}
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_free(ptr: *mut KrateData) {
if ptr.is_null() {
return;
}
Box::from_raw(ptr);
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn krate_data_len(krate: &KrateData) -> size_t {
krate.collected.len()
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn krate_data_iter(krate: &KrateData, idx: size_t) -> *mut PyString |
#[cfg(test)]
mod parsing_tests {
use super::*;
#[test]
#[ignore]
fn parse_lib() {
let path = std::env::home_dir()
.unwrap()
.join("workspace/sources/rustypy_debug/rust_code/src/lib.rs");
// let path_ori: std::path::PathBuf = std::env::current_dir().unwrap();
// let path: std::path::PathBuf = path_ori
// .parent()
// .unwrap()
// .parent()
// .unwrap()
// .join("tests/rs_test_lib/lib.rs");
// the entry point to the library:
let entry_point = PyString::from(path.to_str().unwrap().to_string()).into_raw();
let mut krate_data = KrateData::new(vec!["python_bind_".to_string()]);
unsafe {
let response = parse_src(entry_point, &mut krate_data);
let response: String = PyString::from_ptr_to_string(response);
assert!(!response.is_empty());
}
}
}
| {
match krate.iter_krate(idx as usize) {
Some(val) => PyString::from(val).into_raw(),
None => PyString::from("NO_IDX_ERROR").into_raw(),
}
} | identifier_body |
lib.rs | //! Binding Rust with Python, both ways!
//!
//! This library will generate and handle type conversions between Python and
//! Rust. To use Python from Rust refer to the
//! [library wiki](https://github.com/iduartgomez/rustypy/wiki), more general examples
//! and information on how to use Rust in Python can also be found there.
//!
//! Checkout the [PyTypes](../rustypy/pytypes/index.html) module documentation for more information
//! on how to write foreign functions that are compliant with Python as well as using the custom
//! types that will ease type conversion.
#![crate_type = "cdylib"]
extern crate cpython;
extern crate libc;
extern crate syn;
extern crate walkdir;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::ptr;
use libc::size_t;
mod macros;
pub mod pytypes;
// re-export
pub use self::pytypes::pybool::PyBool;
pub use self::pytypes::pydict::PyDict;
pub use self::pytypes::pylist::PyList;
pub use self::pytypes::pystring::PyString;
pub use self::pytypes::pytuple::PyTuple;
pub use self::pytypes::PyArg;
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn parse_src(
path: *mut PyString,
krate_data: &mut KrateData,
) -> *mut PyString {
let path = PyString::from_ptr_to_string(path);
let path: &Path = path.as_ref();
let dir = if let Some(parent) = path.parent() {
parent
} else {
// unlikely this happens, but just in case
return PyString::from("crate in root directory not allowed".to_string()).into_raw();
};
for entry in walkdir::WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
if let Some(ext) = e.path().extension() {
ext == "rs"
} else {
false
}
})
{
if let Err(err) = parse_file(krate_data, entry.path()) {
return err;
}
}
ptr::null_mut::<PyString>()
}
fn parse_file(krate_data: &mut KrateData, path: &Path) -> Result<(), *mut PyString> {
let mut f = match File::open(path) {
Ok(file) => file,
Err(_) => {
return Err(
PyString::from(format!("path not found: {}", path.to_str().unwrap())).into_raw(),
)
}
};
let mut src = String::new();
if f.read_to_string(&mut src).is_err() {
return Err(PyString::from(format!(
"failed to read the source file: {}",
path.to_str().unwrap()
))
.into_raw());
}
match syn::parse_file(&src) {
Ok(krate) => {
syn::visit::visit_file(krate_data, &krate);
krate_data.collect_values();
if krate_data.collected.is_empty() {
return Err(PyString::from("zero function calls parsed".to_string()).into_raw());
}
}
Err(err) => return Err(PyString::from(format!("{}", err)).into_raw()),
};
Ok(())
}
#[doc(hidden)]
pub struct KrateData {
functions: Vec<FnDef>,
collected: Vec<String>,
prefixes: Vec<String>,
}
impl KrateData {
fn new(prefixes: Vec<String>) -> KrateData {
KrateData {
functions: vec![],
collected: vec![],
prefixes,
}
}
fn collect_values(&mut self) {
let mut add = true;
for v in self.functions.drain(..) {
let FnDef {
name: mut fndef,
args,
output,
} = v;
let original_name = fndef.clone();
if !args.is_empty() {
fndef.push_str("::");
args.iter().fold(&mut fndef, |acc, arg| {
if let Ok(repr) = type_repr(arg, None) {
acc.push_str(&repr);
acc.push(';');
} else {
eprintln!(
"could not generate bindings for fn `{}`; unacceptable parameters
",
original_name
);
add = false;
}
acc
});
} else {
// function w/o arguments
fndef.push_str("::();");
}
if add {
match output {
syn::ReturnType::Default => fndef.push_str("type(void)"),
syn::ReturnType::Type(_, ty) => {
if let Ok(ty) = type_repr(&ty, None) {
fndef.push_str(&ty)
} else {
continue;
}
}
}
self.collected.push(fndef);
} else {
add = true
}
}
}
fn add_fn(&mut self, name: String, fn_decl: &syn::ItemFn) {
for prefix in &self.prefixes {
if name.starts_with(prefix) {
let syn::ItemFn { sig, .. } = fn_decl.clone();
let mut args = vec![];
for arg in sig.inputs {
match arg {
syn::FnArg::Typed(pat_ty) => args.push(*pat_ty.ty),
_ => continue,
}
}
self.functions.push(FnDef {
name,
args,
output: sig.output,
});
break;
}
}
}
fn iter_krate(&self, idx: usize) -> Option<&str> {
if self.collected.len() >= (idx + 1) {
Some(&self.collected[idx])
} else {
None
}
}
}
fn type_repr(ty: &syn::Type, r: Option<&str>) -> Result<String, ()> {
let mut repr = String::new();
match ty {
syn::Type::Path(path) => {
let syn::TypePath { path, .. } = path;
if let Some(ty) = path.segments.last() {
if let Some(r) = r {
Ok(format!("type({} {})", r, ty.ident))
} else {
Ok(format!("type({})", ty.ident))
}
} else {
Err(())
}
}
syn::Type::Ptr(ty) => {
let syn::TypePtr {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "*mut",
_ => "*const",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
syn::Type::Reference(ty) => {
let syn::TypeReference {
elem, mutability, ..
} = ty;
let m = match mutability {
Some(_) => "&mut",
_ => "&",
};
repr.push_str(&type_repr(&*elem, Some(m))?);
Ok(repr)
}
_ => Err(()),
}
}
impl<'ast> syn::visit::Visit<'ast> for KrateData {
fn visit_item(&mut self, item: &syn::Item) {
match item {
syn::Item::Fn(fn_decl, ..) => {
if let syn::Visibility::Public(_) = fn_decl.vis {
let name = format!("{}", fn_decl.sig.ident);
self.add_fn(name, &*fn_decl)
}
}
syn::Item::Mod(mod_item) if mod_item.content.is_some() => {
for item in &mod_item.content.as_ref().unwrap().1 {
self.visit_item(item);
}
}
_ => {}
}
}
}
struct FnDef {
name: String,
output: syn::ReturnType,
args: Vec<syn::Type>,
}
// C FFI for KrateData objects:
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_new(ptr: *mut PyList) -> *mut KrateData {
let p = PyList::from_ptr(ptr);
let p: Vec<String> = PyList::into(p);
Box::into_raw(Box::new(KrateData::new(p)))
}
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn krate_data_free(ptr: *mut KrateData) {
if ptr.is_null() {
return;
}
Box::from_raw(ptr);
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn | (krate: &KrateData) -> size_t {
krate.collected.len()
}
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn krate_data_iter(krate: &KrateData, idx: size_t) -> *mut PyString {
match krate.iter_krate(idx as usize) {
Some(val) => PyString::from(val).into_raw(),
None => PyString::from("NO_IDX_ERROR").into_raw(),
}
}
#[cfg(test)]
mod parsing_tests {
use super::*;
#[test]
#[ignore]
fn parse_lib() {
let path = std::env::home_dir()
.unwrap()
.join("workspace/sources/rustypy_debug/rust_code/src/lib.rs");
// let path_ori: std::path::PathBuf = std::env::current_dir().unwrap();
// let path: std::path::PathBuf = path_ori
// .parent()
// .unwrap()
// .parent()
// .unwrap()
// .join("tests/rs_test_lib/lib.rs");
// the entry point to the library:
let entry_point = PyString::from(path.to_str().unwrap().to_string()).into_raw();
let mut krate_data = KrateData::new(vec!["python_bind_".to_string()]);
unsafe {
let response = parse_src(entry_point, &mut krate_data);
let response: String = PyString::from_ptr_to_string(response);
assert!(!response.is_empty());
}
}
}
| krate_data_len | identifier_name |
tasks.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { Task } from './task';
import { TaskService } from './task.service';
@Component({
selector: 'my-tasks',
templateUrl: './tasks.component.html'
})
export class | implements OnInit {
tasks: Task[][];
selectedTask: Task;
prevTasksInProgressLength: number;
constructor(private router: Router, private taskService: TaskService) { }
getTasks(): void {
this.taskService.getTasksByStatus(1).then(tasks => { this.tasks[1] = tasks; });
this.taskService.getTasksByStatus(2).then(tasks => { this.tasks[2] = tasks; });
this.taskService.getTasksByStatus(3).then(tasks => this.tasks[3] = tasks);
}
add(title: string, description: string, priority: number): void {
title = title.trim();
description = description.trim();
if (!title || !description || priority > 3 || priority < 1) { return; }
this.taskService.create(title, description, priority)
.then(task => {
this.tasks[1].push(task);
this.selectedTask = null;
});
}
delete(task: Task): void {
this.taskService
.delete(task.id)
.then(() => {
this.tasks[task.status] = this.tasks[task.status].filter(h => h !== task);
if (this.selectedTask === task) { this.selectedTask = null; }
});
}
ngOnInit(): void {
this.tasks = new Array<Task[]>();
this.getTasks();
}
onSelect(task: Task): void {
this.selectedTask = task;
}
gotoDetail(): void {
this.router.navigate(['/detail', this.selectedTask.id]);
}
addTo($event: any, zone:number): void {
console.log(zone);
let droppedTask: Task = $event.dragData;
var status = droppedTask.status;
var index = this.tasks[status].indexOf(droppedTask);
droppedTask.status = zone;
this.taskService.update(droppedTask)
.then(() => {
this.tasks[status].splice(index, 1);
this.tasks[zone].push(droppedTask);
});
}
}
| TasksComponent | identifier_name |
tasks.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { Task } from './task';
import { TaskService } from './task.service';
@Component({
selector: 'my-tasks',
templateUrl: './tasks.component.html'
})
export class TasksComponent implements OnInit {
tasks: Task[][];
selectedTask: Task;
prevTasksInProgressLength: number;
constructor(private router: Router, private taskService: TaskService) |
getTasks(): void {
this.taskService.getTasksByStatus(1).then(tasks => { this.tasks[1] = tasks; });
this.taskService.getTasksByStatus(2).then(tasks => { this.tasks[2] = tasks; });
this.taskService.getTasksByStatus(3).then(tasks => this.tasks[3] = tasks);
}
add(title: string, description: string, priority: number): void {
title = title.trim();
description = description.trim();
if (!title || !description || priority > 3 || priority < 1) { return; }
this.taskService.create(title, description, priority)
.then(task => {
this.tasks[1].push(task);
this.selectedTask = null;
});
}
delete(task: Task): void {
this.taskService
.delete(task.id)
.then(() => {
this.tasks[task.status] = this.tasks[task.status].filter(h => h !== task);
if (this.selectedTask === task) { this.selectedTask = null; }
});
}
ngOnInit(): void {
this.tasks = new Array<Task[]>();
this.getTasks();
}
onSelect(task: Task): void {
this.selectedTask = task;
}
gotoDetail(): void {
this.router.navigate(['/detail', this.selectedTask.id]);
}
addTo($event: any, zone:number): void {
console.log(zone);
let droppedTask: Task = $event.dragData;
var status = droppedTask.status;
var index = this.tasks[status].indexOf(droppedTask);
droppedTask.status = zone;
this.taskService.update(droppedTask)
.then(() => {
this.tasks[status].splice(index, 1);
this.tasks[zone].push(droppedTask);
});
}
}
| { } | identifier_body |
tasks.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { Task } from './task';
import { TaskService } from './task.service';
@Component({
selector: 'my-tasks',
templateUrl: './tasks.component.html'
})
export class TasksComponent implements OnInit {
tasks: Task[][];
selectedTask: Task;
prevTasksInProgressLength: number;
constructor(private router: Router, private taskService: TaskService) { }
getTasks(): void {
this.taskService.getTasksByStatus(1).then(tasks => { this.tasks[1] = tasks; });
this.taskService.getTasksByStatus(2).then(tasks => { this.tasks[2] = tasks; });
this.taskService.getTasksByStatus(3).then(tasks => this.tasks[3] = tasks);
}
add(title: string, description: string, priority: number): void {
title = title.trim();
description = description.trim();
if (!title || !description || priority > 3 || priority < 1) { return; }
this.taskService.create(title, description, priority)
.then(task => {
this.tasks[1].push(task);
this.selectedTask = null;
});
}
delete(task: Task): void {
this.taskService
.delete(task.id)
.then(() => {
this.tasks[task.status] = this.tasks[task.status].filter(h => h !== task);
if (this.selectedTask === task) { this.selectedTask = null; }
});
}
ngOnInit(): void {
this.tasks = new Array<Task[]>();
this.getTasks();
}
onSelect(task: Task): void {
this.selectedTask = task;
}
gotoDetail(): void {
this.router.navigate(['/detail', this.selectedTask.id]);
}
addTo($event: any, zone:number): void {
console.log(zone);
let droppedTask: Task = $event.dragData;
var status = droppedTask.status;
var index = this.tasks[status].indexOf(droppedTask);
droppedTask.status = zone;
this.taskService.update(droppedTask)
.then(() => {
this.tasks[status].splice(index, 1); | } | this.tasks[zone].push(droppedTask);
});
} | random_line_split |
tasks.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { Task } from './task';
import { TaskService } from './task.service';
@Component({
selector: 'my-tasks',
templateUrl: './tasks.component.html'
})
export class TasksComponent implements OnInit {
tasks: Task[][];
selectedTask: Task;
prevTasksInProgressLength: number;
constructor(private router: Router, private taskService: TaskService) { }
getTasks(): void {
this.taskService.getTasksByStatus(1).then(tasks => { this.tasks[1] = tasks; });
this.taskService.getTasksByStatus(2).then(tasks => { this.tasks[2] = tasks; });
this.taskService.getTasksByStatus(3).then(tasks => this.tasks[3] = tasks);
}
add(title: string, description: string, priority: number): void {
title = title.trim();
description = description.trim();
if (!title || !description || priority > 3 || priority < 1) { return; }
this.taskService.create(title, description, priority)
.then(task => {
this.tasks[1].push(task);
this.selectedTask = null;
});
}
delete(task: Task): void {
this.taskService
.delete(task.id)
.then(() => {
this.tasks[task.status] = this.tasks[task.status].filter(h => h !== task);
if (this.selectedTask === task) |
});
}
ngOnInit(): void {
this.tasks = new Array<Task[]>();
this.getTasks();
}
onSelect(task: Task): void {
this.selectedTask = task;
}
gotoDetail(): void {
this.router.navigate(['/detail', this.selectedTask.id]);
}
addTo($event: any, zone:number): void {
console.log(zone);
let droppedTask: Task = $event.dragData;
var status = droppedTask.status;
var index = this.tasks[status].indexOf(droppedTask);
droppedTask.status = zone;
this.taskService.update(droppedTask)
.then(() => {
this.tasks[status].splice(index, 1);
this.tasks[zone].push(droppedTask);
});
}
}
| { this.selectedTask = null; } | conditional_block |
peerDependencies-type.test.ts | import {lint, ruleType} from '../../../src/rules/peerDependencies-type';
import {Severity} from '../../../src/types/severity';
describe('peerDependencies-type Unit Tests', () => {
describe('a rule type value should be exported', () => {
test('it should equal "standard"', () => {
expect(ruleType).toStrictEqual('standard');
});
});
describe('when package.json has node with incorrect type', () => {
test('LintIssue object should be returned', () => { | const packageJsonData = {
peerDependencies: 'peerDependencies',
};
const response = lint(packageJsonData, Severity.Error);
expect(response.lintId).toStrictEqual('peerDependencies-type');
expect(response.severity).toStrictEqual('error');
expect(response.node).toStrictEqual('peerDependencies');
expect(response.lintMessage).toStrictEqual('Type should be an Object');
});
});
describe('when package.json does not have node', () => {
test('true should be returned', () => {
const packageJsonData = {};
const response = lint(packageJsonData, Severity.Error);
expect(response).toBeNull();
});
});
}); | random_line_split |
|
upload-progress.component.ts | import { Component, OnInit, Input, ChangeDetectorRef } from '@angular/core';
import { UploadService } from '../../services/upload.service';
import { UsersService } from '../../services/users.service';
import { forkJoin } from '../../../../node_modules/rxjs';
import { User } from '../../model/user';
import { Select2OptionData } from 'ng-select2';
import { Options } from 'select2';
import { isDefined } from '../../../../node_modules/@angular/compiler/src/util';
import { UploadJobProgress, UploadJob } from '../../model/upload';
@Component({
selector: 'app-upload-progress',
templateUrl: './upload-progress.component.html',
styleUrls: ['./upload-progress.component.css']
})
export class UploadProgressComponent implements OnInit {
// Incoming members
@Input() currentUserId: number;
// private members
users: User[];
currentUser: User;
// public members
public selectedUser: User;
public isAdminUser: boolean;
public options: Options = { width: '600', multiple: false, tags: true };
public userOptionData: Select2OptionData[] = [];
public showUserDropdown = false;
public inProgressCount = 0;
public inQueueCount = 0;
public awaitingConfirmationCount = 0;
constructor(private uploadService: UploadService,
private usersService: UsersService,
private ref: ChangeDetectorRef) { }
ngOnInit() {
// Define the observable(s)
const usersObservable = this.usersService.getUsers();
// API call(s)
forkJoin([usersObservable]).subscribe(results => {
// Initialise the list of users
this.users = results[0];
// Initialise the current user
// Selected user will be the same as current user to begin with
this.currentUser = this.selectedUser = this.users.find(x => x.Id === this.currentUserId);
// Initialise the user selection dropdown
this.initialiseUsersDropdown();
// The user selection dropdown is displayed if the
// current user is an administrator
this.isAdminUser = this.currentUser.IsAdministrator;
let interval = setInterval(() => {
this.refreshTable();
}, 2000);
});
}
refreshTable(): void {
const user = this.selectedUser;
this.selectedUser = new User();
if (!this.ref['destroyed']) |
this.selectedUser = user;
if (!this.ref['destroyed']) {
this.ref.detectChanges();
}
}
initialiseUsersDropdown(): void {
if (isDefined(this.users)) {
this.users.forEach(user => {
this.userOptionData.push({ id: user.Id.toString(), text: user.DisplayName });
});
}
}
/** Method to handle the user dropdown change event */
onUserChange(event: any): void {
// The event input variable holds the user id
// Find the user
const user = this.users.find(x => x.Id === Number(event));
// Hide the user dropdown
this.toggleUserDropdown();
// Refresh the job list
this.selectedUser = user;
this.ref.detectChanges();
}
/** Method to toggle the user dropdown */
toggleUserDropdown(): void {
this.showUserDropdown = !this.showUserDropdown;
}
/** Method to handle the upload job progress members event emitter function */
updateUploadJobProgressMembers(uploadJobProgress: UploadJobProgress): void {
if (uploadJobProgress) {
this.inProgressCount = uploadJobProgress.InProgress;
this.inQueueCount = uploadJobProgress.InQueue;
this.awaitingConfirmationCount = uploadJobProgress.AwaitingConfirmation;
}
}
}
| {
this.ref.detectChanges();
} | conditional_block |
upload-progress.component.ts | import { Component, OnInit, Input, ChangeDetectorRef } from '@angular/core';
import { UploadService } from '../../services/upload.service';
import { UsersService } from '../../services/users.service';
import { forkJoin } from '../../../../node_modules/rxjs';
import { User } from '../../model/user';
import { Select2OptionData } from 'ng-select2';
import { Options } from 'select2';
import { isDefined } from '../../../../node_modules/@angular/compiler/src/util';
import { UploadJobProgress, UploadJob } from '../../model/upload';
@Component({
selector: 'app-upload-progress',
templateUrl: './upload-progress.component.html',
styleUrls: ['./upload-progress.component.css']
})
export class UploadProgressComponent implements OnInit {
// Incoming members
@Input() currentUserId: number;
// private members
users: User[];
currentUser: User;
// public members
public selectedUser: User;
public isAdminUser: boolean;
public options: Options = { width: '600', multiple: false, tags: true };
public userOptionData: Select2OptionData[] = [];
public showUserDropdown = false;
public inProgressCount = 0;
public inQueueCount = 0;
public awaitingConfirmationCount = 0;
constructor(private uploadService: UploadService,
private usersService: UsersService,
private ref: ChangeDetectorRef) { }
ngOnInit() {
// Define the observable(s)
const usersObservable = this.usersService.getUsers();
// API call(s)
forkJoin([usersObservable]).subscribe(results => {
// Initialise the list of users
this.users = results[0];
// Initialise the current user
// Selected user will be the same as current user to begin with
this.currentUser = this.selectedUser = this.users.find(x => x.Id === this.currentUserId);
// Initialise the user selection dropdown
this.initialiseUsersDropdown();
// The user selection dropdown is displayed if the
// current user is an administrator
this.isAdminUser = this.currentUser.IsAdministrator;
let interval = setInterval(() => {
this.refreshTable();
}, 2000);
});
}
refreshTable(): void {
const user = this.selectedUser;
this.selectedUser = new User();
if (!this.ref['destroyed']) {
this.ref.detectChanges();
}
this.selectedUser = user;
if (!this.ref['destroyed']) {
this.ref.detectChanges();
}
}
initialiseUsersDropdown(): void {
if (isDefined(this.users)) {
this.users.forEach(user => {
this.userOptionData.push({ id: user.Id.toString(), text: user.DisplayName });
});
}
}
/** Method to handle the user dropdown change event */
| (event: any): void {
// The event input variable holds the user id
// Find the user
const user = this.users.find(x => x.Id === Number(event));
// Hide the user dropdown
this.toggleUserDropdown();
// Refresh the job list
this.selectedUser = user;
this.ref.detectChanges();
}
/** Method to toggle the user dropdown */
toggleUserDropdown(): void {
this.showUserDropdown = !this.showUserDropdown;
}
/** Method to handle the upload job progress members event emitter function */
updateUploadJobProgressMembers(uploadJobProgress: UploadJobProgress): void {
if (uploadJobProgress) {
this.inProgressCount = uploadJobProgress.InProgress;
this.inQueueCount = uploadJobProgress.InQueue;
this.awaitingConfirmationCount = uploadJobProgress.AwaitingConfirmation;
}
}
}
| onUserChange | identifier_name |
upload-progress.component.ts | import { Component, OnInit, Input, ChangeDetectorRef } from '@angular/core';
import { UploadService } from '../../services/upload.service';
import { UsersService } from '../../services/users.service';
import { forkJoin } from '../../../../node_modules/rxjs';
import { User } from '../../model/user';
import { Select2OptionData } from 'ng-select2';
import { Options } from 'select2';
import { isDefined } from '../../../../node_modules/@angular/compiler/src/util';
import { UploadJobProgress, UploadJob } from '../../model/upload';
@Component({
selector: 'app-upload-progress',
templateUrl: './upload-progress.component.html',
styleUrls: ['./upload-progress.component.css']
})
export class UploadProgressComponent implements OnInit {
// Incoming members
@Input() currentUserId: number;
// private members
users: User[];
currentUser: User;
// public members
public selectedUser: User;
public isAdminUser: boolean;
public options: Options = { width: '600', multiple: false, tags: true };
public userOptionData: Select2OptionData[] = [];
public showUserDropdown = false;
public inProgressCount = 0;
public inQueueCount = 0; | private ref: ChangeDetectorRef) { }
ngOnInit() {
// Define the observable(s)
const usersObservable = this.usersService.getUsers();
// API call(s)
forkJoin([usersObservable]).subscribe(results => {
// Initialise the list of users
this.users = results[0];
// Initialise the current user
// Selected user will be the same as current user to begin with
this.currentUser = this.selectedUser = this.users.find(x => x.Id === this.currentUserId);
// Initialise the user selection dropdown
this.initialiseUsersDropdown();
// The user selection dropdown is displayed if the
// current user is an administrator
this.isAdminUser = this.currentUser.IsAdministrator;
let interval = setInterval(() => {
this.refreshTable();
}, 2000);
});
}
refreshTable(): void {
const user = this.selectedUser;
this.selectedUser = new User();
if (!this.ref['destroyed']) {
this.ref.detectChanges();
}
this.selectedUser = user;
if (!this.ref['destroyed']) {
this.ref.detectChanges();
}
}
initialiseUsersDropdown(): void {
if (isDefined(this.users)) {
this.users.forEach(user => {
this.userOptionData.push({ id: user.Id.toString(), text: user.DisplayName });
});
}
}
/** Method to handle the user dropdown change event */
onUserChange(event: any): void {
// The event input variable holds the user id
// Find the user
const user = this.users.find(x => x.Id === Number(event));
// Hide the user dropdown
this.toggleUserDropdown();
// Refresh the job list
this.selectedUser = user;
this.ref.detectChanges();
}
/** Method to toggle the user dropdown */
toggleUserDropdown(): void {
this.showUserDropdown = !this.showUserDropdown;
}
/** Method to handle the upload job progress members event emitter function */
updateUploadJobProgressMembers(uploadJobProgress: UploadJobProgress): void {
if (uploadJobProgress) {
this.inProgressCount = uploadJobProgress.InProgress;
this.inQueueCount = uploadJobProgress.InQueue;
this.awaitingConfirmationCount = uploadJobProgress.AwaitingConfirmation;
}
}
} | public awaitingConfirmationCount = 0;
constructor(private uploadService: UploadService,
private usersService: UsersService, | random_line_split |
quiz_group.py | from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.util import combine_kwargs
class QuizGroup(CanvasObject):
def | (self):
return "{} ({})".format(self.name, self.id)
def delete(self, **kwargs):
"""
Get details of the quiz group with the given id.
:calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.destroy>`_
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
response = self._requester.request(
"DELETE",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def reorder_question_group(self, order, **kwargs):
"""
Update the order of questions within a given group
:calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id/reorder \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.reorder>`_
:param order: A list of dictionaries containing the key 'id' of
the question to be placed at order's index.
:type order: list[dict]
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
if not isinstance(order, list) or not order:
raise ValueError("Param `order` must be a non-empty list.")
for question in order:
if not isinstance(question, dict):
raise ValueError(
"`order` must consist only of dictionaries representing "
"Question items."
)
if "id" not in question:
raise ValueError("Dictionaries in `order` must contain an `id` key.")
kwargs["order"] = order
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/groups/{}/reorder".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def update(self, quiz_groups, **kwargs):
"""
Update a question group given by id.
:calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.update>`_
:param quiz_groups: The name, pick count, and/or question points.
All of these parameters are optional, but at least one must exist
(even if empty) to recieve a response.
The request expects a list, but will only update 1 question group per request.
:type quiz_groups: list[dict]
:returns: `True` if the QuizGroup was updated. `False` otherwise.
:rtype: bool
"""
if not isinstance(quiz_groups, list) or len(quiz_groups) <= 0:
raise ValueError("Param `quiz_groups` must be a non-empty list.")
if not isinstance(quiz_groups[0], dict):
raise ValueError("Param `quiz_groups` must contain a dictionary")
param_list = ["name", "pick_count", "question_points"]
if not any(param in quiz_groups[0] for param in param_list):
raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
kwargs["quiz_groups"] = quiz_groups
response = self._requester.request(
"PUT",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
successful = "name" in response.json().get("quiz_groups")[0]
if successful:
super(QuizGroup, self).set_attributes(response.json().get("quiz_groups")[0])
return successful
| __str__ | identifier_name |
quiz_group.py | from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.util import combine_kwargs
class QuizGroup(CanvasObject):
def __str__(self):
|
def delete(self, **kwargs):
"""
Get details of the quiz group with the given id.
:calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.destroy>`_
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
response = self._requester.request(
"DELETE",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def reorder_question_group(self, order, **kwargs):
"""
Update the order of questions within a given group
:calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id/reorder \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.reorder>`_
:param order: A list of dictionaries containing the key 'id' of
the question to be placed at order's index.
:type order: list[dict]
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
if not isinstance(order, list) or not order:
raise ValueError("Param `order` must be a non-empty list.")
for question in order:
if not isinstance(question, dict):
raise ValueError(
"`order` must consist only of dictionaries representing "
"Question items."
)
if "id" not in question:
raise ValueError("Dictionaries in `order` must contain an `id` key.")
kwargs["order"] = order
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/groups/{}/reorder".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def update(self, quiz_groups, **kwargs):
"""
Update a question group given by id.
:calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.update>`_
:param quiz_groups: The name, pick count, and/or question points.
All of these parameters are optional, but at least one must exist
(even if empty) to recieve a response.
The request expects a list, but will only update 1 question group per request.
:type quiz_groups: list[dict]
:returns: `True` if the QuizGroup was updated. `False` otherwise.
:rtype: bool
"""
if not isinstance(quiz_groups, list) or len(quiz_groups) <= 0:
raise ValueError("Param `quiz_groups` must be a non-empty list.")
if not isinstance(quiz_groups[0], dict):
raise ValueError("Param `quiz_groups` must contain a dictionary")
param_list = ["name", "pick_count", "question_points"]
if not any(param in quiz_groups[0] for param in param_list):
raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
kwargs["quiz_groups"] = quiz_groups
response = self._requester.request(
"PUT",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
successful = "name" in response.json().get("quiz_groups")[0]
if successful:
super(QuizGroup, self).set_attributes(response.json().get("quiz_groups")[0])
return successful
| return "{} ({})".format(self.name, self.id) | identifier_body |
quiz_group.py | from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.util import combine_kwargs
class QuizGroup(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self, **kwargs):
"""
Get details of the quiz group with the given id.
:calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.destroy>`_
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
response = self._requester.request(
"DELETE",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def reorder_question_group(self, order, **kwargs):
"""
Update the order of questions within a given group
:calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id/reorder \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.reorder>`_
:param order: A list of dictionaries containing the key 'id' of
the question to be placed at order's index.
:type order: list[dict]
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
if not isinstance(order, list) or not order:
raise ValueError("Param `order` must be a non-empty list.")
for question in order:
if not isinstance(question, dict):
raise ValueError(
"`order` must consist only of dictionaries representing "
"Question items."
)
if "id" not in question:
raise ValueError("Dictionaries in `order` must contain an `id` key.")
kwargs["order"] = order
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/groups/{}/reorder".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def update(self, quiz_groups, **kwargs):
"""
Update a question group given by id.
:calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.update>`_
:param quiz_groups: The name, pick count, and/or question points.
All of these parameters are optional, but at least one must exist
(even if empty) to recieve a response.
The request expects a list, but will only update 1 question group per request.
:type quiz_groups: list[dict]
:returns: `True` if the QuizGroup was updated. `False` otherwise.
:rtype: bool
"""
if not isinstance(quiz_groups, list) or len(quiz_groups) <= 0:
raise ValueError("Param `quiz_groups` must be a non-empty list.")
if not isinstance(quiz_groups[0], dict):
raise ValueError("Param `quiz_groups` must contain a dictionary")
param_list = ["name", "pick_count", "question_points"]
if not any(param in quiz_groups[0] for param in param_list):
raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
kwargs["quiz_groups"] = quiz_groups
response = self._requester.request(
"PUT",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
successful = "name" in response.json().get("quiz_groups")[0]
if successful: | super(QuizGroup, self).set_attributes(response.json().get("quiz_groups")[0])
return successful | random_line_split |
|
quiz_group.py | from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.util import combine_kwargs
class QuizGroup(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self, **kwargs):
"""
Get details of the quiz group with the given id.
:calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.destroy>`_
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
response = self._requester.request(
"DELETE",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def reorder_question_group(self, order, **kwargs):
"""
Update the order of questions within a given group
:calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id/reorder \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.reorder>`_
:param order: A list of dictionaries containing the key 'id' of
the question to be placed at order's index.
:type order: list[dict]
:returns: True if the result was successful (Status code of 204)
:rtype: bool
"""
if not isinstance(order, list) or not order:
|
for question in order:
if not isinstance(question, dict):
raise ValueError(
"`order` must consist only of dictionaries representing "
"Question items."
)
if "id" not in question:
raise ValueError("Dictionaries in `order` must contain an `id` key.")
kwargs["order"] = order
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/groups/{}/reorder".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 204
def update(self, quiz_groups, **kwargs):
"""
Update a question group given by id.
:calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
<https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.update>`_
:param quiz_groups: The name, pick count, and/or question points.
All of these parameters are optional, but at least one must exist
(even if empty) to recieve a response.
The request expects a list, but will only update 1 question group per request.
:type quiz_groups: list[dict]
:returns: `True` if the QuizGroup was updated. `False` otherwise.
:rtype: bool
"""
if not isinstance(quiz_groups, list) or len(quiz_groups) <= 0:
raise ValueError("Param `quiz_groups` must be a non-empty list.")
if not isinstance(quiz_groups[0], dict):
raise ValueError("Param `quiz_groups` must contain a dictionary")
param_list = ["name", "pick_count", "question_points"]
if not any(param in quiz_groups[0] for param in param_list):
raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
kwargs["quiz_groups"] = quiz_groups
response = self._requester.request(
"PUT",
"courses/{}/quizzes/{}/groups/{}".format(
self.course_id, self.quiz_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
successful = "name" in response.json().get("quiz_groups")[0]
if successful:
super(QuizGroup, self).set_attributes(response.json().get("quiz_groups")[0])
return successful
| raise ValueError("Param `order` must be a non-empty list.") | conditional_block |
actions.py | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Actions related to task commands."""
import time
from drydock_provisioner.cli.action import CliAction
from drydock_provisioner.cli.const import TaskStatus
class TaskList(CliAction): # pylint: disable=too-few-public-methods
"""Action to list tasks."""
def __init__(self, api_client):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
"""
super().__init__(api_client)
self.logger.debug('TaskList action initialized')
def invoke(self):
"""Invoke execution of this action."""
return self.api_client.get_tasks()
class TaskCreate(CliAction): # pylint: disable=too-few-public-methods
"""Action to create tasks against a design."""
def __init__(self,
api_client,
design_ref,
action_name=None,
node_names=None,
rack_names=None,
node_tags=None,
block=False,
poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string design_ref: The URI reference to design documents
:param string action_name: The name of the action being performed for this task
:param List node_names: The list of node names to restrict action application
:param List rack_names: The list of rack names to restrict action application
:param List node_tags: The list of node tags to restrict action application
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.design_ref = design_ref
self.action_name = action_name
self.logger.debug('TaskCreate action initialized for design=%s',
design_ref)
self.logger.debug('Action is %s', action_name)
self.logger.debug("Node names = %s", node_names)
self.logger.debug("Rack names = %s", rack_names)
self.logger.debug("Node tags = %s", node_tags)
self.block = block
self.poll_interval = poll_interval
if any([node_names, rack_names, node_tags]):
filter_items = {'filter_type': 'union'}
if node_names is not None:
filter_items['node_names'] = node_names
if rack_names is not None:
filter_items['rack_names'] = rack_names
if node_tags is None:
filter_items['node_tags'] = node_tags
self.node_filter = {
'filter_set_type': 'intersection',
'filter_set': [filter_items]
}
else:
self.node_filter = None
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.create_task(
design_ref=self.design_ref,
task_action=self.action_name,
node_filter=self.node_filter)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.get('status',
'') in [TaskStatus.Complete, TaskStatus.Terminated]:
|
class TaskShow(CliAction): # pylint: disable=too-few-public-methods
"""Action to show a task's detial."""
def __init__(self, api_client, task_id, block=False, poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string task_id: the UUID of the task to retrieve
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.task_id = task_id
self.logger.debug('TaskShow action initialized for task_id=%s,',
task_id)
self.block = block
self.poll_interval = poll_interval
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.get_task(task_id=self.task_id)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.status in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
| return task | conditional_block |
actions.py | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Actions related to task commands."""
import time
from drydock_provisioner.cli.action import CliAction
from drydock_provisioner.cli.const import TaskStatus
class TaskList(CliAction): # pylint: disable=too-few-public-methods
"""Action to list tasks."""
def __init__(self, api_client):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
"""
super().__init__(api_client)
self.logger.debug('TaskList action initialized')
def invoke(self):
"""Invoke execution of this action."""
return self.api_client.get_tasks()
class TaskCreate(CliAction): # pylint: disable=too-few-public-methods
"""Action to create tasks against a design."""
def __init__(self,
api_client,
design_ref,
action_name=None,
node_names=None,
rack_names=None,
node_tags=None,
block=False,
poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string design_ref: The URI reference to design documents
:param string action_name: The name of the action being performed for this task
:param List node_names: The list of node names to restrict action application
:param List rack_names: The list of rack names to restrict action application
:param List node_tags: The list of node tags to restrict action application
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.design_ref = design_ref
self.action_name = action_name
self.logger.debug('TaskCreate action initialized for design=%s',
design_ref)
self.logger.debug('Action is %s', action_name)
self.logger.debug("Node names = %s", node_names)
self.logger.debug("Rack names = %s", rack_names)
self.logger.debug("Node tags = %s", node_tags)
self.block = block
self.poll_interval = poll_interval
if any([node_names, rack_names, node_tags]):
filter_items = {'filter_type': 'union'}
if node_names is not None:
filter_items['node_names'] = node_names
if rack_names is not None:
filter_items['rack_names'] = rack_names
if node_tags is None:
filter_items['node_tags'] = node_tags
self.node_filter = {
'filter_set_type': 'intersection',
'filter_set': [filter_items]
}
else:
self.node_filter = None
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.create_task(
design_ref=self.design_ref,
task_action=self.action_name,
node_filter=self.node_filter)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.get('status',
'') in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
class TaskShow(CliAction): # pylint: disable=too-few-public-methods
"""Action to show a task's detial."""
def __init__(self, api_client, task_id, block=False, poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string task_id: the UUID of the task to retrieve
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.task_id = task_id
self.logger.debug('TaskShow action initialized for task_id=%s,',
task_id)
self.block = block
self.poll_interval = poll_interval
def | (self):
"""Invoke execution of this action."""
task = self.api_client.get_task(task_id=self.task_id)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.status in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
| invoke | identifier_name |
actions.py | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Actions related to task commands."""
import time
from drydock_provisioner.cli.action import CliAction
from drydock_provisioner.cli.const import TaskStatus
class TaskList(CliAction): # pylint: disable=too-few-public-methods
"""Action to list tasks."""
def __init__(self, api_client):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
"""
super().__init__(api_client)
self.logger.debug('TaskList action initialized')
def invoke(self):
"""Invoke execution of this action."""
return self.api_client.get_tasks()
class TaskCreate(CliAction): # pylint: disable=too-few-public-methods
"""Action to create tasks against a design."""
def __init__(self,
api_client,
design_ref,
action_name=None,
node_names=None,
rack_names=None,
node_tags=None,
block=False,
poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string design_ref: The URI reference to design documents
:param string action_name: The name of the action being performed for this task
:param List node_names: The list of node names to restrict action application
:param List rack_names: The list of rack names to restrict action application
:param List node_tags: The list of node tags to restrict action application
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.design_ref = design_ref
self.action_name = action_name
self.logger.debug('TaskCreate action initialized for design=%s', | self.logger.debug("Node names = %s", node_names)
self.logger.debug("Rack names = %s", rack_names)
self.logger.debug("Node tags = %s", node_tags)
self.block = block
self.poll_interval = poll_interval
if any([node_names, rack_names, node_tags]):
filter_items = {'filter_type': 'union'}
if node_names is not None:
filter_items['node_names'] = node_names
if rack_names is not None:
filter_items['rack_names'] = rack_names
if node_tags is None:
filter_items['node_tags'] = node_tags
self.node_filter = {
'filter_set_type': 'intersection',
'filter_set': [filter_items]
}
else:
self.node_filter = None
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.create_task(
design_ref=self.design_ref,
task_action=self.action_name,
node_filter=self.node_filter)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.get('status',
'') in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
class TaskShow(CliAction): # pylint: disable=too-few-public-methods
"""Action to show a task's detial."""
def __init__(self, api_client, task_id, block=False, poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string task_id: the UUID of the task to retrieve
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.task_id = task_id
self.logger.debug('TaskShow action initialized for task_id=%s,',
task_id)
self.block = block
self.poll_interval = poll_interval
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.get_task(task_id=self.task_id)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.status in [TaskStatus.Complete, TaskStatus.Terminated]:
return task | design_ref)
self.logger.debug('Action is %s', action_name)
| random_line_split |
actions.py | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Actions related to task commands."""
import time
from drydock_provisioner.cli.action import CliAction
from drydock_provisioner.cli.const import TaskStatus
class TaskList(CliAction): # pylint: disable=too-few-public-methods
"""Action to list tasks."""
def __init__(self, api_client):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
"""
super().__init__(api_client)
self.logger.debug('TaskList action initialized')
def invoke(self):
"""Invoke execution of this action."""
return self.api_client.get_tasks()
class TaskCreate(CliAction): # pylint: disable=too-few-public-methods
"""Action to create tasks against a design."""
def __init__(self,
api_client,
design_ref,
action_name=None,
node_names=None,
rack_names=None,
node_tags=None,
block=False,
poll_interval=15):
|
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.create_task(
design_ref=self.design_ref,
task_action=self.action_name,
node_filter=self.node_filter)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.get('status',
'') in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
class TaskShow(CliAction): # pylint: disable=too-few-public-methods
"""Action to show a task's detial."""
def __init__(self, api_client, task_id, block=False, poll_interval=15):
"""Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string task_id: the UUID of the task to retrieve
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.task_id = task_id
self.logger.debug('TaskShow action initialized for task_id=%s,',
task_id)
self.block = block
self.poll_interval = poll_interval
def invoke(self):
"""Invoke execution of this action."""
task = self.api_client.get_task(task_id=self.task_id)
if not self.block:
return task
task_id = task.get('task_id')
while True:
time.sleep(self.poll_interval)
task = self.api_client.get_task(task_id=task_id)
if task.status in [TaskStatus.Complete, TaskStatus.Terminated]:
return task
| """Object initializer.
:param DrydockClient api_client: The api client used for invocation.
:param string design_ref: The URI reference to design documents
:param string action_name: The name of the action being performed for this task
:param List node_names: The list of node names to restrict action application
:param List rack_names: The list of rack names to restrict action application
:param List node_tags: The list of node tags to restrict action application
:param bool block: Whether to block CLI exit until task completes
:param integer poll_interval: Polling interval to query task status
"""
super().__init__(api_client)
self.design_ref = design_ref
self.action_name = action_name
self.logger.debug('TaskCreate action initialized for design=%s',
design_ref)
self.logger.debug('Action is %s', action_name)
self.logger.debug("Node names = %s", node_names)
self.logger.debug("Rack names = %s", rack_names)
self.logger.debug("Node tags = %s", node_tags)
self.block = block
self.poll_interval = poll_interval
if any([node_names, rack_names, node_tags]):
filter_items = {'filter_type': 'union'}
if node_names is not None:
filter_items['node_names'] = node_names
if rack_names is not None:
filter_items['rack_names'] = rack_names
if node_tags is None:
filter_items['node_tags'] = node_tags
self.node_filter = {
'filter_set_type': 'intersection',
'filter_set': [filter_items]
}
else:
self.node_filter = None | identifier_body |
danmaku.py | #!/usr/bin/env python2
# -*- coding:utf-8 -*-
import signal
import json
import argparse
import threading
import requests
from settings import load_config
from app import GDanmakuApp
from server_selection import ServerSelectionWindow
from danmaku_ui import Danmaku
from gi.repository import Gtk, GLib, GObject
class Main(object):
def __init__(self, server=None):
self.server = server
server_selection = ServerSelectionWindow(self.server)
server_selection.connect('server-selected', self.on_server_selected)
self.app = GDanmakuApp(self)
self.thread_sub = None
self.enabled = True
self.options = load_config()
self.live_danmakus = {}
def _subscribe_danmaku(self, server, channel, password):
print("subscribing from server: {}, channel: {}".format(server, channel))
uri = self.options["http_stream_uri"].format(cname=channel)
if uri.startswith("/") and server.endswith("/"):
server = server[:-1]
url = server + uri
while 1:
try:
res = requests.get(
url, headers={"X-GDANMAKU-AUTH-KEY": password})
except requests.exceptions.ConnectionError:
continue
if res.status_code == 200 and res.text:
try:
dm_opts = json.loads(res.text)
except:
continue
else:
GLib.idle_add(self.new_danmaku, dm_opts)
def new_danmaku(self, dm_opts):
if not self.enabled:
return
for opt in dm_opts:
try:
dm = Danmaku(**opt)
dm.connect('delete-event', self.on_danmaku_delete)
except Exception as e:
print(e)
continue
self.live_danmakus[id(dm)] = dm
def on_danmaku_delete(self, dm, event):
self.live_danmakus.pop(id(dm))
def toggle_danmaku(self):
self.enabled = not self.enabled
if not self.enabled:
for _, dm in self.live_danmakus.iteritems():
dm.hide()
dm._clean_exit()
def on_server_selected(self, widget, server, channel, password):
thread_sub = threading.Thread(
target=self._subscribe_danmaku, args=(server, channel, password))
thread_sub.daemon = True
thread_sub.start()
self.thread_sub = thread_sub
def run(self):
GObject.threads_init()
Gtk.main()
def app_config():
from config_panel import ConfigPanel
from gi.repository import Gtk
ConfigPanel()
Gtk.main()
def main():
|
if __name__ == '__main__':
main()
# vim: ts=4 sw=4 sts=4 expandtab
| options = load_config()
parser = argparse.ArgumentParser(prog="gdanmaku")
parser.add_argument(
"--server",
type=str,
default=options["http_stream_server"],
help="danmaku stream server"
)
parser.add_argument(
'--config',
action="store_true",
help="run configuration window"
)
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
if args.config:
app_config()
else:
main_app = Main(args.server)
main_app.run() | identifier_body |
danmaku.py | #!/usr/bin/env python2
# -*- coding:utf-8 -*-
import signal
import json
import argparse
import threading
import requests
from settings import load_config
from app import GDanmakuApp | from gi.repository import Gtk, GLib, GObject
class Main(object):
def __init__(self, server=None):
self.server = server
server_selection = ServerSelectionWindow(self.server)
server_selection.connect('server-selected', self.on_server_selected)
self.app = GDanmakuApp(self)
self.thread_sub = None
self.enabled = True
self.options = load_config()
self.live_danmakus = {}
def _subscribe_danmaku(self, server, channel, password):
print("subscribing from server: {}, channel: {}".format(server, channel))
uri = self.options["http_stream_uri"].format(cname=channel)
if uri.startswith("/") and server.endswith("/"):
server = server[:-1]
url = server + uri
while 1:
try:
res = requests.get(
url, headers={"X-GDANMAKU-AUTH-KEY": password})
except requests.exceptions.ConnectionError:
continue
if res.status_code == 200 and res.text:
try:
dm_opts = json.loads(res.text)
except:
continue
else:
GLib.idle_add(self.new_danmaku, dm_opts)
def new_danmaku(self, dm_opts):
if not self.enabled:
return
for opt in dm_opts:
try:
dm = Danmaku(**opt)
dm.connect('delete-event', self.on_danmaku_delete)
except Exception as e:
print(e)
continue
self.live_danmakus[id(dm)] = dm
def on_danmaku_delete(self, dm, event):
self.live_danmakus.pop(id(dm))
def toggle_danmaku(self):
self.enabled = not self.enabled
if not self.enabled:
for _, dm in self.live_danmakus.iteritems():
dm.hide()
dm._clean_exit()
def on_server_selected(self, widget, server, channel, password):
thread_sub = threading.Thread(
target=self._subscribe_danmaku, args=(server, channel, password))
thread_sub.daemon = True
thread_sub.start()
self.thread_sub = thread_sub
def run(self):
GObject.threads_init()
Gtk.main()
def app_config():
from config_panel import ConfigPanel
from gi.repository import Gtk
ConfigPanel()
Gtk.main()
def main():
options = load_config()
parser = argparse.ArgumentParser(prog="gdanmaku")
parser.add_argument(
"--server",
type=str,
default=options["http_stream_server"],
help="danmaku stream server"
)
parser.add_argument(
'--config',
action="store_true",
help="run configuration window"
)
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
if args.config:
app_config()
else:
main_app = Main(args.server)
main_app.run()
if __name__ == '__main__':
main()
# vim: ts=4 sw=4 sts=4 expandtab | from server_selection import ServerSelectionWindow
from danmaku_ui import Danmaku | random_line_split |
danmaku.py | #!/usr/bin/env python2
# -*- coding:utf-8 -*-
import signal
import json
import argparse
import threading
import requests
from settings import load_config
from app import GDanmakuApp
from server_selection import ServerSelectionWindow
from danmaku_ui import Danmaku
from gi.repository import Gtk, GLib, GObject
class Main(object):
def __init__(self, server=None):
self.server = server
server_selection = ServerSelectionWindow(self.server)
server_selection.connect('server-selected', self.on_server_selected)
self.app = GDanmakuApp(self)
self.thread_sub = None
self.enabled = True
self.options = load_config()
self.live_danmakus = {}
def _subscribe_danmaku(self, server, channel, password):
print("subscribing from server: {}, channel: {}".format(server, channel))
uri = self.options["http_stream_uri"].format(cname=channel)
if uri.startswith("/") and server.endswith("/"):
server = server[:-1]
url = server + uri
while 1:
try:
res = requests.get(
url, headers={"X-GDANMAKU-AUTH-KEY": password})
except requests.exceptions.ConnectionError:
continue
if res.status_code == 200 and res.text:
try:
dm_opts = json.loads(res.text)
except:
continue
else:
GLib.idle_add(self.new_danmaku, dm_opts)
def new_danmaku(self, dm_opts):
if not self.enabled:
return
for opt in dm_opts:
|
def on_danmaku_delete(self, dm, event):
self.live_danmakus.pop(id(dm))
def toggle_danmaku(self):
self.enabled = not self.enabled
if not self.enabled:
for _, dm in self.live_danmakus.iteritems():
dm.hide()
dm._clean_exit()
def on_server_selected(self, widget, server, channel, password):
thread_sub = threading.Thread(
target=self._subscribe_danmaku, args=(server, channel, password))
thread_sub.daemon = True
thread_sub.start()
self.thread_sub = thread_sub
def run(self):
GObject.threads_init()
Gtk.main()
def app_config():
from config_panel import ConfigPanel
from gi.repository import Gtk
ConfigPanel()
Gtk.main()
def main():
options = load_config()
parser = argparse.ArgumentParser(prog="gdanmaku")
parser.add_argument(
"--server",
type=str,
default=options["http_stream_server"],
help="danmaku stream server"
)
parser.add_argument(
'--config',
action="store_true",
help="run configuration window"
)
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
if args.config:
app_config()
else:
main_app = Main(args.server)
main_app.run()
if __name__ == '__main__':
main()
# vim: ts=4 sw=4 sts=4 expandtab
| try:
dm = Danmaku(**opt)
dm.connect('delete-event', self.on_danmaku_delete)
except Exception as e:
print(e)
continue
self.live_danmakus[id(dm)] = dm | conditional_block |
danmaku.py | #!/usr/bin/env python2
# -*- coding:utf-8 -*-
import signal
import json
import argparse
import threading
import requests
from settings import load_config
from app import GDanmakuApp
from server_selection import ServerSelectionWindow
from danmaku_ui import Danmaku
from gi.repository import Gtk, GLib, GObject
class Main(object):
def __init__(self, server=None):
self.server = server
server_selection = ServerSelectionWindow(self.server)
server_selection.connect('server-selected', self.on_server_selected)
self.app = GDanmakuApp(self)
self.thread_sub = None
self.enabled = True
self.options = load_config()
self.live_danmakus = {}
def _subscribe_danmaku(self, server, channel, password):
print("subscribing from server: {}, channel: {}".format(server, channel))
uri = self.options["http_stream_uri"].format(cname=channel)
if uri.startswith("/") and server.endswith("/"):
server = server[:-1]
url = server + uri
while 1:
try:
res = requests.get(
url, headers={"X-GDANMAKU-AUTH-KEY": password})
except requests.exceptions.ConnectionError:
continue
if res.status_code == 200 and res.text:
try:
dm_opts = json.loads(res.text)
except:
continue
else:
GLib.idle_add(self.new_danmaku, dm_opts)
def new_danmaku(self, dm_opts):
if not self.enabled:
return
for opt in dm_opts:
try:
dm = Danmaku(**opt)
dm.connect('delete-event', self.on_danmaku_delete)
except Exception as e:
print(e)
continue
self.live_danmakus[id(dm)] = dm
def on_danmaku_delete(self, dm, event):
self.live_danmakus.pop(id(dm))
def toggle_danmaku(self):
self.enabled = not self.enabled
if not self.enabled:
for _, dm in self.live_danmakus.iteritems():
dm.hide()
dm._clean_exit()
def on_server_selected(self, widget, server, channel, password):
thread_sub = threading.Thread(
target=self._subscribe_danmaku, args=(server, channel, password))
thread_sub.daemon = True
thread_sub.start()
self.thread_sub = thread_sub
def run(self):
GObject.threads_init()
Gtk.main()
def app_config():
from config_panel import ConfigPanel
from gi.repository import Gtk
ConfigPanel()
Gtk.main()
def | ():
options = load_config()
parser = argparse.ArgumentParser(prog="gdanmaku")
parser.add_argument(
"--server",
type=str,
default=options["http_stream_server"],
help="danmaku stream server"
)
parser.add_argument(
'--config',
action="store_true",
help="run configuration window"
)
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
if args.config:
app_config()
else:
main_app = Main(args.server)
main_app.run()
if __name__ == '__main__':
main()
# vim: ts=4 sw=4 sts=4 expandtab
| main | identifier_name |
statuses.js | define(['config', 'folders'], function (config, folders) {
var wrikeStates = { 'active': 0, 'completed': 1, 'deferred': 2, 'cancelled': 3 };
var statusFolders = folders.getSubfolders(config.statusFolder)
, statuses = {}
, statusesById = {};
$.each(statusFolders, function (i, val) {
var wrikeState = val.data.title.match(/\d+\. .* \((.*)\)/);
// If the status has an improperly formatted name, ignore it
if (wrikeState === null || typeof (wrikeState = wrikeStates[wrikeState[1].toLowerCase()]) === 'undefined') |
val.powerWrike.wrikeState = wrikeState;
statuses[val.powerWrike.uniquePath] = statusesById[val.id] = val;
});
return {
statuses: statuses,
statusesById: statusesById,
};
});
| {
debug.warn('Status has the wrong format. Should be titled "123. Your Status Name (Active|Completed|Deferred|Cancelled)"\nYou provided "' + val.data.title + '"');
return;
} | conditional_block |
statuses.js | define(['config', 'folders'], function (config, folders) {
var wrikeStates = { 'active': 0, 'completed': 1, 'deferred': 2, 'cancelled': 3 };
var statusFolders = folders.getSubfolders(config.statusFolder)
, statuses = {}
, statusesById = {};
$.each(statusFolders, function (i, val) {
var wrikeState = val.data.title.match(/\d+\. .* \((.*)\)/);
// If the status has an improperly formatted name, ignore it | }
val.powerWrike.wrikeState = wrikeState;
statuses[val.powerWrike.uniquePath] = statusesById[val.id] = val;
});
return {
statuses: statuses,
statusesById: statusesById,
};
}); | if (wrikeState === null || typeof (wrikeState = wrikeStates[wrikeState[1].toLowerCase()]) === 'undefined') {
debug.warn('Status has the wrong format. Should be titled "123. Your Status Name (Active|Completed|Deferred|Cancelled)"\nYou provided "' + val.data.title + '"');
return; | random_line_split |
xlang_kafkaio_it_test.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration test for Python cross-language pipelines for Java KafkaIO."""
from __future__ import absolute_import
import contextlib
import logging
import os
import socket
import subprocess
import time
import typing
import unittest
import apache_beam as beam
from apache_beam.io.external.kafka import ReadFromKafka
from apache_beam.io.external.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.test_pipeline import TestPipeline
class CrossLanguageKafkaIO(object):
def __init__(self, bootstrap_servers, topic, expansion_service=None):
self.bootstrap_servers = bootstrap_servers
self.topic = topic
self.expansion_service = expansion_service
self.sum_counter = Metrics.counter('source', 'elements_sum')
def build_write_pipeline(self, pipeline):
_ = (
pipeline
| 'Impulse' >> beam.Impulse()
| 'Generate' >> beam.FlatMap(lambda x: range(1000)) # pylint: disable=range-builtin-not-iterating
| 'Reshuffle' >> beam.Reshuffle()
| 'MakeKV' >> beam.Map(lambda x:
(b'', str(x).encode())).with_output_types(
typing.Tuple[bytes, bytes])
| 'WriteToKafka' >> WriteToKafka(
producer_config={'bootstrap.servers': self.bootstrap_servers},
topic=self.topic,
expansion_service=self.expansion_service))
def build_read_pipeline(self, pipeline):
_ = (
pipeline
| 'ReadFromKafka' >> ReadFromKafka(
consumer_config={
'bootstrap.servers': self.bootstrap_servers,
'auto.offset.reset': 'earliest'
},
topics=[self.topic],
expansion_service=self.expansion_service)
| 'Windowing' >> beam.WindowInto(
beam.window.FixedWindows(300),
trigger=beam.transforms.trigger.AfterProcessingTime(60),
accumulation_mode=beam.transforms.trigger.AccumulationMode.
DISCARDING)
| 'DecodingValue' >> beam.Map(lambda elem: int(elem[1].decode()))
| 'CombineGlobally' >> beam.CombineGlobally(sum).without_defaults()
| 'SetSumCounter' >> beam.Map(self.sum_counter.inc))
def run_xlang_kafkaio(self, pipeline):
self.build_write_pipeline(pipeline)
self.build_read_pipeline(pipeline)
pipeline.run(False)
@unittest.skipUnless(
os.environ.get('LOCAL_KAFKA_JAR'),
"LOCAL_KAFKA_JAR environment var is not provided.")
class CrossLanguageKafkaIOTest(unittest.TestCase):
def get_open_port(self):
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except: # pylint: disable=bare-except
# Above call will fail for nodes that only support IPv6.
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.bind(('localhost', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
@contextlib.contextmanager
def local_kafka_service(self, local_kafka_jar_file):
kafka_port = str(self.get_open_port())
zookeeper_port = str(self.get_open_port())
kafka_server = None
try:
kafka_server = subprocess.Popen(
['java', '-jar', local_kafka_jar_file, kafka_port, zookeeper_port])
time.sleep(3)
yield kafka_port
finally:
if kafka_server:
kafka_server.kill()
def get_options(self):
options = PipelineOptions([
'--runner',
'FlinkRunner',
'--parallelism',
'2',
'--experiment',
'beam_fn_api'
])
return options
def test_kafkaio_write(self):
local_kafka_jar = os.environ.get('LOCAL_KAFKA_JAR')
with self.local_kafka_service(local_kafka_jar) as kafka_port:
options = self.get_options()
p = TestPipeline(options=options)
p.not_use_test_runner_api = True
CrossLanguageKafkaIO('localhost:%s' % kafka_port,
'xlang_kafkaio_test').build_write_pipeline(p)
job = p.run()
job.wait_until_finish()
if __name__ == '__main__':
| logging.getLogger().setLevel(logging.INFO)
unittest.main() | conditional_block |
|
xlang_kafkaio_it_test.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration test for Python cross-language pipelines for Java KafkaIO."""
from __future__ import absolute_import
import contextlib
import logging
import os
import socket
import subprocess
import time
import typing
import unittest
import apache_beam as beam
from apache_beam.io.external.kafka import ReadFromKafka
from apache_beam.io.external.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.test_pipeline import TestPipeline
class CrossLanguageKafkaIO(object):
def __init__(self, bootstrap_servers, topic, expansion_service=None):
self.bootstrap_servers = bootstrap_servers
self.topic = topic
self.expansion_service = expansion_service
self.sum_counter = Metrics.counter('source', 'elements_sum')
def build_write_pipeline(self, pipeline):
_ = (
pipeline
| 'Impulse' >> beam.Impulse()
| 'Generate' >> beam.FlatMap(lambda x: range(1000)) # pylint: disable=range-builtin-not-iterating
| 'Reshuffle' >> beam.Reshuffle()
| 'MakeKV' >> beam.Map(lambda x:
(b'', str(x).encode())).with_output_types(
typing.Tuple[bytes, bytes])
| 'WriteToKafka' >> WriteToKafka(
producer_config={'bootstrap.servers': self.bootstrap_servers},
topic=self.topic,
expansion_service=self.expansion_service))
def build_read_pipeline(self, pipeline):
|
def run_xlang_kafkaio(self, pipeline):
self.build_write_pipeline(pipeline)
self.build_read_pipeline(pipeline)
pipeline.run(False)
@unittest.skipUnless(
os.environ.get('LOCAL_KAFKA_JAR'),
"LOCAL_KAFKA_JAR environment var is not provided.")
class CrossLanguageKafkaIOTest(unittest.TestCase):
def get_open_port(self):
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except: # pylint: disable=bare-except
# Above call will fail for nodes that only support IPv6.
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.bind(('localhost', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
@contextlib.contextmanager
def local_kafka_service(self, local_kafka_jar_file):
kafka_port = str(self.get_open_port())
zookeeper_port = str(self.get_open_port())
kafka_server = None
try:
kafka_server = subprocess.Popen(
['java', '-jar', local_kafka_jar_file, kafka_port, zookeeper_port])
time.sleep(3)
yield kafka_port
finally:
if kafka_server:
kafka_server.kill()
def get_options(self):
options = PipelineOptions([
'--runner',
'FlinkRunner',
'--parallelism',
'2',
'--experiment',
'beam_fn_api'
])
return options
def test_kafkaio_write(self):
local_kafka_jar = os.environ.get('LOCAL_KAFKA_JAR')
with self.local_kafka_service(local_kafka_jar) as kafka_port:
options = self.get_options()
p = TestPipeline(options=options)
p.not_use_test_runner_api = True
CrossLanguageKafkaIO('localhost:%s' % kafka_port,
'xlang_kafkaio_test').build_write_pipeline(p)
job = p.run()
job.wait_until_finish()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| _ = (
pipeline
| 'ReadFromKafka' >> ReadFromKafka(
consumer_config={
'bootstrap.servers': self.bootstrap_servers,
'auto.offset.reset': 'earliest'
},
topics=[self.topic],
expansion_service=self.expansion_service)
| 'Windowing' >> beam.WindowInto(
beam.window.FixedWindows(300),
trigger=beam.transforms.trigger.AfterProcessingTime(60),
accumulation_mode=beam.transforms.trigger.AccumulationMode.
DISCARDING)
| 'DecodingValue' >> beam.Map(lambda elem: int(elem[1].decode()))
| 'CombineGlobally' >> beam.CombineGlobally(sum).without_defaults()
| 'SetSumCounter' >> beam.Map(self.sum_counter.inc)) | identifier_body |
xlang_kafkaio_it_test.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration test for Python cross-language pipelines for Java KafkaIO."""
from __future__ import absolute_import
import contextlib
import logging
import os
import socket
import subprocess
import time
import typing
import unittest
import apache_beam as beam
from apache_beam.io.external.kafka import ReadFromKafka
from apache_beam.io.external.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.test_pipeline import TestPipeline
class CrossLanguageKafkaIO(object):
def __init__(self, bootstrap_servers, topic, expansion_service=None):
self.bootstrap_servers = bootstrap_servers
self.topic = topic
self.expansion_service = expansion_service
self.sum_counter = Metrics.counter('source', 'elements_sum')
def build_write_pipeline(self, pipeline):
_ = (
pipeline
| 'Impulse' >> beam.Impulse()
| 'Generate' >> beam.FlatMap(lambda x: range(1000)) # pylint: disable=range-builtin-not-iterating
| 'Reshuffle' >> beam.Reshuffle()
| 'MakeKV' >> beam.Map(lambda x:
(b'', str(x).encode())).with_output_types(
typing.Tuple[bytes, bytes])
| 'WriteToKafka' >> WriteToKafka(
producer_config={'bootstrap.servers': self.bootstrap_servers},
topic=self.topic,
expansion_service=self.expansion_service))
def build_read_pipeline(self, pipeline):
_ = (
pipeline
| 'ReadFromKafka' >> ReadFromKafka(
consumer_config={
'bootstrap.servers': self.bootstrap_servers,
'auto.offset.reset': 'earliest'
},
topics=[self.topic],
expansion_service=self.expansion_service)
| 'Windowing' >> beam.WindowInto(
beam.window.FixedWindows(300),
trigger=beam.transforms.trigger.AfterProcessingTime(60),
accumulation_mode=beam.transforms.trigger.AccumulationMode.
DISCARDING)
| 'DecodingValue' >> beam.Map(lambda elem: int(elem[1].decode()))
| 'CombineGlobally' >> beam.CombineGlobally(sum).without_defaults()
| 'SetSumCounter' >> beam.Map(self.sum_counter.inc))
def run_xlang_kafkaio(self, pipeline):
self.build_write_pipeline(pipeline)
self.build_read_pipeline(pipeline)
pipeline.run(False)
@unittest.skipUnless(
os.environ.get('LOCAL_KAFKA_JAR'),
"LOCAL_KAFKA_JAR environment var is not provided.")
class CrossLanguageKafkaIOTest(unittest.TestCase):
def get_open_port(self):
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except: # pylint: disable=bare-except
# Above call will fail for nodes that only support IPv6.
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.bind(('localhost', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
@contextlib.contextmanager
def local_kafka_service(self, local_kafka_jar_file):
kafka_port = str(self.get_open_port())
zookeeper_port = str(self.get_open_port())
kafka_server = None
try:
kafka_server = subprocess.Popen(
['java', '-jar', local_kafka_jar_file, kafka_port, zookeeper_port])
time.sleep(3)
yield kafka_port
finally:
if kafka_server:
kafka_server.kill()
def | (self):
options = PipelineOptions([
'--runner',
'FlinkRunner',
'--parallelism',
'2',
'--experiment',
'beam_fn_api'
])
return options
def test_kafkaio_write(self):
local_kafka_jar = os.environ.get('LOCAL_KAFKA_JAR')
with self.local_kafka_service(local_kafka_jar) as kafka_port:
options = self.get_options()
p = TestPipeline(options=options)
p.not_use_test_runner_api = True
CrossLanguageKafkaIO('localhost:%s' % kafka_port,
'xlang_kafkaio_test').build_write_pipeline(p)
job = p.run()
job.wait_until_finish()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| get_options | identifier_name |
xlang_kafkaio_it_test.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration test for Python cross-language pipelines for Java KafkaIO."""
from __future__ import absolute_import
import contextlib
import logging
import os
import socket
import subprocess
import time
import typing
import unittest
import apache_beam as beam
from apache_beam.io.external.kafka import ReadFromKafka
from apache_beam.io.external.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.test_pipeline import TestPipeline
class CrossLanguageKafkaIO(object):
def __init__(self, bootstrap_servers, topic, expansion_service=None):
self.bootstrap_servers = bootstrap_servers
self.topic = topic
self.expansion_service = expansion_service
self.sum_counter = Metrics.counter('source', 'elements_sum')
def build_write_pipeline(self, pipeline):
_ = (
pipeline
| 'Impulse' >> beam.Impulse()
| 'Generate' >> beam.FlatMap(lambda x: range(1000)) # pylint: disable=range-builtin-not-iterating
| 'Reshuffle' >> beam.Reshuffle()
| 'MakeKV' >> beam.Map(lambda x:
(b'', str(x).encode())).with_output_types(
typing.Tuple[bytes, bytes])
| 'WriteToKafka' >> WriteToKafka(
producer_config={'bootstrap.servers': self.bootstrap_servers},
topic=self.topic,
expansion_service=self.expansion_service))
def build_read_pipeline(self, pipeline):
_ = (
pipeline
| 'ReadFromKafka' >> ReadFromKafka(
consumer_config={
'bootstrap.servers': self.bootstrap_servers,
'auto.offset.reset': 'earliest'
},
topics=[self.topic],
expansion_service=self.expansion_service)
| 'Windowing' >> beam.WindowInto(
beam.window.FixedWindows(300),
trigger=beam.transforms.trigger.AfterProcessingTime(60),
accumulation_mode=beam.transforms.trigger.AccumulationMode.
DISCARDING)
| 'DecodingValue' >> beam.Map(lambda elem: int(elem[1].decode()))
| 'CombineGlobally' >> beam.CombineGlobally(sum).without_defaults()
| 'SetSumCounter' >> beam.Map(self.sum_counter.inc))
def run_xlang_kafkaio(self, pipeline):
self.build_write_pipeline(pipeline)
self.build_read_pipeline(pipeline)
pipeline.run(False)
@unittest.skipUnless(
os.environ.get('LOCAL_KAFKA_JAR'),
"LOCAL_KAFKA_JAR environment var is not provided.")
class CrossLanguageKafkaIOTest(unittest.TestCase):
def get_open_port(self):
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except: # pylint: disable=bare-except
# Above call will fail for nodes that only support IPv6.
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.bind(('localhost', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
@contextlib.contextmanager
def local_kafka_service(self, local_kafka_jar_file):
kafka_port = str(self.get_open_port())
zookeeper_port = str(self.get_open_port())
kafka_server = None
try:
kafka_server = subprocess.Popen(
['java', '-jar', local_kafka_jar_file, kafka_port, zookeeper_port])
time.sleep(3)
yield kafka_port
finally:
if kafka_server:
kafka_server.kill()
def get_options(self):
options = PipelineOptions([
'--runner',
'FlinkRunner',
'--parallelism',
'2',
'--experiment',
'beam_fn_api'
])
return options
def test_kafkaio_write(self):
local_kafka_jar = os.environ.get('LOCAL_KAFKA_JAR')
with self.local_kafka_service(local_kafka_jar) as kafka_port:
options = self.get_options()
p = TestPipeline(options=options)
p.not_use_test_runner_api = True
CrossLanguageKafkaIO('localhost:%s' % kafka_port,
'xlang_kafkaio_test').build_write_pipeline(p)
job = p.run() |
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main() | job.wait_until_finish() | random_line_split |
project-cache-issue-31849.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #31849: the problem here was actually a performance
// cliff, but I'm adding the test for reference.
pub trait Upcast<T> {
fn upcast(self) -> T;
}
impl<S1, S2, T1, T2> Upcast<(T1, T2)> for (S1,S2)
where S1: Upcast<T1>,
S2: Upcast<T2>,
{
fn upcast(self) -> (T1, T2) { (self.0.upcast(), self.1.upcast()) }
}
impl Upcast<()> for ()
{
fn upcast(self) -> () { () }
}
pub trait ToStatic {
type Static: 'static;
fn to_static(self) -> Self::Static where Self: Sized;
}
impl<T, U> ToStatic for (T, U)
where T: ToStatic,
U: ToStatic
{
type Static = (T::Static, U::Static);
fn to_static(self) -> Self::Static { (self.0.to_static(), self.1.to_static()) }
}
impl ToStatic for ()
{
type Static = ();
fn to_static(self) -> () { () }
}
trait Factory {
type Output;
fn build(&self) -> Self::Output;
}
impl<S,T> Factory for (S, T)
where S: Factory,
T: Factory,
S::Output: ToStatic,
<S::Output as ToStatic>::Static: Upcast<S::Output>,
{
type Output = (S::Output, T::Output);
fn build(&self) -> Self::Output { (self.0.build().to_static().upcast(), self.1.build()) }
}
impl Factory for () {
type Output = ();
fn build(&self) -> Self::Output { () }
}
fn | () {
// More parens, more time.
let it = ((((((((((),()),()),()),()),()),()),()),()),());
it.build();
}
| main | identifier_name |
project-cache-issue-31849.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #31849: the problem here was actually a performance
// cliff, but I'm adding the test for reference.
pub trait Upcast<T> {
fn upcast(self) -> T;
}
impl<S1, S2, T1, T2> Upcast<(T1, T2)> for (S1,S2)
where S1: Upcast<T1>,
S2: Upcast<T2>,
{
fn upcast(self) -> (T1, T2) { (self.0.upcast(), self.1.upcast()) }
}
impl Upcast<()> for ()
{
fn upcast(self) -> () |
}
pub trait ToStatic {
type Static: 'static;
fn to_static(self) -> Self::Static where Self: Sized;
}
impl<T, U> ToStatic for (T, U)
where T: ToStatic,
U: ToStatic
{
type Static = (T::Static, U::Static);
fn to_static(self) -> Self::Static { (self.0.to_static(), self.1.to_static()) }
}
impl ToStatic for ()
{
type Static = ();
fn to_static(self) -> () { () }
}
trait Factory {
type Output;
fn build(&self) -> Self::Output;
}
impl<S,T> Factory for (S, T)
where S: Factory,
T: Factory,
S::Output: ToStatic,
<S::Output as ToStatic>::Static: Upcast<S::Output>,
{
type Output = (S::Output, T::Output);
fn build(&self) -> Self::Output { (self.0.build().to_static().upcast(), self.1.build()) }
}
impl Factory for () {
type Output = ();
fn build(&self) -> Self::Output { () }
}
fn main() {
// More parens, more time.
let it = ((((((((((),()),()),()),()),()),()),()),()),());
it.build();
}
| { () } | identifier_body |
project-cache-issue-31849.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #31849: the problem here was actually a performance
// cliff, but I'm adding the test for reference.
pub trait Upcast<T> {
fn upcast(self) -> T;
}
impl<S1, S2, T1, T2> Upcast<(T1, T2)> for (S1,S2)
where S1: Upcast<T1>,
S2: Upcast<T2>,
{
fn upcast(self) -> (T1, T2) { (self.0.upcast(), self.1.upcast()) }
}
| fn upcast(self) -> () { () }
}
pub trait ToStatic {
type Static: 'static;
fn to_static(self) -> Self::Static where Self: Sized;
}
impl<T, U> ToStatic for (T, U)
where T: ToStatic,
U: ToStatic
{
type Static = (T::Static, U::Static);
fn to_static(self) -> Self::Static { (self.0.to_static(), self.1.to_static()) }
}
impl ToStatic for ()
{
type Static = ();
fn to_static(self) -> () { () }
}
trait Factory {
type Output;
fn build(&self) -> Self::Output;
}
impl<S,T> Factory for (S, T)
where S: Factory,
T: Factory,
S::Output: ToStatic,
<S::Output as ToStatic>::Static: Upcast<S::Output>,
{
type Output = (S::Output, T::Output);
fn build(&self) -> Self::Output { (self.0.build().to_static().upcast(), self.1.build()) }
}
impl Factory for () {
type Output = ();
fn build(&self) -> Self::Output { () }
}
fn main() {
// More parens, more time.
let it = ((((((((((),()),()),()),()),()),()),()),()),());
it.build();
} | impl Upcast<()> for ()
{ | random_line_split |
tri10.rs | use russell_lab::{Matrix, Vector};
/// Defines a triangle with 10 nodes (cubic edges; interior node)
///
/// # Local IDs of nodes
///
/// ```text
/// s
/// |
/// 2, (0,1)
/// | ',
/// | ',
/// 5 7,
/// | ',
/// | ',
/// 8 9 4,
/// | ',
/// | (0,0) ', (1,0)
/// 0-----3-----6-----1 ---- r
/// ```
///
/// # Local IDs of edges
///
/// ```text
/// |\
/// | \
/// | \ 1
/// 2| \
/// | \
/// |_____\
/// 0
/// ```
pub struct Tri10 {}
impl Tri10 {
pub const NDIM: usize = 2;
pub const NNODE: usize = 10;
pub const NEDGE: usize = 3;
pub const NFACE: usize = 0;
pub const EDGE_NNODE: usize = 4;
pub const FACE_NNODE: usize = 0;
pub const FACE_NEDGE: usize = 0;
#[rustfmt::skip]
pub const EDGE_NODE_IDS: [[usize; Tri10::EDGE_NNODE]; Tri10::NEDGE] = [
[0, 1, 3, 6],
[1, 2, 4, 7],
[2, 0, 5, 8],
];
#[rustfmt::skip]
pub const NODE_REFERENCE_COORDS: [[f64; Tri10::NDIM]; Tri10::NNODE] = [
[0.0 , 0.0 ], // 0
[1.0 , 0.0 ], // 1
[0.0 , 1.0 ], // 2
[1.0 / 3.0 , 0.0 ], // 3
[2.0 / 3.0 , 1.0 / 3.0], // 4
[0.0 , 2.0 / 3.0], // 5
[2.0 / 3.0 , 0.0 ], // 6
[1.0 / 3.0 , 2.0 / 3.0], // 7
[0.0 , 1.0 / 3.0], // 8
[1.0 / 3.0 , 1.0 / 3.0], // 9
];
/// Computes the interpolation functions
pub fn calc_interp(interp: &mut Vector, ksi: &[f64]) {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let t1 = s * (3.0 * s - 1.0);
let t2 = z * (3.0 * z - 1.0);
let t3 = r * (3.0 * r - 1.0);
interp[0] = 0.5 * t2 * (3.0 * z - 2.0);
interp[1] = 0.5 * t3 * (3.0 * r - 2.0);
interp[2] = 0.5 * t1 * (3.0 * s - 2.0);
interp[3] = 4.5 * r * t2;
interp[4] = 4.5 * s * t3;
interp[5] = 4.5 * z * t1;
interp[6] = 4.5 * z * t3;
interp[7] = 4.5 * r * t1;
interp[8] = 4.5 * s * t2;
interp[9] = 27.0 * s * z * r;
}
/// Computes the derivatives of interpolation functions
pub fn calc_deriv(deriv: &mut Matrix, ksi: &[f64]) |
}
| {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let q0 = 4.5 * (6.0 * z - 1.0);
let q1 = 4.5 * s * (3.0 * s - 1.0);
let q2 = 4.5 * z * (3.0 * z - 1.0);
let q3 = 4.5 * r * (3.0 * r - 1.0);
let q4 = 4.5 * (6.0 * s - 1.0);
let q5 = 4.5 * (6.0 * r - 1.0);
let q6 = q0 * s;
let q7 = q0 * r;
let q8 = -0.5 * (27.0 * z * z - 18.0 * z + 2.0);
let q9 = 0.5 * (27.0 * s * s - 18.0 * s + 2.0);
let q10 = 0.5 * (27.0 * r * r - 18.0 * r + 2.0);
deriv[0][0] = q8;
deriv[1][0] = q10;
deriv[2][0] = 0.0;
deriv[3][0] = q2 - q7;
deriv[4][0] = s * q5;
deriv[5][0] = -q1;
deriv[6][0] = z * q5 - q3;
deriv[7][0] = q1;
deriv[8][0] = -q6;
deriv[9][0] = 27.0 * s * (z - r);
deriv[0][1] = q8;
deriv[1][1] = 0.0;
deriv[2][1] = q9;
deriv[3][1] = -q7;
deriv[4][1] = q3;
deriv[5][1] = z * q4 - q1;
deriv[6][1] = -q3;
deriv[7][1] = r * q4;
deriv[8][1] = q2 - q6;
deriv[9][1] = 27.0 * r * (z - s);
} | identifier_body |
tri10.rs | use russell_lab::{Matrix, Vector};
/// Defines a triangle with 10 nodes (cubic edges; interior node)
///
/// # Local IDs of nodes
///
/// ```text
/// s
/// |
/// 2, (0,1)
/// | ',
/// | ',
/// 5 7,
/// | ',
/// | ',
/// 8 9 4,
/// | ',
/// | (0,0) ', (1,0)
/// 0-----3-----6-----1 ---- r | /// ```
///
/// # Local IDs of edges
///
/// ```text
/// |\
/// | \
/// | \ 1
/// 2| \
/// | \
/// |_____\
/// 0
/// ```
pub struct Tri10 {}
impl Tri10 {
pub const NDIM: usize = 2;
pub const NNODE: usize = 10;
pub const NEDGE: usize = 3;
pub const NFACE: usize = 0;
pub const EDGE_NNODE: usize = 4;
pub const FACE_NNODE: usize = 0;
pub const FACE_NEDGE: usize = 0;
#[rustfmt::skip]
pub const EDGE_NODE_IDS: [[usize; Tri10::EDGE_NNODE]; Tri10::NEDGE] = [
[0, 1, 3, 6],
[1, 2, 4, 7],
[2, 0, 5, 8],
];
#[rustfmt::skip]
pub const NODE_REFERENCE_COORDS: [[f64; Tri10::NDIM]; Tri10::NNODE] = [
[0.0 , 0.0 ], // 0
[1.0 , 0.0 ], // 1
[0.0 , 1.0 ], // 2
[1.0 / 3.0 , 0.0 ], // 3
[2.0 / 3.0 , 1.0 / 3.0], // 4
[0.0 , 2.0 / 3.0], // 5
[2.0 / 3.0 , 0.0 ], // 6
[1.0 / 3.0 , 2.0 / 3.0], // 7
[0.0 , 1.0 / 3.0], // 8
[1.0 / 3.0 , 1.0 / 3.0], // 9
];
/// Computes the interpolation functions
pub fn calc_interp(interp: &mut Vector, ksi: &[f64]) {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let t1 = s * (3.0 * s - 1.0);
let t2 = z * (3.0 * z - 1.0);
let t3 = r * (3.0 * r - 1.0);
interp[0] = 0.5 * t2 * (3.0 * z - 2.0);
interp[1] = 0.5 * t3 * (3.0 * r - 2.0);
interp[2] = 0.5 * t1 * (3.0 * s - 2.0);
interp[3] = 4.5 * r * t2;
interp[4] = 4.5 * s * t3;
interp[5] = 4.5 * z * t1;
interp[6] = 4.5 * z * t3;
interp[7] = 4.5 * r * t1;
interp[8] = 4.5 * s * t2;
interp[9] = 27.0 * s * z * r;
}
/// Computes the derivatives of interpolation functions
pub fn calc_deriv(deriv: &mut Matrix, ksi: &[f64]) {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let q0 = 4.5 * (6.0 * z - 1.0);
let q1 = 4.5 * s * (3.0 * s - 1.0);
let q2 = 4.5 * z * (3.0 * z - 1.0);
let q3 = 4.5 * r * (3.0 * r - 1.0);
let q4 = 4.5 * (6.0 * s - 1.0);
let q5 = 4.5 * (6.0 * r - 1.0);
let q6 = q0 * s;
let q7 = q0 * r;
let q8 = -0.5 * (27.0 * z * z - 18.0 * z + 2.0);
let q9 = 0.5 * (27.0 * s * s - 18.0 * s + 2.0);
let q10 = 0.5 * (27.0 * r * r - 18.0 * r + 2.0);
deriv[0][0] = q8;
deriv[1][0] = q10;
deriv[2][0] = 0.0;
deriv[3][0] = q2 - q7;
deriv[4][0] = s * q5;
deriv[5][0] = -q1;
deriv[6][0] = z * q5 - q3;
deriv[7][0] = q1;
deriv[8][0] = -q6;
deriv[9][0] = 27.0 * s * (z - r);
deriv[0][1] = q8;
deriv[1][1] = 0.0;
deriv[2][1] = q9;
deriv[3][1] = -q7;
deriv[4][1] = q3;
deriv[5][1] = z * q4 - q1;
deriv[6][1] = -q3;
deriv[7][1] = r * q4;
deriv[8][1] = q2 - q6;
deriv[9][1] = 27.0 * r * (z - s);
}
} | random_line_split |
|
tri10.rs | use russell_lab::{Matrix, Vector};
/// Defines a triangle with 10 nodes (cubic edges; interior node)
///
/// # Local IDs of nodes
///
/// ```text
/// s
/// |
/// 2, (0,1)
/// | ',
/// | ',
/// 5 7,
/// | ',
/// | ',
/// 8 9 4,
/// | ',
/// | (0,0) ', (1,0)
/// 0-----3-----6-----1 ---- r
/// ```
///
/// # Local IDs of edges
///
/// ```text
/// |\
/// | \
/// | \ 1
/// 2| \
/// | \
/// |_____\
/// 0
/// ```
pub struct Tri10 {}
impl Tri10 {
pub const NDIM: usize = 2;
pub const NNODE: usize = 10;
pub const NEDGE: usize = 3;
pub const NFACE: usize = 0;
pub const EDGE_NNODE: usize = 4;
pub const FACE_NNODE: usize = 0;
pub const FACE_NEDGE: usize = 0;
#[rustfmt::skip]
pub const EDGE_NODE_IDS: [[usize; Tri10::EDGE_NNODE]; Tri10::NEDGE] = [
[0, 1, 3, 6],
[1, 2, 4, 7],
[2, 0, 5, 8],
];
#[rustfmt::skip]
pub const NODE_REFERENCE_COORDS: [[f64; Tri10::NDIM]; Tri10::NNODE] = [
[0.0 , 0.0 ], // 0
[1.0 , 0.0 ], // 1
[0.0 , 1.0 ], // 2
[1.0 / 3.0 , 0.0 ], // 3
[2.0 / 3.0 , 1.0 / 3.0], // 4
[0.0 , 2.0 / 3.0], // 5
[2.0 / 3.0 , 0.0 ], // 6
[1.0 / 3.0 , 2.0 / 3.0], // 7
[0.0 , 1.0 / 3.0], // 8
[1.0 / 3.0 , 1.0 / 3.0], // 9
];
/// Computes the interpolation functions
pub fn calc_interp(interp: &mut Vector, ksi: &[f64]) {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let t1 = s * (3.0 * s - 1.0);
let t2 = z * (3.0 * z - 1.0);
let t3 = r * (3.0 * r - 1.0);
interp[0] = 0.5 * t2 * (3.0 * z - 2.0);
interp[1] = 0.5 * t3 * (3.0 * r - 2.0);
interp[2] = 0.5 * t1 * (3.0 * s - 2.0);
interp[3] = 4.5 * r * t2;
interp[4] = 4.5 * s * t3;
interp[5] = 4.5 * z * t1;
interp[6] = 4.5 * z * t3;
interp[7] = 4.5 * r * t1;
interp[8] = 4.5 * s * t2;
interp[9] = 27.0 * s * z * r;
}
/// Computes the derivatives of interpolation functions
pub fn | (deriv: &mut Matrix, ksi: &[f64]) {
let (r, s) = (ksi[0], ksi[1]);
let z = 1.0 - r - s;
let q0 = 4.5 * (6.0 * z - 1.0);
let q1 = 4.5 * s * (3.0 * s - 1.0);
let q2 = 4.5 * z * (3.0 * z - 1.0);
let q3 = 4.5 * r * (3.0 * r - 1.0);
let q4 = 4.5 * (6.0 * s - 1.0);
let q5 = 4.5 * (6.0 * r - 1.0);
let q6 = q0 * s;
let q7 = q0 * r;
let q8 = -0.5 * (27.0 * z * z - 18.0 * z + 2.0);
let q9 = 0.5 * (27.0 * s * s - 18.0 * s + 2.0);
let q10 = 0.5 * (27.0 * r * r - 18.0 * r + 2.0);
deriv[0][0] = q8;
deriv[1][0] = q10;
deriv[2][0] = 0.0;
deriv[3][0] = q2 - q7;
deriv[4][0] = s * q5;
deriv[5][0] = -q1;
deriv[6][0] = z * q5 - q3;
deriv[7][0] = q1;
deriv[8][0] = -q6;
deriv[9][0] = 27.0 * s * (z - r);
deriv[0][1] = q8;
deriv[1][1] = 0.0;
deriv[2][1] = q9;
deriv[3][1] = -q7;
deriv[4][1] = q3;
deriv[5][1] = z * q4 - q1;
deriv[6][1] = -q3;
deriv[7][1] = r * q4;
deriv[8][1] = q2 - q6;
deriv[9][1] = 27.0 * r * (z - s);
}
}
| calc_deriv | identifier_name |
test_different_outputs.py | import unittest
from polycircles import polycircles
from nose.tools import assert_equal, assert_almost_equal
class TestDifferentOutputs(unittest.TestCase):
"""Tests the various output methods: KML style, WKT, lat-lon and lon-lat."""
def setUp(self):
self.latitude = 32.074322
self.longitude = 34.792081
self.radius_meters = 100
self.number_of_vertices = 36
self.polycircle = \
polycircles.Polycircle(latitude=self.latitude,
longitude=self.longitude,
radius=self.radius_meters,
number_of_vertices=self.number_of_vertices)
def test_lat_lon_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lat_lon():
assert_almost_equal(vertex[0], self.latitude, places=2)
assert_almost_equal(vertex[1], self.longitude, places=2)
def test_lon_lat_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lon_lat():
assert_almost_equal(vertex[0], self.longitude, places=2)
assert_almost_equal(vertex[1], self.latitude, places=2)
def test_vertices_equals_lat_lon(self):
"""Asserts that the "vertices" property is identical to the return
value of to_lat_lon()."""
assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon())
def | (self):
"""Asserts that the return value of to_kml() property is identical to
the return value of to_lon_lat()."""
assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat())
if __name__ == '__main__':
unittest.main() | test_kml_equals_lon_lat | identifier_name |
test_different_outputs.py | import unittest
from polycircles import polycircles
from nose.tools import assert_equal, assert_almost_equal
class TestDifferentOutputs(unittest.TestCase):
"""Tests the various output methods: KML style, WKT, lat-lon and lon-lat."""
def setUp(self):
self.latitude = 32.074322
self.longitude = 34.792081
self.radius_meters = 100
self.number_of_vertices = 36
self.polycircle = \
polycircles.Polycircle(latitude=self.latitude,
longitude=self.longitude,
radius=self.radius_meters,
number_of_vertices=self.number_of_vertices)
def test_lat_lon_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lat_lon():
assert_almost_equal(vertex[0], self.latitude, places=2)
assert_almost_equal(vertex[1], self.longitude, places=2)
def test_lon_lat_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lon_lat():
assert_almost_equal(vertex[0], self.longitude, places=2)
assert_almost_equal(vertex[1], self.latitude, places=2)
def test_vertices_equals_lat_lon(self):
"""Asserts that the "vertices" property is identical to the return
value of to_lat_lon()."""
assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon())
def test_kml_equals_lon_lat(self):
"""Asserts that the return value of to_kml() property is identical to
the return value of to_lon_lat()."""
assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat())
if __name__ == '__main__':
| unittest.main() | conditional_block |
|
test_different_outputs.py | import unittest
from polycircles import polycircles
from nose.tools import assert_equal, assert_almost_equal
class TestDifferentOutputs(unittest.TestCase):
"""Tests the various output methods: KML style, WKT, lat-lon and lon-lat."""
def setUp(self):
self.latitude = 32.074322
self.longitude = 34.792081
self.radius_meters = 100
self.number_of_vertices = 36
self.polycircle = \
polycircles.Polycircle(latitude=self.latitude,
longitude=self.longitude,
radius=self.radius_meters,
number_of_vertices=self.number_of_vertices)
def test_lat_lon_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lat_lon():
assert_almost_equal(vertex[0], self.latitude, places=2)
assert_almost_equal(vertex[1], self.longitude, places=2)
def test_lon_lat_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lon_lat():
assert_almost_equal(vertex[0], self.longitude, places=2) | assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon())
def test_kml_equals_lon_lat(self):
"""Asserts that the return value of to_kml() property is identical to
the return value of to_lon_lat()."""
assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat())
if __name__ == '__main__':
unittest.main() | assert_almost_equal(vertex[1], self.latitude, places=2)
def test_vertices_equals_lat_lon(self):
"""Asserts that the "vertices" property is identical to the return
value of to_lat_lon().""" | random_line_split |
test_different_outputs.py | import unittest
from polycircles import polycircles
from nose.tools import assert_equal, assert_almost_equal
class TestDifferentOutputs(unittest.TestCase):
"""Tests the various output methods: KML style, WKT, lat-lon and lon-lat."""
def setUp(self):
self.latitude = 32.074322
self.longitude = 34.792081
self.radius_meters = 100
self.number_of_vertices = 36
self.polycircle = \
polycircles.Polycircle(latitude=self.latitude,
longitude=self.longitude,
radius=self.radius_meters,
number_of_vertices=self.number_of_vertices)
def test_lat_lon_output(self):
"""Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lat_lon():
assert_almost_equal(vertex[0], self.latitude, places=2)
assert_almost_equal(vertex[1], self.longitude, places=2)
def test_lon_lat_output(self):
|
def test_vertices_equals_lat_lon(self):
"""Asserts that the "vertices" property is identical to the return
value of to_lat_lon()."""
assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon())
def test_kml_equals_lon_lat(self):
"""Asserts that the return value of to_kml() property is identical to
the return value of to_lon_lat()."""
assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat())
if __name__ == '__main__':
unittest.main() | """Asserts that the vertices in the lat-lon output are in the
right order (lat before long)."""
for vertex in self.polycircle.to_lon_lat():
assert_almost_equal(vertex[0], self.longitude, places=2)
assert_almost_equal(vertex[1], self.latitude, places=2) | identifier_body |
safeEval.py | #!/usr/bin/env python3
# Copyright 2016 - 2021 Bas van Meerten and Wouter Franssen
# This file is part of ssNake.
#
# ssNake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ssNake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ssNake. If not, see <http://www.gnu.org/licenses/>.
import re
import numpy as np
import scipy.special
import hypercomplex as hc
def safeEval(inp, length=None, Type='All', x=None):
| """
Creates a more restricted eval environment.
Note that this method is still not acceptable to process strings from untrusted sources.
Parameters
----------
inp : str
String to evaluate.
length : int or float, optional
The variable length will be set to this value.
By default the variable length is not set.
Type : {'All', 'FI', 'C'}, optional
Type of expected output. 'All' will return all types, 'FI' will return a float or int, and 'C' will return a complex number.
By default Type is set to 'All'
x : array_like, optional
The variable x is set to this variable,
By default the variable x is not used.
Returns
-------
Object
The result of the evaluated string.
"""
env = vars(np).copy()
env.update(vars(hc).copy())
env.update(vars(scipy.special).copy())
env.update(vars(scipy.integrate).copy())
env["locals"] = None
env["globals"] = None
env["__name__"] = None
env["__file__"] = None
env["__builtins__"] = {'None': None, 'False': False, 'True':True} # None
env["slice"] = slice
if length is not None:
env["length"] = length
if x is not None:
env["x"] = x
inp = re.sub('([0-9]+)[kK]', '\g<1>*1024', str(inp))
try:
val = eval(inp, env)
if isinstance(val, str):
return None
if Type == 'All':
return val
if Type == 'FI': #single float/int type
if isinstance(val, (float, int)) and not np.isnan(val) and not np.isinf(val):
return val
return None
if Type == 'C': #single complex number
if isinstance(val, (float, int, complex)) and not np.isnan(val) and not np.isinf(val):
return val
return None
except Exception:
return None | identifier_body |
|
safeEval.py | #!/usr/bin/env python3
# Copyright 2016 - 2021 Bas van Meerten and Wouter Franssen
# This file is part of ssNake.
#
# ssNake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ssNake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ssNake. If not, see <http://www.gnu.org/licenses/>.
import re
import numpy as np
import scipy.special
import hypercomplex as hc
def | (inp, length=None, Type='All', x=None):
"""
Creates a more restricted eval environment.
Note that this method is still not acceptable to process strings from untrusted sources.
Parameters
----------
inp : str
String to evaluate.
length : int or float, optional
The variable length will be set to this value.
By default the variable length is not set.
Type : {'All', 'FI', 'C'}, optional
Type of expected output. 'All' will return all types, 'FI' will return a float or int, and 'C' will return a complex number.
By default Type is set to 'All'
x : array_like, optional
The variable x is set to this variable,
By default the variable x is not used.
Returns
-------
Object
The result of the evaluated string.
"""
env = vars(np).copy()
env.update(vars(hc).copy())
env.update(vars(scipy.special).copy())
env.update(vars(scipy.integrate).copy())
env["locals"] = None
env["globals"] = None
env["__name__"] = None
env["__file__"] = None
env["__builtins__"] = {'None': None, 'False': False, 'True':True} # None
env["slice"] = slice
if length is not None:
env["length"] = length
if x is not None:
env["x"] = x
inp = re.sub('([0-9]+)[kK]', '\g<1>*1024', str(inp))
try:
val = eval(inp, env)
if isinstance(val, str):
return None
if Type == 'All':
return val
if Type == 'FI': #single float/int type
if isinstance(val, (float, int)) and not np.isnan(val) and not np.isinf(val):
return val
return None
if Type == 'C': #single complex number
if isinstance(val, (float, int, complex)) and not np.isnan(val) and not np.isinf(val):
return val
return None
except Exception:
return None
| safeEval | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.