file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes)
---|---|---|---|---
visitors.js | import * as virtualTypes from "./path/lib/virtual-types";
import * as messages from "babel-messages";
import * as t from "babel-types";
import clone from "lodash/clone";
/**
* explode() takes a visitor object with all of the various shorthands
* that we support, and validates & normalizes it into a common format, ready
* to be used in traversal
*
* The various shorthands are:
* * `Identifier() { ... }` -> `Identifier: { enter() { ... } }`
* * `"Identifier|NumericLiteral": { ... }` -> `Identifier: { ... }, NumericLiteral: { ... }`
* * Aliases in `babel-types`: e.g. `Property: { ... }` -> `ObjectProperty: { ... }, ClassProperty: { ... }`
*
* Other normalizations are:
* * Visitors of virtual types are wrapped, so that they are only visited when
* their dynamic check passes
* * `enter` and `exit` functions are wrapped in arrays, to ease merging of
* visitors
*/
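// Illustrative sketch (added; not part of the original babel-traverse source).
// It shows how the shorthands documented above normalize; the visitor literal
// and the resulting shapes are hypothetical examples, not fixtures of this module.
//
//   const v = explode({
//     Identifier() { /* enter */ },
//     "Identifier|NumericLiteral": { exit() { /* exit */ } },
//   });
//   // v.Identifier     => { enter: [fn], exit: [fn] }
//   // v.NumericLiteral => { exit: [fn] }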
export function explode(visitor) {
if (visitor._exploded) return visitor;
visitor._exploded = true;
// normalise pipes
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let parts: Array<string> = nodeType.split("|");
if (parts.length === 1) continue;
let fns = visitor[nodeType];
delete visitor[nodeType];
for (let part of parts) {
visitor[part] = fns;
}
}
// verify data structure
verify(visitor);
// make sure there's no __esModule type; it shows up because we're using loose mode,
// which sets __esModule to be enumerable on all modules :(
delete visitor.__esModule;
// ensure visitors are objects
ensureEntranceObjects(visitor);
// ensure enter/exit callbacks are arrays
ensureCallbackArrays(visitor);
// add type wrappers
for (let nodeType of (Object.keys(visitor): Array)) {
if (shouldIgnoreKey(nodeType)) continue;
let wrapper = virtualTypes[nodeType];
if (!wrapper) continue;
// wrap all the functions
let fns = visitor[nodeType];
for (let type in fns) {
fns[type] = wrapCheck(wrapper, fns[type]);
}
// clear it from the visitor
delete visitor[nodeType];
if (wrapper.types) {
for (let type of (wrapper.types: Array<string>)) {
// merge the visitor if necessary or just put it back in
if (visitor[type]) {
mergePair(visitor[type], fns);
} else {
visitor[type] = fns;
}
}
} else {
mergePair(visitor, fns);
}
}
// add aliases
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let fns = visitor[nodeType];
let aliases: ?Array<string> = t.FLIPPED_ALIAS_KEYS[nodeType];
let deprecatedKey = t.DEPRECATED_KEYS[nodeType];
if (deprecatedKey) {
console.trace(`Visitor defined for ${nodeType} but it has been renamed to ${deprecatedKey}`);
aliases = [deprecatedKey];
}
if (!aliases) continue;
// clear it from the visitor
delete visitor[nodeType];
for (let alias of aliases) {
let existing = visitor[alias];
if (existing) {
mergePair(existing, fns);
} else {
visitor[alias] = clone(fns);
}
}
}
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
ensureCallbackArrays(visitor[nodeType]);
}
return visitor;
}
export function verify(visitor) {
if (visitor._verified) return;
if (typeof visitor === "function") {
throw new Error(messages.get("traverseVerifyRootFunction"));
}
for (let nodeType in visitor) {
if (nodeType === "enter" || nodeType === "exit") {
validateVisitorMethods(nodeType, visitor[nodeType]);
}
if (shouldIgnoreKey(nodeType)) continue;
if (t.TYPES.indexOf(nodeType) < 0) {
throw new Error(messages.get("traverseVerifyNodeType", nodeType));
}
let visitors = visitor[nodeType];
if (typeof visitors === "object") {
for (let visitorKey in visitors) {
if (visitorKey === "enter" || visitorKey === "exit") {
// verify that it just contains functions
validateVisitorMethods(`${nodeType}.${visitorKey}`, visitors[visitorKey]);
} else {
throw new Error(messages.get("traverseVerifyVisitorProperty", nodeType, visitorKey));
}
}
}
}
visitor._verified = true;
}
function validateVisitorMethods(path, val) {
let fns = [].concat(val);
for (let fn of fns) {
if (typeof fn !== "function") {
throw new TypeError(`Non-function found defined in ${path} with type ${typeof fn}`);
}
}
}
export function merge(visitors: Array, states: Array = []) {
let rootVisitor = {};
for (let i = 0; i < visitors.length; i++) {
let visitor = visitors[i];
let state = states[i];
explode(visitor);
for (let type in visitor) {
let visitorType = visitor[type];
// if we have state then overload the callbacks to take it
if (state) visitorType = wrapWithState(visitorType, state);
let nodeVisitor = rootVisitor[type] = rootVisitor[type] || {};
mergePair(nodeVisitor, visitorType);
}
}
return rootVisitor;
}
function wrapWithState(oldVisitor, state) {
let newVisitor = {};
for (let key in oldVisitor) {
let fns = oldVisitor[key];
// not an enter/exit array of callbacks
if (!Array.isArray(fns)) continue;
fns = fns.map(function (fn) {
let newFn = function (path) {
return fn.call(state, path, state);
};
newFn.toString = () => fn.toString();
return newFn;
});
newVisitor[key] = fns;
}
return newVisitor; | }
function ensureEntranceObjects(obj) {
for (let key in obj) {
if (shouldIgnoreKey(key)) continue;
let fns = obj[key];
if (typeof fns === "function") {
obj[key] = { enter: fns };
}
}
}
function ensureCallbackArrays(obj) {
if (obj.enter && !Array.isArray(obj.enter)) obj.enter = [obj.enter];
if (obj.exit && !Array.isArray(obj.exit)) obj.exit = [obj.exit];
}
function wrapCheck(wrapper, fn) {
let newFn = function (path) {
if (wrapper.checkPath(path)) {
return fn.apply(this, arguments);
}
};
newFn.toString = () => fn.toString();
return newFn;
}
function shouldIgnoreKey(key) {
// internal/hidden key
if (key[0] === "_") return true;
// ignore function keys
if (key === "enter" || key === "exit" || key === "shouldSkip") return true;
// ignore other options
if (key === "blacklist" || key === "noScope" || key === "skipKeys") return true;
return false;
}
function mergePair(dest, src) {
for (let key in src) {
dest[key] = [].concat(dest[key] || [], src[key]);
}
} | random_line_split |
visitors.js | import * as virtualTypes from "./path/lib/virtual-types";
import * as messages from "babel-messages";
import * as t from "babel-types";
import clone from "lodash/clone";
/**
* explode() takes a visitor object with all of the various shorthands
* that we support, and validates & normalizes it into a common format, ready
* to be used in traversal
*
* The various shorthands are:
* * `Identifier() { ... }` -> `Identifier: { enter() { ... } }`
* * `"Identifier|NumericLiteral": { ... }` -> `Identifier: { ... }, NumericLiteral: { ... }`
* * Aliases in `babel-types`: e.g. `Property: { ... }` -> `ObjectProperty: { ... }, ClassProperty: { ... }`
*
* Other normalizations are:
* * Visitors of virtual types are wrapped, so that they are only visited when
* their dynamic check passes
* * `enter` and `exit` functions are wrapped in arrays, to ease merging of
* visitors
*/
export function explode(visitor) {
if (visitor._exploded) return visitor;
visitor._exploded = true;
// normalise pipes
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let parts: Array<string> = nodeType.split("|");
if (parts.length === 1) continue;
let fns = visitor[nodeType];
delete visitor[nodeType];
for (let part of parts) {
visitor[part] = fns;
}
}
// verify data structure
verify(visitor);
// make sure there's no __esModule type; it shows up because we're using loose mode,
// which sets __esModule to be enumerable on all modules :(
delete visitor.__esModule;
// ensure visitors are objects
ensureEntranceObjects(visitor);
// ensure enter/exit callbacks are arrays
ensureCallbackArrays(visitor);
// add type wrappers
for (let nodeType of (Object.keys(visitor): Array)) {
if (shouldIgnoreKey(nodeType)) continue;
let wrapper = virtualTypes[nodeType];
if (!wrapper) continue;
// wrap all the functions
let fns = visitor[nodeType];
for (let type in fns) {
fns[type] = wrapCheck(wrapper, fns[type]);
}
// clear it from the visitor
delete visitor[nodeType];
if (wrapper.types) {
for (let type of (wrapper.types: Array<string>)) {
// merge the visitor if necessary or just put it back in
if (visitor[type]) {
mergePair(visitor[type], fns);
} else {
visitor[type] = fns;
}
}
} else {
mergePair(visitor, fns);
}
}
// add aliases
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let fns = visitor[nodeType];
let aliases: ?Array<string> = t.FLIPPED_ALIAS_KEYS[nodeType];
let deprecatedKey = t.DEPRECATED_KEYS[nodeType];
if (deprecatedKey) {
console.trace(`Visitor defined for ${nodeType} but it has been renamed to ${deprecatedKey}`);
aliases = [deprecatedKey];
}
if (!aliases) continue;
// clear it from the visitor
delete visitor[nodeType];
for (let alias of aliases) {
let existing = visitor[alias];
if (existing) {
mergePair(existing, fns);
} else {
visitor[alias] = clone(fns);
}
}
}
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
ensureCallbackArrays(visitor[nodeType]);
}
return visitor;
}
export function verify(visitor) {
if (visitor._verified) return;
if (typeof visitor === "function") {
throw new Error(messages.get("traverseVerifyRootFunction"));
}
for (let nodeType in visitor) {
if (nodeType === "enter" || nodeType === "exit") {
validateVisitorMethods(nodeType, visitor[nodeType]);
}
if (shouldIgnoreKey(nodeType)) continue;
if (t.TYPES.indexOf(nodeType) < 0) {
throw new Error(messages.get("traverseVerifyNodeType", nodeType));
}
let visitors = visitor[nodeType];
if (typeof visitors === "object") {
for (let visitorKey in visitors) {
if (visitorKey === "enter" || visitorKey === "exit") {
// verify that it just contains functions
validateVisitorMethods(`${nodeType}.${visitorKey}`, visitors[visitorKey]);
} else {
throw new Error(messages.get("traverseVerifyVisitorProperty", nodeType, visitorKey));
}
}
}
}
visitor._verified = true;
}
function validateVisitorMethods(path, val) {
let fns = [].concat(val);
for (let fn of fns) {
if (typeof fn !== "function") {
throw new TypeError(`Non-function found defined in ${path} with type ${typeof fn}`);
}
}
}
export function merge(visitors: Array, states: Array = []) {
let rootVisitor = {};
for (let i = 0; i < visitors.length; i++) {
let visitor = visitors[i];
let state = states[i];
explode(visitor);
for (let type in visitor) {
let visitorType = visitor[type];
// if we have state then overload the callbacks to take it
if (state) visitorType = wrapWithState(visitorType, state);
let nodeVisitor = rootVisitor[type] = rootVisitor[type] || {};
mergePair(nodeVisitor, visitorType);
}
}
return rootVisitor;
}
function wrapWithState(oldVisitor, state) {
let newVisitor = {};
for (let key in oldVisitor) {
let fns = oldVisitor[key];
// not an enter/exit array of callbacks
if (!Array.isArray(fns)) continue;
fns = fns.map(function (fn) {
let newFn = function (path) {
return fn.call(state, path, state);
};
newFn.toString = () => fn.toString();
return newFn;
});
newVisitor[key] = fns;
}
return newVisitor;
}
function ensureEntranceObjects(obj) {
for (let key in obj) {
if (shouldIgnoreKey(key)) continue;
let fns = obj[key];
if (typeof fns === "function") {
obj[key] = { enter: fns };
}
}
}
function ensureCallbackArrays(obj) {
if (obj.enter && !Array.isArray(obj.enter)) obj.enter = [obj.enter];
if (obj.exit && !Array.isArray(obj.exit)) obj.exit = [obj.exit];
}
function wrapCheck(wrapper, fn) {
let newFn = function (path) {
if (wrapper.checkPath(path)) {
return fn.apply(this, arguments);
}
};
newFn.toString = () => fn.toString();
return newFn;
}
function shouldIgnoreKey(key) {
// internal/hidden key
if (key[0] === "_") return true;
// ignore function keys
if (key === "enter" || key === "exit" || key === "shouldSkip") return true;
// ignore other options
if (key === "blacklist" || key === "noScope" || key === "skipKeys") return true;
return false;
}
function | (dest, src) {
for (let key in src) {
dest[key] = [].concat(dest[key] || [], src[key]);
}
}
| mergePair | identifier_name |
unused-attr.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
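// Note (added): the `//~ ERROR unused attribute` markers below are compiletest
// annotations, not ordinary comments -- the test harness asserts that rustc
// emits the named diagnostic on the same line.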
#![deny(unused_attributes)]
#![allow(dead_code, unused_imports)]
#![feature(core, custom_attribute)]
#![foo] //~ ERROR unused attribute
#[foo] //~ ERROR unused attribute
extern crate core;
#[foo] //~ ERROR unused attribute
use std::collections;
#[foo] //~ ERROR unused attribute
extern "C" {
#[foo] //~ ERROR unused attribute
fn foo();
}
#[foo] //~ ERROR unused attribute
mod foo {
#[foo] //~ ERROR unused attribute
pub enum Foo {
#[foo] //~ ERROR unused attribute
Bar,
}
}
#[foo] //~ ERROR unused attribute
fn bar(f: foo::Foo) {
match f {
#[foo] //~ ERROR unused attribute
foo::Foo::Bar => |
}
}
#[foo] //~ ERROR unused attribute
struct Foo {
#[foo] //~ ERROR unused attribute
a: isize
}
#[foo] //~ ERROR unused attribute
trait Baz {
#[foo] //~ ERROR unused attribute
fn blah();
#[foo] //~ ERROR unused attribute
fn blah2() {}
}
fn main() {}
| {} | conditional_block |
unused-attr.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_attributes)]
#![allow(dead_code, unused_imports)]
#![feature(core, custom_attribute)]
#![foo] //~ ERROR unused attribute
#[foo] //~ ERROR unused attribute
extern crate core;
#[foo] //~ ERROR unused attribute
use std::collections;
#[foo] //~ ERROR unused attribute
extern "C" {
#[foo] //~ ERROR unused attribute
fn foo();
}
#[foo] //~ ERROR unused attribute
mod foo {
#[foo] //~ ERROR unused attribute
pub enum Foo {
#[foo] //~ ERROR unused attribute
Bar,
}
}
#[foo] //~ ERROR unused attribute
fn bar(f: foo::Foo) |
#[foo] //~ ERROR unused attribute
struct Foo {
#[foo] //~ ERROR unused attribute
a: isize
}
#[foo] //~ ERROR unused attribute
trait Baz {
#[foo] //~ ERROR unused attribute
fn blah();
#[foo] //~ ERROR unused attribute
fn blah2() {}
}
fn main() {}
| {
match f {
#[foo] //~ ERROR unused attribute
foo::Foo::Bar => {}
}
} | identifier_body |
unused-attr.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_attributes)]
#![allow(dead_code, unused_imports)]
#![feature(core, custom_attribute)]
#![foo] //~ ERROR unused attribute
#[foo] //~ ERROR unused attribute
extern crate core;
#[foo] //~ ERROR unused attribute
use std::collections;
#[foo] //~ ERROR unused attribute
extern "C" {
#[foo] //~ ERROR unused attribute
fn foo();
}
#[foo] //~ ERROR unused attribute
mod foo {
#[foo] //~ ERROR unused attribute
pub enum Foo {
#[foo] //~ ERROR unused attribute
Bar,
}
}
#[foo] //~ ERROR unused attribute
fn bar(f: foo::Foo) {
match f {
#[foo] //~ ERROR unused attribute
foo::Foo::Bar => {}
}
}
#[foo] //~ ERROR unused attribute
struct | {
#[foo] //~ ERROR unused attribute
a: isize
}
#[foo] //~ ERROR unused attribute
trait Baz {
#[foo] //~ ERROR unused attribute
fn blah();
#[foo] //~ ERROR unused attribute
fn blah2() {}
}
fn main() {}
| Foo | identifier_name |
unused-attr.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_attributes)]
#![allow(dead_code, unused_imports)]
#![feature(core, custom_attribute)]
#![foo] //~ ERROR unused attribute
#[foo] //~ ERROR unused attribute
extern crate core;
#[foo] //~ ERROR unused attribute
use std::collections;
#[foo] //~ ERROR unused attribute
extern "C" {
#[foo] //~ ERROR unused attribute
fn foo();
}
#[foo] //~ ERROR unused attribute
mod foo {
#[foo] //~ ERROR unused attribute
pub enum Foo {
#[foo] //~ ERROR unused attribute
Bar,
}
}
#[foo] //~ ERROR unused attribute
fn bar(f: foo::Foo) {
match f {
#[foo] //~ ERROR unused attribute
foo::Foo::Bar => {}
}
}
#[foo] //~ ERROR unused attribute
struct Foo {
#[foo] //~ ERROR unused attribute
a: isize
}
#[foo] //~ ERROR unused attribute
trait Baz {
#[foo] //~ ERROR unused attribute
fn blah();
#[foo] //~ ERROR unused attribute
fn blah2() {}
}
fn main() {} | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// | random_line_split |
__init__.py | #
# Copyright (c) 2005
# The President and Fellows of Harvard College.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the University nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Author: Geoffrey Mainland <[email protected]>
#
__all__ = ["message", "packet", "utils", "tossim"] | # ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OR CONTRIBUTORS BE LIABLE | random_line_split |
MediaManager.ts | ///<reference path='refs.ts'/>
module TDev.RT.WinRT {
export function MediaManagerInit() {
MediaManagerWinRT.init();
}
export module MediaManagerWinRT
{
var _url: string;
export function init()
|
function sourceRequested(e : Windows.Media.PlayTo.PlayToSourceRequestedEventArgs) {
if (_url) {
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = null;
}
try {
var video = <HTMLVideoElement>elt("video");
video.src = _url;
_url = null;
e.sourceRequest.setSource((<any>video).msPlayToSource);
} catch (ex) {
Time.log(ex.message);
}
}
}
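// Note (added): playMedia() below only stashes the URL and registers
// sourceRequested() with the PlayTo manager; the <video> element is created
// and handed to Windows lazily, once PlayTo actually requests a source.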
export function playMedia(url: string)
{
_url = url;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = sourceRequested;
(<any>playToManager).showPlayToUI();
}
}
}
}
| {
if (Browser.isGenStubs) return;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.defaultSourceSelection = false;
playToManager.onsourcerequested = null;
}
} | identifier_body |
MediaManager.ts | ///<reference path='refs.ts'/>
module TDev.RT.WinRT {
export function MediaManagerInit() {
MediaManagerWinRT.init();
}
export module MediaManagerWinRT
{
var _url: string;
export function | ()
{
if (Browser.isGenStubs) return;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.defaultSourceSelection = false;
playToManager.onsourcerequested = null;
}
}
function sourceRequested(e : Windows.Media.PlayTo.PlayToSourceRequestedEventArgs) {
if (_url) {
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = null;
}
try {
var video = <HTMLVideoElement>elt("video");
video.src = _url;
_url = null;
e.sourceRequest.setSource((<any>video).msPlayToSource);
} catch (ex) {
Time.log(ex.message);
}
}
}
export function playMedia(url: string)
{
_url = url;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = sourceRequested;
(<any>playToManager).showPlayToUI();
}
}
}
}
| init | identifier_name |
MediaManager.ts | ///<reference path='refs.ts'/>
module TDev.RT.WinRT {
export function MediaManagerInit() {
MediaManagerWinRT.init();
}
export module MediaManagerWinRT
{
var _url: string;
export function init()
{
if (Browser.isGenStubs) return;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.defaultSourceSelection = false;
playToManager.onsourcerequested = null;
}
}
function sourceRequested(e : Windows.Media.PlayTo.PlayToSourceRequestedEventArgs) {
if (_url) {
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = null;
}
try {
var video = <HTMLVideoElement>elt("video");
video.src = _url;
_url = null;
e.sourceRequest.setSource((<any>video).msPlayToSource);
} catch (ex) {
Time.log(ex.message);
}
}
}
export function playMedia(url: string)
{
_url = url;
if (Windows.Media.PlayTo.PlayToManager) |
}
}
}
| {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = sourceRequested;
(<any>playToManager).showPlayToUI();
} | conditional_block |
MediaManager.ts | ///<reference path='refs.ts'/>
module TDev.RT.WinRT {
export function MediaManagerInit() {
MediaManagerWinRT.init();
}
export module MediaManagerWinRT
{
var _url: string;
export function init()
{
if (Browser.isGenStubs) return;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.defaultSourceSelection = false;
playToManager.onsourcerequested = null;
}
}
function sourceRequested(e : Windows.Media.PlayTo.PlayToSourceRequestedEventArgs) {
if (_url) {
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = null; | _url = null;
e.sourceRequest.setSource((<any>video).msPlayToSource);
} catch (ex) {
Time.log(ex.message);
}
}
}
export function playMedia(url: string)
{
_url = url;
if (Windows.Media.PlayTo.PlayToManager) {
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.onsourcerequested = sourceRequested;
(<any>playToManager).showPlayToUI();
}
}
}
} | }
try {
var video = <HTMLVideoElement>elt("video");
video.src = _url; | random_line_split |
barbican-keystone-listener.py | #!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican Keystone notification listener server.
"""
import eventlet
import os
import sys
# Oslo messaging notification server uses eventlet.
#
# To have remote debugging, thread module needs to be disabled.
# eventlet.monkey_patch(thread=False)
eventlet.monkey_patch()
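# Note (added): the monkey patching above must run before the barbican/oslo
# imports below, so that the modules they bind to are already the green
# (eventlet-patched) versions of the stdlib.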
# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
|
from barbican.common import config
from barbican.openstack.common import log
from barbican.openstack.common import service
from barbican import queue
from barbican.queue import keystone_listener
from oslo_config import cfg
def fail(returncode, e):
sys.stderr.write("ERROR: {0}\n".format(e))
sys.exit(returncode)
if __name__ == '__main__':
try:
config.parse_args()
config.setup_remote_pydev_debug()
# Import and configure logging.
log.setup('barbican')
LOG = log.getLogger(__name__)
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
CONF = cfg.CONF
queue.init(CONF)
if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'):
service.launch(
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e)
| sys.path.insert(0, possible_topdir) | conditional_block |
barbican-keystone-listener.py | #!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican Keystone notification listener server.
"""
import eventlet
import os
import sys
# Oslo messaging notification server uses eventlet.
#
# To have remote debugging, thread module needs to be disabled.
# eventlet.monkey_patch(thread=False)
eventlet.monkey_patch()
# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
sys.path.insert(0, possible_topdir)
from barbican.common import config
from barbican.openstack.common import log
from barbican.openstack.common import service
from barbican import queue
from barbican.queue import keystone_listener
from oslo_config import cfg
def | (returncode, e):
sys.stderr.write("ERROR: {0}\n".format(e))
sys.exit(returncode)
if __name__ == '__main__':
try:
config.parse_args()
config.setup_remote_pydev_debug()
# Import and configure logging.
log.setup('barbican')
LOG = log.getLogger(__name__)
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
CONF = cfg.CONF
queue.init(CONF)
if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'):
service.launch(
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e)
| fail | identifier_name |
barbican-keystone-listener.py | #!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0 | # implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican Keystone notification listener server.
"""
import eventlet
import os
import sys
# Oslo messaging notification server uses eventlet.
#
# To have remote debugging, thread module needs to be disabled.
# eventlet.monkey_patch(thread=False)
eventlet.monkey_patch()
# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
sys.path.insert(0, possible_topdir)
from barbican.common import config
from barbican.openstack.common import log
from barbican.openstack.common import service
from barbican import queue
from barbican.queue import keystone_listener
from oslo_config import cfg
def fail(returncode, e):
sys.stderr.write("ERROR: {0}\n".format(e))
sys.exit(returncode)
if __name__ == '__main__':
try:
config.parse_args()
config.setup_remote_pydev_debug()
# Import and configure logging.
log.setup('barbican')
LOG = log.getLogger(__name__)
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
CONF = cfg.CONF
queue.init(CONF)
if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'):
service.launch(
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e) | #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | random_line_split |
barbican-keystone-listener.py | #!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican Keystone notification listener server.
"""
import eventlet
import os
import sys
# Oslo messaging notification server uses eventlet.
#
# To have remote debugging, thread module needs to be disabled.
# eventlet.monkey_patch(thread=False)
eventlet.monkey_patch()
# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
sys.path.insert(0, possible_topdir)
from barbican.common import config
from barbican.openstack.common import log
from barbican.openstack.common import service
from barbican import queue
from barbican.queue import keystone_listener
from oslo_config import cfg
def fail(returncode, e):
|
if __name__ == '__main__':
try:
config.parse_args()
config.setup_remote_pydev_debug()
# Import and configure logging.
log.setup('barbican')
LOG = log.getLogger(__name__)
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
CONF = cfg.CONF
queue.init(CONF)
if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'):
service.launch(
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e)
| sys.stderr.write("ERROR: {0}\n".format(e))
sys.exit(returncode) | identifier_body |
collection.js | /* eslint-env mocha */
import { setup, teardown } from "./helpers"
describe("Collection page", () => {
let metaphysics, browser
before(async () => {
;({ metaphysics, browser } = await setup())
metaphysics.post("/", (req, res) => {
res.send(require("./fixtures/metaphysics/collection"))
})
}) | const $ = await browser.page("/collection/kaws-companions")
$.html().should.containEql("KAWS: Companions")
$.html().should.containEql("Collectible Sculptures")
$.html().should.containEql("Brian Donnelly, better known as KAWS")
})
it("renders artwork grid", async () => {
const $ = await browser.page("/collection/kaws-companions")
$.html().should.containEql("Boba Fett Companion")
$.html().should.containEql("Woodstock")
})
}) |
after(teardown)
it("renders a title and header info", async () => { | random_line_split |
webgl_conformance_test.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.core import util
src_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
conformance_path = os.path.join(src_path, 'third_party', 'webgl_conformance')
conformance_harness_script = r"""
var testHarness = {};
testHarness._allTestSucceeded = true;
testHarness._messages = '';
testHarness._failures = 0;
testHarness._finished = false;
testHarness.reportResults = function(success, msg) {
testHarness._allTestSucceeded = testHarness._allTestSucceeded && !!success;
if(!success) {
testHarness._failures++;
if(msg) {
testHarness._messages += msg + "\n";
}
}
};
testHarness.notifyFinished = function() {
testHarness._finished = true;
};
testHarness.navigateToPage = function(src) {
var testFrame = document.getElementById("test-frame");
testFrame.src = src;
};
window.webglTestHarness = testHarness;
window.parent.webglTestHarness = testHarness;
console.log("Harness injected.");
"""
def _DidWebGLTestSucceed(tab):
return tab.EvaluateJavaScript('webglTestHarness._allTestSucceeded')
def _WebGLTestMessages(tab):
return tab.EvaluateJavaScript('webglTestHarness._messages')
class WebGLConformanceTest(page_test.PageTest):
def __init__(self):
super(WebGLConformanceTest, self).__init__('ValidatePage')
def CreatePageSet(self, options):
tests = WebGLConformanceTest._ParseTests('00_test_list.txt', '1.0.1')
page_set_dict = {
'description': 'Executes WebGL conformance tests',
'user_agent_type': 'desktop',
'serving_dirs': [
'../../../../third_party/webgl_conformance'
],
'pages': []
}
pages = page_set_dict['pages']
for test in tests:
pages.append({
'url': 'file:///../../../../third_party/webgl_conformance/' + test,
'script_to_evaluate_on_commit': conformance_harness_script,
'wait_for_javascript_expression': 'webglTestHarness._finished'
})
return page_set.PageSet.FromDict(page_set_dict, __file__)
def ValidatePage(self, page, tab, results):
|
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-webgl')
@staticmethod
def _ParseTests(path, version = None):
test_paths = []
current_dir = os.path.dirname(path)
full_path = os.path.normpath(os.path.join(conformance_path, path))
if not os.path.exists(full_path):
raise Exception('The WebGL conformance test path specified ' +
'does not exist: ' + full_path)
with open(full_path, 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
if line.startswith('//') or line.startswith('#'):
continue
line_tokens = line.split(' ')
i = 0
min_version = None
while i < len(line_tokens):
token = line_tokens[i]
if token == '--min-version':
i += 1
min_version = line_tokens[i]
i += 1
if version and min_version and version < min_version:
continue
test_name = line_tokens[-1]
if '.txt' in test_name:
include_path = os.path.join(current_dir, test_name)
test_paths += WebGLConformanceTest._ParseTests(
include_path, version)
else:
test = os.path.join(current_dir, test_name)
test_paths.append(test)
return test_paths
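# Illustrative sketch (added): the kind of lines _ParseTests() accepts in a
# hypothetical 00_test_list.txt -- '//' and '#' lines are skipped, '.txt'
# entries recurse into nested lists, and '--min-version' entries are dropped
# when the requested version is older:
#
#   // WebGL conformance suite
#   conformance/more/00_test_list.txt
#   --min-version 1.0.2 conformance/glsl/misc/shader-with-long-line.html
#   conformance/canvas/buffer-offscreen-test.html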
| if _DidWebGLTestSucceed(tab):
results.AddSuccess(page)
else:
results.AddFailureMessage(page, _WebGLTestMessages(tab)) | identifier_body |
webgl_conformance_test.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.core import util
src_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
conformance_path = os.path.join(src_path, 'third_party', 'webgl_conformance')
conformance_harness_script = r"""
var testHarness = {};
testHarness._allTestSucceeded = true;
testHarness._messages = '';
testHarness._failures = 0;
testHarness._finished = false;
testHarness.reportResults = function(success, msg) {
testHarness._allTestSucceeded = testHarness._allTestSucceeded && !!success;
if(!success) {
testHarness._failures++;
if(msg) {
testHarness._messages += msg + "\n";
}
}
};
testHarness.notifyFinished = function() {
testHarness._finished = true;
};
testHarness.navigateToPage = function(src) {
var testFrame = document.getElementById("test-frame");
testFrame.src = src;
};
window.webglTestHarness = testHarness;
window.parent.webglTestHarness = testHarness;
console.log("Harness injected.");
"""
def _DidWebGLTestSucceed(tab):
return tab.EvaluateJavaScript('webglTestHarness._allTestSucceeded')
def _WebGLTestMessages(tab):
return tab.EvaluateJavaScript('webglTestHarness._messages')
class WebGLConformanceTest(page_test.PageTest):
def __init__(self):
super(WebGLConformanceTest, self).__init__('ValidatePage')
def CreatePageSet(self, options):
tests = WebGLConformanceTest._ParseTests('00_test_list.txt', '1.0.1')
page_set_dict = {
'description': 'Executes WebGL conformance tests',
'user_agent_type': 'desktop',
'serving_dirs': [
'../../../../third_party/webgl_conformance'
],
'pages': []
}
pages = page_set_dict['pages']
for test in tests:
pages.append({
'url': 'file:///../../../../third_party/webgl_conformance/' + test,
'script_to_evaluate_on_commit': conformance_harness_script,
'wait_for_javascript_expression': 'webglTestHarness._finished'
})
return page_set.PageSet.FromDict(page_set_dict, __file__)
| else:
results.AddFailureMessage(page, _WebGLTestMessages(tab))
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-webgl')
@staticmethod
def _ParseTests(path, version = None):
test_paths = []
current_dir = os.path.dirname(path)
full_path = os.path.normpath(os.path.join(conformance_path, path))
if not os.path.exists(full_path):
raise Exception('The WebGL conformance test path specified ' +
'does not exist: ' + full_path)
with open(full_path, 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
if line.startswith('//') or line.startswith('#'):
continue
line_tokens = line.split(' ')
i = 0
min_version = None
while i < len(line_tokens):
token = line_tokens[i]
if token == '--min-version':
i += 1
min_version = line_tokens[i]
i += 1
if version and min_version and version < min_version:
continue
test_name = line_tokens[-1]
if '.txt' in test_name:
include_path = os.path.join(current_dir, test_name)
test_paths += WebGLConformanceTest._ParseTests(
include_path, version)
else:
test = os.path.join(current_dir, test_name)
test_paths.append(test)
return test_paths | def ValidatePage(self, page, tab, results):
if _DidWebGLTestSucceed(tab):
results.AddSuccess(page) | random_line_split |
webgl_conformance_test.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.core import util
src_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
conformance_path = os.path.join(src_path, 'third_party', 'webgl_conformance')
conformance_harness_script = r"""
var testHarness = {};
testHarness._allTestSucceeded = true;
testHarness._messages = '';
testHarness._failures = 0;
testHarness._finished = false;
testHarness.reportResults = function(success, msg) {
testHarness._allTestSucceeded = testHarness._allTestSucceeded && !!success;
if(!success) {
testHarness._failures++;
if(msg) {
testHarness._messages += msg + "\n";
}
}
};
testHarness.notifyFinished = function() {
testHarness._finished = true;
};
testHarness.navigateToPage = function(src) {
var testFrame = document.getElementById("test-frame");
testFrame.src = src;
};
window.webglTestHarness = testHarness;
window.parent.webglTestHarness = testHarness;
console.log("Harness injected.");
"""
def _DidWebGLTestSucceed(tab):
return tab.EvaluateJavaScript('webglTestHarness._allTestSucceeded')
def _WebGLTestMessages(tab):
return tab.EvaluateJavaScript('webglTestHarness._messages')
class WebGLConformanceTest(page_test.PageTest):
def __init__(self):
super(WebGLConformanceTest, self).__init__('ValidatePage')
def CreatePageSet(self, options):
tests = WebGLConformanceTest._ParseTests('00_test_list.txt', '1.0.1')
page_set_dict = {
'description': 'Executes WebGL conformance tests',
'user_agent_type': 'desktop',
'serving_dirs': [
'../../../../third_party/webgl_conformance'
],
'pages': []
}
pages = page_set_dict['pages']
for test in tests:
pages.append({
'url': 'file:///../../../../third_party/webgl_conformance/' + test,
'script_to_evaluate_on_commit': conformance_harness_script,
'wait_for_javascript_expression': 'webglTestHarness._finished'
})
return page_set.PageSet.FromDict(page_set_dict, __file__)
def ValidatePage(self, page, tab, results):
if _DidWebGLTestSucceed(tab):
results.AddSuccess(page)
else:
|
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-webgl')
@staticmethod
def _ParseTests(path, version = None):
test_paths = []
current_dir = os.path.dirname(path)
full_path = os.path.normpath(os.path.join(conformance_path, path))
if not os.path.exists(full_path):
raise Exception('The WebGL conformance test path specified ' +
'does not exist: ' + full_path)
with open(full_path, 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
if line.startswith('//') or line.startswith('#'):
continue
line_tokens = line.split(' ')
i = 0
min_version = None
while i < len(line_tokens):
token = line_tokens[i]
if token == '--min-version':
i += 1
min_version = line_tokens[i]
i += 1
if version and min_version and version < min_version:
continue
test_name = line_tokens[-1]
if '.txt' in test_name:
include_path = os.path.join(current_dir, test_name)
test_paths += WebGLConformanceTest._ParseTests(
include_path, version)
else:
test = os.path.join(current_dir, test_name)
test_paths.append(test)
return test_paths
| results.AddFailureMessage(page, _WebGLTestMessages(tab)) | conditional_block |
webgl_conformance_test.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.core import util
src_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
conformance_path = os.path.join(src_path, 'third_party', 'webgl_conformance')
conformance_harness_script = r"""
var testHarness = {};
testHarness._allTestSucceeded = true;
testHarness._messages = '';
testHarness._failures = 0;
testHarness._finished = false;
testHarness.reportResults = function(success, msg) {
testHarness._allTestSucceeded = testHarness._allTestSucceeded && !!success;
if(!success) {
testHarness._failures++;
if(msg) {
testHarness._messages += msg + "\n";
}
}
};
testHarness.notifyFinished = function() {
testHarness._finished = true;
};
testHarness.navigateToPage = function(src) {
var testFrame = document.getElementById("test-frame");
testFrame.src = src;
};
window.webglTestHarness = testHarness;
window.parent.webglTestHarness = testHarness;
console.log("Harness injected.");
"""
def | (tab):
return tab.EvaluateJavaScript('webglTestHarness._allTestSucceeded')
def _WebGLTestMessages(tab):
return tab.EvaluateJavaScript('webglTestHarness._messages')
class WebGLConformanceTest(page_test.PageTest):
def __init__(self):
super(WebGLConformanceTest, self).__init__('ValidatePage')
def CreatePageSet(self, options):
tests = WebGLConformanceTest._ParseTests('00_test_list.txt', '1.0.1')
page_set_dict = {
'description': 'Executes WebGL conformance tests',
'user_agent_type': 'desktop',
'serving_dirs': [
'../../../../third_party/webgl_conformance'
],
'pages': []
}
pages = page_set_dict['pages']
for test in tests:
pages.append({
'url': 'file:///../../../../third_party/webgl_conformance/' + test,
'script_to_evaluate_on_commit': conformance_harness_script,
'wait_for_javascript_expression': 'webglTestHarness._finished'
})
return page_set.PageSet.FromDict(page_set_dict, __file__)
def ValidatePage(self, page, tab, results):
if _DidWebGLTestSucceed(tab):
results.AddSuccess(page)
else:
results.AddFailureMessage(page, _WebGLTestMessages(tab))
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-webgl')
@staticmethod
def _ParseTests(path, version = None):
test_paths = []
current_dir = os.path.dirname(path)
full_path = os.path.normpath(os.path.join(conformance_path, path))
if not os.path.exists(full_path):
raise Exception('The WebGL conformance test path specified ' +
'does not exist: ' + full_path)
with open(full_path, 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
if line.startswith('//') or line.startswith('#'):
continue
line_tokens = line.split(' ')
i = 0
min_version = None
while i < len(line_tokens):
token = line_tokens[i]
if token == '--min-version':
i += 1
min_version = line_tokens[i]
i += 1
if version and min_version and version < min_version:
continue
test_name = line_tokens[-1]
if '.txt' in test_name:
include_path = os.path.join(current_dir, test_name)
test_paths += WebGLConformanceTest._ParseTests(
include_path, version)
else:
test = os.path.join(current_dir, test_name)
test_paths.append(test)
return test_paths
| _DidWebGLTestSucceed | identifier_name |
Grid.js | /**
* @jsx React.DOM
* @copyright Prometheus Research, LLC 2014
*/
"use strict";
var React = require('react/addons');
var PropTypes = React.PropTypes;
var Header = require('./Header');
var Viewport = require('./Viewport');
var ColumnMetrics = require('./ColumnMetrics');
var DOMMetrics = require('./DOMMetrics');
var GridScrollMixin = {
componentDidMount() | ,
componentDidUpdate() {
this._onScroll();
},
componentWillMount() {
this._scrollLeft = undefined;
},
componentWillUnmount() {
this._scrollLeft = undefined;
},
onScroll({scrollLeft}) {
if (this._scrollLeft !== scrollLeft) {
this._scrollLeft = scrollLeft;
this._onScroll();
}
},
_onScroll() {
if (this._scrollLeft !== undefined) {
this.refs.header.setScrollLeft(this._scrollLeft);
this.refs.viewport.setScrollLeft(this._scrollLeft);
}
}
};
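// Note (added): GridScrollMixin keeps the fixed header and the scrollable
// viewport horizontally in sync -- onScroll() caches the viewport's
// scrollLeft, and _onScroll() pushes the cached offset into both children.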
var Grid = React.createClass({
mixins: [
GridScrollMixin,
ColumnMetrics.Mixin,
DOMMetrics.MetricsComputatorMixin
],
propTypes: {
rows: PropTypes.oneOfType([PropTypes.array, PropTypes.func]).isRequired,
columns: PropTypes.array.isRequired
},
getStyle: function(){
return{
overflowX: 'scroll',
overflowY: 'hidden',
outline: 0,
position: 'relative',
minHeight: this.props.minHeight
}
},
render() {
var headerRows = this.props.headerRows || [{ref : 'row'}];
return (
<div {...this.props} style={this.getStyle()} className="react-grid-Grid">
<Header
ref="header"
columns={this.state.columns}
onColumnResize={this.onColumnResize}
height={this.props.rowHeight}
totalWidth={this.DOMMetrics.gridWidth()}
headerRows={headerRows}
/>
<Viewport
ref="viewport"
width={this.state.columns.width}
rowHeight={this.props.rowHeight}
rowRenderer={this.props.rowRenderer}
cellRenderer={this.props.cellRenderer}
rows={this.props.rows}
selectedRows={this.props.selectedRows}
expandedRows={this.props.expandedRows}
length={this.props.length}
columns={this.state.columns}
totalWidth={this.DOMMetrics.gridWidth()}
onScroll={this.onScroll}
onRows={this.props.onRows}
rowOffsetHeight={this.props.rowOffsetHeight || this.props.rowHeight * headerRows.length}
/>
</div>
);
},
getDefaultProps() {
return {
rowHeight: 35,
minHeight: 350
};
},
});
module.exports = Grid;
| {
this._scrollLeft = this.refs.viewport.getScroll().scrollLeft;
this._onScroll();
} | identifier_body |
Grid.js | /**
* @jsx React.DOM
* @copyright Prometheus Research, LLC 2014
*/
"use strict";
var React = require('react/addons');
var PropTypes = React.PropTypes;
var Header = require('./Header');
var Viewport = require('./Viewport');
var ColumnMetrics = require('./ColumnMetrics');
var DOMMetrics = require('./DOMMetrics');
var GridScrollMixin = {
componentDidMount() {
this._scrollLeft = this.refs.viewport.getScroll().scrollLeft;
this._onScroll();
},
componentDidUpdate() {
this._onScroll();
},
| () {
this._scrollLeft = undefined;
},
componentWillUnmount() {
this._scrollLeft = undefined;
},
onScroll({scrollLeft}) {
if (this._scrollLeft !== scrollLeft) {
this._scrollLeft = scrollLeft;
this._onScroll();
}
},
_onScroll() {
if (this._scrollLeft !== undefined) {
this.refs.header.setScrollLeft(this._scrollLeft);
this.refs.viewport.setScrollLeft(this._scrollLeft);
}
}
};
var Grid = React.createClass({
mixins: [
GridScrollMixin,
ColumnMetrics.Mixin,
DOMMetrics.MetricsComputatorMixin
],
propTypes: {
rows: PropTypes.oneOfType([PropTypes.array, PropTypes.func]).isRequired,
columns: PropTypes.array.isRequired
},
getStyle: function(){
return{
overflowX: 'scroll',
overflowY: 'hidden',
outline: 0,
position: 'relative',
minHeight: this.props.minHeight
}
},
render() {
var headerRows = this.props.headerRows || [{ref : 'row'}];
return (
<div {...this.props} style={this.getStyle()} className="react-grid-Grid">
<Header
ref="header"
columns={this.state.columns}
onColumnResize={this.onColumnResize}
height={this.props.rowHeight}
totalWidth={this.DOMMetrics.gridWidth()}
headerRows={headerRows}
/>
<Viewport
ref="viewport"
width={this.state.columns.width}
rowHeight={this.props.rowHeight}
rowRenderer={this.props.rowRenderer}
cellRenderer={this.props.cellRenderer}
rows={this.props.rows}
selectedRows={this.props.selectedRows}
expandedRows={this.props.expandedRows}
length={this.props.length}
columns={this.state.columns}
totalWidth={this.DOMMetrics.gridWidth()}
onScroll={this.onScroll}
onRows={this.props.onRows}
rowOffsetHeight={this.props.rowOffsetHeight || this.props.rowHeight * headerRows.length}
/>
</div>
);
},
getDefaultProps() {
return {
rowHeight: 35,
minHeight: 350
};
},
});
module.exports = Grid;
| componentWillMount | identifier_name |
Grid.js | /**
* @jsx React.DOM
* @copyright Prometheus Research, LLC 2014
*/
"use strict";
var React = require('react/addons');
var PropTypes = React.PropTypes;
var Header = require('./Header');
var Viewport = require('./Viewport');
var ColumnMetrics = require('./ColumnMetrics');
var DOMMetrics = require('./DOMMetrics');
var GridScrollMixin = {
componentDidMount() {
this._scrollLeft = this.refs.viewport.getScroll().scrollLeft;
this._onScroll();
},
componentDidUpdate() {
this._onScroll();
},
componentWillMount() {
this._scrollLeft = undefined;
},
componentWillUnmount() {
this._scrollLeft = undefined;
},
onScroll({scrollLeft}) {
if (this._scrollLeft !== scrollLeft) |
},
_onScroll() {
if (this._scrollLeft !== undefined) {
this.refs.header.setScrollLeft(this._scrollLeft);
this.refs.viewport.setScrollLeft(this._scrollLeft);
}
}
};
var Grid = React.createClass({
mixins: [
GridScrollMixin,
ColumnMetrics.Mixin,
DOMMetrics.MetricsComputatorMixin
],
propTypes: {
rows: PropTypes.oneOfType([PropTypes.array, PropTypes.func]).isRequired,
columns: PropTypes.array.isRequired
},
getStyle: function(){
return{
overflowX: 'scroll',
overflowY: 'hidden',
outline: 0,
position: 'relative',
minHeight: this.props.minHeight
}
},
render() {
var headerRows = this.props.headerRows || [{ref : 'row'}];
return (
<div {...this.props} style={this.getStyle()} className="react-grid-Grid">
<Header
ref="header"
columns={this.state.columns}
onColumnResize={this.onColumnResize}
height={this.props.rowHeight}
totalWidth={this.DOMMetrics.gridWidth()}
headerRows={headerRows}
/>
<Viewport
ref="viewport"
width={this.state.columns.width}
rowHeight={this.props.rowHeight}
rowRenderer={this.props.rowRenderer}
cellRenderer={this.props.cellRenderer}
rows={this.props.rows}
selectedRows={this.props.selectedRows}
expandedRows={this.props.expandedRows}
length={this.props.length}
columns={this.state.columns}
totalWidth={this.DOMMetrics.gridWidth()}
onScroll={this.onScroll}
onRows={this.props.onRows}
rowOffsetHeight={this.props.rowOffsetHeight || this.props.rowHeight * headerRows.length}
/>
</div>
);
},
getDefaultProps() {
return {
rowHeight: 35,
minHeight: 350
};
},
});
module.exports = Grid;
| {
this._scrollLeft = scrollLeft;
this._onScroll();
} | conditional_block |
Grid.js | /**
* @jsx React.DOM
* @copyright Prometheus Research, LLC 2014
*/
"use strict";
var React = require('react/addons');
var PropTypes = React.PropTypes;
var Header = require('./Header');
var Viewport = require('./Viewport');
var ColumnMetrics = require('./ColumnMetrics');
var DOMMetrics = require('./DOMMetrics');
var GridScrollMixin = {
componentDidMount() {
this._scrollLeft = this.refs.viewport.getScroll().scrollLeft;
this._onScroll();
},
componentDidUpdate() {
this._onScroll();
},
componentWillMount() {
this._scrollLeft = undefined;
},
componentWillUnmount() {
this._scrollLeft = undefined;
},
onScroll({scrollLeft}) {
if (this._scrollLeft !== scrollLeft) {
this._scrollLeft = scrollLeft;
this._onScroll();
}
},
_onScroll() {
if (this._scrollLeft !== undefined) {
this.refs.header.setScrollLeft(this._scrollLeft);
this.refs.viewport.setScrollLeft(this._scrollLeft);
}
}
};
var Grid = React.createClass({
mixins: [
GridScrollMixin,
ColumnMetrics.Mixin,
DOMMetrics.MetricsComputatorMixin
],
propTypes: { | getStyle: function(){
return{
overflowX: 'scroll',
overflowY: 'hidden',
outline: 0,
position: 'relative',
minHeight: this.props.minHeight
}
},
render() {
var headerRows = this.props.headerRows || [{ref : 'row'}];
return (
<div {...this.props} style={this.getStyle()} className="react-grid-Grid">
<Header
ref="header"
columns={this.state.columns}
onColumnResize={this.onColumnResize}
height={this.props.rowHeight}
totalWidth={this.DOMMetrics.gridWidth()}
headerRows={headerRows}
/>
<Viewport
ref="viewport"
width={this.state.columns.width}
rowHeight={this.props.rowHeight}
rowRenderer={this.props.rowRenderer}
cellRenderer={this.props.cellRenderer}
rows={this.props.rows}
selectedRows={this.props.selectedRows}
expandedRows={this.props.expandedRows}
length={this.props.length}
columns={this.state.columns}
totalWidth={this.DOMMetrics.gridWidth()}
onScroll={this.onScroll}
onRows={this.props.onRows}
rowOffsetHeight={this.props.rowOffsetHeight || this.props.rowHeight * headerRows.length}
/>
</div>
);
},
getDefaultProps() {
return {
rowHeight: 35,
minHeight: 350
};
},
});
module.exports = Grid; | rows: PropTypes.oneOfType([PropTypes.array, PropTypes.func]).isRequired,
columns: PropTypes.array.isRequired
},
| random_line_split |
fancyclick.js | (function ($, window, document, undefined) {
'use strict';
var settings = {
container: '.main',
animationMethod: 'replace',
duration: 1000,
preload: false,
anchors: 'a',
blacklist: '.no-fancyclick',
whitelist: '',
onLoadStart: function () {
//$('body').addClass('fancy-in-transition');
},
onLoadEnd: function () {
//$('body').addClass('fancy-in-transition');
}
};
/**
* Initialize plugin with specified options
* @param opts
*/
function init(opts) {
// initialize config
for (var key in opts) {
if (settings.hasOwnProperty(key)) {
settings[key] = opts[key];
}
}
attachLoader();
history.pushState({}, '', window.location.href);
$(window).on('popstate', stateChange);
}
/**
* Manage state changes, if a user navigates back or forward
* load the page from history
* @param e
*/
function stateChange(e) {
if (e.originalEvent.state !== null) {
loadPage(e, true);
} else {
loadPage(e);
}
}
/**
* Determine if the url is local or external
* As seen in Fastest way to detect external URLs
* (http://stackoverflow.com/questions/6238351/fastest-way-to-detect-external-urls)
* @param url
* @returns {boolean}
*/
function isExternal(url) {
var match = url.match(/^([^:\/?#]+:)?(?:\/\/([^\/?#]*))?([^?#]+)?(\?[^#]*)?(#.*)?/);
if (typeof match[1] === 'string' && match[1].length > 0 && match[1].toLowerCase() !== location.protocol) {
return true;
}
if (typeof match[2] === 'string' && match[2].length > 0 && match[2].replace(new RegExp(':(' + {'http:': 80, 'https:': 443}[location.protocol] + ')?$'), '') !== location.host) |
return false;
}
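    /**
     * Attach the AJAX loader to every anchor on the page that points
     * to a local URL; external links keep their default behavior
     */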
function attachLoader() {
var links = $('a');
$.each(links, function (key, element) {
var $element = $(element);
if (!isExternal($element.attr('href'))) {
$element.click(loadPage);
}
});
}
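    /**
     * Fetch the target page over AJAX and swap it into the configured
     * container, using either the 'replace' path or the enter/leave animation
     * @param e click or popstate event
     * @param changeBack true when the page is being restored from history
     */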
function loadPage(e, changeBack) {
e.preventDefault();
var durationFn = getComputedAnimationDuration();
var duration = durationFn() || settings.duration;
var href = e.currentTarget.href || window.location.href;
var element = $(settings.container);
// fire loading start callback
settings.onLoadStart();
$.ajax({
url: href,
dataType: 'html'
}).then(function (responseText) {
if (responseText) {
var dom = $('<div>').append($.parseHTML(responseText));
updateTitle(dom.find('title').text());
if (settings.animationMethod === 'replace') {
var html = dom.find(settings.container).html();
element.html(html);
setTimeout(function () {
settings.onLoadEnd();
}, duration);
} else {
element.addClass('fancy-leave');
var afterElement = dom.find(settings.container).addClass('fancy-enter');
element.after(afterElement);
setTimeout(function () {
element.remove();
afterElement.removeClass('fancy-enter');
settings.onLoadEnd();
}, duration);
}
                // push a new history entry on normal navigation; skip it when restoring from history
if (!changeBack) {
history.pushState({}, '', href);
}
}
}, function (error) {
// fire the load end callback
settings.onLoadEnd();
// log the error
console.error(error);
});
}
/**
* Update the title of the page
* @param title
*/
function updateTitle(title) {
$('title').text(title);
}
/**
* Get the computed animation duration for an element
*/
function getComputedAnimationDuration() {
var element = $('<div>')
.css('visibility', 'hidden')
.addClass('fancy-enter')
.appendTo('body');
var time = 0;
setTimeout(function() {
time += (parseFloat(getComputedStyle(element[0]).animationDuration));
            time += (parseFloat(getComputedStyle(element[0], ':after').animationDuration));
time += (parseFloat(getComputedStyle(element[0], ':before').animationDuration));
element.remove();
},0);
return function() {
return time;
};
}
window.fancyClick = {
init: init
};
}(jQuery, window, document, undefined)); | {
return true;
} | conditional_block |
fancyclick.js | (function ($, window, document, undefined) {
'use strict';
var settings = {
container: '.main',
animationMethod: 'replace',
duration: 1000,
preload: false,
anchors: 'a',
blacklist: '.no-fancyclick',
whitelist: '',
onLoadStart: function () {
//$('body').addClass('fancy-in-transition');
},
onLoadEnd: function () {
//$('body').addClass('fancy-in-transition');
}
};
/**
     * Initialize plugin with specified options
* @param opts
*/
function init(opts) {
// initialize config
for (var key in opts) {
if (settings.hasOwnProperty(key)) {
settings[key] = opts[key];
}
}
attachLoader();
history.pushState({}, '', window.location.href);
$(window).on('popstate', stateChange);
}
/**
* Manage state changes, if a user navigates back or forward
* load the page from history
* @param e
*/
function stateChange(e) {
if (e.originalEvent.state !== null) {
loadPage(e, true);
} else {
loadPage(e);
}
}
/**
* Determine if the url is local or external
* As seen in Fastest way to detect external URLs
* (http://stackoverflow.com/questions/6238351/fastest-way-to-detect-external-urls)
* @param url
* @returns {boolean}
*/
function | (url) {
var match = url.match(/^([^:\/?#]+:)?(?:\/\/([^\/?#]*))?([^?#]+)?(\?[^#]*)?(#.*)?/);
if (typeof match[1] === 'string' && match[1].length > 0 && match[1].toLowerCase() !== location.protocol) {
return true;
}
if (typeof match[2] === 'string' && match[2].length > 0 && match[2].replace(new RegExp(':(' + {'http:': 80, 'https:': 443}[location.protocol] + ')?$'), '') !== location.host) {
return true;
}
return false;
}
function attachLoader() {
var links = $('a');
$.each(links, function (key, element) {
var $element = $(element);
if (!isExternal($element.attr('href'))) {
$element.click(loadPage);
}
});
}
function loadPage(e, changeBack) {
e.preventDefault();
var durationFn = getComputedAnimationDuration();
var duration = durationFn() || settings.duration;
var href = e.currentTarget.href || window.location.href;
var element = $(settings.container);
// fire loading start callback
settings.onLoadStart();
$.ajax({
url: href,
dataType: 'html'
}).then(function (responseText) {
if (responseText) {
var dom = $('<div>').append($.parseHTML(responseText));
updateTitle(dom.find('title').text());
if (settings.animationMethod === 'replace') {
var html = dom.find(settings.container).html();
element.html(html);
setTimeout(function () {
settings.onLoadEnd();
}, duration);
} else {
element.addClass('fancy-leave');
var afterElement = dom.find(settings.container).addClass('fancy-enter');
element.after(afterElement);
setTimeout(function () {
element.remove();
afterElement.removeClass('fancy-enter');
settings.onLoadEnd();
}, duration);
}
                // push a new history entry on normal navigation; skip it when restoring from history
if (!changeBack) {
history.pushState({}, '', href);
}
}
}, function (error) {
// fire the load end callback
settings.onLoadEnd();
// log the error
console.error(error);
});
}
/**
* Update the title of the page
* @param title
*/
function updateTitle(title) {
$('title').text(title);
}
/**
* Get the computed animation duration for an element
*/
function getComputedAnimationDuration() {
var element = $('<div>')
.css('visibility', 'hidden')
.addClass('fancy-enter')
.appendTo('body');
var time = 0;
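        // measure on the next tick so the element is attached and its
        // animation styles are computed before reading animationDuration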
setTimeout(function() {
time += (parseFloat(getComputedStyle(element[0]).animationDuration));
            time += (parseFloat(getComputedStyle(element[0], ':after').animationDuration));
time += (parseFloat(getComputedStyle(element[0], ':before').animationDuration));
element.remove();
},0);
return function() {
return time;
};
}
window.fancyClick = {
init: init
};
}(jQuery, window, document, undefined)); | isExternal | identifier_name |
fancyclick.js | (function ($, window, document, undefined) {
'use strict';
var settings = {
container: '.main',
animationMethod: 'replace',
duration: 1000,
preload: false,
anchors: 'a',
blacklist: '.no-fancyclick',
whitelist: '',
onLoadStart: function () {
//$('body').addClass('fancy-in-transition');
},
onLoadEnd: function () {
//$('body').addClass('fancy-in-transition');
}
};
/**
     * Initialize plugin with specified options
* @param opts
*/
function init(opts) {
// initialize config
for (var key in opts) {
if (settings.hasOwnProperty(key)) {
settings[key] = opts[key];
}
}
attachLoader();
history.pushState({}, '', window.location.href);
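        // seed the history stack so the first popstate carries a non-null state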
$(window).on('popstate', stateChange);
}
/**
* Manage state changes, if a user navigates back or forward
* load the page from history
* @param e
*/
function stateChange(e) {
if (e.originalEvent.state !== null) {
loadPage(e, true);
} else {
loadPage(e);
}
}
/**
* Determine if the url is local or external
* As seen in Fastest way to detect external URLs
* (http://stackoverflow.com/questions/6238351/fastest-way-to-detect-external-urls)
* @param url
* @returns {boolean}
*/
function isExternal(url) {
var match = url.match(/^([^:\/?#]+:)?(?:\/\/([^\/?#]*))?([^?#]+)?(\?[^#]*)?(#.*)?/);
if (typeof match[1] === 'string' && match[1].length > 0 && match[1].toLowerCase() !== location.protocol) {
return true;
}
if (typeof match[2] === 'string' && match[2].length > 0 && match[2].replace(new RegExp(':(' + {'http:': 80, 'https:': 443}[location.protocol] + ')?$'), '') !== location.host) {
return true;
}
return false;
}
function attachLoader() {
var links = $('a');
$.each(links, function (key, element) {
var $element = $(element);
if (!isExternal($element.attr('href'))) {
$element.click(loadPage);
}
});
}
function loadPage(e, changeBack) |
/**
* Update the title of the page
* @param title
*/
function updateTitle(title) {
$('title').text(title);
}
/**
* Get the computed animation duration for an element
*/
function getComputedAnimationDuration() {
var element = $('<div>')
.css('visibility', 'hidden')
.addClass('fancy-enter')
.appendTo('body');
var time = 0;
setTimeout(function() {
time += (parseFloat(getComputedStyle(element[0]).animationDuration));
            time += (parseFloat(getComputedStyle(element[0], ':after').animationDuration));
time += (parseFloat(getComputedStyle(element[0], ':before').animationDuration));
element.remove();
},0);
return function() {
return time;
};
}
window.fancyClick = {
init: init
};
}(jQuery, window, document, undefined)); | {
e.preventDefault();
var durationFn = getComputedAnimationDuration();
var duration = durationFn() || settings.duration;
var href = e.currentTarget.href || window.location.href;
var element = $(settings.container);
// fire loading start callback
settings.onLoadStart();
$.ajax({
url: href,
dataType: 'html'
}).then(function (responseText) {
if (responseText) {
var dom = $('<div>').append($.parseHTML(responseText));
updateTitle(dom.find('title').text());
if (settings.animationMethod === 'replace') {
var html = dom.find(settings.container).html();
element.html(html);
setTimeout(function () {
settings.onLoadEnd();
}, duration);
} else {
element.addClass('fancy-leave');
var afterElement = dom.find(settings.container).addClass('fancy-enter');
element.after(afterElement);
setTimeout(function () {
element.remove();
afterElement.removeClass('fancy-enter');
settings.onLoadEnd();
}, duration);
}
                // push a new history entry on normal navigation; skip it when restoring from history
if (!changeBack) {
history.pushState({}, '', href);
}
}
}, function (error) {
// fire the load end callback
settings.onLoadEnd();
// log the error
console.error(error);
});
} | identifier_body |
fancyclick.js | (function ($, window, document, undefined) {
'use strict';
var settings = {
container: '.main',
animationMethod: 'replace',
duration: 1000,
preload: false,
anchors: 'a',
blacklist: '.no-fancyclick',
whitelist: '',
onLoadStart: function () {
//$('body').addClass('fancy-in-transition');
},
onLoadEnd: function () {
//$('body').addClass('fancy-in-transition');
}
};
/**
     * Initialize plugin with specified options
* @param opts
*/
function init(opts) {
// initialize config
for (var key in opts) {
if (settings.hasOwnProperty(key)) {
settings[key] = opts[key];
}
}
attachLoader();
history.pushState({}, '', window.location.href);
$(window).on('popstate', stateChange);
}
/**
* Manage state changes, if a user navigates back or forward
* load the page from history
* @param e
*/
function stateChange(e) {
if (e.originalEvent.state !== null) {
loadPage(e, true);
} else {
loadPage(e);
}
}
/**
* Determine if the url is local or external
* As seen in Fastest way to detect external URLs
* (http://stackoverflow.com/questions/6238351/fastest-way-to-detect-external-urls)
* @param url
* @returns {boolean}
*/
function isExternal(url) {
var match = url.match(/^([^:\/?#]+:)?(?:\/\/([^\/?#]*))?([^?#]+)?(\?[^#]*)?(#.*)?/);
if (typeof match[1] === 'string' && match[1].length > 0 && match[1].toLowerCase() !== location.protocol) {
return true;
}
if (typeof match[2] === 'string' && match[2].length > 0 && match[2].replace(new RegExp(':(' + {'http:': 80, 'https:': 443}[location.protocol] + ')?$'), '') !== location.host) {
return true;
}
return false;
}
function attachLoader() { | var links = $('a');
$.each(links, function (key, element) {
var $element = $(element);
if (!isExternal($element.attr('href'))) {
$element.click(loadPage);
}
});
}
function loadPage(e, changeBack) {
e.preventDefault();
var durationFn = getComputedAnimationDuration();
var duration = durationFn() || settings.duration;
var href = e.currentTarget.href || window.location.href;
var element = $(settings.container);
// fire loading start callback
settings.onLoadStart();
$.ajax({
url: href,
dataType: 'html'
}).then(function (responseText) {
if (responseText) {
var dom = $('<div>').append($.parseHTML(responseText));
updateTitle(dom.find('title').text());
if (settings.animationMethod === 'replace') {
var html = dom.find(settings.container).html();
element.html(html);
setTimeout(function () {
settings.onLoadEnd();
}, duration);
} else {
element.addClass('fancy-leave');
var afterElement = dom.find(settings.container).addClass('fancy-enter');
element.after(afterElement);
setTimeout(function () {
element.remove();
afterElement.removeClass('fancy-enter');
settings.onLoadEnd();
}, duration);
}
                // push a new history entry on normal navigation; skip it when restoring from history
if (!changeBack) {
history.pushState({}, '', href);
}
}
}, function (error) {
// fire the load end callback
settings.onLoadEnd();
// log the error
console.error(error);
});
}
/**
* Update the title of the page
* @param title
*/
function updateTitle(title) {
$('title').text(title);
}
/**
* Get the computed animation duration for an element
*/
function getComputedAnimationDuration() {
var element = $('<div>')
.css('visibility', 'hidden')
.addClass('fancy-enter')
.appendTo('body');
var time = 0;
setTimeout(function() {
time += (parseFloat(getComputedStyle(element[0]).animationDuration));
            time += (parseFloat(getComputedStyle(element[0], ':after').animationDuration));
time += (parseFloat(getComputedStyle(element[0], ':before').animationDuration));
element.remove();
},0);
return function() {
return time;
};
}
window.fancyClick = {
init: init
};
}(jQuery, window, document, undefined)); | random_line_split |
|
use-from-trait-xc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:use_from_trait_xc.rs
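// Compile-fail test: `use` paths into a trait or an inherent impl are rejected,
// and the `//~ ERROR` annotations below assert the expected diagnostics.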
extern crate use_from_trait_xc; |
fn main() {
} |
use use_from_trait_xc::Trait::foo; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import
use use_from_trait_xc::Foo::new; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import | random_line_split |
use-from-trait-xc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:use_from_trait_xc.rs
extern crate use_from_trait_xc;
use use_from_trait_xc::Trait::foo; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import
use use_from_trait_xc::Foo::new; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import
fn main() | {
} | identifier_body |
|
use-from-trait-xc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:use_from_trait_xc.rs
extern crate use_from_trait_xc;
use use_from_trait_xc::Trait::foo; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import
use use_from_trait_xc::Foo::new; //~ ERROR cannot import from a trait or type implementation
//~^ ERROR failed to resolve import
fn | () {
}
| main | identifier_name |
layout_collection.rs | extern crate wtftw;
use self::wtftw::config::GeneralConfig;
use self::wtftw::core::stack::Stack;
use self::wtftw::layout::Layout;
use self::wtftw::layout::LayoutMessage;
use self::wtftw::window_system::Rectangle;
use self::wtftw::window_system::Window;
use self::wtftw::window_system::WindowSystem;
pub struct LayoutCollection {
pub layouts: Vec<Box<Layout>>, | }
impl LayoutCollection {
pub fn new(layouts: Vec<Box<Layout>>) -> Box<Layout> {
Box::new(LayoutCollection {
layouts: layouts,
current: 0
})
}
}
impl Layout for LayoutCollection {
fn apply_layout(&mut self, window_system: &WindowSystem, screen: Rectangle, config: &GeneralConfig,
stack: &Option<Stack<Window>>) -> Vec<(Window, Rectangle)> {
self.layouts[self.current].apply_layout(window_system, screen, config, stack)
}
fn apply_message(&mut self, message: LayoutMessage, window_system: &WindowSystem,
stack: &Option<Stack<Window>>, config: &GeneralConfig) -> bool {
match message {
LayoutMessage::Next => {
self.layouts[self.current].unhook(window_system, stack, config);
self.current = (self.current + 1) % self.layouts.len();
true
}
LayoutMessage::Prev => {
self.layouts[self.current].unhook(window_system, stack, config);
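                // add len - 1 instead of subtracting 1 so the unsigned
                // index can never underflow when wrapping backwards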
self.current = (self.current + (self.layouts.len() - 1)) % self.layouts.len();
true
}
_ => self.layouts[self.current].apply_message(message, window_system, stack,
config)
}
}
fn description(&self) -> String {
self.layouts[self.current].description()
}
fn copy(&self) -> Box<Layout> {
Box::new(LayoutCollection {
current: self.current,
layouts: self.layouts.iter().map(|x| x.copy()).collect()
})
}
} | pub current: usize | random_line_split |
layout_collection.rs | extern crate wtftw;
use self::wtftw::config::GeneralConfig;
use self::wtftw::core::stack::Stack;
use self::wtftw::layout::Layout;
use self::wtftw::layout::LayoutMessage;
use self::wtftw::window_system::Rectangle;
use self::wtftw::window_system::Window;
use self::wtftw::window_system::WindowSystem;
pub struct | {
pub layouts: Vec<Box<Layout>>,
pub current: usize
}
impl LayoutCollection {
pub fn new(layouts: Vec<Box<Layout>>) -> Box<Layout> {
Box::new(LayoutCollection {
layouts: layouts,
current: 0
})
}
}
impl Layout for LayoutCollection {
fn apply_layout(&mut self, window_system: &WindowSystem, screen: Rectangle, config: &GeneralConfig,
stack: &Option<Stack<Window>>) -> Vec<(Window, Rectangle)> {
self.layouts[self.current].apply_layout(window_system, screen, config, stack)
}
fn apply_message(&mut self, message: LayoutMessage, window_system: &WindowSystem,
stack: &Option<Stack<Window>>, config: &GeneralConfig) -> bool {
match message {
LayoutMessage::Next => {
self.layouts[self.current].unhook(window_system, stack, config);
self.current = (self.current + 1) % self.layouts.len();
true
}
LayoutMessage::Prev => {
self.layouts[self.current].unhook(window_system, stack, config);
self.current = (self.current + (self.layouts.len() - 1)) % self.layouts.len();
true
}
_ => self.layouts[self.current].apply_message(message, window_system, stack,
config)
}
}
fn description(&self) -> String {
self.layouts[self.current].description()
}
fn copy(&self) -> Box<Layout> {
Box::new(LayoutCollection {
current: self.current,
layouts: self.layouts.iter().map(|x| x.copy()).collect()
})
}
}
| LayoutCollection | identifier_name |
layout_collection.rs | extern crate wtftw;
use self::wtftw::config::GeneralConfig;
use self::wtftw::core::stack::Stack;
use self::wtftw::layout::Layout;
use self::wtftw::layout::LayoutMessage;
use self::wtftw::window_system::Rectangle;
use self::wtftw::window_system::Window;
use self::wtftw::window_system::WindowSystem;
pub struct LayoutCollection {
pub layouts: Vec<Box<Layout>>,
pub current: usize
}
impl LayoutCollection {
pub fn new(layouts: Vec<Box<Layout>>) -> Box<Layout> {
Box::new(LayoutCollection {
layouts: layouts,
current: 0
})
}
}
impl Layout for LayoutCollection {
fn apply_layout(&mut self, window_system: &WindowSystem, screen: Rectangle, config: &GeneralConfig,
stack: &Option<Stack<Window>>) -> Vec<(Window, Rectangle)> {
self.layouts[self.current].apply_layout(window_system, screen, config, stack)
}
fn apply_message(&mut self, message: LayoutMessage, window_system: &WindowSystem,
stack: &Option<Stack<Window>>, config: &GeneralConfig) -> bool {
match message {
LayoutMessage::Next => {
self.layouts[self.current].unhook(window_system, stack, config);
self.current = (self.current + 1) % self.layouts.len();
true
}
LayoutMessage::Prev => |
_ => self.layouts[self.current].apply_message(message, window_system, stack,
config)
}
}
fn description(&self) -> String {
self.layouts[self.current].description()
}
fn copy(&self) -> Box<Layout> {
Box::new(LayoutCollection {
current: self.current,
layouts: self.layouts.iter().map(|x| x.copy()).collect()
})
}
}
| {
self.layouts[self.current].unhook(window_system, stack, config);
self.current = (self.current + (self.layouts.len() - 1)) % self.layouts.len();
true
} | conditional_block |
conversions_tests.rs | use std::convert::TryInto;
use hashing;
use crate::gen::build::bazel::remote::execution::v2 as remexec;
#[test]
fn from_our_digest() {
let our_digest = &hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
let converted: remexec::Digest = our_digest.into();
let want = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
assert_eq!(converted, want);
}
#[test]
fn from_bazel_digest() { | let bazel_digest = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let want = hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
assert_eq!(converted, Ok(want));
}
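// A one-character hash cannot be a valid hex fingerprint, so conversion must fail.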
#[test]
fn from_bad_bazel_digest() {
let bazel_digest = remexec::Digest {
hash: "0".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let err = converted.expect_err("Want Err converting bad digest");
assert!(
err.starts_with("Bad fingerprint in Digest \"0\":"),
"Bad error message: {}",
err
);
} | random_line_split |
|
conversions_tests.rs | use std::convert::TryInto;
use hashing;
use crate::gen::build::bazel::remote::execution::v2 as remexec;
#[test]
fn from_our_digest() |
#[test]
fn from_bazel_digest() {
let bazel_digest = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let want = hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
assert_eq!(converted, Ok(want));
}
#[test]
fn from_bad_bazel_digest() {
let bazel_digest = remexec::Digest {
hash: "0".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let err = converted.expect_err("Want Err converting bad digest");
assert!(
err.starts_with("Bad fingerprint in Digest \"0\":"),
"Bad error message: {}",
err
);
}
| {
let our_digest = &hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
let converted: remexec::Digest = our_digest.into();
let want = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
assert_eq!(converted, want);
} | identifier_body |
conversions_tests.rs | use std::convert::TryInto;
use hashing;
use crate::gen::build::bazel::remote::execution::v2 as remexec;
#[test]
fn from_our_digest() {
let our_digest = &hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
let converted: remexec::Digest = our_digest.into();
let want = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
assert_eq!(converted, want);
}
#[test]
fn | () {
let bazel_digest = remexec::Digest {
hash: "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let want = hashing::Digest::new(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
)
.unwrap(),
10,
);
assert_eq!(converted, Ok(want));
}
#[test]
fn from_bad_bazel_digest() {
let bazel_digest = remexec::Digest {
hash: "0".to_owned(),
size_bytes: 10,
};
let converted: Result<hashing::Digest, String> = (&bazel_digest).try_into();
let err = converted.expect_err("Want Err converting bad digest");
assert!(
err.starts_with("Bad fingerprint in Digest \"0\":"),
"Bad error message: {}",
err
);
}
| from_bazel_digest | identifier_name |
siphash24.rs | //! `SipHash-2-4`
use ffi;
use libc::c_ulonglong;
use randombytes::randombytes_into;
pub const HASHBYTES: usize = ffi::crypto_shorthash_siphash24_BYTES;
pub const KEYBYTES: usize = ffi::crypto_shorthash_siphash24_KEYBYTES;
/// Digest structure
#[derive(Copy)]
pub struct Digest(pub [u8; HASHBYTES]);
newtype_clone!(Digest);
newtype_impl!(Digest, HASHBYTES);
/// Key
///
/// When a `Key` goes out of scope its contents
/// will be zeroed out
pub struct Key(pub [u8; KEYBYTES]);
newtype_drop!(Key);
newtype_clone!(Key);
newtype_impl!(Key, KEYBYTES);
/// `gen_key()` randomly generates a key for shorthash
///
/// THREAD SAFETY: `gen_key()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn gen_key() -> Key {
let mut k = [0; KEYBYTES];
randombytes_into(&mut k);
Key(k)
}
/// `shorthash` hashes a message `m` under a key `k`. It
/// returns a hash `h`.
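/// Example (sketch; assumes `sodiumoxide::init()` has been called first):
///
/// let key = gen_key();
/// let Digest(bytes) = shorthash(b"some data", &key);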
pub fn shorthash(m: &[u8],
&Key(ref k): &Key) -> Digest {
unsafe {
let mut h = [0; HASHBYTES];
ffi::crypto_shorthash_siphash24(&mut h, m.as_ptr(),
m.len() as c_ulonglong,
k);
Digest(h)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_vectors() {
let maxlen = 64;
let mut m = Vec::with_capacity(64);
for i in (0usize..64) {
m.push(i as u8);
}
let h_expecteds = [[0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72]
,[0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74]
,[0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d]
,[0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85]
,[0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf]
,[0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18]
,[0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb]
,[0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab]
,[0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93]
,[0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e]
,[0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a]
,[0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4]
,[0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75]
,[0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14]
,[0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7]
,[0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1]
,[0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f]
,[0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69]
,[0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b]
,[0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb]
,[0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe]
,[0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0]
,[0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93]
,[0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8]
,[0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8]
,[0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc]
,[0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17]
,[0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f]
,[0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde]
,[0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6]
,[0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad]
,[0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32]
,[0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71]
,[0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7]
,[0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12]
,[0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15]
,[0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31]
,[0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02]
,[0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca]
,[0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a]
,[0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e]
,[0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad]
,[0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18]
,[0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4]
,[0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9]
,[0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9]
,[0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb]
,[0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0]
,[0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6]
,[0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7]
,[0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee]
,[0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1]
,[0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a]
,[0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81]
,[0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f]
,[0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24]
,[0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7]
,[0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea]
,[0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60]
,[0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66]
,[0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c]
,[0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f]
,[0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5]
,[0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95]];
let k = Key([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
for i in (0usize..maxlen) {
let Digest(h) = shorthash(&m[..i], &k);
assert!(h == h_expecteds[i]);
}
}
}
#[cfg(feature = "benchmarks")]
#[cfg(test)]
mod bench {
extern crate test;
use randombytes::randombytes;
use super::*;
const BENCH_SIZES: [usize; 14] = [0, 1, 2, 4, 8, 16, 32, 64,
128, 256, 512, 1024, 2048, 4096];
#[bench]
fn | (b: &mut test::Bencher) {
let k = gen_key();
let ms: Vec<Vec<u8>> = BENCH_SIZES.iter().map(|s| {
randombytes(*s)
}).collect();
b.iter(|| {
for m in ms.iter() {
shorthash(m, &k);
}
});
}
}
| bench_shorthash | identifier_name |
siphash24.rs | //! `SipHash-2-4`
use ffi;
use libc::c_ulonglong;
use randombytes::randombytes_into;
pub const HASHBYTES: usize = ffi::crypto_shorthash_siphash24_BYTES;
pub const KEYBYTES: usize = ffi::crypto_shorthash_siphash24_KEYBYTES;
/// Digest structure
#[derive(Copy)]
pub struct Digest(pub [u8; HASHBYTES]);
newtype_clone!(Digest);
newtype_impl!(Digest, HASHBYTES);
/// Key
///
/// When a `Key` goes out of scope its contents
/// will be zeroed out
pub struct Key(pub [u8; KEYBYTES]);
newtype_drop!(Key);
newtype_clone!(Key);
newtype_impl!(Key, KEYBYTES);
/// `gen_key()` randomly generates a key for shorthash
///
/// THREAD SAFETY: `gen_key()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn gen_key() -> Key {
let mut k = [0; KEYBYTES];
randombytes_into(&mut k);
Key(k)
}
/// `shorthash` hashes a message `m` under a key `k`. It
/// returns a hash `h`.
pub fn shorthash(m: &[u8],
&Key(ref k): &Key) -> Digest {
unsafe {
let mut h = [0; HASHBYTES];
ffi::crypto_shorthash_siphash24(&mut h, m.as_ptr(),
m.len() as c_ulonglong,
k);
Digest(h)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_vectors() {
let maxlen = 64;
let mut m = Vec::with_capacity(64);
for i in (0usize..64) {
m.push(i as u8);
}
let h_expecteds = [[0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72]
,[0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74]
,[0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d]
,[0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85]
,[0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf]
,[0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18]
,[0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb]
,[0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab]
,[0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93]
,[0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e]
,[0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a]
,[0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4]
,[0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75]
,[0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14]
,[0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7]
,[0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1]
,[0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f]
,[0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69]
,[0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b]
,[0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb]
,[0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe]
,[0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0]
,[0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93]
,[0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8]
,[0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8]
,[0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc]
,[0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17]
,[0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f]
,[0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde]
,[0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6]
,[0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad]
,[0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32]
,[0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71]
,[0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7]
,[0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12]
,[0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15]
,[0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31]
,[0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02]
,[0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca]
,[0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a]
,[0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e]
,[0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad]
,[0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18]
,[0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4]
,[0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9]
,[0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9]
,[0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb]
,[0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0]
,[0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6]
,[0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7] | ,[0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f]
,[0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24]
,[0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7]
,[0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea]
,[0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60]
,[0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66]
,[0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c]
,[0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f]
,[0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5]
,[0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95]];
let k = Key([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
for i in (0usize..maxlen) {
let Digest(h) = shorthash(&m[..i], &k);
assert!(h == h_expecteds[i]);
}
}
}
#[cfg(feature = "benchmarks")]
#[cfg(test)]
mod bench {
extern crate test;
use randombytes::randombytes;
use super::*;
const BENCH_SIZES: [usize; 14] = [0, 1, 2, 4, 8, 16, 32, 64,
128, 256, 512, 1024, 2048, 4096];
#[bench]
fn bench_shorthash(b: &mut test::Bencher) {
let k = gen_key();
let ms: Vec<Vec<u8>> = BENCH_SIZES.iter().map(|s| {
randombytes(*s)
}).collect();
b.iter(|| {
for m in ms.iter() {
shorthash(m, &k);
}
});
}
} | ,[0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee]
,[0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1]
,[0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a]
,[0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81] | random_line_split |
siphash24.rs | //! `SipHash-2-4`
use ffi;
use libc::c_ulonglong;
use randombytes::randombytes_into;
pub const HASHBYTES: usize = ffi::crypto_shorthash_siphash24_BYTES;
pub const KEYBYTES: usize = ffi::crypto_shorthash_siphash24_KEYBYTES;
/// Digest structure
#[derive(Copy)]
pub struct Digest(pub [u8; HASHBYTES]);
newtype_clone!(Digest);
newtype_impl!(Digest, HASHBYTES);
/// Key
///
/// When a `Key` goes out of scope its contents
/// will be zeroed out
pub struct Key(pub [u8; KEYBYTES]);
newtype_drop!(Key);
newtype_clone!(Key);
newtype_impl!(Key, KEYBYTES);
/// `gen_key()` randomly generates a key for shorthash
///
/// THREAD SAFETY: `gen_key()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn gen_key() -> Key {
let mut k = [0; KEYBYTES];
randombytes_into(&mut k);
Key(k)
}
/// `shorthash` hashes a message `m` under a key `k`. It
/// returns a hash `h`.
pub fn shorthash(m: &[u8],
&Key(ref k): &Key) -> Digest {
unsafe {
let mut h = [0; HASHBYTES];
ffi::crypto_shorthash_siphash24(&mut h, m.as_ptr(),
m.len() as c_ulonglong,
k);
Digest(h)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_vectors() {
let maxlen = 64;
let mut m = Vec::with_capacity(64);
for i in (0usize..64) {
m.push(i as u8);
}
let h_expecteds = [[0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72]
,[0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74]
,[0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d]
,[0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85]
,[0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf]
,[0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18]
,[0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb]
,[0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab]
,[0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93]
,[0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e]
,[0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a]
,[0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4]
,[0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75]
,[0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14]
,[0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7]
,[0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1]
,[0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f]
,[0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69]
,[0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b]
,[0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb]
,[0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe]
,[0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0]
,[0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93]
,[0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8]
,[0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8]
,[0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc]
,[0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17]
,[0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f]
,[0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde]
,[0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6]
,[0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad]
,[0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32]
,[0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71]
,[0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7]
,[0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12]
,[0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15]
,[0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31]
,[0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02]
,[0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca]
,[0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a]
,[0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e]
,[0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad]
,[0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18]
,[0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4]
,[0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9]
,[0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9]
,[0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb]
,[0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0]
,[0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6]
,[0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7]
,[0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee]
,[0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1]
,[0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a]
,[0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81]
,[0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f]
,[0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24]
,[0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7]
,[0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea]
,[0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60]
,[0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66]
,[0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c]
,[0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f]
,[0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5]
,[0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95]];
let k = Key([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
for i in (0usize..maxlen) {
let Digest(h) = shorthash(&m[..i], &k);
assert!(h == h_expecteds[i]);
}
}
}
#[cfg(feature = "benchmarks")]
#[cfg(test)]
mod bench {
extern crate test;
use randombytes::randombytes;
use super::*;
const BENCH_SIZES: [usize; 14] = [0, 1, 2, 4, 8, 16, 32, 64,
128, 256, 512, 1024, 2048, 4096];
#[bench]
fn bench_shorthash(b: &mut test::Bencher) |
}
| {
let k = gen_key();
let ms: Vec<Vec<u8>> = BENCH_SIZES.iter().map(|s| {
randombytes(*s)
}).collect();
b.iter(|| {
for m in ms.iter() {
shorthash(m, &k);
}
});
} | identifier_body |
high_lows.py | import csv
from datetime import datetime
from matplotlib import pyplot as plt
# Get dates, high, and low temperatures from file.
filename = 'sitka_weather_2017.csv'
with open(filename) as f:
reader = csv.reader(f)
header_row = next(reader)
dates, highs, lows = [], [], []
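    # CSV columns used below: row[0] date, row[1] daily high, row[3] daily low.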
for row in reader:
|
# Plot data.
fig = plt.figure(dpi=128, figsize=(10, 6))
plt.plot(dates, highs, c='red', alpha=0.5)
plt.plot(dates, lows, c='blue', alpha=0.5)
plt.fill_between(dates, highs, lows, facecolor='blue', alpha=0.1)
# Format plot.
plt.title("Daily high and low temperatures - 2017", fontsize=24)
plt.xlabel('', fontsize=16)
fig.autofmt_xdate()
plt.ylabel("Temperature (F)", fontsize=16)
plt.tick_params(axis='both', which='major', labelsize=16)
plt.show()
| current_date = datetime.strptime(row[0], "%Y-%m-%d")
dates.append(current_date)
high = int(row[1])
highs.append(high)
low = int(row[3])
lows.append(low) | conditional_block |
high_lows.py | import csv
from datetime import datetime
from matplotlib import pyplot as plt
# Get dates, high, and low temperatures from file.
filename = 'sitka_weather_2017.csv'
with open(filename) as f:
reader = csv.reader(f)
header_row = next(reader) |
dates, highs, lows = [], [], []
for row in reader:
current_date = datetime.strptime(row[0], "%Y-%m-%d")
dates.append(current_date)
high = int(row[1])
highs.append(high)
low = int(row[3])
lows.append(low)
# Plot data.
fig = plt.figure(dpi=128, figsize=(10, 6))
plt.plot(dates, highs, c='red', alpha=0.5)
plt.plot(dates, lows, c='blue', alpha=0.5)
plt.fill_between(dates, highs, lows, facecolor='blue', alpha=0.1)
# Format plot.
plt.title("Daily high and low temperatures - 2017", fontsize=24)
plt.xlabel('', fontsize=16)
fig.autofmt_xdate()
plt.ylabel("Temperature (F)", fontsize=16)
plt.tick_params(axis='both', which='major', labelsize=16)
plt.show() | random_line_split |
|
devstack.py | """
Specific overrides to the base prod settings to make development easier.
"""
from os.path import abspath, dirname, join
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
MEDIA_ROOT = "/edx/var/edxapp/uploads"
DEBUG = True
USE_I18N = True
TEMPLATE_DEBUG = True
SITE_NAME = 'localhost:8000'
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'Devstack')
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True
HTTPS = 'off'
################################ LOGGERS ######################################
# Silence noisy logs
import logging
LOG_OVERRIDES = [
('track.contexts', logging.CRITICAL),
('track.middleware', logging.CRITICAL),
('dd.dogapi', logging.CRITICAL),
('django_comment_client.utils', logging.CRITICAL),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
################################ EMAIL ########################################
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True # Enable email for all Studio courses
FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms)
########################## ANALYTICS TESTING ########################
ANALYTICS_SERVER_URL = "http://127.0.0.1:9000/"
ANALYTICS_API_KEY = ""
# Set this to the dashboard URL in order to display the link from the
# dashboard to the Analytics Dashboard.
ANALYTICS_DASHBOARD_URL = None
################################ DEBUG TOOLBAR ################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo')
MIDDLEWARE_CLASSES += (
'django_comment_client.utils.QueryCountDebugMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar_mongo.panel.MongoDebugPanel',
# ProfilingPanel has been intentionally removed for default devstack.py
# runtimes for performance reasons. If you wish to re-enable it in your
# local development environment, please create a new settings file
# that imports and extends devstack.py.
)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': 'lms.envs.devstack.should_show_debug_toolbar'
}
def should_show_debug_toolbar(_):
|
########################### PIPELINE #################################
# Skip packaging and optimization in development
PIPELINE_ENABLED = False
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# Disable JavaScript compression in development
PIPELINE_JS_COMPRESSOR = None
# Whether to run django-require in debug mode.
REQUIRE_DEBUG = DEBUG
PIPELINE_SASS_ARGUMENTS = '--debug-info --require {proj_dir}/static/sass/bourbon/lib/bourbon.rb'.format(proj_dir=PROJECT_ROOT)
########################### VERIFIED CERTIFICATES #################################
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
FEATURES['ENABLE_PAYMENT_FAKE'] = True
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR = {
'CyberSource2': {
"PURCHASE_ENDPOINT": '/shoppingcart/payment_fake/',
"SECRET_KEY": 'abcd123',
"ACCESS_KEY": 'abcd123',
"PROFILE_ID": 'edx',
}
}
########################### External REST APIs #################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
OAUTH_OIDC_ISSUER = 'http://127.0.0.1:8000/oauth2'
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
########################### Milestones #################################
FEATURES['MILESTONES_APP'] = True
########################### Organizations #################################
FEATURES['ORGANIZATIONS_APP'] = True
########################### Entrance Exams #################################
FEATURES['ENTRANCE_EXAMS'] = True
################################ COURSE LICENSES ################################
FEATURES['LICENSING'] = True
########################## Courseware Search #######################
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
########################## Dashboard Search #######################
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
########################## Certificates Web/HTML View #######################
FEATURES['CERTIFICATES_HTML_VIEW'] = True
########################## Course Discovery #######################
from django.utils.translation import ugettext as _
LANGUAGE_MAP = {'terms': {lang: display for lang, display in ALL_LANGUAGES}, 'name': _('Language')}
COURSE_DISCOVERY_MEANINGS = {
'org': {
'name': _('Organization'),
},
'modes': {
'name': _('Course Type'),
'terms': {
'honor': _('Honor'),
'verified': _('Verified'),
},
},
'language': LANGUAGE_MAP,
}
FEATURES['ENABLE_COURSE_DISCOVERY'] = True
# Setting for overriding default filtering facets for Course discovery
# COURSE_DISCOVERY_FILTERS = ["org", "language", "modes"]
FEATURES['COURSES_ARE_BROWSEABLE'] = True
HOMEPAGE_COURSE_MAX = 9
# Software secure fake page feature flag
FEATURES['ENABLE_SOFTWARE_SECURE_FAKE'] = True
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
# Skip enrollment start date filtering
SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING = True
########################## Shopping cart ##########################
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['STORE_BILLING_INFO'] = True
FEATURES['ENABLE_PAID_COURSE_REGISTRATION'] = True
FEATURES['ENABLE_COSMETIC_DISPLAY_PRICE'] = True
########################## Third Party Auth #######################
if FEATURES.get('ENABLE_THIRD_PARTY_AUTH') and 'third_party_auth.dummy.DummyBackend' not in AUTHENTICATION_BACKENDS:
AUTHENTICATION_BACKENDS = ['third_party_auth.dummy.DummyBackend'] + list(AUTHENTICATION_BACKENDS)
############## ECOMMERCE API CONFIGURATION SETTINGS ###############
ECOMMERCE_PUBLIC_URL_ROOT = "http://localhost:8002"
###################### Cross-domain requests ######################
FEATURES['ENABLE_CORS_HEADERS'] = True
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ()
CORS_ORIGIN_ALLOW_ALL = True
#####################################################################
# See if the developer has any local overrides.
if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')):
from .private import * # pylint: disable=import-error,wildcard-import
#####################################################################
# Lastly, run any migrations, if needed.
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
| return True # We always want the toolbar on devstack regardless of IP, auth, etc. | identifier_body |
devstack.py | """
Specific overrides to the base prod settings to make development easier.
"""
from os.path import abspath, dirname, join
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
MEDIA_ROOT = "/edx/var/edxapp/uploads"
DEBUG = True
USE_I18N = True
TEMPLATE_DEBUG = True
SITE_NAME = 'localhost:8000'
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'Devstack')
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True
HTTPS = 'off'
################################ LOGGERS ######################################
# Silence noisy logs
import logging
LOG_OVERRIDES = [
('track.contexts', logging.CRITICAL),
('track.middleware', logging.CRITICAL),
('dd.dogapi', logging.CRITICAL),
('django_comment_client.utils', logging.CRITICAL),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
################################ EMAIL ########################################
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True # Enable email for all Studio courses
FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms)
########################## ANALYTICS TESTING ########################
ANALYTICS_SERVER_URL = "http://127.0.0.1:9000/"
ANALYTICS_API_KEY = ""
# Set this to the dashboard URL in order to display the link from the
# dashboard to the Analytics Dashboard.
ANALYTICS_DASHBOARD_URL = None
################################ DEBUG TOOLBAR ################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo')
MIDDLEWARE_CLASSES += (
'django_comment_client.utils.QueryCountDebugMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar_mongo.panel.MongoDebugPanel',
# ProfilingPanel has been intentionally removed for default devstack.py
# runtimes for performance reasons. If you wish to re-enable it in your
# local development environment, please create a new settings file
# that imports and extends devstack.py.
)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': 'lms.envs.devstack.should_show_debug_toolbar'
}
def | (_):
return True # We always want the toolbar on devstack regardless of IP, auth, etc.
########################### PIPELINE #################################
# Skip packaging and optimization in development
PIPELINE_ENABLED = False
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# Disable JavaScript compression in development
PIPELINE_JS_COMPRESSOR = None
# Whether to run django-require in debug mode.
REQUIRE_DEBUG = DEBUG
PIPELINE_SASS_ARGUMENTS = '--debug-info --require {proj_dir}/static/sass/bourbon/lib/bourbon.rb'.format(proj_dir=PROJECT_ROOT)
########################### VERIFIED CERTIFICATES #################################
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
FEATURES['ENABLE_PAYMENT_FAKE'] = True
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR = {
'CyberSource2': {
"PURCHASE_ENDPOINT": '/shoppingcart/payment_fake/',
"SECRET_KEY": 'abcd123',
"ACCESS_KEY": 'abcd123',
"PROFILE_ID": 'edx',
}
}
########################### External REST APIs #################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
OAUTH_OIDC_ISSUER = 'http://127.0.0.1:8000/oauth2'
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
########################### Milestones #################################
FEATURES['MILESTONES_APP'] = True
########################### Organizations #################################
FEATURES['ORGANIZATIONS_APP'] = True
########################### Entrance Exams #################################
FEATURES['ENTRANCE_EXAMS'] = True
################################ COURSE LICENSES ################################
FEATURES['LICENSING'] = True
########################## Courseware Search #######################
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
########################## Dashboard Search #######################
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
########################## Certificates Web/HTML View #######################
FEATURES['CERTIFICATES_HTML_VIEW'] = True
########################## Course Discovery #######################
from django.utils.translation import ugettext as _
LANGUAGE_MAP = {'terms': {lang: display for lang, display in ALL_LANGUAGES}, 'name': _('Language')}
COURSE_DISCOVERY_MEANINGS = {
'org': {
'name': _('Organization'),
},
'modes': {
'name': _('Course Type'),
'terms': {
'honor': _('Honor'),
'verified': _('Verified'),
},
},
'language': LANGUAGE_MAP,
}
FEATURES['ENABLE_COURSE_DISCOVERY'] = True
# Setting for overriding default filtering facets for Course discovery
# COURSE_DISCOVERY_FILTERS = ["org", "language", "modes"]
FEATURES['COURSES_ARE_BROWSEABLE'] = True
HOMEPAGE_COURSE_MAX = 9
# Software secure fake page feature flag
FEATURES['ENABLE_SOFTWARE_SECURE_FAKE'] = True
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
# Skip enrollment start date filtering
SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING = True
########################## Shopping cart ##########################
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['STORE_BILLING_INFO'] = True
FEATURES['ENABLE_PAID_COURSE_REGISTRATION'] = True
FEATURES['ENABLE_COSMETIC_DISPLAY_PRICE'] = True
########################## Third Party Auth #######################
if FEATURES.get('ENABLE_THIRD_PARTY_AUTH') and 'third_party_auth.dummy.DummyBackend' not in AUTHENTICATION_BACKENDS:
AUTHENTICATION_BACKENDS = ['third_party_auth.dummy.DummyBackend'] + list(AUTHENTICATION_BACKENDS)
############## ECOMMERCE API CONFIGURATION SETTINGS ###############
ECOMMERCE_PUBLIC_URL_ROOT = "http://localhost:8002"
###################### Cross-domain requests ######################
FEATURES['ENABLE_CORS_HEADERS'] = True
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ()
CORS_ORIGIN_ALLOW_ALL = True
#####################################################################
# See if the developer has any local overrides.
if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')):
from .private import * # pylint: disable=import-error,wildcard-import
#####################################################################
# Lastly, run any migrations, if needed.
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
| should_show_debug_toolbar | identifier_name |
devstack.py | """
Specific overrides to the base prod settings to make development easier.
"""
from os.path import abspath, dirname, join
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
MEDIA_ROOT = "/edx/var/edxapp/uploads"
DEBUG = True
USE_I18N = True
TEMPLATE_DEBUG = True
SITE_NAME = 'localhost:8000'
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'Devstack')
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True
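# Illustrative sketch (editorial, not part of the original file): with eager mode
# on, a Celery task call such as
#   result = send_welcome_email.delay(user_id)  # hypothetical task name
# runs synchronously in-process, so result.get() returns immediately and no
# broker or worker is involved.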
HTTPS = 'off'
################################ LOGGERS ######################################
# Silence noisy logs
import logging
LOG_OVERRIDES = [
('track.contexts', logging.CRITICAL),
('track.middleware', logging.CRITICAL),
('dd.dogapi', logging.CRITICAL),
('django_comment_client.utils', logging.CRITICAL),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
################################ EMAIL ########################################
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True # Enable email for all Studio courses
FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms)
########################## ANALYTICS TESTING ########################
ANALYTICS_SERVER_URL = "http://127.0.0.1:9000/"
ANALYTICS_API_KEY = ""
# Set this to the dashboard URL in order to display the link from the
# dashboard to the Analytics Dashboard.
ANALYTICS_DASHBOARD_URL = None
################################ DEBUG TOOLBAR ################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo')
MIDDLEWARE_CLASSES += (
'django_comment_client.utils.QueryCountDebugMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar_mongo.panel.MongoDebugPanel',
# ProfilingPanel has been intentionally removed for default devstack.py
# runtimes for performance reasons. If you wish to re-enable it in your
# local development environment, please create a new settings file
# that imports and extends devstack.py.
)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': 'lms.envs.devstack.should_show_debug_toolbar'
}
def should_show_debug_toolbar(_):
return True # We always want the toolbar on devstack regardless of IP, auth, etc.
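# Editorial note (hedged): django-debug-toolbar resolves the dotted path in
# SHOW_TOOLBAR_CALLBACK above and invokes it as callback(request) on each
# request; returning True unconditionally skips the INTERNAL_IPS check that the
# default callback performs.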
########################### PIPELINE #################################
# Skip packaging and optimization in development
PIPELINE_ENABLED = False
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# Disable JavaScript compression in development
PIPELINE_JS_COMPRESSOR = None
# Whether to run django-require in debug mode.
REQUIRE_DEBUG = DEBUG
PIPELINE_SASS_ARGUMENTS = '--debug-info --require {proj_dir}/static/sass/bourbon/lib/bourbon.rb'.format(proj_dir=PROJECT_ROOT)
########################### VERIFIED CERTIFICATES #################################
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
FEATURES['ENABLE_PAYMENT_FAKE'] = True
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR = {
'CyberSource2': {
"PURCHASE_ENDPOINT": '/shoppingcart/payment_fake/',
"SECRET_KEY": 'abcd123',
"ACCESS_KEY": 'abcd123',
"PROFILE_ID": 'edx',
}
}
########################### External REST APIs #################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
OAUTH_OIDC_ISSUER = 'http://127.0.0.1:8000/oauth2'
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
########################### Milestones #################################
FEATURES['MILESTONES_APP'] = True
########################### Organizations #################################
FEATURES['ORGANIZATIONS_APP'] = True
########################### Entrance Exams #################################
FEATURES['ENTRANCE_EXAMS'] = True
################################ COURSE LICENSES ################################
FEATURES['LICENSING'] = True
########################## Courseware Search #######################
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
########################## Dashboard Search #######################
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
########################## Certificates Web/HTML View #######################
FEATURES['CERTIFICATES_HTML_VIEW'] = True
########################## Course Discovery #######################
from django.utils.translation import ugettext as _
LANGUAGE_MAP = {'terms': {lang: display for lang, display in ALL_LANGUAGES}, 'name': _('Language')}
COURSE_DISCOVERY_MEANINGS = {
'org': {
'name': _('Organization'),
},
'modes': {
'name': _('Course Type'),
'terms': {
'honor': _('Honor'),
'verified': _('Verified'),
},
},
'language': LANGUAGE_MAP,
}
FEATURES['ENABLE_COURSE_DISCOVERY'] = True
# Setting for overriding default filtering facets for Course discovery
# COURSE_DISCOVERY_FILTERS = ["org", "language", "modes"]
FEATURES['COURSES_ARE_BROWSEABLE'] = True
HOMEPAGE_COURSE_MAX = 9
# Software secure fake page feature flag
FEATURES['ENABLE_SOFTWARE_SECURE_FAKE'] = True
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
# Skip enrollment start date filtering
SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING = True
########################## Shopping cart ##########################
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['STORE_BILLING_INFO'] = True
FEATURES['ENABLE_PAID_COURSE_REGISTRATION'] = True
FEATURES['ENABLE_COSMETIC_DISPLAY_PRICE'] = True
########################## Third Party Auth #######################
if FEATURES.get('ENABLE_THIRD_PARTY_AUTH') and 'third_party_auth.dummy.DummyBackend' not in AUTHENTICATION_BACKENDS:
AUTHENTICATION_BACKENDS = ['third_party_auth.dummy.DummyBackend'] + list(AUTHENTICATION_BACKENDS)
############## ECOMMERCE API CONFIGURATION SETTINGS ###############
ECOMMERCE_PUBLIC_URL_ROOT = "http://localhost:8002"
###################### Cross-domain requests ######################
FEATURES['ENABLE_CORS_HEADERS'] = True
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ()
CORS_ORIGIN_ALLOW_ALL = True
| #####################################################################
# See if the developer has any local overrides.
if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')):
from .private import * # pylint: disable=import-error,wildcard-import
#####################################################################
# Lastly, run any migrations, if needed.
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd' | random_line_split |
|
devstack.py | """
Specific overrides to the base prod settings to make development easier.
"""
from os.path import abspath, dirname, join
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
MEDIA_ROOT = "/edx/var/edxapp/uploads"
DEBUG = True
USE_I18N = True
TEMPLATE_DEBUG = True
SITE_NAME = 'localhost:8000'
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'Devstack')
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True
HTTPS = 'off'
################################ LOGGERS ######################################
# Silence noisy logs
import logging
LOG_OVERRIDES = [
('track.contexts', logging.CRITICAL),
('track.middleware', logging.CRITICAL),
('dd.dogapi', logging.CRITICAL),
('django_comment_client.utils', logging.CRITICAL),
]
for log_name, log_level in LOG_OVERRIDES:
|
################################ EMAIL ########################################
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True # Enable email for all Studio courses
FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms)
########################## ANALYTICS TESTING ########################
ANALYTICS_SERVER_URL = "http://127.0.0.1:9000/"
ANALYTICS_API_KEY = ""
# Set this to the dashboard URL in order to display the link from the
# dashboard to the Analytics Dashboard.
ANALYTICS_DASHBOARD_URL = None
################################ DEBUG TOOLBAR ################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo')
MIDDLEWARE_CLASSES += (
'django_comment_client.utils.QueryCountDebugMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar_mongo.panel.MongoDebugPanel',
# ProfilingPanel has been intentionally removed for default devstack.py
# runtimes for performance reasons. If you wish to re-enable it in your
# local development environment, please create a new settings file
# that imports and extends devstack.py.
)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': 'lms.envs.devstack.should_show_debug_toolbar'
}
def should_show_debug_toolbar(_):
return True # We always want the toolbar on devstack regardless of IP, auth, etc.
########################### PIPELINE #################################
# Skip packaging and optimization in development
PIPELINE_ENABLED = False
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# Disable JavaScript compression in development
PIPELINE_JS_COMPRESSOR = None
# Whether to run django-require in debug mode.
REQUIRE_DEBUG = DEBUG
PIPELINE_SASS_ARGUMENTS = '--debug-info --require {proj_dir}/static/sass/bourbon/lib/bourbon.rb'.format(proj_dir=PROJECT_ROOT)
########################### VERIFIED CERTIFICATES #################################
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
FEATURES['ENABLE_PAYMENT_FAKE'] = True
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR = {
'CyberSource2': {
"PURCHASE_ENDPOINT": '/shoppingcart/payment_fake/',
"SECRET_KEY": 'abcd123',
"ACCESS_KEY": 'abcd123',
"PROFILE_ID": 'edx',
}
}
########################### External REST APIs #################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
OAUTH_OIDC_ISSUER = 'http://127.0.0.1:8000/oauth2'
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
########################### Milestones #################################
FEATURES['MILESTONES_APP'] = True
########################### Organizations #################################
FEATURES['ORGANIZATIONS_APP'] = True
########################### Entrance Exams #################################
FEATURES['ENTRANCE_EXAMS'] = True
################################ COURSE LICENSES ################################
FEATURES['LICENSING'] = True
########################## Courseware Search #######################
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
########################## Dashboard Search #######################
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
########################## Certificates Web/HTML View #######################
FEATURES['CERTIFICATES_HTML_VIEW'] = True
########################## Course Discovery #######################
from django.utils.translation import ugettext as _
LANGUAGE_MAP = {'terms': {lang: display for lang, display in ALL_LANGUAGES}, 'name': _('Language')}
COURSE_DISCOVERY_MEANINGS = {
'org': {
'name': _('Organization'),
},
'modes': {
'name': _('Course Type'),
'terms': {
'honor': _('Honor'),
'verified': _('Verified'),
},
},
'language': LANGUAGE_MAP,
}
FEATURES['ENABLE_COURSE_DISCOVERY'] = True
# Setting for overriding default filtering facets for Course discovery
# COURSE_DISCOVERY_FILTERS = ["org", "language", "modes"]
FEATURES['COURSES_ARE_BROWSEABLE'] = True
HOMEPAGE_COURSE_MAX = 9
# Software secure fake page feature flag
FEATURES['ENABLE_SOFTWARE_SECURE_FAKE'] = True
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
# Skip enrollment start date filtering
SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING = True
########################## Shopping cart ##########################
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['STORE_BILLING_INFO'] = True
FEATURES['ENABLE_PAID_COURSE_REGISTRATION'] = True
FEATURES['ENABLE_COSMETIC_DISPLAY_PRICE'] = True
########################## Third Party Auth #######################
if FEATURES.get('ENABLE_THIRD_PARTY_AUTH') and 'third_party_auth.dummy.DummyBackend' not in AUTHENTICATION_BACKENDS:
AUTHENTICATION_BACKENDS = ['third_party_auth.dummy.DummyBackend'] + list(AUTHENTICATION_BACKENDS)
############## ECOMMERCE API CONFIGURATION SETTINGS ###############
ECOMMERCE_PUBLIC_URL_ROOT = "http://localhost:8002"
###################### Cross-domain requests ######################
FEATURES['ENABLE_CORS_HEADERS'] = True
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ()
CORS_ORIGIN_ALLOW_ALL = True
#####################################################################
# See if the developer has any local overrides.
if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')):
from .private import * # pylint: disable=import-error,wildcard-import
#####################################################################
# Lastly, run any migrations, if needed.
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
| logging.getLogger(log_name).setLevel(log_level) | conditional_block |
rust.js | // Copyright (c) 2016, Matt Godbolt
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
const BaseCompiler = require('../base-compiler'),
_ = require('underscore'),
path = require('path'),
argumentParsers = require("./argument-parsers");
class RustCompiler extends BaseCompiler {
constructor(info, env) |
getSharedLibraryPathsAsArguments() {
return [];
}
optionsForFilter(filters, outputFilename, userOptions) {
let options = ['-C', 'debuginfo=1', '-o', this.filename(outputFilename)];
let userRequestedEmit = _.any(userOptions, opt => opt.indexOf("--emit") > -1);
//TODO: Binary not supported (?)
if (!filters.binary) {
if (!userRequestedEmit) {
options = options.concat('--emit', 'asm');
}
if (filters.intel) options = options.concat('-Cllvm-args=--x86-asm-syntax=intel');
}
options = options.concat(['--crate-type', 'rlib']);
return options;
}
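// Illustrative example (editorial, not in the original source): with the
// default filters (binary and intel both false) and an output file of
// "output.s", the method above yields roughly:
//   ['-C', 'debuginfo=1', '-o', 'output.s', '--emit', 'asm', '--crate-type', 'rlib']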
// Override the IR file name method for rustc because the output file is different from clang.
getIrOutputFilename(inputFilename) {
return this.getOutputFilename(path.dirname(inputFilename), this.outputFilebase)
.replace('.s', '.ll');
}
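// e.g. (illustrative, assuming an outputFilebase of 'output'): an input of
// '/tmp/build/main.rs' maps to '/tmp/build/output.ll'.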
getArgumentParser() {
return argumentParsers.Rust;
}
isCfgCompiler(/*compilerVersion*/) {
return true;
}
}
module.exports = RustCompiler;
| {
super(info, env);
this.compiler.supportsIntel = true;
this.compiler.supportsIrView = true;
this.compiler.irArg = ['--emit', 'llvm-ir'];
} | identifier_body |
rust.js | // Copyright (c) 2016, Matt Godbolt
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
const BaseCompiler = require('../base-compiler'),
_ = require('underscore'),
path = require('path'),
argumentParsers = require("./argument-parsers");
class RustCompiler extends BaseCompiler {
constructor(info, env) {
super(info, env);
this.compiler.supportsIntel = true;
this.compiler.supportsIrView = true;
this.compiler.irArg = ['--emit', 'llvm-ir'];
}
getSharedLibraryPathsAsArguments() {
return [];
}
optionsForFilter(filters, outputFilename, userOptions) {
let options = ['-C', 'debuginfo=1', '-o', this.filename(outputFilename)];
let userRequestedEmit = _.any(userOptions, opt => opt.indexOf("--emit") > -1);
//TODO: Binary not supported (?)
if (!filters.binary) {
if (!userRequestedEmit) {
options = options.concat('--emit', 'asm');
}
if (filters.intel) options = options.concat('-Cllvm-args=--x86-asm-syntax=intel');
}
options = options.concat(['--crate-type', 'rlib']);
return options;
}
// Override the IR file name method for rustc because the output file is different from clang.
| (inputFilename) {
return this.getOutputFilename(path.dirname(inputFilename), this.outputFilebase)
.replace('.s', '.ll');
}
getArgumentParser() {
return argumentParsers.Rust;
}
isCfgCompiler(/*compilerVersion*/) {
return true;
}
}
module.exports = RustCompiler;
| getIrOutputFilename | identifier_name |
rust.js | // Copyright (c) 2016, Matt Godbolt
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
const BaseCompiler = require('../base-compiler'),
_ = require('underscore'),
path = require('path'), | argumentParsers = require("./argument-parsers");
class RustCompiler extends BaseCompiler {
constructor(info, env) {
super(info, env);
this.compiler.supportsIntel = true;
this.compiler.supportsIrView = true;
this.compiler.irArg = ['--emit', 'llvm-ir'];
}
getSharedLibraryPathsAsArguments() {
return [];
}
optionsForFilter(filters, outputFilename, userOptions) {
let options = ['-C', 'debuginfo=1', '-o', this.filename(outputFilename)];
let userRequestedEmit = _.any(userOptions, opt => opt.indexOf("--emit") > -1);
//TODO: Binary not supported (?)
if (!filters.binary) {
if (!userRequestedEmit) {
options = options.concat('--emit', 'asm');
}
if (filters.intel) options = options.concat('-Cllvm-args=--x86-asm-syntax=intel');
}
options = options.concat(['--crate-type', 'rlib']);
return options;
}
// Override the IR file name method for rustc because the output file is different from clang.
getIrOutputFilename(inputFilename) {
return this.getOutputFilename(path.dirname(inputFilename), this.outputFilebase)
.replace('.s', '.ll');
}
getArgumentParser() {
return argumentParsers.Rust;
}
isCfgCompiler(/*compilerVersion*/) {
return true;
}
}
module.exports = RustCompiler; | random_line_split |
|
rust.js | // Copyright (c) 2016, Matt Godbolt
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
const BaseCompiler = require('../base-compiler'),
_ = require('underscore'),
path = require('path'),
argumentParsers = require("./argument-parsers");
class RustCompiler extends BaseCompiler {
constructor(info, env) {
super(info, env);
this.compiler.supportsIntel = true;
this.compiler.supportsIrView = true;
this.compiler.irArg = ['--emit', 'llvm-ir'];
}
getSharedLibraryPathsAsArguments() {
return [];
}
optionsForFilter(filters, outputFilename, userOptions) {
let options = ['-C', 'debuginfo=1', '-o', this.filename(outputFilename)];
let userRequestedEmit = _.any(userOptions, opt => opt.indexOf("--emit") > -1);
//TODO: Binary not supported (?)
if (!filters.binary) {
if (!userRequestedEmit) |
if (filters.intel) options = options.concat('-Cllvm-args=--x86-asm-syntax=intel');
}
options = options.concat(['--crate-type', 'rlib']);
return options;
}
// Override the IR file name method for rustc because the output file is different from clang.
getIrOutputFilename(inputFilename) {
return this.getOutputFilename(path.dirname(inputFilename), this.outputFilebase)
.replace('.s', '.ll');
}
getArgumentParser() {
return argumentParsers.Rust;
}
isCfgCompiler(/*compilerVersion*/) {
return true;
}
}
module.exports = RustCompiler;
| {
options = options.concat('--emit', 'asm');
} | conditional_block |
u32.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for `u32`
mod inst {
use num::{Primitive, BitCount};
use unstable::intrinsics;
pub type T = u32;
#[allow(non_camel_case_types)]
pub type T_SIGNED = i32;
pub static bits: uint = 32;
impl Primitive for u32 {
#[inline(always)]
fn bits() -> uint { 32 }
#[inline(always)]
fn bytes() -> uint { Primitive::bits::<u32>() / 8 }
}
impl BitCount for u32 {
/// Counts the number of bits set. Wraps LLVM's `ctpop` intrinsic.
#[inline(always)]
fn population_count(&self) -> u32 { unsafe { intrinsics::ctpop32(*self as i32) as u32 } }
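// Illustrative examples (editorial, not in the original file):
//   assert_eq!(0b1011u32.population_count(), 3);
//   assert_eq!(1u32.leading_zeros(), 31);
//   assert_eq!(8u32.trailing_zeros(), 3);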
/// Counts the number of leading zeros. Wraps LLVM's `ctlz` intrinsic.
#[inline(always)]
fn leading_zeros(&self) -> u32 { unsafe { intrinsics::ctlz32(*self as i32) as u32 } }
/// Counts the number of trailing zeros. Wraps LLVM's `cttz` intrinsic.
#[inline(always)]
fn trailing_zeros(&self) -> u32 { unsafe { intrinsics::cttz32(*self as i32) as u32 } } | } | } | random_line_split |
u32.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for `u32`
mod inst {
use num::{Primitive, BitCount};
use unstable::intrinsics;
pub type T = u32;
#[allow(non_camel_case_types)]
pub type T_SIGNED = i32;
pub static bits: uint = 32;
impl Primitive for u32 {
#[inline(always)]
fn bits() -> uint { 32 }
#[inline(always)]
fn bytes() -> uint { Primitive::bits::<u32>() / 8 }
}
impl BitCount for u32 {
/// Counts the number of bits set. Wraps LLVM's `ctpop` intrinsic.
#[inline(always)]
fn population_count(&self) -> u32 |
/// Counts the number of leading zeros. Wraps LLVM's `ctlz` intrinsic.
#[inline(always)]
fn leading_zeros(&self) -> u32 { unsafe { intrinsics::ctlz32(*self as i32) as u32 } }
/// Counts the number of trailing zeros. Wraps LLVM's `cttz` intrinsic.
#[inline(always)]
fn trailing_zeros(&self) -> u32 { unsafe { intrinsics::cttz32(*self as i32) as u32 } }
}
}
| { unsafe { intrinsics::ctpop32(*self as i32) as u32 } } | identifier_body |
u32.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for `u32`
mod inst {
use num::{Primitive, BitCount};
use unstable::intrinsics;
pub type T = u32;
#[allow(non_camel_case_types)]
pub type T_SIGNED = i32;
pub static bits: uint = 32;
impl Primitive for u32 {
#[inline(always)]
fn bits() -> uint { 32 }
#[inline(always)]
fn bytes() -> uint { Primitive::bits::<u32>() / 8 }
}
impl BitCount for u32 {
/// Counts the number of bits set. Wraps LLVM's `ctpop` intrinsic.
#[inline(always)]
fn population_count(&self) -> u32 { unsafe { intrinsics::ctpop32(*self as i32) as u32 } }
/// Counts the number of leading zeros. Wraps LLVM's `ctlz` intrinsic.
#[inline(always)]
fn leading_zeros(&self) -> u32 { unsafe { intrinsics::ctlz32(*self as i32) as u32 } }
/// Counts the number of trailing zeros. Wraps LLVM's `cttz` intrinsic.
#[inline(always)]
fn | (&self) -> u32 { unsafe { intrinsics::cttz32(*self as i32) as u32 } }
}
}
| trailing_zeros | identifier_name |
mod.rs | // Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod process;
mod vcpu;
use std::fs::File;
use std::io;
use std::io::Read;
use std::os::unix::net::UnixDatagram;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Barrier};
use std::thread;
use std::time::{Duration, Instant};
use libc::{
c_int, c_ulong, fcntl, ioctl, socketpair, AF_UNIX, EAGAIN, EBADF, EDEADLK, EEXIST, EINTR,
EINVAL, ENOENT, EOVERFLOW, EPERM, FIOCLEX, F_SETPIPE_SZ, MS_NODEV, MS_NOEXEC, MS_NOSUID,
MS_RDONLY, O_NONBLOCK, SIGCHLD, SOCK_SEQPACKET,
};
use protobuf::ProtobufError;
use remain::sorted;
use thiserror::Error;
use anyhow::{anyhow, bail, Context, Result};
use base::{
add_fd_flags, block_signal, clear_signal, drop_capabilities, enable_core_scheduling, error,
getegid, geteuid, info, pipe, register_rt_signal_handler, validate_raw_descriptor, warn,
AsRawDescriptor, Error as SysError, Event, FromRawDescriptor, Killable, MmapError, PollToken,
Result as SysResult, SignalFd, WaitContext, SIGRTMIN,
};
use kvm::{Cap, Datamatch, IoeventAddress, Kvm, Vcpu, VcpuExit, Vm};
use minijail::{self, Minijail};
use net_util::{Tap, TapT};
use vm_memory::{GuestMemory, MemoryPolicy};
use self::process::*;
use self::vcpu::*;
use crate::{Config, Executable};
const MAX_DATAGRAM_SIZE: usize = 4096;
const MAX_VCPU_DATAGRAM_SIZE: usize = 0x40000;
/// An error that occurs when communicating with the plugin process.
#[sorted]
#[derive(Error, Debug)]
pub enum CommError {
#[error("failed to decode plugin request: {0}")]
DecodeRequest(ProtobufError),
#[error("failed to encode plugin response: {0}")]
EncodeResponse(ProtobufError),
#[error("plugin request socket has been hung up")]
PluginSocketHup,
#[error("failed to recv from plugin request socket: {0}")]
PluginSocketRecv(SysError),
#[error("failed to send to plugin request socket: {0}")]
PluginSocketSend(SysError),
}
fn new_seqpacket_pair() -> SysResult<(UnixDatagram, UnixDatagram)> {
let mut fds = [0, 0];
unsafe {
let ret = socketpair(AF_UNIX, SOCK_SEQPACKET, 0, fds.as_mut_ptr());
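// socketpair(2) returns 0 on success and fills `fds` with the two connected
// descriptors; the FIOCLEX ioctl below marks the first end close-on-exec.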
if ret == 0 {
ioctl(fds[0], FIOCLEX);
Ok((
UnixDatagram::from_raw_descriptor(fds[0]),
UnixDatagram::from_raw_descriptor(fds[1]),
))
} else {
Err(SysError::last())
}
}
}
struct VcpuPipe {
crosvm_read: File,
plugin_write: File,
plugin_read: File,
crosvm_write: File,
}
fn new_pipe_pair() -> SysResult<VcpuPipe> {
let to_crosvm = pipe(true)?;
let to_plugin = pipe(true)?;
// Increasing the pipe size can be a nice-to-have to make sure that
// messages get across atomically (and to make sure that writes don't block),
// though it's not necessarily a hard requirement for things to work.
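// Editorial note (hedged): on success, fcntl(F_SETPIPE_SZ, n) returns the
// pipe's new capacity, which the check below compares against the requested
// size; the kernel may round the capacity up, in which case the warning fires
// even though the resize succeeded.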
let flags = unsafe {
fcntl(
to_crosvm.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of crosvm pipe (result {}): {}",
flags,
SysError::last()
);
}
let flags = unsafe {
fcntl(
to_plugin.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of plugin pipe (result {}): {}",
flags,
SysError::last()
);
}
Ok(VcpuPipe {
crosvm_read: to_crosvm.0,
plugin_write: to_crosvm.1,
plugin_read: to_plugin.0,
crosvm_write: to_plugin.1,
})
}
fn proto_to_sys_err(e: ProtobufError) -> SysError {
match e {
ProtobufError::IoError(e) => SysError::new(e.raw_os_error().unwrap_or(EINVAL)),
_ => SysError::new(EINVAL),
}
}
fn io_to_sys_err(e: io::Error) -> SysError {
SysError::new(e.raw_os_error().unwrap_or(EINVAL))
}
fn mmap_to_sys_err(e: MmapError) -> SysError |
fn create_plugin_jail(root: &Path, log_failures: bool, seccomp_policy: &Path) -> Result<Minijail> {
// All child jails run in a new user namespace without any users mapped;
// they run as nobody unless otherwise configured.
let mut j = Minijail::new().context("failed to create jail")?;
j.namespace_pids();
j.namespace_user();
j.uidmap(&format!("0 {0} 1", geteuid()))
.context("failed to set uidmap for jail")?;
j.gidmap(&format!("0 {0} 1", getegid()))
.context("failed to set gidmap for jail")?;
j.namespace_user_disable_setgroups();
// Don't need any capabilities.
j.use_caps(0);
// Create a new mount namespace with an empty root FS.
j.namespace_vfs();
j.enter_pivot_root(root)
.context("failed to set jail pivot root")?;
// Run in an empty network namespace.
j.namespace_net();
j.no_new_privs();
// By default we'll prioritize using the pre-compiled .bpf over the .policy
// file (the .bpf is expected to be compiled using "trap" as the failure
// behavior instead of the default "kill" behavior).
// Refer to the code comment for the "seccomp-log-failures"
// command-line parameter for an explanation about why the |log_failures|
// flag forces the use of .policy files (and the build-time alternative to
// this run-time flag).
let bpf_policy_file = seccomp_policy.with_extension("bpf");
if bpf_policy_file.exists() && !log_failures {
j.parse_seccomp_program(&bpf_policy_file)
.context("failed to parse jail seccomp BPF program")?;
} else {
// Use TSYNC only for the side effect of it using SECCOMP_RET_TRAP,
// which will correctly kill the entire device process if a worker
// thread commits a seccomp violation.
j.set_seccomp_filter_tsync();
if log_failures {
j.log_seccomp_filter_failures();
}
j.parse_seccomp_filters(&seccomp_policy.with_extension("policy"))
.context("failed to parse jail seccomp filter")?;
}
j.use_seccomp_filter();
// Don't do init setup.
j.run_as_init();
// Create a tmpfs in the plugin's root directory so that we can bind mount its executable
// file into it. The size=67108864 is size=64*1024*1024 or size=64MB.
j.mount_with_data(
Path::new("none"),
Path::new("/"),
"tmpfs",
(MS_NOSUID | MS_NODEV | MS_NOEXEC) as usize,
"size=67108864",
)
.context("failed to mount root")?;
// Because we requested to "run as init", minijail will not mount /proc for us even though
// plugin will be running in its own PID namespace, so we have to mount it ourselves.
j.mount(
Path::new("proc"),
Path::new("/proc"),
"proc",
(MS_NOSUID | MS_NODEV | MS_NOEXEC | MS_RDONLY) as usize,
)
.context("failed to mount proc")?;
Ok(j)
}
/// Each `PluginObject` represents one object that was instantiated by the guest using the `Create`
/// request.
///
/// Each such object has an ID associated with it that exists in an ID space shared by every variant
/// of `PluginObject`. This allows all the objects to be indexed in a single map, and allows for a
/// common destroy method.
///
/// In addition to the destroy method, each object may have methods specific to its variant type.
/// These variant methods must be done by matching the variant to the expected type for that method.
/// For example, getting the dirty log from a `Memory` object starting with an ID:
///
/// ```ignore
/// match objects.get(&request_id) {
/// Some(&PluginObject::Memory { slot, length }) => vm.get_dirty_log(slot, &mut dirty_log[..]),
/// _ => return Err(SysError::new(ENOENT)),
/// }
/// ```
enum PluginObject {
IoEvent {
evt: Event,
addr: IoeventAddress,
length: u32,
datamatch: u64,
},
Memory {
slot: u32,
length: usize,
},
IrqEvent {
irq_id: u32,
evt: Event,
},
}
impl PluginObject {
fn destroy(self, vm: &mut Vm) -> SysResult<()> {
match self {
PluginObject::IoEvent {
evt,
addr,
length,
datamatch,
} => match length {
0 => vm.unregister_ioevent(&evt, addr, Datamatch::AnyLength),
1 => vm.unregister_ioevent(&evt, addr, Datamatch::U8(Some(datamatch as u8))),
2 => vm.unregister_ioevent(&evt, addr, Datamatch::U16(Some(datamatch as u16))),
4 => vm.unregister_ioevent(&evt, addr, Datamatch::U32(Some(datamatch as u32))),
8 => vm.unregister_ioevent(&evt, addr, Datamatch::U64(Some(datamatch as u64))),
_ => Err(SysError::new(EINVAL)),
},
PluginObject::Memory { slot, .. } => vm.remove_memory_region(slot).and(Ok(())),
PluginObject::IrqEvent { irq_id, evt } => vm.unregister_irqfd(&evt, irq_id),
}
}
}
pub fn run_vcpus(
kvm: &Kvm,
vm: &Vm,
plugin: &Process,
vcpu_count: u32,
kill_signaled: &Arc<AtomicBool>,
exit_evt: &Event,
vcpu_handles: &mut Vec<thread::JoinHandle<()>>,
) -> Result<()> {
let vcpu_thread_barrier = Arc::new(Barrier::new((vcpu_count) as usize));
let use_kvm_signals = !kvm.check_extension(Cap::ImmediateExit);
// If we need to force a vcpu to exit from a VM then a SIGRTMIN signal is sent
// to that vcpu's thread. If KVM is running the VM then it'll return -EINTR.
// An issue is what to do when KVM isn't running the VM (where we could be
// in the kernel or in the app).
//
// If KVM supports "immediate exit" then we set a signal handler that will
// set the |immediate_exit| flag that tells KVM to return -EINTR before running
// the VM.
//
// If KVM doesn't support immediate exit then we'll block SIGRTMIN in the app
// and tell KVM to unblock SIGRTMIN before running the VM (at which point a blocked
// signal might get asserted). There's overhead to have KVM unblock and re-block
// SIGRTMIN each time it runs the VM, so this mode should be avoided.
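// Editorial summary (hedged): in the immediate-exit path, the handler
// registered below merely flips a thread-local flag via
// Vcpu::set_local_immediate_exit(true), so a SIGRTMIN delivered around
// KVM_RUN causes the next vcpu.run() to return EINTR without entering the
// guest.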
if use_kvm_signals {
unsafe {
extern "C" fn handle_signal(_: c_int) {}
// Our signal handler does nothing and is trivially async signal safe.
// We need to install this signal handler even though we do block
// the signal below, to ensure that this signal will interrupt
// execution of KVM_RUN (this is an implementation issue).
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
// We do not really want the signal handler to run...
block_signal(SIGRTMIN() + 0).expect("failed to block signal");
} else {
unsafe {
extern "C" fn handle_signal(_: c_int) {
Vcpu::set_local_immediate_exit(true);
}
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
}
for cpu_id in 0..vcpu_count {
let kill_signaled = kill_signaled.clone();
let vcpu_thread_barrier = vcpu_thread_barrier.clone();
let vcpu_exit_evt = exit_evt.try_clone().context("failed to clone event")?;
let vcpu_plugin = plugin.create_vcpu(cpu_id)?;
let vcpu = Vcpu::new(cpu_id as c_ulong, kvm, vm).context("error creating vcpu")?;
vcpu_handles.push(
thread::Builder::new()
.name(format!("crosvm_vcpu{}", cpu_id))
.spawn(move || {
if use_kvm_signals {
// Tell KVM not to block anything when entering KVM_RUN,
// because we will be using the first RT signal to kick the VCPU.
vcpu.set_signal_mask(&[])
.expect("failed to set up KVM VCPU signal mask");
}
if let Err(e) = enable_core_scheduling() {
error!("Failed to enable core scheduling: {}", e);
}
let vcpu = vcpu
.to_runnable(Some(SIGRTMIN() + 0))
.expect("Failed to set thread id");
let res = vcpu_plugin.init(&vcpu);
vcpu_thread_barrier.wait();
if let Err(e) = res {
error!("failed to initialize vcpu {}: {}", cpu_id, e);
} else {
loop {
let mut interrupted_by_signal = false;
let run_res = vcpu.run();
match run_res {
Ok(run) => match run {
VcpuExit::IoIn { port, mut size } => {
let mut data = [0; 256];
if size > data.len() {
error!(
"unsupported IoIn size of {} bytes at port {:#x}",
size, port
);
size = data.len();
}
vcpu_plugin.io_read(port as u64, &mut data[..size], &vcpu);
if let Err(e) = vcpu.set_data(&data[..size]) {
error!(
"failed to set return data for IoIn at port {:#x}: {}",
port, e
);
}
}
VcpuExit::IoOut {
port,
mut size,
data,
} => {
if size > data.len() {
error!("unsupported IoOut size of {} bytes at port {:#x}", size, port);
size = data.len();
}
vcpu_plugin.io_write(port as u64, &data[..size], &vcpu);
}
VcpuExit::MmioRead { address, size } => {
let mut data = [0; 8];
vcpu_plugin.mmio_read(
address as u64,
&mut data[..size],
&vcpu,
);
// Setting data for mmio can not fail.
let _ = vcpu.set_data(&data[..size]);
}
VcpuExit::MmioWrite {
address,
size,
data,
} => {
vcpu_plugin.mmio_write(
address as u64,
&data[..size],
&vcpu,
);
}
VcpuExit::HypervHcall { input, params } => {
let mut data = [0; 8];
vcpu_plugin.hyperv_call(input, params, &mut data, &vcpu);
// Setting data for hyperv call can not fail.
let _ = vcpu.set_data(&data);
}
VcpuExit::HypervSynic {
msr,
control,
evt_page,
msg_page,
} => {
vcpu_plugin
.hyperv_synic(msr, control, evt_page, msg_page, &vcpu);
}
VcpuExit::Hlt => break,
VcpuExit::Shutdown => break,
VcpuExit::InternalError => {
error!("vcpu {} has internal error", cpu_id);
break;
}
r => warn!("unexpected vcpu exit: {:?}", r),
},
Err(e) => match e.errno() {
EINTR => interrupted_by_signal = true,
EAGAIN => {}
_ => {
error!("vcpu hit unknown error: {}", e);
break;
}
},
}
if kill_signaled.load(Ordering::SeqCst) {
break;
}
// Only handle the pause request if KVM reported that it was
// interrupted by a signal. This helps to ensure that KVM has had a chance
// to finish emulating any IO that may have immediately happened.
// If we eagerly check pre_run() then any IO that we
// just reported to the plugin won't have been processed yet by KVM.
// Not eagerly calling pre_run() also helps to reduce
// any overhead from checking if a pause request is pending.
// The assumption is that pause requests aren't common
// or frequent so it's better to optimize for the non-pause execution paths.
if interrupted_by_signal {
if use_kvm_signals {
clear_signal(SIGRTMIN() + 0)
.expect("failed to clear pending signal");
} else {
vcpu.set_immediate_exit(false);
}
if let Err(e) = vcpu_plugin.pre_run(&vcpu) {
error!("failed to process pause on vcpu {}: {}", cpu_id, e);
break;
}
}
}
}
vcpu_exit_evt
.write(1)
.expect("failed to signal vcpu exit event");
})
.context("error spawning vcpu thread")?,
);
}
Ok(())
}
#[derive(PollToken)]
enum Token {
Exit,
ChildSignal,
Stderr,
Plugin { index: usize },
}
/// Run a VM with a plugin process specified by `cfg`.
///
/// Not every field of `cfg` will be used. In particular, most fields that pertain to a specific
/// device are ignored because the plugin is responsible for emulating hardware.
pub fn run_config(cfg: Config) -> Result<()> {
info!("crosvm starting plugin process");
// Masking signals is inherently dangerous, since this can persist across clones/execs. Do this
// before any jailed devices have been spawned, so that we can catch any of them that fail very
// quickly.
let sigchld_fd = SignalFd::new(SIGCHLD).context("failed to create signalfd")?;
// Create a pipe to capture error messages from plugin and minijail.
let (mut stderr_rd, stderr_wr) = pipe(true).context("failed to create stderr pipe")?;
add_fd_flags(stderr_rd.as_raw_descriptor(), O_NONBLOCK)
.context("error marking stderr nonblocking")?;
let jail = if cfg.sandbox {
// An empty directory for jailed plugin pivot root.
let root_path = match &cfg.plugin_root {
Some(dir) => dir,
None => Path::new(option_env!("DEFAULT_PIVOT_ROOT").unwrap_or("/var/empty")),
};
if root_path.is_relative() {
bail!("path to the root directory must be absolute");
}
if !root_path.exists() {
bail!("no root directory for jailed process to pivot root into");
}
if !root_path.is_dir() {
bail!("specified root directory is not a directory");
}
let policy_path = cfg.seccomp_policy_dir.join("plugin");
let mut jail = create_plugin_jail(root_path, cfg.seccomp_log_failures, &policy_path)?;
// Update gid map of the jail if caller provided supplemental groups.
if !cfg.plugin_gid_maps.is_empty() {
let map = format!("0 {} 1", getegid())
+ &cfg
.plugin_gid_maps
.into_iter()
.map(|m| format!(",{} {} {}", m.inner, m.outer, m.count))
.collect::<String>();
jail.gidmap(&map).context("failed to set gidmap for jail")?;
}
// Mount a minimal set of devices (full, zero, urandom, etc.). We cannot use
// jail.mount_dev() here because crosvm may not be running with CAP_SYS_ADMIN.
let device_names = ["full", "null", "urandom", "zero"];
for name in &device_names {
let device = Path::new("/dev").join(&name);
jail.mount_bind(&device, &device, true)
.context("failed to mount dev")?;
}
for bind_mount in &cfg.plugin_mounts {
jail.mount_bind(&bind_mount.src, &bind_mount.dst, bind_mount.writable)
.with_context(|| {
format!(
"failed to bind mount {} -> {} as {} ",
bind_mount.src.display(),
bind_mount.dst.display(),
if bind_mount.writable {
"writable"
} else {
"read only"
}
)
})?;
}
Some(jail)
} else {
None
};
let mut tap_interfaces: Vec<Tap> = Vec::new();
if let Some(host_ip) = cfg.host_ip {
if let Some(netmask) = cfg.netmask {
if let Some(mac_address) = cfg.mac_address {
let tap = Tap::new(false, false).context("error opening tap device")?;
tap.set_ip_addr(host_ip).context("error setting tap ip")?;
tap.set_netmask(netmask)
.context("error setting tap netmask")?;
tap.set_mac_address(mac_address)
.context("error setting tap mac address")?;
tap.enable().context("error enabling tap device")?;
tap_interfaces.push(tap);
}
}
}
for tap_fd in cfg.tap_fd {
// Safe because we ensure that we get a unique handle to the fd.
let tap = unsafe {
Tap::from_raw_descriptor(
validate_raw_descriptor(tap_fd).context("failed to validate raw tap fd")?,
)
.context("failed to create tap device from raw fd")?
};
tap_interfaces.push(tap);
}
let plugin_args: Vec<&str> = cfg.params.iter().map(|s| &s[..]).collect();
let plugin_path = match cfg.executable_path {
Some(Executable::Plugin(ref plugin_path)) => plugin_path.as_path(),
_ => panic!("Executable was not a plugin"),
};
let vcpu_count = cfg.vcpu_count.unwrap_or(1) as u32;
let mem = GuestMemory::new(&[]).unwrap();
let mut mem_policy = MemoryPolicy::empty();
if cfg.hugepages {
mem_policy |= MemoryPolicy::USE_HUGEPAGES;
}
mem.set_memory_policy(mem_policy);
let kvm = Kvm::new_with_path(&cfg.kvm_device_path).context("error creating Kvm")?;
let mut vm = Vm::new(&kvm, mem).context("error creating vm")?;
vm.create_irq_chip()
.context("failed to create kvm irqchip")?;
vm.create_pit().context("failed to create kvm PIT")?;
let mut plugin = Process::new(vcpu_count, plugin_path, &plugin_args, jail, stderr_wr)?;
// Now that the jail for the plugin has been created and we had a chance to adjust gids there,
// we can drop all our capabilities in case we had any.
drop_capabilities().context("failed to drop process capabilities")?;
let mut res = Ok(());
// If Some, we will exit after enough time is passed to shutdown cleanly.
let mut dying_instant: Option<Instant> = None;
let duration_to_die = Duration::from_millis(1000);
let exit_evt = Event::new().context("failed to create event")?;
let kill_signaled = Arc::new(AtomicBool::new(false));
let mut vcpu_handles = Vec::with_capacity(vcpu_count as usize);
let wait_ctx = WaitContext::build_with(&[
(&exit_evt, Token::Exit),
(&sigchld_fd, Token::ChildSignal),
(&stderr_rd, Token::Stderr),
])
.context("failed to add control descriptors to wait context")?;
let mut sockets_to_drop = Vec::new();
let mut redo_wait_ctx_sockets = true;
// In this loop, make every attempt to not return early. If an error is encountered, set `res`
// to the error, set `dying_instant` to now, and signal the plugin that it will be killed soon.
// If the plugin cannot be signaled because it is dead or `signal_kill` failed, simply break
// from the poll loop so that the VCPU threads can be cleaned up.
'wait: loop {
// After we have waited long enough, it's time to give up and exit.
if dying_instant
.map(|i| i.elapsed() >= duration_to_die)
.unwrap_or(false)
{
break;
}
if redo_wait_ctx_sockets {
for (index, socket) in plugin.sockets().iter().enumerate() {
wait_ctx
.add(socket, Token::Plugin { index })
.context("failed to add plugin sockets to wait context")?;
}
}
let plugin_socket_count = plugin.sockets().len();
let events = {
let poll_res = match dying_instant {
Some(inst) => wait_ctx.wait_timeout(duration_to_die - inst.elapsed()),
None => wait_ctx.wait(),
};
match poll_res {
Ok(v) => v,
Err(e) => {
// Polling no longer works; time to break and clean up.
if res.is_ok() {
res = Err(e).context("failed to poll all FDs");
}
break;
}
}
};
for event in events.iter().filter(|e| e.is_hungup) {
if let Token::Stderr = event.token {
let _ = wait_ctx.delete(&stderr_rd);
}
}
for event in events.iter().filter(|e| e.is_readable) {
match event.token {
Token::Exit => {
// No need to check the exit event if we are already doing cleanup.
let _ = wait_ctx.delete(&exit_evt);
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on exit event");
}
}
Token::ChildSignal => {
// Print all available siginfo structs, then exit the loop.
loop {
match sigchld_fd.read() {
Ok(Some(siginfo)) => {
// If the plugin process has ended, there is no need to continue
// processing plugin connections, so we break early.
if siginfo.ssi_pid == plugin.pid() as u32 {
break 'wait;
}
// Because SIGCHLD is not expected from anything other than the
// plugin process, report it as an error.
if res.is_ok() {
res = Err(anyhow!(
"process {} died with signal {}, status {}, and code {}",
siginfo.ssi_pid,
siginfo.ssi_signo,
siginfo.ssi_status,
siginfo.ssi_code,
));
}
}
Ok(None) => break, // No more signals to read.
Err(e) => {
// Something really must be messed up for this to happen; continue
// processing connections for a limited time.
if res.is_ok() {
res = Err(e).context("failed to read signal fd");
}
break;
}
}
}
// As we only spawn the plugin process, getting a SIGCHLD can only mean
// something went wrong.
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on SIGCHLD");
}
}
Token::Stderr => loop {
let mut buf = [0u8; 4096];
match stderr_rd.read(&mut buf) {
Ok(len) => {
for l in String::from_utf8_lossy(&buf[0..len]).lines() {
error!("minijail/plugin: {}", l);
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
break;
}
Err(e) => {
error!("failed reading from stderr: {}", e);
break;
}
}
},
Token::Plugin { index } => {
match plugin.handle_socket(index, &kvm, &mut vm, &vcpu_handles, &tap_interfaces)
{
Ok(_) => {}
// A HUP is an expected event for a socket, so don't bother warning about
// it.
Err(CommError::PluginSocketHup) => sockets_to_drop.push(index),
// Only one connection out of potentially many is broken. Drop it, but don't
// start cleaning up. Because the error isn't returned, we will warn about
// it here.
Err(e) => {
warn!("error handling plugin socket: {}", e);
sockets_to_drop.push(index);
}
}
}
}
}
if vcpu_handles.is_empty() && dying_instant.is_none() && plugin.is_started() {
let res = run_vcpus(
&kvm,
&vm,
&plugin,
vcpu_count,
&kill_signaled,
&exit_evt,
&mut vcpu_handles,
);
if let Err(e) = res {
dying_instant.get_or_insert(Instant::now());
error!("failed to start vcpus: {}", e);
}
}
redo_wait_ctx_sockets =
!sockets_to_drop.is_empty() || plugin.sockets().len() != plugin_socket_count;
// Clean up all of the sockets that we have determined were disconnected or suffered some
// other error.
plugin.drop_sockets(&mut sockets_to_drop);
sockets_to_drop.clear();
if redo_wait_ctx_sockets {
for socket in plugin.sockets() {
let _ = wait_ctx.delete(socket);
}
}
}
// vcpu threads MUST see the kill signaled flag, otherwise they may re-enter the VM.
kill_signaled.store(true, Ordering::SeqCst);
// Depending on how we ended up here, the plugin process or a VCPU thread waiting for requests
// might be stuck. The `signal_kill` call will unstick all the VCPU threads by closing their
// blocked connections.
plugin
.signal_kill()
.context("error sending kill signal to plugin on cleanup")?;
for handle in vcpu_handles {
match handle.kill(SIGRTMIN() + 0) {
Ok(_) => {
if let Err(e) = handle.join() {
error!("failed to join vcpu thread: {:?}", e);
}
}
Err(e) => error!("failed to kill vcpu thread: {}", e),
}
}
match plugin.try_wait() {
// The plugin has run out of time by now
Ok(ProcessStatus::Running) => Err(anyhow!("plugin did not exit within timeout")),
// Return an error discovered earlier in this function.
Ok(ProcessStatus::Success) => res.map_err(anyhow::Error::msg),
Ok(ProcessStatus::Fail(code)) => Err(anyhow!("plugin exited with error: {}", code)),
Ok(ProcessStatus::Signal(code)) => Err(anyhow!("plugin exited with signal {}", code)),
Err(e) => Err(anyhow!("error waiting for plugin to exit: {}", e)),
}
}
| {
match e {
MmapError::SystemCallFailed(e) => e,
_ => SysError::new(EINVAL),
}
} | identifier_body |
mod.rs | // Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod process;
mod vcpu;
use std::fs::File;
use std::io;
use std::io::Read;
use std::os::unix::net::UnixDatagram;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Barrier};
use std::thread;
use std::time::{Duration, Instant};
use libc::{
c_int, c_ulong, fcntl, ioctl, socketpair, AF_UNIX, EAGAIN, EBADF, EDEADLK, EEXIST, EINTR,
EINVAL, ENOENT, EOVERFLOW, EPERM, FIOCLEX, F_SETPIPE_SZ, MS_NODEV, MS_NOEXEC, MS_NOSUID,
MS_RDONLY, O_NONBLOCK, SIGCHLD, SOCK_SEQPACKET,
};
use protobuf::ProtobufError;
use remain::sorted;
use thiserror::Error;
use anyhow::{anyhow, bail, Context, Result};
use base::{
add_fd_flags, block_signal, clear_signal, drop_capabilities, enable_core_scheduling, error,
getegid, geteuid, info, pipe, register_rt_signal_handler, validate_raw_descriptor, warn,
AsRawDescriptor, Error as SysError, Event, FromRawDescriptor, Killable, MmapError, PollToken,
Result as SysResult, SignalFd, WaitContext, SIGRTMIN,
};
use kvm::{Cap, Datamatch, IoeventAddress, Kvm, Vcpu, VcpuExit, Vm};
use minijail::{self, Minijail};
use net_util::{Tap, TapT};
use vm_memory::{GuestMemory, MemoryPolicy};
use self::process::*;
use self::vcpu::*;
use crate::{Config, Executable};
const MAX_DATAGRAM_SIZE: usize = 4096;
const MAX_VCPU_DATAGRAM_SIZE: usize = 0x40000;
/// An error that occurs when communicating with the plugin process.
#[sorted]
#[derive(Error, Debug)]
pub enum CommError {
#[error("failed to decode plugin request: {0}")]
DecodeRequest(ProtobufError),
#[error("failed to encode plugin response: {0}")]
EncodeResponse(ProtobufError),
#[error("plugin request socket has been hung up")]
PluginSocketHup,
#[error("failed to recv from plugin request socket: {0}")]
PluginSocketRecv(SysError),
#[error("failed to send to plugin request socket: {0}")]
PluginSocketSend(SysError),
}
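/// Creates a pair of connected `SOCK_SEQPACKET` sockets, with close-on-exec
/// set on the first (crosvm-side) descriptor.
///
/// Illustrative usage (hypothetical; not part of the original source):
///
/// ```ignore
/// let (crosvm_socket, plugin_socket) = new_seqpacket_pair()?;
/// crosvm_socket.send(b"request")?;
/// ```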
fn new_seqpacket_pair() -> SysResult<(UnixDatagram, UnixDatagram)> {
let mut fds = [0, 0];
unsafe {
let ret = socketpair(AF_UNIX, SOCK_SEQPACKET, 0, fds.as_mut_ptr());
if ret == 0 {
ioctl(fds[0], FIOCLEX);
Ok((
UnixDatagram::from_raw_descriptor(fds[0]),
UnixDatagram::from_raw_descriptor(fds[1]),
))
} else {
Err(SysError::last())
}
}
}
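/// The pipe endpoints used for communicating with one plugin VCPU. Each
/// direction has a dedicated pipe: `crosvm_read`/`plugin_write` carry
/// plugin-to-crosvm traffic, and `plugin_read`/`crosvm_write` the reverse.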
struct VcpuPipe {
crosvm_read: File,
plugin_write: File,
plugin_read: File,
crosvm_write: File,
}
fn new_pipe_pair() -> SysResult<VcpuPipe> {
let to_crosvm = pipe(true)?;
let to_plugin = pipe(true)?;
    // Increasing the pipe size is a nice-to-have that helps messages get
    // across atomically (and keeps writes from blocking), though it's not
    // necessarily a hard requirement for things to work.
let flags = unsafe {
fcntl(
to_crosvm.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of crosvm pipe (result {}): {}",
flags,
SysError::last()
);
}
let flags = unsafe {
fcntl(
to_plugin.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of plugin pipe (result {}): {}",
flags,
SysError::last()
);
}
Ok(VcpuPipe {
crosvm_read: to_crosvm.0,
plugin_write: to_crosvm.1,
plugin_read: to_plugin.0,
crosvm_write: to_plugin.1,
})
}
fn proto_to_sys_err(e: ProtobufError) -> SysError {
match e {
ProtobufError::IoError(e) => SysError::new(e.raw_os_error().unwrap_or(EINVAL)),
_ => SysError::new(EINVAL),
}
}
fn io_to_sys_err(e: io::Error) -> SysError {
SysError::new(e.raw_os_error().unwrap_or(EINVAL))
}
fn mmap_to_sys_err(e: MmapError) -> SysError {
match e {
MmapError::SystemCallFailed(e) => e,
_ => SysError::new(EINVAL),
}
}
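/// Builds the minijail sandbox that the plugin process runs inside.
///
/// A minimal sketch of how this might be invoked, assuming a hypothetical
/// policy directory layout (paths are illustrative only):
///
/// ```ignore
/// let jail = create_plugin_jail(
///     Path::new("/var/empty"),
///     /* log_failures= */ false,
///     Path::new("/usr/share/policy/crosvm/plugin"),
/// )?;
/// ```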
fn create_plugin_jail(root: &Path, log_failures: bool, seccomp_policy: &Path) -> Result<Minijail> {
    // All child jails run in a new user namespace without any users mapped;
// they run as nobody unless otherwise configured.
let mut j = Minijail::new().context("failed to create jail")?;
j.namespace_pids();
j.namespace_user();
j.uidmap(&format!("0 {0} 1", geteuid()))
.context("failed to set uidmap for jail")?;
j.gidmap(&format!("0 {0} 1", getegid()))
.context("failed to set gidmap for jail")?;
j.namespace_user_disable_setgroups();
// Don't need any capabilities.
j.use_caps(0);
// Create a new mount namespace with an empty root FS.
j.namespace_vfs();
j.enter_pivot_root(root)
.context("failed to set jail pivot root")?;
// Run in an empty network namespace.
j.namespace_net();
j.no_new_privs();
// By default we'll prioritize using the pre-compiled .bpf over the .policy
// file (the .bpf is expected to be compiled using "trap" as the failure
// behavior instead of the default "kill" behavior).
// Refer to the code comment for the "seccomp-log-failures"
// command-line parameter for an explanation about why the |log_failures|
// flag forces the use of .policy files (and the build-time alternative to
// this run-time flag).
let bpf_policy_file = seccomp_policy.with_extension("bpf");
if bpf_policy_file.exists() && !log_failures {
j.parse_seccomp_program(&bpf_policy_file)
.context("failed to parse jail seccomp BPF program")?;
} else {
// Use TSYNC only for the side effect of it using SECCOMP_RET_TRAP,
// which will correctly kill the entire device process if a worker
// thread commits a seccomp violation.
j.set_seccomp_filter_tsync();
if log_failures {
j.log_seccomp_filter_failures();
}
j.parse_seccomp_filters(&seccomp_policy.with_extension("policy"))
.context("failed to parse jail seccomp filter")?;
}
j.use_seccomp_filter();
// Don't do init setup.
j.run_as_init();
    // Create a tmpfs in the plugin's root directory so that we can bind mount its executable
    // file into it. The size=67108864 is 64*1024*1024, i.e. 64 MiB.
j.mount_with_data(
Path::new("none"),
Path::new("/"),
"tmpfs",
(MS_NOSUID | MS_NODEV | MS_NOEXEC) as usize,
"size=67108864",
)
.context("failed to mount root")?;
// Because we requested to "run as init", minijail will not mount /proc for us even though
// plugin will be running in its own PID namespace, so we have to mount it ourselves.
j.mount(
Path::new("proc"),
Path::new("/proc"),
"proc",
(MS_NOSUID | MS_NODEV | MS_NOEXEC | MS_RDONLY) as usize,
)
.context("failed to mount proc")?;
Ok(j)
}
/// Each `PluginObject` represents one object that was instantiated by the guest using the `Create`
/// request.
///
/// Each such object has an ID associated with it that exists in an ID space shared by every variant
/// of `PluginObject`. This allows all the objects to be indexed in a single map, and allows for a
/// common destroy method.
///
/// In addition to the destroy method, each object may have methods specific to its variant type.
/// These variant methods must be done by matching the variant to the expected type for that method.
/// For example, getting the dirty log from a `Memory` object starting with an ID:
///
/// ```ignore
/// match objects.get(&request_id) {
/// Some(&PluginObject::Memory { slot, length }) => vm.get_dirty_log(slot, &mut dirty_log[..]),
/// _ => return Err(SysError::new(ENOENT)),
/// }
/// ```
enum PluginObject {
IoEvent {
evt: Event,
addr: IoeventAddress,
length: u32,
datamatch: u64,
},
Memory {
slot: u32,
length: usize,
},
IrqEvent {
irq_id: u32,
evt: Event,
},
}
impl PluginObject {
fn destroy(self, vm: &mut Vm) -> SysResult<()> {
match self {
PluginObject::IoEvent {
evt,
addr,
length,
datamatch,
} => match length {
0 => vm.unregister_ioevent(&evt, addr, Datamatch::AnyLength),
1 => vm.unregister_ioevent(&evt, addr, Datamatch::U8(Some(datamatch as u8))),
2 => vm.unregister_ioevent(&evt, addr, Datamatch::U16(Some(datamatch as u16))),
4 => vm.unregister_ioevent(&evt, addr, Datamatch::U32(Some(datamatch as u32))),
8 => vm.unregister_ioevent(&evt, addr, Datamatch::U64(Some(datamatch as u64))),
_ => Err(SysError::new(EINVAL)),
},
PluginObject::Memory { slot, .. } => vm.remove_memory_region(slot).and(Ok(())),
PluginObject::IrqEvent { irq_id, evt } => vm.unregister_irqfd(&evt, irq_id),
}
}
}
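/// Spawns one thread per VCPU and runs the KVM exit-handling loop on each.
/// The spawned handles are appended to `vcpu_handles` so the caller can
/// later signal and join them during cleanup.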
pub fn run_vcpus(
kvm: &Kvm,
vm: &Vm,
plugin: &Process,
vcpu_count: u32,
kill_signaled: &Arc<AtomicBool>,
exit_evt: &Event,
vcpu_handles: &mut Vec<thread::JoinHandle<()>>,
) -> Result<()> {
let vcpu_thread_barrier = Arc::new(Barrier::new((vcpu_count) as usize));
let use_kvm_signals = !kvm.check_extension(Cap::ImmediateExit);
// If we need to force a vcpu to exit from a VM then a SIGRTMIN signal is sent
// to that vcpu's thread. If KVM is running the VM then it'll return -EINTR.
// An issue is what to do when KVM isn't running the VM (where we could be
// in the kernel or in the app).
//
// If KVM supports "immediate exit" then we set a signal handler that will
// set the |immediate_exit| flag that tells KVM to return -EINTR before running
// the VM.
//
// If KVM doesn't support immediate exit then we'll block SIGRTMIN in the app
// and tell KVM to unblock SIGRTMIN before running the VM (at which point a blocked
    // signal might get asserted). There's overhead in having KVM unblock and re-block
// SIGRTMIN each time it runs the VM, so this mode should be avoided.
if use_kvm_signals {
unsafe {
extern "C" fn handle_signal(_: c_int) {}
// Our signal handler does nothing and is trivially async signal safe.
// We need to install this signal handler even though we do block
// the signal below, to ensure that this signal will interrupt
            // execution of KVM_RUN (this is an implementation issue).
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
// We do not really want the signal handler to run...
block_signal(SIGRTMIN() + 0).expect("failed to block signal");
} else {
unsafe {
extern "C" fn handle_signal(_: c_int) {
Vcpu::set_local_immediate_exit(true);
}
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
}
for cpu_id in 0..vcpu_count {
let kill_signaled = kill_signaled.clone();
let vcpu_thread_barrier = vcpu_thread_barrier.clone();
let vcpu_exit_evt = exit_evt.try_clone().context("failed to clone event")?;
let vcpu_plugin = plugin.create_vcpu(cpu_id)?;
let vcpu = Vcpu::new(cpu_id as c_ulong, kvm, vm).context("error creating vcpu")?;
vcpu_handles.push(
thread::Builder::new()
.name(format!("crosvm_vcpu{}", cpu_id))
.spawn(move || {
if use_kvm_signals {
// Tell KVM to not block anything when entering kvm run
                        // because we will be using the first RT signal to kick the VCPU.
vcpu.set_signal_mask(&[])
.expect("failed to set up KVM VCPU signal mask");
}
if let Err(e) = enable_core_scheduling() {
error!("Failed to enable core scheduling: {}", e);
}
let vcpu = vcpu
.to_runnable(Some(SIGRTMIN() + 0)) |
let res = vcpu_plugin.init(&vcpu);
vcpu_thread_barrier.wait();
if let Err(e) = res {
error!("failed to initialize vcpu {}: {}", cpu_id, e);
} else {
loop {
let mut interrupted_by_signal = false;
let run_res = vcpu.run();
match run_res {
Ok(run) => match run {
VcpuExit::IoIn { port, mut size } => {
let mut data = [0; 256];
if size > data.len() {
error!(
"unsupported IoIn size of {} bytes at port {:#x}",
size, port
);
size = data.len();
}
vcpu_plugin.io_read(port as u64, &mut data[..size], &vcpu);
if let Err(e) = vcpu.set_data(&data[..size]) {
error!(
"failed to set return data for IoIn at port {:#x}: {}",
port, e
);
}
}
VcpuExit::IoOut {
port,
mut size,
data,
} => {
if size > data.len() {
error!("unsupported IoOut size of {} bytes at port {:#x}", size, port);
size = data.len();
}
vcpu_plugin.io_write(port as u64, &data[..size], &vcpu);
}
VcpuExit::MmioRead { address, size } => {
let mut data = [0; 8];
vcpu_plugin.mmio_read(
address as u64,
&mut data[..size],
&vcpu,
);
// Setting data for mmio can not fail.
let _ = vcpu.set_data(&data[..size]);
}
VcpuExit::MmioWrite {
address,
size,
data,
} => {
vcpu_plugin.mmio_write(
address as u64,
&data[..size],
&vcpu,
);
}
VcpuExit::HypervHcall { input, params } => {
let mut data = [0; 8];
vcpu_plugin.hyperv_call(input, params, &mut data, &vcpu);
// Setting data for hyperv call can not fail.
let _ = vcpu.set_data(&data);
}
VcpuExit::HypervSynic {
msr,
control,
evt_page,
msg_page,
} => {
vcpu_plugin
.hyperv_synic(msr, control, evt_page, msg_page, &vcpu);
}
VcpuExit::Hlt => break,
VcpuExit::Shutdown => break,
VcpuExit::InternalError => {
error!("vcpu {} has internal error", cpu_id);
break;
}
r => warn!("unexpected vcpu exit: {:?}", r),
},
Err(e) => match e.errno() {
EINTR => interrupted_by_signal = true,
EAGAIN => {}
_ => {
error!("vcpu hit unknown error: {}", e);
break;
}
},
}
if kill_signaled.load(Ordering::SeqCst) {
break;
}
// Only handle the pause request if kvm reported that it was
                    // interrupted by a signal. This helps to ensure that KVM has had a chance
// to finish emulating any IO that may have immediately happened.
// If we eagerly check pre_run() then any IO that we
// just reported to the plugin won't have been processed yet by KVM.
// Not eagerly calling pre_run() also helps to reduce
// any overhead from checking if a pause request is pending.
// The assumption is that pause requests aren't common
// or frequent so it's better to optimize for the non-pause execution paths.
if interrupted_by_signal {
if use_kvm_signals {
clear_signal(SIGRTMIN() + 0)
.expect("failed to clear pending signal");
} else {
vcpu.set_immediate_exit(false);
}
if let Err(e) = vcpu_plugin.pre_run(&vcpu) {
error!("failed to process pause on vcpu {}: {}", cpu_id, e);
break;
}
}
}
}
vcpu_exit_evt
.write(1)
.expect("failed to signal vcpu exit event");
})
.context("error spawning vcpu thread")?,
);
}
Ok(())
}
#[derive(PollToken)]
enum Token {
Exit,
ChildSignal,
Stderr,
Plugin { index: usize },
}
/// Run a VM with a plugin process specified by `cfg`.
///
/// Not every field of `cfg` will be used. In particular, most fields that pertain to a specific
/// device are ignored because the plugin is responsible for emulating hardware.
pub fn run_config(cfg: Config) -> Result<()> {
info!("crosvm starting plugin process");
// Masking signals is inherently dangerous, since this can persist across clones/execs. Do this
// before any jailed devices have been spawned, so that we can catch any of them that fail very
// quickly.
let sigchld_fd = SignalFd::new(SIGCHLD).context("failed to create signalfd")?;
// Create a pipe to capture error messages from plugin and minijail.
let (mut stderr_rd, stderr_wr) = pipe(true).context("failed to create stderr pipe")?;
add_fd_flags(stderr_rd.as_raw_descriptor(), O_NONBLOCK)
.context("error marking stderr nonblocking")?;
let jail = if cfg.sandbox {
// An empty directory for jailed plugin pivot root.
let root_path = match &cfg.plugin_root {
Some(dir) => dir,
None => Path::new(option_env!("DEFAULT_PIVOT_ROOT").unwrap_or("/var/empty")),
};
if root_path.is_relative() {
bail!("path to the root directory must be absolute");
}
if !root_path.exists() {
bail!("no root directory for jailed process to pivot root into");
}
if !root_path.is_dir() {
bail!("specified root directory is not a directory");
}
let policy_path = cfg.seccomp_policy_dir.join("plugin");
let mut jail = create_plugin_jail(root_path, cfg.seccomp_log_failures, &policy_path)?;
// Update gid map of the jail if caller provided supplemental groups.
if !cfg.plugin_gid_maps.is_empty() {
let map = format!("0 {} 1", getegid())
+ &cfg
.plugin_gid_maps
.into_iter()
.map(|m| format!(",{} {} {}", m.inner, m.outer, m.count))
.collect::<String>();
jail.gidmap(&map).context("failed to set gidmap for jail")?;
}
        // Mount a minimal set of devices (full, zero, urandom, etc.). We cannot use
// jail.mount_dev() here because crosvm may not be running with CAP_SYS_ADMIN.
let device_names = ["full", "null", "urandom", "zero"];
for name in &device_names {
let device = Path::new("/dev").join(&name);
jail.mount_bind(&device, &device, true)
.context("failed to mount dev")?;
}
for bind_mount in &cfg.plugin_mounts {
jail.mount_bind(&bind_mount.src, &bind_mount.dst, bind_mount.writable)
.with_context(|| {
format!(
"failed to bind mount {} -> {} as {} ",
bind_mount.src.display(),
bind_mount.dst.display(),
if bind_mount.writable {
"writable"
} else {
"read only"
}
)
})?;
}
Some(jail)
} else {
None
};
let mut tap_interfaces: Vec<Tap> = Vec::new();
if let Some(host_ip) = cfg.host_ip {
if let Some(netmask) = cfg.netmask {
if let Some(mac_address) = cfg.mac_address {
let tap = Tap::new(false, false).context("error opening tap device")?;
tap.set_ip_addr(host_ip).context("error setting tap ip")?;
tap.set_netmask(netmask)
.context("error setting tap netmask")?;
tap.set_mac_address(mac_address)
.context("error setting tap mac address")?;
tap.enable().context("error enabling tap device")?;
tap_interfaces.push(tap);
}
}
}
for tap_fd in cfg.tap_fd {
// Safe because we ensure that we get a unique handle to the fd.
let tap = unsafe {
Tap::from_raw_descriptor(
validate_raw_descriptor(tap_fd).context("failed to validate raw tap fd")?,
)
.context("failed to create tap device from raw fd")?
};
tap_interfaces.push(tap);
}
let plugin_args: Vec<&str> = cfg.params.iter().map(|s| &s[..]).collect();
let plugin_path = match cfg.executable_path {
Some(Executable::Plugin(ref plugin_path)) => plugin_path.as_path(),
_ => panic!("Executable was not a plugin"),
};
let vcpu_count = cfg.vcpu_count.unwrap_or(1) as u32;
let mem = GuestMemory::new(&[]).unwrap();
let mut mem_policy = MemoryPolicy::empty();
if cfg.hugepages {
mem_policy |= MemoryPolicy::USE_HUGEPAGES;
}
mem.set_memory_policy(mem_policy);
let kvm = Kvm::new_with_path(&cfg.kvm_device_path).context("error creating Kvm")?;
let mut vm = Vm::new(&kvm, mem).context("error creating vm")?;
vm.create_irq_chip()
.context("failed to create kvm irqchip")?;
vm.create_pit().context("failed to create kvm PIT")?;
let mut plugin = Process::new(vcpu_count, plugin_path, &plugin_args, jail, stderr_wr)?;
// Now that the jail for the plugin has been created and we had a chance to adjust gids there,
// we can drop all our capabilities in case we had any.
drop_capabilities().context("failed to drop process capabilities")?;
let mut res = Ok(());
    // If Some, we will exit after enough time has passed to shut down cleanly.
let mut dying_instant: Option<Instant> = None;
let duration_to_die = Duration::from_millis(1000);
let exit_evt = Event::new().context("failed to create event")?;
let kill_signaled = Arc::new(AtomicBool::new(false));
let mut vcpu_handles = Vec::with_capacity(vcpu_count as usize);
let wait_ctx = WaitContext::build_with(&[
(&exit_evt, Token::Exit),
(&sigchld_fd, Token::ChildSignal),
(&stderr_rd, Token::Stderr),
])
.context("failed to add control descriptors to wait context")?;
let mut sockets_to_drop = Vec::new();
let mut redo_wait_ctx_sockets = true;
// In this loop, make every attempt to not return early. If an error is encountered, set `res`
// to the error, set `dying_instant` to now, and signal the plugin that it will be killed soon.
    // If the plugin cannot be signaled because it is dead or `signal_kill` failed, simply break
// from the poll loop so that the VCPU threads can be cleaned up.
'wait: loop {
// After we have waited long enough, it's time to give up and exit.
if dying_instant
.map(|i| i.elapsed() >= duration_to_die)
.unwrap_or(false)
{
break;
}
if redo_wait_ctx_sockets {
for (index, socket) in plugin.sockets().iter().enumerate() {
wait_ctx
.add(socket, Token::Plugin { index })
.context("failed to add plugin sockets to wait context")?;
}
}
let plugin_socket_count = plugin.sockets().len();
let events = {
let poll_res = match dying_instant {
Some(inst) => wait_ctx.wait_timeout(duration_to_die - inst.elapsed()),
None => wait_ctx.wait(),
};
match poll_res {
Ok(v) => v,
Err(e) => {
                // Polling no longer works; time to break out and clean up.
if res.is_ok() {
res = Err(e).context("failed to poll all FDs");
}
break;
}
}
};
for event in events.iter().filter(|e| e.is_hungup) {
if let Token::Stderr = event.token {
let _ = wait_ctx.delete(&stderr_rd);
}
}
for event in events.iter().filter(|e| e.is_readable) {
match event.token {
Token::Exit => {
// No need to check the exit event if we are already doing cleanup.
let _ = wait_ctx.delete(&exit_evt);
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on exit event");
}
}
Token::ChildSignal => {
// Print all available siginfo structs, then exit the loop.
loop {
match sigchld_fd.read() {
Ok(Some(siginfo)) => {
// If the plugin process has ended, there is no need to continue
// processing plugin connections, so we break early.
if siginfo.ssi_pid == plugin.pid() as u32 {
break 'wait;
}
// Because SIGCHLD is not expected from anything other than the
// plugin process, report it as an error.
if res.is_ok() {
res = Err(anyhow!(
"process {} died with signal {}, status {}, and code {}",
siginfo.ssi_pid,
siginfo.ssi_signo,
siginfo.ssi_status,
siginfo.ssi_code,
));
}
}
Ok(None) => break, // No more signals to read.
Err(e) => {
// Something really must be messed up for this to happen, continue
// processing connections for a limited time.
if res.is_ok() {
res = Err(e).context("failed to read signal fd");
}
break;
}
}
}
// As we only spawn the plugin process, getting a SIGCHLD can only mean
// something went wrong.
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on SIGCHLD");
}
}
Token::Stderr => loop {
let mut buf = [0u8; 4096];
match stderr_rd.read(&mut buf) {
Ok(len) => {
for l in String::from_utf8_lossy(&buf[0..len]).lines() {
error!("minijail/plugin: {}", l);
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
break;
}
Err(e) => {
error!("failed reading from stderr: {}", e);
break;
}
}
},
Token::Plugin { index } => {
match plugin.handle_socket(index, &kvm, &mut vm, &vcpu_handles, &tap_interfaces)
{
Ok(_) => {}
// A HUP is an expected event for a socket, so don't bother warning about
// it.
Err(CommError::PluginSocketHup) => sockets_to_drop.push(index),
// Only one connection out of potentially many is broken. Drop it, but don't
// start cleaning up. Because the error isn't returned, we will warn about
// it here.
Err(e) => {
warn!("error handling plugin socket: {}", e);
sockets_to_drop.push(index);
}
}
}
}
}
if vcpu_handles.is_empty() && dying_instant.is_none() && plugin.is_started() {
let res = run_vcpus(
&kvm,
&vm,
&plugin,
vcpu_count,
&kill_signaled,
&exit_evt,
&mut vcpu_handles,
);
if let Err(e) = res {
dying_instant.get_or_insert(Instant::now());
error!("failed to start vcpus: {}", e);
}
}
redo_wait_ctx_sockets =
!sockets_to_drop.is_empty() || plugin.sockets().len() != plugin_socket_count;
        // Clean up all of the sockets that we have determined were disconnected or suffered some
// other error.
plugin.drop_sockets(&mut sockets_to_drop);
sockets_to_drop.clear();
if redo_wait_ctx_sockets {
for socket in plugin.sockets() {
let _ = wait_ctx.delete(socket);
}
}
}
// vcpu threads MUST see the kill signaled flag, otherwise they may re-enter the VM.
kill_signaled.store(true, Ordering::SeqCst);
// Depending on how we ended up here, the plugin process, or a VCPU thread waiting for requests
// might be stuck. The `signal_kill` call will unstick all the VCPU threads by closing their
// blocked connections.
plugin
.signal_kill()
.context("error sending kill signal to plugin on cleanup")?;
for handle in vcpu_handles {
match handle.kill(SIGRTMIN() + 0) {
Ok(_) => {
if let Err(e) = handle.join() {
error!("failed to join vcpu thread: {:?}", e);
}
}
Err(e) => error!("failed to kill vcpu thread: {}", e),
}
}
match plugin.try_wait() {
// The plugin has run out of time by now
Ok(ProcessStatus::Running) => Err(anyhow!("plugin did not exit within timeout")),
// Return an error discovered earlier in this function.
Ok(ProcessStatus::Success) => res.map_err(anyhow::Error::msg),
Ok(ProcessStatus::Fail(code)) => Err(anyhow!("plugin exited with error: {}", code)),
Ok(ProcessStatus::Signal(code)) => Err(anyhow!("plugin exited with signal {}", code)),
Err(e) => Err(anyhow!("error waiting for plugin to exit: {}", e)),
}
} | .expect("Failed to set thread id"); | random_line_split |
mod.rs | // Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod process;
mod vcpu;
use std::fs::File;
use std::io;
use std::io::Read;
use std::os::unix::net::UnixDatagram;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Barrier};
use std::thread;
use std::time::{Duration, Instant};
use libc::{
c_int, c_ulong, fcntl, ioctl, socketpair, AF_UNIX, EAGAIN, EBADF, EDEADLK, EEXIST, EINTR,
EINVAL, ENOENT, EOVERFLOW, EPERM, FIOCLEX, F_SETPIPE_SZ, MS_NODEV, MS_NOEXEC, MS_NOSUID,
MS_RDONLY, O_NONBLOCK, SIGCHLD, SOCK_SEQPACKET,
};
use protobuf::ProtobufError;
use remain::sorted;
use thiserror::Error;
use anyhow::{anyhow, bail, Context, Result};
use base::{
add_fd_flags, block_signal, clear_signal, drop_capabilities, enable_core_scheduling, error,
getegid, geteuid, info, pipe, register_rt_signal_handler, validate_raw_descriptor, warn,
AsRawDescriptor, Error as SysError, Event, FromRawDescriptor, Killable, MmapError, PollToken,
Result as SysResult, SignalFd, WaitContext, SIGRTMIN,
};
use kvm::{Cap, Datamatch, IoeventAddress, Kvm, Vcpu, VcpuExit, Vm};
use minijail::{self, Minijail};
use net_util::{Tap, TapT};
use vm_memory::{GuestMemory, MemoryPolicy};
use self::process::*;
use self::vcpu::*;
use crate::{Config, Executable};
const MAX_DATAGRAM_SIZE: usize = 4096;
const MAX_VCPU_DATAGRAM_SIZE: usize = 0x40000;
/// An error that occurs when communicating with the plugin process.
#[sorted]
#[derive(Error, Debug)]
pub enum CommError {
#[error("failed to decode plugin request: {0}")]
DecodeRequest(ProtobufError),
#[error("failed to encode plugin response: {0}")]
EncodeResponse(ProtobufError),
#[error("plugin request socket has been hung up")]
PluginSocketHup,
#[error("failed to recv from plugin request socket: {0}")]
PluginSocketRecv(SysError),
#[error("failed to send to plugin request socket: {0}")]
PluginSocketSend(SysError),
}
fn new_seqpacket_pair() -> SysResult<(UnixDatagram, UnixDatagram)> {
let mut fds = [0, 0];
unsafe {
let ret = socketpair(AF_UNIX, SOCK_SEQPACKET, 0, fds.as_mut_ptr());
if ret == 0 {
ioctl(fds[0], FIOCLEX);
Ok((
UnixDatagram::from_raw_descriptor(fds[0]),
UnixDatagram::from_raw_descriptor(fds[1]),
))
} else {
Err(SysError::last())
}
}
}
struct VcpuPipe {
crosvm_read: File,
plugin_write: File,
plugin_read: File,
crosvm_write: File,
}
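/// Creates both pipe pairs for one VCPU connection and attempts to grow each
/// pipe to `MAX_VCPU_DATAGRAM_SIZE` so messages can be written atomically.
/// A failed resize is logged but tolerated, since the larger size is an
/// optimization rather than a requirement.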
fn new_pipe_pair() -> SysResult<VcpuPipe> {
let to_crosvm = pipe(true)?;
let to_plugin = pipe(true)?;
    // Increasing the pipe size is a nice-to-have that helps messages get
    // across atomically (and keeps writes from blocking), though it's not
    // necessarily a hard requirement for things to work.
let flags = unsafe {
fcntl(
to_crosvm.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of crosvm pipe (result {}): {}",
flags,
SysError::last()
);
}
let flags = unsafe {
fcntl(
to_plugin.0.as_raw_descriptor(),
F_SETPIPE_SZ,
MAX_VCPU_DATAGRAM_SIZE as c_int,
)
};
if flags < 0 || flags != MAX_VCPU_DATAGRAM_SIZE as i32 {
warn!(
"Failed to adjust size of plugin pipe (result {}): {}",
flags,
SysError::last()
);
}
Ok(VcpuPipe {
crosvm_read: to_crosvm.0,
plugin_write: to_crosvm.1,
plugin_read: to_plugin.0,
crosvm_write: to_plugin.1,
})
}
fn proto_to_sys_err(e: ProtobufError) -> SysError {
match e {
ProtobufError::IoError(e) => SysError::new(e.raw_os_error().unwrap_or(EINVAL)),
_ => SysError::new(EINVAL),
}
}
fn io_to_sys_err(e: io::Error) -> SysError {
SysError::new(e.raw_os_error().unwrap_or(EINVAL))
}
fn mmap_to_sys_err(e: MmapError) -> SysError {
match e {
MmapError::SystemCallFailed(e) => e,
_ => SysError::new(EINVAL),
}
}
fn create_plugin_jail(root: &Path, log_failures: bool, seccomp_policy: &Path) -> Result<Minijail> {
    // All child jails run in a new user namespace without any users mapped;
// they run as nobody unless otherwise configured.
let mut j = Minijail::new().context("failed to create jail")?;
j.namespace_pids();
j.namespace_user();
j.uidmap(&format!("0 {0} 1", geteuid()))
.context("failed to set uidmap for jail")?;
j.gidmap(&format!("0 {0} 1", getegid()))
.context("failed to set gidmap for jail")?;
j.namespace_user_disable_setgroups();
// Don't need any capabilities.
j.use_caps(0);
// Create a new mount namespace with an empty root FS.
j.namespace_vfs();
j.enter_pivot_root(root)
.context("failed to set jail pivot root")?;
// Run in an empty network namespace.
j.namespace_net();
j.no_new_privs();
// By default we'll prioritize using the pre-compiled .bpf over the .policy
// file (the .bpf is expected to be compiled using "trap" as the failure
// behavior instead of the default "kill" behavior).
// Refer to the code comment for the "seccomp-log-failures"
// command-line parameter for an explanation about why the |log_failures|
// flag forces the use of .policy files (and the build-time alternative to
// this run-time flag).
let bpf_policy_file = seccomp_policy.with_extension("bpf");
if bpf_policy_file.exists() && !log_failures {
j.parse_seccomp_program(&bpf_policy_file)
.context("failed to parse jail seccomp BPF program")?;
} else {
// Use TSYNC only for the side effect of it using SECCOMP_RET_TRAP,
// which will correctly kill the entire device process if a worker
// thread commits a seccomp violation.
j.set_seccomp_filter_tsync();
if log_failures {
j.log_seccomp_filter_failures();
}
j.parse_seccomp_filters(&seccomp_policy.with_extension("policy"))
.context("failed to parse jail seccomp filter")?;
}
j.use_seccomp_filter();
// Don't do init setup.
j.run_as_init();
    // Create a tmpfs in the plugin's root directory so that we can bind mount its executable
    // file into it. The size=67108864 is 64*1024*1024, i.e. 64 MiB.
j.mount_with_data(
Path::new("none"),
Path::new("/"),
"tmpfs",
(MS_NOSUID | MS_NODEV | MS_NOEXEC) as usize,
"size=67108864",
)
.context("failed to mount root")?;
// Because we requested to "run as init", minijail will not mount /proc for us even though
// plugin will be running in its own PID namespace, so we have to mount it ourselves.
j.mount(
Path::new("proc"),
Path::new("/proc"),
"proc",
(MS_NOSUID | MS_NODEV | MS_NOEXEC | MS_RDONLY) as usize,
)
.context("failed to mount proc")?;
Ok(j)
}
/// Each `PluginObject` represents one object that was instantiated by the guest using the `Create`
/// request.
///
/// Each such object has an ID associated with it that exists in an ID space shared by every variant
/// of `PluginObject`. This allows all the objects to be indexed in a single map, and allows for a
/// common destroy method.
///
/// In addition to the destroy method, each object may have methods specific to its variant type.
/// These variant methods must be done by matching the variant to the expected type for that method.
/// For example, getting the dirty log from a `Memory` object starting with an ID:
///
/// ```ignore
/// match objects.get(&request_id) {
/// Some(&PluginObject::Memory { slot, length }) => vm.get_dirty_log(slot, &mut dirty_log[..]),
/// _ => return Err(SysError::new(ENOENT)),
/// }
/// ```
enum | {
IoEvent {
evt: Event,
addr: IoeventAddress,
length: u32,
datamatch: u64,
},
Memory {
slot: u32,
length: usize,
},
IrqEvent {
irq_id: u32,
evt: Event,
},
}
impl PluginObject {
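    /// Destroys this object, unregistering the corresponding resource from
    /// `vm`. Consumes `self`, so the object cannot be used afterwards.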
fn destroy(self, vm: &mut Vm) -> SysResult<()> {
match self {
PluginObject::IoEvent {
evt,
addr,
length,
datamatch,
} => match length {
0 => vm.unregister_ioevent(&evt, addr, Datamatch::AnyLength),
1 => vm.unregister_ioevent(&evt, addr, Datamatch::U8(Some(datamatch as u8))),
2 => vm.unregister_ioevent(&evt, addr, Datamatch::U16(Some(datamatch as u16))),
4 => vm.unregister_ioevent(&evt, addr, Datamatch::U32(Some(datamatch as u32))),
8 => vm.unregister_ioevent(&evt, addr, Datamatch::U64(Some(datamatch as u64))),
_ => Err(SysError::new(EINVAL)),
},
PluginObject::Memory { slot, .. } => vm.remove_memory_region(slot).and(Ok(())),
PluginObject::IrqEvent { irq_id, evt } => vm.unregister_irqfd(&evt, irq_id),
}
}
}
pub fn run_vcpus(
kvm: &Kvm,
vm: &Vm,
plugin: &Process,
vcpu_count: u32,
kill_signaled: &Arc<AtomicBool>,
exit_evt: &Event,
vcpu_handles: &mut Vec<thread::JoinHandle<()>>,
) -> Result<()> {
let vcpu_thread_barrier = Arc::new(Barrier::new((vcpu_count) as usize));
let use_kvm_signals = !kvm.check_extension(Cap::ImmediateExit);
// If we need to force a vcpu to exit from a VM then a SIGRTMIN signal is sent
// to that vcpu's thread. If KVM is running the VM then it'll return -EINTR.
// An issue is what to do when KVM isn't running the VM (where we could be
// in the kernel or in the app).
//
// If KVM supports "immediate exit" then we set a signal handler that will
// set the |immediate_exit| flag that tells KVM to return -EINTR before running
// the VM.
//
// If KVM doesn't support immediate exit then we'll block SIGRTMIN in the app
// and tell KVM to unblock SIGRTMIN before running the VM (at which point a blocked
    // signal might get asserted). There's overhead in having KVM unblock and re-block
// SIGRTMIN each time it runs the VM, so this mode should be avoided.
if use_kvm_signals {
unsafe {
extern "C" fn handle_signal(_: c_int) {}
// Our signal handler does nothing and is trivially async signal safe.
// We need to install this signal handler even though we do block
// the signal below, to ensure that this signal will interrupt
            // execution of KVM_RUN (this is an implementation issue).
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
// We do not really want the signal handler to run...
block_signal(SIGRTMIN() + 0).expect("failed to block signal");
} else {
unsafe {
extern "C" fn handle_signal(_: c_int) {
Vcpu::set_local_immediate_exit(true);
}
register_rt_signal_handler(SIGRTMIN() + 0, handle_signal)
.expect("failed to register vcpu signal handler");
}
}
for cpu_id in 0..vcpu_count {
let kill_signaled = kill_signaled.clone();
let vcpu_thread_barrier = vcpu_thread_barrier.clone();
let vcpu_exit_evt = exit_evt.try_clone().context("failed to clone event")?;
let vcpu_plugin = plugin.create_vcpu(cpu_id)?;
let vcpu = Vcpu::new(cpu_id as c_ulong, kvm, vm).context("error creating vcpu")?;
vcpu_handles.push(
thread::Builder::new()
.name(format!("crosvm_vcpu{}", cpu_id))
.spawn(move || {
if use_kvm_signals {
// Tell KVM to not block anything when entering kvm run
                        // because we will be using the first RT signal to kick the VCPU.
vcpu.set_signal_mask(&[])
.expect("failed to set up KVM VCPU signal mask");
}
if let Err(e) = enable_core_scheduling() {
error!("Failed to enable core scheduling: {}", e);
}
let vcpu = vcpu
.to_runnable(Some(SIGRTMIN() + 0))
.expect("Failed to set thread id");
let res = vcpu_plugin.init(&vcpu);
vcpu_thread_barrier.wait();
if let Err(e) = res {
error!("failed to initialize vcpu {}: {}", cpu_id, e);
} else {
loop {
let mut interrupted_by_signal = false;
let run_res = vcpu.run();
match run_res {
Ok(run) => match run {
VcpuExit::IoIn { port, mut size } => {
let mut data = [0; 256];
if size > data.len() {
error!(
"unsupported IoIn size of {} bytes at port {:#x}",
size, port
);
size = data.len();
}
vcpu_plugin.io_read(port as u64, &mut data[..size], &vcpu);
if let Err(e) = vcpu.set_data(&data[..size]) {
error!(
"failed to set return data for IoIn at port {:#x}: {}",
port, e
);
}
}
VcpuExit::IoOut {
port,
mut size,
data,
} => {
if size > data.len() {
error!("unsupported IoOut size of {} bytes at port {:#x}", size, port);
size = data.len();
}
vcpu_plugin.io_write(port as u64, &data[..size], &vcpu);
}
VcpuExit::MmioRead { address, size } => {
let mut data = [0; 8];
vcpu_plugin.mmio_read(
address as u64,
&mut data[..size],
&vcpu,
);
// Setting data for mmio can not fail.
let _ = vcpu.set_data(&data[..size]);
}
VcpuExit::MmioWrite {
address,
size,
data,
} => {
vcpu_plugin.mmio_write(
address as u64,
&data[..size],
&vcpu,
);
}
VcpuExit::HypervHcall { input, params } => {
let mut data = [0; 8];
vcpu_plugin.hyperv_call(input, params, &mut data, &vcpu);
// Setting data for hyperv call can not fail.
let _ = vcpu.set_data(&data);
}
VcpuExit::HypervSynic {
msr,
control,
evt_page,
msg_page,
} => {
vcpu_plugin
.hyperv_synic(msr, control, evt_page, msg_page, &vcpu);
}
VcpuExit::Hlt => break,
VcpuExit::Shutdown => break,
VcpuExit::InternalError => {
error!("vcpu {} has internal error", cpu_id);
break;
}
r => warn!("unexpected vcpu exit: {:?}", r),
},
Err(e) => match e.errno() {
EINTR => interrupted_by_signal = true,
EAGAIN => {}
_ => {
error!("vcpu hit unknown error: {}", e);
break;
}
},
}
if kill_signaled.load(Ordering::SeqCst) {
break;
}
// Only handle the pause request if kvm reported that it was
                    // interrupted by a signal. This helps to ensure that KVM has had a chance
// to finish emulating any IO that may have immediately happened.
// If we eagerly check pre_run() then any IO that we
// just reported to the plugin won't have been processed yet by KVM.
// Not eagerly calling pre_run() also helps to reduce
// any overhead from checking if a pause request is pending.
// The assumption is that pause requests aren't common
// or frequent so it's better to optimize for the non-pause execution paths.
if interrupted_by_signal {
if use_kvm_signals {
clear_signal(SIGRTMIN() + 0)
.expect("failed to clear pending signal");
} else {
vcpu.set_immediate_exit(false);
}
if let Err(e) = vcpu_plugin.pre_run(&vcpu) {
error!("failed to process pause on vcpu {}: {}", cpu_id, e);
break;
}
}
}
}
vcpu_exit_evt
.write(1)
.expect("failed to signal vcpu exit event");
})
.context("error spawning vcpu thread")?,
);
}
Ok(())
}
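/// Identifies which descriptor triggered a `WaitContext` event in the main
/// `run_config` loop below.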
#[derive(PollToken)]
enum Token {
Exit,
ChildSignal,
Stderr,
Plugin { index: usize },
}
/// Run a VM with a plugin process specified by `cfg`.
///
/// Not every field of `cfg` will be used. In particular, most fields that pertain to a specific
/// device are ignored because the plugin is responsible for emulating hardware.
pub fn run_config(cfg: Config) -> Result<()> {
info!("crosvm starting plugin process");
// Masking signals is inherently dangerous, since this can persist across clones/execs. Do this
// before any jailed devices have been spawned, so that we can catch any of them that fail very
// quickly.
let sigchld_fd = SignalFd::new(SIGCHLD).context("failed to create signalfd")?;
// Create a pipe to capture error messages from plugin and minijail.
let (mut stderr_rd, stderr_wr) = pipe(true).context("failed to create stderr pipe")?;
add_fd_flags(stderr_rd.as_raw_descriptor(), O_NONBLOCK)
.context("error marking stderr nonblocking")?;
let jail = if cfg.sandbox {
// An empty directory for jailed plugin pivot root.
let root_path = match &cfg.plugin_root {
Some(dir) => dir,
None => Path::new(option_env!("DEFAULT_PIVOT_ROOT").unwrap_or("/var/empty")),
};
if root_path.is_relative() {
bail!("path to the root directory must be absolute");
}
if !root_path.exists() {
bail!("no root directory for jailed process to pivot root into");
}
if !root_path.is_dir() {
bail!("specified root directory is not a directory");
}
let policy_path = cfg.seccomp_policy_dir.join("plugin");
let mut jail = create_plugin_jail(root_path, cfg.seccomp_log_failures, &policy_path)?;
// Update gid map of the jail if caller provided supplemental groups.
if !cfg.plugin_gid_maps.is_empty() {
let map = format!("0 {} 1", getegid())
+ &cfg
.plugin_gid_maps
.into_iter()
.map(|m| format!(",{} {} {}", m.inner, m.outer, m.count))
.collect::<String>();
jail.gidmap(&map).context("failed to set gidmap for jail")?;
}
        // Mount a minimal set of devices (full, zero, urandom, etc.). We cannot use
// jail.mount_dev() here because crosvm may not be running with CAP_SYS_ADMIN.
let device_names = ["full", "null", "urandom", "zero"];
for name in &device_names {
let device = Path::new("/dev").join(&name);
jail.mount_bind(&device, &device, true)
.context("failed to mount dev")?;
}
for bind_mount in &cfg.plugin_mounts {
jail.mount_bind(&bind_mount.src, &bind_mount.dst, bind_mount.writable)
.with_context(|| {
format!(
"failed to bind mount {} -> {} as {} ",
bind_mount.src.display(),
bind_mount.dst.display(),
if bind_mount.writable {
"writable"
} else {
"read only"
}
)
})?;
}
Some(jail)
} else {
None
};
let mut tap_interfaces: Vec<Tap> = Vec::new();
if let Some(host_ip) = cfg.host_ip {
if let Some(netmask) = cfg.netmask {
if let Some(mac_address) = cfg.mac_address {
let tap = Tap::new(false, false).context("error opening tap device")?;
tap.set_ip_addr(host_ip).context("error setting tap ip")?;
tap.set_netmask(netmask)
.context("error setting tap netmask")?;
tap.set_mac_address(mac_address)
.context("error setting tap mac address")?;
tap.enable().context("error enabling tap device")?;
tap_interfaces.push(tap);
}
}
}
for tap_fd in cfg.tap_fd {
// Safe because we ensure that we get a unique handle to the fd.
let tap = unsafe {
Tap::from_raw_descriptor(
validate_raw_descriptor(tap_fd).context("failed to validate raw tap fd")?,
)
.context("failed to create tap device from raw fd")?
};
tap_interfaces.push(tap);
}
let plugin_args: Vec<&str> = cfg.params.iter().map(|s| &s[..]).collect();
let plugin_path = match cfg.executable_path {
Some(Executable::Plugin(ref plugin_path)) => plugin_path.as_path(),
_ => panic!("Executable was not a plugin"),
};
let vcpu_count = cfg.vcpu_count.unwrap_or(1) as u32;
let mem = GuestMemory::new(&[]).unwrap();
let mut mem_policy = MemoryPolicy::empty();
if cfg.hugepages {
mem_policy |= MemoryPolicy::USE_HUGEPAGES;
}
mem.set_memory_policy(mem_policy);
let kvm = Kvm::new_with_path(&cfg.kvm_device_path).context("error creating Kvm")?;
let mut vm = Vm::new(&kvm, mem).context("error creating vm")?;
vm.create_irq_chip()
.context("failed to create kvm irqchip")?;
vm.create_pit().context("failed to create kvm PIT")?;
let mut plugin = Process::new(vcpu_count, plugin_path, &plugin_args, jail, stderr_wr)?;
// Now that the jail for the plugin has been created and we had a chance to adjust gids there,
// we can drop all our capabilities in case we had any.
drop_capabilities().context("failed to drop process capabilities")?;
let mut res = Ok(());
    // If Some, we will exit after enough time has passed to shut down cleanly.
let mut dying_instant: Option<Instant> = None;
let duration_to_die = Duration::from_millis(1000);
let exit_evt = Event::new().context("failed to create event")?;
let kill_signaled = Arc::new(AtomicBool::new(false));
let mut vcpu_handles = Vec::with_capacity(vcpu_count as usize);
let wait_ctx = WaitContext::build_with(&[
(&exit_evt, Token::Exit),
(&sigchld_fd, Token::ChildSignal),
(&stderr_rd, Token::Stderr),
])
.context("failed to add control descriptors to wait context")?;
let mut sockets_to_drop = Vec::new();
let mut redo_wait_ctx_sockets = true;
// In this loop, make every attempt to not return early. If an error is encountered, set `res`
// to the error, set `dying_instant` to now, and signal the plugin that it will be killed soon.
    // If the plugin cannot be signaled because it is dead or `signal_kill` failed, simply break
// from the poll loop so that the VCPU threads can be cleaned up.
'wait: loop {
// After we have waited long enough, it's time to give up and exit.
if dying_instant
.map(|i| i.elapsed() >= duration_to_die)
.unwrap_or(false)
{
break;
}
if redo_wait_ctx_sockets {
for (index, socket) in plugin.sockets().iter().enumerate() {
wait_ctx
.add(socket, Token::Plugin { index })
.context("failed to add plugin sockets to wait context")?;
}
}
let plugin_socket_count = plugin.sockets().len();
let events = {
let poll_res = match dying_instant {
Some(inst) => wait_ctx.wait_timeout(duration_to_die - inst.elapsed()),
None => wait_ctx.wait(),
};
match poll_res {
Ok(v) => v,
Err(e) => {
                // Polling no longer works; time to break out and clean up.
if res.is_ok() {
res = Err(e).context("failed to poll all FDs");
}
break;
}
}
};
for event in events.iter().filter(|e| e.is_hungup) {
if let Token::Stderr = event.token {
let _ = wait_ctx.delete(&stderr_rd);
}
}
for event in events.iter().filter(|e| e.is_readable) {
match event.token {
Token::Exit => {
// No need to check the exit event if we are already doing cleanup.
let _ = wait_ctx.delete(&exit_evt);
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on exit event");
}
}
Token::ChildSignal => {
// Print all available siginfo structs, then exit the loop.
loop {
match sigchld_fd.read() {
Ok(Some(siginfo)) => {
// If the plugin process has ended, there is no need to continue
// processing plugin connections, so we break early.
if siginfo.ssi_pid == plugin.pid() as u32 {
break 'wait;
}
// Because SIGCHLD is not expected from anything other than the
// plugin process, report it as an error.
if res.is_ok() {
res = Err(anyhow!(
"process {} died with signal {}, status {}, and code {}",
siginfo.ssi_pid,
siginfo.ssi_signo,
siginfo.ssi_status,
siginfo.ssi_code,
));
}
}
Ok(None) => break, // No more signals to read.
Err(e) => {
// Something really must be messed up for this to happen, continue
// processing connections for a limited time.
if res.is_ok() {
res = Err(e).context("failed to read signal fd");
}
break;
}
}
}
// As we only spawn the plugin process, getting a SIGCHLD can only mean
// something went wrong.
dying_instant.get_or_insert(Instant::now());
let sig_res = plugin.signal_kill();
if res.is_ok() && sig_res.is_err() {
res = sig_res.context("error sending kill signal to plugin on SIGCHLD");
}
}
Token::Stderr => loop {
let mut buf = [0u8; 4096];
match stderr_rd.read(&mut buf) {
Ok(len) => {
for l in String::from_utf8_lossy(&buf[0..len]).lines() {
error!("minijail/plugin: {}", l);
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
break;
}
Err(e) => {
error!("failed reading from stderr: {}", e);
break;
}
}
},
Token::Plugin { index } => {
match plugin.handle_socket(index, &kvm, &mut vm, &vcpu_handles, &tap_interfaces)
{
Ok(_) => {}
// A HUP is an expected event for a socket, so don't bother warning about
// it.
Err(CommError::PluginSocketHup) => sockets_to_drop.push(index),
// Only one connection out of potentially many is broken. Drop it, but don't
// start cleaning up. Because the error isn't returned, we will warn about
// it here.
Err(e) => {
warn!("error handling plugin socket: {}", e);
sockets_to_drop.push(index);
}
}
}
}
}
if vcpu_handles.is_empty() && dying_instant.is_none() && plugin.is_started() {
let res = run_vcpus(
&kvm,
&vm,
&plugin,
vcpu_count,
&kill_signaled,
&exit_evt,
&mut vcpu_handles,
);
if let Err(e) = res {
dying_instant.get_or_insert(Instant::now());
error!("failed to start vcpus: {}", e);
}
}
redo_wait_ctx_sockets =
!sockets_to_drop.is_empty() || plugin.sockets().len() != plugin_socket_count;
        // Clean up all of the sockets that we have determined were disconnected or suffered some
// other error.
plugin.drop_sockets(&mut sockets_to_drop);
sockets_to_drop.clear();
if redo_wait_ctx_sockets {
for socket in plugin.sockets() {
let _ = wait_ctx.delete(socket);
}
}
}
// vcpu threads MUST see the kill signaled flag, otherwise they may re-enter the VM.
kill_signaled.store(true, Ordering::SeqCst);
// Depending on how we ended up here, the plugin process, or a VCPU thread waiting for requests
// might be stuck. The `signal_kill` call will unstick all the VCPU threads by closing their
// blocked connections.
plugin
.signal_kill()
.context("error sending kill signal to plugin on cleanup")?;
for handle in vcpu_handles {
match handle.kill(SIGRTMIN() + 0) {
Ok(_) => {
if let Err(e) = handle.join() {
error!("failed to join vcpu thread: {:?}", e);
}
}
Err(e) => error!("failed to kill vcpu thread: {}", e),
}
}
match plugin.try_wait() {
// The plugin has run out of time by now
Ok(ProcessStatus::Running) => Err(anyhow!("plugin did not exit within timeout")),
// Return an error discovered earlier in this function.
Ok(ProcessStatus::Success) => res.map_err(anyhow::Error::msg),
Ok(ProcessStatus::Fail(code)) => Err(anyhow!("plugin exited with error: {}", code)),
Ok(ProcessStatus::Signal(code)) => Err(anyhow!("plugin exited with signal {}", code)),
Err(e) => Err(anyhow!("error waiting for plugin to exit: {}", e)),
}
}
| PluginObject | identifier_name |
reducer_posts.js | import fuzzy from 'fuzzy';
import { resolve } from 'redux-simple-promise';
import {
FETCH_POSTS,
FETCH_POST,
FETCH_PAGE,
FETCH_POSTS_CATEGORY,
FETCH_POSTS_TAG
} from '../actions/index';
const INITIAL_STATE = { all: {}, post: {}, page: {}, category: {} };
export default function (state = INITIAL_STATE, action) {
switch (action.type) {
case resolve(FETCH_POSTS): {
const data = action.payload.data.entries;
const payload = [];
if (typeof action.meta.term !== 'undefined') {
const options = {
pre: '',
post: '',
| (el) {
return `${el.title} ${el.tags.map((tag) => `${tag} `)} `;
}
};
const results = fuzzy.filter(action.meta.term, data, options);
results.map((el) => {
if (el.score > 6) {
payload.push(el.original);
}
});
return { ...state, all: payload };
}
data.map((el) => {
if (el.meta.type === 'post') {
payload.push(el);
}
});
return { ...state, all: payload };
}
case resolve(FETCH_POSTS_CATEGORY): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.meta.categories.includes(action.meta.category)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POSTS_TAG): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.tags.includes(action.meta.tag)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POST): {
const data = action.payload.data.entries;
let payload;
data.map((el) => {
if (el.title === action.meta.title) {
payload = el;
}
});
return { ...state, post: payload };
}
case resolve(FETCH_PAGE): {
const data = action.payload.data;
return { ...state, page: data };
}
default:
return state;
}
}
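// For reference, fuzzy.filter with the options above returns entries shaped
// roughly like the following (hypothetical data, for illustration only):
//
//   fuzzy.filter('redux', entries, options)
//   // => [{ original: <entry>, score: 12, string: 'Intro to Redux js  ' }]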
| extract | identifier_name |
reducer_posts.js | import fuzzy from 'fuzzy';
import { resolve } from 'redux-simple-promise';
import {
FETCH_POSTS,
FETCH_POST,
FETCH_PAGE,
FETCH_POSTS_CATEGORY,
FETCH_POSTS_TAG
} from '../actions/index';
const INITIAL_STATE = { all: {}, post: {}, page: {}, category: {} };
export default function (state = INITIAL_STATE, action) {
switch (action.type) {
case resolve(FETCH_POSTS): {
const data = action.payload.data.entries;
const payload = [];
if (typeof action.meta.term !== 'undefined') {
const options = {
pre: '',
post: '',
extract(el) |
};
const results = fuzzy.filter(action.meta.term, data, options);
results.map((el) => {
if (el.score > 6) {
payload.push(el.original);
}
});
return { ...state, all: payload };
}
data.map((el) => {
if (el.meta.type === 'post') {
payload.push(el);
}
});
return { ...state, all: payload };
}
case resolve(FETCH_POSTS_CATEGORY): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.meta.categories.includes(action.meta.category)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POSTS_TAG): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.tags.includes(action.meta.tag)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POST): {
const data = action.payload.data.entries;
let payload;
data.map((el) => {
if (el.title === action.meta.title) {
payload = el;
}
});
return { ...state, post: payload };
}
case resolve(FETCH_PAGE): {
const data = action.payload.data;
return { ...state, page: data };
}
default:
return state;
}
}
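// Note: the `score > 6` cutoff in the FETCH_POSTS case above is the original
// author's heuristic for discarding weak fuzzy matches; tune it against real
// titles and tags if search results look too loose or too strict.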
| {
return `${el.title} ${el.tags.map((tag) => `${tag} `)} `;
} | identifier_body |
reducer_posts.js | import fuzzy from 'fuzzy';
import { resolve } from 'redux-simple-promise';
import {
FETCH_POSTS,
FETCH_POST,
FETCH_PAGE,
FETCH_POSTS_CATEGORY,
FETCH_POSTS_TAG
} from '../actions/index';
const INITIAL_STATE = { all: {}, post: {}, page: {}, category: {} }; | export default function (state = INITIAL_STATE, action) {
switch (action.type) {
case resolve(FETCH_POSTS): {
const data = action.payload.data.entries;
const payload = [];
if (typeof action.meta.term !== 'undefined') {
const options = {
pre: '',
post: '',
extract(el) {
return `${el.title} ${el.tags.map((tag) => `${tag} `)} `;
}
};
const results = fuzzy.filter(action.meta.term, data, options);
results.map((el) => {
if (el.score > 6) {
payload.push(el.original);
}
});
return { ...state, all: payload };
}
data.map((el) => {
if (el.meta.type === 'post') {
payload.push(el);
}
});
return { ...state, all: payload };
}
case resolve(FETCH_POSTS_CATEGORY): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.meta.categories.includes(action.meta.category)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POSTS_TAG): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.tags.includes(action.meta.tag)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POST): {
const data = action.payload.data.entries;
let payload;
data.map((el) => {
if (el.title === action.meta.title) {
payload = el;
}
});
return { ...state, post: payload };
}
case resolve(FETCH_PAGE): {
const data = action.payload.data;
return { ...state, page: data };
}
default:
return state;
}
} | random_line_split |
|
reducer_posts.js | import fuzzy from 'fuzzy';
import { resolve } from 'redux-simple-promise';
import {
FETCH_POSTS,
FETCH_POST,
FETCH_PAGE,
FETCH_POSTS_CATEGORY,
FETCH_POSTS_TAG
} from '../actions/index';
const INITIAL_STATE = { all: {}, post: {}, page: {}, category: {} };
export default function (state = INITIAL_STATE, action) {
switch (action.type) {
case resolve(FETCH_POSTS): {
const data = action.payload.data.entries;
const payload = [];
if (typeof action.meta.term !== 'undefined') {
const options = {
pre: '',
post: '',
extract(el) {
return `${el.title} ${el.tags.map((tag) => `${tag} `)} `;
}
};
const results = fuzzy.filter(action.meta.term, data, options);
results.map((el) => {
if (el.score > 6) {
payload.push(el.original);
}
});
return { ...state, all: payload };
}
data.map((el) => {
if (el.meta.type === 'post') {
payload.push(el);
}
});
return { ...state, all: payload };
}
case resolve(FETCH_POSTS_CATEGORY): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.meta.categories.includes(action.meta.category)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POSTS_TAG): {
const data = action.payload.data.entries;
const payload = [];
data.map((el) => {
if (el.tags.includes(action.meta.tag)) {
payload.push(el);
}
});
return { ...state, category: payload };
}
case resolve(FETCH_POST): {
const data = action.payload.data.entries;
let payload;
data.map((el) => {
if (el.title === action.meta.title) |
});
return { ...state, post: payload };
}
case resolve(FETCH_PAGE): {
const data = action.payload.data;
return { ...state, page: data };
}
default:
return state;
}
}
| {
payload = el;
} | conditional_block |
spoilers.py | """Spoilers cog
Filters out messages that start with a certain prefix, and stores them for
later retrieval.
"""
from datetime import datetime, timedelta
import logging
import json
import os
import re
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from cogs.utils import config
# Global variables
KEY_MESSAGE = "message"
KEY_AUTHOR_ID = "authorid"
KEY_AUTHOR_NAME = "author"
KEY_TIMESTAMP = "timestamp"
KEY_EMBED = "embed"
LOGGER = None
PREFIX = "spoiler"
SAVE_FOLDER = "data/lui-cogs/spoilers/" # Path to save folder.
SAVE_FILE = "settings.json"
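# Cooldown, in seconds, before the same user can re-request the same spoiler.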
COOLDOWN = 60
def checkFolder():
"""Used to create the data folder at first startup"""
if not os.path.exists(SAVE_FOLDER):
print("Creating " + SAVE_FOLDER + " folder...")
os.makedirs(SAVE_FOLDER)
def checkFiles():
"""Used to initialize an empty database at first startup"""
theFile = SAVE_FOLDER + SAVE_FILE
if not dataIO.is_valid_json(theFile):
print("Creating default spoilers settings.json")
dataIO.save_json(theFile, {})
class Spoilers: # pylint: disable=too-many-instance-attributes
"""Store messages for later retrieval."""
    # Class constructor
def __init__(self, bot):
self.bot = bot
        # The JSON keys for the settings:
checkFolder()
checkFiles()
self.settings = config.Config("settings.json",
cogname="lui-cogs/spoilers")
        self.messages = self.settings.get("messages") or {}  # Fall back to an empty dict.
self.onCooldown = {}
@commands.command(name="spoiler", pass_context=True)
async def spoiler(self, ctx, *, msg):
"""Create a message spoiler."""
wordFilter = self.bot.get_cog("WordFilter")
if not wordFilter:
await self.bot.say("This cog requires the word filter cog to be loaded. "
"Please load the cog and try again")
return
if wordFilter.containsFilterableWords(ctx.message):
await self.bot.say("You have filtered words in your spoiler! Please "
"check it and try again!")
return
try:
store = {}
store[KEY_MESSAGE] = msg
store[KEY_AUTHOR_ID] = ctx.message.author.id
store[KEY_AUTHOR_NAME] = "{0.name}#{0.discriminator}".format(ctx.message.author)
store[KEY_TIMESTAMP] = ctx.message.timestamp.strftime("%s")
if ctx.message.embeds:
data = discord.Embed.from_data(ctx.message.embeds[0])
if data.type == 'image':
| if match:
store[KEY_EMBED] = match.group(0)
await self.bot.delete_message(ctx.message)
newMsg = await self.bot.say(":warning: {} created a spoiler! React to see "
"the message!".format(ctx.message.author.mention))
if not self.messages:
self.messages = {}
self.messages[newMsg.id] = store
await self.bot.add_reaction(newMsg, "\N{INFORMATION SOURCE}")
LOGGER.info("%s#%s (%s) added a spoiler: %s",
ctx.message.author.name,
ctx.message.author.discriminator,
ctx.message.author.id,
msg)
await self.settings.put("messages", self.messages)
except discord.errors.Forbidden as error:
await self.bot.say("I'm not able to do that.")
await self.bot.delete_message(newMsg)
LOGGER.error("Could not create a spoiler in server %s channel %s",
ctx.message.server.name,
ctx.message.channel.name)
LOGGER.error(error)
async def checkForReaction(self, data):
"""Reaction listener (using socket data)
Checks to see if a spoilered message was reacted to, and if so, sends a DM to the
user that reacted.
"""
# no binary frames
if isinstance(data, bytes):
return
data = json.loads(data)
event = data.get("t")
payload = data.get("d")
if event not in ("MESSAGE_REACTION_ADD", "MESSAGE_REACTION_REMOVE",
"MESSAGE_REACTION_REMOVE_ALL"):
return
isReaction = event == "MESSAGE_REACTION_ADD"
# make sure the reaction is proper
if isReaction:
msgId = payload["message_id"]
if msgId in self.messages.keys():
server = discord.utils.get(self.bot.servers,
id=payload["guild_id"])
reactedUser = discord.utils.get(server.members,
id=payload["user_id"])
if reactedUser.bot:
return
channel = discord.utils.get(server.channels,
id=payload["channel_id"])
message = await self.bot.get_message(channel, msgId)
if payload["emoji"]["id"]:
emoji = discord.Emoji(name=payload["emoji"]["name"],
id=payload["emoji"]["id"],
server=server)
else:
emoji = payload["emoji"]["name"]
await self.bot.remove_reaction(message, emoji, reactedUser)
if (msgId in self.onCooldown.keys() and
reactedUser.id in self.onCooldown[msgId].keys() and
self.onCooldown[msgId][reactedUser.id] > datetime.now()):
return
msg = self.messages[msgId]
embed = discord.Embed()
userObj = discord.utils.get(server.members,
id=msg[KEY_AUTHOR_ID])
if userObj:
embed.set_author(name="{0.name}#{0.discriminator}".format(userObj),
icon_url=userObj.avatar_url)
else:
embed.set_author(name=msg[KEY_AUTHOR_NAME])
if KEY_EMBED in msg:
embed.set_image(url=msg[KEY_EMBED])
embed.description = msg[KEY_MESSAGE]
embed.timestamp = datetime.fromtimestamp(int(msg[KEY_TIMESTAMP]))
try:
await self.bot.send_message(reactedUser, embed=embed)
if msgId not in self.onCooldown.keys():
self.onCooldown[msgId] = {}
self.onCooldown[msgId][reactedUser.id] = (datetime.now() +
timedelta(seconds=COOLDOWN))
except (discord.errors.Forbidden, discord.errors.HTTPException) as error:
LOGGER.error("Could not send DM to %s#%s (%s).",
reactedUser.name,
reactedUser.discriminator,
reactedUser.id)
LOGGER.error(error)
def setup(bot):
"""Add the cog to the bot."""
checkFolder() # Make sure the data folder exists!
checkFiles() # Make sure we have settings!
spoilersCog = Spoilers(bot)
global LOGGER # pylint: disable=global-statement
LOGGER = logging.getLogger("red.Spoilers")
if LOGGER.level == 0:
# Prevents duplicate log handlers from being added in case of module reload.
LOGGER.setLevel(logging.INFO)
handler = logging.FileHandler(filename="data/lui-cogs/spoilers/info.log",
encoding="utf-8",
mode="a")
handler.setFormatter(logging.Formatter("%(asctime)s %(message)s",
datefmt="[%d/%m/%Y %H:%M:%S]"))
LOGGER.addHandler(handler)
bot.add_listener(spoilersCog.checkForReaction, "on_socket_raw_receive")
bot.add_cog(spoilersCog) | store[KEY_EMBED] = data.url
else:
imglinkPattern = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"
match = re.search(imglinkPattern, msg)
| random_line_split |
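The `imglinkPattern` regex in the held-out span above is the cog's fallback for messages without an image embed: it grabs the first bare URL ending in a common image extension. A standalone check of that exact pattern (the sample URLs are invented for illustration):

```python
import re

IMG_LINK = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"  # same pattern as the cog

samples = [
    "spoiler text http://example.com/pic.PNG more text",  # matches, case-insensitively
    "https://example.com/photo.jpeg",                     # matches
    "no links here",                                      # no match
]
for text in samples:
    match = re.search(IMG_LINK, text)
    print(match.group(0) if match else "<no image link>")
```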
spoilers.py | """Spoilers cog
Filters out messages that start with a certain prefix, and stores them for
later retrieval.
"""
from datetime import datetime, timedelta
import logging
import json
import os
import re
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from cogs.utils import config
# Global variables
KEY_MESSAGE = "message"
KEY_AUTHOR_ID = "authorid"
KEY_AUTHOR_NAME = "author"
KEY_TIMESTAMP = "timestamp"
KEY_EMBED = "embed"
LOGGER = None
PREFIX = "spoiler"
SAVE_FOLDER = "data/lui-cogs/spoilers/" # Path to save folder.
SAVE_FILE = "settings.json"
COOLDOWN = 60
def checkFolder():
"""Used to create the data folder at first startup"""
if not os.path.exists(SAVE_FOLDER):
print("Creating " + SAVE_FOLDER + " folder...")
os.makedirs(SAVE_FOLDER)
def checkFiles():
"""Used to initialize an empty database at first startup"""
theFile = SAVE_FOLDER + SAVE_FILE
if not dataIO.is_valid_json(theFile):
print("Creating default spoilers settings.json")
dataIO.save_json(theFile, {})
class Spoilers: # pylint: disable=too-many-instance-attributes
"""Store messages for later retrieval."""
#Class constructor
def __init__(self, bot):
self.bot = bot
#The JSON keys for the settings:
checkFolder()
checkFiles()
self.settings = config.Config("settings.json",
cogname="lui-cogs/spoilers")
self.messages = self.settings.get("messages") or {}  # fall back to an empty store
self.onCooldown = {}
@commands.command(name="spoiler", pass_context=True)
async def spoiler(self, ctx, *, msg):
"""Create a message spoiler."""
wordFilter = self.bot.get_cog("WordFilter")
if not wordFilter:
await self.bot.say("This cog requires the word filter cog to be loaded. "
"Please load the cog and try again")
return
if wordFilter.containsFilterableWords(ctx.message):
await self.bot.say("You have filtered words in your spoiler! Please "
"check it and try again!")
return
newMsg = None
try:
store = {}
store[KEY_MESSAGE] = msg
store[KEY_AUTHOR_ID] = ctx.message.author.id
store[KEY_AUTHOR_NAME] = "{0.name}#{0.discriminator}".format(ctx.message.author)
store[KEY_TIMESTAMP] = ctx.message.timestamp.strftime("%s")
if ctx.message.embeds:
data = discord.Embed.from_data(ctx.message.embeds[0])
if data.type == 'image':
store[KEY_EMBED] = data.url
else:
imglinkPattern = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"
match = re.search(imglinkPattern, msg)
if match:
store[KEY_EMBED] = match.group(0)
await self.bot.delete_message(ctx.message)
newMsg = await self.bot.say(":warning: {} created a spoiler! React to see "
"the message!".format(ctx.message.author.mention))
if not self.messages:
self.messages = {}
self.messages[newMsg.id] = store
await self.bot.add_reaction(newMsg, "\N{INFORMATION SOURCE}")
LOGGER.info("%s#%s (%s) added a spoiler: %s",
ctx.message.author.name,
ctx.message.author.discriminator,
ctx.message.author.id,
msg)
await self.settings.put("messages", self.messages)
except discord.errors.Forbidden as error:
await self.bot.say("I'm not able to do that.")
if newMsg:
await self.bot.delete_message(newMsg)
LOGGER.error("Could not create a spoiler in server %s channel %s",
ctx.message.server.name,
ctx.message.channel.name)
LOGGER.error(error)
async def checkForReaction(self, data):
|
def setup(bot):
"""Add the cog to the bot."""
checkFolder() # Make sure the data folder exists!
checkFiles() # Make sure we have settings!
spoilersCog = Spoilers(bot)
global LOGGER # pylint: disable=global-statement
LOGGER = logging.getLogger("red.Spoilers")
if LOGGER.level == 0:
# Prevents duplicate log handlers from being added in case of module reload.
LOGGER.setLevel(logging.INFO)
handler = logging.FileHandler(filename="data/lui-cogs/spoilers/info.log",
encoding="utf-8",
mode="a")
handler.setFormatter(logging.Formatter("%(asctime)s %(message)s",
datefmt="[%d/%m/%Y %H:%M:%S]"))
LOGGER.addHandler(handler)
bot.add_listener(spoilersCog.checkForReaction, "on_socket_raw_receive")
bot.add_cog(spoilersCog)
| """Reaction listener (using socket data)
Checks to see if a spoilered message was reacted to, and if so, sends a DM to the
user that reacted.
"""
# no binary frames
if isinstance(data, bytes):
return
data = json.loads(data)
event = data.get("t")
payload = data.get("d")
if event not in ("MESSAGE_REACTION_ADD", "MESSAGE_REACTION_REMOVE",
"MESSAGE_REACTION_REMOVE_ALL"):
return
isReaction = event == "MESSAGE_REACTION_ADD"
# make sure the reaction is proper
if isReaction:
msgId = payload["message_id"]
if msgId in self.messages.keys():
server = discord.utils.get(self.bot.servers,
id=payload["guild_id"])
reactedUser = discord.utils.get(server.members,
id=payload["user_id"])
if reactedUser.bot:
return
channel = discord.utils.get(server.channels,
id=payload["channel_id"])
message = await self.bot.get_message(channel, msgId)
if payload["emoji"]["id"]:
emoji = discord.Emoji(name=payload["emoji"]["name"],
id=payload["emoji"]["id"],
server=server)
else:
emoji = payload["emoji"]["name"]
await self.bot.remove_reaction(message, emoji, reactedUser)
if (msgId in self.onCooldown.keys() and
reactedUser.id in self.onCooldown[msgId].keys() and
self.onCooldown[msgId][reactedUser.id] > datetime.now()):
return
msg = self.messages[msgId]
embed = discord.Embed()
userObj = discord.utils.get(server.members,
id=msg[KEY_AUTHOR_ID])
if userObj:
embed.set_author(name="{0.name}#{0.discriminator}".format(userObj),
icon_url=userObj.avatar_url)
else:
embed.set_author(name=msg[KEY_AUTHOR_NAME])
if KEY_EMBED in msg:
embed.set_image(url=msg[KEY_EMBED])
embed.description = msg[KEY_MESSAGE]
embed.timestamp = datetime.fromtimestamp(int(msg[KEY_TIMESTAMP]))
try:
await self.bot.send_message(reactedUser, embed=embed)
if msgId not in self.onCooldown.keys():
self.onCooldown[msgId] = {}
self.onCooldown[msgId][reactedUser.id] = (datetime.now() +
timedelta(seconds=COOLDOWN))
except (discord.errors.Forbidden, discord.errors.HTTPException) as error:
LOGGER.error("Could not send DM to %s#%s (%s).",
reactedUser.name,
reactedUser.discriminator,
reactedUser.id)
LOGGER.error(error) | identifier_body |
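The held-out `middle` of this row is the whole `checkForReaction` body, whose last step stamps `onCooldown[msgId][userId]` with `now + COOLDOWN` so repeat reactions inside the window are ignored. The same per-message, per-user bookkeeping in isolation — a minimal sketch, independent of discord.py:

```python
from datetime import datetime, timedelta

COOLDOWN = 60  # seconds, matching the cog's module constant

class CooldownTracker:
    def __init__(self):
        self.on_cooldown = {}  # msg_id -> {user_id: expiry datetime}

    def is_blocked(self, msg_id, user_id):
        expiry = self.on_cooldown.get(msg_id, {}).get(user_id)
        return expiry is not None and expiry > datetime.now()

    def stamp(self, msg_id, user_id):
        self.on_cooldown.setdefault(msg_id, {})[user_id] = (
            datetime.now() + timedelta(seconds=COOLDOWN))

tracker = CooldownTracker()
tracker.stamp("123", "42")
print(tracker.is_blocked("123", "42"))  # True within the window
print(tracker.is_blocked("123", "99"))  # False for another user
```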
spoilers.py | """Spoilers cog
Filters out messages that start with a certain prefix, and stores them for
later retrieval.
"""
from datetime import datetime, timedelta
import logging
import json
import os
import re
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from cogs.utils import config
# Global variables
KEY_MESSAGE = "message"
KEY_AUTHOR_ID = "authorid"
KEY_AUTHOR_NAME = "author"
KEY_TIMESTAMP = "timestamp"
KEY_EMBED = "embed"
LOGGER = None
PREFIX = "spoiler"
SAVE_FOLDER = "data/lui-cogs/spoilers/" # Path to save folder.
SAVE_FILE = "settings.json"
COOLDOWN = 60
def checkFolder():
"""Used to create the data folder at first startup"""
if not os.path.exists(SAVE_FOLDER):
print("Creating " + SAVE_FOLDER + " folder...")
os.makedirs(SAVE_FOLDER)
def checkFiles():
"""Used to initialize an empty database at first startup"""
theFile = SAVE_FOLDER + SAVE_FILE
if not dataIO.is_valid_json(theFile):
print("Creating default spoilers settings.json")
dataIO.save_json(theFile, {})
class Spoilers: # pylint: disable=too-many-instance-attributes
"""Store messages for later retrieval."""
#Class constructor
def __init__(self, bot):
self.bot = bot
#The JSON keys for the settings:
checkFolder()
checkFiles()
self.settings = config.Config("settings.json",
cogname="lui-cogs/spoilers")
self.messages = self.settings.get("messages") or {}  # fall back to an empty store
self.onCooldown = {}
@commands.command(name="spoiler", pass_context=True)
async def spoiler(self, ctx, *, msg):
"""Create a message spoiler."""
wordFilter = self.bot.get_cog("WordFilter")
if not wordFilter:
await self.bot.say("This cog requires the word filter cog to be loaded. "
"Please load the cog and try again")
return
if wordFilter.containsFilterableWords(ctx.message):
await self.bot.say("You have filtered words in your spoiler! Please "
"check it and try again!")
return
newMsg = None
try:
store = {}
store[KEY_MESSAGE] = msg
store[KEY_AUTHOR_ID] = ctx.message.author.id
store[KEY_AUTHOR_NAME] = "{0.name}#{0.discriminator}".format(ctx.message.author)
store[KEY_TIMESTAMP] = ctx.message.timestamp.strftime("%s")
if ctx.message.embeds:
data = discord.Embed.from_data(ctx.message.embeds[0])
if data.type == 'image':
store[KEY_EMBED] = data.url
else:
imglinkPattern = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"
match = re.search(imglinkPattern, msg)
if match:
store[KEY_EMBED] = match.group(0)
await self.bot.delete_message(ctx.message)
newMsg = await self.bot.say(":warning: {} created a spoiler! React to see "
"the message!".format(ctx.message.author.mention))
if not self.messages:
self.messages = {}
self.messages[newMsg.id] = store
await self.bot.add_reaction(newMsg, "\N{INFORMATION SOURCE}")
LOGGER.info("%s#%s (%s) added a spoiler: %s",
ctx.message.author.name,
ctx.message.author.discriminator,
ctx.message.author.id,
msg)
await self.settings.put("messages", self.messages)
except discord.errors.Forbidden as error:
await self.bot.say("I'm not able to do that.")
if newMsg:
await self.bot.delete_message(newMsg)
LOGGER.error("Could not create a spoiler in server %s channel %s",
ctx.message.server.name,
ctx.message.channel.name)
LOGGER.error(error)
async def | (self, data):
"""Reaction listener (using socket data)
Checks to see if a spoilered message was reacted to, and if so, sends a DM to the
user that reacted.
"""
# no binary frames
if isinstance(data, bytes):
return
data = json.loads(data)
event = data.get("t")
payload = data.get("d")
if event not in ("MESSAGE_REACTION_ADD", "MESSAGE_REACTION_REMOVE",
"MESSAGE_REACTION_REMOVE_ALL"):
return
isReaction = event == "MESSAGE_REACTION_ADD"
# make sure the reaction is proper
if isReaction:
msgId = payload["message_id"]
if msgId in self.messages.keys():
server = discord.utils.get(self.bot.servers,
id=payload["guild_id"])
reactedUser = discord.utils.get(server.members,
id=payload["user_id"])
if reactedUser.bot:
return
channel = discord.utils.get(server.channels,
id=payload["channel_id"])
message = await self.bot.get_message(channel, msgId)
if payload["emoji"]["id"]:
emoji = discord.Emoji(name=payload["emoji"]["name"],
id=payload["emoji"]["id"],
server=server)
else:
emoji = payload["emoji"]["name"]
await self.bot.remove_reaction(message, emoji, reactedUser)
if (msgId in self.onCooldown.keys() and
reactedUser.id in self.onCooldown[msgId].keys() and
self.onCooldown[msgId][reactedUser.id] > datetime.now()):
return
msg = self.messages[msgId]
embed = discord.Embed()
userObj = discord.utils.get(server.members,
id=msg[KEY_AUTHOR_ID])
if userObj:
embed.set_author(name="{0.name}#{0.discriminator}".format(userObj),
icon_url=userObj.avatar_url)
else:
embed.set_author(name=msg[KEY_AUTHOR_NAME])
if KEY_EMBED in msg:
embed.set_image(url=msg[KEY_EMBED])
embed.description = msg[KEY_MESSAGE]
embed.timestamp = datetime.fromtimestamp(int(msg[KEY_TIMESTAMP]))
try:
await self.bot.send_message(reactedUser, embed=embed)
if msgId not in self.onCooldown.keys():
self.onCooldown[msgId] = {}
self.onCooldown[msgId][reactedUser.id] = (datetime.now() +
timedelta(seconds=COOLDOWN))
except (discord.errors.Forbidden, discord.errors.HTTPException) as error:
LOGGER.error("Could not send DM to %s#%s (%s).",
reactedUser.name,
reactedUser.discriminator,
reactedUser.id)
LOGGER.error(error)
def setup(bot):
"""Add the cog to the bot."""
checkFolder() # Make sure the data folder exists!
checkFiles() # Make sure we have settings!
spoilersCog = Spoilers(bot)
global LOGGER # pylint: disable=global-statement
LOGGER = logging.getLogger("red.Spoilers")
if LOGGER.level == 0:
# Prevents duplicate log handlers from being added in case of module reload.
LOGGER.setLevel(logging.INFO)
handler = logging.FileHandler(filename="data/lui-cogs/spoilers/info.log",
encoding="utf-8",
mode="a")
handler.setFormatter(logging.Formatter("%(asctime)s %(message)s",
datefmt="[%d/%m/%Y %H:%M:%S]"))
LOGGER.addHandler(handler)
bot.add_listener(spoilersCog.checkForReaction, "on_socket_raw_receive")
bot.add_cog(spoilersCog)
| checkForReaction | identifier_name |
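Here only the method name `checkForReaction` is held out; the body stays in the suffix. Its opening lines narrow raw gateway frames down to the three reaction event types before touching any Discord objects. That filtering step on its own (the frame below is hand-made; real gateway payloads carry more fields):

```python
import json

REACTION_EVENTS = ("MESSAGE_REACTION_ADD", "MESSAGE_REACTION_REMOVE",
                   "MESSAGE_REACTION_REMOVE_ALL")

def parse_reaction_frame(raw):
    """Return (event, payload) for reaction frames, else None."""
    if isinstance(raw, bytes):  # no binary frames, as in the cog
        return None
    data = json.loads(raw)
    event = data.get("t")
    if event not in REACTION_EVENTS:
        return None
    return event, data.get("d")

frame = json.dumps({"t": "MESSAGE_REACTION_ADD",
                    "d": {"message_id": "1", "user_id": "2"}})
print(parse_reaction_frame(frame))    # ('MESSAGE_REACTION_ADD', {...})
print(parse_reaction_frame(b"\x00"))  # None
```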
spoilers.py | """Spoilers cog
Filters out messages that start with a certain prefix, and stores them for
later retrieval.
"""
from datetime import datetime, timedelta
import logging
import json
import os
import re
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from cogs.utils import config
# Global variables
KEY_MESSAGE = "message"
KEY_AUTHOR_ID = "authorid"
KEY_AUTHOR_NAME = "author"
KEY_TIMESTAMP = "timestamp"
KEY_EMBED = "embed"
LOGGER = None
PREFIX = "spoiler"
SAVE_FOLDER = "data/lui-cogs/spoilers/" # Path to save folder.
SAVE_FILE = "settings.json"
COOLDOWN = 60
def checkFolder():
"""Used to create the data folder at first startup"""
if not os.path.exists(SAVE_FOLDER):
print("Creating " + SAVE_FOLDER + " folder...")
os.makedirs(SAVE_FOLDER)
def checkFiles():
"""Used to initialize an empty database at first startup"""
theFile = SAVE_FOLDER + SAVE_FILE
if not dataIO.is_valid_json(theFile):
print("Creating default spoilers settings.json")
dataIO.save_json(theFile, {})
class Spoilers: # pylint: disable=too-many-instance-attributes
"""Store messages for later retrieval."""
#Class constructor
def __init__(self, bot):
self.bot = bot
#The JSON keys for the settings:
checkFolder()
checkFiles()
self.settings = config.Config("settings.json",
cogname="lui-cogs/spoilers")
self.messages = self.settings.get("messages") or {}  # fall back to an empty store
self.onCooldown = {}
@commands.command(name="spoiler", pass_context=True)
async def spoiler(self, ctx, *, msg):
"""Create a message spoiler."""
wordFilter = self.bot.get_cog("WordFilter")
if not wordFilter:
await self.bot.say("This cog requires the word filter cog to be loaded. "
"Please load the cog and try again")
return
if wordFilter.containsFilterableWords(ctx.message):
await self.bot.say("You have filtered words in your spoiler! Please "
"check it and try again!")
return
newMsg = None
try:
store = {}
store[KEY_MESSAGE] = msg
store[KEY_AUTHOR_ID] = ctx.message.author.id
store[KEY_AUTHOR_NAME] = "{0.name}#{0.discriminator}".format(ctx.message.author)
store[KEY_TIMESTAMP] = ctx.message.timestamp.strftime("%s")
if ctx.message.embeds:
data = discord.Embed.from_data(ctx.message.embeds[0])
if data.type == 'image':
store[KEY_EMBED] = data.url
else:
|
await self.bot.delete_message(ctx.message)
newMsg = await self.bot.say(":warning: {} created a spoiler! React to see "
"the message!".format(ctx.message.author.mention))
if not self.messages:
self.messages = {}
self.messages[newMsg.id] = store
await self.bot.add_reaction(newMsg, "\N{INFORMATION SOURCE}")
LOGGER.info("%s#%s (%s) added a spoiler: %s",
ctx.message.author.name,
ctx.message.author.discriminator,
ctx.message.author.id,
msg)
await self.settings.put("messages", self.messages)
except discord.errors.Forbidden as error:
await self.bot.say("I'm not able to do that.")
if newMsg:
await self.bot.delete_message(newMsg)
LOGGER.error("Could not create a spoiler in server %s channel %s",
ctx.message.server.name,
ctx.message.channel.name)
LOGGER.error(error)
async def checkForReaction(self, data):
"""Reaction listener (using socket data)
Checks to see if a spoilered message was reacted to, and if so, sends a DM to the
user that reacted.
"""
# no binary frames
if isinstance(data, bytes):
return
data = json.loads(data)
event = data.get("t")
payload = data.get("d")
if event not in ("MESSAGE_REACTION_ADD", "MESSAGE_REACTION_REMOVE",
"MESSAGE_REACTION_REMOVE_ALL"):
return
isReaction = event == "MESSAGE_REACTION_ADD"
# make sure the reaction is proper
if isReaction:
msgId = payload["message_id"]
if msgId in self.messages.keys():
server = discord.utils.get(self.bot.servers,
id=payload["guild_id"])
reactedUser = discord.utils.get(server.members,
id=payload["user_id"])
if reactedUser.bot:
return
channel = discord.utils.get(server.channels,
id=payload["channel_id"])
message = await self.bot.get_message(channel, msgId)
if payload["emoji"]["id"]:
emoji = discord.Emoji(name=payload["emoji"]["name"],
id=payload["emoji"]["id"],
server=server)
else:
emoji = payload["emoji"]["name"]
await self.bot.remove_reaction(message, emoji, reactedUser)
if (msgId in self.onCooldown.keys() and
reactedUser.id in self.onCooldown[msgId].keys() and
self.onCooldown[msgId][reactedUser.id] > datetime.now()):
return
msg = self.messages[msgId]
embed = discord.Embed()
userObj = discord.utils.get(server.members,
id=msg[KEY_AUTHOR_ID])
if userObj:
embed.set_author(name="{0.name}#{0.discriminator}".format(userObj),
icon_url=userObj.avatar_url)
else:
embed.set_author(name=msg[KEY_AUTHOR_NAME])
if KEY_EMBED in msg:
embed.set_image(url=msg[KEY_EMBED])
embed.description = msg[KEY_MESSAGE]
embed.timestamp = datetime.fromtimestamp(int(msg[KEY_TIMESTAMP]))
try:
await self.bot.send_message(reactedUser, embed=embed)
if msgId not in self.onCooldown.keys():
self.onCooldown[msgId] = {}
self.onCooldown[msgId][reactedUser.id] = (datetime.now() +
timedelta(seconds=COOLDOWN))
except (discord.errors.Forbidden, discord.errors.HTTPException) as error:
LOGGER.error("Could not send DM to %s#%s (%s).",
reactedUser.name,
reactedUser.discriminator,
reactedUser.id)
LOGGER.error(error)
def setup(bot):
"""Add the cog to the bot."""
checkFolder() # Make sure the data folder exists!
checkFiles() # Make sure we have settings!
spoilersCog = Spoilers(bot)
global LOGGER # pylint: disable=global-statement
LOGGER = logging.getLogger("red.Spoilers")
if LOGGER.level == 0:
# Prevents duplicate log handlers from being added in case of module reload.
LOGGER.setLevel(logging.INFO)
handler = logging.FileHandler(filename="data/lui-cogs/spoilers/info.log",
encoding="utf-8",
mode="a")
handler.setFormatter(logging.Formatter("%(asctime)s %(message)s",
datefmt="[%d/%m/%Y %H:%M:%S]"))
LOGGER.addHandler(handler)
bot.add_listener(spoilersCog.checkForReaction, "on_socket_raw_receive")
bot.add_cog(spoilersCog)
| imglinkPattern = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"
match = re.search(imglinkPattern, msg)
if match:
store[KEY_EMBED] = match.group(0) | conditional_block |
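`setup()` guards its logging configuration with `if LOGGER.level == 0:` — an unconfigured logger reports `logging.NOTSET`, which is `0` — so reloading the cog does not attach a second `FileHandler` and double every line in the log. The same guard reduced to its essentials (`demo.log` is a stand-in path for this sketch):

```python
import logging

def get_cog_logger(name="red.Spoilers", logfile="demo.log"):
    logger = logging.getLogger(name)
    if logger.level == 0:  # logging.NOTSET: first time this logger is configured
        logger.setLevel(logging.INFO)
        handler = logging.FileHandler(filename=logfile, encoding="utf-8", mode="a")
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s",
                                               datefmt="[%d/%m/%Y %H:%M:%S]"))
        logger.addHandler(handler)
    return logger

first = get_cog_logger()
second = get_cog_logger()   # simulated reload: no duplicate handler is added
print(len(first.handlers))  # 1
```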
setup.py | #!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class SQLiteTest(Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite" | pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='[email protected]',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
) |
user_options = []
def initialize_options(self): | random_line_split |
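The dependency loop in this `setup.py` pins every Tryton module to the package's own major.minor series, yielding specifiers such as `trytond_stock >= 3.4, < 3.5`. The arithmetic on its own, with a made-up version string:

```python
MODULE2PREFIX = {'report_webkit': 'openlabs'}

def pin(dep, version='3.4.0'):
    """Build a version specifier pinned to the package's major.minor series."""
    major, minor = (int(x) for x in version.split('.')[:2])
    prefix = MODULE2PREFIX.get(dep, 'trytond')
    return '%s_%s >= %s.%s, < %s.%s' % (prefix, dep, major, minor, major, minor + 1)

print(pin('stock'))          # trytond_stock >= 3.4, < 3.5
print(pin('report_webkit'))  # openlabs_report_webkit >= 3.4, < 3.5
```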
setup.py | #!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class SQLiteTest(Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
|
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='[email protected]',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
)
| sys.exit(0) | conditional_block |
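Both command classes end the same way: the `unittest` result is folded into a process exit status so a CI runner can fail the build on a red suite. The shape of that pattern, stripped of the Tryton specifics:

```python
import sys
import unittest

def run_suite_and_exit(suite):
    """Run a suite and exit with a status a CI job can act on."""
    result = unittest.TextTestRunner(verbosity=3).run(suite)
    if result.wasSuccessful():
        sys.exit(0)
    sys.exit(-1)

class SmokeTest(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

if __name__ == '__main__':
    run_suite_and_exit(unittest.TestLoader().loadTestsFromTestCase(SmokeTest))
```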
setup.py | #!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class | (Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='[email protected]',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
)
| SQLiteTest | identifier_name |
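`tryton.cfg` is read into a plain `info` dict, with the multi-line keys (`depends`, `extras_depend`, `xml`) split into lists. The same parsing rerun on an in-memory config — the sample contents are invented, and `read_string` stands in for the script's Python 2 `readfp` call:

```python
import configparser

SAMPLE = """[tryton]
version=3.4.0
depends=
    ir
    res
    stock
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)  # the original reads tryton.cfg with readfp()
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
    if key in info:
        info[key] = info[key].strip().splitlines()
print(info)  # {'version': '3.4.0', 'depends': ['ir', 'res', 'stock']}
```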
setup.py | #!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class SQLiteTest(Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
|
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='[email protected]',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
)
| if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1) | identifier_body |
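The held-out `run()` body differs from the SQLite one only in the environment it exports before importing the suite: `postgresql://` plus a timestamped throwaway `DB_NAME` instead of `sqlite://` with `:memory:`. A compact sketch of that switch (the real commands also fetch test dependencies first):

```python
import os
import time

BACKENDS = {
    'sqlite': {'TRYTOND_DATABASE_URI': 'sqlite://', 'DB_NAME': ':memory:'},
    'postgres': {'TRYTOND_DATABASE_URI': 'postgresql://',
                 'DB_NAME': 'test_' + str(int(time.time()))},
}

def select_backend(name):
    """Export the variables the Tryton test suite reads at import time."""
    os.environ.update(BACKENDS[name])

select_backend('postgres')
print(os.environ['TRYTOND_DATABASE_URI'], os.environ['DB_NAME'])
```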
reducer.ts | import { Reducer } from 'redux';
import { ActionType, getType } from 'typesafe-actions';
import * as actions from './basic-actions';
export interface FarmingState {
// The actively farming store, if any
readonly storeId?: string;
// A counter for pending tasks that interrupt farming
readonly numInterruptions: number;
}
export type FarmingAction = ActionType<typeof actions>;
const initialState: FarmingState = {
numInterruptions: 0,
};
export const farming: Reducer<FarmingState, FarmingAction> = (
state: FarmingState = initialState,
action: FarmingAction
) => {
switch (action.type) {
case getType(actions.start):
return {
...state,
storeId: action.payload,
numInterruptions: 0,
};
case getType(actions.stop):
return {
...state,
storeId: undefined,
numInterruptions: 0,
};
case getType(actions.interruptFarming):
return {
...state,
numInterruptions: state.numInterruptions + 1,
};
case getType(actions.resumeFarming):
return {
...state, | }
}; | numInterruptions: state.numInterruptions - 1,
};
default:
return state; | random_line_split |
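The farming reducer models interruptions as a counter rather than a flag, so overlapping pauses compose: each `interruptFarming` increments, each `resumeFarming` decrements, and farming stays paused while the count is non-zero. The same idea as a tiny Python state machine (a sketch of the TypeScript reducer's behaviour, not a translation of its Redux wiring):

```python
class FarmingState:
    def __init__(self):
        self.store_id = None
        self.num_interruptions = 0

    def interrupt(self):
        self.num_interruptions += 1

    def resume(self):
        self.num_interruptions -= 1

    @property
    def paused(self):
        return self.num_interruptions > 0

state = FarmingState()
state.interrupt(); state.interrupt()  # two overlapping pauses
state.resume()
print(state.paused)  # True: one interruption is still outstanding
```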
conversion_from_glib.rs | use super::parameter_ffi_call_out;
use crate::{
analysis::{self, try_from_glib::TryFromGlib},
library,
};
#[derive(Clone, Debug)]
pub struct Mode {
pub typ: library::TypeId,
pub transfer: library::Transfer,
pub is_uninitialized: bool,
pub try_from_glib: TryFromGlib,
}
impl From<¶meter_ffi_call_out::Parameter> for Mode {
fn from(orig: ¶meter_ffi_call_out::Parameter) -> Mode {
Mode {
typ: orig.typ,
transfer: orig.transfer, | is_uninitialized: orig.is_uninitialized,
try_from_glib: orig.try_from_glib.clone(),
}
}
}
impl From<&analysis::Parameter> for Mode {
fn from(orig: &analysis::Parameter) -> Mode {
Mode {
typ: orig.lib_par.typ,
transfer: orig.lib_par.transfer,
is_uninitialized: false,
try_from_glib: orig.try_from_glib.clone(),
}
}
} | random_line_split |
|
conversion_from_glib.rs | use super::parameter_ffi_call_out;
use crate::{
analysis::{self, try_from_glib::TryFromGlib},
library,
};
#[derive(Clone, Debug)]
pub struct | {
pub typ: library::TypeId,
pub transfer: library::Transfer,
pub is_uninitialized: bool,
pub try_from_glib: TryFromGlib,
}
impl From<¶meter_ffi_call_out::Parameter> for Mode {
fn from(orig: ¶meter_ffi_call_out::Parameter) -> Mode {
Mode {
typ: orig.typ,
transfer: orig.transfer,
is_uninitialized: orig.is_uninitialized,
try_from_glib: orig.try_from_glib.clone(),
}
}
}
impl From<&analysis::Parameter> for Mode {
fn from(orig: &analysis::Parameter) -> Mode {
Mode {
typ: orig.lib_par.typ,
transfer: orig.lib_par.transfer,
is_uninitialized: false,
try_from_glib: orig.try_from_glib.clone(),
}
}
}
| Mode | identifier_name |
conversion_from_glib.rs | use super::parameter_ffi_call_out;
use crate::{
analysis::{self, try_from_glib::TryFromGlib},
library,
};
#[derive(Clone, Debug)]
pub struct Mode {
pub typ: library::TypeId,
pub transfer: library::Transfer,
pub is_uninitialized: bool,
pub try_from_glib: TryFromGlib,
}
impl From<¶meter_ffi_call_out::Parameter> for Mode {
fn from(orig: ¶meter_ffi_call_out::Parameter) -> Mode |
}
impl From<&analysis::Parameter> for Mode {
fn from(orig: &analysis::Parameter) -> Mode {
Mode {
typ: orig.lib_par.typ,
transfer: orig.lib_par.transfer,
is_uninitialized: false,
try_from_glib: orig.try_from_glib.clone(),
}
}
}
| {
Mode {
typ: orig.typ,
transfer: orig.transfer,
is_uninitialized: orig.is_uninitialized,
try_from_glib: orig.try_from_glib.clone(),
}
} | identifier_body |
payrolls.reducer.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ResourceState} from '../../../common/store/resource.reducer';
import {idsToHashWithCurrentTimestamp, resourcesToHash} from '../../../common/store/reducer.helper';
import * as payroll from './payroll-collection.actions';
import {PayrollCollectionHistory} from '../../../services/payroll/domain/payroll-collection-history.model';
export const initialState: ResourceState = {
ids: [],
entities: {},
loadedAt: {},
selectedId: null,
};
export function reducer(state = initialState, action: payroll.Actions): ResourceState | {
switch (action.type) {
case payroll.LOAD_ALL_COLLECTIONS: {
return initialState;
}
case payroll.LOAD_ALL_COLLECTIONS_COMPLETE: {
const payrolls: PayrollCollectionHistory[] = action.payload;
const ids = payrolls.map(p => p.identifier);
const entities = resourcesToHash(payrolls);
const loadedAt = idsToHashWithCurrentTimestamp(ids);
return {
ids: [ ...ids ],
entities: entities,
loadedAt: loadedAt,
selectedId: state.selectedId
};
}
default: {
return state;
}
}
} | identifier_body |
|
payrolls.reducer.ts | /**
* Copyright 2017 The Mifos Initiative.
* | *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ResourceState} from '../../../common/store/resource.reducer';
import {idsToHashWithCurrentTimestamp, resourcesToHash} from '../../../common/store/reducer.helper';
import * as payroll from './payroll-collection.actions';
import {PayrollCollectionHistory} from '../../../services/payroll/domain/payroll-collection-history.model';
export const initialState: ResourceState = {
ids: [],
entities: {},
loadedAt: {},
selectedId: null,
};
export function reducer(state = initialState, action: payroll.Actions): ResourceState {
switch (action.type) {
case payroll.LOAD_ALL_COLLECTIONS: {
return initialState;
}
case payroll.LOAD_ALL_COLLECTIONS_COMPLETE: {
const payrolls: PayrollCollectionHistory[] = action.payload;
const ids = payrolls.map(p => p.identifier);
const entities = resourcesToHash(payrolls);
const loadedAt = idsToHashWithCurrentTimestamp(ids);
return {
ids: [ ...ids ],
entities: entities,
loadedAt: loadedAt,
selectedId: state.selectedId
};
}
default: {
return state;
}
}
} | * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at | random_line_split |
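This reducer keeps collections normalized: an `ids` list, an `entities` hash keyed by identifier, and a `loadedAt` map stamping when each id was fetched. The helpers `resourcesToHash` and `idsToHashWithCurrentTimestamp` are imported rather than shown, so the sketch below reconstructs their likely behaviour — treat the implementations as assumptions:

```python
import time

def resources_to_hash(resources, key='identifier'):
    """Index a list of records by their identifier field."""
    return {r[key]: r for r in resources}

def ids_to_hash_with_current_timestamp(ids):
    now = time.time()
    return {i: now for i in ids}

payrolls = [{'identifier': 'p1', 'amount': 100},
            {'identifier': 'p2', 'amount': 250}]
ids = [p['identifier'] for p in payrolls]
state = {'ids': ids,
         'entities': resources_to_hash(payrolls),
         'loadedAt': ids_to_hash_with_current_timestamp(ids),
         'selectedId': None}
print(sorted(state['entities']))  # ['p1', 'p2']
```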
payrolls.reducer.ts | /**
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ResourceState} from '../../../common/store/resource.reducer';
import {idsToHashWithCurrentTimestamp, resourcesToHash} from '../../../common/store/reducer.helper';
import * as payroll from './payroll-collection.actions';
import {PayrollCollectionHistory} from '../../../services/payroll/domain/payroll-collection-history.model';
export const initialState: ResourceState = {
ids: [],
entities: {},
loadedAt: {},
selectedId: null,
};
export function | (state = initialState, action: payroll.Actions): ResourceState {
switch (action.type) {
case payroll.LOAD_ALL_COLLECTIONS: {
return initialState;
}
case payroll.LOAD_ALL_COLLECTIONS_COMPLETE: {
const payrolls: PayrollCollectionHistory[] = action.payload;
const ids = payrolls.map(p => p.identifier);
const entities = resourcesToHash(payrolls);
const loadedAt = idsToHashWithCurrentTimestamp(ids);
return {
ids: [ ...ids ],
entities: entities,
loadedAt: loadedAt,
selectedId: state.selectedId
};
}
default: {
return state;
}
}
}
| reducer | identifier_name |
managers.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.contrib.auth.models import BaseUserManager
from django.db.models import Q
from django.utils import timezone
from django.utils.lru_cache import lru_cache
from pootle_app.models.permissions import check_user_permission
from pootle_translationproject.models import TranslationProject
from . import utils | __all__ = ('UserManager', )
class UserManager(BaseUserManager):
"""Pootle User manager.
This manager hides the 'nobody' and 'default' users for normal
queries, since they are special users. Code that needs access to these
users should use the methods get_default_user and get_nobody_user.
"""
PERMISSION_USERS = ('default', 'nobody')
META_USERS = ('default', 'nobody', 'system')
def _create_user(self, username, email, password, is_superuser,
**extra_fields):
"""Creates and saves a User with the given username, email,
password and superuser status.
Adapted from the core ``auth.User`` model's ``UserManager``: we
have no use for the ``is_staff`` field.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
utils.validate_email_unique(email)
user = self.model(username=username, email=email,
is_active=True, is_superuser=is_superuser,
last_login=now, date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True,
**extra_fields)
@lru_cache()
def get_default_user(self):
return self.get_queryset().get(username='default')
@lru_cache()
def get_nobody_user(self):
return self.get_queryset().get(username='nobody')
@lru_cache()
def get_system_user(self):
return self.get_queryset().get(username='system')
def hide_permission_users(self):
return self.get_queryset().exclude(username__in=self.PERMISSION_USERS)
def hide_meta(self):
return self.get_queryset().exclude(username__in=self.META_USERS)
def meta_users(self):
return self.get_queryset().filter(username__in=self.META_USERS)
def get_users_with_permission(self, permission_code, project, language):
default = self.get_default_user()
directory = TranslationProject.objects.get(
project=project,
language=language
).directory
if check_user_permission(default, permission_code, directory):
return self.hide_meta().filter(is_active=True)
user_filter = Q(
permissionset__positive_permissions__codename=permission_code
)
language_path = language.directory.pootle_path
project_path = project.directory.pootle_path
user_filter &= (
Q(permissionset__directory__pootle_path=directory.pootle_path)
| Q(permissionset__directory__pootle_path=language_path)
| Q(permissionset__directory__pootle_path=project_path)
)
user_filter |= Q(is_superuser=True)
return self.get_queryset().filter(user_filter).distinct() | random_line_split |
|
managers.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.contrib.auth.models import BaseUserManager
from django.db.models import Q
from django.utils import timezone
from django.utils.lru_cache import lru_cache
from pootle_app.models.permissions import check_user_permission
from pootle_translationproject.models import TranslationProject
from . import utils
__all__ = ('UserManager', )
class UserManager(BaseUserManager):
"""Pootle User manager.
This manager hides the 'nobody' and 'default' users for normal
queries, since they are special users. Code that needs access to these
users should use the methods get_default_user and get_nobody_user.
"""
PERMISSION_USERS = ('default', 'nobody')
META_USERS = ('default', 'nobody', 'system')
def _create_user(self, username, email, password, is_superuser,
**extra_fields):
"""Creates and saves a User with the given username, email,
password and superuser status.
Adapted from the core ``auth.User`` model's ``UserManager``: we
have no use for the ``is_staff`` field.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
utils.validate_email_unique(email)
user = self.model(username=username, email=email,
is_active=True, is_superuser=is_superuser,
last_login=now, date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True,
**extra_fields)
@lru_cache()
def get_default_user(self):
|
@lru_cache()
def get_nobody_user(self):
return self.get_queryset().get(username='nobody')
@lru_cache()
def get_system_user(self):
return self.get_queryset().get(username='system')
def hide_permission_users(self):
return self.get_queryset().exclude(username__in=self.PERMISSION_USERS)
def hide_meta(self):
return self.get_queryset().exclude(username__in=self.META_USERS)
def meta_users(self):
return self.get_queryset().filter(username__in=self.META_USERS)
def get_users_with_permission(self, permission_code, project, language):
default = self.get_default_user()
directory = TranslationProject.objects.get(
project=project,
language=language
).directory
if check_user_permission(default, permission_code, directory):
return self.hide_meta().filter(is_active=True)
user_filter = Q(
permissionset__positive_permissions__codename=permission_code
)
language_path = language.directory.pootle_path
project_path = project.directory.pootle_path
user_filter &= (
Q(permissionset__directory__pootle_path=directory.pootle_path)
| Q(permissionset__directory__pootle_path=language_path)
| Q(permissionset__directory__pootle_path=project_path)
)
user_filter |= Q(is_superuser=True)
return self.get_queryset().filter(user_filter).distinct()
| return self.get_queryset().get(username='default') | identifier_body |
managers.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.contrib.auth.models import BaseUserManager
from django.db.models import Q
from django.utils import timezone
from django.utils.lru_cache import lru_cache
from pootle_app.models.permissions import check_user_permission
from pootle_translationproject.models import TranslationProject
from . import utils
__all__ = ('UserManager', )
class | (BaseUserManager):
"""Pootle User manager.
This manager hides the 'nobody' and 'default' users for normal
queries, since they are special users. Code that needs access to these
users should use the methods get_default_user and get_nobody_user.
"""
PERMISSION_USERS = ('default', 'nobody')
META_USERS = ('default', 'nobody', 'system')
def _create_user(self, username, email, password, is_superuser,
**extra_fields):
"""Creates and saves a User with the given username, email,
password and superuser status.
Adapted from the core ``auth.User`` model's ``UserManager``: we
have no use for the ``is_staff`` field.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
utils.validate_email_unique(email)
user = self.model(username=username, email=email,
is_active=True, is_superuser=is_superuser,
last_login=now, date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True,
**extra_fields)
@lru_cache()
def get_default_user(self):
return self.get_queryset().get(username='default')
@lru_cache()
def get_nobody_user(self):
return self.get_queryset().get(username='nobody')
@lru_cache()
def get_system_user(self):
return self.get_queryset().get(username='system')
def hide_permission_users(self):
return self.get_queryset().exclude(username__in=self.PERMISSION_USERS)
def hide_meta(self):
return self.get_queryset().exclude(username__in=self.META_USERS)
def meta_users(self):
return self.get_queryset().filter(username__in=self.META_USERS)
def get_users_with_permission(self, permission_code, project, language):
default = self.get_default_user()
directory = TranslationProject.objects.get(
project=project,
language=language
).directory
if check_user_permission(default, permission_code, directory):
return self.hide_meta().filter(is_active=True)
user_filter = Q(
permissionset__positive_permissions__codename=permission_code
)
language_path = language.directory.pootle_path
project_path = project.directory.pootle_path
user_filter &= (
Q(permissionset__directory__pootle_path=directory.pootle_path)
| Q(permissionset__directory__pootle_path=language_path)
| Q(permissionset__directory__pootle_path=project_path)
)
user_filter |= Q(is_superuser=True)
return self.get_queryset().filter(user_filter).distinct()
| UserManager | identifier_name |
managers.py | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.contrib.auth.models import BaseUserManager
from django.db.models import Q
from django.utils import timezone
from django.utils.lru_cache import lru_cache
from pootle_app.models.permissions import check_user_permission
from pootle_translationproject.models import TranslationProject
from . import utils
__all__ = ('UserManager', )
class UserManager(BaseUserManager):
"""Pootle User manager.
This manager hides the 'nobody' and 'default' users for normal
queries, since they are special users. Code that needs access to these
users should use the methods get_default_user and get_nobody_user.
"""
PERMISSION_USERS = ('default', 'nobody')
META_USERS = ('default', 'nobody', 'system')
def _create_user(self, username, email, password, is_superuser,
**extra_fields):
"""Creates and saves a User with the given username, email,
password and superuser status.
Adapted from the core ``auth.User`` model's ``UserManager``: we
have no use for the ``is_staff`` field.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
utils.validate_email_unique(email)
user = self.model(username=username, email=email,
is_active=True, is_superuser=is_superuser,
last_login=now, date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True,
**extra_fields)
@lru_cache()
def get_default_user(self):
return self.get_queryset().get(username='default')
@lru_cache()
def get_nobody_user(self):
return self.get_queryset().get(username='nobody')
@lru_cache()
def get_system_user(self):
return self.get_queryset().get(username='system')
def hide_permission_users(self):
return self.get_queryset().exclude(username__in=self.PERMISSION_USERS)
def hide_meta(self):
return self.get_queryset().exclude(username__in=self.META_USERS)
def meta_users(self):
return self.get_queryset().filter(username__in=self.META_USERS)
def get_users_with_permission(self, permission_code, project, language):
default = self.get_default_user()
directory = TranslationProject.objects.get(
project=project,
language=language
).directory
if check_user_permission(default, permission_code, directory):
|
user_filter = Q(
permissionset__positive_permissions__codename=permission_code
)
language_path = language.directory.pootle_path
project_path = project.directory.pootle_path
user_filter &= (
Q(permissionset__directory__pootle_path=directory.pootle_path)
| Q(permissionset__directory__pootle_path=language_path)
| Q(permissionset__directory__pootle_path=project_path)
)
user_filter |= Q(is_superuser=True)
return self.get_queryset().filter(user_filter).distinct()
| return self.hide_meta().filter(is_active=True) | conditional_block |
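`get_users_with_permission` short-circuits when the `default` user already holds the permission; otherwise it builds a single ORM query that ORs the directory, language, and project paths together and always admits superusers. The query can't execute outside a configured Django project, but the `Q` composition itself can be shown (Django on the path is assumed; the pootle paths below are invented):

```python
# Sketch only: composing the same Q filter; running it needs a Django project.
from django.db.models import Q

def permission_filter(permission_code, dir_path, lang_path, proj_path):
    q = Q(permissionset__positive_permissions__codename=permission_code)
    q &= (Q(permissionset__directory__pootle_path=dir_path)
          | Q(permissionset__directory__pootle_path=lang_path)
          | Q(permissionset__directory__pootle_path=proj_path))
    q |= Q(is_superuser=True)  # superusers always pass
    return q

print(permission_filter('translate', '/fr/proj/', '/fr/', '/projects/proj/'))
```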
social-windows-outline.js | import React from 'react'
import Icon from 'react-icon-base' | <g><path d="m33.8 22h-16.9v11.6l16.9 2.5v-14.1z m1.2-1.3v16.8l-19.4-2.8v-14h19.4z m-21.9 1.3h-11.8v9.3l11.8 1.7v-11z m1.3-1.3v13.8l-14.4-2.1v-11.7h14.4z m19.4-16.8l-16.9 2.4v11.9h16.9v-14.3z m1.2-1.4v17h-19.4v-14.3z m-21.9 4.4l-11.8 1.7v9.6h11.8v-11.3z m1.3-1.4v14h-14.4v-12z"/></g>
</Icon>
)
export default IoSocialWindowsOutline |
const IoSocialWindowsOutline = props => (
<Icon viewBox="0 0 40 40" {...props}> | random_line_split |
AbinsCalculateDWSingleCrystalTest.py | from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import *
import numpy as np
import json
from AbinsModules import CalculateDWSingleCrystal, LoadCASTEP, AbinsTestHelpers
class AbinsCalculateDWSingleCrystalTest(unittest.TestCase):
temperature = 10 # 10 K
# data
# Use case: one k-point
_c6h6 = "benzene_CalculateDWSingleCrystal"
# Use case: many k-points
_si2 = "Si2-sc_CalculateDWSingleCrystal"
def tearDown(self):
AbinsTestHelpers.remove_output_files(list_of_names=["CalculateDWSingleCrystal"])
# simple tests
def test_wrong_input(self):
filename = self._si2 + ".phonon"
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename))
good_data = castep_reader.read_phonon_file()
# wrong temperature
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=-10, abins_data=good_data)
# data from object of type AtomsData instead of object of type AbinsData
bad_data = good_data.extract()["atoms_data"]
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=self.temperature, abins_data=bad_data)
# main test
def test_good_case(self):
|
# helper functions
def _good_case(self, name=None):
# calculation of DW
good_data = self._get_good_data(filename=name)
good_tester = CalculateDWSingleCrystal(temperature=self.temperature, abins_data=good_data["DFT"])
calculated_data = good_tester.calculate_data()
# check if evaluated DW are correct
self.assertEqual(True, np.allclose(good_data["DW"], calculated_data.extract()))
def _get_good_data(self, filename=None):
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename + ".phonon"))
dw = self._prepare_data(filename=AbinsTestHelpers.find_file(filename=filename + "_crystal_DW.txt"))
return {"DFT": castep_reader.read_phonon_file(), "DW": dw}
# noinspection PyMethodMayBeStatic
def _prepare_data(self, filename=None):
"""Reads a correct values from ASCII file."""
with open(filename) as data_file:
correct_data = json.loads(data_file.read().replace("\n", " "))
return np.asarray(correct_data)
if __name__ == '__main__':
unittest.main()
| self._good_case(name=self._c6h6)
self._good_case(name=self._si2) | identifier_body |
AbinsCalculateDWSingleCrystalTest.py | from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import *
import numpy as np
import json
from AbinsModules import CalculateDWSingleCrystal, LoadCASTEP, AbinsTestHelpers
class | (unittest.TestCase):
temperature = 10 # 10 K
# data
# Use case: one k-point
_c6h6 = "benzene_CalculateDWSingleCrystal"
# Use case: many k-points
_si2 = "Si2-sc_CalculateDWSingleCrystal"
def tearDown(self):
AbinsTestHelpers.remove_output_files(list_of_names=["CalculateDWSingleCrystal"])
# simple tests
def test_wrong_input(self):
filename = self._si2 + ".phonon"
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename))
good_data = castep_reader.read_phonon_file()
# wrong temperature
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=-10, abins_data=good_data)
# data from object of type AtomsData instead of object of type AbinsData
bad_data = good_data.extract()["atoms_data"]
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=self.temperature, abins_data=bad_data)
# main test
def test_good_case(self):
self._good_case(name=self._c6h6)
self._good_case(name=self._si2)
# helper functions
def _good_case(self, name=None):
# calculation of DW
good_data = self._get_good_data(filename=name)
good_tester = CalculateDWSingleCrystal(temperature=self.temperature, abins_data=good_data["DFT"])
calculated_data = good_tester.calculate_data()
# check if evaluated DW are correct
self.assertEqual(True, np.allclose(good_data["DW"], calculated_data.extract()))
def _get_good_data(self, filename=None):
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename + ".phonon"))
dw = self._prepare_data(filename=AbinsTestHelpers.find_file(filename=filename + "_crystal_DW.txt"))
return {"DFT": castep_reader.read_phonon_file(), "DW": dw}
# noinspection PyMethodMayBeStatic
def _prepare_data(self, filename=None):
"""Reads a correct values from ASCII file."""
with open(filename) as data_file:
correct_data = json.loads(data_file.read().replace("\n", " "))
return np.asarray(correct_data)
if __name__ == '__main__':
unittest.main()
| AbinsCalculateDWSingleCrystalTest | identifier_name |
AbinsCalculateDWSingleCrystalTest.py | from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import *
import numpy as np
import json
from AbinsModules import CalculateDWSingleCrystal, LoadCASTEP, AbinsTestHelpers
class AbinsCalculateDWSingleCrystalTest(unittest.TestCase):
temperature = 10 # 10 K
# data
# Use case: one k-point
_c6h6 = "benzene_CalculateDWSingleCrystal"
# Use case: many k-points
_si2 = "Si2-sc_CalculateDWSingleCrystal"
def tearDown(self):
AbinsTestHelpers.remove_output_files(list_of_names=["CalculateDWSingleCrystal"])
# simple tests
def test_wrong_input(self):
filename = self._si2 + ".phonon"
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename))
good_data = castep_reader.read_phonon_file()
# wrong temperature
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=-10, abins_data=good_data)
# data from object of type AtomsData instead of object of type AbinsData
bad_data = good_data.extract()["atoms_data"]
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=self.temperature, abins_data=bad_data)
# main test
def test_good_case(self):
self._good_case(name=self._c6h6)
self._good_case(name=self._si2)
# helper functions
def _good_case(self, name=None):
# calculation of DW
good_data = self._get_good_data(filename=name)
good_tester = CalculateDWSingleCrystal(temperature=self.temperature, abins_data=good_data["DFT"])
calculated_data = good_tester.calculate_data()
# check if evaluated DW are correct
self.assertEqual(True, np.allclose(good_data["DW"], calculated_data.extract()))
def _get_good_data(self, filename=None):
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename + ".phonon"))
dw = self._prepare_data(filename=AbinsTestHelpers.find_file(filename=filename + "_crystal_DW.txt"))
| return {"DFT": castep_reader.read_phonon_file(), "DW": dw}
# noinspection PyMethodMayBeStatic
def _prepare_data(self, filename=None):
"""Reads a correct values from ASCII file."""
with open(filename) as data_file:
correct_data = json.loads(data_file.read().replace("\n", " "))
return np.asarray(correct_data)
if __name__ == '__main__':
unittest.main() | random_line_split |
|
AbinsCalculateDWSingleCrystalTest.py | from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import *
import numpy as np
import json
from AbinsModules import CalculateDWSingleCrystal, LoadCASTEP, AbinsTestHelpers
class AbinsCalculateDWSingleCrystalTest(unittest.TestCase):
temperature = 10 # 10 K
# data
# Use case: one k-point
_c6h6 = "benzene_CalculateDWSingleCrystal"
# Use case: many k-points
_si2 = "Si2-sc_CalculateDWSingleCrystal"
def tearDown(self):
AbinsTestHelpers.remove_output_files(list_of_names=["CalculateDWSingleCrystal"])
# simple tests
def test_wrong_input(self):
filename = self._si2 + ".phonon"
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename))
good_data = castep_reader.read_phonon_file()
# wrong temperature
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=-10, abins_data=good_data)
# data from object of type AtomsData instead of object of type AbinsData
bad_data = good_data.extract()["atoms_data"]
self.assertRaises(ValueError, CalculateDWSingleCrystal, temperature=self.temperature, abins_data=bad_data)
# main test
def test_good_case(self):
self._good_case(name=self._c6h6)
self._good_case(name=self._si2)
# helper functions
def _good_case(self, name=None):
# calculation of DW
good_data = self._get_good_data(filename=name)
good_tester = CalculateDWSingleCrystal(temperature=self.temperature, abins_data=good_data["DFT"])
calculated_data = good_tester.calculate_data()
# check if evaluated DW are correct
self.assertEqual(True, np.allclose(good_data["DW"], calculated_data.extract()))
def _get_good_data(self, filename=None):
castep_reader = LoadCASTEP(input_dft_filename=AbinsTestHelpers.find_file(filename=filename + ".phonon"))
dw = self._prepare_data(filename=AbinsTestHelpers.find_file(filename=filename + "_crystal_DW.txt"))
return {"DFT": castep_reader.read_phonon_file(), "DW": dw}
# noinspection PyMethodMayBeStatic
def _prepare_data(self, filename=None):
"""Reads a correct values from ASCII file."""
with open(filename) as data_file:
correct_data = json.loads(data_file.read().replace("\n", " "))
return np.asarray(correct_data)
if __name__ == '__main__':
| unittest.main() | conditional_block |
|
auth.service.ts | import { HttpClient } from '@angular/common/http';
import { Inject, Injectable } from '@angular/core';
import { map } from 'rxjs/operators';
import { accessTokenKey, baseUrl } from '../core/constants';
import { HubClient } from '../core/hub-client';
import { LocalStorageService } from '../core/local-storage.service';
import { Logger } from './logger.service';
@Injectable()
export class AuthService {
constructor(
@Inject(baseUrl) private _baseUrl: string,
private _httpClient: HttpClient,
private _hubClient: HubClient,
private _localStorageService: LocalStorageService,
private _loggerService: Logger
) {}
public logout() {
this._hubClient.disconnect();
this._localStorageService.put({ name: accessTokenKey, value: null });
}
public tryToLogin(options: { username: string; password: string }) |
}
| {
this._loggerService.trace('AuthService', 'tryToLogin');
return this._httpClient.post<any>(`${this._baseUrl}api/users/token`, options).pipe(
map(response => {
this._localStorageService.put({ name: accessTokenKey, value: response.accessToken });
return response.accessToken;
})
);
} | identifier_body |
auth.service.ts | import { HttpClient } from '@angular/common/http';
import { Inject, Injectable } from '@angular/core';
import { map } from 'rxjs/operators';
import { accessTokenKey, baseUrl } from '../core/constants';
import { HubClient } from '../core/hub-client';
import { LocalStorageService } from '../core/local-storage.service';
import { Logger } from './logger.service';
@Injectable()
export class AuthService {
constructor(
@Inject(baseUrl) private _baseUrl: string,
private _httpClient: HttpClient,
private _hubClient: HubClient,
private _localStorageService: LocalStorageService,
private _loggerService: Logger
) {}
public logout() { | this._loggerService.trace('AuthService', 'tryToLogin');
return this._httpClient.post<any>(`${this._baseUrl}api/users/token`, options).pipe(
map(response => {
this._localStorageService.put({ name: accessTokenKey, value: response.accessToken });
return response.accessToken;
})
);
}
} | this._hubClient.disconnect();
this._localStorageService.put({ name: accessTokenKey, value: null });
}
public tryToLogin(options: { username: string; password: string }) { | random_line_split |
auth.service.ts | import { HttpClient } from '@angular/common/http';
import { Inject, Injectable } from '@angular/core';
import { map } from 'rxjs/operators';
import { accessTokenKey, baseUrl } from '../core/constants';
import { HubClient } from '../core/hub-client';
import { LocalStorageService } from '../core/local-storage.service';
import { Logger } from './logger.service';
@Injectable()
export class AuthService {
constructor(
@Inject(baseUrl) private _baseUrl: string,
private _httpClient: HttpClient,
private _hubClient: HubClient,
private _localStorageService: LocalStorageService,
private _loggerService: Logger
) {}
public | () {
this._hubClient.disconnect();
this._localStorageService.put({ name: accessTokenKey, value: null });
}
public tryToLogin(options: { username: string; password: string }) {
this._loggerService.trace('AuthService', 'tryToLogin');
return this._httpClient.post<any>(`${this._baseUrl}api/users/token`, options).pipe(
map(response => {
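// Persist the access token locally so it can be reused by later requests (e.g., the hub connection)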
this._localStorageService.put({ name: accessTokenKey, value: response.accessToken });
return response.accessToken;
})
);
}
}
| logout | identifier_name |
b-router.ts | /*!
* V4Fire Client Core
* https://github.com/V4Fire/Client
*
* Released under the MIT license
* https://github.com/V4Fire/Client/blob/master/LICENSE
*/
/**
* [[include:base/b-router/README.md]]
* @packageDocumentation
*/
import symbolGenerator from 'core/symbol';
import { deprecated } from 'core/functools/deprecation';
import globalRoutes from 'routes';
import type Async from 'core/async';
import iData, { component, prop, system, computed, hook, wait, watch } from 'super/i-data/i-data';
import engine, * as router from 'core/router';
import { fillRouteParams } from 'base/b-router/modules/normalizers';
import type { StaticRoutes, RouteOption, TransitionMethod } from 'base/b-router/interface';
export * from 'super/i-data/i-data';
export * from 'core/router/const';
export * from 'base/b-router/interface';
export const
$$ = symbolGenerator();
/**
* Component to route application pages
*/
@component({
deprecatedProps: {
pageProp: 'activeRoute',
pagesProp: 'routesProp'
}
})
export default class bRouter extends iData {
/**
* Type: page parameters
*/
readonly PageParams!: RouteOption;
/**
* Type: page query
*/
readonly PageQuery!: RouteOption;
/**
* Type: page meta
*/
readonly PageMeta!: RouteOption;
public override async!: Async<this>;
/**
* The static schema of application routes.
* By default, this value is taken from `routes/index.ts`.
*
* @example
* ```
* < b-router :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: Object,
required: false,
watch: (ctx, val, old) => {
if (!Object.fastCompare(val, old)) {
ctx.updateCurrentRoute();
}
}
})
readonly routesProp?: StaticRoutes;
/**
* Compiled schema of application routes
* @see [[bRouter.routesProp]]
*/
@system<bRouter>({
after: 'engine',
init: (o) => o.sync.link(o.compileStaticRoutes)
})
routes!: router.RouteBlueprints;
/**
* An initial route value.
* Usually, you don't need to provide this value manually,
* because it is inferred automatically, but sometimes it can be useful.
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: [String, Object],
required: false,
watch: 'updateCurrentRoute'
})
readonly initialRoute?: router.InitialRoute;
/**
* Base route path: all route paths are concatenated with this path
*
* @example
* ```
* < b-router :basePath = '/demo' | :routes = { &
* user: {
* /// '/demo/user'
* path: '/user'
* }
* } .
* ```
*/
@prop({watch: 'updateCurrentRoute'})
readonly basePathProp: string = '/';
/** @see [[bRouter.basePathProp]] */
@system<bRouter>((o) => o.sync.link())
basePath!: string;
/**
* If true, the router will intercept all click events on elements with a `href` attribute to emit a transition.
* An element with `href` can have additional attributes:
*
* * `data-router-method` - type of the used router method to emit the transition;
* * `data-router-go` - value for the router `go` method;
* * `data-router-params`, `data-router-query`, `data-router-meta` - additional parameters for the used router method
* (to provide an object use JSON).
*/
@prop(Boolean)
readonly interceptLinks: boolean = true;
/**
* A factory to create the router engine.
* By default, this value is taken from `core/router/engines`.
*
* @example
* ```
* < b-router :engine = myCustomEngine
* ```
*/
@prop<bRouter>({
type: Function,
watch: 'updateCurrentRoute',
default: engine
})
readonly engineProp!: () => router.Router;
/**
* An internal router engine.
* For example, it can be the HTML5 history router or a router based on URL hash values.
*
* @see [[bRouter.engine]]
*/
@system((o) => o.sync.link((v) => (<(v: unknown) => router.Router>v)(o)))
protected engine!: router.Router;
/**
* Raw value of the active route
*/
@system()
protected routeStore?: router.Route;
/**
* Value of the active route
* @see [[bRouter.routeStore]]
*
* @example
* ```js
* console.log(route?.query)
* ```
*/
override get route(): CanUndef<this['r']['CurrentPage']> {
return this.field.get('routeStore');
}
/**
* @deprecated
* @see [[bRouter.route]]
*/
@deprecated({renamedTo: 'route'})
get page(): CanUndef<this['r']['CurrentPage']> {
return this.route;
}
/**
* Default route value
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*
* ```js
* router.defaultRoute.name === 'notFound'
* ```
*/
@computed({cache: true, dependencies: ['routes']})
get defaultRoute(): CanUndef<router.RouteBlueprint> {
let route;
for (let keys = Object.keys(this.routes), i = 0; i < keys.length; i++) {
const
el = this.routes[keys[i]];
if (el?.meta.default) {
route = el;
break;
}
}
return route;
}
/**
* Pushes a new route to the history stack.
* The method returns a promise that is resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.push('main', {query: {foo: 1}});
* router.push('/user/:id', {params: {id: 1}});
* router.push('https://google.com');
* ```
*/
async push(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'push');
}
/**
* Replaces the current route.
* The method returns a promise that will be resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.replace('main', {query: {foo: 1}});
* router.replace('/user/:id', {params: {id: 1}});
* router.replace('https://google.com');
* ```
*/
async replace(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'replace');
}
/**
* Switches to a route from the history,
* identified by its relative position to the current route (with the current route being relative index 0).
* The method returns a promise that will be resolved when the transition is completed.
*
* @param pos
*
* @example
* ```js
* this.go(-1) // this.back();
* this.go(1) // this.forward();
* this.go(-2) // this.back(); this.back();
* ```
*/
async go(pos: number): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.go(pos);
await res;
}
/**
* Switches to the next route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async forward(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.forward();
await res;
}
/**
* Switches to the previous route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async back(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.back();
await res;
}
/**
* Clears the routes' history.
* Mind, this method can't work properly with `HistoryAPI` based engines.
*
* @param [filter] - filter predicate
*/
clear(filter?: router.HistoryClearFilter): Promise<void> {
return this.engine.clear(filter);
}
/**
* Clears all temporary routes from the history.
* The temporary route is a route that has the `tmp` flag within its own properties, like `params`, `query` or `meta`.
* Mind, this method can't work properly with `HistoryAPI` based engines.
*
* @example
* ```js
* this.push('redeem-money', {
* meta: {
* tmp: true
* }
* });
*
* this.clearTmp();
* ```
*/
clearTmp(): Promise<void> {
return this.engine.clearTmp();
}
/** @see [[router.getRoutePath]] */
getRoutePath(ref: string, opts: router.TransitionOptions = {}): CanUndef<string> {
return router.getRoutePath(ref, this.routes, opts);
}
/** @see [[router.getRoute]] */
getRoute(ref: string): CanUndef<router.RouteAPI> {
const {routes, basePath, defaultRoute} = this;
return router.getRoute(ref, routes, {basePath, defaultRoute});
}
/**
* @deprecated
* @see [[bRouter.getRoute]]
*/
@deprecated({renamedTo: 'getRoute'})
getPageOpts(ref: string): CanUndef<router.RouteBlueprint> {
return this.getRoute(ref);
}
/**
* Emits a new transition to the specified route
*
* @param ref - route name or URL or `null`, if the route is equal to the previous
* @param [opts] - additional transition options
* @param [method] - transition method
*
* @emits `beforeChange(route: Nullable<string>, params:` [[TransitionOptions]]`, method:` [[TransitionMethod]]`)`
*
* @emits `change(route:` [[Route]]`)`
* @emits `hardChange(route:` [[Route]]`)`
* @emits `softChange(route:` [[Route]]`)`
*
* @emits `transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
* @emits `$root.transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
*/
async emitTransition(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method: TransitionMethod = 'push'
): Promise<CanUndef<router.Route>> {
opts = router.getBlankRouteFrom(router.normalizeTransitionOpts(opts));
const
{r, engine} = this;
const
currentEngineRoute = engine.route ?? engine.page;
this.emit('beforeChange', ref, opts, method);
let
newRouteInfo: CanUndef<router.RouteAPI>;
const getEngineRoute = () => currentEngineRoute ?
currentEngineRoute.url ?? router.getRouteName(currentEngineRoute) :
undefined;
// Get information about the specified route
if (ref != null) {
newRouteInfo = this.getRoute(engine.id(ref));
// In this case, we don't have the specified ref for a transition,
// so we try to get information from the current route and use it as a blueprint for the new one
} else if (currentEngineRoute) {
ref = getEngineRoute()!;
const
route = this.getRoute(ref);
if (route) {
newRouteInfo = Object.mixin(true, route, router.purifyRoute(currentEngineRoute));
}
}
const scroll = {
meta: {
scroll: {
x: pageXOffset,
y: pageYOffset
}
}
};
// To save the scroll position before changing to a new route,
// we need to emit a system "replace" transition that carries information about the scroll
if (currentEngineRoute && method !== 'replace') {
const
currentRouteWithScroll = Object.mixin(true, undefined, currentEngineRoute, scroll);
if (!Object.fastCompare(currentEngineRoute, currentRouteWithScroll)) {
await engine.replace(getEngineRoute()!, currentRouteWithScroll);
}
}
// We haven't found any routes that match the specified ref
if (newRouteInfo == null) {
// The transition was emitted by a user, so we need to save the scroll
if (method !== 'event' && ref != null) {
await engine[method](ref, scroll);
}
return;
}
if ((<router.PurifiedRoute<router.RouteAPI>>newRouteInfo).name == null) {
const
nm = router.getRouteName(currentEngineRoute);
if (nm != null) {
newRouteInfo.name = nm;
}
}
const
currentRoute = this.field.get<router.Route>('routeStore'),
deepMixin = (...args) => Object.mixin({deep: true, skipUndefs: false}, ...args);
// If the new route matches the current one by name,
// we need to mix the new state into the current one
if (router.getRouteName(currentRoute) === newRouteInfo.name) {
deepMixin(newRouteInfo, router.getBlankRouteFrom(currentRoute), opts);
// Simple normalizing of a route state
} else {
deepMixin(newRouteInfo, opts);
}
const {meta} = newRouteInfo;
// If a route supports filling from the root object or query parameters
fillRouteParams(newRouteInfo, this);
// We have two variants of transitions:
// "soft" - only query or meta parameters were changed between the routes
// "hard" - the first and second routes aren't equal by name
// Mutations of query and meta parameters of a route shouldn't force re-rendering of components,
// that is why we placed them in a prototype object by using `Object.create`
const nonWatchRouteValues = {
url: newRouteInfo.resolvePath(newRouteInfo.params),
query: newRouteInfo.query,
meta
};
const newRoute = Object.assign(
Object.create(nonWatchRouteValues),
Object.reject(router.convertRouteToPlainObject(newRouteInfo), Object.keys(nonWatchRouteValues))
);
let
hardChange = false;
// Emits the route transition event
const emitTransition = (onlyOwnTransition?: boolean) => {
const type = hardChange ? 'hard' : 'soft';
if (onlyOwnTransition) {
this.emit('transition', newRoute, type);
} else {
this.emit('change', newRoute);
this.emit('transition', newRoute, type);
r.emit('transition', newRoute, type);
}
};
// Checking that a new route is really needed, i.e., it isn't equal to the previous
let newRouteIsReallyNeeded = !Object.fastCompare(
router.getComparableRouteParams(currentRoute),
router.getComparableRouteParams(newRoute)
);
// Nothing changes between the routes, but some meta object is provided
if (!newRouteIsReallyNeeded && currentRoute != null && opts.meta != null) {
newRouteIsReallyNeeded = !Object.fastCompare(
Object.select(currentRoute.meta, opts.meta),
opts.meta
);
}
// The transition is necessary, but now we need to understand whether we should emit a "soft" or "hard" transition
if (newRouteIsReallyNeeded) {
this.field.set('routeStore', newRoute);
const
plainInfo = router.convertRouteToPlainObject(newRouteInfo);
const canRouteTransformToReplace =
currentRoute &&
method !== 'replace' &&
Object.fastCompare(router.convertRouteToPlainObject(currentRoute), plainInfo);
if (canRouteTransformToReplace) {
method = 'replace';
}
// If the used engine does not support the requested transition method,
// we should use `replace`
if (!Object.isFunction(engine[method])) {
method = 'replace';
}
// This transition is marked as `external`,
// i.e. it refers to another site
if (newRouteInfo.meta.external) {
const u = newRoute.url;
location.href = u !== '' ? u : '/';
return;
}
await engine[method](newRoute.url, plainInfo);
const isSoftTransition = Boolean(r.route && Object.fastCompare(
router.convertRouteToPlainObjectWithoutProto(currentRoute),
router.convertRouteToPlainObjectWithoutProto(newRoute)
));
// Only properties from the prototype were changed in this transition,
// which is why it can be emitted as a soft transition, i.e. without forcing re-rendering of components
if (isSoftTransition) {
this.emit('softChange', newRoute);
// We get the prototype by using the `__proto__` link,
// because `Object.getPrototypeOf` returns a non-watchable object.
// This behavior is based on a strategy in which every touch of an object property of the watched object
// creates a child watch object.
const
proto = r.route?.__proto__;
if (Object.isDictionary(proto)) {
// Correct values from the root route object
for (let keys = Object.keys(nonWatchRouteValues), i = 0; i < keys.length; i++) {
const key = keys[i];
proto[key] = nonWatchRouteValues[key];
}
}
} else {
hardChange = true;
this.emit('hardChange', newRoute);
r.route = newRoute;
}
emitTransition();
// This route is equal to the previous one, and we don't actually do a transition,
// but for a "push" request we need to emit a "fake" transition event anyway
} else if (method === 'push') {
emitTransition();
// In this case, we don't do a transition, but still,
// we should emit the special event, because some methods, like `back` or `forward`, can wait for it
} else {
emitTransition(true);
}
// Restoring the scroll position
if (meta.autoScroll !== false) {
(async () => {
const label = {
label: $$.autoScroll
};
const setScroll = () => {
const
s = meta.scroll;
if (s != null) {
this.r.scrollTo(s.x, s.y);
} else if (hardChange) {
this.r.scrollTo(0, 0);
}
};
// Restore the scroll position for static-height components
await this.nextTick(label);
setScroll();
// Restore the scroll position for dynamic-height components
await this.async.sleep(10, label);
setScroll();
})().catch(stderr);
}
return newRoute;
}
/**
* @deprecated
* @see [[bRouter.emitTransition]]
*/
@deprecated({renamedTo: 'emitTransition'})
setPage(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method?: TransitionMethod
): Promise<CanUndef<router.Route>> {
return this.emitTransition(ref, opts, method);
}
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param [routes] - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
basePath: string,
routes?: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param activeRoute
* @param [routes] - static schema of application routes
*/
updateRoutes(
basePath: string,
activeRoute: router.InitialRoute,
routes?: StaticRoutes
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param routes - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
routes: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* @param basePathOrRoutes
* @param [routesOrActiveRoute]
* @param [activeRouteOrRoutes]
*/
@wait('beforeReady')
async updateRoutes(
basePathOrRoutes: string | StaticRoutes,
routesOrActiveRoute?: StaticRoutes | Nullable<router.InitialRoute>,
activeRouteOrRoutes?: Nullable<router.InitialRoute> | StaticRoutes
): Promise<router.RouteBlueprints> {
let
basePath,
routes,
activeRoute;
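// Untangle the overloaded signatures: the first argument is either a base path or a routes map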
if (Object.isString(basePathOrRoutes)) {
basePath = basePathOrRoutes;
if (Object.isString(routesOrActiveRoute)) {
routes = <StaticRoutes>activeRouteOrRoutes;
activeRoute = routesOrActiveRoute;
} else {
routes = routesOrActiveRoute;
activeRoute = <Nullable<router.InitialRoute>>activeRouteOrRoutes;
}
} else {
routes = basePathOrRoutes;
activeRoute = <Nullable<router.InitialRoute>>routesOrActiveRoute;
}
if (basePath != null) {
this.basePath = basePath;
}
if (routes != null) {
this.routes = this.compileStaticRoutes(routes);
}
this.routeStore = undefined;
await this.initRoute(activeRoute ?? this.initialRoute ?? this.defaultRoute);
return this.routes;
}
protected override initRemoteData(): CanUndef<CanPromise<router.RouteBlueprints | Dictionary>> {
if (!this.db) {
return;
}
const
val = this.convertDBToComponent<StaticRoutes>(this.db);
if (Object.isDictionary(val)) {
return Promise.all(this.state.set(val)).then(() => val);
}
if (Object.isArray(val)) {
// eslint-disable-next-line prefer-spread
return this.updateRoutes.apply(this, val);
}
return this.routes;
}
/**
* Initializes the router within an application
* @emits `$root.initRouter(router:` [[bRouter]]`)`
*/
@hook('created')
protected init(): void {
this.field.set('routerStore', this, this.$root);
this.r.emit('initRouter', this);
}
/**
* Initializes the specified route
* @param [route] - route
*/
@hook('beforeDataCreate')
protected initRoute(route: Nullable<router.InitialRoute> = this.initialRoute): Promise<void> {
if (route != null) {
if (Object.isString(route)) {
return this.replace(route);
}
return this.replace(router.getRouteName(route), Object.reject(route, router.routeNames));
}
return this.replace(null);
}
/**
* Updates the current route value
*/
@wait({defer: true, label: $$.updateCurrentRoute})
protected updateCurrentRoute(): Promise<void> |
/**
* Compiles the specified static routes with the current base path and returns a new object
* @param [routes]
*/
protected compileStaticRoutes(routes: StaticRoutes = this.engine.routes ?? globalRoutes): router.RouteBlueprints {
return router.compileStaticRoutes(routes, {basePath: this.basePath});
}
protected override initBaseAPI(): void {
super.initBaseAPI();
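// Rebind these methods to the instance so they keep the correct context when passed around as callbacks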
const
i = this.instance;
this.compileStaticRoutes = i.compileStaticRoutes.bind(this);
this.emitTransition = i.emitTransition.bind(this);
}
/**
* Handler: click on an element with the `href` attribute
* @param e
*/
@watch({
field: 'document:click',
wrapper: (o, cb) => o.dom.delegate('[href]', cb)
})
protected async onLink(e: MouseEvent): Promise<void> {
const
a = <HTMLElement>e.delegateTarget,
href = a.getAttribute('href')?.trim();
const cantPrevent =
!this.interceptLinks ||
href == null ||
href === '' ||
href.startsWith('#') ||
href.startsWith('javascript:') ||
router.isExternal.test(href);
if (cantPrevent) {
return;
}
e.preventDefault();
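// A detached anchor element resolves the (possibly relative) href into an absolute URL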
const
l = Object.assign(document.createElement('a'), {href});
if (a.getAttribute('target') === '_blank' || e.ctrlKey) {
globalThis.open(l.href, '_blank');
return;
}
const
method = a.getAttribute('data-router-method');
switch (method) {
case 'back':
this.back().catch(stderr);
break;
case 'forward':
this.forward().catch(stderr);
break;
case 'go': {
const go = Object.parse(a.getAttribute('data-router-go'));
this.go(Object.isNumber(go) ? go : -1).catch(stderr);
break;
}
default: {
const
params = Object.parse(a.getAttribute('data-router-params')),
query = Object.parse(a.getAttribute('data-router-query')),
meta = Object.parse(a.getAttribute('data-router-meta'));
await this[method === 'replace' ? 'replace' : 'push'](href, {
params: Object.isDictionary(params) ? params : {},
query: Object.isDictionary(query) ? query : {},
meta: Object.isDictionary(meta) ? meta : {}
});
}
}
}
}
| {
return this.initRoute();
} | identifier_body |
b-router.ts | /*!
* V4Fire Client Core
* https://github.com/V4Fire/Client
*
* Released under the MIT license
* https://github.com/V4Fire/Client/blob/master/LICENSE
*/
/**
* [[include:base/b-router/README.md]]
* @packageDocumentation
*/
import symbolGenerator from 'core/symbol';
import { deprecated } from 'core/functools/deprecation';
import globalRoutes from 'routes';
import type Async from 'core/async';
import iData, { component, prop, system, computed, hook, wait, watch } from 'super/i-data/i-data';
import engine, * as router from 'core/router';
import { fillRouteParams } from 'base/b-router/modules/normalizers';
import type { StaticRoutes, RouteOption, TransitionMethod } from 'base/b-router/interface';
export * from 'super/i-data/i-data';
export * from 'core/router/const';
export * from 'base/b-router/interface';
export const
$$ = symbolGenerator();
/**
* Component to route application pages
*/
@component({
deprecatedProps: {
pageProp: 'activeRoute',
pagesProp: 'routesProp'
}
})
export default class bRouter extends iData {
/**
* Type: page parameters
*/
readonly PageParams!: RouteOption;
/**
* Type: page query
*/
readonly PageQuery!: RouteOption;
/**
* Type: page meta
*/
readonly PageMeta!: RouteOption;
public override async!: Async<this>;
/**
* The static schema of application routes.
* By default, this value is taken from `routes/index.ts`.
*
* @example
* ```
* < b-router :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: Object,
required: false,
watch: (ctx, val, old) => {
if (!Object.fastCompare(val, old)) {
ctx.updateCurrentRoute();
}
}
})
readonly routesProp?: StaticRoutes;
/**
* Compiled schema of application routes
* @see [[bRouter.routesProp]]
*/
@system<bRouter>({
after: 'engine',
init: (o) => o.sync.link(o.compileStaticRoutes)
})
routes!: router.RouteBlueprints;
/**
* An initial route value.
* Usually, you don't need to provide this value manually,
* because it is inferred automatically, but sometimes it can be useful.
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: [String, Object],
required: false,
watch: 'updateCurrentRoute'
})
readonly initialRoute?: router.InitialRoute;
/**
* Base route path: all route paths are concatenated with this path
*
* @example
* ```
* < b-router :basePath = '/demo' | :routes = { &
* user: {
* /// '/demo/user'
* path: '/user'
* }
* } .
* ```
*/
@prop({watch: 'updateCurrentRoute'})
readonly basePathProp: string = '/';
/** @see [[bRouter.basePathProp]] */
@system<bRouter>((o) => o.sync.link())
basePath!: string;
/**
* If true, the router will intercept all click events on elements with a `href` attribute to emit a transition.
* An element with `href` can have additional attributes:
*
* * `data-router-method` - type of the used router method to emit the transition;
* * `data-router-go` - value for the router `go` method;
* * `data-router-params`, `data-router-query`, `data-router-meta` - additional parameters for the used router method
* (to provide an object use JSON).
*/
@prop(Boolean)
readonly interceptLinks: boolean = true;
/**
* A factory to create the router engine.
* By default, this value is taken from `core/router/engines`.
*
* @example
* ```
* < b-router :engine = myCustomEngine
* ```
*/
@prop<bRouter>({
type: Function,
watch: 'updateCurrentRoute',
default: engine
})
readonly engineProp!: () => router.Router;
/**
* An internal router engine.
* For example, it can be the HTML5 history router or a router based on URL hash values.
*
* @see [[bRouter.engine]]
*/
@system((o) => o.sync.link((v) => (<(v: unknown) => router.Router>v)(o)))
protected engine!: router.Router;
/**
* Raw value of the active route
*/
@system()
protected routeStore?: router.Route;
/**
* Value of the active route
* @see [[bRouter.routeStore]]
*
* @example
* ```js
* console.log(route?.query)
* ```
*/
override get route(): CanUndef<this['r']['CurrentPage']> {
return this.field.get('routeStore');
}
/**
* @deprecated
* @see [[bRouter.route]]
*/
@deprecated({renamedTo: 'route'})
get page(): CanUndef<this['r']['CurrentPage']> {
return this.route;
}
/**
* Default route value
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*
* ```js
* router.defaultRoute.name === 'notFound'
* ```
*/
@computed({cache: true, dependencies: ['routes']})
get defaultRoute(): CanUndef<router.RouteBlueprint> {
let route;
for (let keys = Object.keys(this.routes), i = 0; i < keys.length; i++) {
const
el = this.routes[keys[i]];
if (el?.meta.default) {
route = el;
break;
}
}
return route;
}
/**
* Pushes a new route to the history stack.
* The method returns a promise that is resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.push('main', {query: {foo: 1}});
* router.push('/user/:id', {params: {id: 1}});
* router.push('https://google.com');
* ```
*/
async push(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'push');
}
/**
* Replaces the current route.
* The method returns a promise that will be resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.replace('main', {query: {foo: 1}});
* router.replace('/user/:id', {params: {id: 1}});
* router.replace('https://google.com');
* ```
*/
async replace(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'replace');
}
/**
* Switches to a route from the history,
* identified by its relative position to the current route (with the current route being relative index 0).
* The method returns a promise that will be resolved when the transition is completed.
*
* @param pos
*
* @example
* ```js
* this.go(-1) // this.back();
* this.go(1) // this.forward();
* this.go(-2) // this.back(); this.back();
* ```
*/
async go(pos: number): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.go(pos);
await res;
}
/**
* Switches to the next route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async forward(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.forward();
await res;
}
/**
* Switches to the previous route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async back(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.back();
await res;
}
/**
* Clears the routes' history.
* Mind, this method can't work properly with `HistoryAPI` based engines.
*
* @param [filter] - filter predicate
*/
clear(filter?: router.HistoryClearFilter): Promise<void> {
return this.engine.clear(filter);
}
/**
* Clears all temporary routes from the history.
* The temporary route is a route that has the `tmp` flag within its own properties, like `params`, `query` or `meta`.
* Mind, this method can't work properly with `HistoryAPI` based engines.
*
* @example
* ```js
* this.push('redeem-money', {
* meta: {
* tmp: true
* }
* });
*
* this.clearTmp();
* ```
*/
clearTmp(): Promise<void> {
return this.engine.clearTmp();
}
/** @see [[router.getRoutePath]] */
getRoutePath(ref: string, opts: router.TransitionOptions = {}): CanUndef<string> {
return router.getRoutePath(ref, this.routes, opts);
}
/** @see [[router.getRoute]] */
getRoute(ref: string): CanUndef<router.RouteAPI> {
const {routes, basePath, defaultRoute} = this;
return router.getRoute(ref, routes, {basePath, defaultRoute});
}
/**
* @deprecated
* @see [[bRouter.getRoute]]
*/
@deprecated({renamedTo: 'getRoute'})
getPageOpts(ref: string): CanUndef<router.RouteBlueprint> {
return this.getRoute(ref);
}
/**
* Emits a new transition to the specified route
*
* @param ref - route name or URL or `null`, if the route is equal to the previous
* @param [opts] - additional transition options
* @param [method] - transition method
*
* @emits `beforeChange(route: Nullable<string>, params:` [[TransitionOptions]]`, method:` [[TransitionMethod]]`)`
*
* @emits `change(route:` [[Route]]`)`
* @emits `hardChange(route:` [[Route]]`)`
* @emits `softChange(route:` [[Route]]`)`
*
* @emits `transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
* @emits `$root.transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
*/
async emitTransition(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method: TransitionMethod = 'push'
): Promise<CanUndef<router.Route>> {
opts = router.getBlankRouteFrom(router.normalizeTransitionOpts(opts));
const
{r, engine} = this;
const
currentEngineRoute = engine.route ?? engine.page;
this.emit('beforeChange', ref, opts, method);
let
newRouteInfo: CanUndef<router.RouteAPI>;
const getEngineRoute = () => currentEngineRoute ?
currentEngineRoute.url ?? router.getRouteName(currentEngineRoute) :
undefined;
// Get information about the specified route
if (ref != null) {
newRouteInfo = this.getRoute(engine.id(ref));
// In this case, we don't have the specified ref for a transition,
// so we try to get information from the current route and use it as a blueprint for the new one
} else if (currentEngineRoute) {
ref = getEngineRoute()!;
const
route = this.getRoute(ref);
if (route) {
newRouteInfo = Object.mixin(true, route, router.purifyRoute(currentEngineRoute));
}
}
const scroll = {
meta: {
scroll: {
x: pageXOffset,
y: pageYOffset
}
}
};
// To save the scroll position before changing to a new route,
// we need to emit a system "replace" transition that carries information about the scroll
if (currentEngineRoute && method !== 'replace') {
const
currentRouteWithScroll = Object.mixin(true, undefined, currentEngineRoute, scroll);
if (!Object.fastCompare(currentEngineRoute, currentRouteWithScroll)) {
await engine.replace(getEngineRoute()!, currentRouteWithScroll);
}
}
// We haven't found any routes that match the specified ref
if (newRouteInfo == null) {
// The transition was emitted by a user, so we need to save the scroll
if (method !== 'event' && ref != null) {
await engine[method](ref, scroll);
}
return;
}
if ((<router.PurifiedRoute<router.RouteAPI>>newRouteInfo).name == null) {
const
nm = router.getRouteName(currentEngineRoute);
if (nm != null) {
newRouteInfo.name = nm;
}
}
const
currentRoute = this.field.get<router.Route>('routeStore'),
deepMixin = (...args) => Object.mixin({deep: true, skipUndefs: false}, ...args);
// If the new route matches the current one by name,
// we need to mix the new state into the current one
if (router.getRouteName(currentRoute) === newRouteInfo.name) {
deepMixin(newRouteInfo, router.getBlankRouteFrom(currentRoute), opts);
// Simple normalizing of a route state
} else {
deepMixin(newRouteInfo, opts);
}
const {meta} = newRouteInfo;
// If a route supports filling from the root object or query parameters
fillRouteParams(newRouteInfo, this);
// We have two variants of transitions:
// "soft" - only query or meta parameters were changed between the routes
// "hard" - the first and second routes aren't equal by name
// Mutations of query and meta parameters of a route shouldn't force re-rendering of components,
// that is why we placed them in a prototype object by using `Object.create`
const nonWatchRouteValues = {
url: newRouteInfo.resolvePath(newRouteInfo.params),
query: newRouteInfo.query,
meta
};
const newRoute = Object.assign(
Object.create(nonWatchRouteValues),
Object.reject(router.convertRouteToPlainObject(newRouteInfo), Object.keys(nonWatchRouteValues))
);
let
hardChange = false;
// Emits the route transition event
const emitTransition = (onlyOwnTransition?: boolean) => {
const type = hardChange ? 'hard' : 'soft';
if (onlyOwnTransition) {
this.emit('transition', newRoute, type);
} else {
this.emit('change', newRoute);
this.emit('transition', newRoute, type);
r.emit('transition', newRoute, type);
}
};
// Checking that a new route is really needed, i.e., it isn't equal to the previous
let newRouteIsReallyNeeded = !Object.fastCompare(
router.getComparableRouteParams(currentRoute),
router.getComparableRouteParams(newRoute)
);
// Nothing changes between the routes, but some meta object is provided
if (!newRouteIsReallyNeeded && currentRoute != null && opts.meta != null) {
newRouteIsReallyNeeded = !Object.fastCompare(
Object.select(currentRoute.meta, opts.meta),
opts.meta
);
}
// The transition is necessary, but now we need to understand whether we should emit a "soft" or "hard" transition
if (newRouteIsReallyNeeded) {
this.field.set('routeStore', newRoute);
const
plainInfo = router.convertRouteToPlainObject(newRouteInfo);
const canRouteTransformToReplace =
currentRoute &&
method !== 'replace' &&
Object.fastCompare(router.convertRouteToPlainObject(currentRoute), plainInfo);
if (canRouteTransformToReplace) {
method = 'replace';
}
// If the used engine does not support the requested transition method,
// we should use `replace`
if (!Object.isFunction(engine[method])) {
method = 'replace';
}
// This transition is marked as `external`,
// i.e. it refers to another site
if (newRouteInfo.meta.external) {
const u = newRoute.url;
location.href = u !== '' ? u : '/';
return;
}
await engine[method](newRoute.url, plainInfo);
const isSoftTransition = Boolean(r.route && Object.fastCompare(
router.convertRouteToPlainObjectWithoutProto(currentRoute),
router.convertRouteToPlainObjectWithoutProto(newRoute)
));
// Only properties from the prototype were changed in this transition,
// which is why it can be emitted as a soft transition, i.e. without forcing re-rendering of components
if (isSoftTransition) {
this.emit('softChange', newRoute);
// We get the prototype by using the `__proto__` link,
// because `Object.getPrototypeOf` returns a non-watchable object.
// This behavior is based on a strategy in which every touch of an object property of the watched object
// creates a child watch object.
const
proto = r.route?.__proto__;
if (Object.isDictionary(proto)) {
// Correct values from the root route object
for (let keys = Object.keys(nonWatchRouteValues), i = 0; i < keys.length; i++) {
const key = keys[i];
proto[key] = nonWatchRouteValues[key];
}
}
} else {
hardChange = true;
this.emit('hardChange', newRoute);
r.route = newRoute;
}
emitTransition();
// This route is equal to the previous one, and we don't actually do a transition,
// but for a "push" request we need to emit a "fake" transition event anyway
} else if (method === 'push') {
emitTransition();
// In this case, we don't do a transition, but still,
// we should emit the special event, because some methods, like `back` or `forward`, can wait for it
} else {
emitTransition(true);
}
// Restoring the scroll position
if (meta.autoScroll !== false) {
(async () => {
const label = {
label: $$.autoScroll
};
const setScroll = () => {
const
s = meta.scroll;
if (s != null) {
this.r.scrollTo(s.x, s.y);
} else if (hardChange) {
this.r.scrollTo(0, 0);
}
};
// Restore the scroll position for static-height components
await this.nextTick(label);
setScroll();
// Restore the scroll position for dynamic-height components
await this.async.sleep(10, label);
setScroll();
})().catch(stderr);
}
return newRoute;
}
/**
* @deprecated
* @see [[bRouter.emitTransition]]
*/
@deprecated({renamedTo: 'emitTransition'})
setPage(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method?: TransitionMethod
): Promise<CanUndef<router.Route>> {
return this.emitTransition(ref, opts, method);
}
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param [routes] - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
basePath: string,
routes?: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param activeRoute
* @param [routes] - static schema of application routes
*/
updateRoutes(
basePath: string,
activeRoute: router.InitialRoute,
routes?: StaticRoutes
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param routes - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
routes: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* @param basePathOrRoutes
* @param [routesOrActiveRoute]
* @param [activeRouteOrRoutes]
*/
@wait('beforeReady')
async updateRoutes(
basePathOrRoutes: string | StaticRoutes,
routesOrActiveRoute?: StaticRoutes | Nullable<router.InitialRoute>,
activeRouteOrRoutes?: Nullable<router.InitialRoute> | StaticRoutes
): Promise<router.RouteBlueprints> {
let
basePath,
routes,
activeRoute;
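// Untangle the overloaded signatures: the first argument is either a base path or a routes map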
if (Object.isString(basePathOrRoutes)) {
basePath = basePathOrRoutes;
if (Object.isString(routesOrActiveRoute)) {
routes = <StaticRoutes>activeRouteOrRoutes;
activeRoute = routesOrActiveRoute;
} else {
routes = routesOrActiveRoute;
activeRoute = <Nullable<router.InitialRoute>>activeRouteOrRoutes;
}
} else {
routes = basePathOrRoutes;
activeRoute = <Nullable<router.InitialRoute>>routesOrActiveRoute;
}
if (basePath != null) {
this.basePath = basePath;
}
if (routes != null) {
this.routes = this.compileStaticRoutes(routes);
}
this.routeStore = undefined;
await this.initRoute(activeRoute ?? this.initialRoute ?? this.defaultRoute);
return this.routes;
}
protected override initRemoteData(): CanUndef<CanPromise<router.RouteBlueprints | Dictionary>> {
if (!this.db) {
return;
}
const
val = this.convertDBToComponent<StaticRoutes>(this.db);
if (Object.isDictionary(val)) {
return Promise.all(this.state.set(val)).then(() => val);
}
if (Object.isArray(val)) {
// eslint-disable-next-line prefer-spread
return this.updateRoutes.apply(this, val);
}
return this.routes;
}
/**
* Initializes the router within an application
* @emits `$root.initRouter(router:` [[bRouter]]`)`
*/
@hook('created')
protected init(): void {
this.field.set('routerStore', this, this.$root);
this.r.emit('initRouter', this);
}
/**
* Initializes the specified route
* @param [route] - route
*/
@hook('beforeDataCreate')
protected initRoute(route: Nullable<router.InitialRoute> = this.initialRoute): Promise<void> {
if (route != null) {
if (Object.isString(route)) {
return this.replace(route);
}
return this.replace(router.getRouteName(route), Object.reject(route, router.routeNames));
}
return this.replace(null);
}
/**
* Updates the current route value
*/
@wait({defer: true, label: $$.updateCurrentRoute})
protected updateCurrentRoute(): Promise<void> {
return this.initRoute();
}
/**
* Compiles the specified static routes with the current base path and returns a new object
* @param [routes]
*/
protected compileStaticRoutes(routes: StaticRoutes = this.engine.routes ?? globalRoutes): router.RouteBlueprints {
return router.compileStaticRoutes(routes, {basePath: this.basePath});
}
protected override | (): void {
super.initBaseAPI();
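// Rebind these methods to the instance so they keep the correct context when passed around as callbacks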
const
i = this.instance;
this.compileStaticRoutes = i.compileStaticRoutes.bind(this);
this.emitTransition = i.emitTransition.bind(this);
}
/**
* Handler: click on an element with the `href` attribute
* @param e
*/
@watch({
field: 'document:click',
wrapper: (o, cb) => o.dom.delegate('[href]', cb)
})
protected async onLink(e: MouseEvent): Promise<void> {
const
a = <HTMLElement>e.delegateTarget,
href = a.getAttribute('href')?.trim();
const cantPrevent =
!this.interceptLinks ||
href == null ||
href === '' ||
href.startsWith('#') ||
href.startsWith('javascript:') ||
router.isExternal.test(href);
if (cantPrevent) {
return;
}
e.preventDefault();
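// A detached anchor element resolves the (possibly relative) href into an absolute URL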
const
l = Object.assign(document.createElement('a'), {href});
if (a.getAttribute('target') === '_blank' || e.ctrlKey) {
globalThis.open(l.href, '_blank');
return;
}
const
method = a.getAttribute('data-router-method');
switch (method) {
case 'back':
this.back().catch(stderr);
break;
case 'forward':
this.forward().catch(stderr);
break;
case 'go': {
const go = Object.parse(a.getAttribute('data-router-go'));
this.go(Object.isNumber(go) ? go : -1).catch(stderr);
break;
}
default: {
const
params = Object.parse(a.getAttribute('data-router-params')),
query = Object.parse(a.getAttribute('data-router-query')),
meta = Object.parse(a.getAttribute('data-router-meta'));
await this[method === 'replace' ? 'replace' : 'push'](href, {
params: Object.isDictionary(params) ? params : {},
query: Object.isDictionary(query) ? query : {},
meta: Object.isDictionary(meta) ? meta : {}
});
}
}
}
}
| initBaseAPI | identifier_name |
b-router.ts | /*!
* V4Fire Client Core
* https://github.com/V4Fire/Client
*
* Released under the MIT license
* https://github.com/V4Fire/Client/blob/master/LICENSE
*/
/**
* [[include:base/b-router/README.md]]
* @packageDocumentation
*/
import symbolGenerator from 'core/symbol';
import { deprecated } from 'core/functools/deprecation';
import globalRoutes from 'routes';
import type Async from 'core/async';
import iData, { component, prop, system, computed, hook, wait, watch } from 'super/i-data/i-data';
import engine, * as router from 'core/router';
import { fillRouteParams } from 'base/b-router/modules/normalizers';
import type { StaticRoutes, RouteOption, TransitionMethod } from 'base/b-router/interface';
export * from 'super/i-data/i-data';
export * from 'core/router/const';
export * from 'base/b-router/interface';
export const
$$ = symbolGenerator();
/**
* Component to route application pages
*/
@component({
deprecatedProps: {
pageProp: 'activeRoute',
pagesProp: 'routesProp'
}
})
export default class bRouter extends iData {
/**
* Type: page parameters
*/
readonly PageParams!: RouteOption;
/**
* Type: page query
*/
readonly PageQuery!: RouteOption;
/**
* Type: page meta
*/
readonly PageMeta!: RouteOption;
public override async!: Async<this>;
/**
* The static schema of application routes.
* By default, this value is taken from `routes/index.ts`.
*
* @example
* ```
* < b-router :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: Object,
required: false,
watch: (ctx, val, old) => {
if (!Object.fastCompare(val, old)) {
ctx.updateCurrentRoute();
}
}
})
readonly routesProp?: StaticRoutes;
/**
* Compiled schema of application routes
* @see [[bRouter.routesProp]]
*/
@system<bRouter>({
after: 'engine',
init: (o) => o.sync.link(o.compileStaticRoutes)
})
routes!: router.RouteBlueprints;
/**
* An initial route value.
* Usually, you don't need to provide this value manually,
* because it is inferred automatically, but sometimes it can be useful.
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: [String, Object],
required: false,
watch: 'updateCurrentRoute'
})
readonly initialRoute?: router.InitialRoute;
/**
* Base route path: all route paths are concatenated with this path
*
* @example
* ```
* < b-router :basePath = '/demo' | :routes = { &
* user: {
* /// '/demo/user'
* path: '/user'
* }
* } .
* ```
*/
@prop({watch: 'updateCurrentRoute'})
readonly basePathProp: string = '/';
/** @see [[bRouter.basePathProp]] */
@system<bRouter>((o) => o.sync.link())
basePath!: string;
/**
* If true, the router will intercept all click events on elements with a `href` attribute to emit a transition.
* An element with `href` can have additional attributes:
*
* * `data-router-method` - type of the used router method to emit the transition;
* * `data-router-go` - value for the router `go` method;
* * `data-router-params`, `data-router-query`, `data-router-meta` - additional parameters for the used router method
* (to provide an object use JSON).
*/
@prop(Boolean)
readonly interceptLinks: boolean = true;
/**
* A factory to create the router engine.
* By default, this value is taken from `core/router/engines`.
*
* @example
* ```
* < b-router :engine = myCustomEngine
* ```
*/
@prop<bRouter>({
type: Function,
watch: 'updateCurrentRoute',
default: engine
})
readonly engineProp!: () => router.Router;
/**
* An internal router engine.
* For example, it can be the HTML5 history router or a router based on URL hash values.
*
* @see [[bRouter.engine]]
*/
@system((o) => o.sync.link((v) => (<(v: unknown) => router.Router>v)(o)))
protected engine!: router.Router;
/**
* Raw value of the active route
*/
@system()
protected routeStore?: router.Route;
/**
* Value of the active route
* @see [[bRouter.routeStore]]
*
* @example
* ```js
* console.log(route?.query)
* ```
*/
override get route(): CanUndef<this['r']['CurrentPage']> {
return this.field.get('routeStore');
}
/**
* @deprecated
* @see [[bRouter.route]]
*/
@deprecated({renamedTo: 'route'})
get page(): CanUndef<this['r']['CurrentPage']> {
return this.route;
}
/**
* Default route value
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*
* ```js
* router.defaultRoute.name === 'notFound'
* ```
*/
@computed({cache: true, dependencies: ['routes']})
get defaultRoute(): CanUndef<router.RouteBlueprint> {
let route;
for (let keys = Object.keys(this.routes), i = 0; i < keys.length; i++) {
const
el = this.routes[keys[i]];
if (el?.meta.default) {
route = el;
break;
}
}
return route;
}
/**
* Pushes a new route to the history stack.
* The method returns a promise that is resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.push('main', {query: {foo: 1}});
* router.push('/user/:id', {params: {id: 1}});
* router.push('https://google.com');
* ```
*/
async push(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'push');
}
/**
* Replaces the current route.
* The method returns a promise that will be resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.replace('main', {query: {foo: 1}});
* router.replace('/user/:id', {params: {id: 1}});
* router.replace('https://google.com');
* ```
*/
async replace(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'replace');
}
/**
* Switches to a route from the history,
* identified by its relative position to the current route (with the current route being relative index 0).
* The method returns a promise that will be resolved when the transition is completed.
*
* @param pos
*
* @example
* ```js
* this.go(-1) // this.back();
* this.go(1) // this.forward();
* this.go(-2) // this.back(); this.back();
* ```
*/
async go(pos: number): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.go(pos);
await res;
}
/**
* Switches to the next route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async forward(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.forward();
await res;
}
/**
* Switches to the previous route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async back(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.back();
await res;
}
/**
* Clears the routes' history.
* Note that this method can't work properly with `HistoryAPI`-based engines.
*
* @param [filter] - filter predicate
*/
clear(filter?: router.HistoryClearFilter): Promise<void> {
return this.engine.clear(filter);
}
/**
* Clears all temporary routes from the history.
* A temporary route is a route that has the `tmp` flag within one of its own properties, such as `params`, `query` or `meta`.
* Note that this method can't work properly with `HistoryAPI`-based engines.
*
* @example
* ```js
* this.push('redeem-money', {
* meta: {
* tmp: true
* }
* });
*
* this.clearTmp();
* ```
*/
clearTmp(): Promise<void> {
return this.engine.clearTmp();
}
/** @see [[router.getRoutePath]] */
getRoutePath(ref: string, opts: router.TransitionOptions = {}): CanUndef<string> {
return router.getRoutePath(ref, this.routes, opts);
}
/** @see [[router.getRoute]] */
getRoute(ref: string): CanUndef<router.RouteAPI> {
const {routes, basePath, defaultRoute} = this;
return router.getRoute(ref, routes, {basePath, defaultRoute});
}
/**
* @deprecated
* @see [[bRouter.getRoute]]
*/
@deprecated({renamedTo: 'getRoute'})
getPageOpts(ref: string): CanUndef<router.RouteBlueprint> {
return this.getRoute(ref);
}
/**
* Emits a new transition to the specified route
*
* @param ref - route name or URL, or `null` if the route is equal to the previous one
* @param [opts] - additional transition options
* @param [method] - transition method
*
* @emits `beforeChange(route: Nullable<string>, params:` [[TransitionOptions]]`, method:` [[TransitionMethod]]`)`
*
* @emits `change(route:` [[Route]]`)`
* @emits `hardChange(route:` [[Route]]`)`
* @emits `softChange(route:` [[Route]]`)`
*
* @emits `transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
* @emits `$root.transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
*/
async emitTransition(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method: TransitionMethod = 'push'
): Promise<CanUndef<router.Route>> {
opts = router.getBlankRouteFrom(router.normalizeTransitionOpts(opts));
const
{r, engine} = this;
const
currentEngineRoute = engine.route ?? engine.page;
this.emit('beforeChange', ref, opts, method);
let
newRouteInfo: CanUndef<router.RouteAPI>;
const getEngineRoute = () => currentEngineRoute ?
currentEngineRoute.url ?? router.getRouteName(currentEngineRoute) :
undefined;
// Get information about the specified route
if (ref != null) {
newRouteInfo = this.getRoute(engine.id(ref));
// In this case, we don't have the specified ref for the transition,
// so we try to get information from the current route and use it as a blueprint for the new one
} else if (currentEngineRoute) {
ref = getEngineRoute()!;
const
route = this.getRoute(ref);
if (route) {
newRouteInfo = Object.mixin(true, route, router.purifyRoute(currentEngineRoute));
}
}
const scroll = {
meta: {
scroll: {
x: pageXOffset,
y: pageYOffset
}
} | if (currentEngineRoute && method !== 'replace') {
const
currentRouteWithScroll = Object.mixin(true, undefined, currentEngineRoute, scroll);
if (!Object.fastCompare(currentEngineRoute, currentRouteWithScroll)) {
await engine.replace(getEngineRoute()!, currentRouteWithScroll);
}
}
// We haven't found any routes that match the specified ref
if (newRouteInfo == null) {
// If the transition was emitted by a user, we need to save the scroll position
if (method !== 'event' && ref != null) {
await engine[method](ref, scroll);
}
return;
}
if ((<router.PurifiedRoute<router.RouteAPI>>newRouteInfo).name == null) {
const
nm = router.getRouteName(currentEngineRoute);
if (nm != null) {
newRouteInfo.name = nm;
}
}
const
currentRoute = this.field.get<router.Route>('routeStore'),
deepMixin = (...args) => Object.mixin({deep: true, skipUndefs: false}, ...args);
// If the new route matches the current one by name,
// we need to mix the new state into the current one
if (router.getRouteName(currentRoute) === newRouteInfo.name) {
deepMixin(newRouteInfo, router.getBlankRouteFrom(currentRoute), opts);
// Simple normalizing of a route state
} else {
deepMixin(newRouteInfo, opts);
}
const {meta} = newRouteInfo;
// If a route supports filling from the root object or query parameters
fillRouteParams(newRouteInfo, this);
// We have two variants of transitions:
// "soft" - only query or meta parameters were changed between the routes
// "hard" - the two routes aren't equal by name
// Mutations of query and meta parameters of a route shouldn't force re-rendering of components,
// which is why we place them in a prototype object by using `Object.create`
const nonWatchRouteValues = {
url: newRouteInfo.resolvePath(newRouteInfo.params),
query: newRouteInfo.query,
meta
};
const newRoute = Object.assign(
Object.create(nonWatchRouteValues),
Object.reject(router.convertRouteToPlainObject(newRouteInfo), Object.keys(nonWatchRouteValues))
);
let
hardChange = false;
// Emits the route transition event
const emitTransition = (onlyOwnTransition?: boolean) => {
const type = hardChange ? 'hard' : 'soft';
if (onlyOwnTransition) {
this.emit('transition', newRoute, type);
} else {
this.emit('change', newRoute);
this.emit('transition', newRoute, type);
r.emit('transition', newRoute, type);
}
};
// Checking that the new route is really needed, i.e., that it isn't equal to the previous one
let newRouteIsReallyNeeded = !Object.fastCompare(
router.getComparableRouteParams(currentRoute),
router.getComparableRouteParams(newRoute)
);
// Nothing changed between the routes, but a meta object was provided
if (!newRouteIsReallyNeeded && currentRoute != null && opts.meta != null) {
newRouteIsReallyNeeded = !Object.fastCompare(
Object.select(currentRoute.meta, opts.meta),
opts.meta
);
}
// The transition is necessary, but now we need to understand whether we should emit a "soft" or "hard" transition
if (newRouteIsReallyNeeded) {
this.field.set('routeStore', newRoute);
const
plainInfo = router.convertRouteToPlainObject(newRouteInfo);
const canRouteTransformToReplace =
currentRoute &&
method !== 'replace' &&
Object.fastCompare(router.convertRouteToPlainObject(currentRoute), plainInfo);
if (canRouteTransformToReplace) {
method = 'replace';
}
// If the used engine does not support the requested transition method,
// we should use `replace`
if (!Object.isFunction(engine[method])) {
method = 'replace';
}
// This transition is marked as `external`,
// i.e. it refers to another site
if (newRouteInfo.meta.external) {
const u = newRoute.url;
location.href = u !== '' ? u : '/';
return;
}
await engine[method](newRoute.url, plainInfo);
const isSoftTransition = Boolean(r.route && Object.fastCompare(
router.convertRouteToPlainObjectWithoutProto(currentRoute),
router.convertRouteToPlainObjectWithoutProto(newRoute)
));
// Only properties from the prototype were changed in this transition,
// which is why it can be emitted as a soft transition, i.e. without forcing components to re-render
if (isSoftTransition) {
this.emit('softChange', newRoute);
// We get the prototype via the `__proto__` link,
// because `Object.getPrototypeOf` returns a non-watchable object.
// This behavior relies on the strategy that every touch of an object property of the watched object
// creates a child watch object.
const
proto = r.route?.__proto__;
if (Object.isDictionary(proto)) {
// Correct values from the root route object
for (let keys = Object.keys(nonWatchRouteValues), i = 0; i < keys.length; i++) {
const key = keys[i];
proto[key] = nonWatchRouteValues[key];
}
}
} else {
hardChange = true;
this.emit('hardChange', newRoute);
r.route = newRoute;
}
emitTransition();
// This route is equal to the previous one, and we don't actually perform a transition,
// but for a "push" request we need to emit a "fake" transition event anyway
} else if (method === 'push') {
emitTransition();
// In this case, we don't perform a transition, but we still
// should emit the special event, because some methods, like `back` or `forward`, can wait for it
} else {
emitTransition(true);
}
// Restoring the scroll position
if (meta.autoScroll !== false) {
(async () => {
const label = {
label: $$.autoScroll
};
const setScroll = () => {
const
s = meta.scroll;
if (s != null) {
this.r.scrollTo(s.x, s.y);
} else if (hardChange) {
this.r.scrollTo(0, 0);
}
};
// Restoring the scroll for static-height components
await this.nextTick(label);
setScroll();
// Restoring the scroll for dynamic-height components
await this.async.sleep(10, label);
setScroll();
})().catch(stderr);
}
return newRoute;
}
/**
* @deprecated
* @see [[bRouter.emitTransition]]
*/
@deprecated({renamedTo: 'emitTransition'})
setPage(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method?: TransitionMethod
): Promise<CanUndef<router.Route>> {
return this.emitTransition(ref, opts, method);
}
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param [routes] - static schema of application routes
* @param [activeRoute]
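*
* @example
* A usage sketch (the base path, route schema, and active route below are
* assumed values for illustration only):
*
* ```js
* await this.updateRoutes('/demo', {
*   user: {path: '/user'}
* }, 'user');
* ```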
*/
updateRoutes(
basePath: string,
routes?: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param activeRoute
* @param [routes] - static schema of application routes
*/
updateRoutes(
basePath: string,
activeRoute: router.InitialRoute,
routes?: StaticRoutes
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param routes - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
routes: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* @param basePathOrRoutes
* @param [routesOrActiveRoute]
* @param [activeRouteOrRoutes]
*/
@wait('beforeReady')
async updateRoutes(
basePathOrRoutes: string | StaticRoutes,
routesOrActiveRoute?: StaticRoutes | Nullable<router.InitialRoute>,
activeRouteOrRoutes?: Nullable<router.InitialRoute> | StaticRoutes
): Promise<router.RouteBlueprints> {
let
basePath,
routes,
activeRoute;
if (Object.isString(basePathOrRoutes)) {
basePath = basePathOrRoutes;
if (Object.isString(routesOrActiveRoute)) {
routes = <StaticRoutes>activeRouteOrRoutes;
activeRoute = routesOrActiveRoute;
} else {
routes = routesOrActiveRoute;
activeRoute = <Nullable<router.InitialRoute>>activeRouteOrRoutes;
}
} else {
routes = basePathOrRoutes;
activeRoute = <Nullable<router.InitialRoute>>routesOrActiveRoute;
}
if (basePath != null) {
this.basePath = basePath;
}
if (routes != null) {
this.routes = this.compileStaticRoutes(routes);
}
this.routeStore = undefined;
await this.initRoute(activeRoute ?? this.initialRoute ?? this.defaultRoute);
return this.routes;
}
protected override initRemoteData(): CanUndef<CanPromise<router.RouteBlueprints | Dictionary>> {
if (!this.db) {
return;
}
const
val = this.convertDBToComponent<StaticRoutes>(this.db);
if (Object.isDictionary(val)) {
return Promise.all(this.state.set(val)).then(() => val);
}
if (Object.isArray(val)) {
// eslint-disable-next-line prefer-spread
return this.updateRoutes.apply(this, val);
}
return this.routes;
}
/**
* Initializes the router within an application
* @emits `$root.initRouter(router:` [[bRouter]]`)`
*/
@hook('created')
protected init(): void {
this.field.set('routerStore', this, this.$root);
this.r.emit('initRouter', this);
}
/**
* Initializes the specified route
* @param [route] - route
*/
@hook('beforeDataCreate')
protected initRoute(route: Nullable<router.InitialRoute> = this.initialRoute): Promise<void> {
if (route != null) {
if (Object.isString(route)) {
return this.replace(route);
}
return this.replace(router.getRouteName(route), Object.reject(route, router.routeNames));
}
return this.replace(null);
}
/**
* Updates the current route value
*/
@wait({defer: true, label: $$.updateCurrentRoute})
protected updateCurrentRoute(): Promise<void> {
return this.initRoute();
}
/**
* Compiles the specified static routes with the current base path and returns a new object
* @param [routes]
*/
protected compileStaticRoutes(routes: StaticRoutes = this.engine.routes ?? globalRoutes): router.RouteBlueprints {
return router.compileStaticRoutes(routes, {basePath: this.basePath});
}
protected override initBaseAPI(): void {
super.initBaseAPI();
const
i = this.instance;
this.compileStaticRoutes = i.compileStaticRoutes.bind(this);
this.emitTransition = i.emitTransition.bind(this);
}
/**
* Handler: click on an element with the `href` attribute
* @param e
*/
@watch({
field: 'document:click',
wrapper: (o, cb) => o.dom.delegate('[href]', cb)
})
protected async onLink(e: MouseEvent): Promise<void> {
const
a = <HTMLElement>e.delegateTarget,
href = a.getAttribute('href')?.trim();
const cantPrevent =
!this.interceptLinks ||
href == null ||
href === '' ||
href.startsWith('#') ||
href.startsWith('javascript:') ||
router.isExternal.test(href);
if (cantPrevent) {
return;
}
e.preventDefault();
const
l = Object.assign(document.createElement('a'), {href});
if (a.getAttribute('target') === '_blank' || e.ctrlKey) {
globalThis.open(l.href, '_blank');
return;
}
const
method = a.getAttribute('data-router-method');
switch (method) {
case 'back':
this.back().catch(stderr);
break;
case 'forward':
this.forward().catch(stderr);
break;
case 'go': {
const go = Object.parse(a.getAttribute('data-router-go'));
this.go(Object.isNumber(go) ? go : -1).catch(stderr);
break;
}
default: {
const
params = Object.parse(a.getAttribute('data-router-params')),
query = Object.parse(a.getAttribute('data-router-query')),
meta = Object.parse(a.getAttribute('data-router-meta'));
await this[method === 'replace' ? 'replace' : 'push'](href, {
params: Object.isDictionary(params) ? params : {},
query: Object.isDictionary(query) ? query : {},
meta: Object.isDictionary(meta) ? meta : {}
});
}
}
}
} | };
// To save scroll position before change to a new route
// we need to emit system "replace" transition with padding information about the scroll | random_line_split |
b-router.ts | /*!
* V4Fire Client Core
* https://github.com/V4Fire/Client
*
* Released under the MIT license
* https://github.com/V4Fire/Client/blob/master/LICENSE
*/
/**
* [[include:base/b-router/README.md]]
* @packageDocumentation
*/
import symbolGenerator from 'core/symbol';
import { deprecated } from 'core/functools/deprecation';
import globalRoutes from 'routes';
import type Async from 'core/async';
import iData, { component, prop, system, computed, hook, wait, watch } from 'super/i-data/i-data';
import engine, * as router from 'core/router';
import { fillRouteParams } from 'base/b-router/modules/normalizers';
import type { StaticRoutes, RouteOption, TransitionMethod } from 'base/b-router/interface';
export * from 'super/i-data/i-data';
export * from 'core/router/const';
export * from 'base/b-router/interface';
export const
$$ = symbolGenerator();
/**
* Component to route application pages
*/
@component({
deprecatedProps: {
pageProp: 'activeRoute',
pagesProp: 'routesProp'
}
})
export default class bRouter extends iData {
/**
* Type: page parameters
*/
readonly PageParams!: RouteOption;
/**
* Type: page query
*/
readonly PageQuery!: RouteOption;
/**
* Type: page meta
*/
readonly PageMeta!: RouteOption;
public override async!: Async<this>;
/**
* The static schema of application routes.
* By default, this value is taken from `routes/index.ts`.
*
* @example
* ```
* < b-router :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: Object,
required: false,
watch: (ctx, val, old) => {
if (!Object.fastCompare(val, old)) {
ctx.updateCurrentRoute();
}
}
})
readonly routesProp?: StaticRoutes;
/**
* Compiled schema of application routes
* @see [[bRouter.routesProp]]
*/
@system<bRouter>({
after: 'engine',
init: (o) => o.sync.link(o.compileStaticRoutes)
})
routes!: router.RouteBlueprints;
/**
* An initial route value.
* Usually, you don't need to provide this value manually,
* because it is inferred automatically, but sometimes it can be useful.
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*/
@prop<bRouter>({
type: [String, Object],
required: false,
watch: 'updateCurrentRoute'
})
readonly initialRoute?: router.InitialRoute;
/**
* Base route path: all route paths are concatenated with this path
*
* @example
* ```
* < b-router :basePath = '/demo' | :routes = { &
* user: {
* /// '/demo/user'
* path: '/user'
* }
* } .
* ```
*/
@prop({watch: 'updateCurrentRoute'})
readonly basePathProp: string = '/';
/** @see [[bRouter.basePathProp]] */
@system<bRouter>((o) => o.sync.link())
basePath!: string;
/**
* If true, the router will intercept all click events on elements with an `href` attribute to emit a transition.
* An element with `href` can have additional attributes:
*
* * `data-router-method` - type of the used router method to emit the transition;
* * `data-router-go` - value for the router `go` method;
* * `data-router-params`, `data-router-query`, `data-router-meta` - additional parameters for the used router method
* (to provide an object use JSON).
*/
@prop(Boolean)
readonly interceptLinks: boolean = true;
/**
* A factory to create a router engine.
* By default, this value is taken from `core/router/engines`.
*
* @example
* ```
* < b-router :engine = myCustomEngine
* ```
*/
@prop<bRouter>({
type: Function,
watch: 'updateCurrentRoute',
default: engine
})
readonly engineProp!: () => router.Router;
/**
* An internal router engine.
* For example, it can be the HTML5 history router or a router based on URL hash values.
*
* @see [[bRouter.engine]]
*/
@system((o) => o.sync.link((v) => (<(v: unknown) => router.Router>v)(o)))
protected engine!: router.Router;
/**
* Raw value of the active route
*/
@system()
protected routeStore?: router.Route;
/**
* Value of the active route
* @see [[bRouter.routeStore]]
*
* @example
* ```js
* console.log(route?.query)
* ```
*/
override get route(): CanUndef<this['r']['CurrentPage']> {
return this.field.get('routeStore');
}
/**
* @deprecated
* @see [[bRouter.route]]
*/
@deprecated({renamedTo: 'route'})
get page(): CanUndef<this['r']['CurrentPage']> {
return this.route;
}
/**
* Default route value
*
* @example
* ```
* < b-router :initialRoute = 'main' | :routes = { &
* main: {
* path: '/'
* },
*
* notFound: {
* default: true
* }
* } .
* ```
*
* ```js
* router.defaultRoute.name === 'notFound'
* ```
*/
@computed({cache: true, dependencies: ['routes']})
get defaultRoute(): CanUndef<router.RouteBlueprint> {
let route;
for (let keys = Object.keys(this.routes), i = 0; i < keys.length; i++) {
const
el = this.routes[keys[i]];
if (el?.meta.default) {
route = el;
break;
}
}
return route;
}
/**
* Pushes a new route to the history stack.
* The method returns a promise that is resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.push('main', {query: {foo: 1}});
* router.push('/user/:id', {params: {id: 1}});
* router.push('https://google.com');
* ```
*/
async push(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'push');
}
/**
* Replaces the current route.
* The method returns a promise that will be resolved when the transition is completed.
*
* @param route - route name or URL
* @param [opts] - additional options
*
* @example
* ```js
* router.replace('main', {query: {foo: 1}});
* router.replace('/user/:id', {params: {id: 1}});
* router.replace('https://google.com');
* ```
*/
async replace(route: Nullable<string>, opts?: router.TransitionOptions): Promise<void> {
await this.emitTransition(route, opts, 'replace');
}
/**
* Switches to a route from the history,
* identified by its relative position to the current route (with the current route being relative index 0).
* The method returns a promise that will be resolved when the transition is completed.
*
* @param pos
*
* @example
* ```js
* this.go(-1) // this.back();
* this.go(1) // this.forward();
* this.go(-2) // this.back(); this.back();
* ```
*/
async go(pos: number): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.go(pos);
await res;
}
/**
* Switches to the next route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async forward(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.forward();
await res;
}
/**
* Switches to the previous route from the history.
* The method returns a promise that will be resolved when the transition is completed.
*/
async back(): Promise<void> {
const res = this.promisifyOnce('transition');
this.engine.back();
await res;
}
/**
* Clears the routes' history.
* Note that this method can't work properly with `HistoryAPI`-based engines.
*
* @param [filter] - filter predicate
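*
* @example
* ```js
* // A sketch: the `(route) => boolean` predicate form is an assumption here;
* // see [[router.HistoryClearFilter]] for the actual contract
* this.clear((route) => route.meta?.tmp === true);
* ```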
*/
clear(filter?: router.HistoryClearFilter): Promise<void> {
return this.engine.clear(filter);
}
/**
* Clears all temporary routes from the history.
* A temporary route is a route that has the `tmp` flag within one of its own properties, such as `params`, `query` or `meta`.
* Note that this method can't work properly with `HistoryAPI`-based engines.
*
* @example
* ```js
* this.push('redeem-money', {
* meta: {
* tmp: true
* }
* });
*
* this.clearTmp();
* ```
*/
clearTmp(): Promise<void> {
return this.engine.clearTmp();
}
/** @see [[router.getRoutePath]] */
getRoutePath(ref: string, opts: router.TransitionOptions = {}): CanUndef<string> {
return router.getRoutePath(ref, this.routes, opts);
}
/** @see [[router.getRoute]] */
getRoute(ref: string): CanUndef<router.RouteAPI> {
const {routes, basePath, defaultRoute} = this;
return router.getRoute(ref, routes, {basePath, defaultRoute});
}
/**
* @deprecated
* @see [[bRouter.getRoute]]
*/
@deprecated({renamedTo: 'getRoute'})
getPageOpts(ref: string): CanUndef<router.RouteBlueprint> {
return this.getRoute(ref);
}
/**
* Emits a new transition to the specified route
*
* @param ref - route name or URL, or `null` if the route is equal to the previous one
* @param [opts] - additional transition options
* @param [method] - transition method
*
* @emits `beforeChange(route: Nullable<string>, params:` [[TransitionOptions]]`, method:` [[TransitionMethod]]`)`
*
* @emits `change(route:` [[Route]]`)`
* @emits `hardChange(route:` [[Route]]`)`
* @emits `softChange(route:` [[Route]]`)`
*
* @emits `transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
* @emits `$root.transition(route:` [[Route]]`, type:` [[TransitionType]]`)`
*/
async emitTransition(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method: TransitionMethod = 'push'
): Promise<CanUndef<router.Route>> {
opts = router.getBlankRouteFrom(router.normalizeTransitionOpts(opts));
const
{r, engine} = this;
const
currentEngineRoute = engine.route ?? engine.page;
this.emit('beforeChange', ref, opts, method);
let
newRouteInfo: CanUndef<router.RouteAPI>;
const getEngineRoute = () => currentEngineRoute ?
currentEngineRoute.url ?? router.getRouteName(currentEngineRoute) :
undefined;
// Get information about the specified route
if (ref != null) {
newRouteInfo = this.getRoute(engine.id(ref));
// In this case, we don't have the specified ref for the transition,
// so we try to get information from the current route and use it as a blueprint for the new one
} else if (currentEngineRoute) {
ref = getEngineRoute()!;
const
route = this.getRoute(ref);
if (route) {
newRouteInfo = Object.mixin(true, route, router.purifyRoute(currentEngineRoute));
}
}
const scroll = {
meta: {
scroll: {
x: pageXOffset,
y: pageYOffset
}
}
};
// To save scroll position before change to a new route
// we need to emit system "replace" transition with padding information about the scroll
if (currentEngineRoute && method !== 'replace') {
const
currentRouteWithScroll = Object.mixin(true, undefined, currentEngineRoute, scroll);
if (!Object.fastCompare(currentEngineRoute, currentRouteWithScroll)) {
await engine.replace(getEngineRoute()!, currentRouteWithScroll);
}
}
// We haven't found any routes that match the specified ref
if (newRouteInfo == null) {
// If the transition was emitted by a user, we need to save the scroll position
if (method !== 'event' && ref != null) {
await engine[method](ref, scroll);
}
return;
}
if ((<router.PurifiedRoute<router.RouteAPI>>newRouteInfo).name == null) {
const
nm = router.getRouteName(currentEngineRoute);
if (nm != null) {
newRouteInfo.name = nm;
}
}
const
currentRoute = this.field.get<router.Route>('routeStore'),
deepMixin = (...args) => Object.mixin({deep: true, skipUndefs: false}, ...args);
// If the new route matches the current one by name,
// we need to mix the new state into the current one
if (router.getRouteName(currentRoute) === newRouteInfo.name) {
deepMixin(newRouteInfo, router.getBlankRouteFrom(currentRoute), opts);
// Simple normalizing of a route state
} else {
deepMixin(newRouteInfo, opts);
}
const {meta} = newRouteInfo;
// If a route supports filling from the root object or query parameters
fillRouteParams(newRouteInfo, this);
// We have two variants of transitions:
// "soft" - only query or meta parameters were changed between the routes
// "hard" - the two routes aren't equal by name
// Mutations of query and meta parameters of a route shouldn't force re-rendering of components,
// which is why we place them in a prototype object by using `Object.create`
const nonWatchRouteValues = {
url: newRouteInfo.resolvePath(newRouteInfo.params),
query: newRouteInfo.query,
meta
};
const newRoute = Object.assign(
Object.create(nonWatchRouteValues),
Object.reject(router.convertRouteToPlainObject(newRouteInfo), Object.keys(nonWatchRouteValues))
);
let
hardChange = false;
// Emits the route transition event
const emitTransition = (onlyOwnTransition?: boolean) => {
const type = hardChange ? 'hard' : 'soft';
if (onlyOwnTransition) {
this.emit('transition', newRoute, type);
} else {
this.emit('change', newRoute);
this.emit('transition', newRoute, type);
r.emit('transition', newRoute, type);
}
};
// Checking that the new route is really needed, i.e., that it isn't equal to the previous one
let newRouteIsReallyNeeded = !Object.fastCompare(
router.getComparableRouteParams(currentRoute),
router.getComparableRouteParams(newRoute)
);
// Nothing changed between the routes, but a meta object was provided
if (!newRouteIsReallyNeeded && currentRoute != null && opts.meta != null) {
newRouteIsReallyNeeded = !Object.fastCompare(
Object.select(currentRoute.meta, opts.meta),
opts.meta
);
}
// The transition is necessary, but now we need to understand whether we should emit a "soft" or "hard" transition
if (newRouteIsReallyNeeded) {
this.field.set('routeStore', newRoute);
const
plainInfo = router.convertRouteToPlainObject(newRouteInfo);
const canRouteTransformToReplace =
currentRoute &&
method !== 'replace' &&
Object.fastCompare(router.convertRouteToPlainObject(currentRoute), plainInfo);
if (canRouteTransformToReplace) {
method = 'replace';
}
// If the used engine does not support the requested transition method,
// we should use `replace`
if (!Object.isFunction(engine[method])) {
method = 'replace';
}
// This transition is marked as `external`,
// i.e. it refers to another site
if (newRouteInfo.meta.external) {
const u = newRoute.url;
location.href = u !== '' ? u : '/';
return;
}
await engine[method](newRoute.url, plainInfo);
const isSoftTransition = Boolean(r.route && Object.fastCompare(
router.convertRouteToPlainObjectWithoutProto(currentRoute),
router.convertRouteToPlainObjectWithoutProto(newRoute)
));
// Only properties from the prototype were changed in this transition,
// which is why it can be emitted as a soft transition, i.e. without forcing components to re-render
if (isSoftTransition) {
this.emit('softChange', newRoute);
// We get the prototype via the `__proto__` link,
// because `Object.getPrototypeOf` returns a non-watchable object.
// This behavior relies on the strategy that every touch of an object property of the watched object
// creates a child watch object.
const
proto = r.route?.__proto__;
if (Object.isDictionary(proto)) {
// Correct values from the root route object
for (let keys = Object.keys(nonWatchRouteValues), i = 0; i < keys.length; i++) {
const key = keys[i];
proto[key] = nonWatchRouteValues[key];
}
}
} else {
hardChange = true;
this.emit('hardChange', newRoute);
r.route = newRoute;
}
emitTransition();
// This route is equal to the previous one, and we don't actually perform a transition,
// but for a "push" request we need to emit a "fake" transition event anyway
} else if (method === 'push') {
emitTransition();
// In this case, we don't perform a transition, but we still
// should emit the special event, because some methods, like `back` or `forward`, can wait for it
} else {
emitTransition(true);
}
// Restoring the scroll position
if (meta.autoScroll !== false) {
(async () => {
const label = {
label: $$.autoScroll
};
const setScroll = () => {
const
s = meta.scroll;
if (s != null) {
this.r.scrollTo(s.x, s.y);
} else if (hardChange) {
this.r.scrollTo(0, 0);
}
};
// Restoring the scroll for static-height components
await this.nextTick(label);
setScroll();
// Restoring the scroll for dynamic-height components
await this.async.sleep(10, label);
setScroll();
})().catch(stderr);
}
return newRoute;
}
/**
* @deprecated
* @see [[bRouter.emitTransition]]
*/
@deprecated({renamedTo: 'emitTransition'})
setPage(
ref: Nullable<string>,
opts?: router.TransitionOptions,
method?: TransitionMethod
): Promise<CanUndef<router.Route>> {
return this.emitTransition(ref, opts, method);
}
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param [routes] - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
basePath: string,
routes?: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param basePath - base route path
* @param activeRoute
* @param [routes] - static schema of application routes
*/
updateRoutes(
basePath: string,
activeRoute: router.InitialRoute,
routes?: StaticRoutes
): Promise<router.RouteBlueprints>;
/**
* Updates the schema of routes
*
* @param routes - static schema of application routes
* @param [activeRoute]
*/
updateRoutes(
routes: StaticRoutes,
activeRoute?: Nullable<router.InitialRoute>
): Promise<router.RouteBlueprints>;
/**
* @param basePathOrRoutes
* @param [routesOrActiveRoute]
* @param [activeRouteOrRoutes]
*/
@wait('beforeReady')
async updateRoutes(
basePathOrRoutes: string | StaticRoutes,
routesOrActiveRoute?: StaticRoutes | Nullable<router.InitialRoute>,
activeRouteOrRoutes?: Nullable<router.InitialRoute> | StaticRoutes
): Promise<router.RouteBlueprints> {
let
basePath,
routes,
activeRoute;
if (Object.isString(basePathOrRoutes)) {
basePath = basePathOrRoutes;
if (Object.isString(routesOrActiveRoute)) {
routes = <StaticRoutes>activeRouteOrRoutes;
activeRoute = routesOrActiveRoute;
} else {
routes = routesOrActiveRoute;
activeRoute = <Nullable<router.InitialRoute>>activeRouteOrRoutes;
}
} else {
routes = basePathOrRoutes;
activeRoute = <Nullable<router.InitialRoute>>routesOrActiveRoute;
}
if (basePath != null) {
this.basePath = basePath;
}
if (routes != null) {
this.routes = this.compileStaticRoutes(routes);
}
this.routeStore = undefined;
await this.initRoute(activeRoute ?? this.initialRoute ?? this.defaultRoute);
return this.routes;
}
protected override initRemoteData(): CanUndef<CanPromise<router.RouteBlueprints | Dictionary>> {
if (!this.db) {
return;
}
const
val = this.convertDBToComponent<StaticRoutes>(this.db);
if (Object.isDictionary(val)) |
if (Object.isArray(val)) {
// eslint-disable-next-line prefer-spread
return this.updateRoutes.apply(this, val);
}
return this.routes;
}
/**
* Initializes the router within an application
* @emits `$root.initRouter(router:` [[bRouter]]`)`
*/
@hook('created')
protected init(): void {
this.field.set('routerStore', this, this.$root);
this.r.emit('initRouter', this);
}
/**
* Initializes the specified route
* @param [route] - route
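*
* @example
* ```js
* // A sketch: the object form's shape is an assumption inferred from the
* // `router.getRouteName` and `Object.reject(route, router.routeNames)` calls
* this.initRoute('main');
* this.initRoute({name: 'main', query: {foo: 1}});
* ```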
*/
@hook('beforeDataCreate')
protected initRoute(route: Nullable<router.InitialRoute> = this.initialRoute): Promise<void> {
if (route != null) {
if (Object.isString(route)) {
return this.replace(route);
}
return this.replace(router.getRouteName(route), Object.reject(route, router.routeNames));
}
return this.replace(null);
}
/**
* Updates the current route value
*/
@wait({defer: true, label: $$.updateCurrentRoute})
protected updateCurrentRoute(): Promise<void> {
return this.initRoute();
}
/**
* Compiles the specified static routes with the current base path and returns a new object
* @param [routes]
*/
protected compileStaticRoutes(routes: StaticRoutes = this.engine.routes ?? globalRoutes): router.RouteBlueprints {
return router.compileStaticRoutes(routes, {basePath: this.basePath});
}
protected override initBaseAPI(): void {
super.initBaseAPI();
const
i = this.instance;
this.compileStaticRoutes = i.compileStaticRoutes.bind(this);
this.emitTransition = i.emitTransition.bind(this);
}
/**
* Handler: click on an element with the `href` attribute
* @param e
*/
@watch({
field: 'document:click',
wrapper: (o, cb) => o.dom.delegate('[href]', cb)
})
protected async onLink(e: MouseEvent): Promise<void> {
const
a = <HTMLElement>e.delegateTarget,
href = a.getAttribute('href')?.trim();
const cantPrevent =
!this.interceptLinks ||
href == null ||
href === '' ||
href.startsWith('#') ||
href.startsWith('javascript:') ||
router.isExternal.test(href);
if (cantPrevent) {
return;
}
e.preventDefault();
const
l = Object.assign(document.createElement('a'), {href});
if (a.getAttribute('target') === '_blank' || e.ctrlKey) {
globalThis.open(l.href, '_blank');
return;
}
const
method = a.getAttribute('data-router-method');
switch (method) {
case 'back':
this.back().catch(stderr);
break;
case 'forward':
this.forward().catch(stderr);
break;
case 'go': {
const go = Object.parse(a.getAttribute('data-router-go'));
this.go(Object.isNumber(go) ? go : -1).catch(stderr);
break;
}
default: {
const
params = Object.parse(a.getAttribute('data-router-params')),
query = Object.parse(a.getAttribute('data-router-query')),
meta = Object.parse(a.getAttribute('data-router-meta'));
await this[method === 'replace' ? 'replace' : 'push'](href, {
params: Object.isDictionary(params) ? params : {},
query: Object.isDictionary(query) ? query : {},
meta: Object.isDictionary(meta) ? meta : {}
});
}
}
}
}
| {
return Promise.all(this.state.set(val)).then(() => val);
} | conditional_block |
apiService.js | import wretch from 'wretch';
const api = wretch()
.url(process.env.API_URL || 'http://localhost:3002/api')
.auth(`Bearer ${localStorage.getItem('authToken')}`);
/*
* Exported methods shouldn't be used directly from a component; use
* one of the actual API libs instead.
*/
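/*
 * A hypothetical sketch of such an API lib built on top of these helpers
 * (the `users` app name and endpoint paths below are assumptions):
 *
 *   // users.js
 *   import { get, post } from './apiService';
 *
 *   export const fetchUser = id => get('users', `detail/${id}`);
 *   export const createUser = data => post('users', 'create', data);
 */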
export function get(app, path, data = undefined, config = {}) {
let request = api.url(`/${app}/${path}`);
if (data) {
request = request.query(data);
}
return request.get().res(parseResponse);
}
export function post(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.post(data)
.badRequest(parseValidationError)
.res(parseResponse); | export function put(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.put(data)
.badRequest(parseValidationError)
.res(parseResponse);
}
export function delete_(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.delete(data)
.res(parseResponse);
}
export class ValidationError extends Error {
constructor(message, validationErrors) {
super(message); // 'Error' breaks prototype chain here
Object.setPrototypeOf(this, new.target.prototype); // restore prototype chain
this.validationErrors = validationErrors;
this.statusCode = 400;
}
}
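/*
 * Usage sketch (hypothetical consumer code; `renderFieldErrors` is an assumed
 * helper, not part of this module):
 *
 *   try {
 *     await post('users', 'create', payload);
 *   } catch (e) {
 *     if (e instanceof ValidationError) {
 *       renderFieldErrors(e.validationErrors); // server-side 400 with details
 *     } else {
 *       throw e;
 *     }
 *   }
 */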
const isJSONResponse = response => /application\/json/.test(response.headers.get('content-type'));
const parseResponse = response => {
if (!response.ok) {
throw new Error(response.status + ' ' + response.statusText);
}
if (isJSONResponse(response)) {
return response.json();
}
return response.text();
};
const parseValidationError = async ({ response, text }) => {
if (isJSONResponse(response)) {
const json = JSON.parse(text);
throw new ValidationError(json.error, json.message);
}
throw new Error(text);
}; | }
| random_line_split |
apiService.js | import wretch from 'wretch';
const api = wretch()
.url(process.env.API_URL || 'http://localhost:3002/api')
.auth(`Bearer ${localStorage.getItem('authToken')}`);
/*
* Exported methods shouldn't be used directly from a component; use
* one of the actual API libs instead.
*/
export function get(app, path, data = undefined, config = {}) {
let request = api.url(`/${app}/${path}`);
if (data) {
request = request.query(data);
}
return request.get().res(parseResponse);
}
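/*
 * A usage sketch for `get` (the app name, path, and query values are
 * assumptions for illustration):
 *
 *   // Resolves to GET <API_URL>/reports/summary?from=2020-01-01
 *   get('reports', 'summary', {from: '2020-01-01'});
 */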
export function post(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.post(data)
.badRequest(parseValidationError)
.res(parseResponse);
}
export function put(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.put(data)
.badRequest(parseValidationError)
.res(parseResponse);
}
export function | (app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.delete(data)
.res(parseResponse);
}
export class ValidationError extends Error {
constructor(message, validationErrors) {
super(message); // 'Error' breaks prototype chain here
Object.setPrototypeOf(this, new.target.prototype); // restore prototype chain
this.validationErrors = validationErrors;
this.statusCode = 400;
}
}
const isJSONResponse = response => /application\/json/.test(response.headers.get('content-type'));
const parseResponse = response => {
if (!response.ok) {
throw new Error(response.status + ' ' + response.statusText);
}
if (isJSONResponse(response)) {
return response.json();
}
return response.text();
};
const parseValidationError = async ({ response, text }) => {
if (isJSONResponse(response)) {
const json = JSON.parse(text);
throw new ValidationError(json.error, json.message);
}
throw new Error(text);
};
| delete_ | identifier_name |
apiService.js | import wretch from 'wretch';
const api = wretch()
.url(process.env.API_URL || 'http://localhost:3002/api')
.auth(`Bearer ${localStorage.getItem('authToken')}`);
/*
* Exported methods shouldn't be used directly from a component; use
* one of the actual API libs instead.
*/
export function get(app, path, data = undefined, config = {}) {
let request = api.url(`/${app}/${path}`);
if (data) {
request = request.query(data);
}
return request.get().res(parseResponse);
}
export function post(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.post(data)
.badRequest(parseValidationError)
.res(parseResponse);
}
export function put(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.put(data)
.badRequest(parseValidationError)
.res(parseResponse);
}
export function delete_(app, path, data = null, config = {}) {
return api
.url(`/${app}/${path}`)
.delete(data)
.res(parseResponse);
}
export class ValidationError extends Error {
constructor(message, validationErrors) {
super(message); // 'Error' breaks prototype chain here
Object.setPrototypeOf(this, new.target.prototype); // restore prototype chain
this.validationErrors = validationErrors;
this.statusCode = 400;
}
}
const isJSONResponse = response => /application\/json/.test(response.headers.get('content-type'));
const parseResponse = response => {
if (!response.ok) |
if (isJSONResponse(response)) {
return response.json();
}
return response.text();
};
const parseValidationError = async ({ response, text }) => {
if (isJSONResponse(response)) {
const json = JSON.parse(text);
throw new ValidationError(json.error, json.message);
}
throw new Error(text);
};
| {
throw new Error(response.status + ' ' + response.statusText);
} | conditional_block |
form.ts | import { Component, ElementRef, ViewChild, EventEmitter, Output, Input, OnInit } from '@angular/core';
import { IonicPage, NavParams, NavController, Events } from 'ionic-angular';
import * as umf from '../../../core/framework/index';
import { UmfApp } from '../../../core/framework/index';
function bindEventHandlersToCustomEvents(formComponent, eventHandlers) {
let formInstance = formComponent.form;
let app = formComponent.app;
// Bind all 'form event handlers'.
for (let eventHandler of eventHandlers) {
// Don't bind default event handlers, because they are already auto-bound inside FormInstance.
if (eventHandler.runAt.indexOf('form:') === 0) {
continue;
}
formComponent.events.subscribe(eventHandler.runAt, e => {
// Augment the event args with the form that is firing the event. This is needed
// so that the event handler knows which particular form this event is coming from.
e.form = formComponent;
formInstance.handleEvent(eventHandler.runAt, eventHandler, e);
});
}
}
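/*
 * A sketch of the assumed `eventHandlers` shape consumed above (only `runAt`
 * is read directly here; the other fields are illustrative assumptions):
 *
 *   const eventHandlers = [
 *     {runAt: 'form:loaded', id: 'x'},   // skipped: auto-bound inside FormInstance
 *     {runAt: 'input:changed', id: 'y'}  // subscribed via formComponent.events
 *   ];
 */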
@IonicPage({
name: 'form',
segment: 'form/:id'
})
@Component({
selector: 'component-form',
templateUrl: 'form.html'
})
export class FormComponent implements OnInit {
tabindex: number = 1;
urlData: null;
initialized: boolean = false;
responseMetadata: {};
useUrl: true;
params: any;
visibleInputFields: any;
outputFieldValues: any;
disabled: boolean = false;
self: any;
submitButtonLabel: any;
app: any;
form: any;
metadata: any;
nav: any;
events: any;
@Input() initializedForm: boolean;
@Input() appOutput: any = null;
@Input() formOutput: any = null;
@Input() useUrlOutput: any = null;
@Input() fieldOutput: any = null;
@Input() parentOutput: boolean = true;
constructor(public paramsCtrl: NavParams,
public navCtrl: NavController,
public eventCtrl: Events) {
this.params = paramsCtrl;
this.nav = navCtrl;
this.events = eventCtrl;
}
ngOnInit() {
this.app = this.params.data.app;
this.form = this.params.data.form;
this.metadata = this.params.data.metadata;
this.self = this;
this.initialized = this.initializedForm;
this.init();
}
async initialiseInputs(field, app) {
field.inputs = app.controlRegister.createInputControllers(field.value.inputs);
let promises = [];
for (let input of field.inputs) {
let i = field.value.inputs.find(t => t.inputId === input.metadata.inputId);
if (i != null) {
let p = input.init(i.value);
promises.push(p);
}
}
await Promise.all(promises);
};
init() {
if (!this.initialized) {
var form = this.form;
this.initialized = true;
this.visibleInputFields = form.inputs.filter(t => t.metadata.hidden == false),
this.submitButtonLabel = form.metadata.customProperties != null && form.metadata.customProperties.submitButtonLabel
? form.metadata.customProperties.submitButtonLabel
: "Submit";
this.tabindex += 1; |
var app = this.app;
this.events.publish("form:loaded", { app: app });
// Auto-submit form if necessary.
if (form.metadata.postOnLoad) {
this.submit(app, form, null);
}
}
};
enableForm() {
var formInstance = this.form;
// Hide all inputs to re-render them. This is needed due to the way that
// Svelte *seems* to work: it doesn't re-render nested components unless they are recreated.
this.visibleInputFields = [];
this.visibleInputFields = formInstance.inputs.filter(t => t.metadata.hidden == false),
this.disabled = false;
};
renderResponse(response) {
var formInstance = this.form;
// Force Svelte to re-render outputs.
this.outputFieldValues = null;
this.outputFieldValues = formInstance.outputs;
this.responseMetadata = response.metadata;
};
async submit(app, formInstance, event, redirect = null) {
if (event != null) {
event.preventDefault();
}
var skipValidation =
!formInstance.metadata.postOnLoadValidation &&
formInstance.metadata.postOnLoad &&
// if this is the initialization of the form, i.e. the first post.
redirect == null;
let data = await formInstance.prepareForm(!skipValidation);
// If not all required inputs are filled.
if (data == null) {
return;
}
// Disable double-posts.
this.disabled = true;
// If postOnLoad == true, then the input field values should appear in the url.
// The reason is that postOnLoad == true is used by "report" pages, which need
// their filters to be saved in the url. This does not apply to forms
// with postOnLoad == false, because those forms are usually for creating new data
// and hence should not be tracked in browser's history based on parameters.
if (formInstance.metadata.postOnLoad && redirect) {
let urlParams = await formInstance.getSerializedInputValues();
// Update url in the browser.
app.go(formInstance.metadata.id, urlParams);
return;
}
await formInstance.fire("form:posting", { response: null, app: app });
try {
let response = await app.server.postForm(formInstance.metadata.id, data);
await formInstance.fire("form:responseReceived", { response: response, app: app });
formInstance.setOutputFieldValues(response);
// Null response is treated as a server-side error.
if (response == null) {
throw new Error(`Received null response.`);
}
await app.runFunctions(response.metadata.functionsToRun);
if (response.metadata.handler == "" || response.metadata.handler == null) {
this.renderResponse(response);
}
else {
app.handleResponse(response, formInstance);
}
await formInstance.fire("form:responseHandled", { response: response, app: app });
this.enableForm();
// Signal event to child controls.
this.events.publish("form:responseHandled", {
form: this,
invokedByUser: event != null
});
}
catch (e) {
this.enableForm();
}
}
}; | random_line_split |
|
form.ts | import { Component, ElementRef, ViewChild, EventEmitter, Output, Input, OnInit } from '@angular/core';
import { IonicPage, NavParams, NavController, Events } from 'ionic-angular';
import * as umf from '../../../core/framework/index';
import { UmfApp } from '../../../core/framework/index';
function bindEventHandlersToCustomEvents(formComponent, eventHandlers) {
let formInstance = formComponent.form;
let app = formComponent.app;
// Bind all 'form event handlers'.
for (let eventHandler of eventHandlers) {
// Don't bind default event handlers, because they are already auto-bound inside FormInstance.
if (eventHandler.runAt.indexOf('form:') === 0) {
continue;
}
formComponent.events.subscribe(eventHandler.runAt, e => {
// Augment the event args with the form that is firing the event. This is needed
// so that the event handler knows which particular form this event is coming from.
e.form = formComponent;
formInstance.handleEvent(eventHandler.runAt, eventHandler, e);
});
}
}
@IonicPage({
name: 'form',
segment: 'form/:id'
})
@Component({
selector: 'component-form',
templateUrl: 'form.html'
})
export class FormComponent implements OnInit {
tabindex: number = 1;
urlData: null;
initialized: boolean = false;
responseMetadata: {};
useUrl: true;
params: any;
visibleInputFields: any;
outputFieldValues: any;
disabled: boolean = false;
self: any;
submitButtonLabel: any;
app: any;
form: any;
metadata: any;
nav: any;
events: any;
@Input() initializedForm: boolean;
@Input() appOutput: any = null;
@Input() formOutput: any = null;
@Input() useUrlOutput: any = null;
@Input() fieldOutput: any = null;
@Input() parentOutput: boolean = true;
constructor(public paramsCtrl: NavParams,
public navCtrl: NavController,
public eventCtrl: Events) {
this.params = paramsCtrl;
this.nav = navCtrl;
this.events = eventCtrl;
}
ngOnInit() {
this.app = this.params.data.app;
this.form = this.params.data.form;
this.metadata = this.params.data.metadata;
this.self = this;
this.initialized = this.initializedForm;
this.init();
}
async initialiseInputs(field, app) {
field.inputs = app.controlRegister.createInputControllers(field.value.inputs);
let promises = [];
for (let input of field.inputs) {
let i = field.value.inputs.find(t => t.inputId === input.metadata.inputId);
if (i != null) {
let p = input.init(i.value);
promises.push(p);
}
}
await Promise.all(promises);
};
init() {
if (!this.initialized) {
var form = this.form;
this.initialized = true;
this.visibleInputFields = form.inputs.filter(t => t.metadata.hidden == false),
this.submitButtonLabel = form.metadata.customProperties != null && form.metadata.customProperties.submitButtonLabel
? form.metadata.customProperties.submitButtonLabel
: "Submit";
this.tabindex += 1;
var app = this.app;
this.events.publish("form:loaded", { app: app });
// Auto-submit form if necessary.
if (form.metadata.postOnLoad) {
this.submit(app, form, null);
}
}
};
enableForm() {
var formInstance = this.form;
// Hide all inputs to re-render them. This is needed due to the way that
// Svelte *seems* to work: it doesn't re-render nested components unless they are recreated.
this.visibleInputFields = [];
this.visibleInputFields = formInstance.inputs.filter(t => t.metadata.hidden == false),
this.disabled = false;
};
renderResponse(response) {
var formInstance = this.form;
// Force Svelte to re-render outputs.
this.outputFieldValues = null;
this.outputFieldValues = formInstance.outputs;
this.responseMetadata = response.metadata;
};
async submit(app, formInstance, event, redirect = null) {
if (event != null) {
event.preventDefault();
}
var skipValidation =
!formInstance.metadata.postOnLoadValidation &&
formInstance.metadata.postOnLoad &&
// if this is the initialization of the form, i.e. the first post.
redirect == null;
let data = await formInstance.prepareForm(!skipValidation);
// If not all required inputs are filled.
if (data == null) {
return;
}
// Disable double-posts.
this.disabled = true;
// If postOnLoad == true, then the input field values should appear in the url.
// The reason is that postOnLoad == true is used by "report" pages, which need
// their filters to be saved in the url. This does not apply to forms
// with postOnLoad == false, because those forms are usually for creating new data
// and hence should not be tracked in browser's history based on parameters.
if (formInstance.metadata.postOnLoad && redirect) {
let urlParams = await formInstance.getSerializedInputValues();
// Update url in the browser.
app.go(formInstance.metadata.id, urlParams);
return;
}
await formInstance.fire("form:posting", { response: null, app: app });
try {
let response = await app.server.postForm(formInstance.metadata.id, data);
await formInstance.fire("form:responseReceived", { response: response, app: app });
formInstance.setOutputFieldValues(response);
// Null response is treated as a server-side error.
if (response == null) |
await app.runFunctions(response.metadata.functionsToRun);
if (response.metadata.handler == "" || response.metadata.handler == null) {
this.renderResponse(response);
}
else {
app.handleResponse(response, formInstance);
}
await formInstance.fire("form:responseHandled", { response: response, app: app });
this.enableForm();
// Signal event to child controls.
this.events.publish("form:responseHandled", {
form: this,
invokedByUser: event != null
});
}
catch (e) {
this.enableForm();
}
}
};
| {
throw new Error(`Received null response.`);
} | conditional_block |
form.ts | import { Component, ElementRef, ViewChild, EventEmitter, Output, Input, OnInit } from '@angular/core';
import { IonicPage, NavParams, NavController, Events } from 'ionic-angular';
import * as umf from '../../../core/framework/index';
import { UmfApp } from '../../../core/framework/index';
function bindEventHandlersToCustomEvents(formComponent, eventHandlers) {
let formInstance = formComponent.form;
let app = formComponent.app;
// Bind all 'form event handlers'.
for (let eventHandler of eventHandlers) {
// Don't bind default event handlers, because they are already auto-bound inside FormInstance.
if (eventHandler.runAt.indexOf('form:') === 0) {
continue;
}
formComponent.events.subscribe(eventHandler.runAt, e => {
// Augment the event args with the form that is firing the event. This is needed
// so that the event handler knows which particular form this event is coming from.
e.form = formComponent;
formInstance.handleEvent(eventHandler.runAt, eventHandler, e);
});
}
}
@IonicPage({
name: 'form',
segment: 'form/:id'
})
@Component({
selector: 'component-form',
templateUrl: 'form.html'
})
export class FormComponent implements OnInit {
tabindex: number = 1;
urlData: null;
initialized: boolean = false;
responseMetadata: {};
useUrl: true;
params: any;
visibleInputFields: any;
outputFieldValues: any;
disabled: boolean = false;
self: any;
submitButtonLabel: any;
app: any;
form: any;
metadata: any;
nav: any;
events: any;
@Input() initializedForm: boolean;
@Input() appOutput: any = null;
@Input() formOutput: any = null;
@Input() useUrlOutput: any = null;
@Input() fieldOutput: any = null;
@Input() parentOutput: boolean = true;
constructor(public paramsCtrl: NavParams,
public navCtrl: NavController,
public eventCtrl: Events) {
this.params = paramsCtrl;
this.nav = navCtrl;
this.events = eventCtrl;
}
| () {
this.app = this.params.data.app;
this.form = this.params.data.form;
this.metadata = this.params.data.metadata;
this.self = this;
this.initialized = this.initializedForm;
this.init();
}
async initialiseInputs(field, app) {
field.inputs = app.controlRegister.createInputControllers(field.value.inputs);
let promises = [];
for (let input of field.inputs) {
let i = field.value.inputs.find(t => t.inputId === input.metadata.inputId);
if (i != null) {
let p = input.init(i.value);
promises.push(p);
}
}
await Promise.all(promises);
};
init() {
if (!this.initialized) {
var form = this.form;
this.initialized = true;
this.visibleInputFields = form.inputs.filter(t => t.metadata.hidden == false),
this.submitButtonLabel = form.metadata.customProperties != null && form.metadata.customProperties.submitButtonLabel
? form.metadata.customProperties.submitButtonLabel
: "Submit";
this.tabindex += 1;
var app = this.app;
this.events.publish("form:loaded", { app: app });
// Auto-submit form if necessary.
if (form.metadata.postOnLoad) {
this.submit(app, form, null);
}
}
};
enableForm() {
var formInstance = this.form;
// Hide all inputs to re-render them. This is needed due to the way that
// Svelte *seems* to work: it doesn't re-render nested components unless they are recreated.
this.visibleInputFields = [];
this.visibleInputFields = formInstance.inputs.filter(t => t.metadata.hidden == false);
this.disabled = false;
};
renderResponse(response) {
var formInstance = this.form;
// Force Svelte to re-render outputs.
this.outputFieldValues = null;
this.outputFieldValues = formInstance.outputs;
this.responseMetadata = response.metadata;
};
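// Validates and posts the form. For postOnLoad ("report") forms with a redirect,
// only the url is updated here; the actual post happens after navigation.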
async submit(app, formInstance, event, redirect = null) {
if (event != null) {
event.preventDefault();
}
var skipValidation =
!formInstance.metadata.postOnLoadValidation &&
formInstance.metadata.postOnLoad &&
// and this is the form's initial load, i.e. its first post.
redirect == null;
let data = await formInstance.prepareForm(!skipValidation);
// If not all required inputs are filled.
if (data == null) {
return;
}
// Disable double-posts.
this.disabled = true;
// If postOnLoad == true, then the input field values should appear in the url.
// Reason is that postOnLoad == true is used by "report" pages, which need
// their filters to be saved in the url. This does not apply to forms
// with postOnLoad == false, because those forms are usually for creating new data
// and hence should not be tracked in browser's history based on parameters.
if (formInstance.metadata.postOnLoad && redirect) {
let urlParams = await formInstance.getSerializedInputValues();
// Update url in the browser.
app.go(formInstance.metadata.id, urlParams);
return;
}
await formInstance.fire("form:posting", { response: null, app: app });
try {
let response = await app.server.postForm(formInstance.metadata.id, data);
// Null response is treated as a server-side error, so fail before using it.
if (response == null) {
throw new Error(`Received null response.`);
}
await formInstance.fire("form:responseReceived", { response: response, app: app });
formInstance.setOutputFieldValues(response);
await app.runFunctions(response.metadata.functionsToRun);
if (response.metadata.handler == "" || response.metadata.handler == null) {
this.renderResponse(response);
}
else {
app.handleResponse(response, formInstance);
}
await formInstance.fire("form:responseHandled", { response: response, app: app });
this.enableForm();
// Signal event to child controls.
this.events.publish("form:responseHandled", {
form: this.self,
invokedByUser: event != null
});
}
catch (e) {
// Re-enable the form so the user can retry after a failed post.
this.enableForm();
}
}
};
| ngOnInit | identifier_name |
Randomizer.js | /*** @jsx React.DOM */
var React = require('react');
var JigsawStore = require('../stores/JigsawStore');
var JigsawActions = require('../actions/JigsawActions');
/**
* A button to update the random number generator seed.
* @type {*|Function}
*/
var Randomizer = React.createClass({
getInitialState: function() {
return {seed: JigsawStore.getRandSeed()};
},
componentDidMount: function() {
JigsawStore.addChangeListener(this._onChange);
},
componentWillUnmount: function() {
JigsawStore.removeChangeListener(this._onChange);
},
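// Re-rendered whenever the store's seed changes (see _onChange below).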
render: function() { | <button className="btn btn-default" onClick={this._randomize}>Randomize</button>
);
},
_randomize: function() {
JigsawActions.randomize();
return false;
},
_onChange: function() {
this.setState({seed: JigsawStore.getRandSeed()});
}
});
module.exports = Randomizer; | var disabled = ! this.state.canRedo;
return ( | random_line_split |
settings.py | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_^@_*(#&=lo-gt=1d)_c--27h7#hlqlt@(gteqt3$-awssiqr='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'cars',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'PAGE_SIZE': 10,
'EXCEPTION_HANDLER': 'rest_framework_json_api.exceptions.exception_handler',
'DEFAULT_PAGINATION_CLASS':
'rest_framework_json_api.pagination.PageNumberPagination',
'DEFAULT_PARSER_CLASSES': (
'rest_framework_json_api.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
), | ),
'DEFAULT_METADATA_CLASS': 'rest_framework_json_api.metadata.JSONAPIMetadata',
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning',
} | 'DEFAULT_RENDERER_CLASSES': (
'rest_framework_json_api.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer', | random_line_split |
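A minimal sketch (not from the original project) of how the `cars` app might expose a read-only endpoint under the REST_FRAMEWORK block above; the Car model and its fields are assumptions for illustration:

# cars/views.py (hypothetical)
from rest_framework import serializers, viewsets

from cars.models import Car  # assumed model


class CarSerializer(serializers.ModelSerializer):
    class Meta:
        model = Car
        fields = ('id', 'make', 'model')  # assumed fields


class CarViewSet(viewsets.ReadOnlyModelViewSet):
    # DjangoModelPermissionsOrAnonReadOnly (configured above) permits
    # unauthenticated GETs; JSON:API parsing/rendering and PAGE_SIZE=10
    # pagination come from the REST_FRAMEWORK settings.
    queryset = Car.objects.all()
    serializer_class = CarSerializer

Registered on a rest_framework DefaultRouter, this would serve paginated JSON:API documents for the Car resource.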
multi_tenancy.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
__author__ = "@jframos"
import behave
from behave import step
from hamcrest import assert_that, equal_to, is_, has_length
from commons.rabbit_utils import RabbitMQConsumer
import qautils.configuration.configuration_utils as configuration_utils
from fiwarefacts_client.window_size_model_utils import get_window_size_rabbitmq_message
from fiwarecloto_client.client import ClotoClient
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_PORT, \
PROPERTIES_CONFIG_SERVICE_HOST, PROPERTIES_CONFIG_SERVICE_USER, PROPERTIES_CONFIG_SERVICE_PASSWORD
from commons.constants import PROPERTIES_CONFIG_RABBITMQ_SERVICE, PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES, \
PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME, PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE, \
PROPERTIES_CONFIG_FACTS_SERVICE, PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID, \
FACTS_DEFAULT_WINDOW_SIZE, PROPERTIES_CONFIG_CLOTO_SERVICE
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_OS_USERNAME, \
PROPERTIES_CONFIG_SERVICE_OS_PASSWORD, PROPERTIES_CONFIG_SERVICE_RESOURCE, \
PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL, PROPERTIES_CONFIG_SERVICE_PROTOCOL
from commons.step_helpers import send_context_notification_step_helper
from qautils.dataset.dataset_utils import DatasetUtils
from commons.custom_asserts import is_message_in_consumer_list
behave.use_step_matcher("re")
_dataset_utils = DatasetUtils()
@step(u'the secondary tenant-id configured is registered in CLOTO component')
def given_tenant_id_is_registered_in_cloto(context):
context.secondary_tenant_id = \
configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID]
print ("> Initiating Cloto REST Client for the secondary Tenant")
context.secondary_cloto_client = ClotoClient(
username=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_USERNAME],
password=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_PASSWORD],
tenant_id=context.secondary_tenant_id,
auth_url=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL],
api_protocol=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_PROTOCOL],
api_host=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
api_port=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
api_resource=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_RESOURCE])
print ("> A GET request is executed to CLOTO component, "
"to init all data about that secondary tenant in its system.")
_, response = context.secondary_cloto_client.\
get_tenant_id_resource_client().get_tenant_id(context.secondary_tenant_id)
assert_that(response.ok,
"TenantId '{}' for testing cannot be "
"retrieved from CLOTO: Message: {}".format(context.secondary_tenant_id, response.text))
@step(u'the following notifications are received for "(?P<server_id>.*)" and secondary tenant-id with values')
@step(u'a context notification is received for "(?P<server_id>.*)" and secondary tenant-id with values')
def a_context_update_is_received_for_secondary_tenant(context, server_id):
send_context_notification_step_helper(context, context.secondary_tenant_id, server_id)
@step(u'a new secondary RabbitMQ consumer is looking into the configured message bus')
def new_secondary_consumer_looking_for_messages(context):
# Init RabbitMQ consumer
context.secondary_rabbitmq_consumer = RabbitMQConsumer(
amqp_host=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
amqp_port=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
amqp_user=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_USER],
amqp_password=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PASSWORD])
facts_message_config = \
configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES]
context.secondary_rabbitmq_consumer.exchange = \
facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
context.secondary_rabbitmq_consumer.exchange_type = \
facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE]
# Append consumer to the 'context' consumer list
context.rabbitmq_consumer_list.append(context.secondary_rabbitmq_consumer)
# Set default window size to 2 (FACTS) - Secondary Tenant
message = get_window_size_rabbitmq_message(context.secondary_tenant_id, FACTS_DEFAULT_WINDOW_SIZE)
context.rabbitmq_publisher.send_message(message)
# Run secondary consumer
context.secondary_rabbitmq_consumer.routing_key = context.secondary_tenant_id
context.secondary_rabbitmq_consumer.run_as_thread()
@step(u'the message sent to RabbitMQ with the secondary tenant has got the following monitoring attributes')
@step(u'the messages sent to RabbitMQ with the secondary tenant have got the following monitoring attributes')
def following_messages_are_sent_to_secondary_consumer(context):
for element in context.table.rows:
ex |
@step(u'no messages have been received by the secondary RabbitMQ consumer')
def no_messages_received_for_secondary_tenant(context):
print ("> Received main list: " + str(context.secondaty_rabbitmq_consumer.message_list))
print ("> Received seconday list: " + str(context.rabbitmq_consumer.message_list))
assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(0),
"Secondary RabbitMQ consumer has retrieved messages from the bus, and it should NOT")
@step(u'"(?P<number_of_notifications>.*)" notification is sent to RabbitMQ with the secondary tenant')
@step(u'"(?P<number_of_notifications>.*)" notifications are sent to RabbitMQ with the secondary tenant')
def notifications_are_received_by_secondary_consumer(context, number_of_notifications):
assert_that(context.secondary_rabbitmq_consumer.message_list, has_length(int(number_of_notifications)),
"Secondary RabbitMQ consumer has NOT retrieved the expected number of messages from the bus")
@step(u'window size is set to "(?P<window_size>.*)" for the secondary tenant')
def window_size_is_set(context, window_size):
message = get_window_size_rabbitmq_message(context.secondary_tenant_id, window_size)
context.rabbitmq_publisher.send_message(message)
| pected_message = dict(element.as_dict())
expected_message = _dataset_utils.prepare_data(expected_message)
assert_that(expected_message, is_message_in_consumer_list(context.secondary_rabbitmq_consumer.message_list),
"A message with the expected content has not been received by the secondary RabbitMQ consumer")
| conditional_block |
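A minimal sketch of the consume-into-a-list pattern that RabbitMQConsumer (imported from commons.rabbit_utils, not shown here) appears to implement; the pika-based implementation and the 'topic' exchange type are assumptions for illustration:

# hypothetical stand-in for commons.rabbit_utils.RabbitMQConsumer
import threading

import pika


class SimpleTenantConsumer:
    """Collects every message routed to one tenant into message_list."""

    def __init__(self, amqp_host, exchange, routing_key):
        self.amqp_host = amqp_host
        self.exchange = exchange
        self.routing_key = routing_key
        self.message_list = []

    def _consume(self):
        connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.amqp_host))
        channel = connection.channel()
        channel.exchange_declare(exchange=self.exchange, exchange_type='topic')
        # Exclusive, server-named queue bound to the tenant's routing key.
        queue = channel.queue_declare(queue='', exclusive=True).method.queue
        channel.queue_bind(exchange=self.exchange, queue=queue, routing_key=self.routing_key)
        channel.basic_consume(
            queue=queue,
            on_message_callback=lambda ch, method, props, body: self.message_list.append(body),
            auto_ack=True)
        channel.start_consuming()

    def run_as_thread(self):
        # Daemon thread, so the behave process can exit without joining the consumer.
        threading.Thread(target=self._consume, daemon=True).start()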