Column      Type           Stats
file_name   large_string   lengths: 4-140
prefix      large_string   lengths: 0-39k
suffix      large_string   lengths: 0-36.1k
middle      large_string   lengths: 0-29.4k
fim_type    large_string   classes: 4 values
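Each record below stores one fill-in-the-middle (FIM) example: concatenating prefix, middle, and suffix reproduces the original source file, and fim_type labels the kind of span that was masked out. The four fim_type classes observed in this sample are identifier_name (the middle is a single identifier), identifier_body (a function body), conditional_block (a branch), and random_line_split (an arbitrary run of lines). The sketch below shows how a row might be reassembled with the Hugging Face datasets library; the dataset path "org/fim-dataset" is a placeholder, not this dataset's real identifier.

from datasets import load_dataset

# Placeholder hub path; substitute the dataset's actual identifier.
ds = load_dataset("org/fim-dataset", split="train")

row = ds[0]

# Joining the three spans reconstructs the original file, e.g. the
# prefix ending in "function", the middle "RouletteComponent", and
# the suffix starting with "(services)" in the first record below.
source = row["prefix"] + row["middle"] + row["suffix"]

print(row["file_name"], row["fim_type"], len(source))

Note that the viewer rendering below collapses newlines inside each field, so the records display each prefix, suffix, and middle as a single long line.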
roulette.component.js
"use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; Object.defineProperty(exports, "__esModule", { value: true }); var core_1 = require("@angular/core"); var app_service_1 = require("./app.service"); require("./rxjs-extensions"); var RouletteComponent = (function () { function
(services) { this.services = services; this.a = 3000 / (Math.pow(20 * this.services.timba.players.length, 35)); this.totalRounds = 20 * this.services.timba.players.length; this.initialRounds = 10 * this.services.timba.players.length; this.accRounds = 15 * this.services.timba.players.length; } RouletteComponent.prototype.ngOnInit = function () { var _this = this; $("#welcome").css("opacity", "1"); setTimeout(function () { $("#welcome").css("opacity", "0"); setTimeout(function () { _this.addPlayerRoulette(0); _this.addPlayerRouletteFade(0); setTimeout(function () { _this.showAndHide("three"); setTimeout(function () { _this.showAndHide("two"); setTimeout(function () { _this.showAndHide("one"); setTimeout(function () { _this.rotate(_this.services.timba.winnerIndex); setTimeout(function () { _this.services.playing = false; _this.services.nav = 'winner'; }, 24000); }, 2000); }, 2000); }, 2000); }, 500 * _this.services.timba.players.length); }, 1000); }, 4000); }; RouletteComponent.prototype.showAndHide = function (n) { $("#" + n).css("opacity", "1"); setTimeout(function () { $("#" + n).css("opacity", "0"); }, 1000); }; RouletteComponent.prototype.addPlayerRoulette = function (i) { if (i < this.services.timba.players.length) { $("#roulette").append("<div id=\"roulette" + i + "\" class=\"roulette-cell\" style=\"transition:opacity 0.5s ease-in-out;opacity:0;transform: rotate(" + i * 360 / this.services.timba.players.length + "deg) translateX(200px);\">" + this.services.timba.players[i].email + "</div>"); this.addPlayerRoulette(++i); } }; RouletteComponent.prototype.addPlayerRouletteFade = function (i) { var _this = this; setTimeout(function () { if (i < _this.services.timba.players.length) { $("#roulette" + i).css("opacity", "1"); if (_this.services.timba.players[i].email == _this.services.user.email) { $("#roulette" + i).css("text-shadow", "0 0 10px #fff"); $("#roulette" + i).css("font-weight", "bold"); } _this.addPlayerRouletteFade(++i); } }, 500); }; RouletteComponent.prototype.rotate = function (i) { $("#roulette").css("transition", "transform 20s cubic-bezier(0.2, 0, 0.000000000000000000000000000000000000000001, 1)"); $("#roulette").css("transform", "rotate(" + (4320 - Math.floor(i * 360 / this.services.timba.players.length)) + "deg)"); }; return RouletteComponent; }()); RouletteComponent = __decorate([ core_1.Component({ selector: 'roulette', templateUrl: 'app/roulette.component.html', styleUrls: ['app/roulette.component.css'] }), __metadata("design:paramtypes", [app_service_1.AppService]) ], RouletteComponent); exports.RouletteComponent = RouletteComponent; //# sourceMappingURL=roulette.component.js.map
RouletteComponent
identifier_name
roulette.component.js
"use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; Object.defineProperty(exports, "__esModule", { value: true }); var core_1 = require("@angular/core"); var app_service_1 = require("./app.service"); require("./rxjs-extensions"); var RouletteComponent = (function () { function RouletteComponent(services) { this.services = services; this.a = 3000 / (Math.pow(20 * this.services.timba.players.length, 35)); this.totalRounds = 20 * this.services.timba.players.length; this.initialRounds = 10 * this.services.timba.players.length; this.accRounds = 15 * this.services.timba.players.length; } RouletteComponent.prototype.ngOnInit = function () { var _this = this; $("#welcome").css("opacity", "1"); setTimeout(function () { $("#welcome").css("opacity", "0"); setTimeout(function () { _this.addPlayerRoulette(0); _this.addPlayerRouletteFade(0); setTimeout(function () { _this.showAndHide("three"); setTimeout(function () { _this.showAndHide("two"); setTimeout(function () { _this.showAndHide("one"); setTimeout(function () { _this.rotate(_this.services.timba.winnerIndex); setTimeout(function () { _this.services.playing = false; _this.services.nav = 'winner'; }, 24000); }, 2000); }, 2000); }, 2000); }, 500 * _this.services.timba.players.length); }, 1000); }, 4000); }; RouletteComponent.prototype.showAndHide = function (n) { $("#" + n).css("opacity", "1"); setTimeout(function () {
RouletteComponent.prototype.addPlayerRoulette = function (i) { if (i < this.services.timba.players.length) { $("#roulette").append("<div id=\"roulette" + i + "\" class=\"roulette-cell\" style=\"transition:opacity 0.5s ease-in-out;opacity:0;transform: rotate(" + i * 360 / this.services.timba.players.length + "deg) translateX(200px);\">" + this.services.timba.players[i].email + "</div>"); this.addPlayerRoulette(++i); } }; RouletteComponent.prototype.addPlayerRouletteFade = function (i) { var _this = this; setTimeout(function () { if (i < _this.services.timba.players.length) { $("#roulette" + i).css("opacity", "1"); if (_this.services.timba.players[i].email == _this.services.user.email) { $("#roulette" + i).css("text-shadow", "0 0 10px #fff"); $("#roulette" + i).css("font-weight", "bold"); } _this.addPlayerRouletteFade(++i); } }, 500); }; RouletteComponent.prototype.rotate = function (i) { $("#roulette").css("transition", "transform 20s cubic-bezier(0.2, 0, 0.000000000000000000000000000000000000000001, 1)"); $("#roulette").css("transform", "rotate(" + (4320 - Math.floor(i * 360 / this.services.timba.players.length)) + "deg)"); }; return RouletteComponent; }()); RouletteComponent = __decorate([ core_1.Component({ selector: 'roulette', templateUrl: 'app/roulette.component.html', styleUrls: ['app/roulette.component.css'] }), __metadata("design:paramtypes", [app_service_1.AppService]) ], RouletteComponent); exports.RouletteComponent = RouletteComponent; //# sourceMappingURL=roulette.component.js.map
$("#" + n).css("opacity", "0"); }, 1000); };
random_line_split
roulette.component.js
"use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; Object.defineProperty(exports, "__esModule", { value: true }); var core_1 = require("@angular/core"); var app_service_1 = require("./app.service"); require("./rxjs-extensions"); var RouletteComponent = (function () { function RouletteComponent(services)
RouletteComponent.prototype.ngOnInit = function () { var _this = this; $("#welcome").css("opacity", "1"); setTimeout(function () { $("#welcome").css("opacity", "0"); setTimeout(function () { _this.addPlayerRoulette(0); _this.addPlayerRouletteFade(0); setTimeout(function () { _this.showAndHide("three"); setTimeout(function () { _this.showAndHide("two"); setTimeout(function () { _this.showAndHide("one"); setTimeout(function () { _this.rotate(_this.services.timba.winnerIndex); setTimeout(function () { _this.services.playing = false; _this.services.nav = 'winner'; }, 24000); }, 2000); }, 2000); }, 2000); }, 500 * _this.services.timba.players.length); }, 1000); }, 4000); }; RouletteComponent.prototype.showAndHide = function (n) { $("#" + n).css("opacity", "1"); setTimeout(function () { $("#" + n).css("opacity", "0"); }, 1000); }; RouletteComponent.prototype.addPlayerRoulette = function (i) { if (i < this.services.timba.players.length) { $("#roulette").append("<div id=\"roulette" + i + "\" class=\"roulette-cell\" style=\"transition:opacity 0.5s ease-in-out;opacity:0;transform: rotate(" + i * 360 / this.services.timba.players.length + "deg) translateX(200px);\">" + this.services.timba.players[i].email + "</div>"); this.addPlayerRoulette(++i); } }; RouletteComponent.prototype.addPlayerRouletteFade = function (i) { var _this = this; setTimeout(function () { if (i < _this.services.timba.players.length) { $("#roulette" + i).css("opacity", "1"); if (_this.services.timba.players[i].email == _this.services.user.email) { $("#roulette" + i).css("text-shadow", "0 0 10px #fff"); $("#roulette" + i).css("font-weight", "bold"); } _this.addPlayerRouletteFade(++i); } }, 500); }; RouletteComponent.prototype.rotate = function (i) { $("#roulette").css("transition", "transform 20s cubic-bezier(0.2, 0, 0.000000000000000000000000000000000000000001, 1)"); $("#roulette").css("transform", "rotate(" + (4320 - Math.floor(i * 360 / this.services.timba.players.length)) + "deg)"); }; return RouletteComponent; }()); RouletteComponent = __decorate([ core_1.Component({ selector: 'roulette', templateUrl: 'app/roulette.component.html', styleUrls: ['app/roulette.component.css'] }), __metadata("design:paramtypes", [app_service_1.AppService]) ], RouletteComponent); exports.RouletteComponent = RouletteComponent; //# sourceMappingURL=roulette.component.js.map
{ this.services = services; this.a = 3000 / (Math.pow(20 * this.services.timba.players.length, 35)); this.totalRounds = 20 * this.services.timba.players.length; this.initialRounds = 10 * this.services.timba.players.length; this.accRounds = 15 * this.services.timba.players.length; }
identifier_body
ethertype.rs
use core::convert::TryFrom; use num_enum::TryFromPrimitive; use serde::{Deserialize, Serialize}; /// https://en.wikipedia.org/wiki/EtherType#Examples #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TryFromPrimitive, Deserialize, Serialize, )] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, ARP = 0x0806, WakeOnLan = 0x0842, SLPP = 0x8102, Ipv6 = 0x86dd,
pub fn from_bytes(bytes: &[u8]) -> Self { let n = u16::from_be_bytes([bytes[0], bytes[1]]); Self::try_from(n).unwrap_or_else(|_| panic!("Unknown EtherType {:04x}", n)) } pub fn to_bytes(self) -> [u8; 2] { u16::to_be_bytes(self as u16) } }
EthernetFlowControl = 0x8808, EthernetSlowProtocol = 0x8809, } impl EtherType {
random_line_split
ethertype.rs
use core::convert::TryFrom; use num_enum::TryFromPrimitive; use serde::{Deserialize, Serialize}; /// https://en.wikipedia.org/wiki/EtherType#Examples #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TryFromPrimitive, Deserialize, Serialize, )] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, ARP = 0x0806, WakeOnLan = 0x0842, SLPP = 0x8102, Ipv6 = 0x86dd, EthernetFlowControl = 0x8808, EthernetSlowProtocol = 0x8809, } impl EtherType { pub fn
(bytes: &[u8]) -> Self { let n = u16::from_be_bytes([bytes[0], bytes[1]]); Self::try_from(n).unwrap_or_else(|_| panic!("Unknown EtherType {:04x}", n)) } pub fn to_bytes(self) -> [u8; 2] { u16::to_be_bytes(self as u16) } }
from_bytes
identifier_name
ethertype.rs
use core::convert::TryFrom; use num_enum::TryFromPrimitive; use serde::{Deserialize, Serialize}; /// https://en.wikipedia.org/wiki/EtherType#Examples #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TryFromPrimitive, Deserialize, Serialize, )] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, ARP = 0x0806, WakeOnLan = 0x0842, SLPP = 0x8102, Ipv6 = 0x86dd, EthernetFlowControl = 0x8808, EthernetSlowProtocol = 0x8809, } impl EtherType { pub fn from_bytes(bytes: &[u8]) -> Self { let n = u16::from_be_bytes([bytes[0], bytes[1]]); Self::try_from(n).unwrap_or_else(|_| panic!("Unknown EtherType {:04x}", n)) } pub fn to_bytes(self) -> [u8; 2]
}
{ u16::to_be_bytes(self as u16) }
identifier_body
__init__.py
# Copyright 2008 German Aerospace Center (DLR) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
from webdav.acp.Ace import ACE from webdav.acp.GrantDeny import GrantDeny from webdav.acp.Privilege import Privilege from webdav.acp.Principal import Principal __version__ = "$LastChangedRevision: 2 $"
from webdav.acp.Acl import ACL
random_line_split
validators.py
## This file is part of Invenio. ## Copyright (C) 2012 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ Validation functions """ def _convert_x_to_10(x): if x != 'X': return int(x) else: return 10 def is_isbn10(val): """ Test if argument is an ISBN-10 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 10: return False r = sum([(10 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_isbn13(val): """ Test if argument is an ISBN-13 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 13: return False total = sum([int(num) * weight for num, weight in zip(val, (1, 3) * 6)]) ck = (10 - total) % 10 return ck == int(val[-1]) def is_isbn(val): """ Test if argument is an ISBN-10 or ISBN-13 number """ return is_isbn10(val) or is_isbn13(val) def
(val): """ Test if argument is an ISSN number """ val = val.replace("-", "").replace(" ", "") if len(val) != 8: return False r = sum([(8 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_all_uppercase(val): """ Returns true if more than 75% of the characters are upper-case """ uppers = 0 for c in val: if c.isupper(): uppers += 1 if 1.0 * uppers / len(val) > 0.75: return True else: return False def is_probably_list(val, separators=[';','-',',']): """ Returns true if string looks like a list - e.g. a, b, c, or a; b; c;""" if val: LIMIT = 2 counts = dict([(x, 0) for x in separators]) warn = False for c in val: for sep in separators: if c == sep: counts[sep] += 1 for sep, n in counts.items(): if n >= LIMIT: return True
is_issn
identifier_name
validators.py
## This file is part of Invenio. ## Copyright (C) 2012 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ Validation functions """ def _convert_x_to_10(x): if x != 'X': return int(x) else:
def is_isbn10(val): """ Test if argument is an ISBN-10 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 10: return False r = sum([(10 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_isbn13(val): """ Test if argument is an ISBN-13 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 13: return False total = sum([int(num) * weight for num, weight in zip(val, (1, 3) * 6)]) ck = (10 - total) % 10 return ck == int(val[-1]) def is_isbn(val): """ Test if argument is an ISBN-10 or ISBN-13 number """ return is_isbn10(val) or is_isbn13(val) def is_issn(val): """ Test if argument is an ISSN number """ val = val.replace("-", "").replace(" ", "") if len(val) != 8: return False r = sum([(8 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_all_uppercase(val): """ Returns true if more than 75% of the characters are upper-case """ uppers = 0 for c in val: if c.isupper(): uppers += 1 if 1.0 * uppers / len(val) > 0.75: return True else: return False def is_probably_list(val, separators=[';','-',',']): """ Returns true if string looks like a list - e.g. a, b, c, or a; b; c;""" if val: LIMIT = 2 counts = dict([(x, 0) for x in separators]) warn = False for c in val: for sep in separators: if c == sep: counts[sep] += 1 for sep, n in counts.items(): if n >= LIMIT: return True
return 10
conditional_block
validators.py
## This file is part of Invenio. ## Copyright (C) 2012 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ Validation functions """ def _convert_x_to_10(x): if x != 'X': return int(x) else: return 10 def is_isbn10(val): """ Test if argument is an ISBN-10 number
Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 10: return False r = sum([(10 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_isbn13(val): """ Test if argument is an ISBN-13 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 13: return False total = sum([int(num) * weight for num, weight in zip(val, (1, 3) * 6)]) ck = (10 - total) % 10 return ck == int(val[-1]) def is_isbn(val): """ Test if argument is an ISBN-10 or ISBN-13 number """ return is_isbn10(val) or is_isbn13(val) def is_issn(val): """ Test if argument is an ISSN number """ val = val.replace("-", "").replace(" ", "") if len(val) != 8: return False r = sum([(8 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_all_uppercase(val): """ Returns true if more than 75% of the characters are upper-case """ uppers = 0 for c in val: if c.isupper(): uppers += 1 if 1.0 * uppers / len(val) > 0.75: return True else: return False def is_probably_list(val, separators=[';','-',',']): """ Returns true if string looks like a list - e.g. a, b, c, or a; b; c;""" if val: LIMIT = 2 counts = dict([(x, 0) for x in separators]) warn = False for c in val: for sep in separators: if c == sep: counts[sep] += 1 for sep, n in counts.items(): if n >= LIMIT: return True
random_line_split
validators.py
## This file is part of Invenio. ## Copyright (C) 2012 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ Validation functions """ def _convert_x_to_10(x): if x != 'X': return int(x) else: return 10 def is_isbn10(val): """ Test if argument is an ISBN-10 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 10: return False r = sum([(10 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_isbn13(val): """ Test if argument is an ISBN-13 number Courtesy Wikipedia: http://en.wikipedia.org/wiki/International_Standard_Book_Number """ val = val.replace("-", "").replace(" ", "") if len(val) != 13: return False total = sum([int(num) * weight for num, weight in zip(val, (1, 3) * 6)]) ck = (10 - total) % 10 return ck == int(val[-1]) def is_isbn(val):
def is_issn(val): """ Test if argument is an ISSN number """ val = val.replace("-", "").replace(" ", "") if len(val) != 8: return False r = sum([(8 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)]) return not (r % 11) def is_all_uppercase(val): """ Returns true if more than 75% of the characters are upper-case """ uppers = 0 for c in val: if c.isupper(): uppers += 1 if 1.0 * uppers / len(val) > 0.75: return True else: return False def is_probably_list(val, separators=[';','-',',']): """ Returns true if string looks like a list - e.g. a, b, c, or a; b; c;""" if val: LIMIT = 2 counts = dict([(x, 0) for x in separators]) warn = False for c in val: for sep in separators: if c == sep: counts[sep] += 1 for sep, n in counts.items(): if n >= LIMIT: return True
""" Test if argument is an ISBN-10 or ISBN-13 number """ return is_isbn10(val) or is_isbn13(val)
identifier_body
GetSliceTags.py
from PLC.Faults import * from PLC.Method import Method from PLC.Parameter import Parameter, Mixed from PLC.Filter import Filter from PLC.SliceTags import SliceTag, SliceTags from PLC.Persons import Person, Persons from PLC.Sites import Site, Sites from PLC.Nodes import Nodes from PLC.Slices import Slice, Slices from PLC.Auth import Auth class GetSliceTags(Method): """ Returns an array of structs containing details about slice and sliver attributes. An attribute is a sliver attribute if the node_id field is set. If slice_tag_filter is specified and is an array of slice attribute identifiers, or a struct of slice attribute attributes, only slice attributes matching the filter will be returned. If return_fields is specified, only the specified details will be returned. Users may only query attributes of slices or slivers of which they are members. PIs may only query attributes of slices or slivers at their sites, or of which they are members. Admins may query attributes of any slice or sliver. """ roles = ['admin', 'pi', 'user', 'node'] accepts = [ Auth(), Mixed([SliceTag.fields['slice_tag_id']], Filter(SliceTag.fields)), Parameter([str], "List of fields to return", nullok = True) ] returns = [SliceTag.fields] def call(self, auth, slice_tag_filter = None, return_fields = None): # If we are not admin, make sure to only return our own slice # and sliver attributes. # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # # Get slices that we are able to view # valid_slice_ids = self.caller['slice_ids'] # if 'pi' in self.caller['roles'] and self.caller['site_ids']: # sites = Sites(self.api, self.caller['site_ids']) # for site in sites: # valid_slice_ids += site['slice_ids'] # # techs can view all slices on the nodes at their site # if 'tech' in self.caller['roles'] and self.caller['site_ids']: # nodes = Nodes(self.api, {'site_id': self.caller['site_ids']}, ['site_id', 'slice_ids']) # for node in nodes: # valid_slice_ids.extend(node['slice_ids']) # # if not valid_slice_ids: # return [] # # # Get slice attributes that we are able to view # valid_slice_tag_ids = [] # slices = Slices(self.api, valid_slice_ids) # for slice in slices: # valid_slice_tag_ids += slice['slice_tag_ids'] # # if not valid_slice_tag_ids: # return [] # # if slice_tag_filter is None: # slice_tag_filter = valid_slice_tag_ids # Must query at least slice_tag_id (see below)
if return_fields is not None and 'slice_tag_id' not in return_fields: return_fields.append('slice_tag_id') added_fields = True else: added_fields = False slice_tags = SliceTags(self.api, slice_tag_filter, return_fields) # Filter out slice attributes that are not viewable # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # slice_tags = [slice_tag for slice_tag in slice_tags if slice_tag['slice_tag_id'] in valid_slice_tag_ids] # Remove slice_tag_id if not specified if added_fields: for slice_tag in slice_tags: if 'slice_tag_id' in slice_tag: del slice_tag['slice_tag_id'] return slice_tags
identifier_body
GetSliceTags.py
from PLC.Faults import * from PLC.Method import Method from PLC.Parameter import Parameter, Mixed from PLC.Filter import Filter from PLC.SliceTags import SliceTag, SliceTags from PLC.Persons import Person, Persons from PLC.Sites import Site, Sites from PLC.Nodes import Nodes
from PLC.Auth import Auth class GetSliceTags(Method): """ Returns an array of structs containing details about slice and sliver attributes. An attribute is a sliver attribute if the node_id field is set. If slice_tag_filter is specified and is an array of slice attribute identifiers, or a struct of slice attribute attributes, only slice attributes matching the filter will be returned. If return_fields is specified, only the specified details will be returned. Users may only query attributes of slices or slivers of which they are members. PIs may only query attributes of slices or slivers at their sites, or of which they are members. Admins may query attributes of any slice or sliver. """ roles = ['admin', 'pi', 'user', 'node'] accepts = [ Auth(), Mixed([SliceTag.fields['slice_tag_id']], Filter(SliceTag.fields)), Parameter([str], "List of fields to return", nullok = True) ] returns = [SliceTag.fields] def call(self, auth, slice_tag_filter = None, return_fields = None): # If we are not admin, make sure to only return our own slice # and sliver attributes. # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # # Get slices that we are able to view # valid_slice_ids = self.caller['slice_ids'] # if 'pi' in self.caller['roles'] and self.caller['site_ids']: # sites = Sites(self.api, self.caller['site_ids']) # for site in sites: # valid_slice_ids += site['slice_ids'] # # techs can view all slices on the nodes at their site # if 'tech' in self.caller['roles'] and self.caller['site_ids']: # nodes = Nodes(self.api, {'site_id': self.caller['site_ids']}, ['site_id', 'slice_ids']) # for node in nodes: # valid_slice_ids.extend(node['slice_ids']) # # if not valid_slice_ids: # return [] # # # Get slice attributes that we are able to view # valid_slice_tag_ids = [] # slices = Slices(self.api, valid_slice_ids) # for slice in slices: # valid_slice_tag_ids += slice['slice_tag_ids'] # # if not valid_slice_tag_ids: # return [] # # if slice_tag_filter is None: # slice_tag_filter = valid_slice_tag_ids # Must query at least slice_tag_id (see below) if return_fields is not None and 'slice_tag_id' not in return_fields: return_fields.append('slice_tag_id') added_fields = True else: added_fields = False slice_tags = SliceTags(self.api, slice_tag_filter, return_fields) # Filter out slice attributes that are not viewable # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # slice_tags = [slice_tag for slice_tag in slice_tags if slice_tag['slice_tag_id'] in valid_slice_tag_ids] # Remove slice_tag_id if not specified if added_fields: for slice_tag in slice_tags: if 'slice_tag_id' in slice_tag: del slice_tag['slice_tag_id'] return slice_tags
from PLC.Slices import Slice, Slices
random_line_split
GetSliceTags.py
from PLC.Faults import * from PLC.Method import Method from PLC.Parameter import Parameter, Mixed from PLC.Filter import Filter from PLC.SliceTags import SliceTag, SliceTags from PLC.Persons import Person, Persons from PLC.Sites import Site, Sites from PLC.Nodes import Nodes from PLC.Slices import Slice, Slices from PLC.Auth import Auth class GetSliceTags(Method): """ Returns an array of structs containing details about slice and sliver attributes. An attribute is a sliver attribute if the node_id field is set. If slice_tag_filter is specified and is an array of slice attribute identifiers, or a struct of slice attribute attributes, only slice attributes matching the filter will be returned. If return_fields is specified, only the specified details will be returned. Users may only query attributes of slices or slivers of which they are members. PIs may only query attributes of slices or slivers at their sites, or of which they are members. Admins may query attributes of any slice or sliver. """ roles = ['admin', 'pi', 'user', 'node'] accepts = [ Auth(), Mixed([SliceTag.fields['slice_tag_id']], Filter(SliceTag.fields)), Parameter([str], "List of fields to return", nullok = True) ] returns = [SliceTag.fields] def
(self, auth, slice_tag_filter = None, return_fields = None): # If we are not admin, make sure to only return our own slice # and sliver attributes. # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # # Get slices that we are able to view # valid_slice_ids = self.caller['slice_ids'] # if 'pi' in self.caller['roles'] and self.caller['site_ids']: # sites = Sites(self.api, self.caller['site_ids']) # for site in sites: # valid_slice_ids += site['slice_ids'] # # techs can view all slices on the nodes at their site # if 'tech' in self.caller['roles'] and self.caller['site_ids']: # nodes = Nodes(self.api, {'site_id': self.caller['site_ids']}, ['site_id', 'slice_ids']) # for node in nodes: # valid_slice_ids.extend(node['slice_ids']) # # if not valid_slice_ids: # return [] # # # Get slice attributes that we are able to view # valid_slice_tag_ids = [] # slices = Slices(self.api, valid_slice_ids) # for slice in slices: # valid_slice_tag_ids += slice['slice_tag_ids'] # # if not valid_slice_tag_ids: # return [] # # if slice_tag_filter is None: # slice_tag_filter = valid_slice_tag_ids # Must query at least slice_tag_id (see below) if return_fields is not None and 'slice_tag_id' not in return_fields: return_fields.append('slice_tag_id') added_fields = True else: added_fields = False slice_tags = SliceTags(self.api, slice_tag_filter, return_fields) # Filter out slice attributes that are not viewable # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # slice_tags = [slice_tag for slice_tag in slice_tags if slice_tag['slice_tag_id'] in valid_slice_tag_ids] # Remove slice_tag_id if not specified if added_fields: for slice_tag in slice_tags: if 'slice_tag_id' in slice_tag: del slice_tag['slice_tag_id'] return slice_tags
call
identifier_name
GetSliceTags.py
from PLC.Faults import * from PLC.Method import Method from PLC.Parameter import Parameter, Mixed from PLC.Filter import Filter from PLC.SliceTags import SliceTag, SliceTags from PLC.Persons import Person, Persons from PLC.Sites import Site, Sites from PLC.Nodes import Nodes from PLC.Slices import Slice, Slices from PLC.Auth import Auth class GetSliceTags(Method): """ Returns an array of structs containing details about slice and sliver attributes. An attribute is a sliver attribute if the node_id field is set. If slice_tag_filter is specified and is an array of slice attribute identifiers, or a struct of slice attribute attributes, only slice attributes matching the filter will be returned. If return_fields is specified, only the specified details will be returned. Users may only query attributes of slices or slivers of which they are members. PIs may only query attributes of slices or slivers at their sites, or of which they are members. Admins may query attributes of any slice or sliver. """ roles = ['admin', 'pi', 'user', 'node'] accepts = [ Auth(), Mixed([SliceTag.fields['slice_tag_id']], Filter(SliceTag.fields)), Parameter([str], "List of fields to return", nullok = True) ] returns = [SliceTag.fields] def call(self, auth, slice_tag_filter = None, return_fields = None): # If we are not admin, make sure to only return our own slice # and sliver attributes. # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # # Get slices that we are able to view # valid_slice_ids = self.caller['slice_ids'] # if 'pi' in self.caller['roles'] and self.caller['site_ids']: # sites = Sites(self.api, self.caller['site_ids']) # for site in sites: # valid_slice_ids += site['slice_ids'] # # techs can view all slices on the nodes at their site # if 'tech' in self.caller['roles'] and self.caller['site_ids']: # nodes = Nodes(self.api, {'site_id': self.caller['site_ids']}, ['site_id', 'slice_ids']) # for node in nodes: # valid_slice_ids.extend(node['slice_ids']) # # if not valid_slice_ids: # return [] # # # Get slice attributes that we are able to view # valid_slice_tag_ids = [] # slices = Slices(self.api, valid_slice_ids) # for slice in slices: # valid_slice_tag_ids += slice['slice_tag_ids'] # # if not valid_slice_tag_ids: # return [] # # if slice_tag_filter is None: # slice_tag_filter = valid_slice_tag_ids # Must query at least slice_tag_id (see below) if return_fields is not None and 'slice_tag_id' not in return_fields:
else: added_fields = False slice_tags = SliceTags(self.api, slice_tag_filter, return_fields) # Filter out slice attributes that are not viewable # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # slice_tags = [slice_tag for slice_tag in slice_tags if slice_tag['slice_tag_id'] in valid_slice_tag_ids] # Remove slice_tag_id if not specified if added_fields: for slice_tag in slice_tags: if 'slice_tag_id' in slice_tag: del slice_tag['slice_tag_id'] return slice_tags
return_fields.append('slice_tag_id') added_fields = True
conditional_block
parser.ts
import {Reporter} from './index'; export class Parser { ch: string = ""; // current character aheadchars: string[] = []; at: number = 0; // index of the current character line: number = 0; // current line atline: number = 0; // index of the first character of the line reporter: Reporter; source: () => string; // read the next char from source constructor(reporter: Reporter, source: (() => string) | string) { this.reporter = reporter; if (typeof source === 'string') { let pos = -1; this.source = () => source[++pos] || ''; } else { this.source = source; } this.ch = this.source() ; } static isNotNumberChar(ch: string) { return !Parser.isNumberChar(ch); } static isNumberChar(ch: string) { return '0' <= ch && ch <= '9'; } static isNotWordChar(ch: string) { return !Parser.isWordChar(ch); } static isWordChar(ch: string) { return ch === '_' || ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ('0' <= ch && ch <= '9'); } static
(ch: string) { return !Parser.isSpaceChar(ch); } static isSpaceChar(ch: string) { return ch === ' ' || ch === '\t'; } static isNotLineChar(ch: string) { return !Parser.isLineChar(ch); } static isLineChar(ch: string) { return ch === '\n'; } static isNotAnySpaceChar(ch: string) { return !Parser.isAnySpaceChar(ch); } static isAnySpaceChar(ch: string) { return ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r'; } atEnd() : boolean { return !this.ch; } ahead(amount: number) : string { let ret = this.ch; if (ret) { amount--; let ch: string; let i = this.aheadchars.length; while (i > 0 && amount-- > 0) ret += (ch = this.aheadchars[--i]); while (amount-- > 0 && (ch = this.source())) { this.aheadchars.unshift(ch); ret += ch; } } return ret; } next() : string { if (Parser.isLineChar(this.ch)) { this.line++; this.atline = this.at + 1; } this.ch = this.aheadchars.length ? this.aheadchars.pop()! : this.source(); if (this.ch) this.at++; return this.ch; } error(message: string) { this.reporter.diagnostic({ is: "error", msg: message, row: this.line + 1, col: this.at - this.atline + 1 }); } test<T extends string>(expected: T, consume = true) : T | "" { let actual = expected.length === 1 ? this.ch : this.ahead(expected.length); let same = actual === expected; if (same && consume) for (let i = 0; i < expected.length; i++) this.next(); return same ? expected : ""; } consume<T extends string>(expected: T) : T { let pos = 0; while (pos < expected.length && this.ch === expected[pos]) { pos++; this.next(); } if (pos !== expected.length) this.error(`expecting: ${expected}, received: ${expected.substring(0, pos)}${this.ch}`); return expected; } while(predicate: (ch: string) => boolean, minLength: number) : string { let ret = ""; while (this.ch && predicate(this.ch)) { ret += this.ch; this.next(); } if (ret.length < minLength) this.error(`expecting to match ${predicate.name} x (${ret.length}/${minLength}), received: ${this.ch}`); return ret; } skip(predicate: (ch: string) => boolean, minLength: number = 0) : number { let count = 0; while (this.ch && predicate(this.ch)) { this.next(); count++; } if (count < minLength) this.error(`expecting to match ${predicate.name} x (${count}/${minLength}), received: ${this.ch}`); return count; } }
isNotSpaceChar
identifier_name
parser.ts
import {Reporter} from './index'; export class Parser { ch: string = ""; // current character aheadchars: string[] = []; at: number = 0; // index of the current character line: number = 0; // current line atline: number = 0; // index of the first character of the line reporter: Reporter; source: () => string; // read the next char from source constructor(reporter: Reporter, source: (() => string) | string) { this.reporter = reporter; if (typeof source === 'string') { let pos = -1; this.source = () => source[++pos] || ''; } else { this.source = source; } this.ch = this.source() ; } static isNotNumberChar(ch: string) { return !Parser.isNumberChar(ch); } static isNumberChar(ch: string) { return '0' <= ch && ch <= '9'; } static isNotWordChar(ch: string) { return !Parser.isWordChar(ch); } static isWordChar(ch: string) { return ch === '_' || ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ('0' <= ch && ch <= '9'); } static isNotSpaceChar(ch: string) { return !Parser.isSpaceChar(ch); } static isSpaceChar(ch: string) { return ch === ' ' || ch === '\t'; } static isNotLineChar(ch: string) { return !Parser.isLineChar(ch); } static isLineChar(ch: string) { return ch === '\n'; } static isNotAnySpaceChar(ch: string) { return !Parser.isAnySpaceChar(ch); } static isAnySpaceChar(ch: string) { return ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r'; } atEnd() : boolean { return !this.ch; } ahead(amount: number) : string { let ret = this.ch; if (ret) { amount--; let ch: string; let i = this.aheadchars.length; while (i > 0 && amount-- > 0) ret += (ch = this.aheadchars[--i]); while (amount-- > 0 && (ch = this.source())) { this.aheadchars.unshift(ch); ret += ch; } } return ret; } next() : string { if (Parser.isLineChar(this.ch)) { this.line++; this.atline = this.at + 1; } this.ch = this.aheadchars.length ? this.aheadchars.pop()! : this.source(); if (this.ch) this.at++; return this.ch; } error(message: string) { this.reporter.diagnostic({ is: "error", msg: message, row: this.line + 1, col: this.at - this.atline + 1 }); } test<T extends string>(expected: T, consume = true) : T | "" { let actual = expected.length === 1 ? this.ch : this.ahead(expected.length); let same = actual === expected; if (same && consume) for (let i = 0; i < expected.length; i++) this.next(); return same ? expected : ""; } consume<T extends string>(expected: T) : T { let pos = 0; while (pos < expected.length && this.ch === expected[pos]) { pos++; this.next(); } if (pos !== expected.length) this.error(`expecting: ${expected}, received: ${expected.substring(0, pos)}${this.ch}`); return expected; }
ret += this.ch; this.next(); } if (ret.length < minLength) this.error(`expecting to match ${predicate.name} x (${ret.length}/${minLength}), received: ${this.ch}`); return ret; } skip(predicate: (ch: string) => boolean, minLength: number = 0) : number { let count = 0; while (this.ch && predicate(this.ch)) { this.next(); count++; } if (count < minLength) this.error(`expecting to match ${predicate.name} x (${count}/${minLength}), received: ${this.ch}`); return count; } }
while(predicate: (ch: string) => boolean, minLength: number) : string { let ret = ""; while (this.ch && predicate(this.ch)) {
random_line_split
parser.ts
import {Reporter} from './index'; export class Parser { ch: string = ""; // current character aheadchars: string[] = []; at: number = 0; // index of the current character line: number = 0; // current line atline: number = 0; // index of the first character of the line reporter: Reporter; source: () => string; // read the next char from source constructor(reporter: Reporter, source: (() => string) | string) { this.reporter = reporter; if (typeof source === 'string') { let pos = -1; this.source = () => source[++pos] || ''; } else { this.source = source; } this.ch = this.source() ; } static isNotNumberChar(ch: string) { return !Parser.isNumberChar(ch); } static isNumberChar(ch: string)
static isNotWordChar(ch: string) { return !Parser.isWordChar(ch); } static isWordChar(ch: string) { return ch === '_' || ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ('0' <= ch && ch <= '9'); } static isNotSpaceChar(ch: string) { return !Parser.isSpaceChar(ch); } static isSpaceChar(ch: string) { return ch === ' ' || ch === '\t'; } static isNotLineChar(ch: string) { return !Parser.isLineChar(ch); } static isLineChar(ch: string) { return ch === '\n'; } static isNotAnySpaceChar(ch: string) { return !Parser.isAnySpaceChar(ch); } static isAnySpaceChar(ch: string) { return ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r'; } atEnd() : boolean { return !this.ch; } ahead(amount: number) : string { let ret = this.ch; if (ret) { amount--; let ch: string; let i = this.aheadchars.length; while (i > 0 && amount-- > 0) ret += (ch = this.aheadchars[--i]); while (amount-- > 0 && (ch = this.source())) { this.aheadchars.unshift(ch); ret += ch; } } return ret; } next() : string { if (Parser.isLineChar(this.ch)) { this.line++; this.atline = this.at + 1; } this.ch = this.aheadchars.length ? this.aheadchars.pop()! : this.source(); if (this.ch) this.at++; return this.ch; } error(message: string) { this.reporter.diagnostic({ is: "error", msg: message, row: this.line + 1, col: this.at - this.atline + 1 }); } test<T extends string>(expected: T, consume = true) : T | "" { let actual = expected.length === 1 ? this.ch : this.ahead(expected.length); let same = actual === expected; if (same && consume) for (let i = 0; i < expected.length; i++) this.next(); return same ? expected : ""; } consume<T extends string>(expected: T) : T { let pos = 0; while (pos < expected.length && this.ch === expected[pos]) { pos++; this.next(); } if (pos !== expected.length) this.error(`expecting: ${expected}, received: ${expected.substring(0, pos)}${this.ch}`); return expected; } while(predicate: (ch: string) => boolean, minLength: number) : string { let ret = ""; while (this.ch && predicate(this.ch)) { ret += this.ch; this.next(); } if (ret.length < minLength) this.error(`expecting to match ${predicate.name} x (${ret.length}/${minLength}), received: ${this.ch}`); return ret; } skip(predicate: (ch: string) => boolean, minLength: number = 0) : number { let count = 0; while (this.ch && predicate(this.ch)) { this.next(); count++; } if (count < minLength) this.error(`expecting to match ${predicate.name} x (${count}/${minLength}), received: ${this.ch}`); return count; } }
{ return '0' <= ch && ch <= '9'; }
identifier_body
parser.ts
import {Reporter} from './index'; export class Parser { ch: string = ""; // current character aheadchars: string[] = []; at: number = 0; // index of the current character line: number = 0; // current line atline: number = 0; // index of the first character of the line reporter: Reporter; source: () => string; // read the next char from source constructor(reporter: Reporter, source: (() => string) | string) { this.reporter = reporter; if (typeof source === 'string') { let pos = -1; this.source = () => source[++pos] || ''; } else { this.source = source; } this.ch = this.source() ; } static isNotNumberChar(ch: string) { return !Parser.isNumberChar(ch); } static isNumberChar(ch: string) { return '0' <= ch && ch <= '9'; } static isNotWordChar(ch: string) { return !Parser.isWordChar(ch); } static isWordChar(ch: string) { return ch === '_' || ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ('0' <= ch && ch <= '9'); } static isNotSpaceChar(ch: string) { return !Parser.isSpaceChar(ch); } static isSpaceChar(ch: string) { return ch === ' ' || ch === '\t'; } static isNotLineChar(ch: string) { return !Parser.isLineChar(ch); } static isLineChar(ch: string) { return ch === '\n'; } static isNotAnySpaceChar(ch: string) { return !Parser.isAnySpaceChar(ch); } static isAnySpaceChar(ch: string) { return ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r'; } atEnd() : boolean { return !this.ch; } ahead(amount: number) : string { let ret = this.ch; if (ret) { amount--; let ch: string; let i = this.aheadchars.length; while (i > 0 && amount-- > 0) ret += (ch = this.aheadchars[--i]); while (amount-- > 0 && (ch = this.source())) { this.aheadchars.unshift(ch); ret += ch; } } return ret; } next() : string { if (Parser.isLineChar(this.ch)) { this.line++; this.atline = this.at + 1; } this.ch = this.aheadchars.length ? this.aheadchars.pop()! : this.source(); if (this.ch) this.at++; return this.ch; } error(message: string) { this.reporter.diagnostic({ is: "error", msg: message, row: this.line + 1, col: this.at - this.atline + 1 }); } test<T extends string>(expected: T, consume = true) : T | "" { let actual = expected.length === 1 ? this.ch : this.ahead(expected.length); let same = actual === expected; if (same && consume) for (let i = 0; i < expected.length; i++) this.next(); return same ? expected : ""; } consume<T extends string>(expected: T) : T { let pos = 0; while (pos < expected.length && this.ch === expected[pos]) { pos++; this.next(); } if (pos !== expected.length) this.error(`expecting: ${expected}, received: ${expected.substring(0, pos)}${this.ch}`); return expected; } while(predicate: (ch: string) => boolean, minLength: number) : string { let ret = ""; while (this.ch && predicate(this.ch)) { ret += this.ch; this.next(); } if (ret.length < minLength) this.error(`expecting to match ${predicate.name} x (${ret.length}/${minLength}), received: ${this.ch}`); return ret; } skip(predicate: (ch: string) => boolean, minLength: number = 0) : number { let count = 0; while (this.ch && predicate(this.ch))
if (count < minLength) this.error(`expecting to match ${predicate.name} x (${count}/${minLength}), received: ${this.ch}`); return count; } }
{ this.next(); count++; }
conditional_block
setup.py
import os import sys import glob try: import numpy except ImportError: print "You need to have numpy installed on your system to run setup.py. Sorry!" sys.exit() try: from Cython.Distutils import build_ext except ImportError: print "You need to have Cython installed on your system to run setup.py. Sorry!" sys.exit() from setuptools import setup, find_packages, Extension if os.environ.get('USER','') == 'vagrant':
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() include_dirs_for_concoct = [numpy.get_include(), '/opt/local/include/'] setup( name = "anvio", version = open('VERSION').read().strip(), scripts = [script for script in glob.glob('bin/*') if not script.endswith('-OBSOLETE')], include_package_data = True, packages = find_packages(), install_requires = ['bottle>=0.12.7', 'pysam>=0.8.3', 'hcluster>=0.2.0', 'ete2>=2.2', 'scipy', 'scikit-learn>=0.15', 'django>=1.7', 'cython>=0.21a1'], cmdclass = {'build_ext': build_ext}, ext_modules = [ Extension('anvio.columnprofile', sources = ['./anvio/extensions/columnprofile.c']), Extension("anvio.vbgmm", sources=["./anvio/extensions/concoct/vbgmm.pyx", "./anvio/extensions/concoct/c_vbgmm_fit.c"], libraries =['gsl', 'gslcblas'], include_dirs=include_dirs_for_concoct), ], author = "anvi'o Authors", author_email = "[email protected]", description = "An interactive analysis and visualization platform for 'omics data. See https://merenlab.org/projects/anvio for more information", license = "GPLv3+", keywords = "metagenomics metatranscriptomics microbiology shotgun genomics MBL pipeline sequencing bam visualization SNP", url = "https://meren.github.io/projects/anvio/", classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Natural Language :: English', 'Operating System :: MacOS', 'Operating System :: POSIX', 'Programming Language :: Python :: 2.7', 'Programming Language :: JavaScript', 'Programming Language :: C', 'Topic :: Scientific/Engineering', ], )
del os.link
conditional_block
setup.py
import os import sys import glob try: import numpy except ImportError: print "You need to have numpy installed on your system to run setup.py. Sorry!" sys.exit() try: from Cython.Distutils import build_ext except ImportError: print "You need to have Cython installed on your system to run setup.py. Sorry!" sys.exit() from setuptools import setup, find_packages, Extension if os.environ.get('USER','') == 'vagrant': del os.link os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() include_dirs_for_concoct = [numpy.get_include(), '/opt/local/include/'] setup( name = "anvio", version = open('VERSION').read().strip(), scripts = [script for script in glob.glob('bin/*') if not script.endswith('-OBSOLETE')], include_package_data = True, packages = find_packages(), install_requires = ['bottle>=0.12.7', 'pysam>=0.8.3', 'hcluster>=0.2.0', 'ete2>=2.2', 'scipy', 'scikit-learn>=0.15', 'django>=1.7', 'cython>=0.21a1'], cmdclass = {'build_ext': build_ext}, ext_modules = [ Extension('anvio.columnprofile', sources = ['./anvio/extensions/columnprofile.c']), Extension("anvio.vbgmm", sources=["./anvio/extensions/concoct/vbgmm.pyx", "./anvio/extensions/concoct/c_vbgmm_fit.c"], libraries =['gsl', 'gslcblas'], include_dirs=include_dirs_for_concoct), ], author = "anvi'o Authors", author_email = "[email protected]", description = "An interactive analysis and visualization platform for 'omics data. See https://merenlab.org/projects/anvio for more information", license = "GPLv3+", keywords = "metagenomics metatranscriptomics microbiology shotgun genomics MBL pipeline sequencing bam visualization SNP", url = "https://meren.github.io/projects/anvio/", classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Natural Language :: English',
'Programming Language :: C', 'Topic :: Scientific/Engineering', ], )
'Operating System :: MacOS', 'Operating System :: POSIX', 'Programming Language :: Python :: 2.7', 'Programming Language :: JavaScript',
random_line_split
settings.py
SECRET_KEY = 'asdf' DATABASES = { 'default': { 'NAME': 'test.db', 'ENGINE': 'django.db.backends.sqlite3', } } INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.staticfiles', 'revproxy', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'tests.urls' import os BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEMPLATE_DIRS = ( os.path.join(BASE_DIR, 'templates'), ) TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'DIRS': TEMPLATE_DIRS, }, ] LOGGING = { 'version': 1, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'logging.NullHandler', }, }, 'loggers': { 'revproxy': {
}, }
'handlers': ['null'], 'propagate': False, },
random_line_split
InternalLinkModalView.js
import ModalView from 'OpenOrchestra/Service/Modal/View/ModalView' import FormBuilder from 'OpenOrchestra/Service/Form/Model/FormBuilder' import InternalLinkFormView from 'OpenOrchestra/Service/Tinymce/Plugins/InternalLink/View/InternalLinkFormView' /** * InternalLinkModalView */ class InternalLinkModalView extends ModalView { /** * Initialize * @param {Editor} editor * @param {Object} data */
({editor, data}) { super.initialize(); this._editor = editor; this._data = data; } /** * render internal link modal form */ render() { let template = this._renderTemplate('Tinymce/InternalLink/internalLinkForm'); this.$el.html(template); let $formRegion = $('.modal-body', this.$el); this._displayLoader($formRegion); let url = Routing.generate('open_orchestra_backoffice_internal_link_form'); FormBuilder.createFormFromUrl(url, (form) => { let internalLinkFormView = new InternalLinkFormView({ form: form, editor: this._editor, modal: this }); $formRegion.html(internalLinkFormView.render().$el); }, this._data); return this; } } export default InternalLinkModalView
initialize
identifier_name
InternalLinkModalView.js
import ModalView from 'OpenOrchestra/Service/Modal/View/ModalView' import FormBuilder from 'OpenOrchestra/Service/Form/Model/FormBuilder' import InternalLinkFormView from 'OpenOrchestra/Service/Tinymce/Plugins/InternalLink/View/InternalLinkFormView' /** * InternalLinkModalView */ class InternalLinkModalView extends ModalView { /** * Initialize * @param {Editor} editor * @param {Object} data */ initialize({editor, data}) { super.initialize(); this._editor = editor; this._data = data; } /** * render internal link modal form */ render()
} export default InternalLinkModalView
{ let template = this._renderTemplate('Tinymce/InternalLink/internalLinkForm'); this.$el.html(template); let $formRegion = $('.modal-body', this.$el); this._displayLoader($formRegion); let url = Routing.generate('open_orchestra_backoffice_internal_link_form'); FormBuilder.createFormFromUrl(url, (form) => { let internalLinkFormView = new InternalLinkFormView({ form: form, editor: this._editor, modal: this }); $formRegion.html(internalLinkFormView.render().$el); }, this._data); return this; }
identifier_body
InternalLinkModalView.js
import ModalView from 'OpenOrchestra/Service/Modal/View/ModalView' import FormBuilder from 'OpenOrchestra/Service/Form/Model/FormBuilder' import InternalLinkFormView from 'OpenOrchestra/Service/Tinymce/Plugins/InternalLink/View/InternalLinkFormView' /** * InternalLinkModalView */ class InternalLinkModalView extends ModalView { /** * Initialize * @param {Editor} editor * @param {Object} data */ initialize({editor, data}) { super.initialize(); this._editor = editor; this._data = data; } /** * render internal link modal form */ render() { let template = this._renderTemplate('Tinymce/InternalLink/internalLinkForm'); this.$el.html(template); let $formRegion = $('.modal-body', this.$el); this._displayLoader($formRegion); let url = Routing.generate('open_orchestra_backoffice_internal_link_form'); FormBuilder.createFormFromUrl(url, (form) => { let internalLinkFormView = new InternalLinkFormView({ form: form, editor: this._editor, modal: this }); $formRegion.html(internalLinkFormView.render().$el);
} export default InternalLinkModalView
}, this._data); return this; }
random_line_split
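All three InternalLinkModalView records split the same render() flow: show a loader in the modal body, fetch a form definition from a backoffice URL, then swap the built form view into that region. Sketched below as plain Python with a callback, to show the shape of the flow; the fetch function and region object are stand-ins, not OpenOrchestra APIs.

def render(region, fetch_form):
    region['html'] = '<div class="loader"></div>'   # placeholder while fetching

    def on_form(form):                              # runs when the form arrives
        region['html'] = f'<form>{form}</form>'

    fetch_form(on_form)   # asynchronous in the JS; invoked inline in this sketch
    return region

modal_body = render({}, lambda cb: cb('internal link fields'))
print(modal_body['html'])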
mix_panel.js
App.addChild('MixPanel', { el: 'body', activate: function(){ this.VISIT_MIN_TIME = 10000; this.user = null; this.controller = this.$el.data('controller'); this.action = this.$el.data('action'); this.user = this.$el.data('user'); if(window.mixpanel){ this.detectLogin(); this.startTracking(); this.trackTwitterShare(); this.trackFacebookShare(); try { this.trackOnFacebookLike(); } catch(e) { console.log(e); } } }, startTracking: function(){ var self = this; this.trackPageVisit('projects', 'show', 'Visited project page'); this.trackPageVisit('explore', 'index', 'Explored projects'); this.trackPageLoad('contributions', 'show', 'Finished contribution'); this.trackPageLoad('contributions', 'edit', 'Selected reward'); }, trackPageLoad: function(controller, action, text){ var self = this; this.trackOnPage(controller, action, function(){ self.track(text); }); }, trackPageVisit: function(controller, action, text){ var self = this; this.trackOnPage(controller, action, function(){ self.trackVisit(text); }); }, trackOnPage: function(controller, action, callback){ if(this.controller == controller && this.action == action){ callback(); } }, trackTwitterShare: function() { var self = this; this.$('#twitter_share_button').on('click', function(event){ self.track('Share a project', { ref: $(event.currentTarget).data('title'), social_network: 'Twitter' }); }); }, trackFacebookShare: function() { var self = this; this.$('a#facebook_share').on('click', function(event){ self.track('Share a project', { ref: $(event.currentTarget).data('title'), social_network: 'Facebook' }); }); }, trackOnFacebookLike: function() { var self = this; FB.Event.subscribe('edge.create', function(url, html_element){ self.track('Liked a project', { ref: $(html_element).data('title') }); }); }, onLogin: function(){ mixpanel.alias(this.user.id); if(this.user.created_today){ this.track("Signed up"); } else{ this.track("Logged in"); } }, detectLogin: function(){ if(this.user){ if(this.user.id != store.get('user_id')){ this.onLogin(); store.set('user_id', this.user.id); } } else{ store.set('user_id', null); } }, identifyUser: function(){ if (this.user){ mixpanel.name_tag(this.user.email); mixpanel.identify(this.user.id); mixpanel.people.set({ "$email": this.user.email, "$created": this.user.created_at, "$last_login": this.user.last_sign_in_at, "contributions": this.user.total_contributed_projects }); } }, track: function(text, options){ this.identifyUser(); var opt = options || {}; var obj = $(this); var ref = (obj.attr('href') != undefined) ? obj.attr('href') : (opt.ref ? opt.ref : null); var default_options = { 'page name': document.title, 'user_id': null, 'created': null, 'last_login': null, 'contributions': null, 'has_contributions': null, 'project': ref, 'url': window.location }; if(this.user)
var opt = $.fn.extend(default_options, opt); mixpanel.track(text, opt); }, mixPanelEvent: function(target, event, text, options){ var self = this; this.$(target).on(event, function(){ self.track(text, options); }); }, trackVisit: function(eventName){ var self = this; window.setTimeout(function(){ self.track(eventName); }, this.VISIT_MIN_TIME); } });
{ default_options.user_id = this.user.id; default_options.created = this.user.created_at; default_options.last_login = this.user.last_sign_in_at; default_options.contributions = this.user.total_contributed_projects; default_options.has_contributions = (this.user.total_contributed_projects > 0); }
conditional_block
mix_panel.js
App.addChild('MixPanel', { el: 'body', activate: function(){ this.VISIT_MIN_TIME = 10000; this.user = null; this.controller = this.$el.data('controller'); this.action = this.$el.data('action'); this.user = this.$el.data('user'); if(window.mixpanel){ this.detectLogin(); this.startTracking(); this.trackTwitterShare(); this.trackFacebookShare(); try { this.trackOnFacebookLike(); } catch(e) { console.log(e); } } }, startTracking: function(){ var self = this; this.trackPageVisit('projects', 'show', 'Visited project page'); this.trackPageVisit('explore', 'index', 'Explored projects'); this.trackPageLoad('contributions', 'show', 'Finished contribution'); this.trackPageLoad('contributions', 'edit', 'Selected reward'); }, trackPageLoad: function(controller, action, text){ var self = this; this.trackOnPage(controller, action, function(){ self.track(text); }); }, trackPageVisit: function(controller, action, text){ var self = this; this.trackOnPage(controller, action, function(){ self.trackVisit(text); }); }, trackOnPage: function(controller, action, callback){ if(this.controller == controller && this.action == action){ callback(); } }, trackTwitterShare: function() { var self = this; this.$('#twitter_share_button').on('click', function(event){
}, trackFacebookShare: function() { var self = this; this.$('a#facebook_share').on('click', function(event){ self.track('Share a project', { ref: $(event.currentTarget).data('title'), social_network: 'Facebook' }); }); }, trackOnFacebookLike: function() { var self = this; FB.Event.subscribe('edge.create', function(url, html_element){ self.track('Liked a project', { ref: $(html_element).data('title') }); }); }, onLogin: function(){ mixpanel.alias(this.user.id); if(this.user.created_today){ this.track("Signed up"); } else{ this.track("Logged in"); } }, detectLogin: function(){ if(this.user){ if(this.user.id != store.get('user_id')){ this.onLogin(); store.set('user_id', this.user.id); } } else{ store.set('user_id', null); } }, identifyUser: function(){ if (this.user){ mixpanel.name_tag(this.user.email); mixpanel.identify(this.user.id); mixpanel.people.set({ "$email": this.user.email, "$created": this.user.created_at, "$last_login": this.user.last_sign_in_at, "contributions": this.user.total_contributed_projects }); } }, track: function(text, options){ this.identifyUser(); var opt = options || {}; var obj = $(this); var ref = (obj.attr('href') != undefined) ? obj.attr('href') : (opt.ref ? opt.ref : null); var default_options = { 'page name': document.title, 'user_id': null, 'created': null, 'last_login': null, 'contributions': null, 'has_contributions': null, 'project': ref, 'url': window.location }; if(this.user){ default_options.user_id = this.user.id; default_options.created = this.user.created_at; default_options.last_login = this.user.last_sign_in_at; default_options.contributions = this.user.total_contributed_projects; default_options.has_contributions = (this.user.total_contributed_projects > 0); } var opt = $.fn.extend(default_options, opt); mixpanel.track(text, opt); }, mixPanelEvent: function(target, event, text, options){ var self = this; this.$(target).on(event, function(){ self.track(text, options); }); }, trackVisit: function(eventName){ var self = this; window.setTimeout(function(){ self.track(eventName); }, this.VISIT_MIN_TIME); } });
self.track('Share a project', { ref: $(event.currentTarget).data('title'), social_network: 'Twitter' }); });
random_line_split
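Both mix_panel.js records revolve around track(): assemble default event properties, overlay user fields when a user is present, then let the caller's options win via the extend/merge step. The Python sketch below mirrors that precedence order; the field names are trimmed from the record and the print stands in for mixpanel.track.

def track(text, user=None, options=None):
    props = {
        'page name': 'document title',
        'user_id': None,
        'contributions': None,
        'has_contributions': None,
    }
    if user:  # user fields override the None defaults
        props['user_id'] = user['id']
        props['contributions'] = user['total_contributed_projects']
        props['has_contributions'] = user['total_contributed_projects'] > 0
    props.update(options or {})  # caller options override everything else
    print('mixpanel.track', text, props)

track('Share a project',
      user={'id': 7, 'total_contributed_projects': 2},
      options={'social_network': 'Twitter'})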
Lungo.Attributes.js
/** * Object with data-attributes (HTML5) with a special <markup> * * @namespace Lungo * @class Attributes * * @author Javier Jimenez Villar <[email protected]> || @soyjavi * @author Guillermo Pascual <[email protected]> || @pasku1 */ Lungo.Attributes = { count: { selector: '*', html: '<span class="tag theme count">{{value}}</span>' }, pull: { selector: 'section', html: '<div class="{{value}}" data-control="pull" data-icon="down" data-loading="black">\ <strong>title</strong>\ </div>' }, progress: {
</div>' }, label: { selector: '*', html: '<abbr>{{value}}</abbr>' }, icon: { selector: '*', html: '<span class="icon {{value}}"></span>' }, image: { selector: '*', html: '<img src="{{value}}" class="icon" />' }, title: { selector: 'header', html: '<span class="title centered">{{value}}</span>' }, loading: { selector: '*', html: '<div class="loading {{value}}">\ <span class="top"></span>\ <span class="right"></span>\ <span class="bottom"></span>\ <span class="left"></span>\ </div>' }, back: { selector: 'header', html: '<nav class="left"><a href="#back" data-router="section" class="left"><span class="icon {{value}}"></span></a></nav>' } };
selector: '*', html: '<div class="progress">\ <span class="bar"><span class="value" style="width:{{value}};"></span></span>\
random_line_split
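The Lungo.Attributes table maps each data-attribute name to a selector plus an HTML snippet containing a single {{value}} placeholder. The rendering step that consumes such a table is a one-line substitution; a minimal Python version, with the markup copied from the record and the render helper itself hypothetical:

ATTRIBUTES = {
    'count': '<span class="tag theme count">{{value}}</span>',
    'icon': '<span class="icon {{value}}"></span>',
    'image': '<img src="{{value}}" class="icon" />',
}

def render(name, value):
    # substitute the data-attribute's value into the canned markup
    return ATTRIBUTES[name].replace('{{value}}', value)

print(render('icon', 'down'))   # -> <span class="icon down"></span>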
bytes.rs
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::types::{Intern, RawInternKey}; use fnv::FnvHashMap; use lazy_static::lazy_static; use parking_lot::RwLock; use serde::{Deserialize, Deserializer}; use std::fmt; use std::sync::Arc; /// Slices of bytes intern as BytesKey impl Intern for &[u8] { type Key = BytesKey; fn intern(self) -> Self::Key { BytesKey(BYTES_TABLE.intern(self)) } } /// Owned strings intern as StringKey, with the interning /// based on the raw bytes of the string impl Intern for String { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Str (slices) intern as StringKey, with the interning /// based on the raw bytes of the str. impl Intern for &str { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Interned bytes #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct BytesKey(RawInternKey); impl BytesKey { pub fn lookup(self) -> &'static [u8] { BYTES_TABLE.lookup(self.0) } } impl fmt::Debug for BytesKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let bytes_value = self.lookup(); write!(f, "{:?}", bytes_value) } } /// An interned string #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct StringKey(RawInternKey); impl StringKey { /// Get a reference to the original str. pub fn lookup(self) -> &'static str { let bytes = BYTES_TABLE.lookup(self.0); // This is safe because the bytes we are converting originally came // from a str when we interned it: the only way to get a StringKey is // to intern an (already valid) string, so if we have a StringKey then // its bytes must be valid UTF-8. unsafe { std::str::from_utf8_unchecked(bytes) } } /// Convert the interned string key into an interned bytes key. Because /// strings intern as their raw bytes, this is an O(1) operation. /// Note the reverse (BytesKey.as_str) is a fallible operation since /// the bytes may not be valid UTF-8. pub fn as_bytes(self) -> BytesKey { BytesKey(self.0) } } impl fmt::Debug for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{:?}", str_value) } } impl fmt::Display for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{}", str_value) } } impl<'de> Deserialize<'de> for StringKey { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { Deserialize::deserialize(deserializer).map(|s: String| s.intern()) } } // Static table used in the bytes/str Intern implementations lazy_static! { static ref BYTES_TABLE: BytesTable = BytesTable::new(); } /// Similar to the generic `InternTable` but customized for sequences of raw bytes (and strings). pub struct BytesTable { data: Arc<RwLock<BytesTableData>>, } impl BytesTable { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(BytesTableData::new())), } } pub fn intern(&self, value: &[u8]) -> RawInternKey { if let Some(prev) = self.data.read().get(&value) { return prev; } let mut writer = self.data.write(); writer.intern(value) } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { self.data.read().lookup(key) } } /// BytesTableData is similar to InternTableData but customized for sequences /// of raw bytes (and notably, strings). 
struct BytesTableData { // Raw data storage, allocated in large chunks buffer: Option<&'static mut [u8]>, // Reverse mapping of index=>value, used to convert an // interned key back to (a reference to) its value items: Vec<&'static [u8]>, // Mapping of values to their interned indices table: FnvHashMap<&'static [u8], RawInternKey>, } impl BytesTableData { const BUFFER_SIZE: usize = 4096; pub fn new() -> Self { Self { buffer: Some(Self::new_buffer()), items: Default::default(), table: Default::default(),
} } fn new_buffer() -> &'static mut [u8] { Box::leak(Box::new([0; Self::BUFFER_SIZE])) } pub fn get(&self, value: &[u8]) -> Option<RawInternKey> { self.table.get(value).cloned() } // Copy the byte slice into 'static memory by appending it to a buffer, if there is room. // If the buffer fills up and the value is small, start over with a new buffer. // If the value is large, just give it its own memory. fn alloc(&mut self, value: &[u8]) -> &'static [u8] { let len = value.len(); let mut buffer = self.buffer.take().unwrap(); if len > buffer.len() { if len >= Self::BUFFER_SIZE / 16 { // This byte slice is so big it can just have its own memory. self.buffer = Some(buffer); return Box::leak(value.into()); } else { buffer = Self::new_buffer() } } let (mem, remaining) = buffer.split_at_mut(len); mem.copy_from_slice(value); self.buffer = Some(remaining); mem } pub fn intern(&mut self, value: &[u8]) -> RawInternKey { // If there's an existing value return it if let Some(prev) = self.get(&value) { return prev; } // Otherwise intern let key = RawInternKey::new(self.items.len()); let static_bytes = self.alloc(value); self.items.push(static_bytes); self.table.insert(static_bytes, key); key } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { let index = key.as_usize(); self.items[index] } }
random_line_split
bytes.rs
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::types::{Intern, RawInternKey}; use fnv::FnvHashMap; use lazy_static::lazy_static; use parking_lot::RwLock; use serde::{Deserialize, Deserializer}; use std::fmt; use std::sync::Arc; /// Slices of bytes intern as BytesKey impl Intern for &[u8] { type Key = BytesKey; fn intern(self) -> Self::Key { BytesKey(BYTES_TABLE.intern(self)) } } /// Owned strings intern as StringKey, with the interning /// based on the raw bytes of the string impl Intern for String { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Str (slices) intern as StringKey, with the interning /// based on the raw bytes of the str. impl Intern for &str { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Interned bytes #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct BytesKey(RawInternKey); impl BytesKey { pub fn lookup(self) -> &'static [u8] { BYTES_TABLE.lookup(self.0) } } impl fmt::Debug for BytesKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let bytes_value = self.lookup(); write!(f, "{:?}", bytes_value) } } /// An interned string #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct StringKey(RawInternKey); impl StringKey { /// Get a reference to the original str. pub fn lookup(self) -> &'static str { let bytes = BYTES_TABLE.lookup(self.0); // This is safe because the bytes we are converting originally came // from a str when we interned it: the only way to get a StringKey is // to intern an (already valid) string, so if we have a StringKey then // its bytes must be valid UTF-8. unsafe { std::str::from_utf8_unchecked(bytes) } } /// Convert the interned string key into an interned bytes key. Because /// strings intern as their raw bytes, this is an O(1) operation. /// Note the reverse (BytesKey.as_str) is a fallible operation since /// the bytes may not be valid UTF-8. pub fn as_bytes(self) -> BytesKey { BytesKey(self.0) } } impl fmt::Debug for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{:?}", str_value) } } impl fmt::Display for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{}", str_value) } } impl<'de> Deserialize<'de> for StringKey { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { Deserialize::deserialize(deserializer).map(|s: String| s.intern()) } } // Static table used in the bytes/str Intern implementations lazy_static! { static ref BYTES_TABLE: BytesTable = BytesTable::new(); } /// Similar to the generic `InternTable` but customized for sequences of raw bytes (and strings). pub struct BytesTable { data: Arc<RwLock<BytesTableData>>, } impl BytesTable { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(BytesTableData::new())), } } pub fn intern(&self, value: &[u8]) -> RawInternKey { if let Some(prev) = self.data.read().get(&value) { return prev; } let mut writer = self.data.write(); writer.intern(value) } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { self.data.read().lookup(key) } } /// BytesTableData is similar to InternTableData but customized for sequences /// of raw bytes (and notably, strings). 
struct BytesTableData { // Raw data storage, allocated in large chunks buffer: Option<&'static mut [u8]>, // Reverse mapping of index=>value, used to convert an // interned key back to (a reference to) its value items: Vec<&'static [u8]>, // Mapping of values to their interned indices table: FnvHashMap<&'static [u8], RawInternKey>, } impl BytesTableData { const BUFFER_SIZE: usize = 4096; pub fn
() -> Self { Self { buffer: Some(Self::new_buffer()), items: Default::default(), table: Default::default(), } } fn new_buffer() -> &'static mut [u8] { Box::leak(Box::new([0; Self::BUFFER_SIZE])) } pub fn get(&self, value: &[u8]) -> Option<RawInternKey> { self.table.get(value).cloned() } // Copy the byte slice into 'static memory by appending it to a buffer, if there is room. // If the buffer fills up and the value is small, start over with a new buffer. // If the value is large, just give it its own memory. fn alloc(&mut self, value: &[u8]) -> &'static [u8] { let len = value.len(); let mut buffer = self.buffer.take().unwrap(); if len > buffer.len() { if len >= Self::BUFFER_SIZE / 16 { // This byte slice is so big it can just have its own memory. self.buffer = Some(buffer); return Box::leak(value.into()); } else { buffer = Self::new_buffer() } } let (mem, remaining) = buffer.split_at_mut(len); mem.copy_from_slice(value); self.buffer = Some(remaining); mem } pub fn intern(&mut self, value: &[u8]) -> RawInternKey { // If there's an existing value return it if let Some(prev) = self.get(&value) { return prev; } // Otherwise intern let key = RawInternKey::new(self.items.len()); let static_bytes = self.alloc(value); self.items.push(static_bytes); self.table.insert(static_bytes, key); key } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { let index = key.as_usize(); self.items[index] } }
new
identifier_name
bytes.rs
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::types::{Intern, RawInternKey}; use fnv::FnvHashMap; use lazy_static::lazy_static; use parking_lot::RwLock; use serde::{Deserialize, Deserializer}; use std::fmt; use std::sync::Arc; /// Slices of bytes intern as BytesKey impl Intern for &[u8] { type Key = BytesKey; fn intern(self) -> Self::Key { BytesKey(BYTES_TABLE.intern(self)) } } /// Owned strings intern as StringKey, with the interning /// based on the raw bytes of the string impl Intern for String { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Str (slices) intern as StringKey, with the interning /// based on the raw bytes of the str. impl Intern for &str { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Interned bytes #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct BytesKey(RawInternKey); impl BytesKey { pub fn lookup(self) -> &'static [u8] { BYTES_TABLE.lookup(self.0) } } impl fmt::Debug for BytesKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let bytes_value = self.lookup(); write!(f, "{:?}", bytes_value) } } /// An interned string #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct StringKey(RawInternKey); impl StringKey { /// Get a reference to the original str. pub fn lookup(self) -> &'static str { let bytes = BYTES_TABLE.lookup(self.0); // This is safe because the bytes we are converting originally came // from a str when we interned it: the only way to get a StringKey is // to intern an (already valid) string, so if we have a StringKey then // its bytes must be valid UTF-8. unsafe { std::str::from_utf8_unchecked(bytes) } } /// Convert the interned string key into an interned bytes key. Because /// strings intern as their raw bytes, this is an O(1) operation. /// Note the reverse (BytesKey.as_str) is a fallible operation since /// the bytes may not be valid UTF-8. pub fn as_bytes(self) -> BytesKey { BytesKey(self.0) } } impl fmt::Debug for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{:?}", str_value) } } impl fmt::Display for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{}", str_value) } } impl<'de> Deserialize<'de> for StringKey { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { Deserialize::deserialize(deserializer).map(|s: String| s.intern()) } } // Static table used in the bytes/str Intern implementations lazy_static! { static ref BYTES_TABLE: BytesTable = BytesTable::new(); } /// Similar to the generic `InternTable` but customized for sequences of raw bytes (and strings). pub struct BytesTable { data: Arc<RwLock<BytesTableData>>, } impl BytesTable { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(BytesTableData::new())), } } pub fn intern(&self, value: &[u8]) -> RawInternKey { if let Some(prev) = self.data.read().get(&value) { return prev; } let mut writer = self.data.write(); writer.intern(value) } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { self.data.read().lookup(key) } } /// BytesTableData is similar to InternTableData but customized for sequences /// of raw bytes (and notably, strings). 
struct BytesTableData { // Raw data storage, allocated in large chunks buffer: Option<&'static mut [u8]>, // Reverse mapping of index=>value, used to convert an // interned key back to (a reference to) its value items: Vec<&'static [u8]>, // Mapping of values to their interned indices table: FnvHashMap<&'static [u8], RawInternKey>, } impl BytesTableData { const BUFFER_SIZE: usize = 4096; pub fn new() -> Self { Self { buffer: Some(Self::new_buffer()), items: Default::default(), table: Default::default(), } } fn new_buffer() -> &'static mut [u8] { Box::leak(Box::new([0; Self::BUFFER_SIZE])) } pub fn get(&self, value: &[u8]) -> Option<RawInternKey>
// Copy the byte slice into 'static memory by appending it to a buffer, if there is room. // If the buffer fills up and the value is small, start over with a new buffer. // If the value is large, just give it its own memory. fn alloc(&mut self, value: &[u8]) -> &'static [u8] { let len = value.len(); let mut buffer = self.buffer.take().unwrap(); if len > buffer.len() { if len >= Self::BUFFER_SIZE / 16 { // This byte slice is so big it can just have its own memory. self.buffer = Some(buffer); return Box::leak(value.into()); } else { buffer = Self::new_buffer() } } let (mem, remaining) = buffer.split_at_mut(len); mem.copy_from_slice(value); self.buffer = Some(remaining); mem } pub fn intern(&mut self, value: &[u8]) -> RawInternKey { // If there's an existing value return it if let Some(prev) = self.get(&value) { return prev; } // Otherwise intern let key = RawInternKey::new(self.items.len()); let static_bytes = self.alloc(value); self.items.push(static_bytes); self.table.insert(static_bytes, key); key } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { let index = key.as_usize(); self.items[index] } }
{ self.table.get(value).cloned() }
identifier_body
bytes.rs
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::types::{Intern, RawInternKey}; use fnv::FnvHashMap; use lazy_static::lazy_static; use parking_lot::RwLock; use serde::{Deserialize, Deserializer}; use std::fmt; use std::sync::Arc; /// Slices of bytes intern as BytesKey impl Intern for &[u8] { type Key = BytesKey; fn intern(self) -> Self::Key { BytesKey(BYTES_TABLE.intern(self)) } } /// Owned strings intern as StringKey, with the interning /// based on the raw bytes of the string impl Intern for String { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Str (slices) intern as StringKey, with the interning /// based on the raw bytes of the str. impl Intern for &str { type Key = StringKey; fn intern(self) -> Self::Key { StringKey(BYTES_TABLE.intern(self.as_bytes())) } } /// Interned bytes #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct BytesKey(RawInternKey); impl BytesKey { pub fn lookup(self) -> &'static [u8] { BYTES_TABLE.lookup(self.0) } } impl fmt::Debug for BytesKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let bytes_value = self.lookup(); write!(f, "{:?}", bytes_value) } } /// An interned string #[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)] pub struct StringKey(RawInternKey); impl StringKey { /// Get a reference to the original str. pub fn lookup(self) -> &'static str { let bytes = BYTES_TABLE.lookup(self.0); // This is safe because the bytes we are converting originally came // from a str when we interned it: the only way to get a StringKey is // to intern an (already valid) string, so if we have a StringKey then // its bytes must be valid UTF-8. unsafe { std::str::from_utf8_unchecked(bytes) } } /// Convert the interned string key into an interned bytes key. Because /// strings intern as their raw bytes, this is an O(1) operation. /// Note the reverse (BytesKey.as_str) is a fallible operation since /// the bytes may not be valid UTF-8. pub fn as_bytes(self) -> BytesKey { BytesKey(self.0) } } impl fmt::Debug for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{:?}", str_value) } } impl fmt::Display for StringKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let str_value = self.lookup(); write!(f, "{}", str_value) } } impl<'de> Deserialize<'de> for StringKey { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { Deserialize::deserialize(deserializer).map(|s: String| s.intern()) } } // Static table used in the bytes/str Intern implementations lazy_static! { static ref BYTES_TABLE: BytesTable = BytesTable::new(); } /// Similar to the generic `InternTable` but customized for sequences of raw bytes (and strings). pub struct BytesTable { data: Arc<RwLock<BytesTableData>>, } impl BytesTable { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(BytesTableData::new())), } } pub fn intern(&self, value: &[u8]) -> RawInternKey { if let Some(prev) = self.data.read().get(&value) { return prev; } let mut writer = self.data.write(); writer.intern(value) } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { self.data.read().lookup(key) } } /// BytesTableData is similar to InternTableData but customized for sequences /// of raw bytes (and notably, strings). 
struct BytesTableData { // Raw data storage, allocated in large chunks buffer: Option<&'static mut [u8]>, // Reverse mapping of index=>value, used to convert an // interned key back to (a reference to) its value items: Vec<&'static [u8]>, // Mapping of values to their interned indices table: FnvHashMap<&'static [u8], RawInternKey>, } impl BytesTableData { const BUFFER_SIZE: usize = 4096; pub fn new() -> Self { Self { buffer: Some(Self::new_buffer()), items: Default::default(), table: Default::default(), } } fn new_buffer() -> &'static mut [u8] { Box::leak(Box::new([0; Self::BUFFER_SIZE])) } pub fn get(&self, value: &[u8]) -> Option<RawInternKey> { self.table.get(value).cloned() } // Copy the byte slice into 'static memory by appending it to a buffer, if there is room. // If the buffer fills up and the value is small, start over with a new buffer. // If the value is large, just give it its own memory. fn alloc(&mut self, value: &[u8]) -> &'static [u8] { let len = value.len(); let mut buffer = self.buffer.take().unwrap(); if len > buffer.len() { if len >= Self::BUFFER_SIZE / 16 { // This byte slice is so big it can just have its own memory. self.buffer = Some(buffer); return Box::leak(value.into()); } else
} let (mem, remaining) = buffer.split_at_mut(len); mem.copy_from_slice(value); self.buffer = Some(remaining); mem } pub fn intern(&mut self, value: &[u8]) -> RawInternKey { // If there's an existing value return it if let Some(prev) = self.get(&value) { return prev; } // Otherwise intern let key = RawInternKey::new(self.items.len()); let static_bytes = self.alloc(value); self.items.push(static_bytes); self.table.insert(static_bytes, key); key } pub fn lookup(&self, key: RawInternKey) -> &'static [u8] { let index = key.as_usize(); self.items[index] } }
{ buffer = Self::new_buffer() }
conditional_block
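The four bytes.rs records all slice up the same interner: a hash map from byte content to a small integer key, plus a Vec for the reverse mapping, so equal strings compare as integers and lookup is an array index. Below is a Python analogue of that core, minus the Rust code's chunked 'static buffer allocation, which has no Python counterpart.

class InternTable:
    def __init__(self):
        self._table = {}   # bytes -> key
        self._items = []   # key -> bytes (reverse mapping)

    def intern(self, value: bytes) -> int:
        key = self._table.get(value)
        if key is None:            # first time we see this value
            key = len(self._items)
            self._items.append(value)
            self._table[value] = key
        return key

    def lookup(self, key: int) -> bytes:
        return self._items[key]   # O(1) reverse lookup

t = InternTable()
assert t.intern(b"hello") == t.intern(b"hello")   # same bytes, same key
assert t.lookup(t.intern(b"hello")) == b"hello"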
ad-banner.component.ts
// #docregion import { Component, Input, AfterViewInit, ViewChild, ComponentFactoryResolver, OnDestroy } from '@angular/core'; import { AdDirective } from './ad.directive'; import { AdItem } from './ad-item'; import { AdComponent } from './ad.component'; @Component({ selector: 'add-banner', // #docregion ad-host template: ` <div class="ad-banner"> <h3>Advertisements</h3> <template ad-host></template> </div> ` // #enddocregion ad-host }) export class AdBannerComponent implements AfterViewInit, OnDestroy { @Input() ads: AdItem[]; currentAddIndex: number = -1; @ViewChild(AdDirective) adHost: AdDirective; subscription: any; interval: any; constructor(private _componentFactoryResolver: ComponentFactoryResolver) { } ngAfterViewInit() { this.loadComponent(); this.getAds(); }
() { clearInterval(this.interval); } loadComponent() { this.currentAddIndex = (this.currentAddIndex + 1) % this.ads.length; let adItem = this.ads[this.currentAddIndex]; let componentFactory = this._componentFactoryResolver.resolveComponentFactory(adItem.component); let viewContainerRef = this.adHost.viewContainerRef; viewContainerRef.clear(); let componentRef = viewContainerRef.createComponent(componentFactory); (<AdComponent>componentRef.instance).data = adItem.data; } getAds() { this.interval = setInterval(() => { this.loadComponent(); }, 3000); } }
ngOnDestroy
identifier_name
ad-banner.component.ts
// #docregion import { Component, Input, AfterViewInit, ViewChild, ComponentFactoryResolver, OnDestroy } from '@angular/core'; import { AdDirective } from './ad.directive'; import { AdItem } from './ad-item'; import { AdComponent } from './ad.component';
<h3>Advertisements</h3> <template ad-host></template> </div> ` // #enddocregion ad-host }) export class AdBannerComponent implements AfterViewInit, OnDestroy { @Input() ads: AdItem[]; currentAddIndex: number = -1; @ViewChild(AdDirective) adHost: AdDirective; subscription: any; interval: any; constructor(private _componentFactoryResolver: ComponentFactoryResolver) { } ngAfterViewInit() { this.loadComponent(); this.getAds(); } ngOnDestroy() { clearInterval(this.interval); } loadComponent() { this.currentAddIndex = (this.currentAddIndex + 1) % this.ads.length; let adItem = this.ads[this.currentAddIndex]; let componentFactory = this._componentFactoryResolver.resolveComponentFactory(adItem.component); let viewContainerRef = this.adHost.viewContainerRef; viewContainerRef.clear(); let componentRef = viewContainerRef.createComponent(componentFactory); (<AdComponent>componentRef.instance).data = adItem.data; } getAds() { this.interval = setInterval(() => { this.loadComponent(); }, 3000); } }
@Component({ selector: 'add-banner', // #docregion ad-host template: ` <div class="ad-banner">
random_line_split
ad-banner.component.ts
// #docregion import { Component, Input, AfterViewInit, ViewChild, ComponentFactoryResolver, OnDestroy } from '@angular/core'; import { AdDirective } from './ad.directive'; import { AdItem } from './ad-item'; import { AdComponent } from './ad.component'; @Component({ selector: 'add-banner', // #docregion ad-host template: ` <div class="ad-banner"> <h3>Advertisements</h3> <template ad-host></template> </div> ` // #enddocregion ad-host }) export class AdBannerComponent implements AfterViewInit, OnDestroy { @Input() ads: AdItem[]; currentAddIndex: number = -1; @ViewChild(AdDirective) adHost: AdDirective; subscription: any; interval: any; constructor(private _componentFactoryResolver: ComponentFactoryResolver) { } ngAfterViewInit() { this.loadComponent(); this.getAds(); } ngOnDestroy() { clearInterval(this.interval); } loadComponent() { this.currentAddIndex = (this.currentAddIndex + 1) % this.ads.length; let adItem = this.ads[this.currentAddIndex]; let componentFactory = this._componentFactoryResolver.resolveComponentFactory(adItem.component); let viewContainerRef = this.adHost.viewContainerRef; viewContainerRef.clear(); let componentRef = viewContainerRef.createComponent(componentFactory); (<AdComponent>componentRef.instance).data = adItem.data; } getAds()
}
{ this.interval = setInterval(() => { this.loadComponent(); }, 3000); }
identifier_body
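The ad-banner records reduce to two moves: advance a cursor round-robin over the ads list ((i + 1) % length, exactly as in loadComponent) and re-render on a fixed interval (setInterval in getAds, cleared in ngOnDestroy). The arithmetic in isolation, as a runnable Python sketch with an explicit loop standing in for the timer:

ads = ['ad one', 'ad two', 'ad three']
current = -1   # matches currentAddIndex's initial value in the component

def load_next():
    global current
    current = (current + 1) % len(ads)   # wraps 2 -> 0, like the TS code
    print('showing', ads[current])

for _ in range(5):   # the component drives this every 3000 ms via setInterval
    load_next()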
handler.py
# -*- coding: utf-8 -*- """signal handlers registered by the imager_profile app""" from __future__ import unicode_literals from django.conf import settings from django.db.models.signals import post_save from django.db.models.signals import pre_delete from django.dispatch import receiver from imager_profile.models import ImagerProfile import logging logger = logging.getLogger(__name__) @receiver(post_save, sender=settings.AUTH_USER_MODEL) def ensure_imager_profile(sender, **kwargs): """Create and save an ImagerProfile after every new User is created.""" if kwargs.get('created', False):
@receiver(pre_delete, sender=settings.AUTH_USER_MODEL) def remove_imager_profile(sender, **kwargs): try: kwargs['instance'].profile.delete() except (KeyError, AttributeError): msg = ( "ImagerProfile instance not deleted for {}. " "Perhaps it does not exist?" ) logger.warn(msg.format(kwargs['instance']))
try: new_profile = ImagerProfile(user=kwargs['instance']) new_profile.save() except (KeyError, ValueError): logger.error('Unable to create ImagerProfile for User instance.')
conditional_block
handler.py
# -*- coding: utf-8 -*- """signal handlers registered by the imager_profile app""" from __future__ import unicode_literals from django.conf import settings from django.db.models.signals import post_save from django.db.models.signals import pre_delete
import logging logger = logging.getLogger(__name__) @receiver(post_save, sender=settings.AUTH_USER_MODEL) def ensure_imager_profile(sender, **kwargs): """Create and save an ImagerProfile after every new User is created.""" if kwargs.get('created', False): try: new_profile = ImagerProfile(user=kwargs['instance']) new_profile.save() except (KeyError, ValueError): logger.error('Unable to create ImagerProfile for User instance.') @receiver(pre_delete, sender=settings.AUTH_USER_MODEL) def remove_imager_profile(sender, **kwargs): try: kwargs['instance'].profile.delete() except (KeyError, AttributeError): msg = ( "ImagerProfile instance not deleted for {}. " "Perhaps it does not exist?" ) logger.warn(msg.format(kwargs['instance']))
from django.dispatch import receiver from imager_profile.models import ImagerProfile
random_line_split
handler.py
# -*- coding: utf-8 -*- """signal handlers registered by the imager_profile app""" from __future__ import unicode_literals from django.conf import settings from django.db.models.signals import post_save from django.db.models.signals import pre_delete from django.dispatch import receiver from imager_profile.models import ImagerProfile import logging logger = logging.getLogger(__name__) @receiver(post_save, sender=settings.AUTH_USER_MODEL) def ensure_imager_profile(sender, **kwargs):
@receiver(pre_delete, sender=settings.AUTH_USER_MODEL) def remove_imager_profile(sender, **kwargs): try: kwargs['instance'].profile.delete() except (KeyError, AttributeError): msg = ( "ImagerProfile instance not deleted for {}. " "Perhaps it does not exist?" ) logger.warn(msg.format(kwargs['instance']))
"""Create and save an ImagerProfile after every new User is created.""" if kwargs.get('created', False): try: new_profile = ImagerProfile(user=kwargs['instance']) new_profile.save() except (KeyError, ValueError): logger.error('Unable to create ImagerProfile for User instance.')
identifier_body
handler.py
# -*- coding: utf-8 -*- """signal handlers registered by the imager_profile app""" from __future__ import unicode_literals from django.conf import settings from django.db.models.signals import post_save from django.db.models.signals import pre_delete from django.dispatch import receiver from imager_profile.models import ImagerProfile import logging logger = logging.getLogger(__name__) @receiver(post_save, sender=settings.AUTH_USER_MODEL) def
(sender, **kwargs): """Create and save an ImagerProfile after every new User is created.""" if kwargs.get('created', False): try: new_profile = ImagerProfile(user=kwargs['instance']) new_profile.save() except (KeyError, ValueError): logger.error('Unable to create ImagerProfile for User instance.') @receiver(pre_delete, sender=settings.AUTH_USER_MODEL) def remove_imager_profile(sender, **kwargs): try: kwargs['instance'].profile.delete() except (KeyError, AttributeError): msg = ( "ImagerProfile instance not deleted for {}. " "Perhaps it does not exist?" ) logger.warn(msg.format(kwargs['instance']))
ensure_imager_profile
identifier_name
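All four handler.py records carve up the same pair of Django signal receivers: on post_save of a new User, create the companion ImagerProfile; on pre_delete, remove it. The dispatch shape is easy to show without Django; in the sketch below, Signal is a stripped-down stand-in for django.dispatch.Signal, and everything else is illustrative.

class Signal:
    def __init__(self):
        self._receivers = []

    def connect(self, fn):
        self._receivers.append(fn)   # register a callback
        return fn                    # returning fn lets connect act as a decorator

    def send(self, sender, **kwargs):
        for fn in self._receivers:   # notify every registered receiver
            fn(sender, **kwargs)

post_save = Signal()

@post_save.connect
def ensure_profile(sender, **kwargs):
    if kwargs.get('created', False):   # only react to newly created instances
        print('creating profile for', kwargs['instance'])

post_save.send(sender='User', instance='alice', created=True)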
LayersSVGIcon.tsx
// This is a generated file from running the "createIcons" script. This file should not be updated manually. import { forwardRef } from "react"; import { SVGIcon, SVGIconProps } from "@react-md/icon"; export const LayersSVGIcon = forwardRef<SVGSVGElement, SVGIconProps>( function LayersSVGIcon(props, ref) { return ( <SVGIcon {...props} ref={ref}> <path d="M11.99 18.54l-7.37-5.73L3 14.07l9 7 9-7-1.63-1.27-7.38 5.74zM12 16l7.36-5.73L21 9l-9-7-9 7 1.63 1.27L12 16z" /> </SVGIcon> ); }
);
random_line_split
driver.py
# Copyright (C) 2011 Google Inc. All rights reserved. # Copyright (c) 2015, 2016 Apple Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the Google name nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import base64 import logging import re import shlex import sys import time import os from webkitpy.common.system import path from webkitpy.common.system.profiler import ProfilerFactory _log = logging.getLogger(__name__) class DriverInput(object): def
(self, test_name, timeout, image_hash, should_run_pixel_test, should_dump_jsconsolelog_in_stderr=None, args=None): self.test_name = test_name self.timeout = timeout # in ms self.image_hash = image_hash self.should_run_pixel_test = should_run_pixel_test self.should_dump_jsconsolelog_in_stderr = should_dump_jsconsolelog_in_stderr self.args = args or [] def __repr__(self): return "DriverInput(test_name='{}', timeout={}, image_hash={}, should_run_pixel_test={}, should_dump_jsconsolelog_in_stderr={}'".format(self.test_name, self.timeout, self.image_hash, self.should_run_pixel_test, self.should_dump_jsconsolelog_in_stderr) class DriverOutput(object): """Groups information about a output from driver for easy passing and post-processing of data.""" metrics_patterns = [] metrics_patterns.append((re.compile('at \(-?[0-9]+,-?[0-9]+\) *'), '')) metrics_patterns.append((re.compile('size -?[0-9]+x-?[0-9]+ *'), '')) metrics_patterns.append((re.compile('text run width -?[0-9]+: '), '')) metrics_patterns.append((re.compile('text run width -?[0-9]+ [a-zA-Z ]+: '), '')) metrics_patterns.append((re.compile('RenderButton {BUTTON} .*'), 'RenderButton {BUTTON}')) metrics_patterns.append((re.compile('RenderImage {INPUT} .*'), 'RenderImage {INPUT}')) metrics_patterns.append((re.compile('RenderBlock {INPUT} .*'), 'RenderBlock {INPUT}')) metrics_patterns.append((re.compile('RenderTextControl {INPUT} .*'), 'RenderTextControl {INPUT}')) metrics_patterns.append((re.compile('\([0-9]+px'), 'px')) metrics_patterns.append((re.compile(' *" *\n +" *'), ' ')) metrics_patterns.append((re.compile('" +$'), '"')) metrics_patterns.append((re.compile('- '), '-')) metrics_patterns.append((re.compile('\n( *)"\s+'), '\n\g<1>"')) metrics_patterns.append((re.compile('\s+"\n'), '"\n')) metrics_patterns.append((re.compile('scrollWidth [0-9]+'), 'scrollWidth')) metrics_patterns.append((re.compile('scrollHeight [0-9]+'), 'scrollHeight')) metrics_patterns.append((re.compile('scrollX [0-9]+'), 'scrollX')) metrics_patterns.append((re.compile('scrollY [0-9]+'), 'scrollY')) metrics_patterns.append((re.compile('scrolled to [0-9]+,[0-9]+'), 'scrolled')) def __init__(self, text, image, image_hash, audio, crash=False, test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??', crashed_pid=None, crash_log=None, pid=None): # FIXME: Args could be renamed to better clarify what they do. self.text = text self.image = image # May be empty-string if the test crashes. self.image_hash = image_hash self.image_diff = None # image_diff gets filled in after construction. self.audio = audio # Binary format is port-dependent. self.crash = crash self.crashed_process_name = crashed_process_name self.crashed_pid = crashed_pid self.crash_log = crash_log self.test_time = test_time self.measurements = measurements self.timeout = timeout self.error = error # stderr output self.pid = pid def has_stderr(self): return bool(self.error) def strip_metrics(self): self.strip_patterns(self.metrics_patterns) def strip_patterns(self, patterns): if not self.text: return for pattern in patterns: self.text = re.sub(pattern[0], pattern[1], self.text) def strip_stderror_patterns(self, patterns): if not self.error: return for pattern in patterns: self.error = re.sub(pattern[0], pattern[1], self.error) class Driver(object): """object for running test(s) using DumpRenderTree/WebKitTestRunner.""" def __init__(self, port, worker_number, pixel_tests, no_timeout=False): """Initialize a Driver to subsequently run tests. 
Typically this routine will spawn DumpRenderTree in a config ready for subsequent input. port - reference back to the port object. worker_number - identifier for a particular worker/driver instance """ self._port = port self._worker_number = worker_number self._no_timeout = no_timeout self._driver_tempdir = None self._driver_user_directory_suffix = None self._driver_user_cache_directory = None # WebKitTestRunner can report back subprocess crashes by printing # "#CRASHED - PROCESSNAME". Since those can happen at any time and ServerProcess # won't be aware of them (since the actual tool didn't crash, just a subprocess) # we record the crashed subprocess name here. self._crashed_process_name = None self._crashed_pid = None self._driver_timed_out = False # stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated # stderr output, as well as if we've seen #EOF on this driver instance. # FIXME: We should probably remove _read_first_block and _read_optional_image_block and # instead scope these locally in run_test. self.error_from_test = str() self.err_seen_eof = False self._server_name = self._port.driver_name() self._server_process = None self._measurements = {} if self._port.get_option("profile"): profiler_name = self._port.get_option("profiler") self._profiler = ProfilerFactory.create_profiler(self._port.host, self._port._path_to_driver(), self._port.results_directory(), profiler_name) else: self._profiler = None self.web_platform_test_server_doc_root = self._port.web_platform_test_server_doc_root() self.web_platform_test_server_base_url = self._port.web_platform_test_server_base_url() def __del__(self): self.stop() def run_test(self, driver_input, stop_when_done): """Run a single test and return the results. Note that it is okay if a test times out or crashes and leaves the driver in an indeterminate state. The upper layers of the program are responsible for cleaning up and ensuring things are okay. Returns a DriverOutput object. """ start_time = time.time() self.start(driver_input.should_run_pixel_test, driver_input.args) test_begin_time = time.time() self._driver_timed_out = False self._crash_report_from_driver = None self.error_from_test = str() self.err_seen_eof = False command = self._command_from_driver_input(driver_input) # Certain timeouts are detected by the tool itself; tool detection is better, # because results contain partial output in this case. Make script timeout longer # by 5 seconds to avoid racing for which timeout is detected first. # FIXME: It's not the job of the driver to decide what the timeouts should be. # Move the additional timeout to driver_input. if self._no_timeout: deadline = test_begin_time + 60 * 60 * 24 * 7 # 7 days. Using sys.maxint causes a hang. else: deadline = test_begin_time + int(driver_input.timeout) / 1000.0 + 5 self._server_process.write(command) text, audio = self._read_first_block(deadline, driver_input.test_name) # First block is either text or audio image, actual_image_hash = self._read_optional_image_block(deadline, driver_input.test_name) # The second (optional) block is image data. crashed = self.has_crashed() timed_out = self._server_process.timed_out driver_timed_out = self._driver_timed_out pid = self._server_process.pid() if stop_when_done or crashed or timed_out: # We call stop() even if we crashed or timed out in order to get any remaining stdout/stderr output. # In the timeout case, we kill the hung process as well. 
out, err = self._server_process.stop(self._port.driver_stop_timeout() if stop_when_done else 0.0) if out: text += out if err: self.error_from_test += err self._server_process = None crash_log = None if self._crash_report_from_driver: crash_log = self._crash_report_from_driver elif crashed: self.error_from_test, crash_log = self._get_crash_log(text, self.error_from_test, newer_than=start_time) # If we don't find a crash log use a placeholder error message instead. if not crash_log: pid_str = str(self._crashed_pid) if self._crashed_pid else "unknown pid" crash_log = 'No crash log found for %s:%s.\n' % (self._crashed_process_name, pid_str) # Print stdout and stderr to the placeholder crash log; we want as much context as possible. if self.error_from_test: crash_log += '\nstdout:\n%s\nstderr:\n%s\n' % (text, self.error_from_test) return DriverOutput(text, image, actual_image_hash, audio, crash=crashed, test_time=time.time() - test_begin_time, measurements=self._measurements, timeout=timed_out or driver_timed_out, error=self.error_from_test, crashed_process_name=self._crashed_process_name, crashed_pid=self._crashed_pid, crash_log=crash_log, pid=pid) def _get_crash_log(self, stdout, stderr, newer_than): return self._port._get_crash_log(self._crashed_process_name, self._crashed_pid, stdout, stderr, newer_than) def _command_wrapper(self): # Hook for injecting valgrind or other runtime instrumentation, used by e.g. tools/valgrind/valgrind_tests.py. wrapper_arguments = [] if self._profiler: wrapper_arguments = self._profiler.wrapper_arguments() if self._port.get_option('wrapper'): return shlex.split(self._port.get_option('wrapper')) + wrapper_arguments return wrapper_arguments HTTP_DIR = "http/tests/" HTTP_LOCAL_DIR = "http/tests/local/" def is_http_test(self, test_name): return test_name.startswith(self.HTTP_DIR) and not test_name.startswith(self.HTTP_LOCAL_DIR) def is_web_platform_test(self, test_name): return test_name.startswith(self.web_platform_test_server_doc_root) def test_to_uri(self, test_name): """Convert a test name to a URI.""" if self.is_web_platform_test(test_name): return self.web_platform_test_server_base_url + test_name[len(self.web_platform_test_server_doc_root):] if not self.is_http_test(test_name): return path.abspath_to_uri(self._port.host.platform, self._port.abspath_for_test(test_name)) relative_path = test_name[len(self.HTTP_DIR):] # TODO(dpranke): remove the SSL reference? if relative_path.startswith("ssl/"): return "https://127.0.0.1:8443/" + relative_path return "http://127.0.0.1:8000/" + relative_path def uri_to_test(self, uri): """Return the base layout test name for a given URI. This returns the test name for a given URI, e.g., if you passed in "file:///src/LayoutTests/fast/html/keygen.html" it would return "fast/html/keygen.html". 
""" if uri.startswith("file:///"): prefix = path.abspath_to_uri(self._port.host.platform, self._port.layout_tests_dir()) if not prefix.endswith('/'): prefix += '/' return uri[len(prefix):] if uri.startswith(self.web_platform_test_server_base_url): return uri.replace(self.web_platform_test_server_base_url, self.web_platform_test_server_doc_root) if uri.startswith("http://"): return uri.replace('http://127.0.0.1:8000/', self.HTTP_DIR) if uri.startswith("https://"): return uri.replace('https://127.0.0.1:8443/', self.HTTP_DIR) raise NotImplementedError('unknown url type: %s' % uri) def has_crashed(self): if self._server_process is None: return False if self._crashed_process_name: return True if self._server_process.has_crashed(): self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() return True return False def start(self, pixel_tests, per_test_args): # FIXME: Callers shouldn't normally call this, since this routine # may not be specifying the correct combination of pixel test and # per_test args. # # The only reason we have this routine at all is so the perftestrunner # can pause before running a test; it might be better to push that # into run_test() directly. if not self._server_process: self._start(pixel_tests, per_test_args) self._run_post_start_tasks() def _append_environment_variable_path(self, environment, variable, path): if variable in environment: environment[variable] = environment[variable] + os.pathsep + path else: environment[variable] = path def _setup_environ_for_driver(self, environment): build_root_path = str(self._port._build_path()) self._append_environment_variable_path(environment, 'DYLD_LIBRARY_PATH', build_root_path) self._append_environment_variable_path(environment, '__XPC_DYLD_LIBRARY_PATH', build_root_path) self._append_environment_variable_path(environment, 'DYLD_FRAMEWORK_PATH', build_root_path) self._append_environment_variable_path(environment, '__XPC_DYLD_FRAMEWORK_PATH', build_root_path) # Use an isolated temp directory that can be deleted after testing (especially important on Mac, as # CoreMedia disk cache is in the temp directory). environment['TMPDIR'] = str(self._driver_tempdir) environment['DIRHELPER_USER_DIR_SUFFIX'] = self._driver_user_directory_suffix # Put certain normally persistent files into the temp directory (e.g. IndexedDB storage). if sys.platform == 'cygwin': environment['DUMPRENDERTREE_TEMP'] = path.cygpath(str(self._driver_tempdir)) else: environment['DUMPRENDERTREE_TEMP'] = str(self._driver_tempdir) environment['LOCAL_RESOURCE_ROOT'] = str(self._port.layout_tests_dir()) environment['ASAN_OPTIONS'] = "allocator_may_return_null=1" environment['__XPC_ASAN_OPTIONS'] = environment['ASAN_OPTIONS'] if 'WEBKIT_OUTPUTDIR' in os.environ: environment['WEBKIT_OUTPUTDIR'] = os.environ['WEBKIT_OUTPUTDIR'] if self._profiler: environment = self._profiler.adjusted_environment(environment) return environment def _setup_environ_for_test(self): environment = self._port.setup_environ_for_server(self._server_name) environment = self._setup_environ_for_driver(environment) return environment def _start(self, pixel_tests, per_test_args): self.stop() # Each driver process should be using individual directories under _driver_tempdir (which is deleted when stopping), # however some subsystems on some platforms could end up using process default ones. 
self._port._clear_global_caches_and_temporary_files() self._driver_tempdir = self._port._driver_tempdir() self._driver_user_directory_suffix = os.path.basename(str(self._driver_tempdir)) user_cache_directory = self._port._path_to_user_cache_directory(self._driver_user_directory_suffix) if user_cache_directory: self._port._filesystem.maybe_make_directory(user_cache_directory) self._driver_user_cache_directory = user_cache_directory environment = self._setup_environ_for_test() self._crashed_process_name = None self._crashed_pid = None self._server_process = self._port._test_runner_process_constructor(self._port, self._server_name, self.cmd_line(pixel_tests, per_test_args), environment, worker_number=self._worker_number) self._server_process.start() def _run_post_start_tasks(self): # Remote drivers may override this to delay post-start tasks until the server has ack'd. if self._profiler: self._profiler.attach_to_pid(self._pid_on_target()) def _pid_on_target(self): # Remote drivers will override this method to return the pid on the device. return self._server_process.pid() def stop(self): if self._server_process: self._server_process.stop(self._port.driver_stop_timeout()) self._server_process = None if self._profiler: self._profiler.profile_after_exit() if self._driver_tempdir: self._port._filesystem.rmtree(str(self._driver_tempdir)) self._driver_tempdir = None if self._driver_user_cache_directory: self._port._filesystem.rmtree(self._driver_user_cache_directory) self._driver_user_cache_directory = None def cmd_line(self, pixel_tests, per_test_args): cmd = self._command_wrapper() cmd.append(self._port._path_to_driver()) if self._port.get_option('gc_between_tests'): cmd.append('--gc-between-tests') if self._port.get_option('complex_text'): cmd.append('--complex-text') if self._port.get_option('accelerated_drawing'): cmd.append('--accelerated-drawing') if self._port.get_option('remote_layer_tree'): cmd.append('--remote-layer-tree') if self._port.get_option('threaded'): cmd.append('--threaded') if self._no_timeout: cmd.append('--no-timeout') for allowed_host in self._port.allowed_hosts(): cmd.append('--allowed-host') cmd.append(allowed_host) cmd.extend(self._port.get_option('additional_drt_flag', [])) cmd.extend(self._port.additional_drt_flag()) cmd.extend(per_test_args) cmd.append('-') return cmd def _check_for_driver_timeout(self, out_line): if out_line.startswith("#PID UNRESPONSIVE - "): match = re.match('#PID UNRESPONSIVE - (\S+)', out_line) child_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', out_line) child_process_pid = int(match.group(1)) if match else None err_line = 'Wait on notifyDone timed out, process ' + child_process_name + ' pid = ' + str(child_process_pid) self.error_from_test += err_line _log.debug(err_line) if self._port.get_option("sample_on_timeout"): self._port.sample_process(child_process_name, child_process_pid) if out_line == "FAIL: Timed out waiting for notifyDone to be called\n": self._driver_timed_out = True def _check_for_address_sanitizer_violation(self, error_line): if "ERROR: AddressSanitizer" in error_line: return True def _check_for_driver_crash_or_unresponsiveness(self, error_line): crashed_check = error_line.rstrip('\r\n') if crashed_check == "#CRASHED": self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() return True elif error_line.startswith("#CRASHED - "): match = re.match('#CRASHED - (\S+)', error_line) self._crashed_process_name = match.group(1) if match else 
'WebProcess' match = re.search('pid (\d+)', error_line) self._crashed_pid = int(match.group(1)) if match else None _log.debug('%s crash, pid = %s' % (self._crashed_process_name, str(self._crashed_pid))) return True elif error_line.startswith("#PROCESS UNRESPONSIVE - "): match = re.match('#PROCESS UNRESPONSIVE - (\S+)', error_line) child_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', error_line) child_process_pid = int(match.group(1)) if match else None _log.debug('%s is unresponsive, pid = %s' % (child_process_name, str(child_process_pid))) self._driver_timed_out = True if child_process_pid: self._port.sample_process(child_process_name, child_process_pid) self.error_from_test += error_line self._server_process.write('#SAMPLE FINISHED\n', True) # Must be able to ignore a broken pipe here, target process may already be closed. return True return self.has_crashed() def _command_from_driver_input(self, driver_input): # FIXME: performance tests pass in full URLs instead of test names. if driver_input.test_name.startswith('http://') or driver_input.test_name.startswith('https://') or driver_input.test_name == ('about:blank'): command = driver_input.test_name elif self.is_web_platform_test(driver_input.test_name) or (self.is_http_test(driver_input.test_name) and (self._port.get_option('webkit_test_runner') or sys.platform == "cygwin")): command = self.test_to_uri(driver_input.test_name) command += "'--absolutePath'" command += self._port.abspath_for_test(driver_input.test_name) else: command = self._port.abspath_for_test(driver_input.test_name) if sys.platform == 'cygwin': command = path.cygpath(command) assert not driver_input.image_hash or driver_input.should_run_pixel_test # ' is the separator between arguments. 
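        # Hedged example (hypothetical values): a pixel test with a 30-second per-test
        # timeout and an expected image hash is written to the tool as the single line
        #   /abs/LayoutTests/fast/dom/example.html'--timeout'30000'--pixel-test'a1b2c3d4...
        # followed by a newline; the tool splits the arguments on the apostrophes.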
if self._port.supports_per_test_timeout(): command += "'--timeout'%s" % driver_input.timeout if driver_input.should_run_pixel_test: command += "'--pixel-test" if driver_input.should_dump_jsconsolelog_in_stderr: command += "'--dump-jsconsolelog-in-stderr" if driver_input.image_hash: command += "'" + driver_input.image_hash return command + "\n" def _read_first_block(self, deadline, test_name): # returns (text_content, audio_content) block = self._read_block(deadline, test_name) if block.malloc: self._measurements['Malloc'] = float(block.malloc) if block.js_heap: self._measurements['JSHeap'] = float(block.js_heap) if block.content_type == 'audio/wav': return (None, block.decoded_content) return (block.decoded_content, None) def _read_optional_image_block(self, deadline, test_name): # returns (image, actual_image_hash) block = self._read_block(deadline, test_name, wait_for_stderr_eof=True) if block.content and block.content_type == 'image/png': return (block.decoded_content, block.content_hash) return (None, block.content_hash) def _read_header(self, block, line, header_text, header_attr, header_filter=None): if line.startswith(header_text) and getattr(block, header_attr) is None: value = line.split()[1] if header_filter: value = header_filter(value) setattr(block, header_attr, value) return True return False def _process_stdout_line(self, block, line): if (self._read_header(block, line, 'Content-Type: ', 'content_type') or self._read_header(block, line, 'Content-Transfer-Encoding: ', 'encoding') or self._read_header(block, line, 'Content-Length: ', '_content_length', int) or self._read_header(block, line, 'ActualHash: ', 'content_hash') or self._read_header(block, line, 'DumpMalloc: ', 'malloc') or self._read_header(block, line, 'DumpJSHeap: ', 'js_heap')): return # Note, we're not reading ExpectedHash: here, but we could. # If the line wasn't a header, we just append it to the content. block.content += line def _strip_eof(self, line): if line and line.endswith("#EOF\n"): return line[:-5], True return line, False def _read_block(self, deadline, test_name, wait_for_stderr_eof=False): block = ContentBlock() out_seen_eof = False asan_violation_detected = False while not self.has_crashed(): if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof): break if self.err_seen_eof: out_line = self._server_process.read_stdout_line(deadline) err_line = None elif out_seen_eof: out_line = None err_line = self._server_process.read_stderr_line(deadline) else: out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(deadline) if self._server_process.timed_out or self.has_crashed(): break if out_line: assert not out_seen_eof out_line, out_seen_eof = self._strip_eof(out_line) if err_line: assert not self.err_seen_eof err_line, self.err_seen_eof = self._strip_eof(err_line) if out_line: self._check_for_driver_timeout(out_line) if out_line[-1] != "\n": _log.error(" %s -> Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug." % test_name) content_length_before_header_check = block._content_length self._process_stdout_line(block, out_line) # FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header. # Don't wait until we're done with headers, just read the binary blob right now. 
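                    # Illustration (hypothetical values): once a header line such as
                    # "Content-Length: 12345" has been parsed, the next 12345 bytes on
                    # stdout are the raw, possibly binary content, so they are consumed
                    # in one bulk read below instead of line by line.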
if content_length_before_header_check != block._content_length: block.content = self._server_process.read_stdout(deadline, block._content_length) if err_line: if self._check_for_driver_crash_or_unresponsiveness(err_line): break elif self._check_for_address_sanitizer_violation(err_line): asan_violation_detected = True self._crash_report_from_driver = "" # ASan report starts with a nondescript line, we only detect the second line. end_of_previous_error_line = self.error_from_test.rfind('\n', 0, -1) if end_of_previous_error_line > 0: self.error_from_test = self.error_from_test[:end_of_previous_error_line] else: self.error_from_test = "" # Symbolication can take a very long time, give it 10 extra minutes to finish. # FIXME: This can likely be removed once <rdar://problem/18701447> is fixed. deadline += 10 * 60 * 1000 if asan_violation_detected: self._crash_report_from_driver += err_line else: self.error_from_test += err_line if asan_violation_detected and not self._crashed_process_name: self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() block.decode_content() return block @staticmethod def check_driver(port): # This checks if the required system dependencies for the driver are met. # Since this is the generic class implementation, just return True. return True class ContentBlock(object): def __init__(self): self.content_type = None self.encoding = None self.content_hash = None self._content_length = None # Content is treated as binary data even though the text output is usually UTF-8. self.content = str() # FIXME: Should be bytearray() once we require Python 2.6. self.decoded_content = None self.malloc = None self.js_heap = None def decode_content(self): if self.encoding == 'base64' and self.content is not None: self.decoded_content = base64.b64decode(self.content) else: self.decoded_content = self.content class DriverProxy(object): """A wrapper for managing two Driver instances, one with pixel tests and one without. This allows us to handle plain text tests and ref tests with a single driver.""" def __init__(self, port, worker_number, driver_instance_constructor, pixel_tests, no_timeout): self._port = port self._worker_number = worker_number self._driver_instance_constructor = driver_instance_constructor self._no_timeout = no_timeout # FIXME: We shouldn't need to create a driver until we actually run a test. self._driver = self._make_driver(pixel_tests) self._driver_cmd_line = None def _make_driver(self, pixel_tests): return self._driver_instance_constructor(self._port, self._worker_number, pixel_tests, self._no_timeout) # FIXME: this should be a @classmethod (or implemented on Port instead). def is_http_test(self, test_name): return self._driver.is_http_test(test_name) # FIXME: this should be a @classmethod (or implemented on Port instead). def test_to_uri(self, test_name): return self._driver.test_to_uri(test_name) # FIXME: this should be a @classmethod (or implemented on Port instead). 
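    # The URI helpers just delegate to the wrapped driver; run_test() below holds the
    # proxy's only real logic, recreating the driver whenever the effective command
    # line (pixel-test mode or per-test args) changes between tests.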
def uri_to_test(self, uri): return self._driver.uri_to_test(uri) def run_test(self, driver_input, stop_when_done): pixel_tests_needed = driver_input.should_run_pixel_test cmd_line_key = self._cmd_line_as_key(pixel_tests_needed, driver_input.args) if cmd_line_key != self._driver_cmd_line: self._driver.stop() self._driver = self._make_driver(pixel_tests_needed) self._driver_cmd_line = cmd_line_key return self._driver.run_test(driver_input, stop_when_done) def has_crashed(self): return self._driver.has_crashed() def stop(self): self._driver.stop() # FIXME: this should be a @classmethod (or implemented on Port instead). def cmd_line(self, pixel_tests=None, per_test_args=None): return self._driver.cmd_line(pixel_tests, per_test_args or []) def _cmd_line_as_key(self, pixel_tests, per_test_args): return ' '.join(self.cmd_line(pixel_tests, per_test_args))
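
To make the name/URI translation above concrete, here is a minimal, self-contained sketch of what test_to_uri() and uri_to_test() compute. The doc root, base URL, and layout-tests directory are hypothetical stand-ins for values the Port normally supplies, and the http/tests/local/ special case is left out for brevity; this is an illustration, not the production implementation.

# Standalone sketch (assumed values; the real ones come from the Port object).
HTTP_DIR = "http/tests/"
WPT_DOC_ROOT = "imported/w3c/web-platform-tests/"
WPT_BASE_URL = "http://localhost:8800/"
LAYOUT_TESTS_DIR = "/src/LayoutTests"

def test_to_uri(test_name):
    # Web platform tests are served by their own server.
    if test_name.startswith(WPT_DOC_ROOT):
        return WPT_BASE_URL + test_name[len(WPT_DOC_ROOT):]
    # Non-HTTP tests load straight from disk.
    if not test_name.startswith(HTTP_DIR):
        return "file://" + LAYOUT_TESTS_DIR + "/" + test_name
    relative_path = test_name[len(HTTP_DIR):]
    if relative_path.startswith("ssl/"):
        return "https://127.0.0.1:8443/" + relative_path
    return "http://127.0.0.1:8000/" + relative_path

def uri_to_test(uri):
    if uri.startswith("file:///"):
        return uri[len("file://" + LAYOUT_TESTS_DIR + "/"):]
    if uri.startswith(WPT_BASE_URL):
        return WPT_DOC_ROOT + uri[len(WPT_BASE_URL):]
    if uri.startswith("http://"):
        return uri.replace("http://127.0.0.1:8000/", HTTP_DIR)
    if uri.startswith("https://"):
        return uri.replace("https://127.0.0.1:8443/", HTTP_DIR)
    raise NotImplementedError("unknown url type: %s" % uri)

if __name__ == "__main__":
    # Round-trip check over the three mapping branches.
    for name in ("fast/html/keygen.html",
                 "http/tests/xmlhttprequest/basic.html",
                 "http/tests/ssl/mixed-content.html"):
        assert uri_to_test(test_to_uri(name)) == name, name
    print("round-trip OK")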
__init__
identifier_name
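
The #EOF-delimited block protocol that _read_block() and ContentBlock implement can also be exercised in isolation. Below is a hedged, self-contained sketch under simplifying assumptions: input arrives as a list of already-read stdout lines, only two of the recognized headers are handled, and parse_block and its demo input are invented names used purely for illustration.

import base64

# Minimal stand-in for ContentBlock: parse one driver output block from a list of
# stdout lines (hypothetical input; the real code reads from the server process).
def parse_block(lines):
    block = {"content_type": None, "encoding": None, "content": ""}
    headers = {"Content-Type: ": "content_type",
               "Content-Transfer-Encoding: ": "encoding"}
    for line in lines:
        # Mirror _strip_eof(): "#EOF" terminates the block, keeping any prefix.
        if line.endswith("#EOF\n"):
            line = line[:-5]
            if line:
                block["content"] += line
            break
        for prefix, attr in headers.items():
            if line.startswith(prefix) and block[attr] is None:
                block[attr] = line.split()[1]
                break
        else:
            # Non-header lines accumulate as content.
            block["content"] += line
    # Mirror decode_content(): base64 blocks are decoded at the end.
    if block["encoding"] == "base64":
        block["content"] = base64.b64decode(block["content"])
    return block

demo = ["Content-Type: text/plain\n", "hello layout test\n", "#EOF\n"]
print(parse_block(demo))
# -> {'content_type': 'text/plain', 'encoding': None, 'content': 'hello layout test\n'}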
if self._server_process.timed_out or self.has_crashed(): break
random_line_split
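The driver record above serializes each test invocation into a single stdin line in _command_from_driver_input, using a single quote (') as the field separator, as the inline comment notes. A minimal standalone sketch of that wire format, assuming illustrative helper names (build_command and parse_command are not WebKit APIs):

# Sketch only: the "'"-separated stdin command format used by
# _command_from_driver_input; these helpers are illustrative, not WebKit code.
def build_command(test_path, timeout_ms=None, pixel_test=False, image_hash=None):
    fields = [test_path]
    if timeout_ms is not None:
        fields.extend(['--timeout', str(timeout_ms)])  # mirrors "'--timeout'%s"
    if pixel_test:
        fields.append('--pixel-test')
    if image_hash:
        fields.append(image_hash)
    return "'".join(fields) + '\n'  # one newline-terminated line per test

def parse_command(line):
    return line.rstrip('\n').split("'")

cmd = build_command('/abs/LayoutTests/fast/html/keygen.html', timeout_ms=30000, pixel_test=True)
assert parse_command(cmd) == ['/abs/LayoutTests/fast/html/keygen.html', '--timeout', '30000', '--pixel-test']

The same framing is what the tool on the other end of the pipe splits apart before running the test.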
driver.py
# Copyright (C) 2011 Google Inc. All rights reserved. # Copyright (c) 2015, 2016 Apple Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the Google name nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import base64 import logging import re import shlex import sys import time import os from webkitpy.common.system import path from webkitpy.common.system.profiler import ProfilerFactory _log = logging.getLogger(__name__) class DriverInput(object): def __init__(self, test_name, timeout, image_hash, should_run_pixel_test, should_dump_jsconsolelog_in_stderr=None, args=None): self.test_name = test_name self.timeout = timeout # in ms self.image_hash = image_hash self.should_run_pixel_test = should_run_pixel_test self.should_dump_jsconsolelog_in_stderr = should_dump_jsconsolelog_in_stderr self.args = args or [] def __repr__(self): return "DriverInput(test_name='{}', timeout={}, image_hash={}, should_run_pixel_test={}, should_dump_jsconsolelog_in_stderr={})".format(self.test_name, self.timeout, self.image_hash, self.should_run_pixel_test, self.should_dump_jsconsolelog_in_stderr) class DriverOutput(object): """Groups information about an output from the driver for easy passing and post-processing of data.""" metrics_patterns = [] metrics_patterns.append((re.compile('at \(-?[0-9]+,-?[0-9]+\) *'), '')) metrics_patterns.append((re.compile('size -?[0-9]+x-?[0-9]+ *'), '')) metrics_patterns.append((re.compile('text run width -?[0-9]+: '), '')) metrics_patterns.append((re.compile('text run width -?[0-9]+ [a-zA-Z ]+: '), '')) metrics_patterns.append((re.compile('RenderButton {BUTTON} .*'), 'RenderButton {BUTTON}')) metrics_patterns.append((re.compile('RenderImage {INPUT} .*'), 'RenderImage {INPUT}')) metrics_patterns.append((re.compile('RenderBlock {INPUT} .*'), 'RenderBlock {INPUT}')) metrics_patterns.append((re.compile('RenderTextControl {INPUT} .*'), 'RenderTextControl {INPUT}')) metrics_patterns.append((re.compile('\([0-9]+px'), 'px')) metrics_patterns.append((re.compile(' *" *\n +" *'), ' ')) metrics_patterns.append((re.compile('" +$'), '"')) metrics_patterns.append((re.compile('- '), '-')) metrics_patterns.append((re.compile('\n( *)"\s+'), '\n\g<1>"'))
metrics_patterns.append((re.compile('\s+"\n'), '"\n')) metrics_patterns.append((re.compile('scrollWidth [0-9]+'), 'scrollWidth')) metrics_patterns.append((re.compile('scrollHeight [0-9]+'), 'scrollHeight')) metrics_patterns.append((re.compile('scrollX [0-9]+'), 'scrollX')) metrics_patterns.append((re.compile('scrollY [0-9]+'), 'scrollY')) metrics_patterns.append((re.compile('scrolled to [0-9]+,[0-9]+'), 'scrolled')) def __init__(self, text, image, image_hash, audio, crash=False, test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??', crashed_pid=None, crash_log=None, pid=None): # FIXME: Args could be renamed to better clarify what they do. self.text = text self.image = image # May be empty-string if the test crashes. self.image_hash = image_hash self.image_diff = None # image_diff gets filled in after construction. self.audio = audio # Binary format is port-dependent. self.crash = crash self.crashed_process_name = crashed_process_name self.crashed_pid = crashed_pid self.crash_log = crash_log self.test_time = test_time self.measurements = measurements self.timeout = timeout self.error = error # stderr output self.pid = pid def has_stderr(self): return bool(self.error) def strip_metrics(self): self.strip_patterns(self.metrics_patterns) def strip_patterns(self, patterns): if not self.text: return for pattern in patterns: self.text = re.sub(pattern[0], pattern[1], self.text) def strip_stderror_patterns(self, patterns): if not self.error: return for pattern in patterns: self.error = re.sub(pattern[0], pattern[1], self.error) class Driver(object): """object for running test(s) using DumpRenderTree/WebKitTestRunner.""" def __init__(self, port, worker_number, pixel_tests, no_timeout=False): """Initialize a Driver to subsequently run tests. Typically this routine will spawn DumpRenderTree in a config ready for subsequent input. port - reference back to the port object. worker_number - identifier for a particular worker/driver instance """ self._port = port self._worker_number = worker_number self._no_timeout = no_timeout self._driver_tempdir = None self._driver_user_directory_suffix = None self._driver_user_cache_directory = None # WebKitTestRunner can report back subprocess crashes by printing # "#CRASHED - PROCESSNAME". Since those can happen at any time and ServerProcess # won't be aware of them (since the actual tool didn't crash, just a subprocess) # we record the crashed subprocess name here. self._crashed_process_name = None self._crashed_pid = None self._driver_timed_out = False # stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated # stderr output, as well as if we've seen #EOF on this driver instance. # FIXME: We should probably remove _read_first_block and _read_optional_image_block and # instead scope these locally in run_test. 
self.error_from_test = str() self.err_seen_eof = False self._server_name = self._port.driver_name() self._server_process = None self._measurements = {} if self._port.get_option("profile"): profiler_name = self._port.get_option("profiler") self._profiler = ProfilerFactory.create_profiler(self._port.host, self._port._path_to_driver(), self._port.results_directory(), profiler_name) else: self._profiler = None self.web_platform_test_server_doc_root = self._port.web_platform_test_server_doc_root() self.web_platform_test_server_base_url = self._port.web_platform_test_server_base_url() def __del__(self): self.stop() def run_test(self, driver_input, stop_when_done): """Run a single test and return the results. Note that it is okay if a test times out or crashes and leaves the driver in an indeterminate state. The upper layers of the program are responsible for cleaning up and ensuring things are okay. Returns a DriverOutput object. """ start_time = time.time() self.start(driver_input.should_run_pixel_test, driver_input.args) test_begin_time = time.time() self._driver_timed_out = False self._crash_report_from_driver = None self.error_from_test = str() self.err_seen_eof = False command = self._command_from_driver_input(driver_input) # Certain timeouts are detected by the tool itself; tool detection is better, # because results contain partial output in this case. Make script timeout longer # by 5 seconds to avoid racing for which timeout is detected first. # FIXME: It's not the job of the driver to decide what the timeouts should be. # Move the additional timeout to driver_input. if self._no_timeout: deadline = test_begin_time + 60 * 60 * 24 * 7 # 7 days. Using sys.maxint causes a hang. else: deadline = test_begin_time + int(driver_input.timeout) / 1000.0 + 5 self._server_process.write(command) text, audio = self._read_first_block(deadline, driver_input.test_name) # First block is either text or audio image, actual_image_hash = self._read_optional_image_block(deadline, driver_input.test_name) # The second (optional) block is image data. crashed = self.has_crashed() timed_out = self._server_process.timed_out driver_timed_out = self._driver_timed_out pid = self._server_process.pid() if stop_when_done or crashed or timed_out: # We call stop() even if we crashed or timed out in order to get any remaining stdout/stderr output. # In the timeout case, we kill the hung process as well. out, err = self._server_process.stop(self._port.driver_stop_timeout() if stop_when_done else 0.0) if out: text += out if err: self.error_from_test += err self._server_process = None crash_log = None if self._crash_report_from_driver: crash_log = self._crash_report_from_driver elif crashed: self.error_from_test, crash_log = self._get_crash_log(text, self.error_from_test, newer_than=start_time) # If we don't find a crash log use a placeholder error message instead. if not crash_log: pid_str = str(self._crashed_pid) if self._crashed_pid else "unknown pid" crash_log = 'No crash log found for %s:%s.\n' % (self._crashed_process_name, pid_str) # Print stdout and stderr to the placeholder crash log; we want as much context as possible. 
if self.error_from_test: crash_log += '\nstdout:\n%s\nstderr:\n%s\n' % (text, self.error_from_test) return DriverOutput(text, image, actual_image_hash, audio, crash=crashed, test_time=time.time() - test_begin_time, measurements=self._measurements, timeout=timed_out or driver_timed_out, error=self.error_from_test, crashed_process_name=self._crashed_process_name, crashed_pid=self._crashed_pid, crash_log=crash_log, pid=pid) def _get_crash_log(self, stdout, stderr, newer_than): return self._port._get_crash_log(self._crashed_process_name, self._crashed_pid, stdout, stderr, newer_than) def _command_wrapper(self): # Hook for injecting valgrind or other runtime instrumentation, used by e.g. tools/valgrind/valgrind_tests.py. wrapper_arguments = [] if self._profiler: wrapper_arguments = self._profiler.wrapper_arguments() if self._port.get_option('wrapper'): return shlex.split(self._port.get_option('wrapper')) + wrapper_arguments return wrapper_arguments HTTP_DIR = "http/tests/" HTTP_LOCAL_DIR = "http/tests/local/" def is_http_test(self, test_name): return test_name.startswith(self.HTTP_DIR) and not test_name.startswith(self.HTTP_LOCAL_DIR) def is_web_platform_test(self, test_name): return test_name.startswith(self.web_platform_test_server_doc_root) def test_to_uri(self, test_name): """Convert a test name to a URI.""" if self.is_web_platform_test(test_name): return self.web_platform_test_server_base_url + test_name[len(self.web_platform_test_server_doc_root):] if not self.is_http_test(test_name): return path.abspath_to_uri(self._port.host.platform, self._port.abspath_for_test(test_name)) relative_path = test_name[len(self.HTTP_DIR):] # TODO(dpranke): remove the SSL reference? if relative_path.startswith("ssl/"): return "https://127.0.0.1:8443/" + relative_path return "http://127.0.0.1:8000/" + relative_path def uri_to_test(self, uri): """Return the base layout test name for a given URI. This returns the test name for a given URI, e.g., if you passed in "file:///src/LayoutTests/fast/html/keygen.html" it would return "fast/html/keygen.html". """ if uri.startswith("file:///"): prefix = path.abspath_to_uri(self._port.host.platform, self._port.layout_tests_dir()) if not prefix.endswith('/'): prefix += '/' return uri[len(prefix):] if uri.startswith(self.web_platform_test_server_base_url): return uri.replace(self.web_platform_test_server_base_url, self.web_platform_test_server_doc_root) if uri.startswith("http://"): return uri.replace('http://127.0.0.1:8000/', self.HTTP_DIR) if uri.startswith("https://"): return uri.replace('https://127.0.0.1:8443/', self.HTTP_DIR) raise NotImplementedError('unknown url type: %s' % uri) def has_crashed(self): if self._server_process is None: return False if self._crashed_process_name: return True if self._server_process.has_crashed(): self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() return True return False def start(self, pixel_tests, per_test_args): # FIXME: Callers shouldn't normally call this, since this routine # may not be specifying the correct combination of pixel test and # per_test args. # # The only reason we have this routine at all is so the perftestrunner # can pause before running a test; it might be better to push that # into run_test() directly. 
if not self._server_process: self._start(pixel_tests, per_test_args) self._run_post_start_tasks() def _append_environment_variable_path(self, environment, variable, path): if variable in environment: environment[variable] = environment[variable] + os.pathsep + path else: environment[variable] = path def _setup_environ_for_driver(self, environment):
def _setup_environ_for_test(self): environment = self._port.setup_environ_for_server(self._server_name) environment = self._setup_environ_for_driver(environment) return environment def _start(self, pixel_tests, per_test_args): self.stop() # Each driver process should be using individual directories under _driver_tempdir (which is deleted when stopping), # however some subsystems on some platforms could end up using process default ones. self._port._clear_global_caches_and_temporary_files() self._driver_tempdir = self._port._driver_tempdir() self._driver_user_directory_suffix = os.path.basename(str(self._driver_tempdir)) user_cache_directory = self._port._path_to_user_cache_directory(self._driver_user_directory_suffix) if user_cache_directory: self._port._filesystem.maybe_make_directory(user_cache_directory) self._driver_user_cache_directory = user_cache_directory environment = self._setup_environ_for_test() self._crashed_process_name = None self._crashed_pid = None self._server_process = self._port._test_runner_process_constructor(self._port, self._server_name, self.cmd_line(pixel_tests, per_test_args), environment, worker_number=self._worker_number) self._server_process.start() def _run_post_start_tasks(self): # Remote drivers may override this to delay post-start tasks until the server has ack'd. if self._profiler: self._profiler.attach_to_pid(self._pid_on_target()) def _pid_on_target(self): # Remote drivers will override this method to return the pid on the device. return self._server_process.pid() def stop(self): if self._server_process: self._server_process.stop(self._port.driver_stop_timeout()) self._server_process = None if self._profiler: self._profiler.profile_after_exit() if self._driver_tempdir: self._port._filesystem.rmtree(str(self._driver_tempdir)) self._driver_tempdir = None if self._driver_user_cache_directory: self._port._filesystem.rmtree(self._driver_user_cache_directory) self._driver_user_cache_directory = None def cmd_line(self, pixel_tests, per_test_args): cmd = self._command_wrapper() cmd.append(self._port._path_to_driver()) if self._port.get_option('gc_between_tests'): cmd.append('--gc-between-tests') if self._port.get_option('complex_text'): cmd.append('--complex-text') if self._port.get_option('accelerated_drawing'): cmd.append('--accelerated-drawing') if self._port.get_option('remote_layer_tree'): cmd.append('--remote-layer-tree') if self._port.get_option('threaded'): cmd.append('--threaded') if self._no_timeout: cmd.append('--no-timeout') for allowed_host in self._port.allowed_hosts(): cmd.append('--allowed-host') cmd.append(allowed_host) cmd.extend(self._port.get_option('additional_drt_flag', [])) cmd.extend(self._port.additional_drt_flag()) cmd.extend(per_test_args) cmd.append('-') return cmd def _check_for_driver_timeout(self, out_line): if out_line.startswith("#PID UNRESPONSIVE - "): match = re.match('#PID UNRESPONSIVE - (\S+)', out_line) child_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', out_line) child_process_pid = int(match.group(1)) if match else None err_line = 'Wait on notifyDone timed out, process ' + child_process_name + ' pid = ' + str(child_process_pid) self.error_from_test += err_line _log.debug(err_line) if self._port.get_option("sample_on_timeout"): self._port.sample_process(child_process_name, child_process_pid) if out_line == "FAIL: Timed out waiting for notifyDone to be called\n": self._driver_timed_out = True def _check_for_address_sanitizer_violation(self, error_line): if "ERROR: AddressSanitizer" 
in error_line: return True def _check_for_driver_crash_or_unresponsiveness(self, error_line): crashed_check = error_line.rstrip('\r\n') if crashed_check == "#CRASHED": self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() return True elif error_line.startswith("#CRASHED - "): match = re.match('#CRASHED - (\S+)', error_line) self._crashed_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', error_line) self._crashed_pid = int(match.group(1)) if match else None _log.debug('%s crash, pid = %s' % (self._crashed_process_name, str(self._crashed_pid))) return True elif error_line.startswith("#PROCESS UNRESPONSIVE - "): match = re.match('#PROCESS UNRESPONSIVE - (\S+)', error_line) child_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', error_line) child_process_pid = int(match.group(1)) if match else None _log.debug('%s is unresponsive, pid = %s' % (child_process_name, str(child_process_pid))) self._driver_timed_out = True if child_process_pid: self._port.sample_process(child_process_name, child_process_pid) self.error_from_test += error_line self._server_process.write('#SAMPLE FINISHED\n', True) # Must be able to ignore a broken pipe here, target process may already be closed. return True return self.has_crashed() def _command_from_driver_input(self, driver_input): # FIXME: performance tests pass in full URLs instead of test names. if driver_input.test_name.startswith('http://') or driver_input.test_name.startswith('https://') or driver_input.test_name == ('about:blank'): command = driver_input.test_name elif self.is_web_platform_test(driver_input.test_name) or (self.is_http_test(driver_input.test_name) and (self._port.get_option('webkit_test_runner') or sys.platform == "cygwin")): command = self.test_to_uri(driver_input.test_name) command += "'--absolutePath'" command += self._port.abspath_for_test(driver_input.test_name) else: command = self._port.abspath_for_test(driver_input.test_name) if sys.platform == 'cygwin': command = path.cygpath(command) assert not driver_input.image_hash or driver_input.should_run_pixel_test # ' is the separator between arguments. 
if self._port.supports_per_test_timeout(): command += "'--timeout'%s" % driver_input.timeout if driver_input.should_run_pixel_test: command += "'--pixel-test" if driver_input.should_dump_jsconsolelog_in_stderr: command += "'--dump-jsconsolelog-in-stderr" if driver_input.image_hash: command += "'" + driver_input.image_hash return command + "\n" def _read_first_block(self, deadline, test_name): # returns (text_content, audio_content) block = self._read_block(deadline, test_name) if block.malloc: self._measurements['Malloc'] = float(block.malloc) if block.js_heap: self._measurements['JSHeap'] = float(block.js_heap) if block.content_type == 'audio/wav': return (None, block.decoded_content) return (block.decoded_content, None) def _read_optional_image_block(self, deadline, test_name): # returns (image, actual_image_hash) block = self._read_block(deadline, test_name, wait_for_stderr_eof=True) if block.content and block.content_type == 'image/png': return (block.decoded_content, block.content_hash) return (None, block.content_hash) def _read_header(self, block, line, header_text, header_attr, header_filter=None): if line.startswith(header_text) and getattr(block, header_attr) is None: value = line.split()[1] if header_filter: value = header_filter(value) setattr(block, header_attr, value) return True return False def _process_stdout_line(self, block, line): if (self._read_header(block, line, 'Content-Type: ', 'content_type') or self._read_header(block, line, 'Content-Transfer-Encoding: ', 'encoding') or self._read_header(block, line, 'Content-Length: ', '_content_length', int) or self._read_header(block, line, 'ActualHash: ', 'content_hash') or self._read_header(block, line, 'DumpMalloc: ', 'malloc') or self._read_header(block, line, 'DumpJSHeap: ', 'js_heap')): return # Note, we're not reading ExpectedHash: here, but we could. # If the line wasn't a header, we just append it to the content. block.content += line def _strip_eof(self, line): if line and line.endswith("#EOF\n"): return line[:-5], True return line, False def _read_block(self, deadline, test_name, wait_for_stderr_eof=False): block = ContentBlock() out_seen_eof = False asan_violation_detected = False while not self.has_crashed(): if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof): break if self.err_seen_eof: out_line = self._server_process.read_stdout_line(deadline) err_line = None elif out_seen_eof: out_line = None err_line = self._server_process.read_stderr_line(deadline) else: out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(deadline) if self._server_process.timed_out or self.has_crashed(): break if out_line: assert not out_seen_eof out_line, out_seen_eof = self._strip_eof(out_line) if err_line: assert not self.err_seen_eof err_line, self.err_seen_eof = self._strip_eof(err_line) if out_line: self._check_for_driver_timeout(out_line) if out_line[-1] != "\n": _log.error(" %s -> Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug." % test_name) content_length_before_header_check = block._content_length self._process_stdout_line(block, out_line) # FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header. # Don't wait until we're done with headers, just read the binary blob right now. 
if content_length_before_header_check != block._content_length: block.content = self._server_process.read_stdout(deadline, block._content_length) if err_line: if self._check_for_driver_crash_or_unresponsiveness(err_line): break elif self._check_for_address_sanitizer_violation(err_line): asan_violation_detected = True self._crash_report_from_driver = "" # ASan report starts with a nondescript line, we only detect the second line. end_of_previous_error_line = self.error_from_test.rfind('\n', 0, -1) if end_of_previous_error_line > 0: self.error_from_test = self.error_from_test[:end_of_previous_error_line] else: self.error_from_test = "" # Symbolication can take a very long time, give it 10 extra minutes to finish. # FIXME: This can likely be removed once <rdar://problem/18701447> is fixed. deadline += 10 * 60 * 1000 if asan_violation_detected: self._crash_report_from_driver += err_line else: self.error_from_test += err_line if asan_violation_detected and not self._crashed_process_name: self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() block.decode_content() return block @staticmethod def check_driver(port): # This checks if the required system dependencies for the driver are met. # Since this is the generic class implementation, just return True. return True class ContentBlock(object): def __init__(self): self.content_type = None self.encoding = None self.content_hash = None self._content_length = None # Content is treated as binary data even though the text output is usually UTF-8. self.content = str() # FIXME: Should be bytearray() once we require Python 2.6. self.decoded_content = None self.malloc = None self.js_heap = None def decode_content(self): if self.encoding == 'base64' and self.content is not None: self.decoded_content = base64.b64decode(self.content) else: self.decoded_content = self.content class DriverProxy(object): """A wrapper for managing two Driver instances, one with pixel tests and one without. This allows us to handle plain text tests and ref tests with a single driver.""" def __init__(self, port, worker_number, driver_instance_constructor, pixel_tests, no_timeout): self._port = port self._worker_number = worker_number self._driver_instance_constructor = driver_instance_constructor self._no_timeout = no_timeout # FIXME: We shouldn't need to create a driver until we actually run a test. self._driver = self._make_driver(pixel_tests) self._driver_cmd_line = None def _make_driver(self, pixel_tests): return self._driver_instance_constructor(self._port, self._worker_number, pixel_tests, self._no_timeout) # FIXME: this should be a @classmethod (or implemented on Port instead). def is_http_test(self, test_name): return self._driver.is_http_test(test_name) # FIXME: this should be a @classmethod (or implemented on Port instead). def test_to_uri(self, test_name): return self._driver.test_to_uri(test_name) # FIXME: this should be a @classmethod (or implemented on Port instead). 
def uri_to_test(self, uri): return self._driver.uri_to_test(uri) def run_test(self, driver_input, stop_when_done): pixel_tests_needed = driver_input.should_run_pixel_test cmd_line_key = self._cmd_line_as_key(pixel_tests_needed, driver_input.args) if cmd_line_key != self._driver_cmd_line: self._driver.stop() self._driver = self._make_driver(pixel_tests_needed) self._driver_cmd_line = cmd_line_key return self._driver.run_test(driver_input, stop_when_done) def has_crashed(self): return self._driver.has_crashed() def stop(self): self._driver.stop() # FIXME: this should be a @classmethod (or implemented on Port instead). def cmd_line(self, pixel_tests=None, per_test_args=None): return self._driver.cmd_line(pixel_tests, per_test_args or []) def _cmd_line_as_key(self, pixel_tests, per_test_args): return ' '.join(self.cmd_line(pixel_tests, per_test_args))
build_root_path = str(self._port._build_path()) self._append_environment_variable_path(environment, 'DYLD_LIBRARY_PATH', build_root_path) self._append_environment_variable_path(environment, '__XPC_DYLD_LIBRARY_PATH', build_root_path) self._append_environment_variable_path(environment, 'DYLD_FRAMEWORK_PATH', build_root_path) self._append_environment_variable_path(environment, '__XPC_DYLD_FRAMEWORK_PATH', build_root_path) # Use an isolated temp directory that can be deleted after testing (especially important on Mac, as # CoreMedia disk cache is in the temp directory). environment['TMPDIR'] = str(self._driver_tempdir) environment['DIRHELPER_USER_DIR_SUFFIX'] = self._driver_user_directory_suffix # Put certain normally persistent files into the temp directory (e.g. IndexedDB storage). if sys.platform == 'cygwin': environment['DUMPRENDERTREE_TEMP'] = path.cygpath(str(self._driver_tempdir)) else: environment['DUMPRENDERTREE_TEMP'] = str(self._driver_tempdir) environment['LOCAL_RESOURCE_ROOT'] = str(self._port.layout_tests_dir()) environment['ASAN_OPTIONS'] = "allocator_may_return_null=1" environment['__XPC_ASAN_OPTIONS'] = environment['ASAN_OPTIONS'] if 'WEBKIT_OUTPUTDIR' in os.environ: environment['WEBKIT_OUTPUTDIR'] = os.environ['WEBKIT_OUTPUTDIR'] if self._profiler: environment = self._profiler.adjusted_environment(environment) return environment
identifier_body
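The identifier_body middle above supplies _setup_environ_for_driver, which grows PATH-like variables through _append_environment_variable_path. A self-contained sketch of that append rule (append_env_path is an illustrative stand-in for the method shown in the record):

import os

# Mirrors _append_environment_variable_path from the record above: extend a
# PATH-like variable with os.pathsep, or create it if absent.
def append_env_path(environment, variable, path):
    if variable in environment:
        environment[variable] = environment[variable] + os.pathsep + path
    else:
        environment[variable] = path

env = {}
append_env_path(env, 'DYLD_FRAMEWORK_PATH', '/build/Release')
append_env_path(env, 'DYLD_FRAMEWORK_PATH', '/build/Debug')
assert env['DYLD_FRAMEWORK_PATH'].split(os.pathsep) == ['/build/Release', '/build/Debug']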
init-res-into-things.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(unknown_features)] #![feature(box_syntax)] #![feature(unsafe_destructor)] use std::cell::Cell; // Resources can't be copied, but storing into data structures counts // as a move unless the stored thing is used afterwards. struct r<'a> { i: &'a Cell<int>, } struct BoxR<'a> { x: r<'a> } #[unsafe_destructor] impl<'a> Drop for r<'a> { fn drop(&mut self) { self.i.set(self.i.get() + 1) } } fn r(i: &Cell<int>) -> r { r { i: i } } fn test_rec() { let i = &Cell::new(0); { let _a = BoxR {x: r(i)}; } assert_eq!(i.get(), 1); } fn test_tag() { enum t<'a> { t0(r<'a>), }
} fn test_tup() { let i = &Cell::new(0); { let _a = (r(i), 0); } assert_eq!(i.get(), 1); } fn test_unique() { let i = &Cell::new(0); { let _a = box r(i); } assert_eq!(i.get(), 1); } fn test_unique_rec() { let i = &Cell::new(0); { let _a = box BoxR { x: r(i) }; } assert_eq!(i.get(), 1); } pub fn main() { test_rec(); test_tag(); test_tup(); test_unique(); test_unique_rec(); }
let i = &Cell::new(0); { let _a = t::t0(r(i)); } assert_eq!(i.get(), 1);
random_line_split
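The restored middle of test_tag shows the pattern every test in this file repeats: a drop counter in a Cell proves that the resource moved into a structure is dropped exactly once when the enclosing block ends. As a loose analogy only (Python has no move semantics or Drop trait; CPython's reference counting merely happens to finalize promptly, and the language does not guarantee it), the same assertion shape looks like:

# Loose Python analogy (not Rust semantics): a finalizer bumps a shared
# counter, standing in for Drop on struct r<'a>.
class Resource:
    def __init__(self, counter):
        self.counter = counter
    def __del__(self):
        self.counter[0] += 1

counter = [0]
holder = (Resource(counter), 0)  # like `let _a = (r(i), 0);` in test_tup
del holder                       # like the block scope ending in the Rust test
assert counter[0] == 1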
init-res-into-things.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(unknown_features)] #![feature(box_syntax)] #![feature(unsafe_destructor)] use std::cell::Cell; // Resources can't be copied, but storing into data structures counts // as a move unless the stored thing is used afterwards. struct
<'a> { i: &'a Cell<int>, } struct BoxR<'a> { x: r<'a> } #[unsafe_destructor] impl<'a> Drop for r<'a> { fn drop(&mut self) { self.i.set(self.i.get() + 1) } } fn r(i: &Cell<int>) -> r { r { i: i } } fn test_rec() { let i = &Cell::new(0); { let _a = BoxR {x: r(i)}; } assert_eq!(i.get(), 1); } fn test_tag() { enum t<'a> { t0(r<'a>), } let i = &Cell::new(0); { let _a = t::t0(r(i)); } assert_eq!(i.get(), 1); } fn test_tup() { let i = &Cell::new(0); { let _a = (r(i), 0); } assert_eq!(i.get(), 1); } fn test_unique() { let i = &Cell::new(0); { let _a = box r(i); } assert_eq!(i.get(), 1); } fn test_unique_rec() { let i = &Cell::new(0); { let _a = box BoxR { x: r(i) }; } assert_eq!(i.get(), 1); } pub fn main() { test_rec(); test_tag(); test_tup(); test_unique(); test_unique_rec(); }
r
identifier_name
settings.component.ts
/* * This file is part of *** M y C o R e *** * See http://www.mycore.de/ for details. * * MyCoRe is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * MyCoRe is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with MyCoRe. If not, see <http://www.gnu.org/licenses/>. */ import {Component} from '@angular/core'; import {CommunicationService} from '../service/communication.service'; import {RESTService} from '../service/rest.service'; import {Settings} from './settings'; @Component({ selector: 'web-cli-settings', templateUrl: 'app/settings/settings.html' }) export class WebCliSettingsComponent { settings: Settings; constructor(private _communicationService: CommunicationService, private _restService: RESTService){ this._restService.continueIfOneFails.subscribe( value => this.settings.continueIfOneFails = value ); } ngOnInit() { this.settings = this.getSettingsFromCookie(500, 10, true, false); this._communicationService.setSettings(this.settings); this._restService.setContinueIfOneFails(this.settings.continueIfOneFails); } onHistoryChange() { if (localStorage.getItem("historySize") != this.settings.historySize + "") { localStorage.setItem("historySize", this.settings.historySize + ""); } } onComHistoryChange() { if (localStorage.getItem("comHistorySize") != this.settings.comHistorySize + "") { localStorage.setItem("comHistorySize", this.settings.comHistorySize + ""); } } onAutoScrollChange(event) { if (localStorage.getItem("autoScroll") != event.srcElement.checked + "") { localStorage.setItem("autoScroll", event.srcElement.checked); } } onContinueIfOneFailsChange(event) { if (localStorage.getItem("continueIfOneFails") != event.srcElement.checked + "") { localStorage.setItem("continueIfOneFails", event.srcElement.checked); } this._restService.setContinueIfOneFails(event.srcElement.checked); } deleteCommandHistory() { this._communicationService.setCommandHistory([]); localStorage.removeItem("commandHistory"); } private getSettingsFromCookie(defaultHSize: number, defaultComHSize: number, defaultAutoScroll: boolean, defaultContinueIfOneFails: boolean) { var storageHSize = localStorage.getItem("historySize"); if (storageHSize != undefined && storageHSize != ""){ defaultHSize = parseInt(storageHSize); } else { localStorage.setItem("historySize", defaultHSize + ""); } var storageComHSize = localStorage.getItem("comHistorySize"); if (storageComHSize != undefined && storageComHSize != "")
else { localStorage.setItem("comHistorySize", defaultComHSize + ""); } var storageAutoScroll = localStorage.getItem("autoScroll"); if (storageAutoScroll != undefined && storageAutoScroll != ""){ defaultAutoScroll = (storageAutoScroll == "true"); } else { localStorage.setItem("autoScroll", defaultAutoScroll + ""); } var storageContinueIfOneFails = localStorage.getItem("continueIfOneFails"); if (storageContinueIfOneFails != undefined && storageContinueIfOneFails != ""){ defaultContinueIfOneFails = (storageContinueIfOneFails == "true"); } else { localStorage.setItem("continueIfOneFails", defaultContinueIfOneFails + ""); } return new Settings(defaultHSize, defaultComHSize, defaultAutoScroll, defaultContinueIfOneFails); } }
{ defaultComHSize = parseInt(storageComHSize); }
conditional_block
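The conditional_block middle above is the branch that prefers a persisted comHistorySize over the caller's default. getSettingsFromCookie repeats one read-or-seed step per setting; a compact sketch of that step, written in Python for brevity with a plain dict standing in for localStorage (read_or_seed is an illustrative name, not part of the component):

# Read-or-seed step from getSettingsFromCookie: take the stored value when
# present and non-empty, otherwise persist the default so later reads hit it.
def read_or_seed(storage, key, default, parse=str, serialize=str):
    raw = storage.get(key)
    if raw is not None and raw != '':
        return parse(raw)
    storage[key] = serialize(default)  # persist the default for later reads
    return default

storage = {'historySize': '250'}
assert read_or_seed(storage, 'historySize', 500, parse=int) == 250   # stored wins
assert read_or_seed(storage, 'comHistorySize', 10, parse=int) == 10  # default seeded
assert storage['comHistorySize'] == '10'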
settings.component.ts
/* * This file is part of *** M y C o R e *** * See http://www.mycore.de/ for details. * * MyCoRe is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * MyCoRe is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with MyCoRe. If not, see <http://www.gnu.org/licenses/>. */ import {Component} from '@angular/core'; import {CommunicationService} from '../service/communication.service'; import {RESTService} from '../service/rest.service'; import {Settings} from './settings'; @Component({ selector: 'web-cli-settings', templateUrl: 'app/settings/settings.html' }) export class WebCliSettingsComponent { settings: Settings;
(private _communicationService: CommunicationService, private _restService: RESTService){ this._restService.continueIfOneFails.subscribe( value => this.settings.continueIfOneFails = value ); } ngOnInit() { this.settings = this.getSettingsFromCookie(500, 10, true, false); this._communicationService.setSettings(this.settings); this._restService.setContinueIfOneFails(this.settings.continueIfOneFails); } onHistoryChange() { if (localStorage.getItem("historySize") != this.settings.historySize + "") { localStorage.setItem("historySize", this.settings.historySize + ""); } } onComHistoryChange() { if (localStorage.getItem("comHistorySize") != this.settings.comHistorySize + "") { localStorage.setItem("comHistorySize", this.settings.comHistorySize + ""); } } onAutoScrollChange(event) { if (localStorage.getItem("autoScroll") != event.srcElement.checked + "") { localStorage.setItem("autoScroll", event.srcElement.checked); } } onContinueIfOneFailsChange(event) { if (localStorage.getItem("continueIfOneFails") != event.srcElement.checked + "") { localStorage.setItem("continueIfOneFails", event.srcElement.checked); } this._restService.setContinueIfOneFails(event.srcElement.checked); } deleteCommandHistory() { this._communicationService.setCommandHistory([]); localStorage.removeItem("commandHistory"); } private getSettingsFromCookie(defaultHSize: number, defaultComHSize: number, defaultAutoScroll: boolean, defaultContinueIfOneFails: boolean) { var storageHSize = localStorage.getItem("historySize"); if (storageHSize != undefined && storageHSize != ""){ defaultHSize = parseInt(storageHSize); } else { localStorage.setItem("historySize", defaultHSize + ""); } var storageComHSize = localStorage.getItem("comHistorySize"); if (storageComHSize != undefined && storageComHSize != ""){ defaultComHSize = parseInt(storageComHSize); } else { localStorage.setItem("comHistorySize", defaultComHSize + ""); } var storageAutoScroll = localStorage.getItem("autoScroll"); if (storageAutoScroll != undefined && storageAutoScroll != ""){ defaultAutoScroll = (storageAutoScroll == "true"); } else { localStorage.setItem("autoScroll", defaultAutoScroll + ""); } var storageContinueIfOneFails = localStorage.getItem("continueIfOneFails"); if (storageContinueIfOneFails != undefined && storageContinueIfOneFails != ""){ defaultContinueIfOneFails = (storageContinueIfOneFails == "true"); } else { localStorage.setItem("continueIfOneFails", defaultContinueIfOneFails + ""); } return new Settings(defaultHSize, defaultComHSize, defaultAutoScroll, defaultContinueIfOneFails); } }
constructor
identifier_name
settings.component.ts
/* * This file is part of *** M y C o R e *** * See http://www.mycore.de/ for details. * * MyCoRe is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * MyCoRe is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with MyCoRe. If not, see <http://www.gnu.org/licenses/>. */ import {Component} from '@angular/core'; import {CommunicationService} from '../service/communication.service'; import {RESTService} from '../service/rest.service'; import {Settings} from './settings'; @Component({ selector: 'web-cli-settings', templateUrl: 'app/settings/settings.html' }) export class WebCliSettingsComponent { settings: Settings; constructor(private _communicationService: CommunicationService, private _restService: RESTService){ this._restService.continueIfOneFails.subscribe( value => this.settings.continueIfOneFails = value ); } ngOnInit() { this.settings = this.getSettingsFromCookie(500, 10, true, false); this._communicationService.setSettings(this.settings); this._restService.setContinueIfOneFails(this.settings.continueIfOneFails); } onHistoryChange() { if (localStorage.getItem("historySize") != this.settings.historySize + "") { localStorage.setItem("historySize", this.settings.historySize + ""); } } onComHistoryChange() { if (localStorage.getItem("comHistorySize") != this.settings.comHistorySize + "") { localStorage.setItem("comHistorySize", this.settings.comHistorySize + ""); } } onAutoScrollChange(event) { if (localStorage.getItem("autoScroll") != event.srcElement.checked + "") { localStorage.setItem("autoScroll", event.srcElement.checked); } } onContinueIfOneFailsChange(event) { if (localStorage.getItem("continueIfOneFails") != event.srcElement.checked + "") { localStorage.setItem("continueIfOneFails", event.srcElement.checked); } this._restService.setContinueIfOneFails(event.srcElement.checked); } deleteCommandHistory() { this._communicationService.setCommandHistory([]); localStorage.removeItem("commandHistory"); } private getSettingsFromCookie(defaultHSize: number, defaultComHSize: number, defaultAutoScroll: boolean, defaultContinueIfOneFails: boolean) { var storageHSize = localStorage.getItem("historySize"); if (storageHSize != undefined && storageHSize != ""){ defaultHSize = parseInt(storageHSize); } else { localStorage.setItem("historySize", defaultHSize + ""); } var storageComHSize = localStorage.getItem("comHistorySize"); if (storageComHSize != undefined && storageComHSize != ""){ defaultComHSize = parseInt(storageComHSize); } else { localStorage.setItem("comHistorySize", defaultComHSize + "");
} var storageAutoScroll = localStorage.getItem("autoScroll"); if (storageAutoScroll != undefined && storageAutoScroll != ""){ defaultAutoScroll = (storageAutoScroll == "true"); } else { localStorage.setItem("autoScroll", defaultAutoScroll + ""); } var storageContinueIfOneFails = localStorage.getItem("continueIfOneFails"); if (storageContinueIfOneFails != undefined && storageContinueIfOneFails != ""){ defaultContinueIfOneFails = (storageContinueIfOneFails == "true"); } else { localStorage.setItem("continueIfOneFails", defaultContinueIfOneFails + ""); } return new Settings(defaultHSize, defaultComHSize, defaultAutoScroll, defaultContinueIfOneFails); } }
random_line_split
settings.component.ts
/* * This file is part of *** M y C o R e *** * See http://www.mycore.de/ for details. * * MyCoRe is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * MyCoRe is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with MyCoRe. If not, see <http://www.gnu.org/licenses/>. */ import {Component} from '@angular/core'; import {CommunicationService} from '../service/communication.service'; import {RESTService} from '../service/rest.service'; import {Settings} from './settings'; @Component({ selector: 'web-cli-settings', templateUrl: 'app/settings/settings.html' }) export class WebCliSettingsComponent { settings: Settings; constructor(private _communicationService: CommunicationService, private _restService: RESTService){ this._restService.continueIfOneFails.subscribe( value => this.settings.continueIfOneFails = value ); } ngOnInit() { this.settings = this.getSettingsFromCookie(500, 10, true, false); this._communicationService.setSettings(this.settings); this._restService.setContinueIfOneFails(this.settings.continueIfOneFails); } onHistoryChange() { if (localStorage.getItem("historySize") != this.settings.historySize + "") { localStorage.setItem("historySize", this.settings.historySize + ""); } } onComHistoryChange() { if (localStorage.getItem("comHistorySize") != this.settings.comHistorySize + "") { localStorage.setItem("comHistorySize", this.settings.comHistorySize + ""); } } onAutoScrollChange(event) { if (localStorage.getItem("autoScroll") != event.srcElement.checked + "") { localStorage.setItem("autoScroll", event.srcElement.checked); } } onContinueIfOneFailsChange(event)
deleteCommandHistory() { this._communicationService.setCommandHistory([]); localStorage.removeItem("commandHistory"); } private getSettingsFromCookie(defaultHSize: number, defaultComHSize: number, defaultAutoScroll: boolean, defaultContinueIfOneFails: boolean) { var storageHSize = localStorage.getItem("historySize"); if (storageHSize != undefined && storageHSize != ""){ defaultHSize = parseInt(storageHSize); } else { localStorage.setItem("historySize", defaultHSize + ""); } var storageComHSize = localStorage.getItem("comHistorySize"); if (storageComHSize != undefined && storageComHSize != ""){ defaultComHSize = parseInt(storageComHSize); } else { localStorage.setItem("comHistorySize", defaultComHSize + ""); } var storageAutoScroll = localStorage.getItem("autoScroll"); if (storageAutoScroll != undefined && storageAutoScroll != ""){ defaultAutoScroll = (storageAutoScroll == "true"); } else { localStorage.setItem("autoScroll", defaultAutoScroll + ""); } var storageContinueIfOneFails = localStorage.getItem("continueIfOneFails"); if (storageContinueIfOneFails != undefined && storageContinueIfOneFails != ""){ defaultContinueIfOneFails = (storageContinueIfOneFails == "true"); } else { localStorage.setItem("continueIfOneFails", defaultContinueIfOneFails + ""); } return new Settings(defaultHSize, defaultComHSize, defaultAutoScroll, defaultContinueIfOneFails); } }
{ if (localStorage.getItem("continueIfOneFails") != event.srcElement.checked + "") { localStorage.setItem("continueIfOneFails", event.srcElement.checked); } this._restService.setContinueIfOneFails(event.srcElement.checked); }
identifier_body
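The identifier_body middle restores onContinueIfOneFailsChange: persist the checkbox state, then push it through RESTService so subscribers (including this component's own constructor subscription) stay in sync. A toy round-trip in that shape, sketched in Python with a minimal subject class (ToySubject is an illustrative stand-in, not the real RxJS API):

# Toy stand-in for the RESTService subject: a setter notifies every
# subscriber, so the component that pushed a change also hears it back.
class ToySubject:
    def __init__(self):
        self._subscribers = []
    def subscribe(self, callback):
        self._subscribers.append(callback)
    def next(self, value):
        for callback in self._subscribers:
            callback(value)

continue_if_one_fails = ToySubject()
settings = {'continueIfOneFails': False}
continue_if_one_fails.subscribe(lambda v: settings.update(continueIfOneFails=v))
continue_if_one_fails.next(True)  # like setContinueIfOneFails(true)
assert settings['continueIfOneFails'] is True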
heatmap.data.ts
import { Dictionary } from '../../../models/json/dictionary'; /** * Reuse of HTML DOM Rect class, added here to shut Typescript up. */ export interface DOMRect { width: number; height: number; } /** * Specifies border settings for a component. The color can be a class (prefixed with '.') or color name. */ interface BorderRule { width: number, color: string, } /** * This is the format of the data to be provided to the component. */ export interface HeatCellData { /** * Value displayed in the text; also identifies which cell triggered a hover or click event. */ title: string, /** * This value corresponds to the name of a heat color specified in the heatmap options. */ value: boolean | string, } export interface HeatBatchData { /** * Header name for a batch (group) of cells. */ title: string, /** * Batches are normally displayed using alternating background (and maybe foreground) colors. You can, however, * specify a heat that would match a name in the heatmap options; thus creating custom colors for each batch. */ value: string | null, /** * The cells that make up this batch. One day, perhaps, we can make these cells and/or nested batches. Fun! */ cells?: Array<HeatCellData>, } /** * Used to specify the color of your cell data. Each value can be either a color string ('white' or '#334455', etc.), * or a CSS class prefixed with a period (.). You optionally specify heat colors in the options object you provide to * the component. */ export interface HeatColor { bg: string | string[], fg: string, border?: BorderRule, } /** * Each batch (groups of cells) has colors for the header, the body background, and the border style. You optionally * specify batch colors in the options object you provide to the component. If both the header and body have a border, * the header's bottom border and body's top border will have a width of 0 to prevent division. */ export interface BatchColor { header: HeatColor, body: HeatColor, } /** * Options specific to the display (sizes, viewability, padding, etc.) of the headers, batches and cells. */ export interface ViewRules { /** * If you have multiple heatmaps on a page, this value distinguishes the one you are drawing onto. * Needed by D3 to access the div to draw the canvas on. */ component?: string, /** * The height of the header bar for rendering batch titles. Defaults to a whopping 48 pixels. */ headerHeight?: number, /** * The minimum padding on both the left and right side of the chart. Defaults to 1 pixel. After all calculations * are done, if there is _extra_ space, then the padding will be _increased_ so we can center the chart. */ minSidePadding?: number, /** * This is guaranteed padding at the bottom of the chart. Useful if you want to draw a border. */ minBottomPadding?: number, /**
stacked?: boolean, } /** * Options specific to the use of colors in the headers, batches and cells. */ interface ColorRules { /** * This is the default array colors for the batches and their headers. The list of colors rotates for each batch * (not each column). The defaults are reddish-brown header on white background, and very light gray on very light * gray. A batch can override using this default by specifying a heat (in its value property). */ batchColors?: Array<BatchColor>, /** * This array colors the cells, batches and their headers. The property names correspond to the value property in * the heatmap data, which can just be a boolean, string, or array of strings. So you can have values of true/false, * or strings like 'S', 'M', 'L', etc. The value is not required to be a key in this object; if it is missing, the * noColor heat will be used as backup. */ heatColors?: Dictionary<HeatColor>, /** * Default color when no heat value matches. */ noColor?: HeatColor, /** * Whether to shade cells with multiple heat values using a gradient of those heat colors. The default is true, * but bear in mind that anything over three colors (really, it depends on the calculated cell width) tends to * look bad. If you turn gradients off, and a cell has multiple heats, the default gradient color (which may * also be a gradient) will be used instead, or the noColor property if you don't provide a default gradient. * Note that another problem with gradients is that it makes it difficult to determine which text color to use, so * either: Turn off the foreground color (fg == 'transparent'); or use high contrasts between your background and * foreground color options. */ showGradients?: boolean, /** * So, even if you do decide to display gradients, you can specify that you want to limit the number of colors to * shade. If the cell exceeds this limit, the default gradient or no-color value is used instead. */ maxGradients?: number, /** * The default color or color gradient to use for when gradients are turned off or a cell exceeds the maximum * number of heats. Note this is a full HeatColor, so you can specify the foreground color with it. */ defaultGradient?: HeatColor, } /** * Options specific to handling hovering over the cells. */ interface HoverRules { /** * A color to highlight the background of a cell when the mouse hovers over it. Hover colors can never be gradients. */ color?: HeatColor, /** * How long to wait, in milliseconds, before firing a hover event over one of the cells. Defaults to 500ms. */ delay?: number, } /** * Collects rules for displaying text in various heatmap components. */ interface TextRule { /** * Whether to display the title. The default for headers is true; for cells, false. */ showText?: boolean, /** * The size, in pixels, for text. Defaults to 14px in headers, 6px in cells. */ fontSize?: number, /** * Whether to allow the text to be split on multiple lines. Note that this is dependent on how tall the component * is. Splitting is based on whitespace, slashes (/) and hyphens (-). The default is true. */ allowSplit?: boolean, /** * Whether to hyphenate the text in the component, if there is no room to fully write it, and if splitting doesn't * do the trick. Hyphenating is based solely on the presence of English consonants, and is therefore not very * robust, because loading an entire dictionary seems like overkill. (This may change in the future, but for now it * just doesn't seem viable.) Note that this is still dependent on how tall the component is. 
The default is true * for headers; for cells, false. Text that still doesn't fit, after splitting and hyphenating, if they are * allowed, will result in truncating the text, and adding an ellipsis. */ hyphenate?: boolean, } interface TextRules { headers?: TextRule, cells?: TextRule, } /** * Rules for displaying the minimap. */ interface MinimapRules { /** * Where to draw the minimap within the heatmap. Set to null to not draw a minimap at all. This is just the width * and height of the minimap, not where it will be placed. Note that the minimap will overlay the heatmap, * obscuring any cells that are under it. The default is a 200x200 area. If the heatmap has no zoom extent, or * the min and max range of the zoom extent are the same value, then this is the same as requesting no minimap. */ size?: DOMRect | null, /** * The starting location of the minimap. The default is 'bottom-right'. */ startCorner?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right', /** * Whether the minimap should have hot corners (buttons in each corner of the minimap) that will cause it to move * to the equivalent corner of the heatmap, in order to move it out of the user's line of sight. The default is * true. */ hotCorners?: boolean, /** * This future option will allow the heatmap to attempt to redraw its columns to avoid overlapping the minimap. * The default will be true, when this option is implemented. Also when implemented, the minimap corners will be * limited to the bottom corners, because rearranging the headers is just not an option. */ attemptAvoidance?: boolean, /** * The minimap can only draw header text; it's nutty to try to draw a 1px font, let alone to write text in each * cell. The minimap's header does not try to be a mirror of the heatmap, it just writes a single line, ellipsed * if necessary, at this tiny font. While the columns should be a perfect mirror of the heatmap, the cells will * just be too small to write in. So the split and hyphenate options are ignored. If the provided font size is * too tiny for the minimap header height, the header title won't be written at all. Defaults to 6px. */ text?: TextRule, /** * The thickness and color of the minimap border. Defaults to 2px and black. */ border?: BorderRule, /** * The thickness and color of the minimap zoom panner (the border that indicates the heatmap zoom area). * Defaults to 1px and dark blue. */ panner?: BorderRule, } /** * Options specific to the zooming and the display of the minimap. */ interface ZoomRules { /** * The magnification range for zooming in on the heatmap. Both numbers must be positive (>0), and can be decimal. * A value of 1 means full view, or 1x magnification. Higher numbers mean you are zooming in, and number less than * 1 means you are shrinking the heatmap. The default is [1, 4]. The maximum magnification you want is going to * depend on how much data you have. Extremely large amounts of data will cause your cells to look like pebbles or * even dots, so you will want to bump that number up if that is your situation. */ zoomExtent?: [number, number] | null, /** * If the cells are really tiny, even rendering the text at a tiny font looks ridiculous. But if you zoom in to * the given magnification, the cell text can be made visible. Note that if you turn cell text off, it overrides * this setting. The default for this value is equal to the lowest zoomExtent value. */ cellTitleExtent?: number, /** * How to draw the minimap within the heatmap. 
*/ minimap?: MinimapRules, } /** * You must provide an options object to the component, even if you do not wish to override any defaults. It's required, * because there is a high likelihood that you won't be fond of the defaults, and to encourage you to use application * colors that match your desired user experience. */ export class HeatmapOptions { view?: ViewRules; color?: ColorRules; hover?: HoverRules; text?: TextRules; zoom?: ZoomRules; static merge(custom: HeatmapOptions, defaults: HeatmapOptions): HeatmapOptions { const merged = new HeatmapOptions(); merged.view = Object.assign({}, defaults.view, custom.view); merged.color = Object.assign({}, defaults.color, custom.color); merged.hover = Object.assign({}, defaults.hover, custom.hover); merged.text = Object.assign({}, defaults.text, custom.text); merged.zoom = Object.assign({minimap: {}}, defaults.zoom, custom.zoom); if (defaults.text && custom.text) { merged.text.headers = Object.assign({}, defaults.text.headers, custom.text.headers); merged.text.cells = Object.assign({}, defaults.text.cells, custom.text.cells); } if (defaults.zoom && defaults.zoom.minimap && custom.zoom && custom.zoom.minimap) { merged.zoom.minimap = Object.assign({}, defaults.zoom.minimap, custom.zoom.minimap); if (defaults.zoom.minimap.text && custom.zoom.minimap.text) { merged.zoom.minimap.text = Object.assign({}, defaults.zoom.minimap.text, custom.zoom.minimap.text); } } // validate values merged.view.headerHeight = Math.max(0, merged.view.headerHeight); merged.view.minSidePadding = Math.max(0, merged.view.minSidePadding); merged.view.minBottomPadding = Math.max(0, merged.view.minBottomPadding); if (!merged.color.batchColors) { merged.color.batchColors = []; } if (!merged.color.heatColors) { merged.color.heatColors = {}; } if (!merged.color.noColor) { merged.color.noColor = { bg: 'transparent', fg: 'transparent', }; } merged.color.maxGradients = Math.max(1, merged.color.maxGradients); if (!merged.hover.color) { merged.hover.color = { bg: 'transparent', fg: 'transparent', }; } else if (merged.hover.color.bg && Array.isArray(merged.hover.color.bg)) { merged.hover.color.bg = (merged.hover.color.bg.length > 0) ? 
merged.hover.color.bg[0] : 'transparent'; } merged.hover.delay = Math.max(1, merged.hover.delay); if (merged.text.headers && merged.text.headers.fontSize) { merged.text.headers.fontSize = Math.max(0, merged.text.headers.fontSize); } if (merged.text.cells && merged.text.cells.fontSize) { merged.text.cells.fontSize = Math.max(0, merged.text.cells.fontSize); } // make sure we have a valid zoom extent, the numbers are valid, and they are sorted let zoom: number[] = merged.zoom.zoomExtent; if (!zoom || (zoom.length < 1)) { zoom = [1, 1]; } else if (zoom.length < 2) { zoom = [1, zoom[0]]; } else if (zoom.length > 2) { zoom = merged.zoom.zoomExtent.slice(0, 2); } zoom = zoom.map(extent => Math.max(.01, extent)).sort((a, b) => a - b); merged.zoom.cellTitleExtent = Math.min(Math.max(zoom[0], merged.zoom.cellTitleExtent), zoom[1]) || zoom[0]; // ensure NaN/null/undefined does not trip us up merged.zoom.zoomExtent = zoom as [number, number]; if (merged.zoom.zoomExtent[0] === merged.zoom.zoomExtent[1]) { merged.zoom.zoomExtent = null; merged.zoom.minimap.size = null; } // validate the minimap settings if (merged.zoom.minimap.size) { merged.zoom.minimap.size.width = Math.max(0, merged.zoom.minimap.size.width); merged.zoom.minimap.size.height = Math.max(0, merged.zoom.minimap.size.height); if ((merged.zoom.minimap.size.width === 0) && (merged.zoom.minimap.size.height === 0)) { merged.zoom.minimap.size = null; } } if (merged.zoom.minimap.text) { merged.zoom.minimap.text.fontSize = Math.max(0, merged.zoom.minimap.text.fontSize); merged.zoom.minimap.text.allowSplit = false; merged.zoom.minimap.text.hyphenate = false; } if (merged.zoom.minimap.border) { merged.zoom.minimap.border.width = Math.max(1, merged.zoom.minimap.border.width); } if (merged.zoom.minimap.panner) { merged.zoom.minimap.panner.width = Math.max(1, merged.zoom.minimap.panner.width); } return merged; } static reciprocateZoom(options: HeatmapOptions): [number, number] { if (!options || !options.zoom || !options.zoom.zoomExtent || (options.zoom.zoomExtent.length < 2)) { return null; } let zoom = options.zoom.zoomExtent.map(extent => Math.max(.01, 1 / extent)).sort((a, b) => a - b); return zoom as [number, number]; } } export const DEFAULT_OPTIONS: HeatmapOptions = { view: { component: '', headerHeight: 48, minSidePadding: 1, minBottomPadding: 1, }, color: { batchColors: [ {header: {bg: '#e3f2fd', fg: '#333'}, body: {bg: '#e3f2fd', fg: 'black'}}, {header: {bg: 'transparent', fg: '#333'}, body: {bg: 'transparent', fg: 'black'}}, ], heatColors: { 'true': {bg: '#e66', fg: 'black'}, 'false': {bg: '#ccc', fg: 'black'}, }, noColor: {bg: 'transparent', fg: 'black'}, showGradients: true, maxGradients: 3, defaultGradient: {bg: ['red', 'green'], fg: 'white'} }, hover: { color: {bg: '#f0f099', fg: 'black'}, delay: 500, }, text: { headers: { showText: true, fontSize: 14, allowSplit: true, hyphenate: true, }, cells: { showText: false, fontSize: 6, allowSplit: true, hyphenate: false, }, }, zoom: { zoomExtent: [1, 4], minimap: { size: { width: 200, height: 200, }, startCorner: 'bottom-right', hotCorners: true, attemptAvoidance: true, text: { showText: true, fontSize: 6, allowSplit: false, hyphenate: false }, border: { width: 2, color: 'black', }, panner: { width: 1, color: 'blue', }, }, }, };
* The future feature will draw the batches in blocks, instead of in columns. This will hopefully allow better * distribution to reduce open space, may be a better visualization for some types of data, and allow greater * freedom when trying to reserve space for the minimap without overlaying the data. */
random_line_split
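Each record in this dump pairs a source file with a masked span and closes with a type tag (random_line_split, identifier_name, identifier_body, or conditional_block). As a quick orientation, here is a minimal TypeScript sketch of how one record reassembles into the original file; the field names are assumptions for illustration, not necessarily the dump's actual schema:

// A fill-in-the-middle record, with hypothetical field names.
interface FimRecord {
  fileName: string;  // e.g. 'heatmap.data.ts'
  prefix: string;    // text before the masked span
  suffix: string;    // text after the masked span
  middle: string;    // the masked span itself
  fimType: 'random_line_split' | 'identifier_name' | 'identifier_body' | 'conditional_block';
}

// Concatenating prefix + middle + suffix restores the original file text.
function reassemble(r: FimRecord): string {
  return r.prefix + r.middle + r.suffix;
}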
heatmap.data.ts
import { Dictionary } from '../../../models/json/dictionary'; /** * Reuse of HTML DOM Rect class, added here to shut TypeScript up. */ export interface DOMRect { width: number; height: number; } /** * Specifies border settings for a component. The color can be a class (prefixed with '.') or color name. */ interface BorderRule { width: number, color: string, } /** * This is the format of the data to be provided to the component. */ export interface HeatCellData { /** * Value displayed in the text; also identifies which cell triggered a hover or click event. */ title: string, /** * This value corresponds to the name of a heat color specified in the heatmap options. */ value: boolean | string, } export interface HeatBatchData { /** * Header name for a batch (group) of cells. */ title: string, /** * Batches are normally displayed using alternating background (and maybe foreground) colors. You can, however, * specify a heat that would match a name in the heatmap options; thus creating custom colors for each batch. */ value: string | null, /** * The cells that make up this batch. One day, perhaps, we can make these cells and/or nested batches. Fun! */ cells?: Array<HeatCellData>, } /** * Used to specify the color of your cell data. Each value can be either a color string ('white' or '#334455', etc.), * or a CSS class prefixed with a period (.). You optionally specify heat colors in the options object you provide to * the component. */ export interface HeatColor { bg: string | string[], fg: string, border?: BorderRule, } /** * Each batch (groups of cells) has colors for the header, the body background, and the border style. You optionally * specify batch colors in the options object you provide to the component. If both the header and body have a border, * the header's bottom border and body's top border will have a width of 0 to prevent division. */ export interface BatchColor { header: HeatColor, body: HeatColor, } /** * Options specific to the display (sizes, viewability, padding, etc.) of the headers, batches and cells. */ export interface ViewRules { /** * If you have multiple heatmaps on a page, this value distinguishes the one you are drawing onto. * Needed by D3 to access the div to draw the canvas on. */ component?: string, /** * The height of the header bar for rendering batch titles. Defaults to a whopping 48 pixels. */ headerHeight?: number, /** * The minimum padding on both the left and right side of the chart. Defaults to 1 pixel. After all calculations * are done, if there is _extra_ space, then the padding will be _increased_ so we can center the chart. */ minSidePadding?: number, /** * This is guaranteed padding at the bottom of the chart. Useful if you want to draw a border. */ minBottomPadding?: number, /** * The future feature will draw the batches in blocks, instead of in columns. This will hopefully allow better * distribution to reduce open space, may be a better visualization for some types of data, and allow greater * freedom when trying to reserve space for the minimap without overlaying the data. */ stacked?: boolean, } /** * Options specific to the use of colors in the headers, batches and cells. */ interface ColorRules { /** * This is the default array of colors for the batches and their headers. The list of colors rotates for each batch * (not each column). The defaults are reddish-brown header on white background, and very light gray on very light * gray. A batch can override this default by specifying a heat (in its value property). 
*/ batchColors?: Array<BatchColor>, /** * This array colors the cells, batches and their headers. The property names correspond to the value property in * the heatmap data, which can just be a boolean, string, or array of strings. So you can have values of true/false, * or strings like 'S', 'M', 'L', etc. The value is not required to be a key in this object; if it is missing, the * noColor heat will be used as backup. */ heatColors?: Dictionary<HeatColor>, /** * Default color when no heat value matches. */ noColor?: HeatColor, /** * Whether to shade cells with multiple heat values using a gradient of those heat colors. The default is true, * but bear in mind that anything over three colors (really, it depends on the calculated cell width) tends to * look bad. If you turn gradients off, and a cell has multiple heats, the default gradient color (which may * also be a gradient) will be used instead, or the noColor property if you don't provide a default gradient. * Note that another problem with gradients is that it makes it difficult to determine which text color to use, so * either: Turn off the foreground color (fg == 'transparent'); or use high contrasts between your background and * foreground color options. */ showGradients?: boolean, /** * So, even if you do decide to display gradients, you can specify that you want to limit the number of colors to * shade. If the cell exceeds this limit, the default gradient or no-color value is used instead. */ maxGradients?: number, /** * The default color or color gradient to use for when gradients are turned off or a cell exceeds the maximum * number of heats. Note this is a full HeatColor, so you can specify the foreground color with it. */ defaultGradient?: HeatColor, } /** * Options specific to handling hovering over the cells. */ interface HoverRules { /** * A color to highlight the background of a cell when the mouse hovers over it. Hover colors can never be gradients. */ color?: HeatColor, /** * How long to wait, in milliseconds, before firing a hover event over one of the cells. Defaults to 500ms. */ delay?: number, } /** * Collects rules for displaying text in various heatmap components. */ interface TextRule { /** * Whether to display the title. The default for headers is true; for cells, false. */ showText?: boolean, /** * The size, in pixels, for text. Defaults to 14px in headers, 6px in cells. */ fontSize?: number, /** * Whether to allow the text to be split on multiple lines. Note that this is dependent on how tall the component * is. Splitting is based on whitespace, slashes (/) and hyphens (-). The default is true. */ allowSplit?: boolean, /** * Whether to hyphenate the text in the component, if there is no room to fully write it, and if splitting doesn't * do the trick. Hyphenating is based solely on the presence of English consonants, and is therefore not very * robust, because loading an entire dictionary seems like overkill. (This may change in the future, but for now it * just doesn't seem viable.) Note that this is still dependent on how tall the component is. The default is true * for headers; for cells, false. Text that still doesn't fit, after splitting and hyphenating, if they are * allowed, will result in truncating the text, and adding an ellipsis. */ hyphenate?: boolean, } interface TextRules { headers?: TextRule, cells?: TextRule, } /** * Rules for displaying the minimap. */ interface MinimapRules { /** * Where to draw the minimap within the heatmap. Set to null to not draw a minimap at all. 
This is just the width * and height of the minimap, not where it will be placed. Note that the minimap will overlay the heatmap, * obscuring any cells that are under it. The default is a 200x200 area. If the heatmap has no zoom extent, or * the min and max range of the zoom extent are the same value, then this is the same as requesting no minimap. */ size?: DOMRect | null, /** * The starting location of the minimap. The default is 'bottom-right'. */ startCorner?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right', /** * Whether the minimap should have hot corners (buttons in each corner of the minimap) that will cause it to move * to the equivalent corner of the heatmap, in order to move it out of the user's line of sight. The default is * true. */ hotCorners?: boolean, /** * This future option will allow the heatmap to attempt to redraw its columns to avoid overlapping the minimap. * The default will be true, when this option is implemented. Also when implemented, the minimap corners will be * limited to the bottom corners, because rearranging the headers is just not an option. */ attemptAvoidance?: boolean, /** * The minimap can only draw header text; it's nutty to try to draw a 1px font, let alone to write text in each * cell. The minimap's header does not try to be a mirror of the heatmap, it just writes a single line, ellipsed * if necessary, at this tiny font. While the columns should be a perfect mirror of the heatmap, the cells will * just be too small to write in. So the split and hyphenate options are ignored. If the provided font size is * too tiny for the minimap header height, the header title won't be written at all. Defaults to 6px. */ text?: TextRule, /** * The thickness and color of the minimap border. Defaults to 2px and black. */ border?: BorderRule, /** * The thickness and color of the minimap zoom panner (the border that indicates the heatmap zoom area). * Defaults to 1px and dark blue. */ panner?: BorderRule, } /** * Options specific to the zooming and the display of the minimap. */ interface ZoomRules { /** * The magnification range for zooming in on the heatmap. Both numbers must be positive (>0), and can be decimal. * A value of 1 means full view, or 1x magnification. Higher numbers mean you are zooming in, and numbers less than * 1 mean you are shrinking the heatmap. The default is [1, 4]. The maximum magnification you want is going to * depend on how much data you have. Extremely large amounts of data will cause your cells to look like pebbles or * even dots, so you will want to bump that number up if that is your situation. */ zoomExtent?: [number, number] | null, /** * If the cells are really tiny, even rendering the text at a tiny font looks ridiculous. But if you zoom in to * the given magnification, the cell text can be made visible. Note that if you turn cell text off, it overrides * this setting. The default for this value is equal to the lowest zoomExtent value. */ cellTitleExtent?: number, /** * How to draw the minimap within the heatmap. */ minimap?: MinimapRules, } /** * You must provide an options object to the component, even if you do not wish to override any defaults. It's required, * because there is a high likelihood that you won't be fond of the defaults, and to encourage you to use application * colors that match your desired user experience. 
*/ export class HeatmapOptions { view?: ViewRules; color?: ColorRules; hover?: HoverRules; text?: TextRules; zoom?: ZoomRules; static merge(custom: HeatmapOptions, defaults: HeatmapOptions): HeatmapOptions { const merged = new HeatmapOptions(); merged.view = Object.assign({}, defaults.view, custom.view); merged.color = Object.assign({}, defaults.color, custom.color); merged.hover = Object.assign({}, defaults.hover, custom.hover); merged.text = Object.assign({}, defaults.text, custom.text); merged.zoom = Object.assign({minimap: {}}, defaults.zoom, custom.zoom); if (defaults.text && custom.text) { merged.text.headers = Object.assign({}, defaults.text.headers, custom.text.headers); merged.text.cells = Object.assign({}, defaults.text.cells, custom.text.cells); } if (defaults.zoom && defaults.zoom.minimap && custom.zoom && custom.zoom.minimap) { merged.zoom.minimap = Object.assign({}, defaults.zoom.minimap, custom.zoom.minimap); if (defaults.zoom.minimap.text && custom.zoom.minimap.text) { merged.zoom.minimap.text = Object.assign({}, defaults.zoom.minimap.text, custom.zoom.minimap.text); } } // validate values merged.view.headerHeight = Math.max(0, merged.view.headerHeight); merged.view.minSidePadding = Math.max(0, merged.view.minSidePadding); merged.view.minBottomPadding = Math.max(0, merged.view.minBottomPadding); if (!merged.color.batchColors) { merged.color.batchColors = []; } if (!merged.color.heatColors) { merged.color.heatColors = {}; } if (!merged.color.noColor) { merged.color.noColor = { bg: 'transparent', fg: 'transparent', }; } merged.color.maxGradients = Math.max(1, merged.color.maxGradients); if (!merged.hover.color) { merged.hover.color = { bg: 'transparent', fg: 'transparent', }; } else if (merged.hover.color.bg && Array.isArray(merged.hover.color.bg)) { merged.hover.color.bg = (merged.hover.color.bg.length > 0) ? 
merged.hover.color.bg[0] : 'transparent'; } merged.hover.delay = Math.max(1, merged.hover.delay); if (merged.text.headers && merged.text.headers.fontSize) { merged.text.headers.fontSize = Math.max(0, merged.text.headers.fontSize); } if (merged.text.cells && merged.text.cells.fontSize) { merged.text.cells.fontSize = Math.max(0, merged.text.cells.fontSize); } // make sure we have a valid zoom extent, the numbers are valid, and they are sorted let zoom: number[] = merged.zoom.zoomExtent; if (!zoom || (zoom.length < 1)) { zoom = [1, 1]; } else if (zoom.length < 2) { zoom = [1, zoom[0]]; } else if (zoom.length > 2) { zoom = merged.zoom.zoomExtent.slice(0, 2); } zoom = zoom.map(extent => Math.max(.01, extent)).sort((a, b) => a - b); merged.zoom.cellTitleExtent = Math.min(Math.max(zoom[0], merged.zoom.cellTitleExtent), zoom[1]) || zoom[0]; // ensure NaN/null/undefined does not trip us up merged.zoom.zoomExtent = zoom as [number, number]; if (merged.zoom.zoomExtent[0] === merged.zoom.zoomExtent[1]) { merged.zoom.zoomExtent = null; merged.zoom.minimap.size = null; } // validate the minimap settings if (merged.zoom.minimap.size) { merged.zoom.minimap.size.width = Math.max(0, merged.zoom.minimap.size.width); merged.zoom.minimap.size.height = Math.max(0, merged.zoom.minimap.size.height); if ((merged.zoom.minimap.size.width === 0) && (merged.zoom.minimap.size.height === 0)) { merged.zoom.minimap.size = null; } } if (merged.zoom.minimap.text) { merged.zoom.minimap.text.fontSize = Math.max(0, merged.zoom.minimap.text.fontSize); merged.zoom.minimap.text.allowSplit = false; merged.zoom.minimap.text.hyphenate = false; } if (merged.zoom.minimap.border) { merged.zoom.minimap.border.width = Math.max(1, merged.zoom.minimap.border.width); } if (merged.zoom.minimap.panner) { merged.zoom.minimap.panner.width = Math.max(1, merged.zoom.minimap.panner.width); } return merged; } static
(options: HeatmapOptions): [number, number] { if (!options || !options.zoom || !options.zoom.zoomExtent || (options.zoom.zoomExtent.length < 2)) { return null; } let zoom = options.zoom.zoomExtent.map(extent => Math.max(.01, 1 / extent)).sort((a, b) => a - b); return zoom as [number, number]; } } export const DEFAULT_OPTIONS: HeatmapOptions = { view: { component: '', headerHeight: 48, minSidePadding: 1, minBottomPadding: 1, }, color: { batchColors: [ {header: {bg: '#e3f2fd', fg: '#333'}, body: {bg: '#e3f2fd', fg: 'black'}}, {header: {bg: 'transparent', fg: '#333'}, body: {bg: 'transparent', fg: 'black'}}, ], heatColors: { 'true': {bg: '#e66', fg: 'black'}, 'false': {bg: '#ccc', fg: 'black'}, }, noColor: {bg: 'transparent', fg: 'black'}, showGradients: true, maxGradients: 3, defaultGradient: {bg: ['red', 'green'], fg: 'white'} }, hover: { color: {bg: '#f0f099', fg: 'black'}, delay: 500, }, text: { headers: { showText: true, fontSize: 14, allowSplit: true, hyphenate: true, }, cells: { showText: false, fontSize: 6, allowSplit: true, hyphenate: false, }, }, zoom: { zoomExtent: [1, 4], minimap: { size: { width: 200, height: 200, }, startCorner: 'bottom-right', hotCorners: true, attemptAvoidance: true, text: { showText: true, fontSize: 6, allowSplit: false, hyphenate: false }, border: { width: 2, color: 'black', }, panner: { width: 1, color: 'blue', }, }, }, };
reciprocateZoom
identifier_name
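The masked identifier in the record above names the static helper whose body appears in the record's trailing chunk: it maps a zoom extent to its reciprocals. A minimal usage sketch, assuming the HeatmapOptions class and DEFAULT_OPTIONS from the file itself (the sample extent is invented for illustration):

// Each extent e becomes max(0.01, 1 / e); the pair is then sorted ascending.
const opts = HeatmapOptions.merge({ zoom: { zoomExtent: [1, 4] } }, DEFAULT_OPTIONS);
const reciprocal = HeatmapOptions.reciprocateZoom(opts); // [0.25, 1]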
heatmap.data.ts
import { Dictionary } from '../../../models/json/dictionary'; /** * Reuse of HTML DOM Rect class, added here to shut TypeScript up. */ export interface DOMRect { width: number; height: number; } /** * Specifies border settings for a component. The color can be a class (prefixed with '.') or color name. */ interface BorderRule { width: number, color: string, } /** * This is the format of the data to be provided to the component. */ export interface HeatCellData { /** * Value displayed in the text; also identifies which cell triggered a hover or click event. */ title: string, /** * This value corresponds to the name of a heat color specified in the heatmap options. */ value: boolean | string, } export interface HeatBatchData { /** * Header name for a batch (group) of cells. */ title: string, /** * Batches are normally displayed using alternating background (and maybe foreground) colors. You can, however, * specify a heat that would match a name in the heatmap options; thus creating custom colors for each batch. */ value: string | null, /** * The cells that make up this batch. One day, perhaps, we can make these cells and/or nested batches. Fun! */ cells?: Array<HeatCellData>, } /** * Used to specify the color of your cell data. Each value can be either a color string ('white' or '#334455', etc.), * or a CSS class prefixed with a period (.). You optionally specify heat colors in the options object you provide to * the component. */ export interface HeatColor { bg: string | string[], fg: string, border?: BorderRule, } /** * Each batch (groups of cells) has colors for the header, the body background, and the border style. You optionally * specify batch colors in the options object you provide to the component. If both the header and body have a border, * the header's bottom border and body's top border will have a width of 0 to prevent division. */ export interface BatchColor { header: HeatColor, body: HeatColor, } /** * Options specific to the display (sizes, viewability, padding, etc.) of the headers, batches and cells. */ export interface ViewRules { /** * If you have multiple heatmaps on a page, this value distinguishes the one you are drawing onto. * Needed by D3 to access the div to draw the canvas on. */ component?: string, /** * The height of the header bar for rendering batch titles. Defaults to a whopping 48 pixels. */ headerHeight?: number, /** * The minimum padding on both the left and right side of the chart. Defaults to 1 pixel. After all calculations * are done, if there is _extra_ space, then the padding will be _increased_ so we can center the chart. */ minSidePadding?: number, /** * This is guaranteed padding at the bottom of the chart. Useful if you want to draw a border. */ minBottomPadding?: number, /** * The future feature will draw the batches in blocks, instead of in columns. This will hopefully allow better * distribution to reduce open space, may be a better visualization for some types of data, and allow greater * freedom when trying to reserve space for the minimap without overlaying the data. */ stacked?: boolean, } /** * Options specific to the use of colors in the headers, batches and cells. */ interface ColorRules { /** * This is the default array of colors for the batches and their headers. The list of colors rotates for each batch * (not each column). The defaults are reddish-brown header on white background, and very light gray on very light * gray. A batch can override this default by specifying a heat (in its value property). 
*/ batchColors?: Array<BatchColor>, /** * This array colors the cells, batches and their headers. The property names correspond to the value property in * the heatmap data, which can just be a boolean, string, or array of strings. So you can have values of true/false, * or strings like 'S', 'M', 'L', etc. The value is not required to be a key in this object; if it is missing, the * noColor heat will be used as backup. */ heatColors?: Dictionary<HeatColor>, /** * Default color when no heat value matches. */ noColor?: HeatColor, /** * Whether to shade cells with multiple heat values using a gradient of those heat colors. The default is true, * but bear in mind that anything over three colors (really, it depends on the calculated cell width) tends to * look bad. If you turn gradients off, and a cell has multiple heats, the default gradient color (which may * also be a gradient) will be used instead, or the noColor property if you don't provide a default gradient. * Note that another problem with gradients is that it makes it difficult to determine which text color to use, so * either: Turn off the foreground color (fg == 'transparent'); or use high contrasts between your background and * foreground color options. */ showGradients?: boolean, /** * So, even if you do decide to display gradients, you can specify that you want to limit the number of colors to * shade. If the cell exceeds this limit, the default gradient or no-color value is used instead. */ maxGradients?: number, /** * The default color or color gradient to use for when gradients are turned off or a cell exceeds the maximum * number of heats. Note this is a full HeatColor, so you can specify the foreground color with it. */ defaultGradient?: HeatColor, } /** * Options specific to handling hovering over the cells. */ interface HoverRules { /** * A color to highlight the background of a cell when the mouse hovers over it. Hover colors can never be gradients. */ color?: HeatColor, /** * How long to wait, in milliseconds, before firing a hover event over one of the cells. Defaults to 500ms. */ delay?: number, } /** * Collects rules for displaying text in various heatmap components. */ interface TextRule { /** * Whether to display the title. The default for headers is true; for cells, false. */ showText?: boolean, /** * The size, in pixels, for text. Defaults to 14px in headers, 6px in cells. */ fontSize?: number, /** * Whether to allow the text to be split on multiple lines. Note that this is dependent on how tall the component * is. Splitting is based on whitespace, slashes (/) and hyphens (-). The default is true. */ allowSplit?: boolean, /** * Whether to hyphenate the text in the component, if there is no room to fully write it, and if splitting doesn't * do the trick. Hyphenating is based solely on the presence of English consonants, and is therefore not very * robust, because loading an entire dictionary seems like overkill. (This may change in the future, but for now it * just doesn't seem viable.) Note that this is still dependent on how tall the component is. The default is true * for headers; for cells, false. Text that still doesn't fit, after splitting and hyphenating, if they are * allowed, will result in truncating the text, and adding an ellipsis. */ hyphenate?: boolean, } interface TextRules { headers?: TextRule, cells?: TextRule, } /** * Rules for displaying the minimap. */ interface MinimapRules { /** * Where to draw the minimap within the heatmap. Set to null to not draw a minimap at all. 
This is just the width * and height of the minimap, not where it will be placed. Note that the minimap will overlay the heatmap, * obscuring any cells that are under it. The default is a 200x200 area. If the heatmap has no zoom extent, or * the min and max range of the zoom extent are the same value, then this is the same as requesting no minimap. */ size?: DOMRect | null, /** * The starting location of the minimap. The default is 'bottom-right'. */ startCorner?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right', /** * Whether the minimap should have hot corners (buttons in each corner of the minimap) that will cause it to move * to the equivalent corner of the heatmap, in order to move it out of the user's line of sight. The default is * true. */ hotCorners?: boolean, /** * This future option will allow the heatmap to attempt to redraw its columns to avoid overlapping the minimap. * The default will be true, when this option is implemented. Also when implemented, the minimap corners will be * limited to the bottom corners, because rearranging the headers is just not an option. */ attemptAvoidance?: boolean, /** * The minimap can only draw header text; it's nutty to try to draw a 1px font, let alone to write text in each * cell. The minimap's header does not try to be a mirror of the heatmap, it just writes a single line, ellipsed * if necessary, at this tiny font. While the columns should be a perfect mirror of the heatmap, the cells will * just be too small to write in. So the split and hyphenate options are ignored. If the provided font size is * too tiny for the minimap header height, the header title won't be written at all. Defaults to 6px. */ text?: TextRule, /** * The thickness and color of the minimap border. Defaults to 2px and black. */ border?: BorderRule, /** * The thickness and color of the minimap zoom panner (the border that indicates the heatmap zoom area). * Defaults to 1px and dark blue. */ panner?: BorderRule, } /** * Options specific to the zooming and the display of the minimap. */ interface ZoomRules { /** * The magnification range for zooming in on the heatmap. Both numbers must be positive (>0), and can be decimal. * A value of 1 means full view, or 1x magnification. Higher numbers mean you are zooming in, and numbers less than * 1 mean you are shrinking the heatmap. The default is [1, 4]. The maximum magnification you want is going to * depend on how much data you have. Extremely large amounts of data will cause your cells to look like pebbles or * even dots, so you will want to bump that number up if that is your situation. */ zoomExtent?: [number, number] | null, /** * If the cells are really tiny, even rendering the text at a tiny font looks ridiculous. But if you zoom in to * the given magnification, the cell text can be made visible. Note that if you turn cell text off, it overrides * this setting. The default for this value is equal to the lowest zoomExtent value. */ cellTitleExtent?: number, /** * How to draw the minimap within the heatmap. */ minimap?: MinimapRules, } /** * You must provide an options object to the component, even if you do not wish to override any defaults. It's required, * because there is a high likelihood that you won't be fond of the defaults, and to encourage you to use application * colors that match your desired user experience. 
*/ export class HeatmapOptions { view?: ViewRules; color?: ColorRules; hover?: HoverRules; text?: TextRules; zoom?: ZoomRules; static merge(custom: HeatmapOptions, defaults: HeatmapOptions): HeatmapOptions { const merged = new HeatmapOptions(); merged.view = Object.assign({}, defaults.view, custom.view); merged.color = Object.assign({}, defaults.color, custom.color); merged.hover = Object.assign({}, defaults.hover, custom.hover); merged.text = Object.assign({}, defaults.text, custom.text); merged.zoom = Object.assign({minimap: {}}, defaults.zoom, custom.zoom); if (defaults.text && custom.text) { merged.text.headers = Object.assign({}, defaults.text.headers, custom.text.headers); merged.text.cells = Object.assign({}, defaults.text.cells, custom.text.cells); } if (defaults.zoom && defaults.zoom.minimap && custom.zoom && custom.zoom.minimap) { merged.zoom.minimap = Object.assign({}, defaults.zoom.minimap, custom.zoom.minimap); if (defaults.zoom.minimap.text && custom.zoom.minimap.text) { merged.zoom.minimap.text = Object.assign({}, defaults.zoom.minimap.text, custom.zoom.minimap.text); } } // validate values merged.view.headerHeight = Math.max(0, merged.view.headerHeight); merged.view.minSidePadding = Math.max(0, merged.view.minSidePadding); merged.view.minBottomPadding = Math.max(0, merged.view.minBottomPadding); if (!merged.color.batchColors) { merged.color.batchColors = []; } if (!merged.color.heatColors) { merged.color.heatColors = {}; } if (!merged.color.noColor) { merged.color.noColor = { bg: 'transparent', fg: 'transparent', }; } merged.color.maxGradients = Math.max(1, merged.color.maxGradients); if (!merged.hover.color) { merged.hover.color = { bg: 'transparent', fg: 'transparent', }; } else if (merged.hover.color.bg && Array.isArray(merged.hover.color.bg)) { merged.hover.color.bg = (merged.hover.color.bg.length > 0) ? 
merged.hover.color.bg[0] : 'transparent'; } merged.hover.delay = Math.max(1, merged.hover.delay); if (merged.text.headers && merged.text.headers.fontSize) { merged.text.headers.fontSize = Math.max(0, merged.text.headers.fontSize); } if (merged.text.cells && merged.text.cells.fontSize) { merged.text.cells.fontSize = Math.max(0, merged.text.cells.fontSize); } // make sure we have a valid zoom extent, the numbers are valid, and they are sorted let zoom: number[] = merged.zoom.zoomExtent; if (!zoom || (zoom.length < 1)) { zoom = [1, 1]; } else if (zoom.length < 2) { zoom = [1, zoom[0]]; } else if (zoom.length > 2) { zoom = merged.zoom.zoomExtent.slice(0, 2); } zoom = zoom.map(extent => Math.max(.01, extent)).sort((a, b) => a - b); merged.zoom.cellTitleExtent = Math.min(Math.max(zoom[0], merged.zoom.cellTitleExtent), zoom[1]) || zoom[0]; // ensure NaN/null/undefined does not trip us up merged.zoom.zoomExtent = zoom as [number, number]; if (merged.zoom.zoomExtent[0] === merged.zoom.zoomExtent[1]) { merged.zoom.zoomExtent = null; merged.zoom.minimap.size = null; } // validate the minimap settings if (merged.zoom.minimap.size) { merged.zoom.minimap.size.width = Math.max(0, merged.zoom.minimap.size.width); merged.zoom.minimap.size.height = Math.max(0, merged.zoom.minimap.size.height); if ((merged.zoom.minimap.size.width === 0) && (merged.zoom.minimap.size.height === 0)) { merged.zoom.minimap.size = null; } } if (merged.zoom.minimap.text) { merged.zoom.minimap.text.fontSize = Math.max(0, merged.zoom.minimap.text.fontSize); merged.zoom.minimap.text.allowSplit = false; merged.zoom.minimap.text.hyphenate = false; } if (merged.zoom.minimap.border) { merged.zoom.minimap.border.width = Math.max(1, merged.zoom.minimap.border.width); } if (merged.zoom.minimap.panner) { merged.zoom.minimap.panner.width = Math.max(1, merged.zoom.minimap.panner.width); } return merged; } static reciprocateZoom(options: HeatmapOptions): [number, number]
} export const DEFAULT_OPTIONS: HeatmapOptions = { view: { component: '', headerHeight: 48, minSidePadding: 1, minBottomPadding: 1, }, color: { batchColors: [ {header: {bg: '#e3f2fd', fg: '#333'}, body: {bg: '#e3f2fd', fg: 'black'}}, {header: {bg: 'transparent', fg: '#333'}, body: {bg: 'transparent', fg: 'black'}}, ], heatColors: { 'true': {bg: '#e66', fg: 'black'}, 'false': {bg: '#ccc', fg: 'black'}, }, noColor: {bg: 'transparent', fg: 'black'}, showGradients: true, maxGradients: 3, defaultGradient: {bg: ['red', 'green'], fg: 'white'} }, hover: { color: {bg: '#f0f099', fg: 'black'}, delay: 500, }, text: { headers: { showText: true, fontSize: 14, allowSplit: true, hyphenate: true, }, cells: { showText: false, fontSize: 6, allowSplit: true, hyphenate: false, }, }, zoom: { zoomExtent: [1, 4], minimap: { size: { width: 200, height: 200, }, startCorner: 'bottom-right', hotCorners: true, attemptAvoidance: true, text: { showText: true, fontSize: 6, allowSplit: false, hyphenate: false }, border: { width: 2, color: 'black', }, panner: { width: 1, color: 'blue', }, }, }, };
{ if (!options || !options.zoom || !options.zoom.zoomExtent || (options.zoom.zoomExtent.length < 2)) { return null; } let zoom = options.zoom.zoomExtent.map(extent => Math.max(.01, 1 / extent)).sort((a, b) => a - b); return zoom as [number, number]; }
identifier_body
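The identifier body captured above belongs to reciprocateZoom, but the surrounding merge code is where zoomExtent gets normalized. A sketch of the normalization rules merge applies, with illustrative inputs:

// Illustrative inputs and what merge() turns them into:
//   undefined or []  -> [1, 1], which then nulls zoomExtent and the minimap size
//   [4]              -> [1, 4]     (a single value becomes the maximum)
//   [4, 1, 9]        -> [1, 4]     (extra entries dropped, then clamped and sorted)
//   [0, 2]           -> [0.01, 2]  (each extent is clamped to at least 0.01)
const normalized = HeatmapOptions.merge({ zoom: { zoomExtent: [4] } }, DEFAULT_OPTIONS);
console.log(normalized.zoom.zoomExtent); // [1, 4]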
heatmap.data.ts
import { Dictionary } from '../../../models/json/dictionary'; /** * Reuse of HTML DOM Rect class, added here to shut TypeScript up. */ export interface DOMRect { width: number; height: number; } /** * Specifies border settings for a component. The color can be a class (prefixed with '.') or color name. */ interface BorderRule { width: number, color: string, } /** * This is the format of the data to be provided to the component. */ export interface HeatCellData { /** * Value displayed in the text; also identifies which cell triggered a hover or click event. */ title: string, /** * This value corresponds to the name of a heat color specified in the heatmap options. */ value: boolean | string, } export interface HeatBatchData { /** * Header name for a batch (group) of cells. */ title: string, /** * Batches are normally displayed using alternating background (and maybe foreground) colors. You can, however, * specify a heat that would match a name in the heatmap options; thus creating custom colors for each batch. */ value: string | null, /** * The cells that make up this batch. One day, perhaps, we can make these cells and/or nested batches. Fun! */ cells?: Array<HeatCellData>, } /** * Used to specify the color of your cell data. Each value can be either a color string ('white' or '#334455', etc.), * or a CSS class prefixed with a period (.). You optionally specify heat colors in the options object you provide to * the component. */ export interface HeatColor { bg: string | string[], fg: string, border?: BorderRule, } /** * Each batch (groups of cells) has colors for the header, the body background, and the border style. You optionally * specify batch colors in the options object you provide to the component. If both the header and body have a border, * the header's bottom border and body's top border will have a width of 0 to prevent division. */ export interface BatchColor { header: HeatColor, body: HeatColor, } /** * Options specific to the display (sizes, viewability, padding, etc.) of the headers, batches and cells. */ export interface ViewRules { /** * If you have multiple heatmaps on a page, this value distinguishes the one you are drawing onto. * Needed by D3 to access the div to draw the canvas on. */ component?: string, /** * The height of the header bar for rendering batch titles. Defaults to a whopping 48 pixels. */ headerHeight?: number, /** * The minimum padding on both the left and right side of the chart. Defaults to 1 pixel. After all calculations * are done, if there is _extra_ space, then the padding will be _increased_ so we can center the chart. */ minSidePadding?: number, /** * This is guaranteed padding at the bottom of the chart. Useful if you want to draw a border. */ minBottomPadding?: number, /** * The future feature will draw the batches in blocks, instead of in columns. This will hopefully allow better * distribution to reduce open space, may be a better visualization for some types of data, and allow greater * freedom when trying to reserve space for the minimap without overlaying the data. */ stacked?: boolean, } /** * Options specific to the use of colors in the headers, batches and cells. */ interface ColorRules { /** * This is the default array of colors for the batches and their headers. The list of colors rotates for each batch * (not each column). The defaults are reddish-brown header on white background, and very light gray on very light * gray. A batch can override this default by specifying a heat (in its value property). 
*/ batchColors?: Array<BatchColor>, /** * This array colors the cells, batches and their headers. The property names correspond to the value property in * the heatmap data, which can just be a boolean, string, or array of strings. So you can have values of true/false, * or strings like 'S', 'M', 'L', etc. The value is not required to be a key in this object; if it is missing, the * noColor heat will be used as backup. */ heatColors?: Dictionary<HeatColor>, /** * Default color when no heat value matches. */ noColor?: HeatColor, /** * Whether to shade cells with multiple heat values using a gradient of those heat colors. The default is true, * but bear in mind that anything over three colors (really, it depends on the calculated cell width) tends to * look bad. If you turn gradients off, and a cell has multiple heats, the default gradient color (which may * also be a gradient) will be used instead, or the noColor property if you don't provide a default gradient. * Note that another problem with gradients is that it makes it difficult to determine which text color to use, so * either: Turn off the foreground color (fg == 'transparent'); or use high contrasts between your background and * foreground color options. */ showGradients?: boolean, /** * So, even if you do decide to display gradients, you can specify that you want to limit the number of colors to * shade. If the cell exceeds this limit, the default gradient or no-color value is used instead. */ maxGradients?: number, /** * The default color or color gradient to use for when gradients are turned off or a cell exceeds the maximum * number of heats. Note this is a full HeatColor, so you can specify the foreground color with it. */ defaultGradient?: HeatColor, } /** * Options specific to handling hovering over the cells. */ interface HoverRules { /** * A color to highlight the background of a cell when the mouse hovers over it. Hover colors can never be gradients. */ color?: HeatColor, /** * How long to wait, in milliseconds, before firing a hover event over one of the cells. Defaults to 500ms. */ delay?: number, } /** * Collects rules for displaying text in various heatmap components. */ interface TextRule { /** * Whether to display the title. The default for headers is true; for cells, false. */ showText?: boolean, /** * The size, in pixels, for text. Defaults to 14px in headers, 6px in cells. */ fontSize?: number, /** * Whether to allow the text to be split on multiple lines. Note that this is dependent on how tall the component * is. Splitting is based on whitespace, slashes (/) and hyphens (-). The default is true. */ allowSplit?: boolean, /** * Whether to hyphenate the text in the component, if there is no room to fully write it, and if splitting doesn't * do the trick. Hyphenating is based solely on the presence of English consonants, and is therefore not very * robust, because loading an entire dictionary seems like overkill. (This may change in the future, but for now it * just doesn't seem viable.) Note that this is still dependent on how tall the component is. The default is true * for headers; for cells, false. Text that still doesn't fit, after splitting and hyphenating, if they are * allowed, will result in truncating the text, and adding an ellipsis. */ hyphenate?: boolean, } interface TextRules { headers?: TextRule, cells?: TextRule, } /** * Rules for displaying the minimap. */ interface MinimapRules { /** * Where to draw the minimap within the heatmap. Set to null to not draw a minimap at all. 
This is just the width * and height of the minimap, not where it will be placed. Note that the minimap will overlay the heatmap, * obscuring any cells that are under it. The default is a 200x200 area. If the heatmap has no zoom extent, or * the min and max range of the zoom extent are the same value, then this is the same as requesting no minimap. */ size?: DOMRect | null, /** * The starting location of the minimap. The default is 'bottom-right'. */ startCorner?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right', /** * Whether the minimap should have hot corners (buttons in each corner of the minimap) that will cause it to move * to the equivalent corner of the heatmap, in order to move it out of the user's line of sight. The default is * true. */ hotCorners?: boolean, /** * This future option will allow the heatmap to attempt to redraw its columns to avoid overlapping the minimap. * The default will be true, when this option is implemented. Also when implemented, the minimap corners will be * limited to the bottom corners, because rearranging the headers is just not an option. */ attemptAvoidance?: boolean, /** * The minimap can only draw header text; it's nutty to try to draw a 1px font, let alone to write text in each * cell. The minimap's header does not try to be a mirror of the heatmap, it just writes a single line, ellipsed * if necessary, at this tiny font. While the columns should be a perfect mirror of the heatmap, the cells will * just be too small to write in. So the split and hyphenate options are ignored. If the provided font size is * too tiny for the minimap header height, the header title won't be written at all. Defaults to 6px. */ text?: TextRule, /** * The thickness and color of the minimap border. Defaults to 2px and black. */ border?: BorderRule, /** * The thickness and color of the minimap zoom panner (the border that indicates the heatmap zoom area). * Defaults to 1px and dark blue. */ panner?: BorderRule, } /** * Options specific to the zooming and the display of the minimap. */ interface ZoomRules { /** * The magnification range for zooming in on the heatmap. Both numbers must be positive (>0), and can be decimal. * A value of 1 means full view, or 1x magnification. Higher numbers mean you are zooming in, and numbers less than * 1 mean you are shrinking the heatmap. The default is [1, 4]. The maximum magnification you want is going to * depend on how much data you have. Extremely large amounts of data will cause your cells to look like pebbles or * even dots, so you will want to bump that number up if that is your situation. */ zoomExtent?: [number, number] | null, /** * If the cells are really tiny, even rendering the text at a tiny font looks ridiculous. But if you zoom in to * the given magnification, the cell text can be made visible. Note that if you turn cell text off, it overrides * this setting. The default for this value is equal to the lowest zoomExtent value. */ cellTitleExtent?: number, /** * How to draw the minimap within the heatmap. */ minimap?: MinimapRules, } /** * You must provide an options object to the component, even if you do not wish to override any defaults. It's required, * because there is a high likelihood that you won't be fond of the defaults, and to encourage you to use application * colors that match your desired user experience. 
*/ export class HeatmapOptions { view?: ViewRules; color?: ColorRules; hover?: HoverRules; text?: TextRules; zoom?: ZoomRules; static merge(custom: HeatmapOptions, defaults: HeatmapOptions): HeatmapOptions { const merged = new HeatmapOptions(); merged.view = Object.assign({}, defaults.view, custom.view); merged.color = Object.assign({}, defaults.color, custom.color); merged.hover = Object.assign({}, defaults.hover, custom.hover); merged.text = Object.assign({}, defaults.text, custom.text); merged.zoom = Object.assign({minimap: {}}, defaults.zoom, custom.zoom); if (defaults.text && custom.text) { merged.text.headers = Object.assign({}, defaults.text.headers, custom.text.headers); merged.text.cells = Object.assign({}, defaults.text.cells, custom.text.cells); } if (defaults.zoom && defaults.zoom.minimap && custom.zoom && custom.zoom.minimap) { merged.zoom.minimap = Object.assign({}, defaults.zoom.minimap, custom.zoom.minimap); if (defaults.zoom.minimap.text && custom.zoom.minimap.text) { merged.zoom.minimap.text = Object.assign({}, defaults.zoom.minimap.text, custom.zoom.minimap.text); } } // validate values merged.view.headerHeight = Math.max(0, merged.view.headerHeight); merged.view.minSidePadding = Math.max(0, merged.view.minSidePadding); merged.view.minBottomPadding = Math.max(0, merged.view.minBottomPadding); if (!merged.color.batchColors) { merged.color.batchColors = []; } if (!merged.color.heatColors) { merged.color.heatColors = {}; } if (!merged.color.noColor) { merged.color.noColor = { bg: 'transparent', fg: 'transparent', }; } merged.color.maxGradients = Math.max(1, merged.color.maxGradients); if (!merged.hover.color) { merged.hover.color = { bg: 'transparent', fg: 'transparent', }; } else if (merged.hover.color.bg && Array.isArray(merged.hover.color.bg))
merged.hover.delay = Math.max(1, merged.hover.delay); if (merged.text.headers && merged.text.headers.fontSize) { merged.text.headers.fontSize = Math.max(0, merged.text.headers.fontSize); } if (merged.text.cells && merged.text.cells.fontSize) { merged.text.cells.fontSize = Math.max(0, merged.text.cells.fontSize); } // make sure we have a valid zoom extent, the numbers are valid, and they are sorted let zoom: number[] = merged.zoom.zoomExtent; if (!zoom || (zoom.length < 1)) { zoom = [1, 1]; } else if (zoom.length < 2) { zoom = [1, zoom[0]]; } else if (zoom.length > 2) { zoom = merged.zoom.zoomExtent.slice(0, 2); } zoom = zoom.map(extent => Math.max(.01, extent)).sort((a, b) => a - b); merged.zoom.cellTitleExtent = Math.min(Math.max(zoom[0], merged.zoom.cellTitleExtent), zoom[1]) || zoom[0]; // ensure NaN/null/undefined does not trip us up merged.zoom.zoomExtent = zoom as [number, number]; if (merged.zoom.zoomExtent[0] === merged.zoom.zoomExtent[1]) { merged.zoom.zoomExtent = null; merged.zoom.minimap.size = null; } // validate the minimap settings if (merged.zoom.minimap.size) { merged.zoom.minimap.size.width = Math.max(0, merged.zoom.minimap.size.width); merged.zoom.minimap.size.height = Math.max(0, merged.zoom.minimap.size.height); if ((merged.zoom.minimap.size.width === 0) && (merged.zoom.minimap.size.height === 0)) { merged.zoom.minimap.size = null; } } if (merged.zoom.minimap.text) { merged.zoom.minimap.text.fontSize = Math.max(0, merged.zoom.minimap.text.fontSize); merged.zoom.minimap.text.allowSplit = false; merged.zoom.minimap.text.hyphenate = false; } if (merged.zoom.minimap.border) { merged.zoom.minimap.border.width = Math.max(1, merged.zoom.minimap.border.width); } if (merged.zoom.minimap.panner) { merged.zoom.minimap.panner.width = Math.max(1, merged.zoom.minimap.panner.width); } return merged; } static reciprocateZoom(options: HeatmapOptions): [number, number] { if (!options || !options.zoom || !options.zoom.zoomExtent || (options.zoom.zoomExtent.length < 2)) { return null; } let zoom = options.zoom.zoomExtent.map(extent => Math.max(.01, 1 / extent)).sort((a, b) => a - b); return zoom as [number, number]; } } export const DEFAULT_OPTIONS: HeatmapOptions = { view: { component: '', headerHeight: 48, minSidePadding: 1, minBottomPadding: 1, }, color: { batchColors: [ {header: {bg: '#e3f2fd', fg: '#333'}, body: {bg: '#e3f2fd', fg: 'black'}}, {header: {bg: 'transparent', fg: '#333'}, body: {bg: 'transparent', fg: 'black'}}, ], heatColors: { 'true': {bg: '#e66', fg: 'black'}, 'false': {bg: '#ccc', fg: 'black'}, }, noColor: {bg: 'transparent', fg: 'black'}, showGradients: true, maxGradients: 3, defaultGradient: {bg: ['red', 'green'], fg: 'white'} }, hover: { color: {bg: '#f0f099', fg: 'black'}, delay: 500, }, text: { headers: { showText: true, fontSize: 14, allowSplit: true, hyphenate: true, }, cells: { showText: false, fontSize: 6, allowSplit: true, hyphenate: false, }, }, zoom: { zoomExtent: [1, 4], minimap: { size: { width: 200, height: 200, }, startCorner: 'bottom-right', hotCorners: true, attemptAvoidance: true, text: { showText: true, fontSize: 6, allowSplit: false, hyphenate: false }, border: { width: 2, color: 'black', }, panner: { width: 1, color: 'blue', }, }, }, };
{ merged.hover.color.bg = (merged.hover.color.bg.length > 0) ? merged.hover.color.bg[0] : 'transparent'; }
conditional_block
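The conditional block masked above is the branch that flattens a gradient hover color: HoverRules states that hover colors can never be gradients, so an array bg is reduced to its first entry (or 'transparent' when the array is empty). A short sketch using the class and defaults from the file:

// A gradient background passed as the hover color...
const custom: HeatmapOptions = { hover: { color: { bg: ['#f0f099', '#e66'], fg: 'black' } } };
const merged = HeatmapOptions.merge(custom, DEFAULT_OPTIONS);
console.log(merged.hover.color.bg); // '#f0f099' -- collapsed to the first color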
kindck-inherited-copy-bound.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that Copy bounds inherited by trait are checked. #![feature(box_syntax)] use std::any::Any; trait Foo : Copy { } impl<T:Copy> Foo for T { } fn take_param<T:Foo>(foo: &T) { } fn
() { let x = box 3is; take_param(&x); //~ ERROR `core::marker::Copy` is not implemented } fn b() { let x = box 3is; let y = &x; let z = &x as &Foo; //~ ERROR `core::marker::Copy` is not implemented } fn main() { }
a
identifier_name
kindck-inherited-copy-bound.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that Copy bounds inherited by trait are checked. #![feature(box_syntax)] use std::any::Any; trait Foo : Copy { } impl<T:Copy> Foo for T { } fn take_param<T:Foo>(foo: &T) { } fn a() { let x = box 3is; take_param(&x); //~ ERROR `core::marker::Copy` is not implemented } fn b() { let x = box 3is; let y = &x; let z = &x as &Foo; //~ ERROR `core::marker::Copy` is not implemented } fn main() { }
random_line_split
kindck-inherited-copy-bound.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that Copy bounds inherited by trait are checked. #![feature(box_syntax)] use std::any::Any; trait Foo : Copy { } impl<T:Copy> Foo for T { } fn take_param<T:Foo>(foo: &T)
fn a() { let x = box 3is; take_param(&x); //~ ERROR `core::marker::Copy` is not implemented } fn b() { let x = box 3is; let y = &x; let z = &x as &Foo; //~ ERROR `core::marker::Copy` is not implemented } fn main() { }
{ }
identifier_body
axfs.py
"""A parser for axfs file system images""" from stat import * import zlib from . import * from ..io import * from ..util import * AxfsHeader = Struct('AxfsHeader', [ ('magic', Struct.STR % 4), ('signature', Struct.STR % 16), ('digest', Struct.STR % 40), ('blockSize', Struct.INT32), ('files', Struct.INT64), ('size', Struct.INT64), ('blocks', Struct.INT64), ('mmapSize', Struct.INT64), ('regions', Struct.STR % 144), ('...', 13), ], Struct.BIG_ENDIAN) axfsHeaderMagic = b'\x48\xA0\xE4\xCD' axfsHeaderSignature = b'Advanced XIP FS\0' AxfsRegionDesc = Struct('AxfsRegionDesc', [ ('offset', Struct.INT64), ('size', Struct.INT64), ('compressedSize', Struct.INT64), ('maxIndex', Struct.INT64), ('tableByteDepth', Struct.INT8), ('incore', Struct.INT8), ], Struct.BIG_ENDIAN) axfsRegions = [ 'strings', 'xip', 'byteAligned', 'compressed', # tableRegions: 'nodeType', 'nodeIndex', 'cnodeOffset', 'cnodeIndex', 'banodeOffset', 'cblockOffset', 'fileSize', 'nameOffset', 'numEntries', 'modeIndex', 'arrayIndex', 'modes', 'uids', 'gids', ] def
(file): header = AxfsHeader.unpack(file) return header and header.magic == axfsHeaderMagic and header.signature == axfsHeaderSignature def readAxfs(file): header = AxfsHeader.unpack(file) if header.magic != axfsHeaderMagic or header.signature != axfsHeaderSignature: raise Exception('Wrong magic') regions = {} tables = {} for i, k in enumerate(axfsRegions): region = AxfsRegionDesc.unpack(file, parse64be(header.regions[i*8:(i+1)*8])) regions[k] = FilePart(file, region.offset, region.size) if i >= 4: regionData = regions[k].read() tables[k] = [sum([ord(regionData[j*region.maxIndex+i:j*region.maxIndex+i+1]) << (8*j) for j in range(region.tableByteDepth)]) for i in range(region.maxIndex)] def readInode(id, path=''): size = tables['fileSize'][id] nameOffset = tables['nameOffset'][id] mode = tables['modes'][tables['modeIndex'][id]] uid = tables['uids'][tables['modeIndex'][id]] gid = tables['gids'][tables['modeIndex'][id]] numEntries = tables['numEntries'][id] arrayIndex = tables['arrayIndex'][id] name = b'' regions['strings'].seek(nameOffset) while b'\0' not in name: name += regions['strings'].read(1024) name = name.partition(b'\0')[0].decode('ascii') path += name if id != 0 else '' isDir = S_ISDIR(mode) def generateChunks(arrayIndex=arrayIndex, numEntries=numEntries, size=size): read = 0 for i in range(numEntries): nodeType = tables['nodeType'][arrayIndex + i] nodeIndex = tables['nodeIndex'][arrayIndex + i] if nodeType == 0: regions['xip'].seek(nodeIndex << 12) contents = regions['xip'].read(4096) elif nodeType == 1: cnodeIndex = tables['cnodeIndex'][nodeIndex] regions['compressed'].seek(tables['cblockOffset'][cnodeIndex]) contents = zlib.decompress(regions['compressed'].read(tables['cblockOffset'][cnodeIndex+1] - tables['cblockOffset'][cnodeIndex])) elif nodeType == 2: regions['byteAligned'].seek(tables['banodeOffset'][nodeIndex]) contents = regions['byteAligned'].read(size - read) else: raise Exception('Unknown type') yield contents read += len(contents) yield UnixFile( path = path, size = size if not isDir else 0, mtime = 0, mode = mode, uid = uid, gid = gid, contents = ChunkedFile(generateChunks, size) if S_ISREG(mode) or S_ISLNK(mode) else None, ) if isDir: for i in range(numEntries): for f in readInode(arrayIndex + i, path + '/'): yield f for f in readInode(0): yield f
isAxfs
identifier_name
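The AxfsHeader layout in the record above maps directly onto Python's standard struct module. A minimal sketch of the same big-endian unpack, assuming the project's custom Struct helper behaves like a named struct.unpack wrapper and that the header sits at offset 0 (both assumptions; the helper itself is not shown in this dump):

import struct
from collections import namedtuple

# Field layout mirrors AxfsHeader above: 4s magic, 16s signature, 40s digest,
# one 32-bit int, four 64-bit ints, a 144-byte region table, 13 pad bytes,
# all big-endian (">" disables alignment padding).
AXFS_HEADER_FMT = ">4s16s40sIQQQQ144s13x"
AxfsHeaderTuple = namedtuple(
    "AxfsHeaderTuple",
    "magic signature digest blockSize files size blocks mmapSize regions",
)

def unpack_axfs_header(fileobj):
    fileobj.seek(0)  # assumes the header is at the start of the image
    raw = fileobj.read(struct.calcsize(AXFS_HEADER_FMT))
    return AxfsHeaderTuple(*struct.unpack(AXFS_HEADER_FMT, raw))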
axfs.py
"""A parser for axfs file system images""" from stat import * import zlib from . import * from ..io import * from ..util import * AxfsHeader = Struct('AxfsHeader', [ ('magic', Struct.STR % 4), ('signature', Struct.STR % 16), ('digest', Struct.STR % 40), ('blockSize', Struct.INT32), ('files', Struct.INT64), ('size', Struct.INT64), ('blocks', Struct.INT64), ('mmapSize', Struct.INT64), ('regions', Struct.STR % 144), ('...', 13), ], Struct.BIG_ENDIAN) axfsHeaderMagic = b'\x48\xA0\xE4\xCD' axfsHeaderSignature = b'Advanced XIP FS\0' AxfsRegionDesc = Struct('AxfsRegionDesc', [ ('offset', Struct.INT64), ('size', Struct.INT64), ('compressedSize', Struct.INT64), ('maxIndex', Struct.INT64), ('tableByteDepth', Struct.INT8), ('incore', Struct.INT8), ], Struct.BIG_ENDIAN) axfsRegions = [ 'strings', 'xip', 'byteAligned', 'compressed', # tableRegions: 'nodeType', 'nodeIndex', 'cnodeOffset', 'cnodeIndex', 'banodeOffset', 'cblockOffset', 'fileSize', 'nameOffset', 'numEntries', 'modeIndex', 'arrayIndex', 'modes', 'uids', 'gids', ] def isAxfs(file):
def readAxfs(file): header = AxfsHeader.unpack(file) if header.magic != axfsHeaderMagic or header.signature != axfsHeaderSignature: raise Exception('Wrong magic') regions = {} tables = {} for i, k in enumerate(axfsRegions): region = AxfsRegionDesc.unpack(file, parse64be(header.regions[i*8:(i+1)*8])) regions[k] = FilePart(file, region.offset, region.size) if i >= 4: regionData = regions[k].read() tables[k] = [sum([ord(regionData[j*region.maxIndex+i:j*region.maxIndex+i+1]) << (8*j) for j in range(region.tableByteDepth)]) for i in range(region.maxIndex)] def readInode(id, path=''): size = tables['fileSize'][id] nameOffset = tables['nameOffset'][id] mode = tables['modes'][tables['modeIndex'][id]] uid = tables['uids'][tables['modeIndex'][id]] gid = tables['gids'][tables['modeIndex'][id]] numEntries = tables['numEntries'][id] arrayIndex = tables['arrayIndex'][id] name = b'' regions['strings'].seek(nameOffset) while b'\0' not in name: name += regions['strings'].read(1024) name = name.partition(b'\0')[0].decode('ascii') path += name if id != 0 else '' isDir = S_ISDIR(mode) def generateChunks(arrayIndex=arrayIndex, numEntries=numEntries, size=size): read = 0 for i in range(numEntries): nodeType = tables['nodeType'][arrayIndex + i] nodeIndex = tables['nodeIndex'][arrayIndex + i] if nodeType == 0: regions['xip'].seek(nodeIndex << 12) contents = regions['xip'].read(4096) elif nodeType == 1: cnodeIndex = tables['cnodeIndex'][nodeIndex] regions['compressed'].seek(tables['cblockOffset'][cnodeIndex]) contents = zlib.decompress(regions['compressed'].read(tables['cblockOffset'][cnodeIndex+1] - tables['cblockOffset'][cnodeIndex])) elif nodeType == 2: regions['byteAligned'].seek(tables['banodeOffset'][nodeIndex]) contents = regions['byteAligned'].read(size - read) else: raise Exception('Unknown type') yield contents read += len(contents) yield UnixFile( path = path, size = size if not isDir else 0, mtime = 0, mode = mode, uid = uid, gid = gid, contents = ChunkedFile(generateChunks, size) if S_ISREG(mode) or S_ISLNK(mode) else None, ) if isDir: for i in range(numEntries): for f in readInode(arrayIndex + i, path + '/'): yield f for f in readInode(0): yield f
header = AxfsHeader.unpack(file) return header and header.magic == axfsHeaderMagic and header.signature == axfsHeaderSignature
identifier_body
axfs.py
"""A parser for axfs file system images""" from stat import * import zlib from . import * from ..io import * from ..util import * AxfsHeader = Struct('AxfsHeader', [ ('magic', Struct.STR % 4), ('signature', Struct.STR % 16), ('digest', Struct.STR % 40), ('blockSize', Struct.INT32), ('files', Struct.INT64), ('size', Struct.INT64), ('blocks', Struct.INT64), ('mmapSize', Struct.INT64), ('regions', Struct.STR % 144), ('...', 13), ], Struct.BIG_ENDIAN) axfsHeaderMagic = b'\x48\xA0\xE4\xCD' axfsHeaderSignature = b'Advanced XIP FS\0' AxfsRegionDesc = Struct('AxfsRegionDesc', [ ('offset', Struct.INT64), ('size', Struct.INT64), ('compressedSize', Struct.INT64), ('maxIndex', Struct.INT64), ('tableByteDepth', Struct.INT8), ('incore', Struct.INT8), ], Struct.BIG_ENDIAN) axfsRegions = [ 'strings', 'xip', 'byteAligned', 'compressed', # tableRegions: 'nodeType', 'nodeIndex', 'cnodeOffset', 'cnodeIndex', 'banodeOffset', 'cblockOffset', 'fileSize', 'nameOffset', 'numEntries', 'modeIndex', 'arrayIndex', 'modes', 'uids', 'gids', ] def isAxfs(file): header = AxfsHeader.unpack(file) return header and header.magic == axfsHeaderMagic and header.signature == axfsHeaderSignature def readAxfs(file): header = AxfsHeader.unpack(file) if header.magic != axfsHeaderMagic or header.signature != axfsHeaderSignature: raise Exception('Wrong magic') regions = {} tables = {} for i, k in enumerate(axfsRegions):
def readInode(id, path=''): size = tables['fileSize'][id] nameOffset = tables['nameOffset'][id] mode = tables['modes'][tables['modeIndex'][id]] uid = tables['uids'][tables['modeIndex'][id]] gid = tables['gids'][tables['modeIndex'][id]] numEntries = tables['numEntries'][id] arrayIndex = tables['arrayIndex'][id] name = b'' regions['strings'].seek(nameOffset) while b'\0' not in name: name += regions['strings'].read(1024) name = name.partition(b'\0')[0].decode('ascii') path += name if id != 0 else '' isDir = S_ISDIR(mode) def generateChunks(arrayIndex=arrayIndex, numEntries=numEntries, size=size): read = 0 for i in range(numEntries): nodeType = tables['nodeType'][arrayIndex + i] nodeIndex = tables['nodeIndex'][arrayIndex + i] if nodeType == 0: regions['xip'].seek(nodeIndex << 12) contents = regions['xip'].read(4096) elif nodeType == 1: cnodeIndex = tables['cnodeIndex'][nodeIndex] regions['compressed'].seek(tables['cblockOffset'][cnodeIndex]) contents = zlib.decompress(regions['compressed'].read(tables['cblockOffset'][cnodeIndex+1] - tables['cblockOffset'][cnodeIndex])) elif nodeType == 2: regions['byteAligned'].seek(tables['banodeOffset'][nodeIndex]) contents = regions['byteAligned'].read(size - read) else: raise Exception('Unknown type') yield contents read += len(contents) yield UnixFile( path = path, size = size if not isDir else 0, mtime = 0, mode = mode, uid = uid, gid = gid, contents = ChunkedFile(generateChunks, size) if S_ISREG(mode) or S_ISLNK(mode) else None, ) if isDir: for i in range(numEntries): for f in readInode(arrayIndex + i, path + '/'): yield f for f in readInode(0): yield f
region = AxfsRegionDesc.unpack(file, parse64be(header.regions[i*8:(i+1)*8])) regions[k] = FilePart(file, region.offset, region.size) if i >= 4: regionData = regions[k].read() tables[k] = [sum([ord(regionData[j*region.maxIndex+i:j*region.maxIndex+i+1]) << (8*j) for j in range(region.tableByteDepth)]) for i in range(region.maxIndex)]
conditional_block
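The conditional_block middle above builds each lookup table from a column-major byte layout: byte j of entry i lives at offset j*maxIndex + i, and the columns combine little-endian. A standalone restatement of that decode as a pure function (names are assumptions; the original's ord(slice) form is Python 2/3 portable, this sketch uses Python 3 bytes indexing):

def decode_table(region_data, max_index, table_byte_depth):
    # Byte j of entry i is stored at offset j*max_index + i; columns are
    # combined little-endian, exactly as in the comprehension above.
    return [
        sum(region_data[j * max_index + i] << (8 * j)
            for j in range(table_byte_depth))
        for i in range(max_index)
    ]

# Two entries at depth 2, column-major bytes [lo0, lo1, hi0, hi1]:
assert decode_table(bytes([0x01, 0x02, 0x10, 0x20]), 2, 2) == [0x1001, 0x2002]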
axfs.py
"""A parser for axfs file system images""" from stat import *
import zlib from . import * from ..io import * from ..util import * AxfsHeader = Struct('AxfsHeader', [ ('magic', Struct.STR % 4), ('signature', Struct.STR % 16), ('digest', Struct.STR % 40), ('blockSize', Struct.INT32), ('files', Struct.INT64), ('size', Struct.INT64), ('blocks', Struct.INT64), ('mmapSize', Struct.INT64), ('regions', Struct.STR % 144), ('...', 13), ], Struct.BIG_ENDIAN) axfsHeaderMagic = b'\x48\xA0\xE4\xCD' axfsHeaderSignature = b'Advanced XIP FS\0' AxfsRegionDesc = Struct('AxfsRegionDesc', [ ('offset', Struct.INT64), ('size', Struct.INT64), ('compressedSize', Struct.INT64), ('maxIndex', Struct.INT64), ('tableByteDepth', Struct.INT8), ('incore', Struct.INT8), ], Struct.BIG_ENDIAN) axfsRegions = [ 'strings', 'xip', 'byteAligned', 'compressed', # tableRegions: 'nodeType', 'nodeIndex', 'cnodeOffset', 'cnodeIndex', 'banodeOffset', 'cblockOffset', 'fileSize', 'nameOffset', 'numEntries', 'modeIndex', 'arrayIndex', 'modes', 'uids', 'gids', ] def isAxfs(file): header = AxfsHeader.unpack(file) return header and header.magic == axfsHeaderMagic and header.signature == axfsHeaderSignature def readAxfs(file): header = AxfsHeader.unpack(file) if header.magic != axfsHeaderMagic or header.signature != axfsHeaderSignature: raise Exception('Wrong magic') regions = {} tables = {} for i, k in enumerate(axfsRegions): region = AxfsRegionDesc.unpack(file, parse64be(header.regions[i*8:(i+1)*8])) regions[k] = FilePart(file, region.offset, region.size) if i >= 4: regionData = regions[k].read() tables[k] = [sum([ord(regionData[j*region.maxIndex+i:j*region.maxIndex+i+1]) << (8*j) for j in range(region.tableByteDepth)]) for i in range(region.maxIndex)] def readInode(id, path=''): size = tables['fileSize'][id] nameOffset = tables['nameOffset'][id] mode = tables['modes'][tables['modeIndex'][id]] uid = tables['uids'][tables['modeIndex'][id]] gid = tables['gids'][tables['modeIndex'][id]] numEntries = tables['numEntries'][id] arrayIndex = tables['arrayIndex'][id] name = b'' regions['strings'].seek(nameOffset) while b'\0' not in name: name += regions['strings'].read(1024) name = name.partition(b'\0')[0].decode('ascii') path += name if id != 0 else '' isDir = S_ISDIR(mode) def generateChunks(arrayIndex=arrayIndex, numEntries=numEntries, size=size): read = 0 for i in range(numEntries): nodeType = tables['nodeType'][arrayIndex + i] nodeIndex = tables['nodeIndex'][arrayIndex + i] if nodeType == 0: regions['xip'].seek(nodeIndex << 12) contents = regions['xip'].read(4096) elif nodeType == 1: cnodeIndex = tables['cnodeIndex'][nodeIndex] regions['compressed'].seek(tables['cblockOffset'][cnodeIndex]) contents = zlib.decompress(regions['compressed'].read(tables['cblockOffset'][cnodeIndex+1] - tables['cblockOffset'][cnodeIndex])) elif nodeType == 2: regions['byteAligned'].seek(tables['banodeOffset'][nodeIndex]) contents = regions['byteAligned'].read(size - read) else: raise Exception('Unknown type') yield contents read += len(contents) yield UnixFile( path = path, size = size if not isDir else 0, mtime = 0, mode = mode, uid = uid, gid = gid, contents = ChunkedFile(generateChunks, size) if S_ISREG(mode) or S_ISLNK(mode) else None, ) if isDir: for i in range(numEntries): for f in readInode(arrayIndex + i, path + '/'): yield f for f in readInode(0): yield f
random_line_split
example-nrf24-recv.py
#!/usr/bin/python # -*- coding: utf-8 -*- # # Example program to receive packets from the radio link # import virtGPIO as GPIO from lib_nrf24 import NRF24 import time pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]] radio2 = NRF24(GPIO, GPIO.SpiDev()) radio2.begin(9, 7) radio2.setRetries(15,15) radio2.setPayloadSize(32) radio2.setChannel(0x60) radio2.setDataRate(NRF24.BR_2MBPS) radio2.setPALevel(NRF24.PA_MIN) radio2.setAutoAck(True) radio2.enableDynamicPayloads() radio2.enableAckPayload() radio2.openWritingPipe(pipes[0]) radio2.openReadingPipe(1, pipes[1]) radio2.startListening() radio2.stopListening() radio2.printDetails() radio2.startListening() c=1 while True: akpl_buf = [c,1, 2, 3,4,5,6,7,8,9,0,1, 2, 3,4,5,6,7,8] pipe = [0] while not radio2.available(pipe): time.sleep(10000/1000000.0) recv_buffer = [] radio2.read(recv_buffer, radio2.getDynamicPayloadSize()) print("Received:") print(recv_buffer) c = c + 1 if (c&1) == 0:
radio2.writeAckPayload(1, akpl_buf, len(akpl_buf)) print("Loaded payload reply:") print(akpl_buf) else: print("(No return payload)")
random_line_split
example-nrf24-recv.py
#!/usr/bin/python # -*- coding: utf-8 -*- # # Example program to receive packets from the radio link # import virtGPIO as GPIO from lib_nrf24 import NRF24 import time pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]] radio2 = NRF24(GPIO, GPIO.SpiDev()) radio2.begin(9, 7) radio2.setRetries(15,15) radio2.setPayloadSize(32) radio2.setChannel(0x60) radio2.setDataRate(NRF24.BR_2MBPS) radio2.setPALevel(NRF24.PA_MIN) radio2.setAutoAck(True) radio2.enableDynamicPayloads() radio2.enableAckPayload() radio2.openWritingPipe(pipes[0]) radio2.openReadingPipe(1, pipes[1]) radio2.startListening() radio2.stopListening() radio2.printDetails() radio2.startListening() c=1 while True: akpl_buf = [c,1, 2, 3,4,5,6,7,8,9,0,1, 2, 3,4,5,6,7,8] pipe = [0] while not radio2.available(pipe): time.sleep(10000/1000000.0) recv_buffer = [] radio2.read(recv_buffer, radio2.getDynamicPayloadSize()) print("Received:") print(recv_buffer) c = c + 1 if (c&1) == 0:
else: print ("(No return payload)")
radio2.writeAckPayload(1, akpl_buf, len(akpl_buf)) print("Loaded payload reply:") print(akpl_buf)
conditional_block
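Both nrf24 records above split the same alternating ack-payload branch. A hedged refactor of that loop for reference -- the radio calls (available, read, getDynamicPayloadSize, writeAckPayload) are the ones used in the records themselves, while the helper name and callback are hypothetical:

import time

def receive_forever(radio, make_ack_payload):
    # Poll until a packet arrives, read it, and preload an ack payload on
    # every second packet -- the same (c & 1) == 0 alternation as above.
    counter = 1
    while True:
        pipe = [0]
        while not radio.available(pipe):
            time.sleep(0.01)  # 10 ms poll, matching 10000/1000000.0 above
        recv_buffer = []
        radio.read(recv_buffer, radio.getDynamicPayloadSize())
        print("Received:")
        print(recv_buffer)
        counter += 1
        if (counter & 1) == 0:
            ack = make_ack_payload(counter)
            radio.writeAckPayload(1, ack, len(ack))
            print("Loaded payload reply:")
            print(ack)
        else:
            print("(No return payload)")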
NotificationsMenu.tsx
import { AnalyticsSchema } from "v2/Artsy/Analytics" import { useTracking } from "v2/Artsy/Analytics/useTracking" import React, { useContext } from "react" import { graphql } from "react-relay" import { SystemContext } from "v2/Artsy" import { get } from "v2/Utils/get" import { LoadProgressRenderer, renderWithLoadProgress, } from "v2/Artsy/Relay/renderWithLoadProgress" import { NotificationsMenuQuery, NotificationsMenuQueryResponse, } from "v2/__generated__/NotificationsMenuQuery.graphql" import { Box, Flex, Image, Link, Sans, Separator, Serif } from "@artsy/palette" import { Menu, MenuItem } from "v2/Components/Menu" import { SystemQueryRenderer as QueryRenderer } from "v2/Artsy/Relay/SystemQueryRenderer" export const NotificationMenuItems: React.FC<NotificationsMenuQueryResponse> = props => { const notifications = get( props, p => { return p.me.followsAndSaves.notifications.edges }, [] ) const { trackEvent } = useTracking() const handleClick = (href: string, subject: string) => { trackEvent({ action_type: AnalyticsSchema.ActionType.Click, context_module: AnalyticsSchema.ContextModule.HeaderActivityDropdown, destination_path: href, subject, }) } return ( <> {notifications.map(({ node }, index) => { const { artists, href, image, summary } = node const worksForSaleHref = href + "/works-for-sale" return ( <MenuItem href={worksForSaleHref} key={index} onClick={() => { handleClick(href, AnalyticsSchema.Subject.Notification)
> <Flex alignItems="center"> <Box width={40} height={40} bg="black5" mr={1}> <Image src={image.resized.url} width={40} height={40} style={{ objectFit: "cover", }} /> </Box> <Box> <Sans size="2">{summary}</Sans> <Sans size="2" weight="medium"> {artists} </Sans> </Box> </Flex> </MenuItem> ) })} <Flex py={1} flexDirection="column" alignItems="center"> <> {notifications.length === 0 && ( <Flex width="100%" flexDirection="column"> <Box pt={1} pb={3} width="100%" textAlign="center"> <Serif size="3">No new works</Serif> </Box> </Flex> )} <Box width="100%" px={2}> <Separator /> </Box> <Box pt={2}> <Sans size="2"> <Link href="/works-for-you" onClick={() => { handleClick("/works-for-you", AnalyticsSchema.Subject.ViewAll) }} > View all </Link> </Sans> </Box> </> </Flex> </> ) } /** * The <Menu /> component renders a QueryRenderer inside of it, which fetches * individual MenuItems for display. During fetch there is a loading spinner. */ export const NotificationsMenu: React.FC = () => { return ( <Menu title="Activity"> <NotificationsQueryRenderer render={renderWithLoadProgress( NotificationMenuItems, {}, {}, { size: "small" } )} /> </Menu> ) } /** * This QueryRenderer is also shared with NotificationBadge. Once the request * is performed the data is cached at the network layer so menu data appears * immediately and doesn't require a second request. */ export const NotificationsQueryRenderer: React.FC<{ render: LoadProgressRenderer<any> }> = ({ render }) => { const { relayEnvironment } = useContext(SystemContext) return ( <QueryRenderer<NotificationsMenuQuery> environment={relayEnvironment} query={graphql` query NotificationsMenuQuery { me { unreadNotificationsCount followsAndSaves { notifications: bundledArtworksByArtistConnection( sort: PUBLISHED_AT_DESC first: 10 ) @connection(key: "WorksForYou_notifications") { edges { node { href summary artists published_at: publishedAt(format: "MMM DD") image { resized(height: 40, width: 40) { url } } } } } } } } `} variables={{}} render={render} /> ) }
}}
random_line_split
autocomplete_test.py
# coding: utf-8 # Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org # https://groups.google.com/d/forum/navitia # www.navitia.io from __future__ import absolute_import, print_function, division, unicode_literals from tests.check_utils import api_get, api_post, api_delete, api_put, _dt import json import pytest import jmespath from navitiacommon import models from tyr import app @pytest.fixture def create_autocomplete_parameter(): with app.app_context(): autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param) models.db.session.commit() # we also create 3 datasets, one for bano, 2 for osm for i, dset_type in enumerate(['bano', 'osm', 'osm']):
@pytest.fixture def create_two_autocomplete_parameters(): with app.app_context(): autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9]) autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param1) models.db.session.add(autocomplete_param2) models.db.session.commit() @pytest.fixture def autocomplete_parameter_json(): return { "name": "peru", "street": "OSM", "address": "BANO", "poi": "FUSIO", "admin": "OSM", "admin_level": [8], } def test_get_autocomplete_parameters_empty(): resp = api_get('/v0/autocomplete_parameters/') assert resp == [] def test_get_all_autocomplete(create_autocomplete_parameter): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 assert resp[0]['name'] == 'idf' assert resp[0]['street'] == 'OSM' assert resp[0]['address'] == 'BANO' assert resp[0]['poi'] == 'FUSIO' assert resp[0]['admin'] == 'OSM' assert resp[0]['admin_level'] == [8, 9] def test_get_autocomplete_by_name(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] def test_post_autocomplete(autocomplete_parameter_json): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['name'] == 'peru' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_post_autocomplete_cosmo(): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}), content_type='application/json', ) assert resp['name'] == 'bobette' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'OSM' assert resp['admin'] == 'COSMOGONY' assert resp['admin_level'] == [] def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] resp = api_put( '/v0/autocomplete_parameters/france', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_delete_autocomplete(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' _, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True) assert status == 204 _, status = api_get('/v0/autocomplete_parameters/france', check=False) assert status == 404 resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 def test_get_last_datasets_autocomplete(create_autocomplete_parameter): """ we query the loaded datasets of idf we loaded 3 datasets, but by default we should get one by family_type, so one for bano, one for osm """ resp = api_get('/v0/autocomplete_parameters/idf/last_datasets') assert 
len(resp) == 2 bano = next((d for d in resp if d['type'] == 'bano'), None) assert bano assert bano['family_type'] == 'autocomplete_bano' assert bano['name'] == '/path/to/dataset_0' osm = next((d for d in resp if d['type'] == 'osm'), None) assert osm assert osm['family_type'] == 'autocomplete_osm' assert osm['name'] == '/path/to/dataset_2' # we should have the last one # if we ask for the 2 last datasets per type, we got all of them resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2') assert len(resp) == 3 @pytest.fixture def minimal_poi_types_json(): return { "poi_types": [ {"id": "amenity:bicycle_rental", "name": "Station VLS"}, {"id": "amenity:parking", "name": "Parking"}, ], "rules": [ { "osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}], "poi_type_id": "amenity:bicycle_rental", }, {"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"}, ], } def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' # POST a minimal conf resp = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps(minimal_poi_types_json), content_type='application/json', ) def test_minimal_conf(resp): assert len(resp['poi_types']) == 2 assert len(resp['rules']) == 2 bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp) assert len(bss_type) == 1 assert bss_type[0]['name'] == 'Station VLS' bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp) assert len(bss_rule) == 1 assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental' # check that it's not the "default" conf assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp) # check that the conf is correctly set on france test_minimal_conf(resp) # check that the conf on europe is still empty resp = api_get('/v0/autocomplete_parameters/europe/poi_types') assert not resp # check GET of newly defined france conf resp = api_get('/v0/autocomplete_parameters/france/poi_types') test_minimal_conf(resp) # check DELETE of france conf resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True) assert not resp assert code == 204 # check get of conf on france is now empty resp = api_get('/v0/autocomplete_parameters/france/poi_types') assert not resp # check that tyr refuses incorrect conf resp, code = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}), content_type='application/json', check=False, ) assert code == 400 assert resp['status'] == 'error' assert 'rules' in resp['message']
job = models.Job() dataset = models.DataSet() dataset.type = dset_type dataset.family_type = 'autocomplete_{}'.format(dataset.type) dataset.name = '/path/to/dataset_{}'.format(i) models.db.session.add(dataset) job.autocomplete_params_id = autocomplete_param.id job.data_sets.append(dataset) job.state = 'done' models.db.session.add(job) models.db.session.commit()
conditional_block
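tests.check_utils is not included in this dump, so the api_get/api_post helpers used throughout these records are opaque here. They could plausibly be built on Flask's test client along these lines -- a sketch under that assumption, not the project's actual implementation:

import json
from tyr import app  # the Flask app imported by the tests above

def api_get(url):
    with app.test_client() as client:
        response = client.get(url)
        assert response.status_code == 200
        return json.loads(response.data)

def api_post(url, data=None, content_type='application/json'):
    with app.test_client() as client:
        response = client.post(url, data=data, content_type=content_type)
        assert response.status_code in (200, 201)
        return json.loads(response.data)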
autocomplete_test.py
# coding: utf-8 # Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org # https://groups.google.com/d/forum/navitia # www.navitia.io from __future__ import absolute_import, print_function, division, unicode_literals from tests.check_utils import api_get, api_post, api_delete, api_put, _dt import json import pytest import jmespath from navitiacommon import models from tyr import app @pytest.fixture def create_autocomplete_parameter(): with app.app_context(): autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param) models.db.session.commit() # we also create 3 datasets, one for bano, 2 for osm for i, dset_type in enumerate(['bano', 'osm', 'osm']): job = models.Job() dataset = models.DataSet() dataset.type = dset_type dataset.family_type = 'autocomplete_{}'.format(dataset.type) dataset.name = '/path/to/dataset_{}'.format(i) models.db.session.add(dataset) job.autocomplete_params_id = autocomplete_param.id job.data_sets.append(dataset) job.state = 'done' models.db.session.add(job) models.db.session.commit() @pytest.fixture def create_two_autocomplete_parameters(): with app.app_context(): autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9]) autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param1) models.db.session.add(autocomplete_param2) models.db.session.commit() @pytest.fixture def autocomplete_parameter_json(): return { "name": "peru", "street": "OSM", "address": "BANO", "poi": "FUSIO", "admin": "OSM", "admin_level": [8], } def test_get_autocomplete_parameters_empty(): resp = api_get('/v0/autocomplete_parameters/') assert resp == [] def test_get_all_autocomplete(create_autocomplete_parameter): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 assert resp[0]['name'] == 'idf' assert resp[0]['street'] == 'OSM' assert resp[0]['address'] == 'BANO' assert resp[0]['poi'] == 'FUSIO' assert resp[0]['admin'] == 'OSM' assert resp[0]['admin_level'] == [8, 9] def test_get_autocomplete_by_name(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] 
def test_post_autocomplete(autocomplete_parameter_json): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['name'] == 'peru' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_post_autocomplete_cosmo(): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}), content_type='application/json', ) assert resp['name'] == 'bobette' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'OSM' assert resp['admin'] == 'COSMOGONY' assert resp['admin_level'] == [] def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] resp = api_put( '/v0/autocomplete_parameters/france', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_delete_autocomplete(create_two_autocomplete_parameters):
def test_get_last_datasets_autocomplete(create_autocomplete_parameter): """ we query the loaded datasets of idf we loaded 3 datasets, but by default we should get one by family_type, so one for bano, one for osm """ resp = api_get('/v0/autocomplete_parameters/idf/last_datasets') assert len(resp) == 2 bano = next((d for d in resp if d['type'] == 'bano'), None) assert bano assert bano['family_type'] == 'autocomplete_bano' assert bano['name'] == '/path/to/dataset_0' osm = next((d for d in resp if d['type'] == 'osm'), None) assert osm assert osm['family_type'] == 'autocomplete_osm' assert osm['name'] == '/path/to/dataset_2' # we should have the last one # if we ask for the 2 last datasets per type, we got all of them resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2') assert len(resp) == 3 @pytest.fixture def minimal_poi_types_json(): return { "poi_types": [ {"id": "amenity:bicycle_rental", "name": "Station VLS"}, {"id": "amenity:parking", "name": "Parking"}, ], "rules": [ { "osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}], "poi_type_id": "amenity:bicycle_rental", }, {"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"}, ], } def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' # POST a minimal conf resp = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps(minimal_poi_types_json), content_type='application/json', ) def test_minimal_conf(resp): assert len(resp['poi_types']) == 2 assert len(resp['rules']) == 2 bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp) assert len(bss_type) == 1 assert bss_type[0]['name'] == 'Station VLS' bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp) assert len(bss_rule) == 1 assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental' # check that it's not the "default" conf assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp) # check that the conf is correctly set on france test_minimal_conf(resp) # check that the conf on europe is still empty resp = api_get('/v0/autocomplete_parameters/europe/poi_types') assert not resp # check GET of newly defined france conf resp = api_get('/v0/autocomplete_parameters/france/poi_types') test_minimal_conf(resp) # check DELETE of france conf resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True) assert not resp assert code == 204 # check get of conf on france is now empty resp = api_get('/v0/autocomplete_parameters/france/poi_types') assert not resp # check that tyr refuses incorrect conf resp, code = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}), content_type='application/json', check=False, ) assert code == 400 assert resp['status'] == 'error' assert 'rules' in resp['message']
resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' _, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True) assert status == 204 _, status = api_get('/v0/autocomplete_parameters/france', check=False) assert status == 404 resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1
identifier_body
autocomplete_test.py
# coding: utf-8 # Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org # https://groups.google.com/d/forum/navitia # www.navitia.io from __future__ import absolute_import, print_function, division, unicode_literals from tests.check_utils import api_get, api_post, api_delete, api_put, _dt import json import pytest import jmespath from navitiacommon import models from tyr import app @pytest.fixture def create_autocomplete_parameter(): with app.app_context(): autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param) models.db.session.commit() # we also create 3 datasets, one for bano, 2 for osm for i, dset_type in enumerate(['bano', 'osm', 'osm']): job = models.Job() dataset = models.DataSet() dataset.type = dset_type dataset.family_type = 'autocomplete_{}'.format(dataset.type) dataset.name = '/path/to/dataset_{}'.format(i) models.db.session.add(dataset) job.autocomplete_params_id = autocomplete_param.id job.data_sets.append(dataset) job.state = 'done' models.db.session.add(job) models.db.session.commit() @pytest.fixture def create_two_autocomplete_parameters(): with app.app_context(): autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9]) autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param1) models.db.session.add(autocomplete_param2) models.db.session.commit() @pytest.fixture def autocomplete_parameter_json(): return { "name": "peru", "street": "OSM", "address": "BANO", "poi": "FUSIO", "admin": "OSM", "admin_level": [8], } def test_get_autocomplete_parameters_empty(): resp = api_get('/v0/autocomplete_parameters/') assert resp == [] def test_get_all_autocomplete(create_autocomplete_parameter): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 assert resp[0]['name'] == 'idf' assert resp[0]['street'] == 'OSM' assert resp[0]['address'] == 'BANO' assert resp[0]['poi'] == 'FUSIO' assert resp[0]['admin'] == 'OSM' assert resp[0]['admin_level'] == [8, 9] def test_get_autocomplete_by_name(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] 
def test_post_autocomplete(autocomplete_parameter_json): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['name'] == 'peru' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_post_autocomplete_cosmo(): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}), content_type='application/json', ) assert resp['name'] == 'bobette' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'OSM' assert resp['admin'] == 'COSMOGONY' assert resp['admin_level'] == [] def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] resp = api_put( '/v0/autocomplete_parameters/france', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_delete_autocomplete(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' _, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True) assert status == 204 _, status = api_get('/v0/autocomplete_parameters/france', check=False) assert status == 404 resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 def test_get_last_datasets_autocomplete(create_autocomplete_parameter): """ we query the loaded datasets of idf we loaded 3 datasets, but by default we should get one by family_type, so one for bano, one for osm """ resp = api_get('/v0/autocomplete_parameters/idf/last_datasets') assert len(resp) == 2 bano = next((d for d in resp if d['type'] == 'bano'), None) assert bano assert bano['family_type'] == 'autocomplete_bano' assert bano['name'] == '/path/to/dataset_0' osm = next((d for d in resp if d['type'] == 'osm'), None) assert osm assert osm['family_type'] == 'autocomplete_osm' assert osm['name'] == '/path/to/dataset_2' # we should have the last one # if we ask for the 2 last datasets per type, we got all of them resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2') assert len(resp) == 3 @pytest.fixture def minimal_poi_types_json(): return { "poi_types": [ {"id": "amenity:bicycle_rental", "name": "Station VLS"}, {"id": "amenity:parking", "name": "Parking"}, ], "rules": [ { "osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}], "poi_type_id": "amenity:bicycle_rental", }, {"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"}, ], } def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' # POST a minimal conf resp = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps(minimal_poi_types_json), content_type='application/json', ) def test_minimal_conf(resp): assert len(resp['poi_types']) == 2 assert len(resp['rules']) == 2 bss_type = 
jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp) assert len(bss_type) == 1 assert bss_type[0]['name'] == 'Station VLS' bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp) assert len(bss_rule) == 1 assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental' # check that it's not the "default" conf assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp) # check that the conf is correctly set on france test_minimal_conf(resp)
resp = api_get('/v0/autocomplete_parameters/europe/poi_types') assert not resp # check GET of newly defined france conf resp = api_get('/v0/autocomplete_parameters/france/poi_types') test_minimal_conf(resp) # check DELETE of france conf resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True) assert not resp assert code == 204 # check get of conf on france is now empty resp = api_get('/v0/autocomplete_parameters/france/poi_types') assert not resp # check that tyr refuses incorrect conf resp, code = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}), content_type='application/json', check=False, ) assert code == 400 assert resp['status'] == 'error' assert 'rules' in resp['message']
# check that the conf on europe is still empty
random_line_split
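test_minimal_conf in the record above leans on jmespath's filter projections. A standalone example of the same expression shape (jmespath.search is the library's real API; the sample data is made up):

import jmespath

conf = {
    "poi_types": [
        {"id": "amenity:parking", "name": "Parking"},
        {"id": "amenity:bicycle_rental", "name": "Station VLS"},
    ]
}
# [?id=='...'] filters the list; search() returns the list of matches.
matches = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", conf)
assert len(matches) == 1 and matches[0]["name"] == "Station VLS"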
autocomplete_test.py
# coding: utf-8 # Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org # https://groups.google.com/d/forum/navitia # www.navitia.io from __future__ import absolute_import, print_function, division, unicode_literals from tests.check_utils import api_get, api_post, api_delete, api_put, _dt import json import pytest import jmespath from navitiacommon import models from tyr import app @pytest.fixture def create_autocomplete_parameter(): with app.app_context(): autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param) models.db.session.commit() # we also create 3 datasets, one for bano, 2 for osm for i, dset_type in enumerate(['bano', 'osm', 'osm']): job = models.Job() dataset = models.DataSet() dataset.type = dset_type dataset.family_type = 'autocomplete_{}'.format(dataset.type) dataset.name = '/path/to/dataset_{}'.format(i) models.db.session.add(dataset) job.autocomplete_params_id = autocomplete_param.id job.data_sets.append(dataset) job.state = 'done' models.db.session.add(job) models.db.session.commit() @pytest.fixture def create_two_autocomplete_parameters(): with app.app_context(): autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9]) autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9]) models.db.session.add(autocomplete_param1) models.db.session.add(autocomplete_param2) models.db.session.commit() @pytest.fixture def autocomplete_parameter_json(): return { "name": "peru", "street": "OSM", "address": "BANO", "poi": "FUSIO", "admin": "OSM", "admin_level": [8], } def test_get_autocomplete_parameters_empty(): resp = api_get('/v0/autocomplete_parameters/') assert resp == [] def test_get_all_autocomplete(create_autocomplete_parameter): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 assert resp[0]['name'] == 'idf' assert resp[0]['street'] == 'OSM' assert resp[0]['address'] == 'BANO' assert resp[0]['poi'] == 'FUSIO' assert resp[0]['admin'] == 'OSM' assert resp[0]['admin_level'] == [8, 9] def test_get_autocomplete_by_name(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] 
def test_post_autocomplete(autocomplete_parameter_json): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['name'] == 'peru' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_post_autocomplete_cosmo(): resp = api_post( '/v0/autocomplete_parameters', data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}), content_type='application/json', ) assert resp['name'] == 'bobette' assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'OSM' assert resp['admin'] == 'COSMOGONY' assert resp['admin_level'] == [] def
(create_two_autocomplete_parameters, autocomplete_parameter_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' assert resp['street'] == 'OSM' assert resp['address'] == 'OSM' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8, 9] resp = api_put( '/v0/autocomplete_parameters/france', data=json.dumps(autocomplete_parameter_json), content_type='application/json', ) assert resp['street'] == 'OSM' assert resp['address'] == 'BANO' assert resp['poi'] == 'FUSIO' assert resp['admin'] == 'OSM' assert resp['admin_level'] == [8] def test_delete_autocomplete(create_two_autocomplete_parameters): resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 2 resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' _, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True) assert status == 204 _, status = api_get('/v0/autocomplete_parameters/france', check=False) assert status == 404 resp = api_get('/v0/autocomplete_parameters/') assert len(resp) == 1 def test_get_last_datasets_autocomplete(create_autocomplete_parameter): """ we query the loaded datasets of idf we loaded 3 datasets, but by default we should get one by family_type, so one for bano, one for osm """ resp = api_get('/v0/autocomplete_parameters/idf/last_datasets') assert len(resp) == 2 bano = next((d for d in resp if d['type'] == 'bano'), None) assert bano assert bano['family_type'] == 'autocomplete_bano' assert bano['name'] == '/path/to/dataset_0' osm = next((d for d in resp if d['type'] == 'osm'), None) assert osm assert osm['family_type'] == 'autocomplete_osm' assert osm['name'] == '/path/to/dataset_2' # we should have the last one # if we ask for the 2 last datasets per type, we got all of them resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2') assert len(resp) == 3 @pytest.fixture def minimal_poi_types_json(): return { "poi_types": [ {"id": "amenity:bicycle_rental", "name": "Station VLS"}, {"id": "amenity:parking", "name": "Parking"}, ], "rules": [ { "osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}], "poi_type_id": "amenity:bicycle_rental", }, {"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"}, ], } def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json): resp = api_get('/v0/autocomplete_parameters/france') assert resp['name'] == 'france' # POST a minimal conf resp = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps(minimal_poi_types_json), content_type='application/json', ) def test_minimal_conf(resp): assert len(resp['poi_types']) == 2 assert len(resp['rules']) == 2 bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp) assert len(bss_type) == 1 assert bss_type[0]['name'] == 'Station VLS' bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp) assert len(bss_rule) == 1 assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental' # check that it's not the "default" conf assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp) # check that the conf is correctly set on france test_minimal_conf(resp) # check that the conf on europe is still empty resp = api_get('/v0/autocomplete_parameters/europe/poi_types') assert not resp # check GET of newly defined france conf resp = api_get('/v0/autocomplete_parameters/france/poi_types') test_minimal_conf(resp) # check DELETE of france conf resp, 
code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True) assert not resp assert code == 204 # check get of conf on france is now empty resp = api_get('/v0/autocomplete_parameters/france/poi_types') assert not resp # check that tyr refuses incorrect conf resp, code = api_post( '/v0/autocomplete_parameters/france/poi_types', data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}), content_type='application/json', check=False, ) assert code == 400 assert resp['status'] == 'error' assert 'rules' in resp['message']
test_put_autocomplete
identifier_name
CommentRenderer.js
pinion.backend.renderer.CommentRenderer = (function($) { var constr; // public API -- constructor constr = function(settings, backend) { var _this = this, data = settings.data; this.$element = $("<div class='pinion-backend-renderer-CommentRenderer'></div>"); // TEXTWRAPPER var $textWrapper = $("<div class='pinion-textWrapper'></div>") .appendTo(this.$element); // INFOS $("<div class='pinion-comment-info'></div>") // USER .append("<div class='pinion-name'><span class='pinion-backend-icon-user'></span><span class='pinion-username'>"+data.name+"</span></div>") // TIME .append("<div class='pinion-time'><span class='pinion-backend-icon-clock'></span><span class='pinion-time-text'>"+data.created+"</span></div>") .append("<div class='pinion-mail'><span class='pinion-backend-icon-mail'></span><a href='mailto:"+data.email+"' class='pinion-mail-adress'>"+data.email+"</a></div>") .appendTo(this.$element); // COMMENT $("<div class='pinion-commentWrapper'><div class='pinion-comment-text'>"+data.text+"</div></div>") .appendTo(this.$element); var $activate = $("<div class='pinion-activate'><div class='pinion-icon'></div><div class='pinion-text'>"+pinion.translate("activate comment")+"</div></div>") .click(function() { if(_this.$element.hasClass("pinion-activated")) { _this.setClean(); } else { _this.setDirty(); } _this.$element.toggleClass("pinion-activated") }); // RENDERER BAR var bar = []; if(pinion.hasPermission("comment", "approve comment")) { bar.push($activate); } if(pinion.hasPermission("comment", "delete comment")) { bar.push(pinion.data.Delete.call(this, data, function() { _this.info.deleted = true; _this.fadeOut(300, function() { _this.setDirty(); }); })); } if(!pinion.isEmpty(bar)) { pinion.data.Bar.call(this, bar); } // INFO pinion.data.Info.call(this, ["Time"], data); // group events settings.groupEvents = true; } // public API -- prototype constr.prototype = {
constructor: pinion.backend.renderer.CommentRenderer, init: function() { this.info.id = this.settings.data.id; }, reset: function() { this.$element.removeClass("pinion-activated"); } } return constr; }(jQuery));
random_line_split
CommentRenderer.js
pinion.backend.renderer.CommentRenderer = (function($) { var constr; // public API -- constructor constr = function(settings, backend) { var _this = this, data = settings.data; this.$element = $("<div class='pinion-backend-renderer-CommentRenderer'></div>"); // TEXTWRAPPER var $textWrapper = $("<div class='pinion-textWrapper'></div>") .appendTo(this.$element); // INFOS $("<div class='pinion-comment-info'></div>") // USER .append("<div class='pinion-name'><span class='pinion-backend-icon-user'></span><span class='pinion-username'>"+data.name+"</span></div>") // TIME .append("<div class='pinion-time'><span class='pinion-backend-icon-clock'></span><span class='pinion-time-text'>"+data.created+"</span></div>") .append("<div class='pinion-mail'><span class='pinion-backend-icon-mail'></span><a href='mailto:"+data.email+"' class='pinion-mail-adress'>"+data.email+"</a></div>") .appendTo(this.$element); // COMMENT $("<div class='pinion-commentWrapper'><div class='pinion-comment-text'>"+data.text+"</div></div>") .appendTo(this.$element); var $activate = $("<div class='pinion-activate'><div class='pinion-icon'></div><div class='pinion-text'>"+pinion.translate("activate comment")+"</div></div>") .click(function() { if(_this.$element.hasClass("pinion-activated"))
else { _this.setDirty(); } _this.$element.toggleClass("pinion-activated") }); // RENDERER BAR var bar = []; if(pinion.hasPermission("comment", "approve comment")) { bar.push($activate); } if(pinion.hasPermission("comment", "delete comment")) { bar.push(pinion.data.Delete.call(this, data, function() { _this.info.deleted = true; _this.fadeOut(300, function() { _this.setDirty(); }); })); } if(!pinion.isEmpty(bar)) { pinion.data.Bar.call(this, bar); } // INFO pinion.data.Info.call(this, ["Time"], data); // group events settings.groupEvents = true; } // public API -- prototype constr.prototype = { constructor: pinion.backend.renderer.CommentRenderer, init: function() { this.info.id = this.settings.data.id; }, reset: function() { this.$element.removeClass("pinion-activated"); } } return constr; }(jQuery));
{ _this.setClean(); }
conditional_block
HotController.js
import webpack from "webpack" import { spawn } from "child_process" import appRootDir from "app-root-dir" import path from "path" import { createNotification } from "./util" import HotServerManager from "./HotServerManager" import HotClientManager from "./HotClientManager" import ConfigFactory from "../webpack/ConfigFactory" import StatusPlugin from "../webpack/plugins/Status" function safeDisposer(manager) { return manager ? manager.dispose() : Promise.resolve() } /* eslint-disable arrow-body-style, no-console */ function createCompiler({ name, start, done }) { try { const webpackConfig = ConfigFactory({ target: name === "server" ? "node" : "web", mode: "development" }) // Offering a special status handling until Webpack offers a proper `done()` callback // See also: https://github.com/webpack/webpack/issues/4243 webpackConfig.plugins.push(new StatusPlugin({ name, start, done })) return webpack(webpackConfig) } catch (error) { createNotification({ title: "development", level: "error", message: "Webpack config is invalid, please check the console for more information.", notify: true }) console.error(error) throw error } } export default class HotController { constructor() { this.hotClientManager = null this.hotServerManager = null this.clientIsBuilding = false this.serverIsBuilding = false this.timeout = 0 const createClientManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "client", start: () => { this.clientIsBuilding = true createNotification({ title: "Hot Client", level: "info", message: "Building new bundle..." }) }, done: () => { this.clientIsBuilding = false createNotification({ title: "Hot Client", level: "info", message: "Bundle is ready.", notify: true }) resolve(compiler) } }) this.hotClientCompiler = compiler this.hotClientManager = new HotClientManager(compiler) }) } const createServerManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "server", start: () => { this.serverIsBuilding = true createNotification({ title: "Hot Server", level: "info", message: "Building new bundle..." }) }, done: () => { this.serverIsBuilding = false createNotification({ title: "Hot Server", level: "info", message: "Bundle is ready.", notify: true }) this.tryStartServer() resolve(compiler) } }) this.compiledServer = path.resolve( appRootDir.get(), compiler.options.output.path, `${Object.keys(compiler.options.entry)[0]}.js`, ) this.hotServerCompiler = compiler this.hotServerManager = new HotServerManager(compiler, this.hotClientCompiler) }) } createClientManager().then(createServerManager).catch((error) => { console.error("Error during build:", error) }) } tryStartServer = () => { if (this.clientIsBuilding)
this.startServer() this.timeout = 0 } startServer = () => { if (this.server) { this.server.kill() this.server = null createNotification({ title: "Hot Server", level: "info", message: "Restarting server..." }) } const newServer = spawn("node", [ "--inspect", this.compiledServer, "--colors" ], { stdio: [ process.stdin, process.stdout, "pipe" ] }) createNotification({ title: "Hot Server", level: "info", message: "Server running with latest changes.", notify: true }) newServer.stderr.on("data", (data) => { createNotification({ title: "Hot Server", level: "error", message: "Error in server execution, check the console for more info." }) process.stderr.write("\n") process.stderr.write(data) process.stderr.write("\n") }) this.server = newServer } dispose() { // First the hot client server. Then dispose the hot node server. return safeDisposer(this.hotClientManager).then(() => safeDisposer(this.hotServerManager)).catch((error) => { console.error(error) }) } }
{
      if (this.serverTryTimeout) {
        clearTimeout(this.serverTryTimeout)
      }
      this.serverTryTimeout = setTimeout(this.tryStartServer, this.timeout)
      this.timeout += 100
      return
    }
conditional_block
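The rows above and below all follow one schema: a source file is split into a prefix, a masked middle, and a suffix, and the row carries a fim_type label for the kind of span that was masked. A minimal sketch of how such a row could be consumed for fill-in-the-middle training follows; the <PRE>/<SUF>/<MID> sentinel strings are illustrative placeholders, not the control tokens of any particular model.

from dataclasses import dataclass


@dataclass
class FimRecord:
    """One row of this dump: a source file split for fill-in-the-middle training."""
    file_name: str
    prefix: str
    suffix: str
    middle: str
    fim_type: str  # conditional_block, identifier_name, identifier_body, random_line_split

    def reassemble(self) -> str:
        # Concatenating the three pieces should reproduce the original file.
        return self.prefix + self.middle + self.suffix

    def to_psm(self) -> str:
        # Prefix-Suffix-Middle ordering; the sentinel strings are placeholders.
        return "<PRE>" + self.prefix + "<SUF>" + self.suffix + "<MID>" + self.middle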
HotController.js
import webpack from "webpack" import { spawn } from "child_process" import appRootDir from "app-root-dir" import path from "path" import { createNotification } from "./util" import HotServerManager from "./HotServerManager" import HotClientManager from "./HotClientManager" import ConfigFactory from "../webpack/ConfigFactory" import StatusPlugin from "../webpack/plugins/Status" function safeDisposer(manager) { return manager ? manager.dispose() : Promise.resolve() } /* eslint-disable arrow-body-style, no-console */ function createCompiler({ name, start, done }) { try { const webpackConfig = ConfigFactory({ target: name === "server" ? "node" : "web", mode: "development" }) // Offering a special status handling until Webpack offers a proper `done()` callback // See also: https://github.com/webpack/webpack/issues/4243 webpackConfig.plugins.push(new StatusPlugin({ name, start, done })) return webpack(webpackConfig) } catch (error) { createNotification({ title: "development", level: "error", message: "Webpack config is invalid, please check the console for more information.", notify: true }) console.error(error) throw error } } export default class HotController { constructor() { this.hotClientManager = null this.hotServerManager = null this.clientIsBuilding = false this.serverIsBuilding = false this.timeout = 0 const createClientManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "client", start: () => { this.clientIsBuilding = true createNotification({ title: "Hot Client", level: "info", message: "Building new bundle..." }) }, done: () => { this.clientIsBuilding = false createNotification({ title: "Hot Client", level: "info", message: "Bundle is ready.", notify: true }) resolve(compiler) } }) this.hotClientCompiler = compiler this.hotClientManager = new HotClientManager(compiler) }) } const createServerManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "server", start: () => { this.serverIsBuilding = true createNotification({ title: "Hot Server", level: "info", message: "Building new bundle..." }) }, done: () => { this.serverIsBuilding = false createNotification({ title: "Hot Server", level: "info", message: "Bundle is ready.", notify: true }) this.tryStartServer() resolve(compiler) } }) this.compiledServer = path.resolve( appRootDir.get(), compiler.options.output.path, `${Object.keys(compiler.options.entry)[0]}.js`, ) this.hotServerCompiler = compiler this.hotServerManager = new HotServerManager(compiler, this.hotClientCompiler) }) } createClientManager().then(createServerManager).catch((error) => { console.error("Error during build:", error) }) } tryStartServer = () => { if (this.clientIsBuilding) { if (this.serverTryTimeout) { clearTimeout(this.serverTryTimeout) } this.serverTryTimeout = setTimeout(this.tryStartServer, this.timeout) this.timeout += 100 return } this.startServer() this.timeout = 0 } startServer = () => { if (this.server) { this.server.kill() this.server = null createNotification({ title: "Hot Server", level: "info", message: "Restarting server..." }) } const newServer = spawn("node", [ "--inspect", this.compiledServer, "--colors" ], { stdio: [ process.stdin, process.stdout, "pipe" ] }) createNotification({ title: "Hot Server", level: "info", message: "Server running with latest changes.", notify: true }) newServer.stderr.on("data", (data) => { createNotification({ title: "Hot Server", level: "error", message: "Error in server execution, check the console for more info." 
      })
      process.stderr.write("\n")
      process.stderr.write(data)
      process.stderr.write("\n")
    })

    this.server = newServer
  }
() {
    // First the hot client server. Then dispose the hot node server.
    return safeDisposer(this.hotClientManager)
      .then(() => safeDisposer(this.hotServerManager))
      .catch((error) => {
        console.error(error)
      })
  }
}
dispose
identifier_name
HotController.js
import webpack from "webpack" import { spawn } from "child_process" import appRootDir from "app-root-dir" import path from "path" import { createNotification } from "./util" import HotServerManager from "./HotServerManager" import HotClientManager from "./HotClientManager" import ConfigFactory from "../webpack/ConfigFactory" import StatusPlugin from "../webpack/plugins/Status" function safeDisposer(manager) { return manager ? manager.dispose() : Promise.resolve() } /* eslint-disable arrow-body-style, no-console */ function createCompiler({ name, start, done }) { try { const webpackConfig = ConfigFactory({ target: name === "server" ? "node" : "web", mode: "development" }) // Offering a special status handling until Webpack offers a proper `done()` callback // See also: https://github.com/webpack/webpack/issues/4243 webpackConfig.plugins.push(new StatusPlugin({ name, start, done })) return webpack(webpackConfig) } catch (error) { createNotification({ title: "development", level: "error", message: "Webpack config is invalid, please check the console for more information.", notify: true }) console.error(error) throw error } } export default class HotController { constructor() { this.hotClientManager = null this.hotServerManager = null this.clientIsBuilding = false this.serverIsBuilding = false this.timeout = 0 const createClientManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "client", start: () => { this.clientIsBuilding = true createNotification({ title: "Hot Client", level: "info", message: "Building new bundle..." }) }, done: () => { this.clientIsBuilding = false createNotification({ title: "Hot Client", level: "info", message: "Bundle is ready.", notify: true }) resolve(compiler) } }) this.hotClientCompiler = compiler this.hotClientManager = new HotClientManager(compiler) }) } const createServerManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "server", start: () => { this.serverIsBuilding = true createNotification({ title: "Hot Server", level: "info", message: "Building new bundle..." }) }, done: () => { this.serverIsBuilding = false createNotification({ title: "Hot Server", level: "info", message: "Bundle is ready.", notify: true }) this.tryStartServer() resolve(compiler) } }) this.compiledServer = path.resolve( appRootDir.get(), compiler.options.output.path, `${Object.keys(compiler.options.entry)[0]}.js`, ) this.hotServerCompiler = compiler this.hotServerManager = new HotServerManager(compiler, this.hotClientCompiler) }) } createClientManager().then(createServerManager).catch((error) => { console.error("Error during build:", error) }) } tryStartServer = () => { if (this.clientIsBuilding) { if (this.serverTryTimeout) { clearTimeout(this.serverTryTimeout) } this.serverTryTimeout = setTimeout(this.tryStartServer, this.timeout) this.timeout += 100 return } this.startServer() this.timeout = 0 } startServer = () => { if (this.server) { this.server.kill() this.server = null createNotification({ title: "Hot Server", level: "info", message: "Restarting server..." }) } const newServer = spawn("node", [ "--inspect", this.compiledServer, "--colors" ], { stdio: [ process.stdin, process.stdout, "pipe" ] }) createNotification({ title: "Hot Server", level: "info", message: "Server running with latest changes.", notify: true }) newServer.stderr.on("data", (data) => { createNotification({ title: "Hot Server", level: "error", message: "Error in server execution, check the console for more info." 
      })
      process.stderr.write("\n")
      process.stderr.write(data)
      process.stderr.write("\n")
    this.server = newServer
  }

  dispose() {
    // First the hot client server. Then dispose the hot node server.
    return safeDisposer(this.hotClientManager)
      .then(() => safeDisposer(this.hotServerManager))
      .catch((error) => {
        console.error(error)
      })
  }
}
})
random_line_split
HotController.js
import webpack from "webpack" import { spawn } from "child_process" import appRootDir from "app-root-dir" import path from "path" import { createNotification } from "./util" import HotServerManager from "./HotServerManager" import HotClientManager from "./HotClientManager" import ConfigFactory from "../webpack/ConfigFactory" import StatusPlugin from "../webpack/plugins/Status" function safeDisposer(manager) { return manager ? manager.dispose() : Promise.resolve() } /* eslint-disable arrow-body-style, no-console */ function createCompiler({ name, start, done })
export default class HotController { constructor() { this.hotClientManager = null this.hotServerManager = null this.clientIsBuilding = false this.serverIsBuilding = false this.timeout = 0 const createClientManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "client", start: () => { this.clientIsBuilding = true createNotification({ title: "Hot Client", level: "info", message: "Building new bundle..." }) }, done: () => { this.clientIsBuilding = false createNotification({ title: "Hot Client", level: "info", message: "Bundle is ready.", notify: true }) resolve(compiler) } }) this.hotClientCompiler = compiler this.hotClientManager = new HotClientManager(compiler) }) } const createServerManager = () => { return new Promise((resolve) => { const compiler = createCompiler({ name: "server", start: () => { this.serverIsBuilding = true createNotification({ title: "Hot Server", level: "info", message: "Building new bundle..." }) }, done: () => { this.serverIsBuilding = false createNotification({ title: "Hot Server", level: "info", message: "Bundle is ready.", notify: true }) this.tryStartServer() resolve(compiler) } }) this.compiledServer = path.resolve( appRootDir.get(), compiler.options.output.path, `${Object.keys(compiler.options.entry)[0]}.js`, ) this.hotServerCompiler = compiler this.hotServerManager = new HotServerManager(compiler, this.hotClientCompiler) }) } createClientManager().then(createServerManager).catch((error) => { console.error("Error during build:", error) }) } tryStartServer = () => { if (this.clientIsBuilding) { if (this.serverTryTimeout) { clearTimeout(this.serverTryTimeout) } this.serverTryTimeout = setTimeout(this.tryStartServer, this.timeout) this.timeout += 100 return } this.startServer() this.timeout = 0 } startServer = () => { if (this.server) { this.server.kill() this.server = null createNotification({ title: "Hot Server", level: "info", message: "Restarting server..." }) } const newServer = spawn("node", [ "--inspect", this.compiledServer, "--colors" ], { stdio: [ process.stdin, process.stdout, "pipe" ] }) createNotification({ title: "Hot Server", level: "info", message: "Server running with latest changes.", notify: true }) newServer.stderr.on("data", (data) => { createNotification({ title: "Hot Server", level: "error", message: "Error in server execution, check the console for more info." }) process.stderr.write("\n") process.stderr.write(data) process.stderr.write("\n") }) this.server = newServer } dispose() { // First the hot client server. Then dispose the hot node server. return safeDisposer(this.hotClientManager).then(() => safeDisposer(this.hotServerManager)).catch((error) => { console.error(error) }) } }
{
  try {
    const webpackConfig = ConfigFactory({
      target: name === "server" ? "node" : "web",
      mode: "development"
    })

    // Offering a special status handling until Webpack offers a proper `done()` callback
    // See also: https://github.com/webpack/webpack/issues/4243
    webpackConfig.plugins.push(new StatusPlugin({ name, start, done }))

    return webpack(webpackConfig)
  } catch (error) {
    createNotification({
      title: "development",
      level: "error",
      message: "Webpack config is invalid, please check the console for more information.",
      notify: true
    })
    console.error(error)
    throw error
  }
}
identifier_body
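The same HotController.js source appears once per span category above (conditional_block, identifier_name, random_line_split, identifier_body), each time split at a different point. Since every split of a given file should be lossless, a consumer can sanity-check a slice like this one. A sketch, assuming the FimRecord shape from the earlier example and that file_name identifies a single source file within the slice:

from collections import defaultdict


def check_round_trip(records) -> dict:
    """Map each file_name to True when all of its splits reassemble identically."""
    sources = defaultdict(set)
    for rec in records:
        sources[rec.file_name].add(rec.prefix + rec.middle + rec.suffix)
    # Exactly one distinct reassembly per file means no split lost or altered text.
    return {name: len(texts) == 1 for name, texts in sources.items()}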
displayfield.ts
import {Component,ViewChild,ElementRef,ComponentFactoryResolver,ViewContainerRef,forwardRef,ContentChildren,QueryList} from '@angular/core'; import { base } from './base'; // Ext Class - Ext.form.field.Display export class displayfieldMetaData { public static XTYPE: string = 'displayfield'; public static INPUTNAMES: string[] = [ 'activeCounter', 'activeError', 'activeErrorsTpl', 'afterBodyEl', 'afterLabelTextTpl', 'afterLabelTpl', 'afterSubTpl', 'alignOnScroll', 'alignTarget', 'alwaysOnTop', 'anchor', 'animateShadow', 'ariaAttributes', 'ariaDescribedBy', 'ariaErrorText', 'ariaHelp', 'ariaLabel', 'ariaLabelledBy', 'autoEl', 'autoFitErrors', 'autoRender', 'autoScroll', 'autoShow', 'baseBodyCls', 'baseCls', 'beforeBodyEl', 'beforeLabelTextTpl', 'beforeLabelTpl', 'beforeSubTpl', 'bind', 'border', 'checkChangeBuffer', 'checkChangeEvents', 'childEls', 'cls', 'columnWidth', 'componentCls', 'componentLayout', 'constrain', 'constraintInsets', 'constrainTo', 'contentEl', 'controller', 'data', 'defaultAlign', 'defaultListenerScope', 'dirtyCls', 'disabled', 'disabledCls', 'dock', 'draggable', 'errorMsgCls', 'fieldBodyCls', 'fieldCls', 'fieldLabel', 'fieldStyle', 'fieldSubTpl', 'fixed', 'flex', 'floating', 'focusCls', 'focusOnToFront', 'formatText', 'formBind', 'formItemCls', 'frame', 'height', 'hidden', 'hideEmptyLabel', 'hideLabel', 'hideMode', 'html', 'htmlEncode', 'id', 'inputAttrTpl', 'inputId', 'inputType', 'invalidCls', 'invalidText', 'isTextInput', 'itemId', 'keyMap', 'keyMapEnabled', 'keyMapTarget', 'labelableRenderTpl', 'labelAlign', 'labelAttrTpl', 'labelCls', 'labelClsExtra', 'labelPad', 'labelSeparator', 'labelStyle', 'labelWidth', 'liquidLayout', 'listeners', 'liveDrag', 'loader', 'margin', 'maskDefaults', 'maskElement', 'maxHeight', 'maxWidth', 'minHeight', 'minWidth', 'modal', 'modelValidation', 'msgTarget', 'name', 'nameable', 'overCls', 'overflowX', 'overflowY', 'padding', 'plugins', 'preventMark', 'publishes', 'readOnly', 'readOnlyCls', 'reference', 'region', 'renderConfig', 'renderData', 'renderer', 'renderSelectors', 'renderTo', 'renderTpl', 'resizable', 'resizeHandles', 'saveDelay', 'scope', 'scrollable', 'session', 'shadow', 'shadowOffset', 'shareableName', 'shim', 'shrinkWrap', 'stateEvents', 'stateful', 'stateId', 'style', 'submitValue', 'tabIndex', 'toFrontOnShow', 'touchAction', 'tpl', 'tplWriteMode', 'twoWayBindable', 'ui', 'uiCls', 'userCls', 'validateOnBlur', 'validateOnChange', 'validation', 'validationField', 'value', 'valuePublishEvent', 'viewModel', 'weight', 'width', 'xtype', 'flex', 'platformConfig', 'responsiveConfig', 'fitToParent', 'config' ]; public static OUTPUTS: any[] = [ {name:'activate',parameters:'displayfield'}, {name:'added',parameters:'displayfield,container,pos'}, {name:'afterlayoutanimation',parameters:'displayfield'}, {name:'afterrender',parameters:'displayfield'}, {name:'beforeactivate',parameters:'displayfield'}, {name:'beforedeactivate',parameters:'displayfield'}, {name:'beforedestroy',parameters:'displayfield'}, {name:'beforehide',parameters:'displayfield'}, {name:'beforerender',parameters:'displayfield'}, {name:'beforeshow',parameters:'displayfield'}, {name:'beforestaterestore',parameters:'displayfield,state'}, {name:'beforestatesave',parameters:'displayfield,state'}, {name:'blur',parameters:'displayfield,event'}, {name:'boxready',parameters:'displayfield,width,height'}, {name:'change',parameters:'displayfield,newValue,oldValue'}, {name:'deactivate',parameters:'displayfield'}, {name:'destroy',parameters:'displayfield'}, 
{name:'dirtychange',parameters:'displayfield,isDirty'}, {name:'disable',parameters:'displayfield'}, {name:'enable',parameters:'displayfield'}, {name:'errorchange',parameters:'displayfield,error'}, {name:'focus',parameters:'displayfield,event'}, {name:'focusenter',parameters:'displayfield,event'}, {name:'focusleave',parameters:'displayfield,event'}, {name:'hide',parameters:'displayfield'}, {name:'move',parameters:'displayfield,x,y'}, {name:'removed',parameters:'displayfield,ownerCt'}, {name:'render',parameters:'displayfield'}, {name:'resize',parameters:'displayfield,width,height,oldWidth,oldHeight'}, {name:'show',parameters:'displayfield'}, {name:'specialkey',parameters:'displayfield,e'}, {name:'staterestore',parameters:'displayfield,state'}, {name:'statesave',parameters:'displayfield,state'}, {name:'validitychange',parameters:'displayfield,isValid'}, {name:'writeablechange',parameters:'displayfield,Read'}, {name:'ready',parameters:''} ]; public static OUTPUTNAMES: string[] = [ 'activate', 'added', 'afterlayoutanimation', 'afterrender', 'beforeactivate', 'beforedeactivate', 'beforedestroy', 'beforehide', 'beforerender', 'beforeshow', 'beforestaterestore', 'beforestatesave', 'blur', 'boxready', 'change', 'deactivate', 'destroy', 'dirtychange', 'disable', 'enable', 'errorchange', 'focus', 'focusenter', 'focusleave', 'hide', 'move', 'removed', 'render', 'resize', 'show', 'specialkey', 'staterestore', 'statesave', 'validitychange', 'writeablechange', 'ready' ]; } @Component({ selector: displayfieldMetaData.XTYPE, inputs: displayfieldMetaData.INPUTNAMES, outputs: displayfieldMetaData.OUTPUTNAMES, providers: [{provide: base, useExisting: forwardRef(() => displayfield)}], template: '<ng-template #dynamic></ng-template>' }) export class
extends base {
  constructor(eRef:ElementRef,resolver:ComponentFactoryResolver,vcRef:ViewContainerRef) {
    super(eRef,resolver,vcRef,displayfieldMetaData);
  }

  //@ContentChildren(base,{read:ViewContainerRef}) extbaseRef:QueryList<ViewContainerRef>;
  @ContentChildren(base,{read: base}) extbaseRef: QueryList<base>;
  @ViewChild('dynamic',{read:ViewContainerRef}) dynamicRef:ViewContainerRef;

  ngAfterContentInit() {this.AfterContentInit(this.extbaseRef);}
  ngOnInit() {this.OnInit(this.dynamicRef,displayfieldMetaData);}
}
displayfield
identifier_name
displayfield.ts
import {Component,ViewChild,ElementRef,ComponentFactoryResolver,ViewContainerRef,forwardRef,ContentChildren,QueryList} from '@angular/core'; import { base } from './base'; // Ext Class - Ext.form.field.Display export class displayfieldMetaData { public static XTYPE: string = 'displayfield'; public static INPUTNAMES: string[] = [ 'activeCounter', 'activeError', 'activeErrorsTpl', 'afterBodyEl', 'afterLabelTextTpl', 'afterLabelTpl', 'afterSubTpl', 'alignOnScroll', 'alignTarget', 'alwaysOnTop', 'anchor', 'animateShadow', 'ariaAttributes', 'ariaDescribedBy', 'ariaErrorText', 'ariaHelp', 'ariaLabel', 'ariaLabelledBy', 'autoEl', 'autoFitErrors', 'autoRender', 'autoScroll', 'autoShow', 'baseBodyCls', 'baseCls', 'beforeBodyEl', 'beforeLabelTextTpl', 'beforeLabelTpl', 'beforeSubTpl', 'bind', 'border', 'checkChangeBuffer', 'checkChangeEvents', 'childEls', 'cls', 'columnWidth', 'componentCls', 'componentLayout', 'constrain', 'constraintInsets', 'constrainTo', 'contentEl', 'controller', 'data', 'defaultAlign', 'defaultListenerScope', 'dirtyCls', 'disabled', 'disabledCls', 'dock', 'draggable', 'errorMsgCls', 'fieldBodyCls', 'fieldCls', 'fieldLabel', 'fieldStyle', 'fieldSubTpl', 'fixed', 'flex', 'floating', 'focusCls', 'focusOnToFront', 'formatText', 'formBind', 'formItemCls', 'frame', 'height', 'hidden', 'hideEmptyLabel', 'hideLabel', 'hideMode', 'html', 'htmlEncode', 'id', 'inputAttrTpl', 'inputId', 'inputType', 'invalidCls', 'invalidText', 'isTextInput', 'itemId', 'keyMap', 'keyMapEnabled', 'keyMapTarget', 'labelableRenderTpl', 'labelAlign', 'labelAttrTpl', 'labelCls', 'labelClsExtra', 'labelPad', 'labelSeparator', 'labelStyle', 'labelWidth', 'liquidLayout', 'listeners', 'liveDrag', 'loader', 'margin', 'maskDefaults', 'maskElement', 'maxHeight', 'maxWidth', 'minHeight', 'minWidth', 'modal', 'modelValidation', 'msgTarget', 'name', 'nameable', 'overCls', 'overflowX', 'overflowY', 'padding', 'plugins', 'preventMark', 'publishes', 'readOnly', 'readOnlyCls', 'reference', 'region', 'renderConfig', 'renderData', 'renderer', 'renderSelectors', 'renderTo', 'renderTpl', 'resizable', 'resizeHandles', 'saveDelay', 'scope', 'scrollable', 'session', 'shadow', 'shadowOffset', 'shareableName', 'shim', 'shrinkWrap', 'stateEvents', 'stateful', 'stateId', 'style', 'submitValue', 'tabIndex', 'toFrontOnShow', 'touchAction', 'tpl', 'tplWriteMode', 'twoWayBindable', 'ui', 'uiCls', 'userCls', 'validateOnBlur', 'validateOnChange', 'validation', 'validationField', 'value', 'valuePublishEvent', 'viewModel', 'weight', 'width', 'xtype', 'flex', 'platformConfig', 'responsiveConfig', 'fitToParent', 'config' ]; public static OUTPUTS: any[] = [ {name:'activate',parameters:'displayfield'}, {name:'added',parameters:'displayfield,container,pos'}, {name:'afterlayoutanimation',parameters:'displayfield'}, {name:'afterrender',parameters:'displayfield'}, {name:'beforeactivate',parameters:'displayfield'}, {name:'beforedeactivate',parameters:'displayfield'}, {name:'beforedestroy',parameters:'displayfield'}, {name:'beforehide',parameters:'displayfield'}, {name:'beforerender',parameters:'displayfield'}, {name:'beforeshow',parameters:'displayfield'}, {name:'beforestaterestore',parameters:'displayfield,state'}, {name:'beforestatesave',parameters:'displayfield,state'}, {name:'blur',parameters:'displayfield,event'}, {name:'boxready',parameters:'displayfield,width,height'}, {name:'change',parameters:'displayfield,newValue,oldValue'}, {name:'deactivate',parameters:'displayfield'}, {name:'destroy',parameters:'displayfield'}, 
{name:'dirtychange',parameters:'displayfield,isDirty'}, {name:'disable',parameters:'displayfield'}, {name:'enable',parameters:'displayfield'}, {name:'errorchange',parameters:'displayfield,error'}, {name:'focus',parameters:'displayfield,event'}, {name:'focusenter',parameters:'displayfield,event'}, {name:'focusleave',parameters:'displayfield,event'}, {name:'hide',parameters:'displayfield'}, {name:'move',parameters:'displayfield,x,y'}, {name:'removed',parameters:'displayfield,ownerCt'}, {name:'render',parameters:'displayfield'}, {name:'resize',parameters:'displayfield,width,height,oldWidth,oldHeight'}, {name:'show',parameters:'displayfield'}, {name:'specialkey',parameters:'displayfield,e'}, {name:'staterestore',parameters:'displayfield,state'}, {name:'statesave',parameters:'displayfield,state'}, {name:'validitychange',parameters:'displayfield,isValid'}, {name:'writeablechange',parameters:'displayfield,Read'}, {name:'ready',parameters:''} ]; public static OUTPUTNAMES: string[] = [ 'activate', 'added', 'afterlayoutanimation', 'afterrender', 'beforeactivate', 'beforedeactivate', 'beforedestroy', 'beforehide', 'beforerender', 'beforeshow', 'beforestaterestore', 'beforestatesave', 'blur', 'boxready', 'change', 'deactivate', 'destroy', 'dirtychange', 'disable', 'enable', 'errorchange', 'focus', 'focusenter', 'focusleave', 'hide', 'move', 'removed', 'render', 'resize', 'show', 'specialkey', 'staterestore', 'statesave', 'validitychange', 'writeablechange', 'ready' ]; } @Component({ selector: displayfieldMetaData.XTYPE, inputs: displayfieldMetaData.INPUTNAMES, outputs: displayfieldMetaData.OUTPUTNAMES, providers: [{provide: base, useExisting: forwardRef(() => displayfield)}], template: '<ng-template #dynamic></ng-template>' }) export class displayfield extends base { constructor(eRef:ElementRef,resolver:ComponentFactoryResolver,vcRef:ViewContainerRef)
//@ContentChildren(base,{read:ViewContainerRef}) extbaseRef:QueryList<ViewContainerRef>;
  @ContentChildren(base,{read: base}) extbaseRef: QueryList<base>;
  @ViewChild('dynamic',{read:ViewContainerRef}) dynamicRef:ViewContainerRef;

  ngAfterContentInit() {this.AfterContentInit(this.extbaseRef);}
  ngOnInit() {this.OnInit(this.dynamicRef,displayfieldMetaData);}
}
{
    super(eRef,resolver,vcRef,displayfieldMetaData);
  }
identifier_body
displayfield.ts
import {Component,ViewChild,ElementRef,ComponentFactoryResolver,ViewContainerRef,forwardRef,ContentChildren,QueryList} from '@angular/core'; import { base } from './base'; // Ext Class - Ext.form.field.Display export class displayfieldMetaData { public static XTYPE: string = 'displayfield'; public static INPUTNAMES: string[] = [ 'activeCounter', 'activeError', 'activeErrorsTpl', 'afterBodyEl', 'afterLabelTextTpl', 'afterLabelTpl', 'afterSubTpl', 'alignOnScroll', 'alignTarget', 'alwaysOnTop', 'anchor', 'animateShadow', 'ariaAttributes', 'ariaDescribedBy', 'ariaErrorText', 'ariaHelp', 'ariaLabel', 'ariaLabelledBy', 'autoEl', 'autoFitErrors', 'autoRender', 'autoScroll', 'autoShow', 'baseBodyCls', 'baseCls', 'beforeBodyEl', 'beforeLabelTextTpl', 'beforeLabelTpl', 'beforeSubTpl', 'bind', 'border', 'checkChangeBuffer', 'checkChangeEvents', 'childEls', 'cls', 'columnWidth', 'componentCls', 'componentLayout', 'constrain', 'constraintInsets', 'constrainTo', 'contentEl', 'controller', 'data', 'defaultAlign', 'defaultListenerScope', 'dirtyCls', 'disabled', 'disabledCls', 'dock', 'draggable', 'errorMsgCls', 'fieldBodyCls', 'fieldCls', 'fieldLabel', 'fieldStyle', 'fieldSubTpl', 'fixed', 'flex', 'floating', 'focusCls', 'focusOnToFront', 'formatText', 'formBind', 'formItemCls', 'frame', 'height', 'hidden', 'hideEmptyLabel', 'hideLabel', 'hideMode', 'html', 'htmlEncode', 'id', 'inputAttrTpl', 'inputId', 'inputType', 'invalidCls', 'invalidText', 'isTextInput', 'itemId', 'keyMap', 'keyMapEnabled', 'keyMapTarget', 'labelableRenderTpl', 'labelAlign', 'labelAttrTpl', 'labelCls', 'labelClsExtra', 'labelPad', 'labelSeparator', 'labelStyle', 'labelWidth', 'liquidLayout', 'listeners', 'liveDrag', 'loader', 'margin', 'maskDefaults', 'maskElement', 'maxHeight', 'maxWidth', 'minHeight', 'minWidth', 'modal', 'modelValidation', 'msgTarget', 'name', 'nameable', 'overCls', 'overflowX', 'overflowY', 'padding', 'plugins', 'preventMark', 'publishes', 'readOnly', 'readOnlyCls', 'reference', 'region', 'renderConfig', 'renderData', 'renderer', 'renderSelectors', 'renderTo', 'renderTpl', 'resizable', 'resizeHandles', 'saveDelay', 'scope', 'scrollable', 'session', 'shadow', 'shadowOffset', 'shareableName', 'shim', 'shrinkWrap', 'stateEvents', 'stateful', 'stateId', 'style', 'submitValue', 'tabIndex', 'toFrontOnShow', 'touchAction', 'tpl', 'tplWriteMode', 'twoWayBindable', 'ui', 'uiCls', 'userCls', 'validateOnBlur', 'validateOnChange', 'validation', 'validationField', 'value', 'valuePublishEvent', 'viewModel', 'weight', 'width', 'xtype', 'flex', 'platformConfig', 'responsiveConfig', 'fitToParent', 'config' ]; public static OUTPUTS: any[] = [ {name:'activate',parameters:'displayfield'}, {name:'added',parameters:'displayfield,container,pos'}, {name:'afterlayoutanimation',parameters:'displayfield'}, {name:'afterrender',parameters:'displayfield'}, {name:'beforeactivate',parameters:'displayfield'}, {name:'beforedeactivate',parameters:'displayfield'}, {name:'beforedestroy',parameters:'displayfield'}, {name:'beforehide',parameters:'displayfield'}, {name:'beforerender',parameters:'displayfield'}, {name:'beforeshow',parameters:'displayfield'}, {name:'beforestaterestore',parameters:'displayfield,state'}, {name:'beforestatesave',parameters:'displayfield,state'}, {name:'blur',parameters:'displayfield,event'}, {name:'boxready',parameters:'displayfield,width,height'},
{name:'deactivate',parameters:'displayfield'}, {name:'destroy',parameters:'displayfield'}, {name:'dirtychange',parameters:'displayfield,isDirty'}, {name:'disable',parameters:'displayfield'}, {name:'enable',parameters:'displayfield'}, {name:'errorchange',parameters:'displayfield,error'}, {name:'focus',parameters:'displayfield,event'}, {name:'focusenter',parameters:'displayfield,event'}, {name:'focusleave',parameters:'displayfield,event'}, {name:'hide',parameters:'displayfield'}, {name:'move',parameters:'displayfield,x,y'}, {name:'removed',parameters:'displayfield,ownerCt'}, {name:'render',parameters:'displayfield'}, {name:'resize',parameters:'displayfield,width,height,oldWidth,oldHeight'}, {name:'show',parameters:'displayfield'}, {name:'specialkey',parameters:'displayfield,e'}, {name:'staterestore',parameters:'displayfield,state'}, {name:'statesave',parameters:'displayfield,state'}, {name:'validitychange',parameters:'displayfield,isValid'}, {name:'writeablechange',parameters:'displayfield,Read'}, {name:'ready',parameters:''} ]; public static OUTPUTNAMES: string[] = [ 'activate', 'added', 'afterlayoutanimation', 'afterrender', 'beforeactivate', 'beforedeactivate', 'beforedestroy', 'beforehide', 'beforerender', 'beforeshow', 'beforestaterestore', 'beforestatesave', 'blur', 'boxready', 'change', 'deactivate', 'destroy', 'dirtychange', 'disable', 'enable', 'errorchange', 'focus', 'focusenter', 'focusleave', 'hide', 'move', 'removed', 'render', 'resize', 'show', 'specialkey', 'staterestore', 'statesave', 'validitychange', 'writeablechange', 'ready' ]; } @Component({ selector: displayfieldMetaData.XTYPE, inputs: displayfieldMetaData.INPUTNAMES, outputs: displayfieldMetaData.OUTPUTNAMES, providers: [{provide: base, useExisting: forwardRef(() => displayfield)}], template: '<ng-template #dynamic></ng-template>' }) export class displayfield extends base { constructor(eRef:ElementRef,resolver:ComponentFactoryResolver,vcRef:ViewContainerRef) { super(eRef,resolver,vcRef,displayfieldMetaData); } //@ContentChildren(base,{read:ViewContainerRef}) extbaseRef:QueryList<ViewContainerRef>; @ContentChildren(base,{read: base}) extbaseRef: QueryList<base>; @ViewChild('dynamic',{read:ViewContainerRef}) dynamicRef:ViewContainerRef; ngAfterContentInit() {this.AfterContentInit(this.extbaseRef);} ngOnInit() {this.OnInit(this.dynamicRef,displayfieldMetaData);} }
{name:'change',parameters:'displayfield,newValue,oldValue'},
random_line_split
index.js
'use strict'; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var Mecab = require('mecab-async'); var mecab = new Mecab(); var MarkovChain = function () { function MarkovChain(text)
_createClass(MarkovChain, [{ key: 'start', value: function start(sentence, callback) { this.parse(sentence, callback); } }, { key: 'parse', value: function parse(sentence, callback) { var _this = this; mecab.parse(this.text, function (err, result) { _this.dictionary = _this.makeDic(result); _this.makeSentence(_this.dictionary, sentence); callback(_this.output); }); } }, { key: 'makeDic', value: function makeDic(items) { var tmp = ['@']; var dic = {}; for (var i in items) { var t = items[i]; var word = t[0]; word = word.replace(/\s*/, ''); if (word == '' || word == 'EOS') continue; tmp.push(word); if (tmp.length < 3) continue; if (tmp.length > 3) tmp.splice(0, 1); this.setWord3(dic, tmp); if (word == '。') { tmp = ['@']; continue; } } return dic; } }, { key: 'setWord3', value: function setWord3(p, s3) { var w1 = s3[0]; var w2 = s3[1]; var w3 = s3[2]; if (p[w1] == undefined) p[w1] = {}; if (p[w1][w2] == undefined) p[w1][w2] = {}; if (p[w1][w2][w3] == undefined) p[w1][w2][w3] = 0; p[w1][w2][w3]++; } }, { key: 'makeSentence', value: function makeSentence(dic, sentence) { for (var i = 0; i < sentence; i++) { var ret = []; var top = dic['@']; if (!top) continue; var w1 = this.choiceWord(top); var w2 = this.choiceWord(top[w1]); ret.push(w1); ret.push(w2); for (;;) { var w3 = this.choiceWord(dic[w1][w2]); ret.push(w3); if (w3 == '。') break; w1 = w2, w2 = w3; } this.output = ret.join(''); return this.output; } } }, { key: 'objKeys', value: function objKeys(obj) { var r = []; for (var i in obj) { r.push(i); } return r; } }, { key: 'choiceWord', value: function choiceWord(obj) { var ks = this.objKeys(obj); var i = this.rnd(ks.length); return ks[i]; } }, { key: 'rnd', value: function rnd(num) { return Math.floor(Math.random() * num); } }]); return MarkovChain; }(); module.exports = MarkovChain;
{
    _classCallCheck(this, MarkovChain);

    this.text = text;
    this.result = null;
    this.dictionary = {};
    this.output = 'output';
  }
identifier_body
index.js
'use strict'; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var Mecab = require('mecab-async'); var mecab = new Mecab(); var MarkovChain = function () { function MarkovChain(text) { _classCallCheck(this, MarkovChain); this.text = text; this.result = null; this.dictionary = {}; this.output = 'output'; } _createClass(MarkovChain, [{ key: 'start', value: function start(sentence, callback) { this.parse(sentence, callback); } }, { key: 'parse', value: function parse(sentence, callback) { var _this = this; mecab.parse(this.text, function (err, result) { _this.dictionary = _this.makeDic(result); _this.makeSentence(_this.dictionary, sentence); callback(_this.output); }); } }, { key: 'makeDic', value: function makeDic(items) { var tmp = ['@']; var dic = {}; for (var i in items) { var t = items[i]; var word = t[0]; word = word.replace(/\s*/, ''); if (word == '' || word == 'EOS') continue; tmp.push(word); if (tmp.length < 3) continue; if (tmp.length > 3) tmp.splice(0, 1); this.setWord3(dic, tmp); if (word == '。') { tmp = ['@']; continue; } } return dic; } }, { key: 'setWord3', value: function setWord3(p, s3) { var w1 = s3[0]; var w2 = s3[1]; var w3 = s3[2]; if (p[w1] == undefined) p[w1] = {}; if (p[w1][w2] == undefined) p[w1][w2] = {}; if (p[w1][w2][w3] == undefined) p[w1][w2][w3] = 0; p[w1][w2][w3]++; } }, { key: 'makeSentence', value: function makeSentence(dic, sentence) { for (var i = 0; i < sentence; i++) { var ret = []; var top = dic['@']; if (!top) continue; var w1 = this.choiceWord(top); var w2 = this.choiceWord(top[w1]); ret.push(w1); ret.push(w2); for (;;) { var w3 = this.choiceWord(dic[w1][w2]); ret.push(w3); if (w3 == '。') break; w1 = w2, w2 = w3; } this.output = ret.join('');
value: function objKeys(obj) {
      var r = [];
      for (var i in obj) {
        r.push(i);
      }
      return r;
    }
  }, {
    key: 'choiceWord',
    value: function choiceWord(obj) {
      var ks = this.objKeys(obj);
      var i = this.rnd(ks.length);
      return ks[i];
    }
  }, {
    key: 'rnd',
    value: function rnd(num) {
      return Math.floor(Math.random() * num);
    }
  }]);

  return MarkovChain;
}();

module.exports = MarkovChain;
return this.output;
      }
    }
  }, {
    key: 'objKeys',
random_line_split
index.js
'use strict'; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var Mecab = require('mecab-async'); var mecab = new Mecab(); var MarkovChain = function () { function
(text) { _classCallCheck(this, MarkovChain); this.text = text; this.result = null; this.dictionary = {}; this.output = 'output'; } _createClass(MarkovChain, [{ key: 'start', value: function start(sentence, callback) { this.parse(sentence, callback); } }, { key: 'parse', value: function parse(sentence, callback) { var _this = this; mecab.parse(this.text, function (err, result) { _this.dictionary = _this.makeDic(result); _this.makeSentence(_this.dictionary, sentence); callback(_this.output); }); } }, { key: 'makeDic', value: function makeDic(items) { var tmp = ['@']; var dic = {}; for (var i in items) { var t = items[i]; var word = t[0]; word = word.replace(/\s*/, ''); if (word == '' || word == 'EOS') continue; tmp.push(word); if (tmp.length < 3) continue; if (tmp.length > 3) tmp.splice(0, 1); this.setWord3(dic, tmp); if (word == '。') { tmp = ['@']; continue; } } return dic; } }, { key: 'setWord3', value: function setWord3(p, s3) { var w1 = s3[0]; var w2 = s3[1]; var w3 = s3[2]; if (p[w1] == undefined) p[w1] = {}; if (p[w1][w2] == undefined) p[w1][w2] = {}; if (p[w1][w2][w3] == undefined) p[w1][w2][w3] = 0; p[w1][w2][w3]++; } }, { key: 'makeSentence', value: function makeSentence(dic, sentence) { for (var i = 0; i < sentence; i++) { var ret = []; var top = dic['@']; if (!top) continue; var w1 = this.choiceWord(top); var w2 = this.choiceWord(top[w1]); ret.push(w1); ret.push(w2); for (;;) { var w3 = this.choiceWord(dic[w1][w2]); ret.push(w3); if (w3 == '。') break; w1 = w2, w2 = w3; } this.output = ret.join(''); return this.output; } } }, { key: 'objKeys', value: function objKeys(obj) { var r = []; for (var i in obj) { r.push(i); } return r; } }, { key: 'choiceWord', value: function choiceWord(obj) { var ks = this.objKeys(obj); var i = this.rnd(ks.length); return ks[i]; } }, { key: 'rnd', value: function rnd(num) { return Math.floor(Math.random() * num); } }]); return MarkovChain; }(); module.exports = MarkovChain;
MarkovChain
identifier_name
extradhcpopt_db.py
# Copyright (c) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.api.v2 import attributes from neutron.db import db_base_plugin_v2 from neutron.db import model_base from neutron.db import models_v2 from neutron.extensions import extra_dhcp_opt as edo_ext class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId): """Represent a generic concept of extra options associated to a port. Each port may have none to many dhcp opts associated to it that can define specifically different or extra options to DHCP clients. These will be written to the <network_id>/opts files, and each option's tag will be referenced in the <network_id>/host file. """
port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), nullable=False) opt_name = sa.Column(sa.String(64), nullable=False) opt_value = sa.Column(sa.String(255), nullable=False) ip_version = sa.Column(sa.Integer, server_default='4', nullable=False) __table_args__ = (sa.UniqueConstraint( 'port_id', 'opt_name', 'ip_version', name='uniq_extradhcpopts0portid0optname0ipversion'), model_base.BASEV2.__table_args__,) # Add a relationship to the Port model in order to instruct SQLAlchemy to # eagerly load extra_dhcp_opts bindings ports = orm.relationship( models_v2.Port, backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete')) class ExtraDhcpOptMixin(object): """Mixin class to add extra options to the DHCP opts file and associate them to a port. """ def _is_valid_opt_value(self, opt_name, opt_value): # If the dhcp opt is blank-able, it shouldn't be saved to the DB in # case that the value is None if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS: return opt_value is not None # Otherwise, it shouldn't be saved to the DB in case that the value # is None or empty return bool(opt_value) def _process_port_create_extra_dhcp_opts(self, context, port, extra_dhcp_opts): if not extra_dhcp_opts: return port with context.session.begin(subtransactions=True): for dopt in extra_dhcp_opts: if self._is_valid_opt_value(dopt['opt_name'], dopt['opt_value']): ip_version = dopt.get('ip_version', 4) db = ExtraDhcpOpt( port_id=port['id'], opt_name=dopt['opt_name'], opt_value=dopt['opt_value'], ip_version=ip_version) context.session.add(db) return self._extend_port_extra_dhcp_opts_dict(context, port) def _extend_port_extra_dhcp_opts_dict(self, context, port): port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding( context, port['id']) def _get_port_extra_dhcp_opts_binding(self, context, port_id): query = self._model_query(context, ExtraDhcpOpt) binding = query.filter(ExtraDhcpOpt.port_id == port_id) return [{'opt_name': r.opt_name, 'opt_value': r.opt_value, 'ip_version': r.ip_version} for r in binding] def _update_extra_dhcp_opts_on_port(self, context, id, port, updated_port=None): # It is not necessary to update in a transaction, because # its called from within one from ovs_neutron_plugin. dopts = port['port'].get(edo_ext.EXTRADHCPOPTS) if dopts: opt_db = self._model_query( context, ExtraDhcpOpt).filter_by(port_id=id).all() # if there are currently no dhcp_options associated to # this port, Then just insert the new ones and be done. with context.session.begin(subtransactions=True): for upd_rec in dopts: for opt in opt_db: if (opt['opt_name'] == upd_rec['opt_name'] and opt['ip_version'] == upd_rec.get( 'ip_version', 4)): # to handle deleting of a opt from the port. 
if upd_rec['opt_value'] is None:
                                context.session.delete(opt)
                            else:
                                if (self._is_valid_opt_value(
                                        opt['opt_name'],
                                        upd_rec['opt_value']) and
                                        opt['opt_value'] != upd_rec['opt_value']):
                                    opt.update(
                                        {'opt_value': upd_rec['opt_value']})
                            break
                    else:
                        if self._is_valid_opt_value(
                                upd_rec['opt_name'],
                                upd_rec['opt_value']):
                            ip_version = upd_rec.get('ip_version', 4)
                            db = ExtraDhcpOpt(
                                port_id=id,
                                opt_name=upd_rec['opt_name'],
                                opt_value=upd_rec['opt_value'],
                                ip_version=ip_version)
                            context.session.add(db)
            if updated_port:
                edolist = self._get_port_extra_dhcp_opts_binding(context, id)
                updated_port[edo_ext.EXTRADHCPOPTS] = edolist
        return bool(dopts)

    def _extend_port_dict_extra_dhcp_opt(self, res, port):
        res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name,
                                       'opt_value': dho.opt_value,
                                       'ip_version': dho.ip_version}
                                      for dho in port.dhcp_opts]
        return res


db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
    attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
random_line_split
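In this random_line_split row the suffix begins exactly where the prefix ends, so the masked middle appears to be empty or whitespace-only in this rendering. Rows like that give a model nothing to predict, and a consumer may want to drop them. A sketch, again assuming the FimRecord shape from above:

def has_trainable_middle(rec) -> bool:
    # Keep only records whose masked span contains non-whitespace text.
    return bool(rec.middle.strip())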
extradhcpopt_db.py
# Copyright (c) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.api.v2 import attributes from neutron.db import db_base_plugin_v2 from neutron.db import model_base from neutron.db import models_v2 from neutron.extensions import extra_dhcp_opt as edo_ext class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId): """Represent a generic concept of extra options associated to a port. Each port may have none to many dhcp opts associated to it that can define specifically different or extra options to DHCP clients. These will be written to the <network_id>/opts files, and each option's tag will be referenced in the <network_id>/host file. """ port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), nullable=False) opt_name = sa.Column(sa.String(64), nullable=False) opt_value = sa.Column(sa.String(255), nullable=False) ip_version = sa.Column(sa.Integer, server_default='4', nullable=False) __table_args__ = (sa.UniqueConstraint( 'port_id', 'opt_name', 'ip_version', name='uniq_extradhcpopts0portid0optname0ipversion'), model_base.BASEV2.__table_args__,) # Add a relationship to the Port model in order to instruct SQLAlchemy to # eagerly load extra_dhcp_opts bindings ports = orm.relationship( models_v2.Port, backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete')) class ExtraDhcpOptMixin(object): """Mixin class to add extra options to the DHCP opts file and associate them to a port. """ def _is_valid_opt_value(self, opt_name, opt_value): # If the dhcp opt is blank-able, it shouldn't be saved to the DB in # case that the value is None if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS: return opt_value is not None # Otherwise, it shouldn't be saved to the DB in case that the value # is None or empty return bool(opt_value) def _process_port_create_extra_dhcp_opts(self, context, port, extra_dhcp_opts): if not extra_dhcp_opts: return port with context.session.begin(subtransactions=True): for dopt in extra_dhcp_opts: if self._is_valid_opt_value(dopt['opt_name'], dopt['opt_value']): ip_version = dopt.get('ip_version', 4) db = ExtraDhcpOpt( port_id=port['id'], opt_name=dopt['opt_name'], opt_value=dopt['opt_value'], ip_version=ip_version) context.session.add(db) return self._extend_port_extra_dhcp_opts_dict(context, port) def _extend_port_extra_dhcp_opts_dict(self, context, port):
def _get_port_extra_dhcp_opts_binding(self, context, port_id): query = self._model_query(context, ExtraDhcpOpt) binding = query.filter(ExtraDhcpOpt.port_id == port_id) return [{'opt_name': r.opt_name, 'opt_value': r.opt_value, 'ip_version': r.ip_version} for r in binding] def _update_extra_dhcp_opts_on_port(self, context, id, port, updated_port=None): # It is not necessary to update in a transaction, because # its called from within one from ovs_neutron_plugin. dopts = port['port'].get(edo_ext.EXTRADHCPOPTS) if dopts: opt_db = self._model_query( context, ExtraDhcpOpt).filter_by(port_id=id).all() # if there are currently no dhcp_options associated to # this port, Then just insert the new ones and be done. with context.session.begin(subtransactions=True): for upd_rec in dopts: for opt in opt_db: if (opt['opt_name'] == upd_rec['opt_name'] and opt['ip_version'] == upd_rec.get( 'ip_version', 4)): # to handle deleting of a opt from the port. if upd_rec['opt_value'] is None: context.session.delete(opt) else: if (self._is_valid_opt_value( opt['opt_name'], upd_rec['opt_value']) and opt['opt_value'] != upd_rec['opt_value']): opt.update( {'opt_value': upd_rec['opt_value']}) break else: if self._is_valid_opt_value( upd_rec['opt_name'], upd_rec['opt_value']): ip_version = upd_rec.get('ip_version', 4) db = ExtraDhcpOpt( port_id=id, opt_name=upd_rec['opt_name'], opt_value=upd_rec['opt_value'], ip_version=ip_version) context.session.add(db) if updated_port: edolist = self._get_port_extra_dhcp_opts_binding(context, id) updated_port[edo_ext.EXTRADHCPOPTS] = edolist return bool(dopts) def _extend_port_dict_extra_dhcp_opt(self, res, port): res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name, 'opt_value': dho.opt_value, 'ip_version': dho.ip_version} for dho in port.dhcp_opts] return res db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding(
            context, port['id'])
identifier_body
extradhcpopt_db.py
# Copyright (c) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.api.v2 import attributes from neutron.db import db_base_plugin_v2 from neutron.db import model_base from neutron.db import models_v2 from neutron.extensions import extra_dhcp_opt as edo_ext class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId): """Represent a generic concept of extra options associated to a port. Each port may have none to many dhcp opts associated to it that can define specifically different or extra options to DHCP clients. These will be written to the <network_id>/opts files, and each option's tag will be referenced in the <network_id>/host file. """ port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), nullable=False) opt_name = sa.Column(sa.String(64), nullable=False) opt_value = sa.Column(sa.String(255), nullable=False) ip_version = sa.Column(sa.Integer, server_default='4', nullable=False) __table_args__ = (sa.UniqueConstraint( 'port_id', 'opt_name', 'ip_version', name='uniq_extradhcpopts0portid0optname0ipversion'), model_base.BASEV2.__table_args__,) # Add a relationship to the Port model in order to instruct SQLAlchemy to # eagerly load extra_dhcp_opts bindings ports = orm.relationship( models_v2.Port, backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete')) class ExtraDhcpOptMixin(object): """Mixin class to add extra options to the DHCP opts file and associate them to a port. """ def _is_valid_opt_value(self, opt_name, opt_value): # If the dhcp opt is blank-able, it shouldn't be saved to the DB in # case that the value is None if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS: return opt_value is not None # Otherwise, it shouldn't be saved to the DB in case that the value # is None or empty return bool(opt_value) def _process_port_create_extra_dhcp_opts(self, context, port, extra_dhcp_opts): if not extra_dhcp_opts: return port with context.session.begin(subtransactions=True): for dopt in extra_dhcp_opts: if self._is_valid_opt_value(dopt['opt_name'], dopt['opt_value']): ip_version = dopt.get('ip_version', 4) db = ExtraDhcpOpt( port_id=port['id'], opt_name=dopt['opt_name'], opt_value=dopt['opt_value'], ip_version=ip_version) context.session.add(db) return self._extend_port_extra_dhcp_opts_dict(context, port) def _extend_port_extra_dhcp_opts_dict(self, context, port): port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding( context, port['id']) def _get_port_extra_dhcp_opts_binding(self, context, port_id): query = self._model_query(context, ExtraDhcpOpt) binding = query.filter(ExtraDhcpOpt.port_id == port_id) return [{'opt_name': r.opt_name, 'opt_value': r.opt_value, 'ip_version': r.ip_version} for r in binding] def _update_extra_dhcp_opts_on_port(self, context, id, port, updated_port=None): # It is not necessary to update in a transaction, because # its called from within one from ovs_neutron_plugin. 
dopts = port['port'].get(edo_ext.EXTRADHCPOPTS)
        if dopts:
            opt_db = self._model_query(
                context, ExtraDhcpOpt).filter_by(port_id=id).all()
            # if there are currently no dhcp_options associated to
            # this port, Then just insert the new ones and be done.
            with context.session.begin(subtransactions=True):
                for upd_rec in dopts:
                    for opt in opt_db:
                        if (opt['opt_name'] == upd_rec['opt_name'] and
                                opt['ip_version'] == upd_rec.get(
                                    'ip_version', 4)):
                            # to handle deleting of a opt from the port.
else:
                        if self._is_valid_opt_value(
                                upd_rec['opt_name'],
                                upd_rec['opt_value']):
                            ip_version = upd_rec.get('ip_version', 4)
                            db = ExtraDhcpOpt(
                                port_id=id,
                                opt_name=upd_rec['opt_name'],
                                opt_value=upd_rec['opt_value'],
                                ip_version=ip_version)
                            context.session.add(db)
            if updated_port:
                edolist = self._get_port_extra_dhcp_opts_binding(context, id)
                updated_port[edo_ext.EXTRADHCPOPTS] = edolist
        return bool(dopts)

    def _extend_port_dict_extra_dhcp_opt(self, res, port):
        res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name,
                                       'opt_value': dho.opt_value,
                                       'ip_version': dho.ip_version}
                                      for dho in port.dhcp_opts]
        return res


db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
    attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
if upd_rec['opt_value'] is None:
                                context.session.delete(opt)
                            else:
                                if (self._is_valid_opt_value(
                                        opt['opt_name'],
                                        upd_rec['opt_value']) and
                                        opt['opt_value'] != upd_rec['opt_value']):
                                    opt.update(
                                        {'opt_value': upd_rec['opt_value']})
                            break
conditional_block
extradhcpopt_db.py
# Copyright (c) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.api.v2 import attributes from neutron.db import db_base_plugin_v2 from neutron.db import model_base from neutron.db import models_v2 from neutron.extensions import extra_dhcp_opt as edo_ext class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId): """Represent a generic concept of extra options associated to a port. Each port may have none to many dhcp opts associated to it that can define specifically different or extra options to DHCP clients. These will be written to the <network_id>/opts files, and each option's tag will be referenced in the <network_id>/host file. """ port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete="CASCADE"), nullable=False) opt_name = sa.Column(sa.String(64), nullable=False) opt_value = sa.Column(sa.String(255), nullable=False) ip_version = sa.Column(sa.Integer, server_default='4', nullable=False) __table_args__ = (sa.UniqueConstraint( 'port_id', 'opt_name', 'ip_version', name='uniq_extradhcpopts0portid0optname0ipversion'), model_base.BASEV2.__table_args__,) # Add a relationship to the Port model in order to instruct SQLAlchemy to # eagerly load extra_dhcp_opts bindings ports = orm.relationship( models_v2.Port, backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete')) class ExtraDhcpOptMixin(object): """Mixin class to add extra options to the DHCP opts file and associate them to a port. """ def
(self, opt_name, opt_value): # If the dhcp opt is blank-able, it shouldn't be saved to the DB in # case that the value is None if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS: return opt_value is not None # Otherwise, it shouldn't be saved to the DB in case that the value # is None or empty return bool(opt_value) def _process_port_create_extra_dhcp_opts(self, context, port, extra_dhcp_opts): if not extra_dhcp_opts: return port with context.session.begin(subtransactions=True): for dopt in extra_dhcp_opts: if self._is_valid_opt_value(dopt['opt_name'], dopt['opt_value']): ip_version = dopt.get('ip_version', 4) db = ExtraDhcpOpt( port_id=port['id'], opt_name=dopt['opt_name'], opt_value=dopt['opt_value'], ip_version=ip_version) context.session.add(db) return self._extend_port_extra_dhcp_opts_dict(context, port) def _extend_port_extra_dhcp_opts_dict(self, context, port): port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding( context, port['id']) def _get_port_extra_dhcp_opts_binding(self, context, port_id): query = self._model_query(context, ExtraDhcpOpt) binding = query.filter(ExtraDhcpOpt.port_id == port_id) return [{'opt_name': r.opt_name, 'opt_value': r.opt_value, 'ip_version': r.ip_version} for r in binding] def _update_extra_dhcp_opts_on_port(self, context, id, port, updated_port=None): # It is not necessary to update in a transaction, because # its called from within one from ovs_neutron_plugin. dopts = port['port'].get(edo_ext.EXTRADHCPOPTS) if dopts: opt_db = self._model_query( context, ExtraDhcpOpt).filter_by(port_id=id).all() # if there are currently no dhcp_options associated to # this port, Then just insert the new ones and be done. with context.session.begin(subtransactions=True): for upd_rec in dopts: for opt in opt_db: if (opt['opt_name'] == upd_rec['opt_name'] and opt['ip_version'] == upd_rec.get( 'ip_version', 4)): # to handle deleting of a opt from the port. if upd_rec['opt_value'] is None: context.session.delete(opt) else: if (self._is_valid_opt_value( opt['opt_name'], upd_rec['opt_value']) and opt['opt_value'] != upd_rec['opt_value']): opt.update( {'opt_value': upd_rec['opt_value']}) break else: if self._is_valid_opt_value( upd_rec['opt_name'], upd_rec['opt_value']): ip_version = upd_rec.get('ip_version', 4) db = ExtraDhcpOpt( port_id=id, opt_name=upd_rec['opt_name'], opt_value=upd_rec['opt_value'], ip_version=ip_version) context.session.add(db) if updated_port: edolist = self._get_port_extra_dhcp_opts_binding(context, id) updated_port[edo_ext.EXTRADHCPOPTS] = edolist return bool(dopts) def _extend_port_dict_extra_dhcp_opt(self, res, port): res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name, 'opt_value': dho.opt_value, 'ip_version': dho.ip_version} for dho in port.dhcp_opts] return res db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
_is_valid_opt_value
identifier_name
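For quick reference, the persistence rule that `_is_valid_opt_value` implements above can be exercised standalone. This is a minimal sketch; the contents of `VALID_BLANK_EXTRA_DHCP_OPTS` below are an illustrative stand-in for the real constant in `neutron.extensions.extra_dhcp_opt`.

# Minimal sketch of the _is_valid_opt_value rule from ExtraDhcpOptMixin.
# The set contents are hypothetical; the real constant is defined in
# neutron.extensions.extra_dhcp_opt.
VALID_BLANK_EXTRA_DHCP_OPTS = {'classless-static-route'}  # illustrative

def is_valid_opt_value(opt_name, opt_value):
    # Blank-able options are persisted as long as any value was supplied.
    if opt_name in VALID_BLANK_EXTRA_DHCP_OPTS:
        return opt_value is not None
    # Every other option must be non-empty to be written to the DB.
    return bool(opt_value)

assert is_valid_opt_value('classless-static-route', '')        # blank kept
assert not is_valid_opt_value('classless-static-route', None)  # unset skipped
assert not is_valid_opt_value('dns-server', '')                # empty skipped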
index.tsx
import * as React from 'react'; import * as ReactDOM from 'react-dom'; import {connect} from 'react-redux'; import {Link} from 'react-router'; import {DoLogin} from '../actions'; interface LoginPanelProps { doLogin: (username: string, password: string) => void; } export class LoginPanel extends React.Component<LoginPanelProps, {}> { public refs: { [key: string]: Element; username: HTMLInputElement; password: HTMLInputElement; }; public usernameInput(): HTMLInputElement { return ReactDOM.findDOMNode<HTMLInputElement>(this.refs.username); } public passwordInput(): HTMLInputElement { return ReactDOM.findDOMNode<HTMLInputElement>(this.refs.password); } public handleLogin(e: Event) { e.preventDefault(); const username: string = this.usernameInput().value; const password: string = this.passwordInput().value; this.props.doLogin(username, password); } public render() { return ( <form> <div> <label>Username</label> <input type="text" ref="username"/> </div>
<div> <label>Password</label> <input type="password" ref="password" /> </div> <div> <button type="submit" onClick={this.handleLogin.bind(this)}>Submit</button> </div> <div> <Link to="/">Home</Link> </div> </form> ); } } function mapStateToProps(state: any): {} { return {}; } function mapDispatchToProps(dispatch: Redux.Dispatch): {} { return { doLogin: (username, password) => dispatch(new DoLogin(username, password)) }; } export default connect<{}, {}, LoginPanelProps>(mapStateToProps, mapDispatchToProps)(LoginPanel);
random_line_split
index.tsx
import * as React from 'react'; import * as ReactDOM from 'react-dom'; import {connect} from 'react-redux'; import {Link} from 'react-router'; import {DoLogin} from '../actions'; interface LoginPanelProps { doLogin: (username: string, password: string) => void; } export class LoginPanel extends React.Component<LoginPanelProps, {}> { public refs: { [key: string]: Element; username: HTMLInputElement; password: HTMLInputElement; }; public
(): HTMLInputElement { return ReactDOM.findDOMNode<HTMLInputElement>(this.refs.username); } public passwordInput(): HTMLInputElement { return ReactDOM.findDOMNode<HTMLInputElement>(this.refs.password); } public handleLogin(e: Event) { e.preventDefault(); const username: string = this.usernameInput().value; const password: string = this.passwordInput().value; this.props.doLogin(username, password); } public render() { return ( <form> <div> <label>Username</label> <input type="text" ref="username"/> </div> <div> <label>Password</label> <input type="password" ref="password" /> </div> <div> <button type="submit" onClick={this.handleLogin.bind(this)}>Submit</button> </div> <div> <Link to="/">Home</Link> </div> </form> ); } } function mapStateToProps(state: any): {} { return {}; } function mapDispatchToProps(dispatch: Redux.Dispatch): {} { return { doLogin: (username, password) => dispatch(new DoLogin(username, password)) }; } export default connect<{}, {}, LoginPanelProps>(mapStateToProps, mapDispatchToProps)(LoginPanel);
usernameInput
identifier_name
frenchmap.py
# -*- coding: utf-8 -*- # This file is part of pygal # # A python svg graph plotting library # Copyright © 2012-2014 Kozea # # This library is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with pygal. If not, see <http://www.gnu.org/licenses/>. """ Worldmap chart """ from __future__ import division from collections import defaultdict from pygal.ghost import ChartCollection from pygal.util import cut, cached_property, decorate from pygal.graph.graph import Graph from pygal.graph import fr_departments_svg, fr_regions_svg from pygal._compat import u from pygal.etree import etree from numbers import Number import os DEPARTMENTS = { '01': u("Ain"), '02': u("Aisne"), '03': u("Allier"), '04': u("Alpes-de-Haute-Provence"), '05': u("Hautes-Alpes"), '06': u("Alpes-Maritimes"), '07': u("Ardèche"), '08': u("Ardennes"), '09': u("Ariège"), '10': u("Aube"), '11': u("Aude"), '12': u("Aveyron"), '13': u("Bouches-du-Rhône"), '14': u("Calvados"), '15': u("Cantal"), '16': u("Charente"), '17': u("Charente-Maritime"), '18': u("Cher"), '19': u("Corrèze"), '2A': u("Corse-du-Sud"), '2B': u("Haute-Corse"), '21': u("Côte-d'Or"), '22': u("Côtes-d'Armor"), '23': u("Creuse"), '24': u("Dordogne"), '25': u("Doubs"), '26': u("Drôme"), '27': u("Eure"), '28': u("Eure-et-Loir"), '29': u("Finistère"), '30': u("Gard"), '31': u("Haute-Garonne"), '32': u("Gers"), '33': u("Gironde"), '34': u("Hérault"), '35': u("Ille-et-Vilaine"), '36': u("Indre"), '37': u("Indre-et-Loire"), '38': u("Isère"), '39': u("Jura"), '40': u("Landes"), '41': u("Loir-et-Cher"), '42': u("Loire"), '43': u("Haute-Loire"), '44': u("Loire-Atlantique"), '45': u("Loiret"), '46': u("Lot"), '47': u("Lot-et-Garonne"), '48': u("Lozère"), '49': u("Maine-et-Loire"), '50': u("Manche"), '51': u("Marne"), '52': u("Haute-Marne"), '53': u("Mayenne"), '54': u("Meurthe-et-Moselle"), '55': u("Meuse"), '56': u("Morbihan"), '57': u("Moselle"), '58': u("Nièvre"), '59': u("Nord"), '60': u("Oise"), '61': u("Orne"), '62': u("Pas-de-Calais"), '63': u("Puy-de-Dôme"), '64': u("Pyrénées-Atlantiques"), '65': u("Hautes-Pyrénées"), '66': u("Pyrénées-Orientales"), '67': u("Bas-Rhin"), '68': u("Haut-Rhin"), '69': u("Rhône"), '70': u("Haute-Saône"), '71': u("Saône-et-Loire"), '72': u("Sarthe"), '73': u("Savoie"), '74': u("Haute-Savoie"), '75': u("Paris"), '76': u("Seine-Maritime"), '77': u("Seine-et-Marne"), '78': u("Yvelines"), '79': u("Deux-Sèvres"), '80': u("Somme"), '81': u("Tarn"), '82': u("Tarn-et-Garonne"), '83': u("Var"), '84': u("Vaucluse"), '85': u("Vendée"), '86': u("Vienne"), '87': u("Haute-Vienne"), '88': u("Vosges"), '89': u("Yonne"), '90': u("Territoire de Belfort"), '91': u("Essonne"), '92': u("Hauts-de-Seine"), '93': u("Seine-Saint-Denis"), '94': u("Val-de-Marne"), '95': u("Val-d'Oise"), '971': u("Guadeloupe"), '972': u("Martinique"), '973': u("Guyane"), '974': u("Réunion"), # Not a area anymore but in case of... 
'975': u("Saint Pierre et Miquelon"), '976': u("Mayotte") } REGIONS = { '11': u("Île-de-France"), '21': u("Champagne-Ardenne"), '22': u("Picardie"), '23': u("Haute-Normandie"), '24': u("Centre"), '25': u("Basse-Normandie"), '26': u("Bourgogne"), '31': u("Nord-Pas-de-Calais"), '41': u("Lorraine"), '42': u("Alsace"), '43': u("Franche-Comté"), '52': u("Pays-de-la-Loire"), '53': u("Bretagne"),
'72': u("Aquitaine"), '73': u("Midi-Pyrénées"), '74': u("Limousin"), '82': u("Rhône-Alpes"), '83': u("Auvergne"), '91': u("Languedoc-Roussillon"), '93': u("Provence-Alpes-Côte d'Azur"), '94': u("Corse"), '01': u("Guadeloupe"), '02': u("Martinique"), '03': u("Guyane"), '04': u("Réunion"), # Not a region anymore but in case of... '05': u("Saint Pierre et Miquelon"), '06': u("Mayotte") } DPT_MAP = fr_departments_svg.contents REG_MAP = fr_regions_svg.contents class FrenchMapDepartments(Graph): """French department map""" _dual = True x_labels = list(DEPARTMENTS.keys()) area_names = DEPARTMENTS area_prefix = 'z' kind = 'departement' svg_map = DPT_MAP @cached_property def _values(self): """Getter for series values (flattened)""" return [val[1] for serie in self.series for val in serie.values if val[1] is not None] def _plot(self): map = etree.fromstring(self.svg_map) map.set('width', str(self.view.width)) map.set('height', str(self.view.height)) for i, serie in enumerate(self.series): safe_vals = list(filter( lambda x: x is not None, cut(serie.values, 1))) if not safe_vals: continue min_ = min(safe_vals) max_ = max(safe_vals) for j, (area_code, value) in enumerate(serie.values): if isinstance(area_code, Number): area_code = '%2d' % area_code if value is None: continue if max_ == min_: ratio = 1 else: ratio = .3 + .7 * (value - min_) / (max_ - min_) try: areae = map.findall( ".//*[@class='%s%s %s map-element']" % ( self.area_prefix, area_code, self.kind)) except SyntaxError: # Python 2.6 (you'd better install lxml) areae = [] for g in map: for e in g: if '%s%s' % ( self.area_prefix, area_code ) in e.attrib.get('class', ''): areae.append(e) if not areae: continue for area in areae: cls = area.get('class', '').split(' ') cls.append('color-%d' % i) area.set('class', ' '.join(cls)) area.set('style', 'fill-opacity: %f' % (ratio)) metadata = serie.metadata.get(j) if metadata: node = decorate(self.svg, area, metadata) if node != area: area.remove(node) for g in map: if area not in g: continue index = list(g).index(area) g.remove(area) node.append(area) g.insert(index, node) last_node = len(area) > 0 and area[-1] if last_node is not None and last_node.tag == 'title': title_node = last_node text = title_node.text + '\n' else: title_node = self.svg.node(area, 'title') text = '' title_node.text = text + '[%s] %s: %s' % ( serie.title, self.area_names[area_code], self._format(value)) self.nodes['plot'].append(map) class FrenchMapRegions(FrenchMapDepartments): """French regions map""" x_labels = list(REGIONS.keys()) area_names = REGIONS area_prefix = 'a' svg_map = REG_MAP kind = 'region' class FrenchMap(ChartCollection): Regions = FrenchMapRegions Departments = FrenchMapDepartments DEPARTMENTS_REGIONS = { "01": "82", "02": "22", "03": "83", "04": "93", "05": "93", "06": "93", "07": "82", "08": "21", "09": "73", "10": "21", "11": "91", "12": "73", "13": "93", "14": "25", "15": "83", "16": "54", "17": "54", "18": "24", "19": "74", "21": "26", "22": "53", "23": "74", "24": "72", "25": "43", "26": "82", "27": "23", "28": "24", "29": "53", "2A": "94", "2B": "94", "30": "91", "31": "73", "32": "73", "33": "72", "34": "91", "35": "53", "36": "24", "37": "24", "38": "82", "39": "43", "40": "72", "41": "24", "42": "82", "43": "83", "44": "52", "45": "24", "46": "73", "47": "72", "48": "91", "49": "52", "50": "25", "51": "21", "52": "21", "53": "52", "54": "41", "55": "41", "56": "53", "57": "41", "58": "26", "59": "31", "60": "22", "61": "25", "62": "31", "63": "83", "64": "72", "65": "73", "66": "91", "67": "42", 
"68": "42", "69": "82", "70": "43", "71": "26", "72": "52", "73": "82", "74": "82", "75": "11", "76": "23", "77": "11", "78": "11", "79": "54", "80": "22", "81": "73", "82": "73", "83": "93", "84": "93", "85": "52", "86": "54", "87": "74", "88": "41", "89": "26", "90": "43", "91": "11", "92": "11", "93": "11", "94": "11", "95": "11", "971": "01", "972": "02", "973": "03", "974": "04", "975": "05", "976": "06" } def aggregate_regions(values): if isinstance(values, dict): values = values.items() regions = defaultdict(int) for department, value in values: regions[DEPARTMENTS_REGIONS[department]] += value return list(regions.items())
'54': u("Poitou-Charentes"),
random_line_split
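The `_plot` method above shades each matched SVG area with `ratio = .3 + .7 * (value - min_) / (max_ - min_)`. A small worked sketch of that opacity scaling, standalone and outside pygal:

# Fill-opacity scaling used in FrenchMapDepartments._plot: series values
# are mapped linearly onto [0.3, 1.0] so even the minimum stays visible.
def fill_opacity(value, min_, max_):
    if max_ == min_:  # degenerate case: every value identical
        return 1
    return .3 + .7 * (value - min_) / (max_ - min_)

assert fill_opacity(10, 10, 20) == 0.3              # minimum -> faintest
assert abs(fill_opacity(15, 10, 20) - 0.65) < 1e-9  # midpoint
assert abs(fill_opacity(20, 10, 20) - 1.0) < 1e-9   # maximum -> opaque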
frenchmap.py
# -*- coding: utf-8 -*- # This file is part of pygal # # A python svg graph plotting library # Copyright © 2012-2014 Kozea # # This library is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with pygal. If not, see <http://www.gnu.org/licenses/>. """ Worldmap chart """ from __future__ import division from collections import defaultdict from pygal.ghost import ChartCollection from pygal.util import cut, cached_property, decorate from pygal.graph.graph import Graph from pygal.graph import fr_departments_svg, fr_regions_svg from pygal._compat import u from pygal.etree import etree from numbers import Number import os DEPARTMENTS = { '01': u("Ain"), '02': u("Aisne"), '03': u("Allier"), '04': u("Alpes-de-Haute-Provence"), '05': u("Hautes-Alpes"), '06': u("Alpes-Maritimes"), '07': u("Ardèche"), '08': u("Ardennes"), '09': u("Ariège"), '10': u("Aube"), '11': u("Aude"), '12': u("Aveyron"), '13': u("Bouches-du-Rhône"), '14': u("Calvados"), '15': u("Cantal"), '16': u("Charente"), '17': u("Charente-Maritime"), '18': u("Cher"), '19': u("Corrèze"), '2A': u("Corse-du-Sud"), '2B': u("Haute-Corse"), '21': u("Côte-d'Or"), '22': u("Côtes-d'Armor"), '23': u("Creuse"), '24': u("Dordogne"), '25': u("Doubs"), '26': u("Drôme"), '27': u("Eure"), '28': u("Eure-et-Loir"), '29': u("Finistère"), '30': u("Gard"), '31': u("Haute-Garonne"), '32': u("Gers"), '33': u("Gironde"), '34': u("Hérault"), '35': u("Ille-et-Vilaine"), '36': u("Indre"), '37': u("Indre-et-Loire"), '38': u("Isère"), '39': u("Jura"), '40': u("Landes"), '41': u("Loir-et-Cher"), '42': u("Loire"), '43': u("Haute-Loire"), '44': u("Loire-Atlantique"), '45': u("Loiret"), '46': u("Lot"), '47': u("Lot-et-Garonne"), '48': u("Lozère"), '49': u("Maine-et-Loire"), '50': u("Manche"), '51': u("Marne"), '52': u("Haute-Marne"), '53': u("Mayenne"), '54': u("Meurthe-et-Moselle"), '55': u("Meuse"), '56': u("Morbihan"), '57': u("Moselle"), '58': u("Nièvre"), '59': u("Nord"), '60': u("Oise"), '61': u("Orne"), '62': u("Pas-de-Calais"), '63': u("Puy-de-Dôme"), '64': u("Pyrénées-Atlantiques"), '65': u("Hautes-Pyrénées"), '66': u("Pyrénées-Orientales"), '67': u("Bas-Rhin"), '68': u("Haut-Rhin"), '69': u("Rhône"), '70': u("Haute-Saône"), '71': u("Saône-et-Loire"), '72': u("Sarthe"), '73': u("Savoie"), '74': u("Haute-Savoie"), '75': u("Paris"), '76': u("Seine-Maritime"), '77': u("Seine-et-Marne"), '78': u("Yvelines"), '79': u("Deux-Sèvres"), '80': u("Somme"), '81': u("Tarn"), '82': u("Tarn-et-Garonne"), '83': u("Var"), '84': u("Vaucluse"), '85': u("Vendée"), '86': u("Vienne"), '87': u("Haute-Vienne"), '88': u("Vosges"), '89': u("Yonne"), '90': u("Territoire de Belfort"), '91': u("Essonne"), '92': u("Hauts-de-Seine"), '93': u("Seine-Saint-Denis"), '94': u("Val-de-Marne"), '95': u("Val-d'Oise"), '971': u("Guadeloupe"), '972': u("Martinique"), '973': u("Guyane"), '974': u("Réunion"), # Not a area anymore but in case of... 
'975': u("Saint Pierre et Miquelon"), '976': u("Mayotte") } REGIONS = { '11': u("Île-de-France"), '21': u("Champagne-Ardenne"), '22': u("Picardie"), '23': u("Haute-Normandie"), '24': u("Centre"), '25': u("Basse-Normandie"), '26': u("Bourgogne"), '31': u("Nord-Pas-de-Calais"), '41': u("Lorraine"), '42': u("Alsace"), '43': u("Franche-Comté"), '52': u("Pays-de-la-Loire"), '53': u("Bretagne"), '54': u("Poitou-Charentes"), '72': u("Aquitaine"), '73': u("Midi-Pyrénées"), '74': u("Limousin"), '82': u("Rhône-Alpes"), '83': u("Auvergne"), '91': u("Languedoc-Roussillon"), '93': u("Provence-Alpes-Côte d'Azur"), '94': u("Corse"), '01': u("Guadeloupe"), '02': u("Martinique"), '03': u("Guyane"), '04': u("Réunion"), # Not a region anymore but in case of... '05': u("Saint Pierre et Miquelon"), '06': u("Mayotte") } DPT_MAP = fr_departments_svg.contents REG_MAP = fr_regions_svg.contents class FrenchMapDepartments(Graph): """French department map""" _dual = True x_labels = list(DEPARTMENTS.keys()) area_names = DEPARTMENTS area_prefix = 'z' kind = 'departement' svg_map = DPT_MAP @cached_property def _values(self): """Getter for series values (flattened)""" return [val[1] for serie in self.series for val in serie.values if val[1] is not None] def _plot(self): map = etree.fromstring(self.svg_map) map.set('width', str(self.view.width)) map.set('height', str(self.view.height)) for i, serie in enumerate(self.series): safe_vals = list(filter( lambda x: x is not None, cut(serie.values, 1))) if not safe_vals: continue min_ = min(safe_vals) max_ = max(safe_vals) for j, (area_code, value) in enumerate(serie.values): if isinstance(area_code, Number): area_code = '%2d' % area_code if value is None: continue if max_ == min_: ratio = 1 else: ratio = .3 + .7 * (value - min_) / (max_ - min_) try: areae = map.findall( ".//*[@class='%s%s %s map-element']" % ( self.area_prefix, area_code, self.kind)) except SyntaxError: # Python 2.6 (you'd better install lxml) areae = [] for g in map: for e in g: if '%s%s' % ( self.area_prefix, area_code ) in e.attrib.get('class', ''): areae.append(e) if not areae: continue for area in areae: cls = area.get('class', '').split(' ') cls.append('color-%d' % i) area.set('class', ' '.join(cls)) area.set('style', 'fill-opacity: %f' % (ratio)) metadata = serie.metadata.get(j) if metadata: node = decorate(self.svg, area, metadata) if node != area: area.remove(node) for g in map: if area not in g: continue index = list(g).index(area) g.remove(area) node.append(area) g.insert(index, node) last_node = len(area) > 0 and area[-1] if last_node is not None and last_node.tag == 'title': title_node = last_node text = title_node.text + '\n' else: title_node = self.svg.node(area, 'title') text = '' title_node.text = text + '[%s] %s: %s' % ( serie.title, self.area_names[area_code], self._format(value)) self.nodes['plot'].append(map) class FrenchMapRegions(FrenchMapDepartments): """French regions map""" x_labels = list(REGIONS.keys()) area_names = REGIONS area_prefix = 'a' svg_map = REG_MAP kind = 'region' class FrenchMap(ChartCollection): R
FrenchMapRegions Departments = FrenchMapDepartments DEPARTMENTS_REGIONS = { "01": "82", "02": "22", "03": "83", "04": "93", "05": "93", "06": "93", "07": "82", "08": "21", "09": "73", "10": "21", "11": "91", "12": "73", "13": "93", "14": "25", "15": "83", "16": "54", "17": "54", "18": "24", "19": "74", "21": "26", "22": "53", "23": "74", "24": "72", "25": "43", "26": "82", "27": "23", "28": "24", "29": "53", "2A": "94", "2B": "94", "30": "91", "31": "73", "32": "73", "33": "72", "34": "91", "35": "53", "36": "24", "37": "24", "38": "82", "39": "43", "40": "72", "41": "24", "42": "82", "43": "83", "44": "52", "45": "24", "46": "73", "47": "72", "48": "91", "49": "52", "50": "25", "51": "21", "52": "21", "53": "52", "54": "41", "55": "41", "56": "53", "57": "41", "58": "26", "59": "31", "60": "22", "61": "25", "62": "31", "63": "83", "64": "72", "65": "73", "66": "91", "67": "42", "68": "42", "69": "82", "70": "43", "71": "26", "72": "52", "73": "82", "74": "82", "75": "11", "76": "23", "77": "11", "78": "11", "79": "54", "80": "22", "81": "73", "82": "73", "83": "93", "84": "93", "85": "52", "86": "54", "87": "74", "88": "41", "89": "26", "90": "43", "91": "11", "92": "11", "93": "11", "94": "11", "95": "11", "971": "01", "972": "02", "973": "03", "974": "04", "975": "05", "976": "06" } def aggregate_regions(values): if isinstance(values, dict): values = values.items() regions = defaultdict(int) for department, value in values: regions[DEPARTMENTS_REGIONS[department]] += value return list(regions.items())
egions =
identifier_name
frenchmap.py
# -*- coding: utf-8 -*- # This file is part of pygal # # A python svg graph plotting library # Copyright © 2012-2014 Kozea # # This library is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with pygal. If not, see <http://www.gnu.org/licenses/>. """ Worldmap chart """ from __future__ import division from collections import defaultdict from pygal.ghost import ChartCollection from pygal.util import cut, cached_property, decorate from pygal.graph.graph import Graph from pygal.graph import fr_departments_svg, fr_regions_svg from pygal._compat import u from pygal.etree import etree from numbers import Number import os DEPARTMENTS = { '01': u("Ain"), '02': u("Aisne"), '03': u("Allier"), '04': u("Alpes-de-Haute-Provence"), '05': u("Hautes-Alpes"), '06': u("Alpes-Maritimes"), '07': u("Ardèche"), '08': u("Ardennes"), '09': u("Ariège"), '10': u("Aube"), '11': u("Aude"), '12': u("Aveyron"), '13': u("Bouches-du-Rhône"), '14': u("Calvados"), '15': u("Cantal"), '16': u("Charente"), '17': u("Charente-Maritime"), '18': u("Cher"), '19': u("Corrèze"), '2A': u("Corse-du-Sud"), '2B': u("Haute-Corse"), '21': u("Côte-d'Or"), '22': u("Côtes-d'Armor"), '23': u("Creuse"), '24': u("Dordogne"), '25': u("Doubs"), '26': u("Drôme"), '27': u("Eure"), '28': u("Eure-et-Loir"), '29': u("Finistère"), '30': u("Gard"), '31': u("Haute-Garonne"), '32': u("Gers"), '33': u("Gironde"), '34': u("Hérault"), '35': u("Ille-et-Vilaine"), '36': u("Indre"), '37': u("Indre-et-Loire"), '38': u("Isère"), '39': u("Jura"), '40': u("Landes"), '41': u("Loir-et-Cher"), '42': u("Loire"), '43': u("Haute-Loire"), '44': u("Loire-Atlantique"), '45': u("Loiret"), '46': u("Lot"), '47': u("Lot-et-Garonne"), '48': u("Lozère"), '49': u("Maine-et-Loire"), '50': u("Manche"), '51': u("Marne"), '52': u("Haute-Marne"), '53': u("Mayenne"), '54': u("Meurthe-et-Moselle"), '55': u("Meuse"), '56': u("Morbihan"), '57': u("Moselle"), '58': u("Nièvre"), '59': u("Nord"), '60': u("Oise"), '61': u("Orne"), '62': u("Pas-de-Calais"), '63': u("Puy-de-Dôme"), '64': u("Pyrénées-Atlantiques"), '65': u("Hautes-Pyrénées"), '66': u("Pyrénées-Orientales"), '67': u("Bas-Rhin"), '68': u("Haut-Rhin"), '69': u("Rhône"), '70': u("Haute-Saône"), '71': u("Saône-et-Loire"), '72': u("Sarthe"), '73': u("Savoie"), '74': u("Haute-Savoie"), '75': u("Paris"), '76': u("Seine-Maritime"), '77': u("Seine-et-Marne"), '78': u("Yvelines"), '79': u("Deux-Sèvres"), '80': u("Somme"), '81': u("Tarn"), '82': u("Tarn-et-Garonne"), '83': u("Var"), '84': u("Vaucluse"), '85': u("Vendée"), '86': u("Vienne"), '87': u("Haute-Vienne"), '88': u("Vosges"), '89': u("Yonne"), '90': u("Territoire de Belfort"), '91': u("Essonne"), '92': u("Hauts-de-Seine"), '93': u("Seine-Saint-Denis"), '94': u("Val-de-Marne"), '95': u("Val-d'Oise"), '971': u("Guadeloupe"), '972': u("Martinique"), '973': u("Guyane"), '974': u("Réunion"), # Not a area anymore but in case of... 
'975': u("Saint Pierre et Miquelon"), '976': u("Mayotte") } REGIONS = { '11': u("Île-de-France"), '21': u("Champagne-Ardenne"), '22': u("Picardie"), '23': u("Haute-Normandie"), '24': u("Centre"), '25': u("Basse-Normandie"), '26': u("Bourgogne"), '31': u("Nord-Pas-de-Calais"), '41': u("Lorraine"), '42': u("Alsace"), '43': u("Franche-Comté"), '52': u("Pays-de-la-Loire"), '53': u("Bretagne"), '54': u("Poitou-Charentes"), '72': u("Aquitaine"), '73': u("Midi-Pyrénées"), '74': u("Limousin"), '82': u("Rhône-Alpes"), '83': u("Auvergne"), '91': u("Languedoc-Roussillon"), '93': u("Provence-Alpes-Côte d'Azur"), '94': u("Corse"), '01': u("Guadeloupe"), '02': u("Martinique"), '03': u("Guyane"), '04': u("Réunion"), # Not a region anymore but in case of... '05': u("Saint Pierre et Miquelon"), '06': u("Mayotte") } DPT_MAP = fr_departments_svg.contents REG_MAP = fr_regions_svg.contents class FrenchMapDepartments(Graph): """French department map""" _dual = True x_labels = list(DEPARTMENTS.keys()) area_names = DEPARTMENTS area_prefix = 'z' kind = 'departement' svg_map = DPT_MAP @cached_property def _values(self): """Getter for series values (flattened)""" return [val[1] for serie in self.series for val in serie.values if val[1] is not None] def _plot(self): map = etree.fromstring(self.svg_map) map.set('width', str(self.view.width)) map.set('height', str(self.view.height)) for i, serie in enumerate(self.series): safe_vals = list(filter( lambda x: x is not None, cut(serie.values, 1))) if not safe_vals: continue min_ = min(safe_vals) max_ = max(safe_vals) for j, (area_code, value) in enumerate(serie.values): if isinstance(area_code, Number): area_code = '%2d' % area_code if value is None: continue if max_ == min_: ratio = 1 else: ratio = .3 + .7 * (value - min_) / (max_ - min_) try: areae = map.findall( ".//*[@class='%s%s %s map-element']" % ( self.area_prefix, area_code, self.kind)) except SyntaxError: # Python 2.6 (you'd better install lxml) areae = [] for g in map: for e in g: if '%s%s' % ( self.area_prefix, area_code ) in e.attrib.get('class', ''): areae.append(e) if not areae: continue for area in areae: cls = area.get('class', '').split(' ') cls.append('color-%d' % i) area.set('class', ' '.join(cls)) area.set('style', 'fill-opacity: %f' % (ratio)) metadata = serie.metadata.get(j) if metadata: node = decorate(self.svg, area, metadata) if node != area: area.remove(node) for g in map: if area not in g: continue index = list(g).index(area) g.remove(area) node.append(area) g.insert(index, node) last_node = len(area) > 0 and area[-1] if last_node is not None and last_node.tag == 'title': title_node = last_node text = title_node.text + '\n' else: title_node = self.svg.node(area, 'title') text = '' title_node.text = text + '[%s] %s: %s' % ( serie.title, self.area_names[area_code], self._format(value)) self.nodes['plot'].append(map) class FrenchMapRegions(FrenchMapDepartments): """French regions map""" x_labels = list(REGIONS.keys()) area_names = REGIONS area_prefix = 'a' svg_map = REG_MAP kind = 'region' class FrenchMap(ChartCollection): Regions = FrenchMapRegions Departments = FrenchMapDepartments DEPARTMENTS_REGIONS = { "01": "82", "02": "22", "03": "83", "04": "93", "05": "93", "06": "93", "07": "82", "08": "21", "09": "73", "10": "21", "11": "91", "12": "73", "13": "93", "14": "25", "15": "83", "16": "54", "17": "54", "18": "24", "19": "74", "21": "26", "22": "53", "23": "74", "24": "72", "25": "43", "26": "82", "27": "23", "28": "24", "29": "53", "2A": "94", "2B": "94", "30": "91", "31": "73", "32": "73", 
"33": "72", "34": "91", "35": "53", "36": "24", "37": "24", "38": "82", "39": "43", "40": "72", "41": "24", "42": "82", "43": "83", "44": "52", "45": "24", "46": "73", "47": "72", "48": "91", "49": "52", "50": "25", "51": "21", "52": "21", "53": "52", "54": "41", "55": "41", "56": "53", "57": "41", "58": "26", "59": "31", "60": "22", "61": "25", "62": "31", "63": "83", "64": "72", "65": "73", "66": "91", "67": "42", "68": "42", "69": "82", "70": "43", "71": "26", "72": "52", "73": "82", "74": "82", "75": "11", "76": "23", "77": "11", "78": "11", "79": "54", "80": "22", "81": "73", "82": "73", "83": "93", "84": "93", "85": "52", "86": "54", "87": "74", "88": "41", "89": "26", "90": "43", "91": "11", "92": "11", "93": "11", "94": "11", "95": "11", "971": "01", "972": "02", "973": "03", "974": "04", "975": "05", "976": "06" } def aggregate_regions(values): if isinstance(values, dict):
values = values.items() regions = defaultdict(int) for department, value in values: regions[DEPARTMENTS_REGIONS[department]] += value return list(regions.items())
identifier_body
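`aggregate_regions` simply folds per-department values into their region via the `DEPARTMENTS_REGIONS` table. A self-contained usage sketch, using a three-entry excerpt of that table:

from collections import defaultdict

# Three-entry excerpt of the DEPARTMENTS_REGIONS table from frenchmap.py.
DEPARTMENTS_REGIONS = {'75': '11', '92': '11', '2A': '94'}

def aggregate_regions(values):
    # Accept either a dict or an iterable of (department, value) pairs.
    if isinstance(values, dict):
        values = values.items()
    regions = defaultdict(int)
    for department, value in values:
        regions[DEPARTMENTS_REGIONS[department]] += value
    return list(regions.items())

# Paris (75) and Hauts-de-Seine (92) both roll up into Ile-de-France (11).
assert sorted(aggregate_regions({'75': 2, '92': 3, '2A': 1})) == \
    [('11', 5), ('94', 1)]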
frenchmap.py
# -*- coding: utf-8 -*- # This file is part of pygal # # A python svg graph plotting library # Copyright © 2012-2014 Kozea # # This library is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with pygal. If not, see <http://www.gnu.org/licenses/>. """ Worldmap chart """ from __future__ import division from collections import defaultdict from pygal.ghost import ChartCollection from pygal.util import cut, cached_property, decorate from pygal.graph.graph import Graph from pygal.graph import fr_departments_svg, fr_regions_svg from pygal._compat import u from pygal.etree import etree from numbers import Number import os DEPARTMENTS = { '01': u("Ain"), '02': u("Aisne"), '03': u("Allier"), '04': u("Alpes-de-Haute-Provence"), '05': u("Hautes-Alpes"), '06': u("Alpes-Maritimes"), '07': u("Ardèche"), '08': u("Ardennes"), '09': u("Ariège"), '10': u("Aube"), '11': u("Aude"), '12': u("Aveyron"), '13': u("Bouches-du-Rhône"), '14': u("Calvados"), '15': u("Cantal"), '16': u("Charente"), '17': u("Charente-Maritime"), '18': u("Cher"), '19': u("Corrèze"), '2A': u("Corse-du-Sud"), '2B': u("Haute-Corse"), '21': u("Côte-d'Or"), '22': u("Côtes-d'Armor"), '23': u("Creuse"), '24': u("Dordogne"), '25': u("Doubs"), '26': u("Drôme"), '27': u("Eure"), '28': u("Eure-et-Loir"), '29': u("Finistère"), '30': u("Gard"), '31': u("Haute-Garonne"), '32': u("Gers"), '33': u("Gironde"), '34': u("Hérault"), '35': u("Ille-et-Vilaine"), '36': u("Indre"), '37': u("Indre-et-Loire"), '38': u("Isère"), '39': u("Jura"), '40': u("Landes"), '41': u("Loir-et-Cher"), '42': u("Loire"), '43': u("Haute-Loire"), '44': u("Loire-Atlantique"), '45': u("Loiret"), '46': u("Lot"), '47': u("Lot-et-Garonne"), '48': u("Lozère"), '49': u("Maine-et-Loire"), '50': u("Manche"), '51': u("Marne"), '52': u("Haute-Marne"), '53': u("Mayenne"), '54': u("Meurthe-et-Moselle"), '55': u("Meuse"), '56': u("Morbihan"), '57': u("Moselle"), '58': u("Nièvre"), '59': u("Nord"), '60': u("Oise"), '61': u("Orne"), '62': u("Pas-de-Calais"), '63': u("Puy-de-Dôme"), '64': u("Pyrénées-Atlantiques"), '65': u("Hautes-Pyrénées"), '66': u("Pyrénées-Orientales"), '67': u("Bas-Rhin"), '68': u("Haut-Rhin"), '69': u("Rhône"), '70': u("Haute-Saône"), '71': u("Saône-et-Loire"), '72': u("Sarthe"), '73': u("Savoie"), '74': u("Haute-Savoie"), '75': u("Paris"), '76': u("Seine-Maritime"), '77': u("Seine-et-Marne"), '78': u("Yvelines"), '79': u("Deux-Sèvres"), '80': u("Somme"), '81': u("Tarn"), '82': u("Tarn-et-Garonne"), '83': u("Var"), '84': u("Vaucluse"), '85': u("Vendée"), '86': u("Vienne"), '87': u("Haute-Vienne"), '88': u("Vosges"), '89': u("Yonne"), '90': u("Territoire de Belfort"), '91': u("Essonne"), '92': u("Hauts-de-Seine"), '93': u("Seine-Saint-Denis"), '94': u("Val-de-Marne"), '95': u("Val-d'Oise"), '971': u("Guadeloupe"), '972': u("Martinique"), '973': u("Guyane"), '974': u("Réunion"), # Not a area anymore but in case of... 
'975': u("Saint Pierre et Miquelon"), '976': u("Mayotte") } REGIONS = { '11': u("Île-de-France"), '21': u("Champagne-Ardenne"), '22': u("Picardie"), '23': u("Haute-Normandie"), '24': u("Centre"), '25': u("Basse-Normandie"), '26': u("Bourgogne"), '31': u("Nord-Pas-de-Calais"), '41': u("Lorraine"), '42': u("Alsace"), '43': u("Franche-Comté"), '52': u("Pays-de-la-Loire"), '53': u("Bretagne"), '54': u("Poitou-Charentes"), '72': u("Aquitaine"), '73': u("Midi-Pyrénées"), '74': u("Limousin"), '82': u("Rhône-Alpes"), '83': u("Auvergne"), '91': u("Languedoc-Roussillon"), '93': u("Provence-Alpes-Côte d'Azur"), '94': u("Corse"), '01': u("Guadeloupe"), '02': u("Martinique"), '03': u("Guyane"), '04': u("Réunion"), # Not a region anymore but in case of... '05': u("Saint Pierre et Miquelon"), '06': u("Mayotte") } DPT_MAP = fr_departments_svg.contents REG_MAP = fr_regions_svg.contents class FrenchMapDepartments(Graph): """French department map""" _dual = True x_labels = list(DEPARTMENTS.keys()) area_names = DEPARTMENTS area_prefix = 'z' kind = 'departement' svg_map = DPT_MAP @cached_property def _values(self): """Getter for series values (flattened)""" return [val[1] for serie in self.series for val in serie.values if val[1] is not None] def _plot(self): map = etree.fromstring(self.svg_map) map.set('width', str(self.view.width)) map.set('height', str(self.view.height)) for i, serie in enumerate(self.series): safe_vals = list(filter( lambda x: x is not None, cut(serie.values, 1))) if not safe_vals: continue min_ = min(safe_vals) max_ = max(safe_vals) for j, (area_code, value) in enumerate(serie.values): if isinstance(area_code, Number): area_code = '%2d' % area_code if value is None: continue if max_ == min_: ratio = 1 else: ratio = .3 + .7 * (value - min_) / (max_ - min_) try: areae = map.findall( ".//*[@class='%s%s %s map-element']" % ( self.area_prefix, area_code, self.kind)) except SyntaxError: # Python 2.6 (you'd better install lxml) areae = [] for g in map: for e in g: if '%s%s' % ( self.area_prefix, area_code ) in e.attrib.get('class', ''): areae.append(e) if not areae: continue for area in areae: cls = area.get('class', '').split
nd(map) class FrenchMapRegions(FrenchMapDepartments): """French regions map""" x_labels = list(REGIONS.keys()) area_names = REGIONS area_prefix = 'a' svg_map = REG_MAP kind = 'region' class FrenchMap(ChartCollection): Regions = FrenchMapRegions Departments = FrenchMapDepartments DEPARTMENTS_REGIONS = { "01": "82", "02": "22", "03": "83", "04": "93", "05": "93", "06": "93", "07": "82", "08": "21", "09": "73", "10": "21", "11": "91", "12": "73", "13": "93", "14": "25", "15": "83", "16": "54", "17": "54", "18": "24", "19": "74", "21": "26", "22": "53", "23": "74", "24": "72", "25": "43", "26": "82", "27": "23", "28": "24", "29": "53", "2A": "94", "2B": "94", "30": "91", "31": "73", "32": "73", "33": "72", "34": "91", "35": "53", "36": "24", "37": "24", "38": "82", "39": "43", "40": "72", "41": "24", "42": "82", "43": "83", "44": "52", "45": "24", "46": "73", "47": "72", "48": "91", "49": "52", "50": "25", "51": "21", "52": "21", "53": "52", "54": "41", "55": "41", "56": "53", "57": "41", "58": "26", "59": "31", "60": "22", "61": "25", "62": "31", "63": "83", "64": "72", "65": "73", "66": "91", "67": "42", "68": "42", "69": "82", "70": "43", "71": "26", "72": "52", "73": "82", "74": "82", "75": "11", "76": "23", "77": "11", "78": "11", "79": "54", "80": "22", "81": "73", "82": "73", "83": "93", "84": "93", "85": "52", "86": "54", "87": "74", "88": "41", "89": "26", "90": "43", "91": "11", "92": "11", "93": "11", "94": "11", "95": "11", "971": "01", "972": "02", "973": "03", "974": "04", "975": "05", "976": "06" } def aggregate_regions(values): if isinstance(values, dict): values = values.items() regions = defaultdict(int) for department, value in values: regions[DEPARTMENTS_REGIONS[department]] += value return list(regions.items())
(' ') cls.append('color-%d' % i) area.set('class', ' '.join(cls)) area.set('style', 'fill-opacity: %f' % (ratio)) metadata = serie.metadata.get(j) if metadata: node = decorate(self.svg, area, metadata) if node != area: area.remove(node) for g in map: if area not in g: continue index = list(g).index(area) g.remove(area) node.append(area) g.insert(index, node) last_node = len(area) > 0 and area[-1] if last_node is not None and last_node.tag == 'title': title_node = last_node text = title_node.text + '\n' else: title_node = self.svg.node(area, 'title') text = '' title_node.text = text + '[%s] %s: %s' % ( serie.title, self.area_names[area_code], self._format(value)) self.nodes['plot'].appe
conditional_block
opt_name.rs
//! Test optional prefix. extern crate flame; extern crate flamer; use flamer::{flame, noflame}; #[flame("top")] fn a() { let l = Lower {}; l.a(); } #[flame] fn b()
#[noflame] fn c() { b() } pub struct Lower; impl Lower { #[flame("lower")] pub fn a(self) { // nothing to do here } } #[test] fn main() { c(); let spans = flame::spans(); assert_eq!(1, spans.len()); let roots = &spans[0]; println!("{:?}",roots); // if more than 2 roots, a() was flamed twice or c was flamed // main is missing because main isn't closed here assert_eq!("b", roots.name); assert_eq!(1, roots.children.len()); assert_eq!("top::a", roots.children[0].name); assert_eq!(1, roots.children[0].children.len()); assert_eq!("lower::a", roots.children[0].children[0].name); }
{ a() }
identifier_body
opt_name.rs
//! Test optional prefix. extern crate flame;
fn a() { let l = Lower {}; l.a(); } #[flame] fn b() { a() } #[noflame] fn c() { b() } pub struct Lower; impl Lower { #[flame("lower")] pub fn a(self) { // nothing to do here } } #[test] fn main() { c(); let spans = flame::spans(); assert_eq!(1, spans.len()); let roots = &spans[0]; println!("{:?}",roots); // if more than 2 roots, a() was flamed twice or c was flamed // main is missing because main isn't closed here assert_eq!("b", roots.name); assert_eq!(1, roots.children.len()); assert_eq!("top::a", roots.children[0].name); assert_eq!(1, roots.children[0].children.len()); assert_eq!("lower::a", roots.children[0].children[0].name); }
extern crate flamer; use flamer::{flame, noflame}; #[flame("top")]
random_line_split
opt_name.rs
//! Test optional prefix. extern crate flame; extern crate flamer; use flamer::{flame, noflame}; #[flame("top")] fn a() { let l = Lower {}; l.a(); } #[flame] fn b() { a() } #[noflame] fn
() { b() } pub struct Lower; impl Lower { #[flame("lower")] pub fn a(self) { // nothing to do here } } #[test] fn main() { c(); let spans = flame::spans(); assert_eq!(1, spans.len()); let roots = &spans[0]; println!("{:?}",roots); // if more than 2 roots, a() was flamed twice or c was flamed // main is missing because main isn't closed here assert_eq!("b", roots.name); assert_eq!(1, roots.children.len()); assert_eq!("top::a", roots.children[0].name); assert_eq!(1, roots.children[0].children.len()); assert_eq!("lower::a", roots.children[0].children[0].name); }
c
identifier_name
sort7.js
// Check sorting of array sub field SERVER-480. t = db.jstests_sort7; t.drop(); // Compare indexed and unindexed sort order for an array embedded field. t.save({a: [{x: 2}]}); t.save({a: [{x: 1}]}); t.save({a: [{x: 3}]});
// Now check when there are two objects in the array. t.remove({}); t.save({a: [{x: 2}, {x: 3}]}); t.save({a: [{x: 1}, {x: 4}]}); t.save({a: [{x: 3}, {x: 2}]}); unindexed = t.find().sort({"a.x": 1}).toArray(); t.ensureIndex({"a.x": 1}); indexed = t.find().sort({"a.x": 1}).hint({"a.x": 1}).toArray(); assert.eq(unindexed, indexed);
unindexed = t.find().sort({"a.x": 1}).toArray(); t.ensureIndex({"a.x": 1}); indexed = t.find().sort({"a.x": 1}).hint({"a.x": 1}).toArray(); assert.eq(unindexed, indexed);
random_line_split
builtin.rs
use env::UserEnv; use getopts::Options; use std::collections::HashMap; use std::env; use std::io; use std::io::prelude::*; use std::path::{Component, Path, PathBuf}; use job; const SUCCESS: io::Result<i32> = Ok(0); pub trait Builtin { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32>; fn dup(&self) -> Box<Builtin>; } pub struct SimpleBuiltin(fn(&[String], &mut UserEnv) -> io::Result<i32>); impl Clone for SimpleBuiltin { fn clone(&self) -> Self { SimpleBuiltin(self.0) } } impl Builtin for SimpleBuiltin where { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { self.0(args, env) } fn dup(&self) -> Box<Builtin> { Box::new(self.clone()) } } pub type BuiltinMap = HashMap<&'static str, Box<Builtin>>; #[derive(Clone)] struct Cd { prev_dir: String, } impl Cd { fn new() -> Cd { let pwd = env::var("PWD").unwrap_or(String::new()); Cd { prev_dir: pwd } } fn change_to<P: AsRef<Path>>(&mut self, p: &P, env: &mut UserEnv) -> io::Result<()> { let pwd = env.get("PWD"); self.prev_dir = pwd; let new_pwd_buf = normalize_logical_path(&p); env::set_current_dir(&new_pwd_buf)?; let path_str = new_pwd_buf.to_str().ok_or(io::Error::new( io::ErrorKind::Other, "Invalid characters in path", ))?; env.set("PWD", path_str); Ok(()) } } fn normalize_logical_path<P: AsRef<Path>>(path: &P) -> PathBuf { let path = path.as_ref(); let mut normalized_path = PathBuf::new(); for c in path.components() { match c { Component::ParentDir => { normalized_path.pop(); } Component::CurDir => continue, _ => normalized_path.push(c.as_os_str()), }; } normalized_path } impl Builtin for Cd { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { if args.len() == 0 { let home = env.get("HOME"); if home.len() != 0 { return self.change_to(&PathBuf::from(&home), env).and(SUCCESS); } return SUCCESS; } if args[0] == "-" { let prev_dir = self.prev_dir.clone(); return self.change_to(&prev_dir, env).and(SUCCESS); } let pwd = env.get("PWD"); let mut pwd_buf = if pwd == "" { env::current_dir()? } else { PathBuf::from(pwd) }; pwd_buf.push(&args[0]); self.change_to(&pwd_buf, env).and(SUCCESS) } fn dup(&self) -> Box<Builtin> { Box::new(self.clone()) } } fn pwd(_args: &[String], env: &mut UserEnv) -> io::Result<i32> { println!("{}", env.get("PWD")); SUCCESS } fn echo(args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let mut opts = Options::new(); opts.optflag("n", "", "Suppress new lines"); let matches = match opts.parse(args) { Ok(m) => m, Err(_) => { return Err(io::Error::new( io::ErrorKind::InvalidInput, "Unable to parse arguments.", )) } }; let remaining_args = matches.free.join(" "); if matches.opt_present("n") { print!("{}", remaining_args); try!(io::stdout().flush()); } else { println!("{}", remaining_args); } SUCCESS } fn fg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(true)?; Ok(res) } fn bg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(false)?; Ok(res) } fn jobs(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { job::print_jobs(); Ok(0) } macro_rules! 
add_builtin_fns { ($map:ident, [ $( ($n:expr, $cmd:expr) ),* ] ) => {{ $($map.insert( $n, Box::new(SimpleBuiltin($cmd)) as Box<Builtin> );)* }} } fn builtin_true(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { SUCCESS } pub fn init_builtins() -> BuiltinMap { let mut builtins: BuiltinMap = HashMap::new(); builtins.insert("cd", Box::new(Cd::new())); add_builtin_fns!( builtins, [ ("echo", echo), ("pwd", pwd), ("fg", fg), ("bg", bg), ("jobs", jobs), ("true", builtin_true), ("false", |_args: &[String], _env: &mut UserEnv| Ok(1)), (":", builtin_true) ] ); builtins } pub fn clone_builtins(builtins: &BuiltinMap) -> BuiltinMap { let mut builtins_clone: BuiltinMap = HashMap::new(); for (cmd, func) in builtins.iter() { builtins_clone.insert(cmd, func.dup()); } builtins_clone } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; use std::{env, fs}; use test_fixture::*; struct BuiltinTests { pwd: PathBuf, } impl TestFixture for BuiltinTests { fn setup(&mut self) { let mut pwd = env::temp_dir(); pwd.push("pwd"); fs::create_dir(&pwd).unwrap(); self.pwd = pwd; env::set_current_dir(&self.pwd).unwrap(); env::set_var("PWD", &self.pwd); } fn teardown(&mut self) { fs::remove_dir(&self.pwd).unwrap(); } fn
(&self) -> TestList<Self> { vec![ test!("cd, no args", cd_with_no_args), test!("cd, absolute arg", cd_with_absolute_arg), test!("cd, relative arg", cd_with_relative_arg), test!("cd, previous dir", cd_previous_directory), ] } } impl BuiltinTests { fn new() -> BuiltinTests { BuiltinTests { pwd: PathBuf::new(), } } fn cd_with_no_args(&mut self) { let home = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("HOME", &home); let mut cd = Cd::new(); cd.run(&[], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), home); } fn cd_with_absolute_arg(&mut self) { let dir = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&self.pwd)); let mut cd = Cd::new(); cd.run(&[dir.clone()], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), dir); } fn cd_with_relative_arg(&mut self) { let mut pwd = self.pwd.clone(); pwd.pop(); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&pwd)); env::set_current_dir("..").unwrap(); let mut cd = Cd::new(); cd.run(&[String::from("pwd")], &mut user_env).unwrap(); assert_eq!(env::var("PWD"), Ok(pathbuf_to_string(&self.pwd))); } fn cd_previous_directory(&mut self) { let mut user_env = UserEnv::new(); let mut cd = Cd::new(); cd.run(&[String::from("..")], &mut user_env).unwrap(); cd.run(&[String::from("-")], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), pathbuf_to_string(&self.pwd)); } } fn pathbuf_to_string(p: &PathBuf) -> String { String::from((*p).to_str().unwrap()) } #[test] fn builtin_tests() { let fixture = BuiltinTests::new(); test_fixture_runner(fixture); } }
tests
identifier_name
builtin.rs
use env::UserEnv; use getopts::Options; use std::collections::HashMap; use std::env; use std::io; use std::io::prelude::*; use std::path::{Component, Path, PathBuf}; use job; const SUCCESS: io::Result<i32> = Ok(0); pub trait Builtin { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32>; fn dup(&self) -> Box<Builtin>; } pub struct SimpleBuiltin(fn(&[String], &mut UserEnv) -> io::Result<i32>); impl Clone for SimpleBuiltin { fn clone(&self) -> Self { SimpleBuiltin(self.0) } } impl Builtin for SimpleBuiltin where { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { self.0(args, env) } fn dup(&self) -> Box<Builtin> { Box::new(self.clone()) } } pub type BuiltinMap = HashMap<&'static str, Box<Builtin>>; #[derive(Clone)] struct Cd { prev_dir: String, } impl Cd { fn new() -> Cd { let pwd = env::var("PWD").unwrap_or(String::new()); Cd { prev_dir: pwd } } fn change_to<P: AsRef<Path>>(&mut self, p: &P, env: &mut UserEnv) -> io::Result<()> { let pwd = env.get("PWD"); self.prev_dir = pwd; let new_pwd_buf = normalize_logical_path(&p); env::set_current_dir(&new_pwd_buf)?; let path_str = new_pwd_buf.to_str().ok_or(io::Error::new( io::ErrorKind::Other, "Invalid characters in path", ))?; env.set("PWD", path_str); Ok(()) } } fn normalize_logical_path<P: AsRef<Path>>(path: &P) -> PathBuf { let path = path.as_ref(); let mut normalized_path = PathBuf::new(); for c in path.components() { match c { Component::ParentDir =>
Component::CurDir => continue, _ => normalized_path.push(c.as_os_str()), }; } normalized_path } impl Builtin for Cd { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { if args.len() == 0 { let home = env.get("HOME"); if home.len() != 0 { return self.change_to(&PathBuf::from(&home), env).and(SUCCESS); } return SUCCESS; } if args[0] == "-" { let prev_dir = self.prev_dir.clone(); return self.change_to(&prev_dir, env).and(SUCCESS); } let pwd = env.get("PWD"); let mut pwd_buf = if pwd == "" { env::current_dir()? } else { PathBuf::from(pwd) }; pwd_buf.push(&args[0]); self.change_to(&pwd_buf, env).and(SUCCESS) } fn dup(&self) -> Box<Builtin> { Box::new(self.clone()) } } fn pwd(_args: &[String], env: &mut UserEnv) -> io::Result<i32> { println!("{}", env.get("PWD")); SUCCESS } fn echo(args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let mut opts = Options::new(); opts.optflag("n", "", "Suppress new lines"); let matches = match opts.parse(args) { Ok(m) => m, Err(_) => { return Err(io::Error::new( io::ErrorKind::InvalidInput, "Unable to parse arguments.", )) } }; let remaining_args = matches.free.join(" "); if matches.opt_present("n") { print!("{}", remaining_args); try!(io::stdout().flush()); } else { println!("{}", remaining_args); } SUCCESS } fn fg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(true)?; Ok(res) } fn bg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(false)?; Ok(res) } fn jobs(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { job::print_jobs(); Ok(0) } macro_rules! add_builtin_fns { ($map:ident, [ $( ($n:expr, $cmd:expr) ),* ] ) => {{ $($map.insert( $n, Box::new(SimpleBuiltin($cmd)) as Box<Builtin> );)* }} } fn builtin_true(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { SUCCESS } pub fn init_builtins() -> BuiltinMap { let mut builtins: BuiltinMap = HashMap::new(); builtins.insert("cd", Box::new(Cd::new())); add_builtin_fns!( builtins, [ ("echo", echo), ("pwd", pwd), ("fg", fg), ("bg", bg), ("jobs", jobs), ("true", builtin_true), ("false", |_args: &[String], _env: &mut UserEnv| Ok(1)), (":", builtin_true) ] ); builtins } pub fn clone_builtins(builtins: &BuiltinMap) -> BuiltinMap { let mut builtins_clone: BuiltinMap = HashMap::new(); for (cmd, func) in builtins.iter() { builtins_clone.insert(cmd, func.dup()); } builtins_clone } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; use std::{env, fs}; use test_fixture::*; struct BuiltinTests { pwd: PathBuf, } impl TestFixture for BuiltinTests { fn setup(&mut self) { let mut pwd = env::temp_dir(); pwd.push("pwd"); fs::create_dir(&pwd).unwrap(); self.pwd = pwd; env::set_current_dir(&self.pwd).unwrap(); env::set_var("PWD", &self.pwd); } fn teardown(&mut self) { fs::remove_dir(&self.pwd).unwrap(); } fn tests(&self) -> TestList<Self> { vec![ test!("cd, no args", cd_with_no_args), test!("cd, absolute arg", cd_with_absolute_arg), test!("cd, relative arg", cd_with_relative_arg), test!("cd, previous dir", cd_previous_directory), ] } } impl BuiltinTests { fn new() -> BuiltinTests { BuiltinTests { pwd: PathBuf::new(), } } fn cd_with_no_args(&mut self) { let home = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("HOME", &home); let mut cd = Cd::new(); cd.run(&[], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), home); } fn cd_with_absolute_arg(&mut self) { let dir = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&self.pwd)); let mut cd = 
Cd::new(); cd.run(&[dir.clone()], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), dir); } fn cd_with_relative_arg(&mut self) { let mut pwd = self.pwd.clone(); pwd.pop(); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&pwd)); env::set_current_dir("..").unwrap(); let mut cd = Cd::new(); cd.run(&[String::from("pwd")], &mut user_env).unwrap(); assert_eq!(env::var("PWD"), Ok(pathbuf_to_string(&self.pwd))); } fn cd_previous_directory(&mut self) { let mut user_env = UserEnv::new(); let mut cd = Cd::new(); cd.run(&[String::from("..")], &mut user_env).unwrap(); cd.run(&[String::from("-")], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), pathbuf_to_string(&self.pwd)); } } fn pathbuf_to_string(p: &PathBuf) -> String { String::from((*p).to_str().unwrap()) } #[test] fn builtin_tests() { let fixture = BuiltinTests::new(); test_fixture_runner(fixture); } }
{ normalized_path.pop(); }
conditional_block
builtin.rs
use env::UserEnv; use getopts::Options; use std::collections::HashMap; use std::env; use std::io; use std::io::prelude::*; use std::path::{Component, Path, PathBuf}; use job; const SUCCESS: io::Result<i32> = Ok(0); pub trait Builtin { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32>; fn dup(&self) -> Box<Builtin>; } pub struct SimpleBuiltin(fn(&[String], &mut UserEnv) -> io::Result<i32>); impl Clone for SimpleBuiltin { fn clone(&self) -> Self { SimpleBuiltin(self.0) } } impl Builtin for SimpleBuiltin where { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { self.0(args, env) } fn dup(&self) -> Box<Builtin> { Box::new(self.clone()) } } pub type BuiltinMap = HashMap<&'static str, Box<Builtin>>; #[derive(Clone)] struct Cd { prev_dir: String, } impl Cd { fn new() -> Cd { let pwd = env::var("PWD").unwrap_or(String::new()); Cd { prev_dir: pwd } } fn change_to<P: AsRef<Path>>(&mut self, p: &P, env: &mut UserEnv) -> io::Result<()> { let pwd = env.get("PWD"); self.prev_dir = pwd; let new_pwd_buf = normalize_logical_path(&p); env::set_current_dir(&new_pwd_buf)?; let path_str = new_pwd_buf.to_str().ok_or(io::Error::new( io::ErrorKind::Other, "Invalid characters in path", ))?; env.set("PWD", path_str); Ok(()) } } fn normalize_logical_path<P: AsRef<Path>>(path: &P) -> PathBuf { let path = path.as_ref(); let mut normalized_path = PathBuf::new(); for c in path.components() { match c { Component::ParentDir => { normalized_path.pop(); } Component::CurDir => continue, _ => normalized_path.push(c.as_os_str()), }; } normalized_path } impl Builtin for Cd { fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> { if args.len() == 0 { let home = env.get("HOME"); if home.len() != 0 { return self.change_to(&PathBuf::from(&home), env).and(SUCCESS); } return SUCCESS; } if args[0] == "-" { let prev_dir = self.prev_dir.clone(); return self.change_to(&prev_dir, env).and(SUCCESS); } let pwd = env.get("PWD"); let mut pwd_buf = if pwd == "" { env::current_dir()? } else { PathBuf::from(pwd) }; pwd_buf.push(&args[0]); self.change_to(&pwd_buf, env).and(SUCCESS) } fn dup(&self) -> Box<Builtin>
} fn pwd(_args: &[String], env: &mut UserEnv) -> io::Result<i32> { println!("{}", env.get("PWD")); SUCCESS } fn echo(args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let mut opts = Options::new(); opts.optflag("n", "", "Suppress new lines"); let matches = match opts.parse(args) { Ok(m) => m, Err(_) => { return Err(io::Error::new( io::ErrorKind::InvalidInput, "Unable to parse arguments.", )) } }; let remaining_args = matches.free.join(" "); if matches.opt_present("n") { print!("{}", remaining_args); try!(io::stdout().flush()); } else { println!("{}", remaining_args); } SUCCESS } fn fg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(true)?; Ok(res) } fn bg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { let res = job::start_job(false)?; Ok(res) } fn jobs(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { job::print_jobs(); Ok(0) } macro_rules! add_builtin_fns { ($map:ident, [ $( ($n:expr, $cmd:expr) ),* ] ) => {{ $($map.insert( $n, Box::new(SimpleBuiltin($cmd)) as Box<Builtin> );)* }} } fn builtin_true(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> { SUCCESS } pub fn init_builtins() -> BuiltinMap { let mut builtins: BuiltinMap = HashMap::new(); builtins.insert("cd", Box::new(Cd::new())); add_builtin_fns!( builtins, [ ("echo", echo), ("pwd", pwd), ("fg", fg), ("bg", bg), ("jobs", jobs), ("true", builtin_true), ("false", |_args: &[String], _env: &mut UserEnv| Ok(1)), (":", builtin_true) ] ); builtins } pub fn clone_builtins(builtins: &BuiltinMap) -> BuiltinMap { let mut builtins_clone: BuiltinMap = HashMap::new(); for (cmd, func) in builtins.iter() { builtins_clone.insert(cmd, func.dup()); } builtins_clone } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; use std::{env, fs}; use test_fixture::*; struct BuiltinTests { pwd: PathBuf, } impl TestFixture for BuiltinTests { fn setup(&mut self) { let mut pwd = env::temp_dir(); pwd.push("pwd"); fs::create_dir(&pwd).unwrap(); self.pwd = pwd; env::set_current_dir(&self.pwd).unwrap(); env::set_var("PWD", &self.pwd); } fn teardown(&mut self) { fs::remove_dir(&self.pwd).unwrap(); } fn tests(&self) -> TestList<Self> { vec![ test!("cd, no args", cd_with_no_args), test!("cd, absolute arg", cd_with_absolute_arg), test!("cd, relative arg", cd_with_relative_arg), test!("cd, previous dir", cd_previous_directory), ] } } impl BuiltinTests { fn new() -> BuiltinTests { BuiltinTests { pwd: PathBuf::new(), } } fn cd_with_no_args(&mut self) { let home = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("HOME", &home); let mut cd = Cd::new(); cd.run(&[], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), home); } fn cd_with_absolute_arg(&mut self) { let dir = String::from("/"); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&self.pwd)); let mut cd = Cd::new(); cd.run(&[dir.clone()], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), dir); } fn cd_with_relative_arg(&mut self) { let mut pwd = self.pwd.clone(); pwd.pop(); let mut user_env = UserEnv::new(); user_env.set("PWD", &pathbuf_to_string(&pwd)); env::set_current_dir("..").unwrap(); let mut cd = Cd::new(); cd.run(&[String::from("pwd")], &mut user_env).unwrap(); assert_eq!(env::var("PWD"), Ok(pathbuf_to_string(&self.pwd))); } fn cd_previous_directory(&mut self) { let mut user_env = UserEnv::new(); let mut cd = Cd::new(); cd.run(&[String::from("..")], &mut user_env).unwrap(); cd.run(&[String::from("-")], &mut user_env).unwrap(); assert_eq!(user_env.get("PWD"), 
pathbuf_to_string(&self.pwd)); } } fn pathbuf_to_string(p: &PathBuf) -> String { String::from((*p).to_str().unwrap()) } #[test] fn builtin_tests() { let fixture = BuiltinTests::new(); test_fixture_runner(fixture); } }
{ Box::new(self.clone()) }
identifier_body
builtin.rs
use env::UserEnv;
use getopts::Options;
use std::collections::HashMap;
use std::env;
use std::io;
use std::io::prelude::*;
use std::path::{Component, Path, PathBuf};
use job;

const SUCCESS: io::Result<i32> = Ok(0);

pub trait Builtin {
    fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32>;
    fn dup(&self) -> Box<Builtin>;
}

/// A builtin backed by a plain function pointer.
pub struct SimpleBuiltin(fn(&[String], &mut UserEnv) -> io::Result<i32>);

impl Clone for SimpleBuiltin {
    fn clone(&self) -> Self {
        SimpleBuiltin(self.0)
    }
}

impl Builtin for SimpleBuiltin {
    fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> {
        self.0(args, env)
    }

    fn dup(&self) -> Box<Builtin> {
        Box::new(self.clone())
    }
}

pub type BuiltinMap = HashMap<&'static str, Box<Builtin>>;

#[derive(Clone)]
struct Cd {
    prev_dir: String,
}

impl Cd {
    fn new() -> Cd {
        let pwd = env::var("PWD").unwrap_or_default();
        Cd { prev_dir: pwd }
    }

    fn change_to<P: AsRef<Path>>(&mut self, p: &P, env: &mut UserEnv) -> io::Result<()> {
        let pwd = env.get("PWD");
        self.prev_dir = pwd;
        let new_pwd_buf = normalize_logical_path(&p);
        env::set_current_dir(&new_pwd_buf)?;
        let path_str = new_pwd_buf.to_str().ok_or(io::Error::new(
            io::ErrorKind::Other,
            "Invalid characters in path",
        ))?;
        env.set("PWD", path_str);
        Ok(())
    }
}

/// Resolves `.` and `..` components textually, without consulting the filesystem.
fn normalize_logical_path<P: AsRef<Path>>(path: &P) -> PathBuf {
    let path = path.as_ref();
    let mut normalized_path = PathBuf::new();
    for c in path.components() {
        match c {
            Component::ParentDir => {
                normalized_path.pop();
            }
            Component::CurDir => continue,
            _ => normalized_path.push(c.as_os_str()),
        };
    }
    normalized_path
}

impl Builtin for Cd {
    fn run(&mut self, args: &[String], env: &mut UserEnv) -> io::Result<i32> {
        if args.is_empty() {
            let home = env.get("HOME");
            if !home.is_empty() {
                return self.change_to(&PathBuf::from(&home), env).and(SUCCESS);
            }
            return SUCCESS;
        }
        if args[0] == "-" {
            let prev_dir = self.prev_dir.clone();
            return self.change_to(&prev_dir, env).and(SUCCESS);
        }
        let pwd = env.get("PWD");
        let mut pwd_buf = if pwd == "" {
            env::current_dir()?
        } else {
            PathBuf::from(pwd)
        };
        pwd_buf.push(&args[0]);
        self.change_to(&pwd_buf, env).and(SUCCESS)
    }

    fn dup(&self) -> Box<Builtin> {
        Box::new(self.clone())
    }
}

fn pwd(_args: &[String], env: &mut UserEnv) -> io::Result<i32> {
    println!("{}", env.get("PWD"));
    SUCCESS
}

fn echo(args: &[String], _env: &mut UserEnv) -> io::Result<i32> {
    let mut opts = Options::new();
    opts.optflag("n", "", "Suppress the trailing newline");
    let matches = match opts.parse(args) {
        Ok(m) => m,
        Err(_) => {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Unable to parse arguments.",
            ))
        }
    };
    let remaining_args = matches.free.join(" ");
    if matches.opt_present("n") {
        print!("{}", remaining_args);
        io::stdout().flush()?;
    } else {
        println!("{}", remaining_args);
    }
    SUCCESS
}

fn fg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> {
    let res = job::start_job(true)?;
    Ok(res)
}

fn bg(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> {
    let res = job::start_job(false)?;
    Ok(res)
}

fn jobs(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> {
    job::print_jobs();
    SUCCESS
}

// Registers plain `fn` pointers as builtins in one shot.
macro_rules! add_builtin_fns {
    ($map:ident, [ $( ($n:expr, $cmd:expr) ),* ]) => {{
        $( $map.insert($n, Box::new(SimpleBuiltin($cmd)) as Box<Builtin>); )*
    }};
}

fn builtin_true(_args: &[String], _env: &mut UserEnv) -> io::Result<i32> {
    SUCCESS
}

pub fn init_builtins() -> BuiltinMap {
    let mut builtins: BuiltinMap = HashMap::new();
    builtins.insert("cd", Box::new(Cd::new()));
    add_builtin_fns!(
        builtins,
        [
            ("echo", echo),
            ("pwd", pwd),
            ("fg", fg),
            ("bg", bg),
            ("jobs", jobs),
            ("true", builtin_true),
            ("false", |_args: &[String], _env: &mut UserEnv| Ok(1)),
            (":", builtin_true)
        ]
    );
    builtins
}

// Builtins are trait objects, so copying the table goes through each
// builtin's `dup()` rather than `Clone`.
pub fn clone_builtins(builtins: &BuiltinMap) -> BuiltinMap {
    let mut builtins_clone: BuiltinMap = HashMap::new();
    for (cmd, func) in builtins.iter() {
        builtins_clone.insert(cmd, func.dup());
    }
    builtins_clone
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;
    use std::{env, fs};
    use test_fixture::*;

    struct BuiltinTests {
        pwd: PathBuf,
    }

    impl TestFixture for BuiltinTests {
        fn setup(&mut self) {
            let mut pwd = env::temp_dir();
            pwd.push("pwd");
            fs::create_dir(&pwd).unwrap();
            self.pwd = pwd;
            env::set_current_dir(&self.pwd).unwrap();
            env::set_var("PWD", &self.pwd);
        }

        fn teardown(&mut self) {
            fs::remove_dir(&self.pwd).unwrap();
        }

        fn tests(&self) -> TestList<Self> {
            vec![
                test!("cd, no args", cd_with_no_args),
                test!("cd, absolute arg", cd_with_absolute_arg),
                test!("cd, relative arg", cd_with_relative_arg),
                test!("cd, previous dir", cd_previous_directory),
            ]
        }
    }

    impl BuiltinTests {
        fn new() -> BuiltinTests {
            BuiltinTests {
                pwd: PathBuf::new(),
            }
        }

        fn cd_with_no_args(&mut self) {
            let home = String::from("/");
            let mut user_env = UserEnv::new();
            user_env.set("HOME", &home);
            let mut cd = Cd::new();
            cd.run(&[], &mut user_env).unwrap();
            assert_eq!(user_env.get("PWD"), home);
        }

        fn cd_with_absolute_arg(&mut self) {
            let dir = String::from("/");
            let mut user_env = UserEnv::new();
            user_env.set("PWD", &pathbuf_to_string(&self.pwd));
            let mut cd = Cd::new();
            cd.run(&[dir.clone()], &mut user_env).unwrap();
            assert_eq!(user_env.get("PWD"), dir);
        }

        fn cd_with_relative_arg(&mut self) {
            let mut pwd = self.pwd.clone();
            pwd.pop();
            let mut user_env = UserEnv::new();
            user_env.set("PWD", &pathbuf_to_string(&pwd));
            env::set_current_dir("..").unwrap();
            let mut cd = Cd::new();
            cd.run(&[String::from("pwd")], &mut user_env).unwrap();
            assert_eq!(env::var("PWD"), Ok(pathbuf_to_string(&self.pwd)));
        }

        fn cd_previous_directory(&mut self) {
            let mut user_env = UserEnv::new();
            let mut cd = Cd::new();
            cd.run(&[String::from("..")], &mut user_env).unwrap();
            cd.run(&[String::from("-")], &mut user_env).unwrap();
            assert_eq!(user_env.get("PWD"), pathbuf_to_string(&self.pwd));
        }
    }

    fn pathbuf_to_string(p: &PathBuf) -> String {
        String::from(p.to_str().unwrap())
    }
        let fixture = BuiltinTests::new();
        test_fixture_runner(fixture);
    }
}
    #[test]
    fn builtin_tests() {
random_line_split
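For random_line_split rows such as this one, the middle is a run of whole lines cut at two arbitrary line boundaries instead of a syntactic unit. A sketch of how such a split can be produced (the uniform choice of boundaries is an editor's assumption, not something this dump documents):

import random

def random_line_split(source, rng=random.Random(0)):
    lines = source.splitlines(True)  # keep line endings so the pieces re-join exactly
    i, j = sorted(rng.sample(range(len(lines) + 1), 2))
    prefix, middle, suffix = ''.join(lines[:i]), ''.join(lines[i:j]), ''.join(lines[j:])
    assert prefix + middle + suffix == source
    return prefix, middle, suffix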
matlab-to-python.py
# Autogenerated with SMOP version 0.23
# main.py ../../assessing-mininet/MATLAB/load_function.m ../../assessing-mininet/MATLAB/process_complete_test_set.m ../../assessing-mininet/MATLAB/process_single_testfile.m ../../assessing-mininet/MATLAB/ProcessAllLogsMain.m
from __future__ import division

from numpy import arange
# Editor's assumption: the plotting calls below (subplot, errorbar, plot, hist,
# title, xlabel, ylabel, axis, grid, figure, close) and array helpers (mean,
# std, diff, sort, copy, array, asarray) resolve via matplotlib's pylab
# namespace; the MATLAB-compatibility helpers cat, round_, matlabarray, set_
# and saveas must still be supplied by the SMOP runtime.
from pylab import *


def strcat(*args):
def load_octave_decoded_file_as_matrix(file_name):
    # One whitespace-separated row of numbers per line; under Python 2,
    # map() returns a plain list, so this yields a list of lists.
    with open(file_name, 'r') as f:
        return [map(float, line.strip().split(' ')) for line in f]


def get_test_bitrate(crosstraffic):
    # NB: numpy's arange excludes the endpoint, unlike MATLAB's 4:0.25:6.
    if crosstraffic:
        return arange(4, 6, 0.25)
    else:
        return arange(8, 12, 0.5)


def process_complete_test_set(file_names, output_format, crosstraffic):
    from glob import glob
    overview_img_file = strcat('overview.', output_format)
    mean_bitrate = []
    std_dev_bitrate = []
    mean_delay = []
    std_dev_delay = []
    mean_jitter = []
    std_dev_jitter = []
    mean_packetloss = []
    std_dev_packetloss = []
    print('Starting work on:')
    print(file_names)
    for f in file_names:
        print('in loop, iterating through list of found files...')
        #current_file_name_with_ext=f
        #bare_file_name=strrep(current_file_name_with_ext,extension_loadfile,'')
        #temp_picture_file_name=strcat(bare_file_name,extension_imgfile)
        current_picture_file_name = strcat(f, '.jpg')
        matrix_to_process = load_octave_decoded_file_as_matrix(f)
        parsed_data = process_single_testfile(matrix_to_process, current_picture_file_name, output_format)
        # append() replaces the original indexed assignment into empty lists,
        # which would have raised an IndexError.
        mean_bitrate.append(mean(parsed_data))
        std_dev_bitrate.append(std(parsed_data))
        mean_delay.append(mean(parsed_data[:, 2]))
        std_dev_delay.append(std(parsed_data[:, 2]))
        mean_jitter.append(mean(parsed_data[:, 3]))
        std_dev_jitter.append(std(parsed_data[:, 3]))
        mean_packetloss.append(mean(parsed_data[:, 4]))
        std_dev_packetloss.append(std(parsed_data[:, 4]))
    bitrate_of_test = get_test_bitrate(crosstraffic)
    # FIXME: bitrate_interval is never defined in this file; it was a
    # workspace variable in the original MATLAB sources.
    s_bitrate = min(bitrate_of_test) - bitrate_interval
    e_bitrate = max(bitrate_of_test) + bitrate_interval
    s_mean_bitrate = min(mean_bitrate) - max(std_dev_bitrate)
    e_mean_bitrate = max(mean_bitrate) + max(std_dev_bitrate)
    s_mean_jitter = min(mean_jitter) - max(std_dev_jitter)
    e_mean_jitter = max(mean_jitter) + max(std_dev_jitter)
    s_mean_delay = min(mean_delay) - max(std_dev_delay)
    e_mean_delay = max(mean_delay) + max(std_dev_delay)
    axis_bitrate = cat(s_bitrate, e_bitrate, s_mean_bitrate, e_mean_bitrate)
    axis_delay = cat(s_bitrate, e_bitrate, sort(cat(round_(s_mean_delay) - 1, round_(e_mean_delay) + 1)))
    axis_jitter = cat(s_bitrate, e_bitrate, s_mean_jitter, e_mean_jitter)
    print('\n\n\n*** START TESTDATA ***\n')
    print(bitrate_of_test)
    print(mean_bitrate)
    print(std_dev_bitrate)
    print('\n*** END TESTDATA ***\n\n\n')
    subplot(3, 1, 1)
    print(len(bitrate_of_test))
    print(len(mean_bitrate))
    print(len(std_dev_bitrate))
    errorbar(bitrate_of_test, mean_bitrate, std_dev_bitrate, 'kx')
    title('mean throughput with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('bitrate value [Mbps]')
    print(axis_bitrate)
    axis(axis_bitrate)
    grid('on')
    subplot(3, 1, 2)
    errorbar(bitrate_of_test, mean_delay, std_dev_delay, 'kx')
    title('mean delay with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('delay value [ms]')
    axis(axis_delay)
    grid('on')
    subplot(3, 1, 3)
    errorbar(bitrate_of_test, mean_jitter, std_dev_jitter, 'kx')
    title('mean jitter with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('jitter value [ms]')
    axis(axis_jitter)
    grid('on')
    aggregatedPicture = figure(1)
    set_(aggregatedPicture, 'PaperUnits', 'centimeters')
    set_(aggregatedPicture, 'PaperSize', cat(30, 16))
    set_(aggregatedPicture, 'PaperPosition', cat(0, 0, 30, 16))
    set_(aggregatedPicture, 'PaperOrientation', 'portrait')
    saveas(aggregatedPicture, overview_img_file, output_format)
    close(aggregatedPicture)
    # clear('all')  # MATLAB leftover with no Python equivalent; disabled.
    return


def process_single_testfile(matrix, current_picture_file_name, output_format):
    # The loader returns a plain list of lists, so convert to a 2-D array for
    # column slicing. Indices below were shifted from the MATLAB sources'
    # 1-based convention (matrix(1,5), matrix(:,5), ...) to Python's 0-based
    # one; the autogenerated code had carried them over verbatim.
    matrix = asarray(matrix)
    t_start = matrix[0, 4] * 3600 + matrix[0, 5] * 60 + matrix[0, 6]
    print(matrix[:, 4] * 3600 + matrix[:, 5] * 60 + matrix[:, 6])
    t_conv = (matrix[:, 4] * 3600 + matrix[:, 5] * 60 + matrix[:, 6]) - t_start
    t_start_s = matrix[0, 1] * 3600 + matrix[0, 2] * 60 + matrix[0, 3]
    t_conv_s = (matrix[:, 1] * 3600 + matrix[:, 2] * 60 + matrix[:, 3]) - t_start_s
    # Per-second accumulators; jj indexes the current one-second bin.
    jj = 0
    t_int = 0
    bitrate = [0.0]
    delay = [0.0]
    jitter = [0.0]
    pktloss = [0.0]
    for ii in range(len(matrix)):
        if t_conv[ii] - t_int >= 1:
            jj = jj + 1
            t_int = t_conv[ii]
            bitrate.append(matrix[ii, 7])
            delay.append(t_conv[ii] - t_conv_s[ii])
            if ii > 0:
                # Assumption: column 0 carries the packet sequence number
                # (the MATLAB code used linear matrix(ii) indexing here).
                pktloss.append(matrix[ii, 0] - matrix[ii - 1, 0] - 1)
                jitter.append(t_conv[ii] - t_conv[ii - 1])
            else:
                pktloss.append(0.0)
                jitter.append(0.0)
        else:
            bitrate[jj] = bitrate[jj] + matrix[ii, 7]
            delay[jj] = mean([delay[jj], t_conv[ii] - t_conv_s[ii]])
            if ii > 0:
                pktloss[jj] = pktloss[jj] + matrix[ii, 0] - matrix[ii - 1, 0] - 1
                jitter[jj] = mean([jitter[jj], t_conv[ii] - t_conv[ii - 1]])
    bitrate = array(bitrate) / 125000  # bytes per second -> Mbps
    delay = array(delay)
    jitter = array(jitter)
    pktloss = array(pktloss)
    # matlabarray/cat come from the SMOP runtime (see the note at the top).
    return_matrix = matlabarray(cat(bitrate.T, delay.T, jitter.T, pktloss.T))
    subplot(2, 2, 1)
    bitrate_u = copy(bitrate)
    plot(arange(0, jj - 2), bitrate_u[1:jj - 1], '-')
    title('Throughput')
    xlabel('time [s]')
    ylabel('[Mbps]')
    axis(cat(0, max(t_conv), 0, round_(max(bitrate_u) * 1.125)))
    grid('on')
    subplot(2, 2, 2)
    plot(arange(0, len(delay) - 1), delay, '-')
    title('Delay')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0, max(t_conv), min(delay) - 1e-05, max(delay)))
    grid('on')
    subplot(2, 2, 3)
    plot(arange(0, len(jitter) - 1), jitter, '-')
    title('Jitter')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0, max(t_conv), min(jitter) - max(jitter) * 1.125, max(jitter) * 1.125))
    grid('on')
    subplot(2, 2, 4)
    d = diff(t_conv)
    m = max(d)
    hist(d)
    title('Inter-departure time Distribution')
    xlabel('time [s]')
    ylabel('Empirical PDF')
    grid('on')
    firstPicture = figure(1)
    set_(firstPicture, 'PaperUnits', 'centimeters')
    set_(firstPicture, 'PaperSize', cat(22, 18))
    set_(firstPicture, 'PaperPosition', cat(0, 0, 22, 18))
    set_(firstPicture, 'PaperOrientation', 'portrait')
    saveas(firstPicture, current_picture_file_name, output_format)
    close(firstPicture)
    # if (strcmp(log_type,'udp_rcv')):
    #     subplot(1,1,1)
    #     packetloss_picture=figure(1)
    #     set_(packetloss_picture,'PaperUnits','centimeters')
    #     set_(packetloss_picture,'PaperSize',cat(12,10))
    #     set_(packetloss_picture,'PaperPosition',cat(0,0,12,10))
    #     set_(packetloss_picture,'PaperOrientation','portrait')
    #     plot(arange(0,len(pktloss) - 1),pktloss,'-')
    #     title('Packet loss')
    #     xlabel('time [s]')
    #     ylabel('[pps]')
    #     axis(cat(sort(cat(0,max(t_conv))),sort(cat(round_(max(pktloss)) + 1,round_(min(pktloss)) - 1))))
    #     grid('on')
    #     saveas(packetloss_picture,strcat('pl_',current_picture_file_name),output_format)
    #     close(packetloss_picture)
    return return_matrix


crosstraffic = False
#process_complete_test_set(['/tmp/octave.dat'],'pdf',crosstraffic)
process_single_testfile(load_octave_decoded_file_as_matrix('/tmp/octave.dat'), 'pic.jpg', "jpg")
return ''.join(args)
identifier_body
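A small usage sketch for load_octave_decoded_file_as_matrix as defined above: it expects one whitespace-separated row of numbers per line. The /tmp path below is made up for illustration, and indexing the result this way assumes Python 2, where map() returns a list:

with open('/tmp/example.dat', 'w') as f:
    f.write('1 2 3\n4 5 6\n')
matrix = load_octave_decoded_file_as_matrix('/tmp/example.dat')
assert matrix[1][2] == 6.0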
matlab-to-python.py
# Autogenerated with SMOP version 0.23
# main.py ../../assessing-mininet/MATLAB/load_function.m ../../assessing-mininet/MATLAB/process_complete_test_set.m ../../assessing-mininet/MATLAB/process_single_testfile.m ../../assessing-mininet/MATLAB/ProcessAllLogsMain.m
from __future__ import division

from numpy import arange
# Editor's assumption: the plotting calls below (subplot, errorbar, plot, hist,
# title, xlabel, ylabel, axis, grid, figure, close) and array helpers (mean,
# std, diff, sort, copy, array, asarray) resolve via matplotlib's pylab
# namespace; the MATLAB-compatibility helpers cat, round_, matlabarray, set_
# and saveas must still be supplied by the SMOP runtime.
from pylab import *


def strcat(*args):
    return ''.join(args)


def load_octave_decoded_file_as_matrix(file_name):
    # One whitespace-separated row of numbers per line; under Python 2,
    # map() returns a plain list, so this yields a list of lists.
    with open(file_name, 'r') as f:
        return [map(float, line.strip().split(' ')) for line in f]


def get_test_bitrate(crosstraffic):
    # NB: numpy's arange excludes the endpoint, unlike MATLAB's 4:0.25:6.
    if crosstraffic:
        return arange(4, 6, 0.25)
    else:
        return arange(8, 12, 0.5)


def process_complete_test_set(file_names, output_format, crosstraffic):
    from glob import glob
    overview_img_file = strcat('overview.', output_format)
    mean_bitrate = []
    std_dev_bitrate = []
    mean_delay = []
    std_dev_delay = []
    mean_jitter = []
    std_dev_jitter = []
    mean_packetloss = []
    std_dev_packetloss = []
    print('Starting work on:')
    print(file_names)
    for f in file_names:
        print('in loop, iterating through list of found files...')
        #current_file_name_with_ext=f
        #bare_file_name=strrep(current_file_name_with_ext,extension_loadfile,'')
        #temp_picture_file_name=strcat(bare_file_name,extension_imgfile)
        # append() replaces the original indexed assignment into empty lists,
        # which would have raised an IndexError.
        mean_bitrate.append(mean(parsed_data))
        std_dev_bitrate.append(std(parsed_data))
        mean_delay.append(mean(parsed_data[:, 2]))
        std_dev_delay.append(std(parsed_data[:, 2]))
        mean_jitter.append(mean(parsed_data[:, 3]))
        std_dev_jitter.append(std(parsed_data[:, 3]))
        mean_packetloss.append(mean(parsed_data[:, 4]))
        std_dev_packetloss.append(std(parsed_data[:, 4]))
    bitrate_of_test = get_test_bitrate(crosstraffic)
    # FIXME: bitrate_interval is never defined in this file; it was a
    # workspace variable in the original MATLAB sources.
    s_bitrate = min(bitrate_of_test) - bitrate_interval
    e_bitrate = max(bitrate_of_test) + bitrate_interval
    s_mean_bitrate = min(mean_bitrate) - max(std_dev_bitrate)
    e_mean_bitrate = max(mean_bitrate) + max(std_dev_bitrate)
    s_mean_jitter = min(mean_jitter) - max(std_dev_jitter)
    e_mean_jitter = max(mean_jitter) + max(std_dev_jitter)
    s_mean_delay = min(mean_delay) - max(std_dev_delay)
    e_mean_delay = max(mean_delay) + max(std_dev_delay)
    axis_bitrate = cat(s_bitrate, e_bitrate, s_mean_bitrate, e_mean_bitrate)
    axis_delay = cat(s_bitrate, e_bitrate, sort(cat(round_(s_mean_delay) - 1, round_(e_mean_delay) + 1)))
    axis_jitter = cat(s_bitrate, e_bitrate, s_mean_jitter, e_mean_jitter)
    print('\n\n\n*** START TESTDATA ***\n')
    print(bitrate_of_test)
    print(mean_bitrate)
    print(std_dev_bitrate)
    print('\n*** END TESTDATA ***\n\n\n')
    subplot(3, 1, 1)
    print(len(bitrate_of_test))
    print(len(mean_bitrate))
    print(len(std_dev_bitrate))
    errorbar(bitrate_of_test, mean_bitrate, std_dev_bitrate, 'kx')
    title('mean throughput with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('bitrate value [Mbps]')
    print(axis_bitrate)
    axis(axis_bitrate)
    grid('on')
    subplot(3, 1, 2)
    errorbar(bitrate_of_test, mean_delay, std_dev_delay, 'kx')
    title('mean delay with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('delay value [ms]')
    axis(axis_delay)
    grid('on')
    subplot(3, 1, 3)
    errorbar(bitrate_of_test, mean_jitter, std_dev_jitter, 'kx')
    title('mean jitter with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('jitter value [ms]')
    axis(axis_jitter)
    grid('on')
    aggregatedPicture = figure(1)
    set_(aggregatedPicture, 'PaperUnits', 'centimeters')
    set_(aggregatedPicture, 'PaperSize', cat(30, 16))
    set_(aggregatedPicture, 'PaperPosition', cat(0, 0, 30, 16))
    set_(aggregatedPicture, 'PaperOrientation', 'portrait')
    saveas(aggregatedPicture, overview_img_file, output_format)
    close(aggregatedPicture)
    # clear('all')  # MATLAB leftover with no Python equivalent; disabled.
    return


def process_single_testfile(matrix, current_picture_file_name, output_format):
    # The loader returns a plain list of lists, so convert to a 2-D array for
    # column slicing. Indices below were shifted from the MATLAB sources'
    # 1-based convention (matrix(1,5), matrix(:,5), ...) to Python's 0-based
    # one; the autogenerated code had carried them over verbatim.
    matrix = asarray(matrix)
    t_start = matrix[0, 4] * 3600 + matrix[0, 5] * 60 + matrix[0, 6]
    print(matrix[:, 4] * 3600 + matrix[:, 5] * 60 + matrix[:, 6])
    t_conv = (matrix[:, 4] * 3600 + matrix[:, 5] * 60 + matrix[:, 6]) - t_start
    t_start_s = matrix[0, 1] * 3600 + matrix[0, 2] * 60 + matrix[0, 3]
    t_conv_s = (matrix[:, 1] * 3600 + matrix[:, 2] * 60 + matrix[:, 3]) - t_start_s
    # Per-second accumulators; jj indexes the current one-second bin.
    jj = 0
    t_int = 0
    bitrate = [0.0]
    delay = [0.0]
    jitter = [0.0]
    pktloss = [0.0]
    for ii in range(len(matrix)):
        if t_conv[ii] - t_int >= 1:
            jj = jj + 1
            t_int = t_conv[ii]
            bitrate.append(matrix[ii, 7])
            delay.append(t_conv[ii] - t_conv_s[ii])
            if ii > 0:
                # Assumption: column 0 carries the packet sequence number
                # (the MATLAB code used linear matrix(ii) indexing here).
                pktloss.append(matrix[ii, 0] - matrix[ii - 1, 0] - 1)
                jitter.append(t_conv[ii] - t_conv[ii - 1])
            else:
                pktloss.append(0.0)
                jitter.append(0.0)
        else:
            bitrate[jj] = bitrate[jj] + matrix[ii, 7]
            delay[jj] = mean([delay[jj], t_conv[ii] - t_conv_s[ii]])
            if ii > 0:
                pktloss[jj] = pktloss[jj] + matrix[ii, 0] - matrix[ii - 1, 0] - 1
                jitter[jj] = mean([jitter[jj], t_conv[ii] - t_conv[ii - 1]])
    bitrate = array(bitrate) / 125000  # bytes per second -> Mbps
    delay = array(delay)
    jitter = array(jitter)
    pktloss = array(pktloss)
    # matlabarray/cat come from the SMOP runtime (see the note at the top).
    return_matrix = matlabarray(cat(bitrate.T, delay.T, jitter.T, pktloss.T))
    subplot(2, 2, 1)
    bitrate_u = copy(bitrate)
    plot(arange(0, jj - 2), bitrate_u[1:jj - 1], '-')
    title('Throughput')
    xlabel('time [s]')
    ylabel('[Mbps]')
    axis(cat(0, max(t_conv), 0, round_(max(bitrate_u) * 1.125)))
    grid('on')
    subplot(2, 2, 2)
    plot(arange(0, len(delay) - 1), delay, '-')
    title('Delay')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0, max(t_conv), min(delay) - 1e-05, max(delay)))
    grid('on')
    subplot(2, 2, 3)
    plot(arange(0, len(jitter) - 1), jitter, '-')
    title('Jitter')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0, max(t_conv), min(jitter) - max(jitter) * 1.125, max(jitter) * 1.125))
    grid('on')
    subplot(2, 2, 4)
    d = diff(t_conv)
    m = max(d)
    hist(d)
    title('Inter-departure time Distribution')
    xlabel('time [s]')
    ylabel('Empirical PDF')
    grid('on')
    firstPicture = figure(1)
    set_(firstPicture, 'PaperUnits', 'centimeters')
    set_(firstPicture, 'PaperSize', cat(22, 18))
    set_(firstPicture, 'PaperPosition', cat(0, 0, 22, 18))
    set_(firstPicture, 'PaperOrientation', 'portrait')
    saveas(firstPicture, current_picture_file_name, output_format)
    close(firstPicture)
    # if (strcmp(log_type,'udp_rcv')):
    #     subplot(1,1,1)
    #     packetloss_picture=figure(1)
    #     set_(packetloss_picture,'PaperUnits','centimeters')
    #     set_(packetloss_picture,'PaperSize',cat(12,10))
    #     set_(packetloss_picture,'PaperPosition',cat(0,0,12,10))
    #     set_(packetloss_picture,'PaperOrientation','portrait')
    #     plot(arange(0,len(pktloss) - 1),pktloss,'-')
    #     title('Packet loss')
    #     xlabel('time [s]')
    #     ylabel('[pps]')
    #     axis(cat(sort(cat(0,max(t_conv))),sort(cat(round_(max(pktloss)) + 1,round_(min(pktloss)) - 1))))
    #     grid('on')
    #     saveas(packetloss_picture,strcat('pl_',current_picture_file_name),output_format)
    #     close(packetloss_picture)
    return return_matrix


crosstraffic = False
#process_complete_test_set(['/tmp/octave.dat'],'pdf',crosstraffic)
process_single_testfile(load_octave_decoded_file_as_matrix('/tmp/octave.dat'), 'pic.jpg', "jpg")
        current_picture_file_name = strcat(f, '.jpg')
        matrix_to_process = load_octave_decoded_file_as_matrix(f)
        parsed_data = process_single_testfile(matrix_to_process, current_picture_file_name, output_format)
random_line_split
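The per-second aggregation at the heart of process_single_testfile (bucket packets into one-second bins and total their byte counts) can be expressed directly with numpy. A sketch, reusing the bytes-to-Mbit factor of 125000 from the code above; the column meanings remain the same assumptions made there:

import numpy as np

def per_second_bitrate(t_conv, nbytes):
    # Bin each packet by the integer second it arrived in, then total bytes per bin.
    t = np.asarray(t_conv)
    b = np.asarray(nbytes, dtype=float)
    totals = np.bincount(np.floor(t).astype(int), weights=b)
    return totals / 125000.0  # bytes per 1 s bin -> Mbps

print(per_second_bitrate([0.1, 0.5, 1.2], [1500.0, 1500.0, 1500.0]))  # -> [0.024  0.012]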