Dataset schema (one column per field, all stored as large_string):

  file_name   string, length 4 to 140 characters
  prefix      string, length 0 to 39k characters
  suffix      string, length 0 to 36.1k characters
  middle      string, length 0 to 29.4k characters
  fim_type    string, 4 distinct class values
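Each record splits one source file into a prefix, the span to be predicted (middle), and a suffix; concatenating the three fields in that order reproduces the original file, and fim_type describes how the span was chosen. Below is a minimal Python sketch of how one record reassembles; the row literal is a hypothetical example — only the field names come from the schema above, the values are made up for illustration.

# Minimal sketch: reassembling a fill-in-the-middle (FIM) record.
# The dict below is a hypothetical row following the dataset schema;
# its values are illustrative, not taken from the data.
row = {
    "file_name": "example.py",
    "prefix": "def add(a, b):\n    ",
    "middle": "return a + b",
    "suffix": "\n\nprint(add(1, 2))\n",
    "fim_type": "identifier_body",
}

# The original file is prefix + middle + suffix; a FIM model is given
# prefix and suffix and asked to produce middle.
reconstructed = row["prefix"] + row["middle"] + row["suffix"]
print(reconstructed)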
test_guest_vlan_range.py
""" P1 tests for Dedicating Guest Vlan Ranges """ # Import Local Modules from marvin.cloudstackAPI import * from marvin.cloudstackTestCase import * from marvin.lib.base import * from marvin.lib.common import * from marvin.lib.utils import * from nose.plugins.attrib import attr class TestDedicateGuestVlanRange(cloudstackTestCase): @classmethod def setUpClass(cls): testClient = super(TestDedicateGuestVlanRange, cls).getClsTestClient() cls.apiclient = testClient.getApiClient() cls.services = testClient.getParsedTestDataConfig() # Get Zone, Domain cls.domain = get_domain(cls.apiclient) cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests()) # Create Account cls.account = Account.create( cls.apiclient, cls.services["account"], domainid=cls.domain.id ) cls._cleanup = [ cls.account, ] cls.physical_network, cls.free_vlan = setNonContiguousVlanIds(cls.apiclient, cls.zone.id) return @classmethod def tearDownClass(cls): try: # Cleanup resources used removeGuestVlanRangeResponse = \ cls.physical_network.update(cls.apiclient, id=cls.physical_network.id, vlan=cls.physical_network.vlan) cleanup_resources(cls.apiclient, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return def
(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] return def tearDown(self): try: # Clean up cleanup_resources(self.apiclient, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @attr(tags=["advanced", "guestvlanrange", "dedicate", "release"], required_hardware="false") def test_dedicateGuestVlanRange(self): """Test guest vlan range dedication """ """Assume a physical network is available """ """ # Validate the following: # 1. List the available physical network using ListPhysicalNetwork # 2. Add a Guest Vlan range to the available physical network using UpdatePhysicalNetwork # 3. Dedicate the created guest vlan range to user account using DedicateGuestVlanRange # 4. Verify vlan range is dedicated with listDedicatedGuestVlanRanges # 5. Release the dedicated guest vlan range back to the system # 6. Verify guest vlan range has been released, verify with listDedicatedGuestVlanRanges # 7. Remove the added guest vlan range using UpdatePhysicalNetwork """ self.debug("Adding guest vlan range") new_vlan = self.physical_network.vlan + "," + self.free_vlan["partial_range"][0] # new_vlan = self.free_vlan["partial_range"][0] addGuestVlanRangeResponse = self.physical_network.update(self.apiclient, id=self.physical_network.id, vlan=new_vlan) # id=self.physical_network.id, vlan=self.free_vlan["partial_range"][0]) self.debug("Dedicating guest vlan range"); dedicate_guest_vlan_range_response = PhysicalNetwork.dedicate( self.apiclient, self.free_vlan["partial_range"][0], physicalnetworkid=self.physical_network.id, account=self.account.name, domainid=self.account.domainid ) list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated( self.apiclient, id=dedicate_guest_vlan_range_response.id ) dedicated_guest_vlan_response = list_dedicated_guest_vlan_range_response[0] self.assertEqual( dedicated_guest_vlan_response.account, self.account.name, "Check account name is in listDedicatedGuestVlanRanges as the account the range is dedicated to" ) self.debug("Releasing guest vlan range"); dedicate_guest_vlan_range_response.release(self.apiclient) list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated(self.apiclient) self.assertEqual( list_dedicated_guest_vlan_range_response, None, "Check vlan range is not available in listDedicatedGuestVlanRanges" )
setUp
identifier_name
test_guest_vlan_range.py
""" P1 tests for Dedicating Guest Vlan Ranges """ # Import Local Modules from marvin.cloudstackAPI import * from marvin.cloudstackTestCase import * from marvin.lib.base import * from marvin.lib.common import * from marvin.lib.utils import * from nose.plugins.attrib import attr class TestDedicateGuestVlanRange(cloudstackTestCase): @classmethod def setUpClass(cls): testClient = super(TestDedicateGuestVlanRange, cls).getClsTestClient() cls.apiclient = testClient.getApiClient() cls.services = testClient.getParsedTestDataConfig() # Get Zone, Domain cls.domain = get_domain(cls.apiclient) cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests()) # Create Account cls.account = Account.create( cls.apiclient, cls.services["account"], domainid=cls.domain.id ) cls._cleanup = [ cls.account, ] cls.physical_network, cls.free_vlan = setNonContiguousVlanIds(cls.apiclient, cls.zone.id) return @classmethod def tearDownClass(cls): try: # Cleanup resources used removeGuestVlanRangeResponse = \ cls.physical_network.update(cls.apiclient, id=cls.physical_network.id, vlan=cls.physical_network.vlan) cleanup_resources(cls.apiclient, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return def setUp(self): self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] return def tearDown(self): try: # Clean up cleanup_resources(self.apiclient, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @attr(tags=["advanced", "guestvlanrange", "dedicate", "release"], required_hardware="false") def test_dedicateGuestVlanRange(self): """Test guest vlan range dedication """ """Assume a physical network is available """ """ # Validate the following: # 1. List the available physical network using ListPhysicalNetwork # 2. Add a Guest Vlan range to the available physical network using UpdatePhysicalNetwork # 3. Dedicate the created guest vlan range to user account using DedicateGuestVlanRange # 4. Verify vlan range is dedicated with listDedicatedGuestVlanRanges # 5. Release the dedicated guest vlan range back to the system # 6. Verify guest vlan range has been released, verify with listDedicatedGuestVlanRanges # 7. Remove the added guest vlan range using UpdatePhysicalNetwork """ self.debug("Adding guest vlan range") new_vlan = self.physical_network.vlan + "," + self.free_vlan["partial_range"][0] # new_vlan = self.free_vlan["partial_range"][0] addGuestVlanRangeResponse = self.physical_network.update(self.apiclient, id=self.physical_network.id, vlan=new_vlan) # id=self.physical_network.id, vlan=self.free_vlan["partial_range"][0]) self.debug("Dedicating guest vlan range"); dedicate_guest_vlan_range_response = PhysicalNetwork.dedicate( self.apiclient, self.free_vlan["partial_range"][0], physicalnetworkid=self.physical_network.id, account=self.account.name, domainid=self.account.domainid ) list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated( self.apiclient, id=dedicate_guest_vlan_range_response.id ) dedicated_guest_vlan_response = list_dedicated_guest_vlan_range_response[0] self.assertEqual( dedicated_guest_vlan_response.account, self.account.name, "Check account name is in listDedicatedGuestVlanRanges as the account the range is dedicated to" )
list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated(self.apiclient) self.assertEqual( list_dedicated_guest_vlan_range_response, None, "Check vlan range is not available in listDedicatedGuestVlanRanges" )
self.debug("Releasing guest vlan range"); dedicate_guest_vlan_range_response.release(self.apiclient)
random_line_split
ParameterType.py
""" Created on Feb 15, 2014 @author: alex """ from sqlalchemy import Column from sqlalchemy.types import SmallInteger from sqlalchemy.types import Unicode from .meta import Base class
(Base): """ classdocs """ __tablename__ = 'ParameterTypes' _id = Column(SmallInteger, primary_key=True, autoincrement=True, nullable=False, unique=True) name = Column(Unicode(250), nullable=False, unique=True) unit = Column(Unicode(250), nullable=False) def __init__(self, name, unit): self.name = name self.unit = unit @property def id(self): return self._id @property def serialize(self): """Return data in serializeable (dictionary) format""" ret_dict = { 'id': self.id, 'name': self.name, 'unit': self.unit } return ret_dict def __repr__(self): return str(self.serialize) def init_parameter_types(db_session): db_session.add(ParameterType('Temperature', '°C')) db_session.add(ParameterType('Humidity', '%')) db_session.add(ParameterType('Volume', 'Liter')) db_session.add(ParameterType('pH', 'pH')) db_session.add(ParameterType('Conductivity', 'mS'))
ParameterType
identifier_name
ParameterType.py
""" Created on Feb 15, 2014 @author: alex """ from sqlalchemy import Column from sqlalchemy.types import SmallInteger from sqlalchemy.types import Unicode from .meta import Base class ParameterType(Base): """ classdocs """ __tablename__ = 'ParameterTypes' _id = Column(SmallInteger, primary_key=True, autoincrement=True, nullable=False, unique=True) name = Column(Unicode(250), nullable=False, unique=True) unit = Column(Unicode(250), nullable=False) def __init__(self, name, unit): self.name = name self.unit = unit @property def id(self): return self._id
    def serialize(self):
        """Return data in serializeable (dictionary) format"""
        ret_dict = {
            'id': self.id,
            'name': self.name,
            'unit': self.unit
        }
        return ret_dict

    def __repr__(self):
        return str(self.serialize)


def init_parameter_types(db_session):
    db_session.add(ParameterType('Temperature', '°C'))
    db_session.add(ParameterType('Humidity', '%'))
    db_session.add(ParameterType('Volume', 'Liter'))
    db_session.add(ParameterType('pH', 'pH'))
    db_session.add(ParameterType('Conductivity', 'mS'))
@property
random_line_split
ParameterType.py
""" Created on Feb 15, 2014 @author: alex """ from sqlalchemy import Column from sqlalchemy.types import SmallInteger from sqlalchemy.types import Unicode from .meta import Base class ParameterType(Base):
def init_parameter_types(db_session):
    db_session.add(ParameterType('Temperature', '°C'))
    db_session.add(ParameterType('Humidity', '%'))
    db_session.add(ParameterType('Volume', 'Liter'))
    db_session.add(ParameterType('pH', 'pH'))
    db_session.add(ParameterType('Conductivity', 'mS'))
""" classdocs """ __tablename__ = 'ParameterTypes' _id = Column(SmallInteger, primary_key=True, autoincrement=True, nullable=False, unique=True) name = Column(Unicode(250), nullable=False, unique=True) unit = Column(Unicode(250), nullable=False) def __init__(self, name, unit): self.name = name self.unit = unit @property def id(self): return self._id @property def serialize(self): """Return data in serializeable (dictionary) format""" ret_dict = { 'id': self.id, 'name': self.name, 'unit': self.unit } return ret_dict def __repr__(self): return str(self.serialize)
identifier_body
kendo.culture.kk-KZ.js
/* * Kendo UI v2014.2.716 (http://www.telerik.com/kendo-ui) * Copyright 2014 Telerik AD. All rights reserved. * * Kendo UI commercial licenses may be obtained at * http://www.telerik.com/purchase/license-agreement/kendo-ui-complete * If you do not own a commercial license, this file shall be governed by the trial license terms. */ (function(f, define){ define([], f); })(function(){ (function( window, undefined ) { var kendo = window.kendo || (window.kendo = { cultures: {} }); kendo.cultures["kk-KZ"] = { name: "kk-KZ", numberFormat: { pattern: ["-n"], decimals: 2, ",": " ", ".": ",", groupSize: [3], percent: { pattern: ["-n%","n%"], decimals: 2, ",": " ", ".": ",", groupSize: [3],
decimals: 2, ",": " ", ".": "-", groupSize: [3], symbol: "Т" } }, calendars: { standard: { days: { names: ["Жексенбі","Дүйсенбі","Сейсенбі","Сәрсенбі","Бейсенбі","Жұма","Сенбі"], namesAbbr: ["Жк","Дс","Сс","Ср","Бс","Жм","Сн"], namesShort: ["Жк","Дс","Сс","Ср","Бс","Жм","Сн"] }, months: { names: ["қаңтар","ақпан","наурыз","сәуір","мамыр","маусым","шілде","тамыз","қыркүйек","қазан","қараша","желтоқсан",""], namesAbbr: ["Қаң","Ақп","Нау","Сәу","Мам","Мау","Шіл","Там","Қыр","Қаз","Қар","Жел",""] }, AM: [""], PM: [""], patterns: { d: "dd.MM.yyyy", D: "d MMMM yyyy 'ж.'", F: "d MMMM yyyy 'ж.' H:mm:ss", g: "dd.MM.yyyy H:mm", G: "dd.MM.yyyy H:mm:ss", m: "d MMMM", M: "d MMMM", s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss", t: "H:mm", T: "H:mm:ss", u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'", y: "MMMM yyyy", Y: "MMMM yyyy" }, "/": ".", ":": ":", firstDay: 1 } } } })(this); return window.kendo; }, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });
symbol: "%" }, currency: { pattern: ["-$n","$n"],
random_line_split
common.ts
/**
* @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ export {Injector, OpaqueToken, ReflectiveInjector, bind, provide} from '@angular/core/src/di'; export {Options} from './src/common_options'; export {MeasureValues} from './src/measure_values'; export {Metric} from './src/metric'; export {MultiMetric} from './src/metric/multi_metric'; export {PerflogMetric} from './src/metric/perflog_metric'; export {UserMetric} from './src/metric/user_metric'; export {Reporter} from './src/reporter'; export {ConsoleReporter} from './src/reporter/console_reporter'; export {JsonFileReporter} from './src/reporter/json_file_reporter'; export {MultiReporter} from './src/reporter/multi_reporter'; export {Runner} from './src/runner'; export {SampleDescription} from './src/sample_description'; export {SampleState, Sampler} from './src/sampler'; export {Validator} from './src/validator'; export {RegressionSlopeValidator} from './src/validator/regression_slope_validator'; export {SizeValidator} from './src/validator/size_validator'; export {WebDriverAdapter} from './src/web_driver_adapter'; export {PerfLogFeatures, WebDriverExtension} from './src/web_driver_extension'; export {ChromeDriverExtension} from './src/webdriver/chrome_driver_extension'; export {FirefoxDriverExtension} from './src/webdriver/firefox_driver_extension'; export {IOsDriverExtension} from './src/webdriver/ios_driver_extension';
random_line_split
Pagination.tsx
import Link from "next/link"; import { FunctionComponent } from "react"; import { PagedCollection } from "types/Collection"; interface Props { collection: PagedCollection<any>; } const Pagination: FunctionComponent<Props> = ({ collection }) => { const view = collection && collection['hydra:view']; if (!view) return; const { 'hydra:first': first, 'hydra:previous': previous, 'hydra:next': next, 'hydra:last': last } = view; return ( <nav aria-label="Page navigation"> <Link href={first ? first : '#'}> <a className={`btn btn-primary${previous ? '' : ' disabled'}`}> <span aria-hidden="true">&lArr;</span> First </a> </Link> <Link href={previous ? previous : '#'}> <a className={`btn btn-primary${previous ? '' : ' disabled'}`}>
<Link href={next ? next : '#'}> <a className={`btn btn-primary${next ? '' : ' disabled'}`}> Next <span aria-hidden="true">&rarr;</span> </a> </Link> <Link href={last ? last : '#'}> <a className={`btn btn-primary${next ? '' : ' disabled'}`}> Last <span aria-hidden="true">&rArr;</span> </a> </Link> </nav> ); }; export default Pagination;
<span aria-hidden="true">&larr;</span> Previous </a> </Link>
random_line_split
home.component.ts
import { TextSplitPipe } from '../shared/pipes/text-split.pipe'; import { Greeting } from './home.model'; import { Response } from '@angular/http'; import { Component, OnInit } from '@angular/core'; import { HomeService } from './home.Service'; @Component({ selector: 'home', styleUrls: ['./home.component.css'], templateUrl: './home.component.html' }) export class HomeComponent implements OnInit { aboutName: string = "home"; testName: string = ""; greeting: Greeting = <Greeting>{}; values: [{ name: string; age: number; }]; valueString: string = "Hello Georgia"; valueDate: Date = new Date(); valueLongText: string = "Hello Bank of Georgia"; value3: string = new TextSplitPipe().transform("Hlello BOG from ts", 2); showText : boolean = true; showNumber : number = 2; constructor(public homeService: HomeService) { } public ngOnInit() { this.homeService.getGreetingMessage().subscribe( (response: Response) => { this.greeting = new Greeting(response.json()); }, (error: any) => { }, () => { }); this.values = [ { name: "Niko", age: 24 }, { name: "Vano", age: 25 }, { name: "Gurami", age: 25 } ]; } onNameChange() { console.log('click in home'); } onbtnClick() { //console.log(this.showText); this.showText = !this.showText; } showTextArea() { console.log("show");
console.log(item); } deleteItem(item){ let index = this.values.indexOf(item); this.values.splice(index,1); } }
return true; } printItem(item){
random_line_split
home.component.ts
import { TextSplitPipe } from '../shared/pipes/text-split.pipe'; import { Greeting } from './home.model'; import { Response } from '@angular/http'; import { Component, OnInit } from '@angular/core'; import { HomeService } from './home.Service'; @Component({ selector: 'home', styleUrls: ['./home.component.css'], templateUrl: './home.component.html' }) export class
implements OnInit { aboutName: string = "home"; testName: string = ""; greeting: Greeting = <Greeting>{}; values: [{ name: string; age: number; }]; valueString: string = "Hello Georgia"; valueDate: Date = new Date(); valueLongText: string = "Hello Bank of Georgia"; value3: string = new TextSplitPipe().transform("Hlello BOG from ts", 2); showText : boolean = true; showNumber : number = 2; constructor(public homeService: HomeService) { } public ngOnInit() { this.homeService.getGreetingMessage().subscribe( (response: Response) => { this.greeting = new Greeting(response.json()); }, (error: any) => { }, () => { }); this.values = [ { name: "Niko", age: 24 }, { name: "Vano", age: 25 }, { name: "Gurami", age: 25 } ]; } onNameChange() { console.log('click in home'); } onbtnClick() { //console.log(this.showText); this.showText = !this.showText; } showTextArea() { console.log("show"); return true; } printItem(item){ console.log(item); } deleteItem(item){ let index = this.values.indexOf(item); this.values.splice(index,1); } }
HomeComponent
identifier_name
home.component.ts
import { TextSplitPipe } from '../shared/pipes/text-split.pipe'; import { Greeting } from './home.model'; import { Response } from '@angular/http'; import { Component, OnInit } from '@angular/core'; import { HomeService } from './home.Service'; @Component({ selector: 'home', styleUrls: ['./home.component.css'], templateUrl: './home.component.html' }) export class HomeComponent implements OnInit { aboutName: string = "home"; testName: string = ""; greeting: Greeting = <Greeting>{}; values: [{ name: string; age: number; }]; valueString: string = "Hello Georgia"; valueDate: Date = new Date(); valueLongText: string = "Hello Bank of Georgia"; value3: string = new TextSplitPipe().transform("Hlello BOG from ts", 2); showText : boolean = true; showNumber : number = 2; constructor(public homeService: HomeService) { } public ngOnInit() { this.homeService.getGreetingMessage().subscribe( (response: Response) => { this.greeting = new Greeting(response.json()); }, (error: any) => { }, () => { }); this.values = [ { name: "Niko", age: 24 }, { name: "Vano", age: 25 }, { name: "Gurami", age: 25 } ]; } onNameChange() { console.log('click in home'); } onbtnClick() { //console.log(this.showText); this.showText = !this.showText; } showTextArea() { console.log("show"); return true; } printItem(item)
deleteItem(item){ let index = this.values.indexOf(item); this.values.splice(index,1); } }
{ console.log(item); }
identifier_body
app.js
var express = require('express'); var path = require('path'); var favicon = require('serve-favicon'); var logger = require('morgan'); var cookieParser = require('cookie-parser'); var bodyParser = require('body-parser'); var routes = require('./routes/index'); var app = express(); var dbConfig = require('./db.js'); var mongoose = require('mongoose'); mongoose.connect(dbConfig.url); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'jade'); // uncomment after placing your favicon in /public //app.use(favicon(path.join(__dirname, 'public', 'favicon.ico'))); app.use(logger('dev')); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public'))); // Configuring Passport var passport = require('passport'); var expressSession = require('express-session'); app.use(expressSession({secret: 'mySecretKey'})); app.use(passport.initialize()); app.use(passport.session()); app.use('/', routes); // catch 404 and forward to error handler app.use(function(req, res, next) { var err = new Error('Not Found'); err.status = 404; next(err); }); // error handlers // development error handler // will print stacktrace if (app.get('env') === 'development') { app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: err }); }); } // production error handler // no stacktraces leaked to user app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: {} });
}); module.exports = app;
random_line_split
app.js
var express = require('express'); var path = require('path'); var favicon = require('serve-favicon'); var logger = require('morgan'); var cookieParser = require('cookie-parser'); var bodyParser = require('body-parser'); var routes = require('./routes/index'); var app = express(); var dbConfig = require('./db.js'); var mongoose = require('mongoose'); mongoose.connect(dbConfig.url); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'jade'); // uncomment after placing your favicon in /public //app.use(favicon(path.join(__dirname, 'public', 'favicon.ico'))); app.use(logger('dev')); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public'))); // Configuring Passport var passport = require('passport'); var expressSession = require('express-session'); app.use(expressSession({secret: 'mySecretKey'})); app.use(passport.initialize()); app.use(passport.session()); app.use('/', routes); // catch 404 and forward to error handler app.use(function(req, res, next) { var err = new Error('Not Found'); err.status = 404; next(err); }); // error handlers // development error handler // will print stacktrace if (app.get('env') === 'development')
// production error handler // no stacktraces leaked to user app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: {} }); }); module.exports = app;
{ app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: err }); }); }
conditional_block
CircularAudioBuffer.d.ts
/** * Copyright 2017 Google LLC * * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License.
* * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ /** * Save Float32Array in arbitrarily sized chunks. * Load Float32Array in arbitrarily sized chunks. * Determine if there's enough data to grab a certain amount. */ export default class CircularAudioBuffer { buffer: Float32Array; currentIndex: number; constructor(maxLength: number); /** * Add a new buffer of data. Called when we get new audio input samples. */ addBuffer(newBuffer: Float32Array): void; /** * How many samples are stored currently? */ getLength(): number; /** * How much space remains? */ getRemainingLength(): number; /** * Return the first N samples of the buffer, and remove them. Called when we * want to get a buffer of audio data of a fixed size. */ popBuffer(length: number): Float32Array; /** * Get the the first part of the buffer without mutating it. */ getBuffer(length?: number): Float32Array; clear(): void; }
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0
random_line_split
CircularAudioBuffer.d.ts
/** * Copyright 2017 Google LLC * * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ /** * Save Float32Array in arbitrarily sized chunks. * Load Float32Array in arbitrarily sized chunks. * Determine if there's enough data to grab a certain amount. */ export default class
{ buffer: Float32Array; currentIndex: number; constructor(maxLength: number); /** * Add a new buffer of data. Called when we get new audio input samples. */ addBuffer(newBuffer: Float32Array): void; /** * How many samples are stored currently? */ getLength(): number; /** * How much space remains? */ getRemainingLength(): number; /** * Return the first N samples of the buffer, and remove them. Called when we * want to get a buffer of audio data of a fixed size. */ popBuffer(length: number): Float32Array; /** * Get the the first part of the buffer without mutating it. */ getBuffer(length?: number): Float32Array; clear(): void; }
CircularAudioBuffer
identifier_name
xyz-source.js
import { createTileUrlFunctionFromTemplates } from 'ol-tilecache' import { XYZ as XYZSource } from 'ol/source' import { createXYZ } from 'ol/tilegrid' import { EPSG_3857, extentFromProjection } from '../ol-ext' import { and, coalesce, isArray, isFunction, isNumber, noop, or } from '../utils' import source from './source' import tileImageSource from './tile-image-source' const validateMinZoom = value => value >= 0
mixins: [ tileImageSource, ], props: { /* eslint-disable vue/require-prop-types */ // ol/source/Source projection: { ...source.props.projection, default: EPSG_3857, }, /* eslint-enable vue/require-prop-types */ // ol/source/XYZ maxZoom: { type: Number, default: 42, }, minZoom: { type: Number, default: 0, validator: validateMinZoom, }, maxResolution: Number, tileSize: { type: [Number, Array], default: () => [256, 256], validator: validateTileSize, }, }, computed: { inputTileSize () { return isArray(this.tileSize) ? this.tileSize.slice() : [this.tileSize, this.tileSize] }, derivedTileGridFactory () { if (isFunction(this.tileGridFactory)) { return this.tileGridFactory } const extent = extentFromProjection(this.resolvedDataProjection) const maxZoom = this.maxZoom const minZoom = this.minZoom const maxResolution = this.maxResolution const tileSize = this.inputTileSize return () => createXYZ({ extent, maxZoom, minZoom, maxResolution, tileSize }) }, inputTileUrlFunction () { const urlFunc = coalesce(this.tileUrlFunction, this.tileUrlFunc) if (isFunction(urlFunc)) return urlFunc if (this.currentUrls.length === 0) return return createTileUrlFunctionFromTemplates(this.currentUrls, this.tileGrid) }, }, methods: { /** * @return {module:ol/source/XYZ~XYZSource} * @protected */ createSource () { return new XYZSource({ // ol/source/Source attributions: this.currentAttributions, attributionsCollapsible: this.attributionsCollapsible, projection: this.resolvedDataProjection, wrapX: this.wrapX, // ol/source/Tile cacheSize: this.cacheSize, opaque: this.opaque, tilePixelRatio: this.tilePixelRatio, transition: this.transition, zDirection: this.zDirection, tileGrid: this.tileGrid, // ol/source/UrlTile tileLoadFunction: this.currentTileLoadFunction, tileUrlFunction: this.currentTileUrlFunction, // ol/source/TileImage crossOrigin: this.crossOrigin, reprojectionErrorThreshold: this.reprojectionErrorThreshold, tileClass: this.tileClass, imageSmoothing: this.imageSmoothing, }) }, tileKeyChanged: noop, // input tileKey is not allowed in XYZ constructor stateChanged: noop, // input state is not allowed in XYZ constructor }, }
const validateTileSize = /*#__PURE__*/or(isNumber, and(isArray, value => value.length === 2 && value.every(isNumber))) /** * Base XYZ source mixin. */ export default {
random_line_split
xyz-source.js
import { createTileUrlFunctionFromTemplates } from 'ol-tilecache' import { XYZ as XYZSource } from 'ol/source' import { createXYZ } from 'ol/tilegrid' import { EPSG_3857, extentFromProjection } from '../ol-ext' import { and, coalesce, isArray, isFunction, isNumber, noop, or } from '../utils' import source from './source' import tileImageSource from './tile-image-source' const validateMinZoom = value => value >= 0 const validateTileSize = /*#__PURE__*/or(isNumber, and(isArray, value => value.length === 2 && value.every(isNumber))) /** * Base XYZ source mixin. */ export default { mixins: [ tileImageSource, ], props: { /* eslint-disable vue/require-prop-types */ // ol/source/Source projection: { ...source.props.projection, default: EPSG_3857, }, /* eslint-enable vue/require-prop-types */ // ol/source/XYZ maxZoom: { type: Number, default: 42, }, minZoom: { type: Number, default: 0, validator: validateMinZoom, }, maxResolution: Number, tileSize: { type: [Number, Array], default: () => [256, 256], validator: validateTileSize, }, }, computed: { inputTileSize () { return isArray(this.tileSize) ? this.tileSize.slice() : [this.tileSize, this.tileSize] }, derivedTileGridFactory () { if (isFunction(this.tileGridFactory)) { return this.tileGridFactory } const extent = extentFromProjection(this.resolvedDataProjection) const maxZoom = this.maxZoom const minZoom = this.minZoom const maxResolution = this.maxResolution const tileSize = this.inputTileSize return () => createXYZ({ extent, maxZoom, minZoom, maxResolution, tileSize }) },
() { const urlFunc = coalesce(this.tileUrlFunction, this.tileUrlFunc) if (isFunction(urlFunc)) return urlFunc if (this.currentUrls.length === 0) return return createTileUrlFunctionFromTemplates(this.currentUrls, this.tileGrid) }, }, methods: { /** * @return {module:ol/source/XYZ~XYZSource} * @protected */ createSource () { return new XYZSource({ // ol/source/Source attributions: this.currentAttributions, attributionsCollapsible: this.attributionsCollapsible, projection: this.resolvedDataProjection, wrapX: this.wrapX, // ol/source/Tile cacheSize: this.cacheSize, opaque: this.opaque, tilePixelRatio: this.tilePixelRatio, transition: this.transition, zDirection: this.zDirection, tileGrid: this.tileGrid, // ol/source/UrlTile tileLoadFunction: this.currentTileLoadFunction, tileUrlFunction: this.currentTileUrlFunction, // ol/source/TileImage crossOrigin: this.crossOrigin, reprojectionErrorThreshold: this.reprojectionErrorThreshold, tileClass: this.tileClass, imageSmoothing: this.imageSmoothing, }) }, tileKeyChanged: noop, // input tileKey is not allowed in XYZ constructor stateChanged: noop, // input state is not allowed in XYZ constructor }, }
inputTileUrlFunction
identifier_name
xyz-source.js
import { createTileUrlFunctionFromTemplates } from 'ol-tilecache' import { XYZ as XYZSource } from 'ol/source' import { createXYZ } from 'ol/tilegrid' import { EPSG_3857, extentFromProjection } from '../ol-ext' import { and, coalesce, isArray, isFunction, isNumber, noop, or } from '../utils' import source from './source' import tileImageSource from './tile-image-source' const validateMinZoom = value => value >= 0 const validateTileSize = /*#__PURE__*/or(isNumber, and(isArray, value => value.length === 2 && value.every(isNumber))) /** * Base XYZ source mixin. */ export default { mixins: [ tileImageSource, ], props: { /* eslint-disable vue/require-prop-types */ // ol/source/Source projection: { ...source.props.projection, default: EPSG_3857, }, /* eslint-enable vue/require-prop-types */ // ol/source/XYZ maxZoom: { type: Number, default: 42, }, minZoom: { type: Number, default: 0, validator: validateMinZoom, }, maxResolution: Number, tileSize: { type: [Number, Array], default: () => [256, 256], validator: validateTileSize, }, }, computed: { inputTileSize () { return isArray(this.tileSize) ? this.tileSize.slice() : [this.tileSize, this.tileSize] }, derivedTileGridFactory () { if (isFunction(this.tileGridFactory)) { return this.tileGridFactory } const extent = extentFromProjection(this.resolvedDataProjection) const maxZoom = this.maxZoom const minZoom = this.minZoom const maxResolution = this.maxResolution const tileSize = this.inputTileSize return () => createXYZ({ extent, maxZoom, minZoom, maxResolution, tileSize }) }, inputTileUrlFunction ()
, }, methods: { /** * @return {module:ol/source/XYZ~XYZSource} * @protected */ createSource () { return new XYZSource({ // ol/source/Source attributions: this.currentAttributions, attributionsCollapsible: this.attributionsCollapsible, projection: this.resolvedDataProjection, wrapX: this.wrapX, // ol/source/Tile cacheSize: this.cacheSize, opaque: this.opaque, tilePixelRatio: this.tilePixelRatio, transition: this.transition, zDirection: this.zDirection, tileGrid: this.tileGrid, // ol/source/UrlTile tileLoadFunction: this.currentTileLoadFunction, tileUrlFunction: this.currentTileUrlFunction, // ol/source/TileImage crossOrigin: this.crossOrigin, reprojectionErrorThreshold: this.reprojectionErrorThreshold, tileClass: this.tileClass, imageSmoothing: this.imageSmoothing, }) }, tileKeyChanged: noop, // input tileKey is not allowed in XYZ constructor stateChanged: noop, // input state is not allowed in XYZ constructor }, }
{ const urlFunc = coalesce(this.tileUrlFunction, this.tileUrlFunc) if (isFunction(urlFunc)) return urlFunc if (this.currentUrls.length === 0) return return createTileUrlFunctionFromTemplates(this.currentUrls, this.tileGrid) }
identifier_body
authentication.js
// @flow import { AUTH_ASAP, AUTH_BASIC, AUTH_BEARER, AUTH_HAWK, AUTH_OAUTH_1, AUTH_OAUTH_2 } from '../common/constants'; import getOAuth2Token from './o-auth-2/get-token'; import getOAuth1Token from './o-auth-1/get-token'; import * as Hawk from 'hawk'; import jwtAuthentication from 'jwt-authentication'; import type { RequestAuthentication } from '../models/request'; import { getBasicAuthHeader } from './basic-auth/get-header'; import { getBearerAuthHeader } from './bearer-auth/get-header'; type Header = { name: string, value: string }; export async function getAuthHeader( requestId: string, url: string, method: string, authentication: RequestAuthentication ): Promise<Header | null> { if (authentication.disabled)
if (authentication.type === AUTH_BASIC) { const { username, password } = authentication; return getBasicAuthHeader(username, password); } if (authentication.type === AUTH_BEARER) { const { token, prefix } = authentication; return getBearerAuthHeader(token, prefix); } if (authentication.type === AUTH_OAUTH_2) { // HACK: GraphQL requests use a child request to fetch the schema with an // ID of "{{request_id}}.graphql". Here we are removing the .graphql suffix and // pretending we are fetching a token for the original request. This makes sure // the same tokens are used for schema fetching. See issue #835 on GitHub. const tokenId = requestId.match(/\.graphql$/) ? requestId.replace(/\.graphql$/, '') : requestId; const oAuth2Token = await getOAuth2Token(tokenId, authentication); if (oAuth2Token) { const token = oAuth2Token.accessToken; return _buildBearerHeader(token, authentication.tokenPrefix); } else { return null; } } if (authentication.type === AUTH_OAUTH_1) { const oAuth1Token = await getOAuth1Token(url, method, authentication); if (oAuth1Token) { return { name: 'Authorization', value: oAuth1Token.Authorization }; } else { return null; } } if (authentication.type === AUTH_HAWK) { const { id, key, algorithm } = authentication; const header = Hawk.client.header(url, method, { credentials: { id, key, algorithm } }); return { name: 'Authorization', value: header.field }; } if (authentication.type === AUTH_ASAP) { const { issuer, subject, audience, keyId, additionalClaims, privateKey } = authentication; const generator = jwtAuthentication.client.create(); let claims = { iss: issuer, sub: subject, aud: audience }; let parsedAdditionalClaims; try { parsedAdditionalClaims = JSON.parse(additionalClaims || '{}'); } catch (err) { throw new Error(`Unable to parse additional-claims: ${err}`); } if (parsedAdditionalClaims) { if (typeof parsedAdditionalClaims !== 'object') { throw new Error( `additional-claims must be an object received: '${typeof parsedAdditionalClaims}' instead` ); } claims = Object.assign(parsedAdditionalClaims, claims); } const options = { privateKey, kid: keyId }; return new Promise((resolve, reject) => { generator.generateAuthorizationHeader(claims, options, (error, headerValue) => { if (error) { reject(error); } else { resolve({ name: 'Authorization', value: headerValue }); } }); }); } return null; } function _buildBearerHeader(accessToken, prefix) { if (!accessToken) { return null; } const name = 'Authorization'; const value = `${prefix || 'Bearer'} ${accessToken}`; return { name, value }; }
{ return null; }
identifier_body
authentication.js
// @flow import { AUTH_ASAP, AUTH_BASIC, AUTH_BEARER, AUTH_HAWK, AUTH_OAUTH_1, AUTH_OAUTH_2 } from '../common/constants'; import getOAuth2Token from './o-auth-2/get-token'; import getOAuth1Token from './o-auth-1/get-token'; import * as Hawk from 'hawk'; import jwtAuthentication from 'jwt-authentication'; import type { RequestAuthentication } from '../models/request'; import { getBasicAuthHeader } from './basic-auth/get-header'; import { getBearerAuthHeader } from './bearer-auth/get-header'; type Header = { name: string, value: string }; export async function getAuthHeader( requestId: string, url: string, method: string, authentication: RequestAuthentication ): Promise<Header | null> { if (authentication.disabled) { return null; } if (authentication.type === AUTH_BASIC) { const { username, password } = authentication; return getBasicAuthHeader(username, password); }
(authentication.type === AUTH_BEARER) { const { token, prefix } = authentication; return getBearerAuthHeader(token, prefix); } if (authentication.type === AUTH_OAUTH_2) { // HACK: GraphQL requests use a child request to fetch the schema with an // ID of "{{request_id}}.graphql". Here we are removing the .graphql suffix and // pretending we are fetching a token for the original request. This makes sure // the same tokens are used for schema fetching. See issue #835 on GitHub. const tokenId = requestId.match(/\.graphql$/) ? requestId.replace(/\.graphql$/, '') : requestId; const oAuth2Token = await getOAuth2Token(tokenId, authentication); if (oAuth2Token) { const token = oAuth2Token.accessToken; return _buildBearerHeader(token, authentication.tokenPrefix); } else { return null; } } if (authentication.type === AUTH_OAUTH_1) { const oAuth1Token = await getOAuth1Token(url, method, authentication); if (oAuth1Token) { return { name: 'Authorization', value: oAuth1Token.Authorization }; } else { return null; } } if (authentication.type === AUTH_HAWK) { const { id, key, algorithm } = authentication; const header = Hawk.client.header(url, method, { credentials: { id, key, algorithm } }); return { name: 'Authorization', value: header.field }; } if (authentication.type === AUTH_ASAP) { const { issuer, subject, audience, keyId, additionalClaims, privateKey } = authentication; const generator = jwtAuthentication.client.create(); let claims = { iss: issuer, sub: subject, aud: audience }; let parsedAdditionalClaims; try { parsedAdditionalClaims = JSON.parse(additionalClaims || '{}'); } catch (err) { throw new Error(`Unable to parse additional-claims: ${err}`); } if (parsedAdditionalClaims) { if (typeof parsedAdditionalClaims !== 'object') { throw new Error( `additional-claims must be an object received: '${typeof parsedAdditionalClaims}' instead` ); } claims = Object.assign(parsedAdditionalClaims, claims); } const options = { privateKey, kid: keyId }; return new Promise((resolve, reject) => { generator.generateAuthorizationHeader(claims, options, (error, headerValue) => { if (error) { reject(error); } else { resolve({ name: 'Authorization', value: headerValue }); } }); }); } return null; } function _buildBearerHeader(accessToken, prefix) { if (!accessToken) { return null; } const name = 'Authorization'; const value = `${prefix || 'Bearer'} ${accessToken}`; return { name, value }; }
if
identifier_name
authentication.js
// @flow import { AUTH_ASAP, AUTH_BASIC, AUTH_BEARER, AUTH_HAWK, AUTH_OAUTH_1, AUTH_OAUTH_2 } from '../common/constants'; import getOAuth2Token from './o-auth-2/get-token'; import getOAuth1Token from './o-auth-1/get-token'; import * as Hawk from 'hawk'; import jwtAuthentication from 'jwt-authentication'; import type { RequestAuthentication } from '../models/request'; import { getBasicAuthHeader } from './basic-auth/get-header'; import { getBearerAuthHeader } from './bearer-auth/get-header'; type Header = { name: string, value: string }; export async function getAuthHeader( requestId: string, url: string, method: string, authentication: RequestAuthentication ): Promise<Header | null> { if (authentication.disabled) { return null; } if (authentication.type === AUTH_BASIC) { const { username, password } = authentication; return getBasicAuthHeader(username, password); } if (authentication.type === AUTH_BEARER) { const { token, prefix } = authentication; return getBearerAuthHeader(token, prefix); } if (authentication.type === AUTH_OAUTH_2) { // HACK: GraphQL requests use a child request to fetch the schema with an // ID of "{{request_id}}.graphql". Here we are removing the .graphql suffix and // pretending we are fetching a token for the original request. This makes sure // the same tokens are used for schema fetching. See issue #835 on GitHub. const tokenId = requestId.match(/\.graphql$/) ? requestId.replace(/\.graphql$/, '') : requestId; const oAuth2Token = await getOAuth2Token(tokenId, authentication); if (oAuth2Token) { const token = oAuth2Token.accessToken; return _buildBearerHeader(token, authentication.tokenPrefix); } else { return null; } } if (authentication.type === AUTH_OAUTH_1) { const oAuth1Token = await getOAuth1Token(url, method, authentication); if (oAuth1Token) { return { name: 'Authorization', value: oAuth1Token.Authorization }; } else { return null; } } if (authentication.type === AUTH_HAWK) { const { id, key, algorithm } = authentication; const header = Hawk.client.header(url, method, { credentials: { id, key, algorithm } }); return { name: 'Authorization', value: header.field }; } if (authentication.type === AUTH_ASAP) { const { issuer, subject, audience, keyId, additionalClaims, privateKey } = authentication; const generator = jwtAuthentication.client.create(); let claims = { iss: issuer, sub: subject, aud: audience }; let parsedAdditionalClaims; try { parsedAdditionalClaims = JSON.parse(additionalClaims || '{}'); } catch (err) { throw new Error(`Unable to parse additional-claims: ${err}`); }
if (typeof parsedAdditionalClaims !== 'object') { throw new Error( `additional-claims must be an object received: '${typeof parsedAdditionalClaims}' instead` ); } claims = Object.assign(parsedAdditionalClaims, claims); } const options = { privateKey, kid: keyId }; return new Promise((resolve, reject) => { generator.generateAuthorizationHeader(claims, options, (error, headerValue) => { if (error) { reject(error); } else { resolve({ name: 'Authorization', value: headerValue }); } }); }); } return null; } function _buildBearerHeader(accessToken, prefix) { if (!accessToken) { return null; } const name = 'Authorization'; const value = `${prefix || 'Bearer'} ${accessToken}`; return { name, value }; }
if (parsedAdditionalClaims) {
random_line_split
authentication.js
// @flow import { AUTH_ASAP, AUTH_BASIC, AUTH_BEARER, AUTH_HAWK, AUTH_OAUTH_1, AUTH_OAUTH_2 } from '../common/constants'; import getOAuth2Token from './o-auth-2/get-token'; import getOAuth1Token from './o-auth-1/get-token'; import * as Hawk from 'hawk'; import jwtAuthentication from 'jwt-authentication'; import type { RequestAuthentication } from '../models/request'; import { getBasicAuthHeader } from './basic-auth/get-header'; import { getBearerAuthHeader } from './bearer-auth/get-header'; type Header = { name: string, value: string }; export async function getAuthHeader( requestId: string, url: string, method: string, authentication: RequestAuthentication ): Promise<Header | null> { if (authentication.disabled) { return null; } if (authentication.type === AUTH_BASIC) { const { username, password } = authentication; return getBasicAuthHeader(username, password); } if (authentication.type === AUTH_BEARER) { const { token, prefix } = authentication; return getBearerAuthHeader(token, prefix); } if (authentication.type === AUTH_OAUTH_2) { // HACK: GraphQL requests use a child request to fetch the schema with an // ID of "{{request_id}}.graphql". Here we are removing the .graphql suffix and // pretending we are fetching a token for the original request. This makes sure // the same tokens are used for schema fetching. See issue #835 on GitHub. const tokenId = requestId.match(/\.graphql$/) ? requestId.replace(/\.graphql$/, '') : requestId; const oAuth2Token = await getOAuth2Token(tokenId, authentication); if (oAuth2Token) { const token = oAuth2Token.accessToken; return _buildBearerHeader(token, authentication.tokenPrefix); } else { return null; } } if (authentication.type === AUTH_OAUTH_1) { const oAuth1Token = await getOAuth1Token(url, method, authentication); if (oAuth1Token) { return { name: 'Authorization', value: oAuth1Token.Authorization }; } else { return null; } } if (authentication.type === AUTH_HAWK) { const { id, key, algorithm } = authentication; const header = Hawk.client.header(url, method, { credentials: { id, key, algorithm } }); return { name: 'Authorization', value: header.field }; } if (authentication.type === AUTH_ASAP) { const { issuer, subject, audience, keyId, additionalClaims, privateKey } = authentication; const generator = jwtAuthentication.client.create(); let claims = { iss: issuer, sub: subject, aud: audience }; let parsedAdditionalClaims; try { parsedAdditionalClaims = JSON.parse(additionalClaims || '{}'); } catch (err) { throw new Error(`Unable to parse additional-claims: ${err}`); } if (parsedAdditionalClaims) { if (typeof parsedAdditionalClaims !== 'object') { throw new Error( `additional-claims must be an object received: '${typeof parsedAdditionalClaims}' instead` ); } claims = Object.assign(parsedAdditionalClaims, claims); } const options = { privateKey, kid: keyId }; return new Promise((resolve, reject) => { generator.generateAuthorizationHeader(claims, options, (error, headerValue) => { if (error) { reject(error); } else
}); }); } return null; } function _buildBearerHeader(accessToken, prefix) { if (!accessToken) { return null; } const name = 'Authorization'; const value = `${prefix || 'Bearer'} ${accessToken}`; return { name, value }; }
{ resolve({ name: 'Authorization', value: headerValue }); }
conditional_block
main.js
// For an introduction to the Page Control template, see the following documentation: // http://go.microsoft.com/fwlink/?LinkId=232511 (function () { "use strict"; var app = WinJS.Application; var nav = WinJS.Navigation; var sched = WinJS.Utilities.Scheduler; var ui = WinJS.UI; WinJS.UI.Pages.define("pages/main/main.html", { // This function is called whenever a user navigates to this page. It // populates the page elements with the app's data. ready: function (element, options) { var launchtiles = document.querySelectorAll('.launchtile'); for (var n = 0; n < launchtiles.length; n++) {
}, toggleNavBarVisibility: function (ev) { document.getElementById('createNavBar').winControl.show(); }, launchtileInvoked: function (ev) { var tile = ev.currentTarget; var location = "pages/" + tile.dataset.page + "/" + tile.dataset.page + ".html"; nav.navigate(location); var trigger = document.querySelector(".nav-trigger"); trigger.checked = false; }, unload: function () { // TODO: Respond to navigations away from this page. }, updateLayout: function (element) { /// <param name="element" domElement="true" /> // TODO: Respond to changes in layout. } }); })();
var launchtile = launchtiles[n]; launchtile.addEventListener('click', this.launchtileInvoked.bind(this)); }
conditional_block
main.js
// For an introduction to the Page Control template, see the following documentation: // http://go.microsoft.com/fwlink/?LinkId=232511 (function () { "use strict"; var app = WinJS.Application; var nav = WinJS.Navigation; var sched = WinJS.Utilities.Scheduler; var ui = WinJS.UI; WinJS.UI.Pages.define("pages/main/main.html", { // This function is called whenever a user navigates to this page. It // populates the page elements with the app's data. ready: function (element, options) { var launchtiles = document.querySelectorAll('.launchtile'); for (var n = 0; n < launchtiles.length; n++) { var launchtile = launchtiles[n]; launchtile.addEventListener('click', this.launchtileInvoked.bind(this)); } }, toggleNavBarVisibility: function (ev) { document.getElementById('createNavBar').winControl.show(); }, launchtileInvoked: function (ev) { var tile = ev.currentTarget; var location = "pages/" + tile.dataset.page + "/" + tile.dataset.page + ".html"; nav.navigate(location); var trigger = document.querySelector(".nav-trigger"); trigger.checked = false; }, unload: function () {
}, updateLayout: function (element) { /// <param name="element" domElement="true" /> // TODO: Respond to changes in layout. } }); })();
// TODO: Respond to navigations away from this page.
random_line_split
index.js
'use strict'; var Grid = require('nd-grid'); var datetime = require('nd-datetime'); var RbacRoleModel = require('../../../mod/model/rbac/role'); module.exports = function(util) { if (!util.auth.hasAuth('=8')) { return util.redirect('error/403'); } var instance = new Grid({ parentNode: '#main', proxy: new RbacRoleModel(), mode: util.RBAC_ENABLED ? 2 : 0, autoload: util.RBAC_ENABLED, uniqueId: 'role_id', entryKey: null, labelMap: { 'role_id': 'ID', 'role_name': '名称', 'remarks': '备注', 'is_default': '默认角色', // 0-NONE,1-READ,2-ADD,3-WRITE,4-DELETE 'auth_extra': '权限扩展', 'updated_at': '更新时间' }, adapters: function(key, value) { switch (key) { case 'is_default': return value ? '是' : '否'; case 'auth_extra': return ['NONE', 'READ', 'ADD', 'WRITE', 'DELETE'][value]; case 'updated_at': return datetime(value).format(); default: return value; } }, plugins: [{ name: 'roleUser', starter: require('./user/starter') }, { name: 'roleAuth', starter: require('./auth/starter') }], pluginCfg: { addItem: { disabled: false,
start: require('./add/start') } }, editItem: { disabled: false, listeners: { start: require('./edit/start') } }, delItem: { disabled: false }, search: { disabled: util.RBAC_ENABLED, listeners: { start: require('./search/start') } } } }).render(); if (!util.RBAC_ENABLED) { // 新增 // RISK: 如果多次调用,会多次绑定 instance.get('proxy').on('POST', function(options) { options.data.realm = instance.get('params').realm; }); } // 面包屑导航 util.bread.set( [{ title: '权限管理' }, { title: '角色列表' }] ); // 返回垃圾回收 // 否则内存泄漏 return function() { instance.destroy(); }; };
listeners: {
random_line_split
shared.ts
import {ListWrapper, StringMapWrapper} from 'angular2/src/facade/collection'; import {isBlank, isPresent, looseIdentical, hasConstructor} from 'angular2/src/facade/lang'; import {BaseException, WrappedException} from 'angular2/src/facade/exceptions'; import {ControlContainer} from './control_container'; import {NgControl} from './ng_control'; import {AbstractControlDirective} from './abstract_control_directive'; import {NgControlGroup} from './ng_control_group'; import {Control, ControlGroup} from '../model'; import {Validators} from '../validators'; import {ControlValueAccessor} from './control_value_accessor'; import {DefaultValueAccessor} from './default_value_accessor'; import {NumberValueAccessor} from './number_value_accessor'; import {CheckboxControlValueAccessor} from './checkbox_value_accessor'; import {SelectControlValueAccessor} from './select_control_value_accessor'; import {RadioControlValueAccessor} from './radio_control_value_accessor'; import {normalizeValidator, normalizeAsyncValidator} from './normalize_validator'; import {ValidatorFn, AsyncValidatorFn} from './validators'; export function controlPath(name: string, parent: ControlContainer): string[] { var p = ListWrapper.clone(parent.path); p.push(name); return p; } export function setUpControl(control: Control, dir: NgControl): void { if (isBlank(control)) _throwError(dir, "Cannot find control"); if (isBlank(dir.valueAccessor)) _throwError(dir, "No value accessor for"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); dir.valueAccessor.writeValue(control.value); // view -> model dir.valueAccessor.registerOnChange((newValue: any) => { dir.viewToModelUpdate(newValue); control.updateValue(newValue, {emitModelToViewChange: false}); control.markAsDirty(); }); // model -> view control.registerOnChange((newValue: any) => dir.valueAccessor.writeValue(newValue)); // touched dir.valueAccessor.registerOnTouched(() => control.markAsTouched()); } export function
(control: ControlGroup, dir: NgControlGroup) { if (isBlank(control)) _throwError(dir, "Cannot find control"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); } function _throwError(dir: AbstractControlDirective, message: string): void { var path = dir.path.join(" -> "); throw new BaseException(`${message} '${path}'`); } export function composeValidators(validators: /* Array<Validator|Function> */ any[]): ValidatorFn { return isPresent(validators) ? Validators.compose(validators.map(normalizeValidator)) : null; } export function composeAsyncValidators( validators: /* Array<Validator|Function> */ any[]): AsyncValidatorFn { return isPresent(validators) ? Validators.composeAsync(validators.map(normalizeAsyncValidator)) : null; } export function isPropertyUpdated(changes: {[key: string]: any}, viewModel: any): boolean { if (!StringMapWrapper.contains(changes, "model")) return false; var change = changes["model"]; if (change.isFirstChange()) return true; return !looseIdentical(viewModel, change.currentValue); } // TODO: vsavkin remove it once https://github.com/angular/angular/issues/3011 is implemented export function selectValueAccessor(dir: NgControl, valueAccessors: ControlValueAccessor[]): ControlValueAccessor { if (isBlank(valueAccessors)) return null; var defaultAccessor: ControlValueAccessor; var builtinAccessor: ControlValueAccessor; var customAccessor: ControlValueAccessor; valueAccessors.forEach((v: ControlValueAccessor) => { if (hasConstructor(v, DefaultValueAccessor)) { defaultAccessor = v; } else if (hasConstructor(v, CheckboxControlValueAccessor) || hasConstructor(v, NumberValueAccessor) || hasConstructor(v, SelectControlValueAccessor) || hasConstructor(v, RadioControlValueAccessor)) { if (isPresent(builtinAccessor)) _throwError(dir, "More than one built-in value accessor matches"); builtinAccessor = v; } else { if (isPresent(customAccessor)) _throwError(dir, "More than one custom value accessor matches"); customAccessor = v; } }); if (isPresent(customAccessor)) return customAccessor; if (isPresent(builtinAccessor)) return builtinAccessor; if (isPresent(defaultAccessor)) return defaultAccessor; _throwError(dir, "No valid value accessor for"); return null; }
setUpControlGroup
identifier_name
shared.ts
import {ListWrapper, StringMapWrapper} from 'angular2/src/facade/collection'; import {isBlank, isPresent, looseIdentical, hasConstructor} from 'angular2/src/facade/lang'; import {BaseException, WrappedException} from 'angular2/src/facade/exceptions'; import {ControlContainer} from './control_container'; import {NgControl} from './ng_control'; import {AbstractControlDirective} from './abstract_control_directive'; import {NgControlGroup} from './ng_control_group'; import {Control, ControlGroup} from '../model'; import {Validators} from '../validators'; import {ControlValueAccessor} from './control_value_accessor'; import {DefaultValueAccessor} from './default_value_accessor'; import {NumberValueAccessor} from './number_value_accessor'; import {CheckboxControlValueAccessor} from './checkbox_value_accessor'; import {SelectControlValueAccessor} from './select_control_value_accessor'; import {RadioControlValueAccessor} from './radio_control_value_accessor'; import {normalizeValidator, normalizeAsyncValidator} from './normalize_validator'; import {ValidatorFn, AsyncValidatorFn} from './validators'; export function controlPath(name: string, parent: ControlContainer): string[] { var p = ListWrapper.clone(parent.path); p.push(name); return p; } export function setUpControl(control: Control, dir: NgControl): void { if (isBlank(control)) _throwError(dir, "Cannot find control"); if (isBlank(dir.valueAccessor)) _throwError(dir, "No value accessor for"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); dir.valueAccessor.writeValue(control.value); // view -> model dir.valueAccessor.registerOnChange((newValue: any) => { dir.viewToModelUpdate(newValue); control.updateValue(newValue, {emitModelToViewChange: false}); control.markAsDirty(); }); // model -> view control.registerOnChange((newValue: any) => dir.valueAccessor.writeValue(newValue)); // touched dir.valueAccessor.registerOnTouched(() => control.markAsTouched()); } export function setUpControlGroup(control: ControlGroup, dir: NgControlGroup) { if (isBlank(control)) _throwError(dir, "Cannot find control"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); } function _throwError(dir: AbstractControlDirective, message: string): void { var path = dir.path.join(" -> "); throw new BaseException(`${message} '${path}'`); } export function composeValidators(validators: /* Array<Validator|Function> */ any[]): ValidatorFn
export function composeAsyncValidators( validators: /* Array<Validator|Function> */ any[]): AsyncValidatorFn { return isPresent(validators) ? Validators.composeAsync(validators.map(normalizeAsyncValidator)) : null; } export function isPropertyUpdated(changes: {[key: string]: any}, viewModel: any): boolean { if (!StringMapWrapper.contains(changes, "model")) return false; var change = changes["model"]; if (change.isFirstChange()) return true; return !looseIdentical(viewModel, change.currentValue); } // TODO: vsavkin remove it once https://github.com/angular/angular/issues/3011 is implemented export function selectValueAccessor(dir: NgControl, valueAccessors: ControlValueAccessor[]): ControlValueAccessor { if (isBlank(valueAccessors)) return null; var defaultAccessor: ControlValueAccessor; var builtinAccessor: ControlValueAccessor; var customAccessor: ControlValueAccessor; valueAccessors.forEach((v: ControlValueAccessor) => { if (hasConstructor(v, DefaultValueAccessor)) { defaultAccessor = v; } else if (hasConstructor(v, CheckboxControlValueAccessor) || hasConstructor(v, NumberValueAccessor) || hasConstructor(v, SelectControlValueAccessor) || hasConstructor(v, RadioControlValueAccessor)) { if (isPresent(builtinAccessor)) _throwError(dir, "More than one built-in value accessor matches"); builtinAccessor = v; } else { if (isPresent(customAccessor)) _throwError(dir, "More than one custom value accessor matches"); customAccessor = v; } }); if (isPresent(customAccessor)) return customAccessor; if (isPresent(builtinAccessor)) return builtinAccessor; if (isPresent(defaultAccessor)) return defaultAccessor; _throwError(dir, "No valid value accessor for"); return null; }
{ return isPresent(validators) ? Validators.compose(validators.map(normalizeValidator)) : null; }
identifier_body
shared.ts
import {ListWrapper, StringMapWrapper} from 'angular2/src/facade/collection'; import {isBlank, isPresent, looseIdentical, hasConstructor} from 'angular2/src/facade/lang'; import {BaseException, WrappedException} from 'angular2/src/facade/exceptions'; import {ControlContainer} from './control_container'; import {NgControl} from './ng_control'; import {AbstractControlDirective} from './abstract_control_directive'; import {NgControlGroup} from './ng_control_group'; import {Control, ControlGroup} from '../model'; import {Validators} from '../validators'; import {ControlValueAccessor} from './control_value_accessor'; import {DefaultValueAccessor} from './default_value_accessor'; import {NumberValueAccessor} from './number_value_accessor'; import {CheckboxControlValueAccessor} from './checkbox_value_accessor'; import {SelectControlValueAccessor} from './select_control_value_accessor'; import {RadioControlValueAccessor} from './radio_control_value_accessor'; import {normalizeValidator, normalizeAsyncValidator} from './normalize_validator'; import {ValidatorFn, AsyncValidatorFn} from './validators'; export function controlPath(name: string, parent: ControlContainer): string[] { var p = ListWrapper.clone(parent.path); p.push(name); return p; } export function setUpControl(control: Control, dir: NgControl): void { if (isBlank(control)) _throwError(dir, "Cannot find control"); if (isBlank(dir.valueAccessor)) _throwError(dir, "No value accessor for"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); dir.valueAccessor.writeValue(control.value); // view -> model dir.valueAccessor.registerOnChange((newValue: any) => { dir.viewToModelUpdate(newValue); control.updateValue(newValue, {emitModelToViewChange: false}); control.markAsDirty(); }); // model -> view control.registerOnChange((newValue: any) => dir.valueAccessor.writeValue(newValue)); // touched dir.valueAccessor.registerOnTouched(() => control.markAsTouched()); } export function setUpControlGroup(control: ControlGroup, dir: NgControlGroup) { if (isBlank(control)) _throwError(dir, "Cannot find control"); control.validator = Validators.compose([control.validator, dir.validator]); control.asyncValidator = Validators.composeAsync([control.asyncValidator, dir.asyncValidator]); } function _throwError(dir: AbstractControlDirective, message: string): void { var path = dir.path.join(" -> "); throw new BaseException(`${message} '${path}'`); } export function composeValidators(validators: /* Array<Validator|Function> */ any[]): ValidatorFn { return isPresent(validators) ? Validators.compose(validators.map(normalizeValidator)) : null; } export function composeAsyncValidators( validators: /* Array<Validator|Function> */ any[]): AsyncValidatorFn { return isPresent(validators) ? Validators.composeAsync(validators.map(normalizeAsyncValidator)) : null; } export function isPropertyUpdated(changes: {[key: string]: any}, viewModel: any): boolean { if (!StringMapWrapper.contains(changes, "model")) return false;
} // TODO: vsavkin remove it once https://github.com/angular/angular/issues/3011 is implemented export function selectValueAccessor(dir: NgControl, valueAccessors: ControlValueAccessor[]): ControlValueAccessor { if (isBlank(valueAccessors)) return null; var defaultAccessor: ControlValueAccessor; var builtinAccessor: ControlValueAccessor; var customAccessor: ControlValueAccessor; valueAccessors.forEach((v: ControlValueAccessor) => { if (hasConstructor(v, DefaultValueAccessor)) { defaultAccessor = v; } else if (hasConstructor(v, CheckboxControlValueAccessor) || hasConstructor(v, NumberValueAccessor) || hasConstructor(v, SelectControlValueAccessor) || hasConstructor(v, RadioControlValueAccessor)) { if (isPresent(builtinAccessor)) _throwError(dir, "More than one built-in value accessor matches"); builtinAccessor = v; } else { if (isPresent(customAccessor)) _throwError(dir, "More than one custom value accessor matches"); customAccessor = v; } }); if (isPresent(customAccessor)) return customAccessor; if (isPresent(builtinAccessor)) return builtinAccessor; if (isPresent(defaultAccessor)) return defaultAccessor; _throwError(dir, "No valid value accessor for"); return null; }
var change = changes["model"]; if (change.isFirstChange()) return true; return !looseIdentical(viewModel, change.currentValue);
random_line_split
preparse_postformat.js
var moment = require('../../moment'), symbolMap = {
'3': '#', '4': '$', '5': '%', '6': '^', '7': '&', '8': '*', '9': '(', '0': ')' }, numberMap = { '!': '1', '@': '2', '#': '3', '$': '4', '%': '5', '^': '6', '&': '7', '*': '8', '(': '9', ')': '0' }, symbolLang = { preparse: function (string) { return string.replace(/[!@#$%\^&*()]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); } }; exports.preparsePostformat = { setUp: function (cb) { moment.locale('symbol', symbolLang); moment.createFromInputFallback = function () { throw new Error('input not handled by moment'); }; cb(); }, tearDown: function (cb) { moment.locale('en-gb'); cb(); }, 'transform': function (test) { test.expect(3); test.equal(moment.utc('@)!@-)*-@&', 'YYYY-MM-DD').unix(), 1346025600, 'preparse string + format'); test.equal(moment.utc('@)!@-)*-@&').unix(), 1346025600, 'preparse ISO8601 string'); test.equal(moment.unix(1346025600).utc().format('YYYY-MM-DD'), '@)!@-)*-@&', 'postformat'); test.done(); }, 'transform from': function (test) { test.expect(3); var start = moment([2007, 1, 28]); test.equal(start.from(moment([2007, 1, 28]).add({s: 90}), true), '@ minutes', 'postformat should work on moment.fn.from'); test.equal(moment().add(6, 'd').fromNow(true), '^ days', 'postformat should work on moment.fn.fromNow'); test.equal(moment.duration(10, 'h').humanize(), '!) hours', 'postformat should work on moment.duration.fn.humanize'); test.done(); }, 'calendar day' : function (test) { test.expect(6); var a = moment().hours(2).minutes(0).seconds(0); test.equal(moment(a).calendar(), 'Today at @:)) AM', 'today at the same time'); test.equal(moment(a).add({m: 25}).calendar(), 'Today at @:@% AM', 'Now plus 25 min'); test.equal(moment(a).add({h: 1}).calendar(), 'Today at #:)) AM', 'Now plus 1 hour'); test.equal(moment(a).add({d: 1}).calendar(), 'Tomorrow at @:)) AM', 'tomorrow at the same time'); test.equal(moment(a).subtract({h: 1}).calendar(), 'Today at !:)) AM', 'Now minus 1 hour'); test.equal(moment(a).subtract({d: 1}).calendar(), 'Yesterday at @:)) AM', 'yesterday at the same time'); test.done(); } };
'1': '!', '2': '@',
random_line_split
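Stripped of moment itself, the preparse/postformat hooks in the locale above are just inverse string transforms between digits and shifted symbols. A self-contained TypeScript sketch of the two mappings (this illustrates only the transform, not the moment locale API):

const symbolMap: Record<string, string> = {
  '1': '!', '2': '@', '3': '#', '4': '$', '5': '%',
  '6': '^', '7': '&', '8': '*', '9': '(', '0': ')',
};
// Invert symbolMap to get the parse direction back to digits.
const numberMap: Record<string, string> = Object.fromEntries(
  Object.entries(symbolMap).map(([digit, symbol]) => [symbol, digit]),
);

// postformat: digits -> symbols, applied to formatted output.
const postformat = (s: string) => s.replace(/\d/g, d => symbolMap[d]);
// preparse: symbols -> digits, applied to input before parsing.
const preparse = (s: string) => s.replace(/[!@#$%^&*()]/g, c => numberMap[c]);

console.log(postformat('2012-08-27')); // "@)!@-)*-@&"
console.log(preparse('@)!@-)*-@&'));   // "2012-08-27"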
locale_nl.js
if (window.midgardCreate === undefined) { window.midgardCreate = {}; } if (window.midgardCreate.locale === undefined)
window.midgardCreate.locale.nl = { // Session-state buttons for the main toolbar 'Save': 'Opslaan', 'Saving': 'Bezig met opslaan', 'Cancel': 'Annuleren', 'Edit': 'Bewerken', // Storage status messages 'localModification': 'Items "<%= label %>" op de pagina heeft lokale wijzigingen', 'localModifications': '<%= number %> items op de pagina hebben lokale wijzigingen', 'Restore': 'Herstellen', 'Ignore': 'Negeren', 'saveSuccess': 'Item "<%= label %>" succesvol opgeslagen', 'saveSuccessMultiple': '<%= number %> items succesvol opgeslagen', 'saveError': 'Fout opgetreden bij het opslaan<br /><%= error %>', // Tagging 'Item tags': 'Item tags', 'Suggested tags': 'Tag suggesties', 'Tags': 'Tags', 'add a tag': 'tag toevoegen', // Collection widgets 'Add': 'Toevoegen', 'Choose type to add': 'Kies type om toe te voegen' };
{ window.midgardCreate.locale = {}; }
conditional_block
locale_nl.js
if (window.midgardCreate === undefined) { window.midgardCreate = {}; } if (window.midgardCreate.locale === undefined) { window.midgardCreate.locale = {}; } window.midgardCreate.locale.nl = { // Session-state buttons for the main toolbar 'Save': 'Opslaan', 'Saving': 'Bezig met opslaan', 'Cancel': 'Annuleren', 'Edit': 'Bewerken', // Storage status messages 'localModification': 'Items "<%= label %>" op de pagina heeft lokale wijzigingen', 'localModifications': '<%= number %> items op de pagina hebben lokale wijzigingen', 'Restore': 'Herstellen', 'Ignore': 'Negeren', 'saveSuccess': 'Item "<%= label %>" succesvol opgeslagen', 'saveSuccessMultiple': '<%= number %> items succesvol opgeslagen', 'saveError': 'Fout opgetreden bij het opslaan<br /><%= error %>', // Tagging 'Item tags': 'Item tags', 'Suggested tags': 'Tag suggesties', 'Tags': 'Tags', 'add a tag': 'tag toevoegen',
// Collection widgets 'Add': 'Toevoegen', 'Choose type to add': 'Kies type om toe te voegen' };
random_line_split
basemodule.js
// Copyright 2008 The Closure Library Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Defines the base class for a module. This is used to allow the * code to be modularized, giving the benefits of lazy loading and loading on * demand. * */ goog.provide('goog.module.BaseModule'); goog.require('goog.Disposable');
* A basic module object that represents a module of Javascript code that can * be dynamically loaded. * * @constructor * @extends {goog.Disposable} */ goog.module.BaseModule = function() { goog.Disposable.call(this); }; goog.inherits(goog.module.BaseModule, goog.Disposable); /** * Performs any load-time initialization that the module requires. * @param {Object} context The module context. */ goog.module.BaseModule.prototype.initialize = function(context) {};
/**
random_line_split
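goog.module.BaseModule above is essentially a disposable base class with a load-time initialize(context) hook that lazily loaded modules override. A rough TypeScript analogue of that shape (an illustration only, not Closure Library code; goog.Disposable is approximated here by a dispose() method):

// Disposable base class for dynamically loaded modules.
abstract class BaseModule {
  private disposed = false;

  /** Performs any load-time initialization that the module requires. */
  initialize(context: object): void {}

  /** Releases resources held by the module. */
  dispose(): void {
    this.disposed = true;
  }

  isDisposed(): boolean {
    return this.disposed;
  }
}

// A concrete module that is only constructed once its code is loaded on demand.
class SettingsModule extends BaseModule {
  initialize(context: object): void {
    console.log('settings module initialized with', context);
  }
}

const mod = new SettingsModule();
mod.initialize({ user: 'demo' });
mod.dispose();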
el.js
/* * CKFinder * ======== * http://ckfinder.com * Copyright (C) 2007-2011, CKSource - Frederico Knabben. All rights reserved. * * The software, this file and its contents are subject to the CKFinder * License. Please read the license.txt file before using, installing, copying, * modifying or distribute this file or part of its contents. The contents of * this file is part of the Source Code of CKFinder. * */ /** * @fileOverview */ /** * Constains the dictionary of language entries. * @namespace */ CKFinder.lang['el'] = { appTitle : 'CKFinder', // MISSING // Common messages and labels. common : { // Put the voice-only part of the label in the span. unavailable : '%1<span class="cke_accessibility">, unavailable</span>', // MISSING confirmCancel : 'Some of the options have been changed. Are you sure to close the dialog?', // MISSING ok : 'OK', // MISSING cancel : 'Cancel', // MISSING confirmationTitle : 'Confirmation', // MISSING messageTitle : 'Information', // MISSING inputTitle : 'Question', // MISSING undo : 'Undo', // MISSING redo : 'Redo', // MISSING skip : 'Skip', // MISSING skipAll : 'Skip all', // MISSING makeDecision : 'What action should be taken?', // MISSING rememberDecision: 'Remember my decision' // MISSING }, dir : 'ltr', // MISSING HelpLang : 'en', LangCode : 'el', // Date Format // d : Day // dd : Day (padding zero) // m : Month // mm : Month (padding zero) // yy : Year (two digits) // yyyy : Year (four digits) // h : Hour (12 hour clock) // hh : Hour (12 hour clock, padding zero) // H : Hour (24 hour clock) // HH : Hour (24 hour clock, padding zero) // M : Minute // MM : Minute (padding zero) // a : Firt char of AM/PM // aa : AM/PM DateTime : 'dd/mm/yyyy HH:MM', DateAmPm : ['ΜΜ', 'ΠΜ'], // Folders FoldersTitle : 'Φάκελοι', FolderLoading : 'Φόρτωση...', FolderNew : 'Παρακαλούμε πληκτρολογήστε την ονομασία του νέου φακέλου: ', FolderRename : 'Παρακαλούμε πληκτρολογήστε την νέα ονομασία του φακέλου: ', FolderDelete : 'Είστε σίγουροι ότι θέλετε να διαγράψετε το φάκελο "%1";', FolderRenaming : ' (Μετονομασία...)', FolderDeleting : ' (Διαγραφή...)', // Files FileRename : 'Παρακαλούμε πληκτρολογήστε την νέα ονομασία του αρχείου: ', FileRenameExt : 'Είστε σίγουροι ότι θέλετε να αλλάξετε την επέκταση του αρχείου; Μετά από αυτή την ενέργεια το αρχείο μπορεί να μην μπορεί να χρησιμοποιηθεί', FileRenaming : 'Μετονομασία...', FileDelete : 'Είστε σίγουροι ότι θέλετε να διαγράψετε το αρχείο "%1"?', FilesLoading : 'Loading...', // MISSING FilesEmpty : 'Empty folder', // MISSING FilesMoved : 'File %1 moved into %2:%3', // MISSING FilesCopied : 'File %1 copied into %2:%3', // MISSING // Basket BasketFolder : 'Basket', // MISSING BasketClear : 'Clear Basket', // MISSING BasketRemove : 'Remove from basket', // MISSING BasketOpenFolder : 'Open parent folder', // MISSING BasketTruncateConfirm : 'Do you really want to remove all files from the basket?', // MISSING BasketRemoveConfirm : 'Do you really want to remove the file "%1" from the basket?', // MISSING BasketEmpty : 'No files in the basket, drag\'n\'drop some.', // MISSING BasketCopyFilesHere : 'Copy Files from Basket', // MISSING BasketMoveFilesHere : 'Move Files from Basket', // MISSING BasketPasteErrorOther : 'File %s error: %e', // MISSING BasketPasteMoveSuccess : 'The following files were moved: %s', // MISSING BasketPasteCopySuccess : 'The following files were copied: %s', // MISSING // Toolbar Buttons (some used elsewhere) Upload : 'Μεταφόρτωση', UploadTip : 'Μεταφόρτωση Νέου Αρχείου', Refresh : 'Ανανέωση', Settings : 'Ρυθμίσεις', 
Help : 'Βοήθεια', HelpTip : 'Βοήθεια', // Context Menus Select : 'Επιλογή', SelectThumbnail : 'Επιλογή Μικρογραφίας', View : 'Προβολή', Download : 'Λήψη Αρχείου', NewSubFolder : 'Νέος Υποφάκελος', Rename : 'Μετονομασία', Delete : 'Διαγραφή', CopyDragDrop : 'Copy file here', // MISSING MoveDragDrop : 'Move file here', // MISSING // Dialogs RenameDlgTitle : 'Rename', // MISSING NewNameDlgTitle : 'New name', // MISSING FileExistsDlgTitle : 'File already exists', // MISSING SysErrorDlgTitle : 'System error', // MISSING FileOverwrite : 'Overwrite', // MISSING FileAutorename : 'Auto-rename', // MISSING // Generic OkBtn : 'OK', CancelBtn : 'Ακύρωση', CloseBtn : 'Κλείσιμο', // Upload Panel UploadTitle : 'Μεταφόρτωση Νέου Αρχείου', UploadSelectLbl : 'επιλέξτε το αρχείο που θέλετε να μεταφερθεί κάνοντας κλίκ στο κουμπί', UploadProgressLbl : '(Η μεταφόρτωση εκτελείται, παρακαλούμε περιμένετε...)', UploadBtn : 'Μεταφόρτωση Επιλεγμένου Αρχείου', UploadBtnCancel : 'Cancel', // MISSING UploadNoFileMsg : 'Παρακαλούμε επιλέξτε ένα αρχείο από τον υπολογιστή σας', UploadNoFolder : 'Please select folder before uploading.', // MISSING UploadNoPerms : 'File upload not allowed.', // MISSING UploadUnknError : 'Error sending the file.', // MISSING UploadExtIncorrect : 'File extension not allowed in this folder.', // MISSING // Settings Panel SetTitle : 'Ρυθμίσεις', SetView : 'Προβολή:', SetViewThumb : 'Μικρογραφίες', SetViewList : 'Λίστα', SetDisplay : 'Εμφάνιση:', SetDisplayName : 'Όνομα Αρχείου', SetDisplayDate : 'Ημερομηνία', SetDisplaySize : 'Μέγεθος Αρχείου', SetSort : 'Ταξινόμηση:', SetSortName : 'βάσει Όνοματος Αρχείου', SetSortDate : 'βάσει Ημερομήνιας', SetSortSize : 'βάσει Μεγέθους', // Status Bar FilesCountEmpty : '<Κενός Φάκελος>', FilesCountOne : '1 αρχείο', FilesCountMany : '%1 αρχεία', // Size and Speed Kb : '%1 kB', KbPerSecond : '%1 kB/s', // Connector Error Messages. ErrorUnknown : 'Η ενέργεια δεν ήταν δυνατόν να εκτελεστεί. (Σφάλμα %1)', Errors : { 10 : 'Λανθασμένη Εντολή.', 11 : 'Το resource type δεν ήταν δυνατόν να προσδιορίστεί.', 12 : 'Το resource type δεν είναι έγκυρο.', 102 : 'Το όνομα αρχείου ή φακέλου δεν είναι έγκυρο.', 103 : 'Δεν ήταν δυνατή η εκτέλεση της ενέργειας λόγω έλλειψης δικαιωμάτων ασφαλείας.', 104 : 'Δεν ήταν δυνατή η εκτέλεση της ενέργειας λόγω περιορισμών του συστήματος αρχείων.', 105 : 'Λανθασμένη Επέκταση Αρχείου.', 109 : 'Λανθασμένη Ενέργεια.',
201 : 'Ένα αρχείο με την ίδια ονομασία υπάρχει ήδη. Το μεταφορτωμένο αρχείο μετονομάστηκε σε "%1"', 202 : 'Λανθασμένο Αρχείο', 203 : 'Λανθασμένο Αρχείο. Το μέγεθος του αρχείου είναι πολύ μεγάλο.', 204 : 'Το μεταφορτωμένο αρχείο είναι χαλασμένο.', 205 : 'Δεν υπάρχει προσωρινός φάκελος για να χρησιμοποιηθεί για τις μεταφορτώσεις των αρχείων.', 206 : 'Η μεταφόρτωση ακυρώθηκε για λόγους ασφαλείας. Το αρχείο περιέχει δεδομένα μορφής HTML.', 207 : 'Το μεταφορτωμένο αρχείο μετονομάστηκε σε "%1"', 300 : 'Moving file(s) failed.', // MISSING 301 : 'Copying file(s) failed.', // MISSING 500 : 'Ο πλοηγός αρχείων έχει απενεργοποιηθεί για λόγους ασφαλείας. Παρακαλούμε επικοινωνήστε με τον διαχειριστή της ιστοσελίδας και ελέγξτε το αρχείο ρυθμίσεων του πλοηγού (CKFinder).', 501 : 'Η υποστήριξη των μικρογραφιών έχει απενεργοποιηθεί.' }, // Other Error Messages. ErrorMsg : { FileEmpty : 'Η ονομασία του αρχείου δεν μπορεί να είναι κενή', FileExists : 'File %s already exists', // MISSING FolderEmpty : 'Η ονομασία του φακέλου δεν μπορεί να είναι κενή', FileInvChar : 'Η ονομασία του αρχείου δεν μπορεί να περιέχει τους ακόλουθους χαρακτήρες: \n\\ / : * ? " < > |', FolderInvChar : 'Η ονομασία του φακέλου δεν μπορεί να περιέχει τους ακόλουθους χαρακτήρες: \n\\ / : * ? " < > |', PopupBlockView : 'Δεν ήταν εφικτό να ανοίξει το αρχείο σε νέο παράθυρο. Παρακαλώ, ελέγξτε τις ρυθμίσεις τους πλοηγού σας και απενεργοποιήστε όλους τους popup blockers για αυτή την ιστοσελίδα.' }, // Imageresize plugin Imageresize : { dialogTitle : 'Resize %s', // MISSING sizeTooBig : 'Cannot set image height or width to a value bigger than the original size (%size).', // MISSING resizeSuccess : 'Image resized successfully.', // MISSING thumbnailNew : 'Create new thumbnail', // MISSING thumbnailSmall : 'Small (%s)', // MISSING thumbnailMedium : 'Medium (%s)', // MISSING thumbnailLarge : 'Large (%s)', // MISSING newSize : 'Set new size', // MISSING width : 'Width', // MISSING height : 'Height', // MISSING invalidHeight : 'Invalid height.', // MISSING invalidWidth : 'Invalid width.', // MISSING invalidName : 'Invalid file name.', // MISSING newImage : 'Create new image', // MISSING noExtensionChange : 'The file extension cannot be changed.', // MISSING imageSmall : 'Source image is too small', // MISSING contextMenuName : 'Resize' // MISSING }, // Fileeditor plugin Fileeditor : { save : 'Save', // MISSING fileOpenError : 'Unable to open file.', // MISSING fileSaveSuccess : 'File saved successfully.', // MISSING contextMenuName : 'Edit', // MISSING loadingFile : 'Loading file, please wait...' // MISSING } };
110 : 'Άγνωστο Λάθος.', 115 : 'Το αρχείο ή φάκελος υπάρχει ήδη.', 116 : 'Ο φάκελος δεν βρέθηκε. Παρακαλούμε ανανεώστε τη σελίδα και προσπαθήστε ξανά.', 117 : 'Το αρχείο δεν βρέθηκε. Παρακαλούμε ανανεώστε τη σελίδα και προσπαθήστε ξανά.', 118 : 'Source and target paths are equal.', // MISSING
random_line_split
error.rs
use std::io; use std::fmt; use std::error::Error; /// Result type for using with [`EmuleadError`]. pub type EmuleadResult<T> = Result<T, EmuleadError>; /// Error type using for the project errors. #[derive(Debug)] pub enum EmuleadError { /// IO Error Io(io::Error), /// Rotate bytes error used in [`network::rotate_bytes_right`] RotateBytes(usize) } impl fmt::Display for EmuleadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &EmuleadError::Io(ref err) => write!(f, "IO error: {}", err), err => write!(f, "Error: {}", err.description()) } } } impl Error for EmuleadError { fn description(&self) -> &str { match *self { EmuleadError::Io(ref err) => err.description(), EmuleadError::RotateBytes(_) => "Rotate shift must be in 0-8 bits." } } fn cause(&self) -> Option<&Error> { match *self { EmuleadError::Io(ref err) => Some(err), _ => None } } } impl From<io::Error> for EmuleadError {
} }
fn from(err: io::Error) -> EmuleadError { EmuleadError::Io(err)
random_line_split
error.rs
use std::io; use std::fmt; use std::error::Error; /// Result type for using with [`EmuleadError`]. pub type EmuleadResult<T> = Result<T, EmuleadError>; /// Error type using for the project errors. #[derive(Debug)] pub enum EmuleadError { /// IO Error Io(io::Error), /// Rotate bytes error used in [`network::rotate_bytes_right`] RotateBytes(usize) } impl fmt::Display for EmuleadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &EmuleadError::Io(ref err) => write!(f, "IO error: {}", err), err => write!(f, "Error: {}", err.description()) } } } impl Error for EmuleadError { fn description(&self) -> &str { match *self { EmuleadError::Io(ref err) => err.description(), EmuleadError::RotateBytes(_) => "Rotate shift must be in 0-8 bits." } } fn
(&self) -> Option<&Error> { match *self { EmuleadError::Io(ref err) => Some(err), _ => None } } } impl From<io::Error> for EmuleadError { fn from(err: io::Error) -> EmuleadError { EmuleadError::Io(err) } }
cause
identifier_name
error.rs
use std::io; use std::fmt; use std::error::Error; /// Result type for using with [`EmuleadError`]. pub type EmuleadResult<T> = Result<T, EmuleadError>; /// Error type using for the project errors. #[derive(Debug)] pub enum EmuleadError { /// IO Error Io(io::Error), /// Rotate bytes error used in [`network::rotate_bytes_right`] RotateBytes(usize) } impl fmt::Display for EmuleadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &EmuleadError::Io(ref err) => write!(f, "IO error: {}", err), err => write!(f, "Error: {}", err.description()) } } } impl Error for EmuleadError { fn description(&self) -> &str { match *self { EmuleadError::Io(ref err) => err.description(), EmuleadError::RotateBytes(_) => "Rotate shift must be in 0-8 bits." } } fn cause(&self) -> Option<&Error>
} impl From<io::Error> for EmuleadError { fn from(err: io::Error) -> EmuleadError { EmuleadError::Io(err) } }
{ match *self { EmuleadError::Io(ref err) => Some(err), _ => None } }
identifier_body
crawler_wikipedia.py
#encoding=utf-8 #------------------------------------------------------------------ # File Name: crawler_wikipedia_v0.py # Author: yy # Mail: [email protected] # Date: 2014年02月12日 星期三 15时15分24秒 #------------------------------------------------------------------- import time import sys import string import urllib2 import re import types from bs4 import BeautifulSoup import xml.etree.cElementTree as ET class crawler_wikipedia: # the start url: startUrl = u'' # the prefix of wikipedia api string apiPrefix = u'http://zh.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&pageids=' # the surfix of wikipedia api string apiSurfix = u'&format=xml' # the name of mean file MeanFileName = r'wiki.txt' # the name of error pageids list file ErrorListFileName = r'wiki_error.txt' #------------------------------------------------------------ # function: get_content_helper(self,apistr) # description: deduce the page type by content string # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_helper(self,apistr): return u'tset the function.' #------------------------------------------------------------ # function: get_content_by_api(self,apistr) # description: get content by wikipedia api # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_by_api(self,apistr): pagecontent = urllib2.urlopen(apistr).read() bs = BeautifulSoup(str(pagecontent)) content = bs.find('page') if None == content: print apistr + u' is empty!!' return None else: flag_title = False for attribute in content.attrs: if attribute == u'title': flag_title = True if flag_title: print apistr + u' has content!!' contentStr = self.get_content_helper(apistr) return contentStr else: return None #------------------------------------------------------------ # #------------------------------------------------------------ def main(self): #change the default code type of sys reload(sys) sys.setdefaultencoding('utf-8') #init the pageid count = 121213#a exsit word #get the handle of output file outputfile = open(self.__class__.MeanFileName,'a+') #write the working time into file beginStr = 'begin time:\n' + time.asctime() + u'\n' outputfile.write(beginStr) #while(count < 2): # #generate url # countstr = str(count) # currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix # #test if have an exception # req = urllib2.Request(currentApiStr) # try: # urllib2.urlopen(req) # except urllib2.URLError,e: # count += 1 # print e.reason # continue # #get content by apistr # content = self.get_content_by_api(currentApiStr) # print currentApiStr # print u' ' # print content # print u'-----------------------------------------------------' # count += 1 # print count countstr = str(count) currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix content = self.get_content_by_api(currentApiStr) print content endStr = 'end time:\n' + time.asctime() + u'\n' outputfile.write(endStr) print currentApiStr print u'the main function is finished!!' outputfile.close() #---------------------------------------------------------------- #
print """ ----------------------------------------------------------------- a crawler on wikipedia ----------------------------------------------------------------- content is in file: wiki.txt the program is working...... """ mycrawler = crawler_wikipedia() mycrawler.main()
# program entrance # #----------------------------------------------------------------
random_line_split
crawler_wikipedia.py
#encoding=utf-8 #------------------------------------------------------------------ # File Name: crawler_wikipedia_v0.py # Author: yy # Mail: [email protected] # Date: 2014年02月12日 星期三 15时15分24秒 #------------------------------------------------------------------- import time import sys import string import urllib2 import re import types from bs4 import BeautifulSoup import xml.etree.cElementTree as ET class crawler_wikipedia: # the start url: startUrl = u'' # the prefix of wikipedia api string apiPrefix = u'http://zh.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&pageids=' # the surfix of wikipedia api string apiSurfix = u'&format=xml' # the name of mean file MeanFileName = r'wiki.txt' # the name of error pageids list file ErrorListFileName = r'wiki_error.txt' #------------------------------------------------------------ # function: get_content_helper(self,apistr) # description: deduce the page type by content string # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_helper(self,apistr): return u'tset the function.' #------------------------------------------------------------ # function: get_content_by_api(self,apistr) # description: get content by wikipedia api # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_by_api
pagecontent = urllib2.urlopen(apistr).read() bs = BeautifulSoup(str(pagecontent)) content = bs.find('page') if None == content: print apistr + u' is empty!!' return None else: flag_title = False for attribute in content.attrs: if attribute == u'title': flag_title = True if flag_title: print apistr + u' has content!!' contentStr = self.get_content_helper(apistr) return contentStr else: return None #------------------------------------------------------------ # #------------------------------------------------------------ def main(self): #change the default code type of sys reload(sys) sys.setdefaultencoding('utf-8') #init the pageid count = 121213#a exsit word #get the handle of output file outputfile = open(self.__class__.MeanFileName,'a+') #write the working time into file beginStr = 'begin time:\n' + time.asctime() + u'\n' outputfile.write(beginStr) #while(count < 2): # #generate url # countstr = str(count) # currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix # #test if have an exception # req = urllib2.Request(currentApiStr) # try: # urllib2.urlopen(req) # except urllib2.URLError,e: # count += 1 # print e.reason # continue # #get content by apistr # content = self.get_content_by_api(currentApiStr) # print currentApiStr # print u' ' # print content # print u'-----------------------------------------------------' # count += 1 # print count countstr = str(count) currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix content = self.get_content_by_api(currentApiStr) print content endStr = 'end time:\n' + time.asctime() + u'\n' outputfile.write(endStr) print currentApiStr print u'the main function is finished!!' outputfile.close() #---------------------------------------------------------------- # # program entrance # #---------------------------------------------------------------- print """ ----------------------------------------------------------------- a crawler on wikipedia ----------------------------------------------------------------- content is in file: wiki.txt the program is working...... """ mycrawler = crawler_wikipedia() mycrawler.main()
(self,apistr):
identifier_name
crawler_wikipedia.py
#encoding=utf-8 #------------------------------------------------------------------ # File Name: crawler_wikipedia_v0.py # Author: yy # Mail: [email protected] # Date: 2014年02月12日 星期三 15时15分24秒 #------------------------------------------------------------------- import time import sys import string import urllib2 import re import types from bs4 import BeautifulSoup import xml.etree.cElementTree as ET class crawler_wikipedia: # the start url: startUrl = u'' # the prefix of wikipedia api string apiPrefix = u'http://zh.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&pageids=' # the surfix of wikipedia api string apiSurfix = u'&format=xml' # the name of mean file MeanFileName = r'wiki.txt' # the name of error pageids list file ErrorListFileName = r'wiki_error.txt' #------------------------------------------------------------ # function: get_content_helper(self,apistr) # description: deduce the page type by content string # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_helper(self,apistr): return u'tset the function.' #------------------------------------------------------------ # function: get_content_by_api(self,apistr) # description: get content by wikipedia api # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_by_api(self,apistr): pagecontent = urllib2.urlopen(apistr).read() bs = BeautifulSoup(str(pagecontent)) content = bs.find('page') if None == content: print apistr + u'
flag_title = False for attribute in content.attrs: if attribute == u'title': flag_title = True if flag_title: print apistr + u' has content!!' contentStr = self.get_content_helper(apistr) return contentStr else: return None #------------------------------------------------------------ # #------------------------------------------------------------ def main(self): #change the default code type of sys reload(sys) sys.setdefaultencoding('utf-8') #init the pageid count = 121213#a exsit word #get the handle of output file outputfile = open(self.__class__.MeanFileName,'a+') #write the working time into file beginStr = 'begin time:\n' + time.asctime() + u'\n' outputfile.write(beginStr) #while(count < 2): # #generate url # countstr = str(count) # currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix # #test if have an exception # req = urllib2.Request(currentApiStr) # try: # urllib2.urlopen(req) # except urllib2.URLError,e: # count += 1 # print e.reason # continue # #get content by apistr # content = self.get_content_by_api(currentApiStr) # print currentApiStr # print u' ' # print content # print u'-----------------------------------------------------' # count += 1 # print count countstr = str(count) currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix content = self.get_content_by_api(currentApiStr) print content endStr = 'end time:\n' + time.asctime() + u'\n' outputfile.write(endStr) print currentApiStr print u'the main function is finished!!' outputfile.close() #---------------------------------------------------------------- # # program entrance # #---------------------------------------------------------------- print """ ----------------------------------------------------------------- a crawler on wikipedia ----------------------------------------------------------------- content is in file: wiki.txt the program is working...... """ mycrawler = crawler_wikipedia() mycrawler.main()
is empty!!' return None else:
conditional_block
crawler_wikipedia.py
#encoding=utf-8 #------------------------------------------------------------------ # File Name: crawler_wikipedia_v0.py # Author: yy # Mail: [email protected] # Date: 2014年02月12日 星期三 15时15分24秒 #------------------------------------------------------------------- import time import sys import string import urllib2 import re import types from bs4 import BeautifulSoup import xml.etree.cElementTree as ET class crawler_wikipedia: # the start url: startUrl = u'' # the prefix of wikipedia api string apiPrefix = u'http://zh.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&pageids=' # the surfix of wikipedia api string apiSurfix = u'&format=xml' # the name of mean file MeanFileName = r'wiki.txt' # the name of error pageids list file ErrorListFileName = r'wiki_error.txt' #------------------------------------------------------------ # function: get_content_helper(self,apistr) # description: deduce the page type by content string # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_helper(self,apistr): return u'tset the function.' #------------------------------------------------------------ # function: get_content_by_api(self,apistr) # description: get content by wikipedia api # # parameter: # self: # apistr: string. # # return: # string #------------------------------------------------------------ def get_content_by_api(self,apistr): pagecontent = urllib2.urlopen(apistr).read() bs = BeautifulSoup(str(pagecontent)) content = bs.find('page') if None == content: print apistr + u' is empty!!' return None else: flag_title = False for attribute in content.attrs: if attribute == u'title': flag_title = True if flag_title: print apistr + u' has content!!' contentStr = self.get_content_helper(apistr) return contentStr else: return None #------------------------------------------------------------ # #------------------------------------------------------------ def main(self): #change the default code type of sys reload(sys) sys.
-------------------------------------------------- # # program entrance # #---------------------------------------------------------------- print """ ----------------------------------------------------------------- a crawler on wikipedia ----------------------------------------------------------------- content is in file: wiki.txt the program is working...... """ mycrawler = crawler_wikipedia() mycrawler.main()
setdefaultencoding('utf-8') #init the pageid count = 121213#a exsit word #get the handle of output file outputfile = open(self.__class__.MeanFileName,'a+') #write the working time into file beginStr = 'begin time:\n' + time.asctime() + u'\n' outputfile.write(beginStr) #while(count < 2): # #generate url # countstr = str(count) # currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix # #test if have an exception # req = urllib2.Request(currentApiStr) # try: # urllib2.urlopen(req) # except urllib2.URLError,e: # count += 1 # print e.reason # continue # #get content by apistr # content = self.get_content_by_api(currentApiStr) # print currentApiStr # print u' ' # print content # print u'-----------------------------------------------------' # count += 1 # print count countstr = str(count) currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix content = self.get_content_by_api(currentApiStr) print content endStr = 'end time:\n' + time.asctime() + u'\n' outputfile.write(endStr) print currentApiStr print u'the main function is finished!!' outputfile.close() #--------------
identifier_body
connect.test.js
//@ sourceMappingURL=connect.test.map // Generated by CoffeeScript 1.6.1 (function() { var assert, async, wongo, __hasProp = {}.hasOwnProperty; assert = require('assert');

async = require('async'); wongo = require('../lib/wongo'); describe('Wongo.connect()', function() { it('should connect to the database', function(done) { wongo.connect(process.env.DB_URL); return done(); }); return it('should clear every registered schema', function(done) { var _type, _types; _types = (function() { var _ref, _results; _ref = wongo.schemas; _results = []; for (_type in _ref) { if (!__hasProp.call(_ref, _type)) continue; _results.push(_type); } return _results; })(); return async.each(_types, function(_type, nextInLoop) { return wongo.clear(_type, nextInLoop); }, done); }); }); }).call(this);
random_line_split
account_report_account_balance.py
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class
(osv.osv_memory): _inherit = "account.common.account.report" _name = 'account.balance.report' _description = 'Trial Balance Report' _columns = { 'journal_ids': fields.many2many('account.journal', 'account_balance_report_journal_rel', 'account_id', 'journal_id', 'Journals', required=True), } _defaults = { 'journal_ids': [], } def _print_report(self, cr, uid, ids, data, context=None): data = self.pre_print_report(cr, uid, ids, data, context=context) return {'type': 'ir.actions.report.xml', 'report_name': 'account.account.balance', 'datas': data} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
account_balance_report
identifier_name
account_report_account_balance.py
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class account_balance_report(osv.osv_memory): _inherit = "account.common.account.report" _name = 'account.balance.report' _description = 'Trial Balance Report' _columns = { 'journal_ids': fields.many2many('account.journal', 'account_balance_report_journal_rel', 'account_id', 'journal_id', 'Journals', required=True), } _defaults = { 'journal_ids': [], } def _print_report(self, cr, uid, ids, data, context=None):
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
data = self.pre_print_report(cr, uid, ids, data, context=context) return {'type': 'ir.actions.report.xml', 'report_name': 'account.account.balance', 'datas': data}
identifier_body
account_report_account_balance.py
# -*- coding: utf-8 -*- ############################################################################## #
# # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class account_balance_report(osv.osv_memory): _inherit = "account.common.account.report" _name = 'account.balance.report' _description = 'Trial Balance Report' _columns = { 'journal_ids': fields.many2many('account.journal', 'account_balance_report_journal_rel', 'account_id', 'journal_id', 'Journals', required=True), } _defaults = { 'journal_ids': [], } def _print_report(self, cr, uid, ids, data, context=None): data = self.pre_print_report(cr, uid, ids, data, context=context) return {'type': 'ir.actions.report.xml', 'report_name': 'account.account.balance', 'datas': data} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
random_line_split
base-platform.ts
import { ExecuteInCurrentWindow, Inject, mutation, StatefulService } from 'services/core'; import { EPlatformCallResult, IPlatformState, TPlatform, TPlatformCapability, TStartStreamOptions, TPlatformCapabilityMap, } from './index'; import { StreamingService } from 'services/streaming'; import { UserService } from 'services/user'; import { HostsService } from 'services/hosts'; import electron from 'electron'; import { IFacebookStartStreamOptions } from './facebook'; import { StreamSettingsService } from '../settings/streaming'; const VIEWER_COUNT_UPDATE_INTERVAL = 60 * 1000; /** * Base class for platforms * Keeps shared code for all platforms */ export abstract class BasePlatformService<T extends IPlatformState> extends StatefulService<T> { static initialState: IPlatformState = { streamKey: '', viewersCount: 0, settings: null, isPrepopulated: false, }; @Inject() protected streamingService: StreamingService; @Inject() protected userService: UserService; @Inject() protected hostsService: HostsService; @Inject() protected streamSettingsService: StreamSettingsService; abstract readonly platform: TPlatform; abstract capabilities: Set<TPlatformCapability>; @ExecuteInCurrentWindow() hasCapability<T extends TPlatformCapability>(capability: T): this is TPlatformCapabilityMap[T] { return this.capabilities.has(capability); } get mergeUrl() { const host = this.hostsService.streamlabs; const token = this.userService.apiToken; return `https://${host}/slobs/merge/${token}/${this.platform}_account`; } averageViewers: number; peakViewers: number; private nViewerSamples: number; async afterGoLive(): Promise<void> { this.averageViewers = 0; this.peakViewers = 0; this.nViewerSamples = 0; // update viewers count
if (this.hasCapability('viewerCount')) { const count = await this.fetchViewerCount(); this.nViewerSamples += 1; this.averageViewers = (this.averageViewers * (this.nViewerSamples - 1) + count) / this.nViewerSamples; this.peakViewers = Math.max(this.peakViewers, count); this.SET_VIEWERS_COUNT(count); } // stop updating if streaming has stopped if (this.streamingService.views.isMidStreamMode) { setTimeout(runInterval, VIEWER_COUNT_UPDATE_INTERVAL); } }; if (this.hasCapability('viewerCount')) await runInterval(); } unlink() { // unlink platform and reload auth state // const url = `https://${this.hostsService.streamlabs}/api/v5/slobs/unlink/${this.platform}_account`; // const headers = authorizedHeaders(this.userService.apiToken!); // const request = new Request(url, { headers }); // return fetch(request) // .then(handleResponse) // .then(_ => this.userService.updateLinkedPlatforms()); electron.remote.shell.openExternal( `https://${this.hostsService.streamlabs}/dashboard#/settings/account-settings`, ); } protected syncSettingsWithLocalStorage() { // save settings to the local storage const savedSettings: IFacebookStartStreamOptions = JSON.parse( localStorage.getItem(this.serviceName) as string, ); if (savedSettings) this.UPDATE_STREAM_SETTINGS(savedSettings); this.store.watch( () => this.state.settings, () => { localStorage.setItem(this.serviceName, JSON.stringify(this.state.settings)); }, { deep: true }, ); } async validatePlatform() { return EPlatformCallResult.Success; } fetchUserInfo() { return Promise.resolve({}); } @mutation() protected SET_VIEWERS_COUNT(viewers: number) { this.state.viewersCount = viewers; } @mutation() protected SET_STREAM_KEY(key: string) { this.state.streamKey = key; } @mutation() protected SET_PREPOPULATED(isPrepopulated: boolean) { this.state.isPrepopulated = isPrepopulated; } @mutation() protected SET_STREAM_SETTINGS(settings: TStartStreamOptions) { this.state.settings = settings; } @mutation() protected UPDATE_STREAM_SETTINGS(settingsPatch: Partial<TStartStreamOptions>) { this.state.settings = { ...this.state.settings, ...settingsPatch }; } }
const runInterval = async () => {
random_line_split
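afterGoLive above tracks viewer statistics with a self-rescheduling setTimeout loop: each tick fetches the current count, updates an incremental average and the peak, and only re-arms while the stream is still live. A standalone TypeScript sketch of that polling pattern (fetchViewerCount and isLive here are placeholders, not the real Streamlabs service API):

// Placeholder for the real per-platform API call.
async function fetchViewerCount(): Promise<number> {
  return Math.floor(Math.random() * 100);
}

function startViewerPolling(isLive: () => boolean, intervalMs = 60_000): void {
  let average = 0;
  let peak = 0;
  let samples = 0;

  const tick = async () => {
    const count = await fetchViewerCount();
    samples += 1;
    // Incremental mean, so no need to keep every sample around.
    average = (average * (samples - 1) + count) / samples;
    peak = Math.max(peak, count);
    console.log({ count, average, peak });

    // Re-arm the timer only while the stream is still live.
    if (isLive()) setTimeout(tick, intervalMs);
  };

  void tick();
}

// Usage: poll every 5 seconds and stop roughly 20 seconds later.
let live = true;
setTimeout(() => { live = false; }, 20_000);
startViewerPolling(() => live, 5_000);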
base-platform.ts
import { ExecuteInCurrentWindow, Inject, mutation, StatefulService } from 'services/core'; import { EPlatformCallResult, IPlatformState, TPlatform, TPlatformCapability, TStartStreamOptions, TPlatformCapabilityMap, } from './index'; import { StreamingService } from 'services/streaming'; import { UserService } from 'services/user'; import { HostsService } from 'services/hosts'; import electron from 'electron'; import { IFacebookStartStreamOptions } from './facebook'; import { StreamSettingsService } from '../settings/streaming'; const VIEWER_COUNT_UPDATE_INTERVAL = 60 * 1000; /** * Base class for platforms * Keeps shared code for all platforms */ export abstract class BasePlatformService<T extends IPlatformState> extends StatefulService<T> { static initialState: IPlatformState = { streamKey: '', viewersCount: 0, settings: null, isPrepopulated: false, }; @Inject() protected streamingService: StreamingService; @Inject() protected userService: UserService; @Inject() protected hostsService: HostsService; @Inject() protected streamSettingsService: StreamSettingsService; abstract readonly platform: TPlatform; abstract capabilities: Set<TPlatformCapability>; @ExecuteInCurrentWindow() hasCapability<T extends TPlatformCapability>(capability: T): this is TPlatformCapabilityMap[T] { return this.capabilities.has(capability); } get mergeUrl() { const host = this.hostsService.streamlabs; const token = this.userService.apiToken; return `https://${host}/slobs/merge/${token}/${this.platform}_account`; } averageViewers: number; peakViewers: number; private nViewerSamples: number; async afterGoLive(): Promise<void> { this.averageViewers = 0; this.peakViewers = 0; this.nViewerSamples = 0; // update viewers count const runInterval = async () => { if (this.hasCapability('viewerCount')) { const count = await this.fetchViewerCount(); this.nViewerSamples += 1; this.averageViewers = (this.averageViewers * (this.nViewerSamples - 1) + count) / this.nViewerSamples; this.peakViewers = Math.max(this.peakViewers, count); this.SET_VIEWERS_COUNT(count); } // stop updating if streaming has stopped if (this.streamingService.views.isMidStreamMode) { setTimeout(runInterval, VIEWER_COUNT_UPDATE_INTERVAL); } }; if (this.hasCapability('viewerCount')) await runInterval(); } unlink() { // unlink platform and reload auth state // const url = `https://${this.hostsService.streamlabs}/api/v5/slobs/unlink/${this.platform}_account`; // const headers = authorizedHeaders(this.userService.apiToken!); // const request = new Request(url, { headers }); // return fetch(request) // .then(handleResponse) // .then(_ => this.userService.updateLinkedPlatforms()); electron.remote.shell.openExternal( `https://${this.hostsService.streamlabs}/dashboard#/settings/account-settings`, ); } protected syncSettingsWithLocalStorage() { // save settings to the local storage const savedSettings: IFacebookStartStreamOptions = JSON.parse( localStorage.getItem(this.serviceName) as string, ); if (savedSettings) this.UPDATE_STREAM_SETTINGS(savedSettings); this.store.watch( () => this.state.settings, () => { localStorage.setItem(this.serviceName, JSON.stringify(this.state.settings)); }, { deep: true }, ); } async validatePlatform() { return EPlatformCallResult.Success; }
() { return Promise.resolve({}); } @mutation() protected SET_VIEWERS_COUNT(viewers: number) { this.state.viewersCount = viewers; } @mutation() protected SET_STREAM_KEY(key: string) { this.state.streamKey = key; } @mutation() protected SET_PREPOPULATED(isPrepopulated: boolean) { this.state.isPrepopulated = isPrepopulated; } @mutation() protected SET_STREAM_SETTINGS(settings: TStartStreamOptions) { this.state.settings = settings; } @mutation() protected UPDATE_STREAM_SETTINGS(settingsPatch: Partial<TStartStreamOptions>) { this.state.settings = { ...this.state.settings, ...settingsPatch }; } }
fetchUserInfo
identifier_name
base-platform.ts
import { ExecuteInCurrentWindow, Inject, mutation, StatefulService } from 'services/core'; import { EPlatformCallResult, IPlatformState, TPlatform, TPlatformCapability, TStartStreamOptions, TPlatformCapabilityMap, } from './index'; import { StreamingService } from 'services/streaming'; import { UserService } from 'services/user'; import { HostsService } from 'services/hosts'; import electron from 'electron'; import { IFacebookStartStreamOptions } from './facebook'; import { StreamSettingsService } from '../settings/streaming'; const VIEWER_COUNT_UPDATE_INTERVAL = 60 * 1000; /** * Base class for platforms * Keeps shared code for all platforms */ export abstract class BasePlatformService<T extends IPlatformState> extends StatefulService<T> { static initialState: IPlatformState = { streamKey: '', viewersCount: 0, settings: null, isPrepopulated: false, }; @Inject() protected streamingService: StreamingService; @Inject() protected userService: UserService; @Inject() protected hostsService: HostsService; @Inject() protected streamSettingsService: StreamSettingsService; abstract readonly platform: TPlatform; abstract capabilities: Set<TPlatformCapability>; @ExecuteInCurrentWindow() hasCapability<T extends TPlatformCapability>(capability: T): this is TPlatformCapabilityMap[T] { return this.capabilities.has(capability); } get mergeUrl() { const host = this.hostsService.streamlabs; const token = this.userService.apiToken; return `https://${host}/slobs/merge/${token}/${this.platform}_account`; } averageViewers: number; peakViewers: number; private nViewerSamples: number; async afterGoLive(): Promise<void> { this.averageViewers = 0; this.peakViewers = 0; this.nViewerSamples = 0; // update viewers count const runInterval = async () => { if (this.hasCapability('viewerCount')) { const count = await this.fetchViewerCount(); this.nViewerSamples += 1; this.averageViewers = (this.averageViewers * (this.nViewerSamples - 1) + count) / this.nViewerSamples; this.peakViewers = Math.max(this.peakViewers, count); this.SET_VIEWERS_COUNT(count); } // stop updating if streaming has stopped if (this.streamingService.views.isMidStreamMode) { setTimeout(runInterval, VIEWER_COUNT_UPDATE_INTERVAL); } }; if (this.hasCapability('viewerCount')) await runInterval(); } unlink() { // unlink platform and reload auth state // const url = `https://${this.hostsService.streamlabs}/api/v5/slobs/unlink/${this.platform}_account`; // const headers = authorizedHeaders(this.userService.apiToken!); // const request = new Request(url, { headers }); // return fetch(request) // .then(handleResponse) // .then(_ => this.userService.updateLinkedPlatforms()); electron.remote.shell.openExternal( `https://${this.hostsService.streamlabs}/dashboard#/settings/account-settings`, ); } protected syncSettingsWithLocalStorage() { // save settings to the local storage const savedSettings: IFacebookStartStreamOptions = JSON.parse( localStorage.getItem(this.serviceName) as string, ); if (savedSettings) this.UPDATE_STREAM_SETTINGS(savedSettings); this.store.watch( () => this.state.settings, () => { localStorage.setItem(this.serviceName, JSON.stringify(this.state.settings)); }, { deep: true }, ); } async validatePlatform() { return EPlatformCallResult.Success; } fetchUserInfo() { return Promise.resolve({}); } @mutation() protected SET_VIEWERS_COUNT(viewers: number) { this.state.viewersCount = viewers; } @mutation() protected SET_STREAM_KEY(key: string) { this.state.streamKey = key; } @mutation() protected SET_PREPOPULATED(isPrepopulated: boolean) { 
this.state.isPrepopulated = isPrepopulated; } @mutation() protected SET_STREAM_SETTINGS(settings: TStartStreamOptions) { this.state.settings = settings; } @mutation() protected UPDATE_STREAM_SETTINGS(settingsPatch: Partial<TStartStreamOptions>)
}
{ this.state.settings = { ...this.state.settings, ...settingsPatch }; }
identifier_body
middlewares.rs
use config::Config; use db; use hyper::header::UserAgent; use hyper::method::Method; use nickel::{Continue, Halt, MediaType, Middleware, MiddlewareResult, Request, Response}; use nickel::status::StatusCode; use plugin::Extensible; use r2d2::{Config as PoolConfig, Pool}; use r2d2_postgres::{PostgresConnectionManager as Manager, TlsMode}; use std::error::Error; use std::io::Write; use std::str; use std::sync::Arc; use super::extensions::{IpAddrExtension, PostgresExtension}; use typemap::Key; use uuid::Uuid; pub struct ConfigMiddleware { config: Arc<Config>, } impl ConfigMiddleware { pub fn new(config: Arc<Config>) -> ConfigMiddleware { ConfigMiddleware { config: config } } } impl Key for ConfigMiddleware { type Value = Arc<Config>; } impl<D> Middleware<D> for ConfigMiddleware { fn
<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, rep: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<ConfigMiddleware>(self.config.clone()); Ok(Continue(rep)) } } pub struct PostgresMiddleware { pub pool: Pool<Manager>, } impl PostgresMiddleware { pub fn new(db_url: &str) -> Result<PostgresMiddleware, Box<Error>> { let manager = Manager::new(db_url, TlsMode::None)?; let pool = Pool::new(PoolConfig::default(), manager)?; Ok(PostgresMiddleware { pool: pool }) } } impl Key for PostgresMiddleware { type Value = Pool<Manager>; } impl<D> Middleware<D> for PostgresMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<PostgresMiddleware>(self.pool.clone()); Ok(Continue(res)) } } pub struct AuthMiddleware; impl<D> Middleware<D> for AuthMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, mut res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { let (is_create_token, is_exists_token, is_api) = req.path_without_query().map(|p| ( p == "/api/tokens", p.starts_with("/api/tokens/"), p.starts_with("/api")) ).unwrap_or((false, false, false)); if (is_create_token && req.origin.method == Method::Post) || (is_exists_token && req.origin.method == Method::Head) || !is_api { Ok(Continue(res)) } else if match req.origin.headers.get_raw("x-auth-token") { Some(header) if header.len() > 0 => { let value = try_with!(res, str::from_utf8(&header[0]).map_err(|err| (StatusCode::BadRequest, err))); let value = try_with!(res, value.parse::<Uuid>().map_err(|err| (StatusCode::BadRequest, err))); let conn = try_with!(res, req.pg_conn()); let ip = req.ip_addr(); let user_agent = req.origin.headers.get::<UserAgent>(); try_with!(res, db::tokens::exists( &*conn, &value, &format!("{}", ip), user_agent.map(|h| &h.0).unwrap_or(&String::new()) ).map_err(|err| (StatusCode::InternalServerError, err))) } _ => false, } { Ok(Continue(res)) } else { res.set(StatusCode::Forbidden); res.set(MediaType::Json); let mut stream = res.start()?; if let Err(err) = stream.write_all(r#"{"data": "Access denied", "success": false}"#.as_bytes()) { stream.bail(format!("[AuthMiddleware] Unable to halt request: {}", err)) } else { Ok(Halt(stream)) } } } }
invoke
identifier_name
middlewares.rs
use config::Config; use db; use hyper::header::UserAgent; use hyper::method::Method; use nickel::{Continue, Halt, MediaType, Middleware, MiddlewareResult, Request, Response}; use nickel::status::StatusCode; use plugin::Extensible; use r2d2::{Config as PoolConfig, Pool}; use r2d2_postgres::{PostgresConnectionManager as Manager, TlsMode}; use std::error::Error; use std::io::Write; use std::str; use std::sync::Arc; use super::extensions::{IpAddrExtension, PostgresExtension}; use typemap::Key; use uuid::Uuid; pub struct ConfigMiddleware { config: Arc<Config>, } impl ConfigMiddleware { pub fn new(config: Arc<Config>) -> ConfigMiddleware { ConfigMiddleware { config: config } } } impl Key for ConfigMiddleware { type Value = Arc<Config>; } impl<D> Middleware<D> for ConfigMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, rep: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<ConfigMiddleware>(self.config.clone()); Ok(Continue(rep)) } } pub struct PostgresMiddleware { pub pool: Pool<Manager>, } impl PostgresMiddleware { pub fn new(db_url: &str) -> Result<PostgresMiddleware, Box<Error>> { let manager = Manager::new(db_url, TlsMode::None)?; let pool = Pool::new(PoolConfig::default(), manager)?; Ok(PostgresMiddleware { pool: pool }) } } impl Key for PostgresMiddleware { type Value = Pool<Manager>; } impl<D> Middleware<D> for PostgresMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<PostgresMiddleware>(self.pool.clone()); Ok(Continue(res)) } } pub struct AuthMiddleware; impl<D> Middleware<D> for AuthMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, mut res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { let (is_create_token, is_exists_token, is_api) = req.path_without_query().map(|p| ( p == "/api/tokens", p.starts_with("/api/tokens/"), p.starts_with("/api")) ).unwrap_or((false, false, false)); if (is_create_token && req.origin.method == Method::Post) || (is_exists_token && req.origin.method == Method::Head) || !is_api { Ok(Continue(res)) } else if match req.origin.headers.get_raw("x-auth-token") { Some(header) if header.len() > 0 => { let value = try_with!(res, str::from_utf8(&header[0]).map_err(|err| (StatusCode::BadRequest, err))); let value = try_with!(res, value.parse::<Uuid>().map_err(|err| (StatusCode::BadRequest, err))); let conn = try_with!(res, req.pg_conn()); let ip = req.ip_addr(); let user_agent = req.origin.headers.get::<UserAgent>(); try_with!(res, db::tokens::exists( &*conn, &value, &format!("{}", ip), user_agent.map(|h| &h.0).unwrap_or(&String::new()) ).map_err(|err| (StatusCode::InternalServerError, err))) } _ => false, } { Ok(Continue(res)) } else
} }
{ res.set(StatusCode::Forbidden); res.set(MediaType::Json); let mut stream = res.start()?; if let Err(err) = stream.write_all(r#"{"data": "Access denied", "success": false}"#.as_bytes()) { stream.bail(format!("[AuthMiddleware] Unable to halt request: {}", err)) } else { Ok(Halt(stream)) } }
conditional_block
middlewares.rs
use config::Config; use db; use hyper::header::UserAgent; use hyper::method::Method; use nickel::{Continue, Halt, MediaType, Middleware, MiddlewareResult, Request, Response}; use nickel::status::StatusCode; use plugin::Extensible; use r2d2::{Config as PoolConfig, Pool}; use r2d2_postgres::{PostgresConnectionManager as Manager, TlsMode}; use std::error::Error; use std::io::Write; use std::str; use std::sync::Arc; use super::extensions::{IpAddrExtension, PostgresExtension}; use typemap::Key; use uuid::Uuid; pub struct ConfigMiddleware { config: Arc<Config>, } impl ConfigMiddleware { pub fn new(config: Arc<Config>) -> ConfigMiddleware
} impl Key for ConfigMiddleware { type Value = Arc<Config>; } impl<D> Middleware<D> for ConfigMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, rep: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<ConfigMiddleware>(self.config.clone()); Ok(Continue(rep)) } } pub struct PostgresMiddleware { pub pool: Pool<Manager>, } impl PostgresMiddleware { pub fn new(db_url: &str) -> Result<PostgresMiddleware, Box<Error>> { let manager = Manager::new(db_url, TlsMode::None)?; let pool = Pool::new(PoolConfig::default(), manager)?; Ok(PostgresMiddleware { pool: pool }) } } impl Key for PostgresMiddleware { type Value = Pool<Manager>; } impl<D> Middleware<D> for PostgresMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<PostgresMiddleware>(self.pool.clone()); Ok(Continue(res)) } } pub struct AuthMiddleware; impl<D> Middleware<D> for AuthMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, mut res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { let (is_create_token, is_exists_token, is_api) = req.path_without_query().map(|p| ( p == "/api/tokens", p.starts_with("/api/tokens/"), p.starts_with("/api")) ).unwrap_or((false, false, false)); if (is_create_token && req.origin.method == Method::Post) || (is_exists_token && req.origin.method == Method::Head) || !is_api { Ok(Continue(res)) } else if match req.origin.headers.get_raw("x-auth-token") { Some(header) if header.len() > 0 => { let value = try_with!(res, str::from_utf8(&header[0]).map_err(|err| (StatusCode::BadRequest, err))); let value = try_with!(res, value.parse::<Uuid>().map_err(|err| (StatusCode::BadRequest, err))); let conn = try_with!(res, req.pg_conn()); let ip = req.ip_addr(); let user_agent = req.origin.headers.get::<UserAgent>(); try_with!(res, db::tokens::exists( &*conn, &value, &format!("{}", ip), user_agent.map(|h| &h.0).unwrap_or(&String::new()) ).map_err(|err| (StatusCode::InternalServerError, err))) } _ => false, } { Ok(Continue(res)) } else { res.set(StatusCode::Forbidden); res.set(MediaType::Json); let mut stream = res.start()?; if let Err(err) = stream.write_all(r#"{"data": "Access denied", "success": false}"#.as_bytes()) { stream.bail(format!("[AuthMiddleware] Unable to halt request: {}", err)) } else { Ok(Halt(stream)) } } } }
{ ConfigMiddleware { config: config } }
identifier_body
middlewares.rs
use config::Config; use db; use hyper::header::UserAgent; use hyper::method::Method; use nickel::{Continue, Halt, MediaType, Middleware, MiddlewareResult, Request, Response}; use nickel::status::StatusCode; use plugin::Extensible; use r2d2::{Config as PoolConfig, Pool}; use r2d2_postgres::{PostgresConnectionManager as Manager, TlsMode}; use std::error::Error; use std::io::Write; use std::str; use std::sync::Arc; use super::extensions::{IpAddrExtension, PostgresExtension}; use typemap::Key; use uuid::Uuid; pub struct ConfigMiddleware { config: Arc<Config>, } impl ConfigMiddleware { pub fn new(config: Arc<Config>) -> ConfigMiddleware { ConfigMiddleware { config: config } } } impl Key for ConfigMiddleware { type Value = Arc<Config>; } impl<D> Middleware<D> for ConfigMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, rep: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<ConfigMiddleware>(self.config.clone()); Ok(Continue(rep)) } } pub struct PostgresMiddleware { pub pool: Pool<Manager>, } impl PostgresMiddleware { pub fn new(db_url: &str) -> Result<PostgresMiddleware, Box<Error>> { let manager = Manager::new(db_url, TlsMode::None)?; let pool = Pool::new(PoolConfig::default(), manager)?; Ok(PostgresMiddleware { pool: pool }) } } impl Key for PostgresMiddleware { type Value = Pool<Manager>; } impl<D> Middleware<D> for PostgresMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { req.extensions_mut().insert::<PostgresMiddleware>(self.pool.clone()); Ok(Continue(res)) } } pub struct AuthMiddleware; impl<D> Middleware<D> for AuthMiddleware { fn invoke<'mw, 'conn>(&self, req: &mut Request<'mw, 'conn, D>, mut res: Response<'mw, D>) -> MiddlewareResult<'mw, D> { let (is_create_token, is_exists_token, is_api) = req.path_without_query().map(|p| ( p == "/api/tokens", p.starts_with("/api/tokens/"), p.starts_with("/api")) ).unwrap_or((false, false, false)); if (is_create_token && req.origin.method == Method::Post) || (is_exists_token && req.origin.method == Method::Head) || !is_api { Ok(Continue(res)) } else if match req.origin.headers.get_raw("x-auth-token") { Some(header) if header.len() > 0 => { let value = try_with!(res, str::from_utf8(&header[0]).map_err(|err| (StatusCode::BadRequest, err))); let value = try_with!(res, value.parse::<Uuid>().map_err(|err| (StatusCode::BadRequest, err))); let conn = try_with!(res, req.pg_conn()); let ip = req.ip_addr(); let user_agent = req.origin.headers.get::<UserAgent>(); try_with!(res, db::tokens::exists( &*conn, &value, &format!("{}", ip), user_agent.map(|h| &h.0).unwrap_or(&String::new()) ).map_err(|err| (StatusCode::InternalServerError, err)))
} { Ok(Continue(res)) } else { res.set(StatusCode::Forbidden); res.set(MediaType::Json); let mut stream = res.start()?; if let Err(err) = stream.write_all(r#"{"data": "Access denied", "success": false}"#.as_bytes()) { stream.bail(format!("[AuthMiddleware] Unable to halt request: {}", err)) } else { Ok(Halt(stream)) } } } }
} _ => false,
random_line_split
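The middlewares.rs rows above show a Nickel AuthMiddleware: token creation and non-API paths pass through, otherwise the x-auth-token header is validated and the request is rejected with a 403 JSON body. A loosely analogous, hedged Python sketch using plain WSGI (simplified: the HEAD-on-/api/tokens/ exemption is dropped, and auth_middleware/is_valid_token are invented names, not the project's API):

import json

def auth_middleware(app, is_valid_token):
    """Wrap a WSGI app; reject /api requests that lack a valid x-auth-token."""
    def wrapped(environ, start_response):
        path = environ.get('PATH_INFO', '')
        method = environ.get('REQUEST_METHOD', 'GET')
        token = environ.get('HTTP_X_AUTH_TOKEN')
        exempt = (not path.startswith('/api')) or (path == '/api/tokens' and method == 'POST')
        if exempt or (token and is_valid_token(token)):
            return app(environ, start_response)
        start_response('403 Forbidden', [('Content-Type', 'application/json')])
        return [json.dumps({'data': 'Access denied', 'success': False}).encode()]
    return wrapped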
replace_pipe.ts
import { isBlank, isString, isNumber, isFunction, RegExpWrapper, StringWrapper } from 'angular2/src/facade/lang'; import {Injectable, PipeTransform, Pipe} from 'angular2/core'; import {InvalidPipeArgumentException} from './invalid_pipe_argument_exception'; /** * Creates a new String with some or all of the matches of a pattern replaced by * a replacement. * * The pattern to be matched is specified by the 'pattern' parameter. * * The replacement to be set is specified by the 'replacement' parameter. * * An optional 'flags' parameter can be set. * * ### Usage * * expression | replace:pattern:replacement * * All behavior is based on the expected behavior of the JavaScript API * String.prototype.replace() function. * * Where the input expression is a [String] or [Number] (to be treated as a string), * the `pattern` is a [String] or [RegExp], * the 'replacement' is a [String] or [Function]. * * --Note--: The 'pattern' parameter will be converted to a RegExp instance. Make sure to escape the * string properly if you are matching for regular expression special characters like parenthesis, * brackets etc. */ @Pipe({name: 'replace'}) @Injectable() export class ReplacePipe implements PipeTransform { transform(value: any, pattern: string | RegExp, replacement: Function | string): any { if (isBlank(value)) { return value; } if (!this._supportedInput(value)) { throw new InvalidPipeArgumentException(ReplacePipe, value); } var input = value.toString(); if (!this._supportedPattern(pattern)) { throw new InvalidPipeArgumentException(ReplacePipe, pattern); } if (!this._supportedReplacement(replacement)) { throw new InvalidPipeArgumentException(ReplacePipe, replacement); } // template fails with literal RegExp e.g /pattern/igm // var rgx = pattern instanceof RegExp ? pattern : RegExpWrapper.create(pattern); if (isFunction(replacement)) { var rgxPattern = isString(pattern) ? RegExpWrapper.create(<string>pattern) : <RegExp>pattern; return StringWrapper.replaceAllMapped(input, rgxPattern, <Function>replacement); } if (pattern instanceof RegExp) { // use the replaceAll variant return StringWrapper.replaceAll(input, pattern, <string>replacement); } return StringWrapper.replace(input, <string>pattern, <string>replacement); } private _supportedInput(input: any): boolean { return isString(input) || isNumber(input); } private _supportedPattern(pattern: any): boolean
private _supportedReplacement(replacement: any): boolean { return isString(replacement) || isFunction(replacement); } }
{ return isString(pattern) || pattern instanceof RegExp; }
identifier_body
replace_pipe.ts
import { isBlank, isString, isNumber, isFunction, RegExpWrapper, StringWrapper } from 'angular2/src/facade/lang'; import {Injectable, PipeTransform, Pipe} from 'angular2/core'; import {InvalidPipeArgumentException} from './invalid_pipe_argument_exception'; /** * Creates a new String with some or all of the matches of a pattern replaced by * a replacement. * * The pattern to be matched is specified by the 'pattern' parameter. * * The replacement to be set is specified by the 'replacement' parameter. * * An optional 'flags' parameter can be set. * * ### Usage * * expression | replace:pattern:replacement * * All behavior is based on the expected behavior of the JavaScript API * String.prototype.replace() function. * * Where the input expression is a [String] or [Number] (to be treated as a string), * the `pattern` is a [String] or [RegExp], * the 'replacement' is a [String] or [Function]. * * --Note--: The 'pattern' parameter will be converted to a RegExp instance. Make sure to escape the * string properly if you are matching for regular expression special characters like parenthesis, * brackets etc. */ @Pipe({name: 'replace'}) @Injectable() export class ReplacePipe implements PipeTransform { transform(value: any, pattern: string | RegExp, replacement: Function | string): any { if (isBlank(value)) { return value; } if (!this._supportedInput(value)) { throw new InvalidPipeArgumentException(ReplacePipe, value); } var input = value.toString(); if (!this._supportedPattern(pattern)) { throw new InvalidPipeArgumentException(ReplacePipe, pattern); } if (!this._supportedReplacement(replacement)) { throw new InvalidPipeArgumentException(ReplacePipe, replacement); } // template fails with literal RegExp e.g /pattern/igm // var rgx = pattern instanceof RegExp ? pattern : RegExpWrapper.create(pattern); if (isFunction(replacement)) { var rgxPattern = isString(pattern) ? RegExpWrapper.create(<string>pattern) : <RegExp>pattern; return StringWrapper.replaceAllMapped(input, rgxPattern, <Function>replacement); } if (pattern instanceof RegExp)
return StringWrapper.replace(input, <string>pattern, <string>replacement); } private _supportedInput(input: any): boolean { return isString(input) || isNumber(input); } private _supportedPattern(pattern: any): boolean { return isString(pattern) || pattern instanceof RegExp; } private _supportedReplacement(replacement: any): boolean { return isString(replacement) || isFunction(replacement); } }
{ // use the replaceAll variant return StringWrapper.replaceAll(input, pattern, <string>replacement); }
conditional_block
replace_pipe.ts
import { isBlank, isString, isNumber, isFunction, RegExpWrapper, StringWrapper } from 'angular2/src/facade/lang'; import {Injectable, PipeTransform, Pipe} from 'angular2/core'; import {InvalidPipeArgumentException} from './invalid_pipe_argument_exception'; /** * Creates a new String with some or all of the matches of a pattern replaced by * a replacement. * * The pattern to be matched is specified by the 'pattern' parameter. * * The replacement to be set is specified by the 'replacement' parameter. * * An optional 'flags' parameter can be set. * * ### Usage * * expression | replace:pattern:replacement * * All behavior is based on the expected behavior of the JavaScript API * String.prototype.replace() function. * * Where the input expression is a [String] or [Number] (to be treated as a string), * the `pattern` is a [String] or [RegExp], * the 'replacement' is a [String] or [Function]. * * --Note--: The 'pattern' parameter will be converted to a RegExp instance. Make sure to escape the * string properly if you are matching for regular expression special characters like parenthesis, * brackets etc. */ @Pipe({name: 'replace'}) @Injectable() export class ReplacePipe implements PipeTransform { transform(value: any, pattern: string | RegExp, replacement: Function | string): any { if (isBlank(value)) { return value; } if (!this._supportedInput(value)) { throw new InvalidPipeArgumentException(ReplacePipe, value); } var input = value.toString(); if (!this._supportedPattern(pattern)) { throw new InvalidPipeArgumentException(ReplacePipe, pattern); } if (!this._supportedReplacement(replacement)) { throw new InvalidPipeArgumentException(ReplacePipe, replacement); } // template fails with literal RegExp e.g /pattern/igm // var rgx = pattern instanceof RegExp ? pattern : RegExpWrapper.create(pattern); if (isFunction(replacement)) { var rgxPattern = isString(pattern) ? RegExpWrapper.create(<string>pattern) : <RegExp>pattern; return StringWrapper.replaceAllMapped(input, rgxPattern, <Function>replacement); } if (pattern instanceof RegExp) { // use the replaceAll variant return StringWrapper.replaceAll(input, pattern, <string>replacement); } return StringWrapper.replace(input, <string>pattern, <string>replacement); } private
(input: any): boolean { return isString(input) || isNumber(input); } private _supportedPattern(pattern: any): boolean { return isString(pattern) || pattern instanceof RegExp; } private _supportedReplacement(replacement: any): boolean { return isString(replacement) || isFunction(replacement); } }
_supportedInput
identifier_name
replace_pipe.ts
import { isBlank, isString, isNumber, isFunction, RegExpWrapper, StringWrapper } from 'angular2/src/facade/lang'; import {Injectable, PipeTransform, Pipe} from 'angular2/core'; import {InvalidPipeArgumentException} from './invalid_pipe_argument_exception'; /** * Creates a new String with some or all of the matches of a pattern replaced by * a replacement. * * The pattern to be matched is specified by the 'pattern' parameter. * * The replacement to be set is specified by the 'replacement' parameter. * * An optional 'flags' parameter can be set. * * ### Usage * * expression | replace:pattern:replacement * * All behavior is based on the expected behavior of the JavaScript API * String.prototype.replace() function. * * Where the input expression is a [String] or [Number] (to be treated as a string), * the `pattern` is a [String] or [RegExp], * the 'replacement' is a [String] or [Function]. * * --Note--: The 'pattern' parameter will be converted to a RegExp instance. Make sure to escape the * string properly if you are matching for regular expression special characters like parenthesis, * brackets etc. */ @Pipe({name: 'replace'}) @Injectable()
} if (!this._supportedInput(value)) { throw new InvalidPipeArgumentException(ReplacePipe, value); } var input = value.toString(); if (!this._supportedPattern(pattern)) { throw new InvalidPipeArgumentException(ReplacePipe, pattern); } if (!this._supportedReplacement(replacement)) { throw new InvalidPipeArgumentException(ReplacePipe, replacement); } // template fails with literal RegExp e.g /pattern/igm // var rgx = pattern instanceof RegExp ? pattern : RegExpWrapper.create(pattern); if (isFunction(replacement)) { var rgxPattern = isString(pattern) ? RegExpWrapper.create(<string>pattern) : <RegExp>pattern; return StringWrapper.replaceAllMapped(input, rgxPattern, <Function>replacement); } if (pattern instanceof RegExp) { // use the replaceAll variant return StringWrapper.replaceAll(input, pattern, <string>replacement); } return StringWrapper.replace(input, <string>pattern, <string>replacement); } private _supportedInput(input: any): boolean { return isString(input) || isNumber(input); } private _supportedPattern(pattern: any): boolean { return isString(pattern) || pattern instanceof RegExp; } private _supportedReplacement(replacement: any): boolean { return isString(replacement) || isFunction(replacement); } }
export class ReplacePipe implements PipeTransform { transform(value: any, pattern: string | RegExp, replacement: Function | string): any { if (isBlank(value)) { return value;
random_line_split
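The replace_pipe.ts rows validate that the input is a string or number, the pattern is a string or RegExp, and the replacement is a string or function, then dispatch to the matching replace variant. A hedged Python sketch of the same checks with the standard re module (illustrative only; as in the pipe, a string pattern is treated as a regular expression, so escape special characters yourself if needed):

import re

def replace(value, pattern, replacement):
    """Replace matches of pattern in value, mirroring the pipe's argument checks."""
    if value is None:
        return value
    if not isinstance(value, (str, int, float)):
        raise TypeError('unsupported input: %r' % (value,))
    if isinstance(pattern, str):
        pattern = re.compile(pattern)
    elif not isinstance(pattern, re.Pattern):
        raise TypeError('unsupported pattern: %r' % (pattern,))
    if not (isinstance(replacement, str) or callable(replacement)):
        raise TypeError('unsupported replacement: %r' % (replacement,))
    return pattern.sub(replacement, str(value))

print(replace('squeeze', 'e', '3'))                    # squ33z3
print(replace(123, re.compile(r'\d'), lambda m: '#'))  # ###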
trace_pid.py
# Copyright (c) 2015, Simone Margaritelli <evilsocket at gmail dot com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of ARM Inject nor the names of its contributors may be used # to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from pyadb.adb import ADB import sys if len(sys.argv) != 2:
pid = int(sys.argv[1]) try: adb = ADB() print "@ Pushing files to /data/local/tmp ..." adb.sh( "rm -rf /data/local/tmp/injector /data/local/tmp/libhook.so" ) adb.push( "libs/armeabi-v7a/injector", "/data/local/tmp/injector" ) adb.push( "libs/armeabi-v7a/libhook.so", "/data/local/tmp/libhook.so" ) adb.sh( "chmod 777 /data/local/tmp/injector" ) # we need to set selinux to permissive in order to make ptrace work adb.set_selinux_level( 0 ) adb.clear_log() print "@ Injection into PID %d starting ..." % pid adb.sudo( "/data/local/tmp/injector %d /data/local/tmp/libhook.so" % pid ) adb.logcat("LIBHOOK") except KeyboardInterrupt: pass
print "Usage: python %s <pid>" % sys.argv[0] quit()
conditional_block
trace_pid.py
# Copyright (c) 2015, Simone Margaritelli <evilsocket at gmail dot com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of ARM Inject nor the names of its contributors may be used # to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from pyadb.adb import ADB import sys if len(sys.argv) != 2: print "Usage: python %s <pid>" % sys.argv[0] quit() pid = int(sys.argv[1]) try: adb = ADB() print "@ Pushing files to /data/local/tmp ..." adb.sh( "rm -rf /data/local/tmp/injector /data/local/tmp/libhook.so" ) adb.push( "libs/armeabi-v7a/injector", "/data/local/tmp/injector" ) adb.push( "libs/armeabi-v7a/libhook.so", "/data/local/tmp/libhook.so" ) adb.sh( "chmod 777 /data/local/tmp/injector" )
adb.set_selinux_level( 0 ) adb.clear_log() print "@ Injection into PID %d starting ..." % pid adb.sudo( "/data/local/tmp/injector %d /data/local/tmp/libhook.so" % pid ) adb.logcat("LIBHOOK") except KeyboardInterrupt: pass
# we need to set selinux to permissive in order to make ptrace work
random_line_split
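trace_pid.py guards its single <pid> argument with a manual sys.argv length check before driving adb. A hedged alternative sketch of just that argument handling using argparse (Python 3 syntax, whereas the rows use Python 2 print statements; parse_pid is an invented helper and the ADB calls are intentionally left out):

import argparse

def parse_pid(argv=None):
    """argparse equivalent of the script's manual sys.argv check."""
    parser = argparse.ArgumentParser(description='Inject libhook.so into a running process.')
    parser.add_argument('pid', type=int, help='target process id')
    return parser.parse_args(argv).pid

if __name__ == '__main__':
    print(parse_pid(['1234']))  # 1234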
location_mock.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Location, LocationStrategy} from '@angular/common'; import {EventEmitter, Injectable} from '@angular/core'; import {ISubscription} from 'rxjs/Subscription'; /** * A spy for {@link Location} that allows tests to fire simulated location events. * * @experimental */ @Injectable() export class SpyLocation implements Location { urlChanges: string[] = []; private _history: LocationState[] = [new LocationState('', '', null)]; private _historyIndex: number = 0; /** @internal */ _subject: EventEmitter<any> = new EventEmitter(); /** @internal */ _baseHref: string = ''; /** @internal */ _platformStrategy: LocationStrategy = null !; setInitialPath(url: string) { this._history[this._historyIndex].path = url; } setBaseHref(url: string) { this._baseHref = url; }
isCurrentPathEqualTo(path: string, query: string = ''): boolean { const givenPath = path.endsWith('/') ? path.substring(0, path.length - 1) : path; const currPath = this.path().endsWith('/') ? this.path().substring(0, this.path().length - 1) : this.path(); return currPath == givenPath + (query.length > 0 ? ('?' + query) : ''); } simulateUrlPop(pathname: string) { this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'}); } simulateHashChange(pathname: string) { // Because we don't prevent the native event, the browser will independently update the path this.setInitialPath(pathname); this.urlChanges.push('hash: ' + pathname); this._subject.emit({'url': pathname, 'pop': true, 'type': 'hashchange'}); } prepareExternalUrl(url: string): string { if (url.length > 0 && !url.startsWith('/')) { url = '/' + url; } return this._baseHref + url; } go(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); if (this._historyIndex > 0) { this._history.splice(this._historyIndex + 1); } this._history.push(new LocationState(path, query, state)); this._historyIndex = this._history.length - 1; const locationState = this._history[this._historyIndex - 1]; if (locationState.path == path && locationState.query == query) { return; } const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push(url); this._subject.emit({'url': url, 'pop': false}); } replaceState(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); const history = this._history[this._historyIndex]; if (history.path == path && history.query == query) { return; } history.path = path; history.query = query; history.state = state; const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push('replace: ' + url); } forward() { if (this._historyIndex < (this._history.length - 1)) { this._historyIndex++; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } back() { if (this._historyIndex > 0) { this._historyIndex--; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } subscribe( onNext: (value: any) => void, onThrow?: ((error: any) => void)|null, onReturn?: (() => void)|null): ISubscription { return this._subject.subscribe({next: onNext, error: onThrow, complete: onReturn}); } normalize(url: string): string { return null !; } } class LocationState { constructor(public path: string, public query: string, public state: any) {} }
path(): string { return this._history[this._historyIndex].path; } private state(): string { return this._history[this._historyIndex].state; }
random_line_split
location_mock.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Location, LocationStrategy} from '@angular/common'; import {EventEmitter, Injectable} from '@angular/core'; import {ISubscription} from 'rxjs/Subscription'; /** * A spy for {@link Location} that allows tests to fire simulated location events. * * @experimental */ @Injectable() export class SpyLocation implements Location { urlChanges: string[] = []; private _history: LocationState[] = [new LocationState('', '', null)]; private _historyIndex: number = 0; /** @internal */ _subject: EventEmitter<any> = new EventEmitter(); /** @internal */ _baseHref: string = ''; /** @internal */ _platformStrategy: LocationStrategy = null !; setInitialPath(url: string) { this._history[this._historyIndex].path = url; } setBaseHref(url: string) { this._baseHref = url; } path(): string { return this._history[this._historyIndex].path; } private state(): string { return this._history[this._historyIndex].state; } isCurrentPathEqualTo(path: string, query: string = ''): boolean { const givenPath = path.endsWith('/') ? path.substring(0, path.length - 1) : path; const currPath = this.path().endsWith('/') ? this.path().substring(0, this.path().length - 1) : this.path(); return currPath == givenPath + (query.length > 0 ? ('?' + query) : ''); } simulateUrlPop(pathname: string) { this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'}); } simulateHashChange(pathname: string) { // Because we don't prevent the native event, the browser will independently update the path this.setInitialPath(pathname); this.urlChanges.push('hash: ' + pathname); this._subject.emit({'url': pathname, 'pop': true, 'type': 'hashchange'}); } prepareExternalUrl(url: string): string { if (url.length > 0 && !url.startsWith('/')) { url = '/' + url; } return this._baseHref + url; } go(path: string, query: string = '', state: any = null)
replaceState(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); const history = this._history[this._historyIndex]; if (history.path == path && history.query == query) { return; } history.path = path; history.query = query; history.state = state; const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push('replace: ' + url); } forward() { if (this._historyIndex < (this._history.length - 1)) { this._historyIndex++; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } back() { if (this._historyIndex > 0) { this._historyIndex--; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } subscribe( onNext: (value: any) => void, onThrow?: ((error: any) => void)|null, onReturn?: (() => void)|null): ISubscription { return this._subject.subscribe({next: onNext, error: onThrow, complete: onReturn}); } normalize(url: string): string { return null !; } } class LocationState { constructor(public path: string, public query: string, public state: any) {} }
{ path = this.prepareExternalUrl(path); if (this._historyIndex > 0) { this._history.splice(this._historyIndex + 1); } this._history.push(new LocationState(path, query, state)); this._historyIndex = this._history.length - 1; const locationState = this._history[this._historyIndex - 1]; if (locationState.path == path && locationState.query == query) { return; } const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push(url); this._subject.emit({'url': url, 'pop': false}); }
identifier_body
location_mock.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Location, LocationStrategy} from '@angular/common'; import {EventEmitter, Injectable} from '@angular/core'; import {ISubscription} from 'rxjs/Subscription'; /** * A spy for {@link Location} that allows tests to fire simulated location events. * * @experimental */ @Injectable() export class SpyLocation implements Location { urlChanges: string[] = []; private _history: LocationState[] = [new LocationState('', '', null)]; private _historyIndex: number = 0; /** @internal */ _subject: EventEmitter<any> = new EventEmitter(); /** @internal */ _baseHref: string = ''; /** @internal */ _platformStrategy: LocationStrategy = null !; setInitialPath(url: string) { this._history[this._historyIndex].path = url; }
(url: string) { this._baseHref = url; } path(): string { return this._history[this._historyIndex].path; } private state(): string { return this._history[this._historyIndex].state; } isCurrentPathEqualTo(path: string, query: string = ''): boolean { const givenPath = path.endsWith('/') ? path.substring(0, path.length - 1) : path; const currPath = this.path().endsWith('/') ? this.path().substring(0, this.path().length - 1) : this.path(); return currPath == givenPath + (query.length > 0 ? ('?' + query) : ''); } simulateUrlPop(pathname: string) { this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'}); } simulateHashChange(pathname: string) { // Because we don't prevent the native event, the browser will independently update the path this.setInitialPath(pathname); this.urlChanges.push('hash: ' + pathname); this._subject.emit({'url': pathname, 'pop': true, 'type': 'hashchange'}); } prepareExternalUrl(url: string): string { if (url.length > 0 && !url.startsWith('/')) { url = '/' + url; } return this._baseHref + url; } go(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); if (this._historyIndex > 0) { this._history.splice(this._historyIndex + 1); } this._history.push(new LocationState(path, query, state)); this._historyIndex = this._history.length - 1; const locationState = this._history[this._historyIndex - 1]; if (locationState.path == path && locationState.query == query) { return; } const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push(url); this._subject.emit({'url': url, 'pop': false}); } replaceState(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); const history = this._history[this._historyIndex]; if (history.path == path && history.query == query) { return; } history.path = path; history.query = query; history.state = state; const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push('replace: ' + url); } forward() { if (this._historyIndex < (this._history.length - 1)) { this._historyIndex++; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } back() { if (this._historyIndex > 0) { this._historyIndex--; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } subscribe( onNext: (value: any) => void, onThrow?: ((error: any) => void)|null, onReturn?: (() => void)|null): ISubscription { return this._subject.subscribe({next: onNext, error: onThrow, complete: onReturn}); } normalize(url: string): string { return null !; } } class LocationState { constructor(public path: string, public query: string, public state: any) {} }
setBaseHref
identifier_name
location_mock.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Location, LocationStrategy} from '@angular/common'; import {EventEmitter, Injectable} from '@angular/core'; import {ISubscription} from 'rxjs/Subscription'; /** * A spy for {@link Location} that allows tests to fire simulated location events. * * @experimental */ @Injectable() export class SpyLocation implements Location { urlChanges: string[] = []; private _history: LocationState[] = [new LocationState('', '', null)]; private _historyIndex: number = 0; /** @internal */ _subject: EventEmitter<any> = new EventEmitter(); /** @internal */ _baseHref: string = ''; /** @internal */ _platformStrategy: LocationStrategy = null !; setInitialPath(url: string) { this._history[this._historyIndex].path = url; } setBaseHref(url: string) { this._baseHref = url; } path(): string { return this._history[this._historyIndex].path; } private state(): string { return this._history[this._historyIndex].state; } isCurrentPathEqualTo(path: string, query: string = ''): boolean { const givenPath = path.endsWith('/') ? path.substring(0, path.length - 1) : path; const currPath = this.path().endsWith('/') ? this.path().substring(0, this.path().length - 1) : this.path(); return currPath == givenPath + (query.length > 0 ? ('?' + query) : ''); } simulateUrlPop(pathname: string) { this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'}); } simulateHashChange(pathname: string) { // Because we don't prevent the native event, the browser will independently update the path this.setInitialPath(pathname); this.urlChanges.push('hash: ' + pathname); this._subject.emit({'url': pathname, 'pop': true, 'type': 'hashchange'}); } prepareExternalUrl(url: string): string { if (url.length > 0 && !url.startsWith('/')) { url = '/' + url; } return this._baseHref + url; } go(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); if (this._historyIndex > 0) { this._history.splice(this._historyIndex + 1); } this._history.push(new LocationState(path, query, state)); this._historyIndex = this._history.length - 1; const locationState = this._history[this._historyIndex - 1]; if (locationState.path == path && locationState.query == query)
const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push(url); this._subject.emit({'url': url, 'pop': false}); } replaceState(path: string, query: string = '', state: any = null) { path = this.prepareExternalUrl(path); const history = this._history[this._historyIndex]; if (history.path == path && history.query == query) { return; } history.path = path; history.query = query; history.state = state; const url = path + (query.length > 0 ? ('?' + query) : ''); this.urlChanges.push('replace: ' + url); } forward() { if (this._historyIndex < (this._history.length - 1)) { this._historyIndex++; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } back() { if (this._historyIndex > 0) { this._historyIndex--; this._subject.emit({'url': this.path(), 'state': this.state(), 'pop': true}); } } subscribe( onNext: (value: any) => void, onThrow?: ((error: any) => void)|null, onReturn?: (() => void)|null): ISubscription { return this._subject.subscribe({next: onNext, error: onThrow, complete: onReturn}); } normalize(url: string): string { return null !; } } class LocationState { constructor(public path: string, public query: string, public state: any) {} }
{ return; }
conditional_block
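The location_mock.ts rows implement SpyLocation around a history list plus an index: go() appends (truncating any forward entries), while back() and forward() only move the index. A hedged Python sketch of that history model with invented names (URL normalization, events, and query/state handling are omitted):

class HistorySpy:
    """Minimal history list with an index, mimicking go/back/forward."""

    def __init__(self):
        self.entries = ['']
        self.index = 0

    def go(self, path):
        # Navigating discards any "forward" entries beyond the current position.
        del self.entries[self.index + 1:]
        self.entries.append(path)
        self.index = len(self.entries) - 1

    def back(self):
        if self.index > 0:
            self.index -= 1
        return self.entries[self.index]

    def forward(self):
        if self.index < len(self.entries) - 1:
            self.index += 1
        return self.entries[self.index]

h = HistorySpy()
h.go('/a'); h.go('/b'); h.back(); h.go('/c')
print(h.entries, h.index)  # ['', '/a', '/c'] 2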
app.js
import * as THREE from '../../libs/three/three.module.js'; import { Stats } from '../../libs/stats.module.js'; import { OrbitControls } from '../../libs/three/jsm/OrbitControls.js'; import { ARButton } from '../../libs/ARButton.js'; class App{
(){ // Creates a <div> element and adds it to the HTML page. const container = document.createElement( 'div' ); document.body.appendChild( container ); /* * SCENE */ // INITIALIZATION this.scene = new THREE.Scene(); // LIGHTING // Create an ambient light and add it to the scene. // Parameters: Sky color, Ground color, intensity const ambient = new THREE.HemisphereLight(0xffffff, 0xbbbbff, 0.5); this.scene.add(ambient); // Create a directional light and add it to the scene // Parameters: Light Color // The light target is the origin by default. const light = new THREE.DirectionalLight( 0xffffff ); // Moves the source of the light to a given position. light.position.set( 1, 1, 1 ).normalize(); this.scene.add( light ); // OBJECTS // Define a Box Geometry const geometry = new THREE.BoxBufferGeometry(); // Define a basic material with color Red const material = new THREE.MeshStandardMaterial( { color: 0x00FF00 }); // Create a new mesh using the geometry and material this.mesh = new THREE.Mesh( geometry, material ); // Move the mesh to a new position this.mesh.position.set(0,1,-3); // Add the mesh to the scene this.scene.add(this.mesh); /* * CAMERA */ // INITIALIZE // Create a new camera // Parameters: Field of View, Aspect Ratio, Inner Clipping Plane, Outer Clipping Plane this.camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 0.1, 100 ); // Set the position of the camera this.camera.position.set( 0, 1.6, 3 ); /* * RENDERER */ // INITIALIZE this.renderer = new THREE.WebGLRenderer({ antialias: true } ); this.renderer.setPixelRatio( window.devicePixelRatio ); this.renderer.setSize( window.innerWidth, window.innerHeight ); this.renderer.outputEncoding = THREE.sRGBEncoding; // Add the renderer to the initial container that will display on the HTML page container.appendChild( this.renderer.domElement ); /* * ADDITIONAL TOOLS */ // ORBIT CONTROLS for non-XR view this.controls = new OrbitControls( this.camera, this.renderer.domElement ); this.controls.target.set(0, 1.6, 0); this.controls.update(); // STATS for XR this.stats = new Stats(); document.body.appendChild( this.stats.dom ); /* * MAIN FUNCTION CALL */ this.setupXR(); // Window resize handler window.addEventListener('resize', this.resize.bind(this) ); } setupXR(){ // Enable XR this.renderer.xr.enabled = true; // Create a button to allow the user to enter VR const button = new ARButton( this.renderer ); // Set the animation loop function this.renderer.setAnimationLoop( this.render.bind(this) ); } render( ) { // Update the frame statistics this.stats.update(); // Render the scene this.renderer.render( this.scene, this.camera ); } // Handles window resizing resize(){ this.camera.aspect = window.innerWidth / window.innerHeight; this.camera.updateProjectionMatrix(); this.renderer.setSize( window.innerWidth, window.innerHeight ); } } export { App };
constructor
identifier_name
app.js
import * as THREE from '../../libs/three/three.module.js'; import { Stats } from '../../libs/stats.module.js'; import { OrbitControls } from '../../libs/three/jsm/OrbitControls.js'; import { ARButton } from '../../libs/ARButton.js'; class App{ constructor(){ // Creates a <div> element and adds it to the HTML page. const container = document.createElement( 'div' ); document.body.appendChild( container ); /* * SCENE */ // INITIALIZATION this.scene = new THREE.Scene(); // LIGHTING // Create an ambient light and add it to the scene. // Parameters: Sky color, Ground color, intensity const ambient = new THREE.HemisphereLight(0xffffff, 0xbbbbff, 0.5); this.scene.add(ambient); // Create a directional light and add it to the scene // Parameters: Light Color // The light target is the origin by default. const light = new THREE.DirectionalLight( 0xffffff ); // Moves the source of the light to a given position. light.position.set( 1, 1, 1 ).normalize(); this.scene.add( light ); // OBJECTS // Define a Box Geometry const geometry = new THREE.BoxBufferGeometry(); // Define a basic material with color Red const material = new THREE.MeshStandardMaterial( { color: 0x00FF00 }); // Create a new mesh using the geometry and material this.mesh = new THREE.Mesh( geometry, material ); // Move the mesh to a new position this.mesh.position.set(0,1,-3); // Add the mesh to the scene this.scene.add(this.mesh); /* * CAMERA */ // INITIALIZE // Create a new camera // Parameters: Field of View, Aspect Ratio, Inner Clipping Plane, Outer Clipping Plane this.camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 0.1, 100 ); // Set the position of the camera this.camera.position.set( 0, 1.6, 3 ); /* * RENDERER */ // INITIALIZE this.renderer = new THREE.WebGLRenderer({ antialias: true } ); this.renderer.setPixelRatio( window.devicePixelRatio ); this.renderer.setSize( window.innerWidth, window.innerHeight ); this.renderer.outputEncoding = THREE.sRGBEncoding; // Add the renderer to the initial container that will display on the HTML page container.appendChild( this.renderer.domElement ); /* * ADDITIONAL TOOLS */
// ORBIT CONTROLS for non-XR view this.controls = new OrbitControls( this.camera, this.renderer.domElement ); this.controls.target.set(0, 1.6, 0); this.controls.update(); // STATS for XR this.stats = new Stats(); document.body.appendChild( this.stats.dom ); /* * MAIN FUNCTION CALL */ this.setupXR(); // Window resize handler window.addEventListener('resize', this.resize.bind(this) ); } setupXR(){ // Enable XR this.renderer.xr.enabled = true; // Create a button to allow the user to enter VR const button = new ARButton( this.renderer ); // Set the animation loop function this.renderer.setAnimationLoop( this.render.bind(this) ); } render( ) { // Update the frame statistics this.stats.update(); // Render the scene this.renderer.render( this.scene, this.camera ); } // Handles window resizing resize(){ this.camera.aspect = window.innerWidth / window.innerHeight; this.camera.updateProjectionMatrix(); this.renderer.setSize( window.innerWidth, window.innerHeight ); } } export { App };
random_line_split
app.js
import * as THREE from '../../libs/three/three.module.js'; import { Stats } from '../../libs/stats.module.js'; import { OrbitControls } from '../../libs/three/jsm/OrbitControls.js'; import { ARButton } from '../../libs/ARButton.js'; class App{ constructor(){ // Creates a <div> element and adds it to the HTML page. const container = document.createElement( 'div' ); document.body.appendChild( container ); /* * SCENE */ // INITIALIZATION this.scene = new THREE.Scene(); // LIGHTING // Create an ambient light and add it to the scene. // Parameters: Sky color, Ground color, intensity const ambient = new THREE.HemisphereLight(0xffffff, 0xbbbbff, 0.5); this.scene.add(ambient); // Create a directional light and add it to the scene // Parameters: Light Color // The light target is the origin by default. const light = new THREE.DirectionalLight( 0xffffff ); // Moves the source of the light to a given position. light.position.set( 1, 1, 1 ).normalize(); this.scene.add( light ); // OBJECTS // Define a Box Geometry const geometry = new THREE.BoxBufferGeometry(); // Define a basic material with color Red const material = new THREE.MeshStandardMaterial( { color: 0x00FF00 }); // Create a new mesh using the geometry and material this.mesh = new THREE.Mesh( geometry, material ); // Move the mesh to a new position this.mesh.position.set(0,1,-3); // Add the mesh to the scene this.scene.add(this.mesh); /* * CAMERA */ // INITIALIZE // Create a new camera // Parameters: Field of View, Aspect Ratio, Inner Clipping Plane, Outer Clipping Plane this.camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 0.1, 100 ); // Set the position of the camera this.camera.position.set( 0, 1.6, 3 ); /* * RENDERER */ // INITIALIZE this.renderer = new THREE.WebGLRenderer({ antialias: true } ); this.renderer.setPixelRatio( window.devicePixelRatio ); this.renderer.setSize( window.innerWidth, window.innerHeight ); this.renderer.outputEncoding = THREE.sRGBEncoding; // Add the renderer to the initial container that will display on the HTML page container.appendChild( this.renderer.domElement ); /* * ADDITIONAL TOOLS */ // ORBIT CONTROLS for non-XR view this.controls = new OrbitControls( this.camera, this.renderer.domElement ); this.controls.target.set(0, 1.6, 0); this.controls.update(); // STATS for XR this.stats = new Stats(); document.body.appendChild( this.stats.dom ); /* * MAIN FUNCTION CALL */ this.setupXR(); // Window resize handler window.addEventListener('resize', this.resize.bind(this) ); } setupXR(){ // Enable XR this.renderer.xr.enabled = true; // Create a button to allow the user to enter VR const button = new ARButton( this.renderer ); // Set the animation loop function this.renderer.setAnimationLoop( this.render.bind(this) ); } render( ) { // Update the frame statistics this.stats.update(); // Render the scene this.renderer.render( this.scene, this.camera ); } // Handles window resizing resize()
} export { App };
{ this.camera.aspect = window.innerWidth / window.innerHeight; this.camera.updateProjectionMatrix(); this.renderer.setSize( window.innerWidth, window.innerHeight ); }
identifier_body
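The app.js rows wire a window resize handler that recomputes the camera aspect ratio from the new window size. A hedged, framework-free Python sketch of only that bookkeeping (no Three.js binding exists here; Camera is an invented stand-in):

class Camera:
    """Tracks just the aspect ratio the resize handler would update."""

    def __init__(self, width, height):
        self.aspect = width / height

    def resize(self, width, height):
        # Mirrors camera.aspect = innerWidth / innerHeight in the JS version.
        self.aspect = width / height

cam = Camera(1920, 1080)
cam.resize(1280, 720)
print(round(cam.aspect, 3))  # 1.778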
spawn.js
'use strict' const { spawn } = require('@malept/cross-spawn-promise') const which = require('which') function updateExecutableMissingException (err, hasLogger)
module.exports = async function (cmd, args, logger) { if (process.platform !== 'win32') { args.unshift(cmd) cmd = 'mono' } return spawn(cmd, args, { logger, updateErrorCallback: updateExecutableMissingException }) }
{ if (hasLogger && err.code === 'ENOENT' && err.syscall === 'spawn mono') { let installer let pkg if (process.platform === 'darwin') { installer = 'brew' pkg = 'mono' } else if (which.sync('dnf', { nothrow: true })) { installer = 'dnf' pkg = 'mono-core' } else { // assume apt-based Linux distro installer = 'apt' pkg = 'mono-runtime' } err.message = `Your system is missing the ${pkg} package. Try, e.g. '${installer} install ${pkg}'` } }
identifier_body
spawn.js
'use strict' const { spawn } = require('@malept/cross-spawn-promise') const which = require('which') function
(err, hasLogger) { if (hasLogger && err.code === 'ENOENT' && err.syscall === 'spawn mono') { let installer let pkg if (process.platform === 'darwin') { installer = 'brew' pkg = 'mono' } else if (which.sync('dnf', { nothrow: true })) { installer = 'dnf' pkg = 'mono-core' } else { // assume apt-based Linux distro installer = 'apt' pkg = 'mono-runtime' } err.message = `Your system is missing the ${pkg} package. Try, e.g. '${installer} install ${pkg}'` } } module.exports = async function (cmd, args, logger) { if (process.platform !== 'win32') { args.unshift(cmd) cmd = 'mono' } return spawn(cmd, args, { logger, updateErrorCallback: updateExecutableMissingException }) }
updateExecutableMissingException
identifier_name
spawn.js
'use strict' const { spawn } = require('@malept/cross-spawn-promise') const which = require('which') function updateExecutableMissingException (err, hasLogger) { if (hasLogger && err.code === 'ENOENT' && err.syscall === 'spawn mono') { let installer let pkg if (process.platform === 'darwin') { installer = 'brew' pkg = 'mono' } else if (which.sync('dnf', { nothrow: true }))
else { // assume apt-based Linux distro installer = 'apt' pkg = 'mono-runtime' } err.message = `Your system is missing the ${pkg} package. Try, e.g. '${installer} install ${pkg}'` } } module.exports = async function (cmd, args, logger) { if (process.platform !== 'win32') { args.unshift(cmd) cmd = 'mono' } return spawn(cmd, args, { logger, updateErrorCallback: updateExecutableMissingException }) }
{ installer = 'dnf' pkg = 'mono-core' }
conditional_block
spawn.js
'use strict' const { spawn } = require('@malept/cross-spawn-promise') const which = require('which') function updateExecutableMissingException (err, hasLogger) { if (hasLogger && err.code === 'ENOENT' && err.syscall === 'spawn mono') { let installer let pkg if (process.platform === 'darwin') { installer = 'brew' pkg = 'mono' } else if (which.sync('dnf', { nothrow: true })) { installer = 'dnf'
pkg = 'mono-runtime' } err.message = `Your system is missing the ${pkg} package. Try, e.g. '${installer} install ${pkg}'` } } module.exports = async function (cmd, args, logger) { if (process.platform !== 'win32') { args.unshift(cmd) cmd = 'mono' } return spawn(cmd, args, { logger, updateErrorCallback: updateExecutableMissingException }) }
pkg = 'mono-core' } else { // assume apt-based Linux distro installer = 'apt'
random_line_split
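The spawn.js rows run the target binary through mono on non-Windows hosts and, when the executable is missing, build an install hint by probing the platform (brew on macOS, dnf if present, otherwise apt). A hedged Python sketch of the same flow with subprocess and shutil.which; the package names are copied from the rows, while run_with_mono/suggest_mono_install are invented helpers:

import platform
import shutil
import subprocess

def suggest_mono_install():
    """Pick an install hint the same way the JS helper does."""
    if platform.system() == 'Darwin':
        return 'brew install mono'
    if shutil.which('dnf'):
        return 'dnf install mono-core'
    return 'apt install mono-runtime'  # assume an apt-based distro

def run_with_mono(cmd, args):
    if platform.system() != 'Windows':
        # Non-Windows hosts execute the binary through the mono runtime.
        args = [cmd, *args]
        cmd = 'mono'
    try:
        return subprocess.run([cmd, *args], check=True)
    except FileNotFoundError:
        raise RuntimeError("mono is missing; try '%s'" % suggest_mono_install())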
version.rs
use std::str::FromStr; use cargo_edit::VersionExt; use crate::errors::*; #[derive(Clone, Debug)] pub enum TargetVersion { Relative(BumpLevel), Absolute(semver::Version), } impl TargetVersion { pub fn bump( &self, current: &semver::Version, metadata: Option<&str>, ) -> CargoResult<Option<semver::Version>> { match self { TargetVersion::Relative(bump_level) => { let mut potential_version = current.to_owned(); bump_level.bump_version(&mut potential_version, metadata)?; if potential_version != *current { let version = potential_version; Ok(Some(version)) } else { Ok(None) } } TargetVersion::Absolute(version) => { if current < version { let mut version = version.clone(); if version.build.is_empty() { if let Some(metadata) = metadata { version.build = semver::BuildMetadata::new(metadata)?; } else { version.build = current.build.clone(); } } Ok(Some(version)) } else if current == version { Ok(None) } else { Err(version_downgrade_err(current, version)) } } } } } impl Default for TargetVersion { fn default() -> Self
} #[derive(Debug, Clone, Copy)] pub enum BumpLevel { Major, Minor, Patch, /// Strip all pre-release flags Release, Rc, Beta, Alpha, } impl BumpLevel { pub fn variants() -> &'static [&'static str] { &["major", "minor", "patch", "release", "rc", "beta", "alpha"] } } impl FromStr for BumpLevel { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "major" => Ok(BumpLevel::Major), "minor" => Ok(BumpLevel::Minor), "patch" => Ok(BumpLevel::Patch), "release" => Ok(BumpLevel::Release), "rc" => Ok(BumpLevel::Rc), "beta" => Ok(BumpLevel::Beta), "alpha" => Ok(BumpLevel::Alpha), _ => Err(String::from( "[valid values: major, minor, patch, rc, beta, alpha]", )), } } } impl BumpLevel { pub fn bump_version( self, version: &mut semver::Version, metadata: Option<&str>, ) -> CargoResult<()> { match self { BumpLevel::Major => { version.increment_major(); } BumpLevel::Minor => { version.increment_minor(); } BumpLevel::Patch => { if !version.is_prerelease() { version.increment_patch(); } else { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Release => { if version.is_prerelease() { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Rc => { version.increment_rc()?; } BumpLevel::Beta => { version.increment_beta()?; } BumpLevel::Alpha => { version.increment_alpha()?; } }; if let Some(metadata) = metadata { version.metadata(metadata)?; } Ok(()) } }
{ TargetVersion::Relative(BumpLevel::Release) }
identifier_body
version.rs
use std::str::FromStr; use cargo_edit::VersionExt; use crate::errors::*; #[derive(Clone, Debug)] pub enum TargetVersion { Relative(BumpLevel), Absolute(semver::Version), } impl TargetVersion { pub fn bump( &self, current: &semver::Version, metadata: Option<&str>, ) -> CargoResult<Option<semver::Version>> { match self { TargetVersion::Relative(bump_level) => { let mut potential_version = current.to_owned(); bump_level.bump_version(&mut potential_version, metadata)?; if potential_version != *current { let version = potential_version; Ok(Some(version)) } else { Ok(None) } } TargetVersion::Absolute(version) => { if current < version { let mut version = version.clone(); if version.build.is_empty() { if let Some(metadata) = metadata { version.build = semver::BuildMetadata::new(metadata)?; } else { version.build = current.build.clone(); } } Ok(Some(version)) } else if current == version { Ok(None) } else { Err(version_downgrade_err(current, version)) } } } } } impl Default for TargetVersion { fn default() -> Self { TargetVersion::Relative(BumpLevel::Release) } } #[derive(Debug, Clone, Copy)] pub enum BumpLevel { Major, Minor, Patch, /// Strip all pre-release flags Release, Rc, Beta, Alpha, } impl BumpLevel { pub fn variants() -> &'static [&'static str] { &["major", "minor", "patch", "release", "rc", "beta", "alpha"] } } impl FromStr for BumpLevel { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "major" => Ok(BumpLevel::Major), "minor" => Ok(BumpLevel::Minor), "patch" => Ok(BumpLevel::Patch), "release" => Ok(BumpLevel::Release), "rc" => Ok(BumpLevel::Rc), "beta" => Ok(BumpLevel::Beta), "alpha" => Ok(BumpLevel::Alpha), _ => Err(String::from( "[valid values: major, minor, patch, rc, beta, alpha]", )), } } } impl BumpLevel { pub fn bump_version( self, version: &mut semver::Version, metadata: Option<&str>, ) -> CargoResult<()> { match self { BumpLevel::Major => { version.increment_major(); } BumpLevel::Minor => { version.increment_minor(); } BumpLevel::Patch => { if !version.is_prerelease() { version.increment_patch(); } else { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Release => { if version.is_prerelease() { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Rc => { version.increment_rc()?;
version.increment_alpha()?; } }; if let Some(metadata) = metadata { version.metadata(metadata)?; } Ok(()) } }
} BumpLevel::Beta => { version.increment_beta()?; } BumpLevel::Alpha => {
random_line_split
version.rs
use std::str::FromStr; use cargo_edit::VersionExt; use crate::errors::*; #[derive(Clone, Debug)] pub enum TargetVersion { Relative(BumpLevel), Absolute(semver::Version), } impl TargetVersion { pub fn bump( &self, current: &semver::Version, metadata: Option<&str>, ) -> CargoResult<Option<semver::Version>> { match self { TargetVersion::Relative(bump_level) => { let mut potential_version = current.to_owned(); bump_level.bump_version(&mut potential_version, metadata)?; if potential_version != *current { let version = potential_version; Ok(Some(version)) } else { Ok(None) } } TargetVersion::Absolute(version) => { if current < version { let mut version = version.clone(); if version.build.is_empty() { if let Some(metadata) = metadata { version.build = semver::BuildMetadata::new(metadata)?; } else { version.build = current.build.clone(); } } Ok(Some(version)) } else if current == version { Ok(None) } else { Err(version_downgrade_err(current, version)) } } } } } impl Default for TargetVersion { fn default() -> Self { TargetVersion::Relative(BumpLevel::Release) } } #[derive(Debug, Clone, Copy)] pub enum BumpLevel { Major, Minor, Patch, /// Strip all pre-release flags Release, Rc, Beta, Alpha, } impl BumpLevel { pub fn variants() -> &'static [&'static str] { &["major", "minor", "patch", "release", "rc", "beta", "alpha"] } } impl FromStr for BumpLevel { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "major" => Ok(BumpLevel::Major), "minor" => Ok(BumpLevel::Minor), "patch" => Ok(BumpLevel::Patch), "release" => Ok(BumpLevel::Release), "rc" => Ok(BumpLevel::Rc), "beta" => Ok(BumpLevel::Beta), "alpha" => Ok(BumpLevel::Alpha), _ => Err(String::from( "[valid values: major, minor, patch, rc, beta, alpha]", )), } } } impl BumpLevel { pub fn
( self, version: &mut semver::Version, metadata: Option<&str>, ) -> CargoResult<()> { match self { BumpLevel::Major => { version.increment_major(); } BumpLevel::Minor => { version.increment_minor(); } BumpLevel::Patch => { if !version.is_prerelease() { version.increment_patch(); } else { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Release => { if version.is_prerelease() { version.pre = semver::Prerelease::EMPTY; } } BumpLevel::Rc => { version.increment_rc()?; } BumpLevel::Beta => { version.increment_beta()?; } BumpLevel::Alpha => { version.increment_alpha()?; } }; if let Some(metadata) = metadata { version.metadata(metadata)?; } Ok(()) } }
bump_version
identifier_name
pa-Guru.js
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js (function(global) { global.ng = global.ng || {}; global.ng.common = global.ng.common || {}; global.ng.common.locales = global.ng.common.locales || {}; const u = undefined; function plural(n)
global.ng.common.locales['pa-guru'] = [ 'pa-Guru', [['ਸ.', 'ਸ਼.'], ['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u], [['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u, u], [ ['ਐ', 'ਸੋ', 'ਮੰ', 'ਬੁੱ', 'ਵੀ', 'ਸ਼ੁੱ', 'ਸ਼'], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗਲ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕਰ', 'ਸ਼ਨਿੱਚਰ' ], [ 'ਐਤਵਾਰ', 'ਸੋਮਵਾਰ', 'ਮੰਗਲਵਾਰ', 'ਬੁੱਧਵਾਰ', 'ਵੀਰਵਾਰ', 'ਸ਼ੁੱਕਰਵਾਰ', 'ਸ਼ਨਿੱਚਰਵਾਰ' ], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕ', 'ਸ਼ਨਿੱ' ] ], u, [ [ 'ਜ', 'ਫ਼', 'ਮਾ', 'ਅ', 'ਮ', 'ਜੂ', 'ਜੁ', 'ਅ', 'ਸ', 'ਅ', 'ਨ', 'ਦ' ], [ 'ਜਨ', 'ਫ਼ਰ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾ', 'ਅਗ', 'ਸਤੰ', 'ਅਕਤੂ', 'ਨਵੰ', 'ਦਸੰ' ], [ 'ਜਨਵਰੀ', 'ਫ਼ਰਵਰੀ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈਲ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾਈ', 'ਅਗਸਤ', 'ਸਤੰਬਰ', 'ਅਕਤੂਬਰ', 'ਨਵੰਬਰ', 'ਦਸੰਬਰ' ] ], u, [ ['ਈ.ਪੂ.', 'ਸੰਨ'], ['ਈ. ਪੂ.', 'ਸੰਨ'], ['ਈਸਵੀ ਪੂਰਵ', 'ਈਸਵੀ ਸੰਨ'] ], 0, [0, 0], ['d/M/yy', 'd MMM y', 'd MMMM y', 'EEEE, d MMMM y'], ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'], ['{1}, {0}', u, '{1} {0}', u], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##,##0.###', '#,##,##0%', '¤ #,##,##0.00', '[#E0]'], 'INR', '₹', 'ਭਾਰਤੀ ਰੁਪਇਆ', {'JPY': ['JP¥', '¥'], 'THB': ['฿'], 'TWD': ['NT$'], 'USD': ['US$', '$'], 'XXX': []}, 'ltr', plural, [ [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, u ], [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮ', 'ਰਾਤ' ] ], ['00:00', ['04:00', '12:00'], ['12:00', '16:00'], ['16:00', '21:00'], ['21:00', '04:00']] ] ]; })(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global || typeof window !== 'undefined' && window);
{ if (n === Math.floor(n) && n >= 0 && n <= 1) return 1; return 5; }
identifier_body
pa-Guru.js
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js (function(global) { global.ng = global.ng || {}; global.ng.common = global.ng.common || {}; global.ng.common.locales = global.ng.common.locales || {}; const u = undefined; function plural(n) { if (n === Math.floor(n) && n >= 0 && n <= 1) return 1; return 5; } global.ng.common.locales['pa-guru'] = [ 'pa-Guru', [['ਸ.', 'ਸ਼.'], ['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u], [['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u, u], [ ['ਐ', 'ਸੋ', 'ਮੰ', 'ਬੁੱ', 'ਵੀ', 'ਸ਼ੁੱ', 'ਸ਼'], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗਲ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕਰ', 'ਸ਼ਨਿੱਚਰ' ], [ 'ਐਤਵਾਰ', 'ਸੋਮਵਾਰ', 'ਮੰਗਲਵਾਰ', 'ਬੁੱਧਵਾਰ', 'ਵੀਰਵਾਰ', 'ਸ਼ੁੱਕਰਵਾਰ', 'ਸ਼ਨਿੱਚਰਵਾਰ' ], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕ', 'ਸ਼ਨਿੱ' ] ], u, [ [ 'ਜ', 'ਫ਼', 'ਮਾ', 'ਅ', 'ਮ', 'ਜੂ', 'ਜੁ', 'ਅ', 'ਸ', 'ਅ', 'ਨ', 'ਦ' ], [ 'ਜਨ', 'ਫ਼ਰ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾ', 'ਅਗ', 'ਸਤੰ', 'ਅਕਤੂ', 'ਨਵੰ', 'ਦਸੰ' ], [ 'ਜਨਵਰੀ', 'ਫ਼ਰਵਰੀ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈਲ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾਈ', 'ਅਗਸਤ', 'ਸਤੰਬਰ', 'ਅਕਤੂਬਰ', 'ਨਵੰਬਰ', 'ਦਸੰਬਰ' ] ], u, [ ['ਈ.ਪੂ.', 'ਸੰਨ'], ['ਈ. ਪੂ.', 'ਸੰਨ'], ['ਈਸਵੀ ਪੂਰਵ', 'ਈਸਵੀ ਸੰਨ'] ], 0, [0, 0], ['d/M/yy', 'd MMM y', 'd MMMM y', 'EEEE, d MMMM y'], ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'], ['{1}, {0}', u, '{1} {0}', u], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##,##0.###', '#,##,##0%', '¤ #,##,##0.00', '[#E0]'], 'INR', '₹', 'ਭਾਰਤੀ ਰੁਪਇਆ', {'JPY': ['JP¥', '¥'], 'THB': ['฿'], 'TWD': ['NT$'], 'USD': ['US$', '$'], 'XXX': []}, 'ltr', plural, [ [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, u ], [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, [
] ]; })(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global || typeof window !== 'undefined' && window);
'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮ', 'ਰਾਤ' ] ], ['00:00', ['04:00', '12:00'], ['12:00', '16:00'], ['16:00', '21:00'], ['21:00', '04:00']]
random_line_split
pa-Guru.js
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js (function(global) { global.ng = global.ng || {}; global.ng.common = global.ng.common || {}; global.ng.common.locales = global.ng.common.locales || {}; const u = undefined; function
(n) { if (n === Math.floor(n) && n >= 0 && n <= 1) return 1; return 5; } global.ng.common.locales['pa-guru'] = [ 'pa-Guru', [['ਸ.', 'ਸ਼.'], ['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u], [['ਪੂ.ਦੁ.', 'ਬਾ.ਦੁ.'], u, u], [ ['ਐ', 'ਸੋ', 'ਮੰ', 'ਬੁੱ', 'ਵੀ', 'ਸ਼ੁੱ', 'ਸ਼'], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗਲ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕਰ', 'ਸ਼ਨਿੱਚਰ' ], [ 'ਐਤਵਾਰ', 'ਸੋਮਵਾਰ', 'ਮੰਗਲਵਾਰ', 'ਬੁੱਧਵਾਰ', 'ਵੀਰਵਾਰ', 'ਸ਼ੁੱਕਰਵਾਰ', 'ਸ਼ਨਿੱਚਰਵਾਰ' ], [ 'ਐਤ', 'ਸੋਮ', 'ਮੰਗ', 'ਬੁੱਧ', 'ਵੀਰ', 'ਸ਼ੁੱਕ', 'ਸ਼ਨਿੱ' ] ], u, [ [ 'ਜ', 'ਫ਼', 'ਮਾ', 'ਅ', 'ਮ', 'ਜੂ', 'ਜੁ', 'ਅ', 'ਸ', 'ਅ', 'ਨ', 'ਦ' ], [ 'ਜਨ', 'ਫ਼ਰ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾ', 'ਅਗ', 'ਸਤੰ', 'ਅਕਤੂ', 'ਨਵੰ', 'ਦਸੰ' ], [ 'ਜਨਵਰੀ', 'ਫ਼ਰਵਰੀ', 'ਮਾਰਚ', 'ਅਪ੍ਰੈਲ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾਈ', 'ਅਗਸਤ', 'ਸਤੰਬਰ', 'ਅਕਤੂਬਰ', 'ਨਵੰਬਰ', 'ਦਸੰਬਰ' ] ], u, [ ['ਈ.ਪੂ.', 'ਸੰਨ'], ['ਈ. ਪੂ.', 'ਸੰਨ'], ['ਈਸਵੀ ਪੂਰਵ', 'ਈਸਵੀ ਸੰਨ'] ], 0, [0, 0], ['d/M/yy', 'd MMM y', 'd MMMM y', 'EEEE, d MMMM y'], ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'], ['{1}, {0}', u, '{1} {0}', u], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##,##0.###', '#,##,##0%', '¤ #,##,##0.00', '[#E0]'], 'INR', '₹', 'ਭਾਰਤੀ ਰੁਪਇਆ', {'JPY': ['JP¥', '¥'], 'THB': ['฿'], 'TWD': ['NT$'], 'USD': ['US$', '$'], 'XXX': []}, 'ltr', plural, [ [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, u ], [ [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮੀਂ', 'ਰਾਤੀਂ' ], u, [ 'ਅੱਧੀ ਰਾਤ', 'ਸਵੇਰੇ', 'ਦੁਪਹਿਰੇ', 'ਸ਼ਾਮ', 'ਰਾਤ' ] ], ['00:00', ['04:00', '12:00'], ['12:00', '16:00'], ['16:00', '21:00'], ['21:00', '04:00']] ] ]; })(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global || typeof window !== 'undefined' && window);
plural
identifier_name
filetable.rs
/* * Copyright (C) 2018, Nils Asmussen <[email protected]> * Economic rights: Technische Universitaet Dresden (Germany) * * This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores). * * M3 is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * M3 is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details. */ use cap::Selector; use cell::RefCell; use col::Vec; use core::{fmt, mem}; use com::{VecSink, SliceSource}; use dtu::EpId; use errors::{Code, Error}; use io::Serial; use rc::Rc; use serialize::Sink; use vfs::{File, FileRef, GenericFile}; use vpe::VPE; pub type Fd = usize; const MAX_EPS: usize = 4; pub const MAX_FILES: usize = 32; pub type FileHandle = Rc<RefCell<File>>; struct FileEP { fd: Fd, ep: EpId, } #[derive(Default)] pub struct FileTable { file_ep_victim: usize, file_ep_count: usize, file_eps: [Option<FileEP>; MAX_EPS], files: [Option<FileHandle>; MAX_FILES], } impl FileTable { pub fn add(&mut self, file: FileHandle) -> Result<FileRef, Error> { self.alloc(file.clone()).map(|fd| FileRef::new(file, fd)) } pub fn alloc(&mut self, file: FileHandle) -> Result<Fd, Error> { for fd in 0..MAX_FILES { if self.files[fd].is_none() { self.set(fd, file); return Ok(fd); } } Err(Error::new(Code::NoSpace)) } pub fn get(&self, fd: Fd) -> Option<FileHandle> { match self.files[fd] { Some(ref f) => Some(f.clone()), None => None, } } pub fn set(&mut self, fd: Fd, file: FileHandle) { file.borrow_mut().set_fd(fd); self.files[fd] = Some(file); } pub fn remove(&mut self, fd: Fd) { let find_file_ep = |files: &[Option<FileEP>], fd| -> Option<usize> { for i in 0..MAX_EPS { if let Some(ref fep) = files[i] { if fep.fd == fd { return Some(i); } } } None }; if let Some(ref mut f) = mem::replace(&mut self.files[fd], None) { f.borrow_mut().close(); // remove from multiplexing table if let Some(idx) = find_file_ep(&self.file_eps, fd) { log!(FILES, "FileEPs[{}] = --", idx); self.file_eps[idx] = None; self.file_ep_count -= 1; } } } pub(crate) fn request_ep(&mut self, fd: Fd) -> Result<EpId, Error> { if self.file_ep_count < MAX_EPS { if let Ok(ep) = VPE::cur().alloc_ep() {
log!( FILES, "FileEPs[{}] = EP:{},FD:{}", i, ep, fd ); self.file_eps[i] = Some(FileEP { fd: fd, ep: ep, }); self.file_ep_count += 1; return Ok(ep); } } } } // TODO be smarter here let mut i = self.file_ep_victim; for _ in 0..MAX_EPS { if let Some(ref mut fep) = self.file_eps[i] { log!( FILES, "FileEPs[{}] = EP:{},FD: switching from {} to {}", i, fep.ep, fep.fd, fd ); let file = self.files[fep.fd].as_ref().unwrap(); file.borrow_mut().evict(); fep.fd = fd; self.file_ep_victim = (i + 1) % MAX_EPS; return Ok(fep.ep); } i = (i + 1) % MAX_EPS; } Err(Error::new(Code::NoSpace)) } pub fn collect_caps(&self, vpe: Selector, dels: &mut Vec<Selector>, max_sel: &mut Selector) -> Result<(), Error> { for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { f.borrow().exchange_caps(vpe, dels, max_sel)?; } } Ok(()) } pub fn serialize(&self, s: &mut VecSink) { let count = self.files.iter().filter(|&f| f.is_some()).count(); s.push(&count); for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { let file = f.borrow(); s.push(&fd); s.push(&file.file_type()); file.serialize(s); } } } pub fn unserialize(s: &mut SliceSource) -> FileTable { let mut ft = FileTable::default(); let count = s.pop(); for _ in 0..count { let fd: Fd = s.pop(); let file_type: u8 = s.pop(); ft.set(fd, match file_type { b'F' => GenericFile::unserialize(s), b'S' => Serial::new(), _ => panic!("Unexpected file type {}", file_type), }); } ft } } impl fmt::Debug for FileTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "FileTable[\n")?; for fd in 0..MAX_FILES { if let Some(ref file) = self.files[fd] { write!(f, " {} -> {:?}\n", fd, file)?; } } write!(f, "]") } } pub fn deinit() { let ft = VPE::cur().files(); for fd in 0..MAX_FILES { ft.remove(fd); } }
for i in 0..MAX_EPS { if self.file_eps[i].is_none() {
random_line_split
filetable.rs
/* * Copyright (C) 2018, Nils Asmussen <[email protected]> * Economic rights: Technische Universitaet Dresden (Germany) * * This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores). * * M3 is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * M3 is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details. */ use cap::Selector; use cell::RefCell; use col::Vec; use core::{fmt, mem}; use com::{VecSink, SliceSource}; use dtu::EpId; use errors::{Code, Error}; use io::Serial; use rc::Rc; use serialize::Sink; use vfs::{File, FileRef, GenericFile}; use vpe::VPE; pub type Fd = usize; const MAX_EPS: usize = 4; pub const MAX_FILES: usize = 32; pub type FileHandle = Rc<RefCell<File>>; struct FileEP { fd: Fd, ep: EpId, } #[derive(Default)] pub struct FileTable { file_ep_victim: usize, file_ep_count: usize, file_eps: [Option<FileEP>; MAX_EPS], files: [Option<FileHandle>; MAX_FILES], } impl FileTable { pub fn add(&mut self, file: FileHandle) -> Result<FileRef, Error> { self.alloc(file.clone()).map(|fd| FileRef::new(file, fd)) } pub fn alloc(&mut self, file: FileHandle) -> Result<Fd, Error> { for fd in 0..MAX_FILES { if self.files[fd].is_none() { self.set(fd, file); return Ok(fd); } } Err(Error::new(Code::NoSpace)) } pub fn get(&self, fd: Fd) -> Option<FileHandle> { match self.files[fd] { Some(ref f) => Some(f.clone()), None => None, } } pub fn set(&mut self, fd: Fd, file: FileHandle) { file.borrow_mut().set_fd(fd); self.files[fd] = Some(file); } pub fn remove(&mut self, fd: Fd) { let find_file_ep = |files: &[Option<FileEP>], fd| -> Option<usize> { for i in 0..MAX_EPS { if let Some(ref fep) = files[i] { if fep.fd == fd { return Some(i); } } } None }; if let Some(ref mut f) = mem::replace(&mut self.files[fd], None) { f.borrow_mut().close(); // remove from multiplexing table if let Some(idx) = find_file_ep(&self.file_eps, fd) { log!(FILES, "FileEPs[{}] = --", idx); self.file_eps[idx] = None; self.file_ep_count -= 1; } } } pub(crate) fn request_ep(&mut self, fd: Fd) -> Result<EpId, Error> { if self.file_ep_count < MAX_EPS { if let Ok(ep) = VPE::cur().alloc_ep() { for i in 0..MAX_EPS { if self.file_eps[i].is_none() { log!( FILES, "FileEPs[{}] = EP:{},FD:{}", i, ep, fd ); self.file_eps[i] = Some(FileEP { fd: fd, ep: ep, }); self.file_ep_count += 1; return Ok(ep); } } } } // TODO be smarter here let mut i = self.file_ep_victim; for _ in 0..MAX_EPS { if let Some(ref mut fep) = self.file_eps[i] { log!( FILES, "FileEPs[{}] = EP:{},FD: switching from {} to {}", i, fep.ep, fep.fd, fd ); let file = self.files[fep.fd].as_ref().unwrap(); file.borrow_mut().evict(); fep.fd = fd; self.file_ep_victim = (i + 1) % MAX_EPS; return Ok(fep.ep); } i = (i + 1) % MAX_EPS; } Err(Error::new(Code::NoSpace)) } pub fn collect_caps(&self, vpe: Selector, dels: &mut Vec<Selector>, max_sel: &mut Selector) -> Result<(), Error> { for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { f.borrow().exchange_caps(vpe, dels, max_sel)?; } } Ok(()) } pub fn
(&self, s: &mut VecSink) { let count = self.files.iter().filter(|&f| f.is_some()).count(); s.push(&count); for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { let file = f.borrow(); s.push(&fd); s.push(&file.file_type()); file.serialize(s); } } } pub fn unserialize(s: &mut SliceSource) -> FileTable { let mut ft = FileTable::default(); let count = s.pop(); for _ in 0..count { let fd: Fd = s.pop(); let file_type: u8 = s.pop(); ft.set(fd, match file_type { b'F' => GenericFile::unserialize(s), b'S' => Serial::new(), _ => panic!("Unexpected file type {}", file_type), }); } ft } } impl fmt::Debug for FileTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "FileTable[\n")?; for fd in 0..MAX_FILES { if let Some(ref file) = self.files[fd] { write!(f, " {} -> {:?}\n", fd, file)?; } } write!(f, "]") } } pub fn deinit() { let ft = VPE::cur().files(); for fd in 0..MAX_FILES { ft.remove(fd); } }
serialize
identifier_name
filetable.rs
/* * Copyright (C) 2018, Nils Asmussen <[email protected]> * Economic rights: Technische Universitaet Dresden (Germany) * * This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores). * * M3 is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * M3 is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details. */ use cap::Selector; use cell::RefCell; use col::Vec; use core::{fmt, mem}; use com::{VecSink, SliceSource}; use dtu::EpId; use errors::{Code, Error}; use io::Serial; use rc::Rc; use serialize::Sink; use vfs::{File, FileRef, GenericFile}; use vpe::VPE; pub type Fd = usize; const MAX_EPS: usize = 4; pub const MAX_FILES: usize = 32; pub type FileHandle = Rc<RefCell<File>>; struct FileEP { fd: Fd, ep: EpId, } #[derive(Default)] pub struct FileTable { file_ep_victim: usize, file_ep_count: usize, file_eps: [Option<FileEP>; MAX_EPS], files: [Option<FileHandle>; MAX_FILES], } impl FileTable { pub fn add(&mut self, file: FileHandle) -> Result<FileRef, Error>
pub fn alloc(&mut self, file: FileHandle) -> Result<Fd, Error> { for fd in 0..MAX_FILES { if self.files[fd].is_none() { self.set(fd, file); return Ok(fd); } } Err(Error::new(Code::NoSpace)) } pub fn get(&self, fd: Fd) -> Option<FileHandle> { match self.files[fd] { Some(ref f) => Some(f.clone()), None => None, } } pub fn set(&mut self, fd: Fd, file: FileHandle) { file.borrow_mut().set_fd(fd); self.files[fd] = Some(file); } pub fn remove(&mut self, fd: Fd) { let find_file_ep = |files: &[Option<FileEP>], fd| -> Option<usize> { for i in 0..MAX_EPS { if let Some(ref fep) = files[i] { if fep.fd == fd { return Some(i); } } } None }; if let Some(ref mut f) = mem::replace(&mut self.files[fd], None) { f.borrow_mut().close(); // remove from multiplexing table if let Some(idx) = find_file_ep(&self.file_eps, fd) { log!(FILES, "FileEPs[{}] = --", idx); self.file_eps[idx] = None; self.file_ep_count -= 1; } } } pub(crate) fn request_ep(&mut self, fd: Fd) -> Result<EpId, Error> { if self.file_ep_count < MAX_EPS { if let Ok(ep) = VPE::cur().alloc_ep() { for i in 0..MAX_EPS { if self.file_eps[i].is_none() { log!( FILES, "FileEPs[{}] = EP:{},FD:{}", i, ep, fd ); self.file_eps[i] = Some(FileEP { fd: fd, ep: ep, }); self.file_ep_count += 1; return Ok(ep); } } } } // TODO be smarter here let mut i = self.file_ep_victim; for _ in 0..MAX_EPS { if let Some(ref mut fep) = self.file_eps[i] { log!( FILES, "FileEPs[{}] = EP:{},FD: switching from {} to {}", i, fep.ep, fep.fd, fd ); let file = self.files[fep.fd].as_ref().unwrap(); file.borrow_mut().evict(); fep.fd = fd; self.file_ep_victim = (i + 1) % MAX_EPS; return Ok(fep.ep); } i = (i + 1) % MAX_EPS; } Err(Error::new(Code::NoSpace)) } pub fn collect_caps(&self, vpe: Selector, dels: &mut Vec<Selector>, max_sel: &mut Selector) -> Result<(), Error> { for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { f.borrow().exchange_caps(vpe, dels, max_sel)?; } } Ok(()) } pub fn serialize(&self, s: &mut VecSink) { let count = self.files.iter().filter(|&f| f.is_some()).count(); s.push(&count); for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { let file = f.borrow(); s.push(&fd); s.push(&file.file_type()); file.serialize(s); } } } pub fn unserialize(s: &mut SliceSource) -> FileTable { let mut ft = FileTable::default(); let count = s.pop(); for _ in 0..count { let fd: Fd = s.pop(); let file_type: u8 = s.pop(); ft.set(fd, match file_type { b'F' => GenericFile::unserialize(s), b'S' => Serial::new(), _ => panic!("Unexpected file type {}", file_type), }); } ft } } impl fmt::Debug for FileTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "FileTable[\n")?; for fd in 0..MAX_FILES { if let Some(ref file) = self.files[fd] { write!(f, " {} -> {:?}\n", fd, file)?; } } write!(f, "]") } } pub fn deinit() { let ft = VPE::cur().files(); for fd in 0..MAX_FILES { ft.remove(fd); } }
{ self.alloc(file.clone()).map(|fd| FileRef::new(file, fd)) }
identifier_body
filetable.rs
/* * Copyright (C) 2018, Nils Asmussen <[email protected]> * Economic rights: Technische Universitaet Dresden (Germany) * * This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores). * * M3 is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * M3 is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details. */ use cap::Selector; use cell::RefCell; use col::Vec; use core::{fmt, mem}; use com::{VecSink, SliceSource}; use dtu::EpId; use errors::{Code, Error}; use io::Serial; use rc::Rc; use serialize::Sink; use vfs::{File, FileRef, GenericFile}; use vpe::VPE; pub type Fd = usize; const MAX_EPS: usize = 4; pub const MAX_FILES: usize = 32; pub type FileHandle = Rc<RefCell<File>>; struct FileEP { fd: Fd, ep: EpId, } #[derive(Default)] pub struct FileTable { file_ep_victim: usize, file_ep_count: usize, file_eps: [Option<FileEP>; MAX_EPS], files: [Option<FileHandle>; MAX_FILES], } impl FileTable { pub fn add(&mut self, file: FileHandle) -> Result<FileRef, Error> { self.alloc(file.clone()).map(|fd| FileRef::new(file, fd)) } pub fn alloc(&mut self, file: FileHandle) -> Result<Fd, Error> { for fd in 0..MAX_FILES { if self.files[fd].is_none() { self.set(fd, file); return Ok(fd); } } Err(Error::new(Code::NoSpace)) } pub fn get(&self, fd: Fd) -> Option<FileHandle> { match self.files[fd] { Some(ref f) => Some(f.clone()), None => None, } } pub fn set(&mut self, fd: Fd, file: FileHandle) { file.borrow_mut().set_fd(fd); self.files[fd] = Some(file); } pub fn remove(&mut self, fd: Fd) { let find_file_ep = |files: &[Option<FileEP>], fd| -> Option<usize> { for i in 0..MAX_EPS { if let Some(ref fep) = files[i] { if fep.fd == fd { return Some(i); } } } None }; if let Some(ref mut f) = mem::replace(&mut self.files[fd], None) { f.borrow_mut().close(); // remove from multiplexing table if let Some(idx) = find_file_ep(&self.file_eps, fd) { log!(FILES, "FileEPs[{}] = --", idx); self.file_eps[idx] = None; self.file_ep_count -= 1; } } } pub(crate) fn request_ep(&mut self, fd: Fd) -> Result<EpId, Error> { if self.file_ep_count < MAX_EPS { if let Ok(ep) = VPE::cur().alloc_ep()
} // TODO be smarter here let mut i = self.file_ep_victim; for _ in 0..MAX_EPS { if let Some(ref mut fep) = self.file_eps[i] { log!( FILES, "FileEPs[{}] = EP:{},FD: switching from {} to {}", i, fep.ep, fep.fd, fd ); let file = self.files[fep.fd].as_ref().unwrap(); file.borrow_mut().evict(); fep.fd = fd; self.file_ep_victim = (i + 1) % MAX_EPS; return Ok(fep.ep); } i = (i + 1) % MAX_EPS; } Err(Error::new(Code::NoSpace)) } pub fn collect_caps(&self, vpe: Selector, dels: &mut Vec<Selector>, max_sel: &mut Selector) -> Result<(), Error> { for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { f.borrow().exchange_caps(vpe, dels, max_sel)?; } } Ok(()) } pub fn serialize(&self, s: &mut VecSink) { let count = self.files.iter().filter(|&f| f.is_some()).count(); s.push(&count); for fd in 0..MAX_FILES { if let Some(ref f) = self.files[fd] { let file = f.borrow(); s.push(&fd); s.push(&file.file_type()); file.serialize(s); } } } pub fn unserialize(s: &mut SliceSource) -> FileTable { let mut ft = FileTable::default(); let count = s.pop(); for _ in 0..count { let fd: Fd = s.pop(); let file_type: u8 = s.pop(); ft.set(fd, match file_type { b'F' => GenericFile::unserialize(s), b'S' => Serial::new(), _ => panic!("Unexpected file type {}", file_type), }); } ft } } impl fmt::Debug for FileTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "FileTable[\n")?; for fd in 0..MAX_FILES { if let Some(ref file) = self.files[fd] { write!(f, " {} -> {:?}\n", fd, file)?; } } write!(f, "]") } } pub fn deinit() { let ft = VPE::cur().files(); for fd in 0..MAX_FILES { ft.remove(fd); } }
{ for i in 0..MAX_EPS { if self.file_eps[i].is_none() { log!( FILES, "FileEPs[{}] = EP:{},FD:{}", i, ep, fd ); self.file_eps[i] = Some(FileEP { fd: fd, ep: ep, }); self.file_ep_count += 1; return Ok(ep); } } }
conditional_block
system_settings.py
from django.core.management.base import BaseCommand from dojo.models import System_Settings class Command(BaseCommand):
help = 'Updates product grade calculation' def handle(self, *args, **options): code = """def grade_product(crit, high, med, low): health=100 if crit > 0: health = 40 health = health - ((crit - 1) * 5) if high > 0: if health == 100: health = 60 health = health - ((high - 1) * 3) if med > 0: if health == 100: health = 80 health = health - ((med - 1) * 2) if low > 0: if health == 100: health = 95 health = health - low if health < 5: health = 5 return health """ system_settings = System_Settings.objects.get(id=1) system_settings.product_grade = code system_settings.save()
identifier_body
system_settings.py
from django.core.management.base import BaseCommand from dojo.models import System_Settings class Command(BaseCommand): help = 'Updates product grade calculation' def
(self, *args, **options): code = """def grade_product(crit, high, med, low): health=100 if crit > 0: health = 40 health = health - ((crit - 1) * 5) if high > 0: if health == 100: health = 60 health = health - ((high - 1) * 3) if med > 0: if health == 100: health = 80 health = health - ((med - 1) * 2) if low > 0: if health == 100: health = 95 health = health - low if health < 5: health = 5 return health """ system_settings = System_Settings.objects.get(id=1) system_settings.product_grade = code system_settings.save()
handle
identifier_name
system_settings.py
from django.core.management.base import BaseCommand from dojo.models import System_Settings class Command(BaseCommand): help = 'Updates product grade calculation' def handle(self, *args, **options): code = """def grade_product(crit, high, med, low): health=100 if crit > 0: health = 40 health = health - ((crit - 1) * 5) if high > 0: if health == 100: health = 60 health = health - ((high - 1) * 3) if med > 0: if health == 100:
health = health - low if health < 5: health = 5 return health """ system_settings = System_Settings.objects.get(id=1) system_settings.product_grade = code system_settings.save()
health = 80 health = health - ((med - 1) * 2) if low > 0: if health == 100: health = 95
random_line_split
mysql-tests.ts
import fs = require('fs'); import mysql = require('mysql'); import stream = require('stream'); /// Connections let connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); connection.connect(); connection.query('SELECT 1 + 1 AS solution', (err, rows, fields) => { if (err) throw err; console.log('The solution is: ', rows[0].solution); }); connection.end(err => { }); connection = mysql.createConnection({ host: 'example.org', user: 'bob', password: 'secret' }); connection.connect(err => { if (err) { console.error(`error connecting: ${err.stack}`); return; } console.log(`connected as id ${connection.threadId}`); }); connection.query('SELECT 1', (err, rows) => { // connected! (unless `err` is set) }); connection = mysql.createConnection({ host: 'localhost', ssl: 'Amazon RDS' }); connection = mysql.createConnection({ host: 'localhost', ssl: { ca: '' } }); connection = mysql.createConnection({ host: 'localhost', ssl: { // DO NOT DO THIS // set up your ca correctly to trust the connection rejectUnauthorized: false } }); connection.end(err => { // The connection is terminated now }); connection.destroy(); connection.changeUser({user: 'john'}, err => { if (err) console.error('SHOULD BE ERROR'); }); connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); const userId = 'some user provided value'; let sql = `SELECT * FROM users WHERE id = ${connection.escape(userId)}`; connection.query(sql, (err, results) => { // ... }); connection.query('SELECT * FROM users WHERE id = ?', [userId], (err, results) => { // ... }); const post = {id: 1, title: 'Hello MySQL'}; const queryx = connection.query('INSERT INTO posts SET ?', post, (err, result) => { // Neat! }); console.log(queryx.sql); // INSERT INTO posts SET `id` = 1, `title` = 'Hello MySQL' const queryStr = `SELECT * FROM posts WHERE title=${mysql.escape("Hello MySQL")}`; console.log(queryStr); // SELECT * FROM posts WHERE title='Hello MySQL' let sorter = 'date'; sql = `SELECT * FROM posts ORDER BY ${connection.escapeId(sorter)}`; connection.query(sql, (err, results) => { // ... }); sorter = 'date'; sql = `SELECT * FROM posts ORDER BY ${connection.escapeId('posts.' + sorter)}`; connection.query(sql, (err, results) => { // ... }); const userIdNum = 1; const columns = ['username', 'email']; const queryy = connection.query('SELECT ?? FROM ?? WHERE id = ?', [columns, 'users', userIdNum], (err, results) => { // ... }); console.log(queryy.sql); // SELECT `username`, `email` FROM `users` WHERE id = 1 sql = "SELECT * FROM ?? WHERE ?? = ?"; const inserts = ['users', 'id', userId]; sql = mysql.format(sql, inserts); connection.config.queryFormat = function(query, values) { if (!values) return query; return query.replace(/\:(\w+)/g, (txt: string, key: string) => { if (values.hasOwnProperty(key)) { return this.escape(values[key]); } return txt; }); }; connection.query("UPDATE posts SET title = :title", {title: "Hello MySQL"}); const s: stream.Readable = connection.query("UPDATE posts SET title = :title", {title: "Hello MySQL"}).stream({highWaterMark: 5}); connection.query('INSERT INTO posts SET ?', {title: 'test'}, (err, result) => { if (err) throw err; console.log(result.insertId); }); connection.query('DELETE FROM posts WHERE title = "wrong"', (err, result) => { if (err) throw err; console.log(`deleted ${result.affectedRows} rows`); }); connection.query('UPDATE posts SET ...', (err, result) => { if (err) throw err; console.log(`changed ${result.changedRows} rows`); }); connection.destroy(); connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); connection.connect(err => { if (err) throw err; console.log(`connected as id ${connection.threadId}`); }); connection.ping(err => { if (err) throw err; console.log('Ping was successful'); }); /// Pools const poolConfig = { connectionLimit: 10, host: 'example.org', user: 'bob', password: 'secret' }; let pool = mysql.createPool(poolConfig); pool.query('SELECT 1 + 1 AS solution', (err, rows, fields) => { if (err) throw err; console.log('The solution is: ', rows[0].solution); }); pool = mysql.createPool({ host: 'example.org', user: 'bob', password: 'secret' }); pool.getConnection((err, connection) => { // connected! (unless `err` is set) }); pool.on('connection', connection => { connection.query('SET SESSION auto_increment_increment=1'); }); pool.getConnection((err, connection) => { // Use the connection connection.query('SELECT something FROM sometable', (err, rows) => { // And done with the connection. connection.release(); // Don't use the connection here, it has been returned to the pool. }); }); /// PoolClusters // create const poolCluster = mysql.createPoolCluster(); poolCluster.add(poolConfig); // anonymous group poolCluster.add('MASTER', poolConfig); poolCluster.add('SLAVE1', poolConfig); poolCluster.add('SLAVE2', poolConfig); // Target Group : ALL(anonymous, MASTER, SLAVE1-2), Selector : round-robin(default) poolCluster.getConnection((err, connection) => { }); // Target Group : MASTER, Selector : round-robin poolCluster.getConnection('MASTER', (err, connection) => { }); // Target Group : SLAVE1-2, Selector : order // If can't connect to SLAVE1, return SLAVE2. (remove SLAVE1 in the cluster) poolCluster.on('remove', nodeId => { console.log(`REMOVED NODE : ${nodeId}`); // nodeId = SLAVE1 }); poolCluster.getConnection('SLAVE*', 'ORDER', (err, connection) => { }); // of namespace : of(pattern, selector) poolCluster.of('*').getConnection((err, connection) => { }); pool = poolCluster.of('SLAVE*', 'RANDOM'); pool.getConnection((err, connection) => { }); pool.getConnection((err, connection) => { }); const poolClusterWithOptions = mysql.createPoolCluster({ canRetry: true, removeNodeErrorCount: 3, restoreNodeTimeout: 1000, defaultSelector: 'RR' }); // destroy poolCluster.end(); /// Queries const queryF = connection.query('SELECT * FROM posts'); queryF .on('error', err => { // Handle error, an 'end' event will be emitted after this as well }) .on('fields', fields => { // the field packets for the rows to follow })
connection.pause(); const processRow = (row: any, cb: () => void) => { cb(); }; processRow(row, () => { connection.resume(); }); }) .on('end', () => { // all rows have been received }); const writable = fs.createWriteStream('file.txt'); connection.query('SELECT * FROM posts') .stream({highWaterMark: 5}) .pipe(writable); connection = mysql.createConnection({multipleStatements: true}); connection.query('SELECT 1; SELECT 2', (err, results) => { if (err) throw err; // `results` is an array with one element for every statement in the query: console.log(results[0]); // [{1: 1}] console.log(results[1]); // [{2: 2}] }); const queryH = connection.query('SELECT 1; SELECT 2'); queryH .on('fields', (fields, index) => { // the fields for the result rows that follow }) .on('result', (row, index) => { // index refers to the statement this result belongs to (starts at 0) }); const options = {sql: '...', nestTables: true}; connection.query(options, (err, results) => { /* results will be an array like this now: [{ table1: { fieldA: '...', fieldB: '...', }, table2: { fieldA: '...', fieldB: '...', }, }, ...] */ }); connection.beginTransaction(err => { const title = 'title'; if (err) { throw err; } connection.query('INSERT INTO posts SET title=?', title, (err, result) => { if (err) { connection.rollback(() => { throw err; }); } const log = `Post ${result.insertId} added`; connection.query('INSERT INTO log SET data=?', log, (err, result) => { if (err) { connection.rollback(() => { throw err; }); } connection.commit(err => { if (err) { connection.rollback(() => { throw err; }); } console.log('success!'); }); }); }); }); // Kill query after 60s connection.query({sql: 'SELECT COUNT(*) AS count FROM big_table', timeout: 60000}, (err, rows: any) => { if (err && err.code === 'PROTOCOL_SEQUENCE_TIMEOUT') { throw new Error('too long to count table rows!'); } if (err) { throw err; } console.log(`${rows[0].count} rows`); }); try { connection = mysql.createConnection({ port: 8943, // set wrong port and uncomment throw }); // throw new Error('error not thrown') } catch (err) { console.error('SHOULD BE WRONG PORT ERROR: ', err); } connection.connect(err => { console.log(err.code); // 'ECONNREFUSED' console.log(err.fatal); // true }); connection.query('SELECT 1', err => { console.log(err.code); // 'ECONNREFUSED' console.log(err.fatal); // true }); connection.query('USE name_of_db_that_does_not_exist', (err: mysql.MysqlError, rows: any) => { console.log(err.code); // 'ER_BAD_DB_ERROR' }); connection.query('SELECT 1', (err, rows) => { console.log(err); // null console.log(rows.length); // 1 }); connection.on('error', err => { console.log(err.code); // 'ER_BAD_DB_ERROR' }); connection.query('USE name_of_db_that_does_not_exist'); // I am Chuck Norris: connection.on('error', () => { }); connection = mysql.createConnection({typeCast: false}); const query1 = connection.query({sql: '...', typeCast: false}, (err: Error, results: any) => { }); connection.query({ sql: '...', typeCast: (field, next: () => void) => { if (field.type === 'TINY' && field.length === 1) { return (field.string() === '1'); // 1 = true, 0 = false } next(); } }); connection.query({sql: '...', values: ['test']}, (err: Error, results: any) => { }); connection = mysql.createConnection("mysql://localhost/test?flags=-FOUND_ROWS"); connection = mysql.createConnection({debug: true}); connection = mysql.createConnection({debug: ['ComQueryPacket', 'RowDataPacket']}); connection = mysql.createConnection({dateStrings: ['DATE']}); connection = mysql.createConnection({dateStrings: true});
.on('result', row => { // Pausing the connnection is useful if your processing involves I/O
random_line_split
mysql-tests.ts
import fs = require('fs'); import mysql = require('mysql'); import stream = require('stream'); /// Connections let connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); connection.connect(); connection.query('SELECT 1 + 1 AS solution', (err, rows, fields) => { if (err) throw err; console.log('The solution is: ', rows[0].solution); }); connection.end(err => { }); connection = mysql.createConnection({ host: 'example.org', user: 'bob', password: 'secret' }); connection.connect(err => { if (err) { console.error(`error connecting: ${err.stack}`); return; } console.log(`connected as id ${connection.threadId}`); }); connection.query('SELECT 1', (err, rows) => { // connected! (unless `err` is set) }); connection = mysql.createConnection({ host: 'localhost', ssl: 'Amazon RDS' }); connection = mysql.createConnection({ host: 'localhost', ssl: { ca: '' } }); connection = mysql.createConnection({ host: 'localhost', ssl: { // DO NOT DO THIS // set up your ca correctly to trust the connection rejectUnauthorized: false } }); connection.end(err => { // The connection is terminated now }); connection.destroy(); connection.changeUser({user: 'john'}, err => { if (err) console.error('SHOULD BE ERROR'); }); connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); const userId = 'some user provided value'; let sql = `SELECT * FROM users WHERE id = ${connection.escape(userId)}`; connection.query(sql, (err, results) => { // ... }); connection.query('SELECT * FROM users WHERE id = ?', [userId], (err, results) => { // ... }); const post = {id: 1, title: 'Hello MySQL'}; const queryx = connection.query('INSERT INTO posts SET ?', post, (err, result) => { // Neat! }); console.log(queryx.sql); // INSERT INTO posts SET `id` = 1, `title` = 'Hello MySQL' const queryStr = `SELECT * FROM posts WHERE title=${mysql.escape("Hello MySQL")}`; console.log(queryStr); // SELECT * FROM posts WHERE title='Hello MySQL' let sorter = 'date'; sql = `SELECT * FROM posts ORDER BY ${connection.escapeId(sorter)}`; connection.query(sql, (err, results) => { // ... }); sorter = 'date'; sql = `SELECT * FROM posts ORDER BY ${connection.escapeId('posts.' + sorter)}`; connection.query(sql, (err, results) => { // ... }); const userIdNum = 1; const columns = ['username', 'email']; const queryy = connection.query('SELECT ?? FROM ?? WHERE id = ?', [columns, 'users', userIdNum], (err, results) => { // ... }); console.log(queryy.sql); // SELECT `username`, `email` FROM `users` WHERE id = 1 sql = "SELECT * FROM ?? WHERE ?? = ?"; const inserts = ['users', 'id', userId]; sql = mysql.format(sql, inserts); connection.config.queryFormat = function(query, values) { if (!values) return query; return query.replace(/\:(\w+)/g, (txt: string, key: string) => { if (values.hasOwnProperty(key))
return txt; }); }; connection.query("UPDATE posts SET title = :title", {title: "Hello MySQL"}); const s: stream.Readable = connection.query("UPDATE posts SET title = :title", {title: "Hello MySQL"}).stream({highWaterMark: 5}); connection.query('INSERT INTO posts SET ?', {title: 'test'}, (err, result) => { if (err) throw err; console.log(result.insertId); }); connection.query('DELETE FROM posts WHERE title = "wrong"', (err, result) => { if (err) throw err; console.log(`deleted ${result.affectedRows} rows`); }); connection.query('UPDATE posts SET ...', (err, result) => { if (err) throw err; console.log(`changed ${result.changedRows} rows`); }); connection.destroy(); connection = mysql.createConnection({ host: 'localhost', user: 'me', password: 'test' }); connection.connect(err => { if (err) throw err; console.log(`connected as id ${connection.threadId}`); }); connection.ping(err => { if (err) throw err; console.log('Ping was successful'); }); /// Pools const poolConfig = { connectionLimit: 10, host: 'example.org', user: 'bob', password: 'secret' }; let pool = mysql.createPool(poolConfig); pool.query('SELECT 1 + 1 AS solution', (err, rows, fields) => { if (err) throw err; console.log('The solution is: ', rows[0].solution); }); pool = mysql.createPool({ host: 'example.org', user: 'bob', password: 'secret' }); pool.getConnection((err, connection) => { // connected! (unless `err` is set) }); pool.on('connection', connection => { connection.query('SET SESSION auto_increment_increment=1'); }); pool.getConnection((err, connection) => { // Use the connection connection.query('SELECT something FROM sometable', (err, rows) => { // And done with the connection. connection.release(); // Don't use the connection here, it has been returned to the pool. }); }); /// PoolClusters // create const poolCluster = mysql.createPoolCluster(); poolCluster.add(poolConfig); // anonymous group poolCluster.add('MASTER', poolConfig); poolCluster.add('SLAVE1', poolConfig); poolCluster.add('SLAVE2', poolConfig); // Target Group : ALL(anonymous, MASTER, SLAVE1-2), Selector : round-robin(default) poolCluster.getConnection((err, connection) => { }); // Target Group : MASTER, Selector : round-robin poolCluster.getConnection('MASTER', (err, connection) => { }); // Target Group : SLAVE1-2, Selector : order // If can't connect to SLAVE1, return SLAVE2. (remove SLAVE1 in the cluster) poolCluster.on('remove', nodeId => { console.log(`REMOVED NODE : ${nodeId}`); // nodeId = SLAVE1 }); poolCluster.getConnection('SLAVE*', 'ORDER', (err, connection) => { }); // of namespace : of(pattern, selector) poolCluster.of('*').getConnection((err, connection) => { }); pool = poolCluster.of('SLAVE*', 'RANDOM'); pool.getConnection((err, connection) => { }); pool.getConnection((err, connection) => { }); const poolClusterWithOptions = mysql.createPoolCluster({ canRetry: true, removeNodeErrorCount: 3, restoreNodeTimeout: 1000, defaultSelector: 'RR' }); // destroy poolCluster.end(); /// Queries const queryF = connection.query('SELECT * FROM posts'); queryF .on('error', err => { // Handle error, an 'end' event will be emitted after this as well }) .on('fields', fields => { // the field packets for the rows to follow }) .on('result', row => { // Pausing the connnection is useful if your processing involves I/O connection.pause(); const processRow = (row: any, cb: () => void) => { cb(); }; processRow(row, () => { connection.resume(); }); }) .on('end', () => { // all rows have been received }); const writable = fs.createWriteStream('file.txt'); connection.query('SELECT * FROM posts') .stream({highWaterMark: 5}) .pipe(writable); connection = mysql.createConnection({multipleStatements: true}); connection.query('SELECT 1; SELECT 2', (err, results) => { if (err) throw err; // `results` is an array with one element for every statement in the query: console.log(results[0]); // [{1: 1}] console.log(results[1]); // [{2: 2}] }); const queryH = connection.query('SELECT 1; SELECT 2'); queryH .on('fields', (fields, index) => { // the fields for the result rows that follow }) .on('result', (row, index) => { // index refers to the statement this result belongs to (starts at 0) }); const options = {sql: '...', nestTables: true}; connection.query(options, (err, results) => { /* results will be an array like this now: [{ table1: { fieldA: '...', fieldB: '...', }, table2: { fieldA: '...', fieldB: '...', }, }, ...] */ }); connection.beginTransaction(err => { const title = 'title'; if (err) { throw err; } connection.query('INSERT INTO posts SET title=?', title, (err, result) => { if (err) { connection.rollback(() => { throw err; }); } const log = `Post ${result.insertId} added`; connection.query('INSERT INTO log SET data=?', log, (err, result) => { if (err) { connection.rollback(() => { throw err; }); } connection.commit(err => { if (err) { connection.rollback(() => { throw err; }); } console.log('success!'); }); }); }); }); // Kill query after 60s connection.query({sql: 'SELECT COUNT(*) AS count FROM big_table', timeout: 60000}, (err, rows: any) => { if (err && err.code === 'PROTOCOL_SEQUENCE_TIMEOUT') { throw new Error('too long to count table rows!'); } if (err) { throw err; } console.log(`${rows[0].count} rows`); }); try { connection = mysql.createConnection({ port: 8943, // set wrong port and uncomment throw }); // throw new Error('error not thrown') } catch (err) { console.error('SHOULD BE WRONG PORT ERROR: ', err); } connection.connect(err => { console.log(err.code); // 'ECONNREFUSED' console.log(err.fatal); // true }); connection.query('SELECT 1', err => { console.log(err.code); // 'ECONNREFUSED' console.log(err.fatal); // true }); connection.query('USE name_of_db_that_does_not_exist', (err: mysql.MysqlError, rows: any) => { console.log(err.code); // 'ER_BAD_DB_ERROR' }); connection.query('SELECT 1', (err, rows) => { console.log(err); // null console.log(rows.length); // 1 }); connection.on('error', err => { console.log(err.code); // 'ER_BAD_DB_ERROR' }); connection.query('USE name_of_db_that_does_not_exist'); // I am Chuck Norris: connection.on('error', () => { }); connection = mysql.createConnection({typeCast: false}); const query1 = connection.query({sql: '...', typeCast: false}, (err: Error, results: any) => { }); connection.query({ sql: '...', typeCast: (field, next: () => void) => { if (field.type === 'TINY' && field.length === 1) { return (field.string() === '1'); // 1 = true, 0 = false } next(); } }); connection.query({sql: '...', values: ['test']}, (err: Error, results: any) => { }); connection = mysql.createConnection("mysql://localhost/test?flags=-FOUND_ROWS"); connection = mysql.createConnection({debug: true}); connection = mysql.createConnection({debug: ['ComQueryPacket', 'RowDataPacket']}); connection = mysql.createConnection({dateStrings: ['DATE']}); connection = mysql.createConnection({dateStrings: true});
{ return this.escape(values[key]); }
conditional_block
test_dauth.py
from nintendo import dauth, switch from anynet import http import pytest CHALLENGE_REQUEST = \ "POST /v6/challenge HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 17\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "key_generation=11" TOKEN_REQUEST = \ "POST /v6/device_auth_token HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 211\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "challenge=vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=&" \ "client_id=8f849b5d34778d8e&ist=false&key_generation=11&" \ "system_version=CusHY#000c0000#C-BynYNPXdQJNBZjx02Hizi8lRUSIKLwPGa5p8EY1uo=&" \ "mac=xRB_6mgnNqrnF9DRsEpYMg" @pytest.mark.anyio async def test_dauth(): async def handler(client, request): if request.path == "/v6/challenge": assert request.encode().decode() == CHALLENGE_REQUEST response = http.HTTPResponse(200) response.json = { "challenge": "vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=", "data": "dlL7ZBNSLmYo1hUlKYZiUA==" } return response else:
async with http.serve(handler, "127.0.0.1", 12345): keys = switch.KeySet() keys["aes_kek_generation_source"] = bytes.fromhex("485d45ad27c07c7e538c0183f90ee845") keys["master_key_0a"] = bytes.fromhex("37eed242e0f2ce6f8371e783c1a6a0ae") client = dauth.DAuthClient(keys) client.set_url("127.0.0.1:12345") client.set_system_version(1200) client.set_context(None) response = await client.device_token(client.BAAS) token = response["device_auth_token"] assert token == "device token"
assert request.encode().decode() == TOKEN_REQUEST response = http.HTTPResponse(200) response.json = { "device_auth_token": "device token" } return response
conditional_block
test_dauth.py
from nintendo import dauth, switch from anynet import http import pytest CHALLENGE_REQUEST = \ "POST /v6/challenge HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 17\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "key_generation=11" TOKEN_REQUEST = \ "POST /v6/device_auth_token HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 211\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "challenge=vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=&" \ "client_id=8f849b5d34778d8e&ist=false&key_generation=11&" \ "system_version=CusHY#000c0000#C-BynYNPXdQJNBZjx02Hizi8lRUSIKLwPGa5p8EY1uo=&" \ "mac=xRB_6mgnNqrnF9DRsEpYMg" @pytest.mark.anyio
async def test_dauth(): async def handler(client, request): if request.path == "/v6/challenge": assert request.encode().decode() == CHALLENGE_REQUEST response = http.HTTPResponse(200) response.json = { "challenge": "vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=", "data": "dlL7ZBNSLmYo1hUlKYZiUA==" } return response else: assert request.encode().decode() == TOKEN_REQUEST response = http.HTTPResponse(200) response.json = { "device_auth_token": "device token" } return response async with http.serve(handler, "127.0.0.1", 12345): keys = switch.KeySet() keys["aes_kek_generation_source"] = bytes.fromhex("485d45ad27c07c7e538c0183f90ee845") keys["master_key_0a"] = bytes.fromhex("37eed242e0f2ce6f8371e783c1a6a0ae") client = dauth.DAuthClient(keys) client.set_url("127.0.0.1:12345") client.set_system_version(1200) client.set_context(None) response = await client.device_token(client.BAAS) token = response["device_auth_token"] assert token == "device token"
random_line_split
test_dauth.py
from nintendo import dauth, switch from anynet import http import pytest CHALLENGE_REQUEST = \ "POST /v6/challenge HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 17\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "key_generation=11" TOKEN_REQUEST = \ "POST /v6/device_auth_token HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 211\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "challenge=vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=&" \ "client_id=8f849b5d34778d8e&ist=false&key_generation=11&" \ "system_version=CusHY#000c0000#C-BynYNPXdQJNBZjx02Hizi8lRUSIKLwPGa5p8EY1uo=&" \ "mac=xRB_6mgnNqrnF9DRsEpYMg" @pytest.mark.anyio async def
(): async def handler(client, request): if request.path == "/v6/challenge": assert request.encode().decode() == CHALLENGE_REQUEST response = http.HTTPResponse(200) response.json = { "challenge": "vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=", "data": "dlL7ZBNSLmYo1hUlKYZiUA==" } return response else: assert request.encode().decode() == TOKEN_REQUEST response = http.HTTPResponse(200) response.json = { "device_auth_token": "device token" } return response async with http.serve(handler, "127.0.0.1", 12345): keys = switch.KeySet() keys["aes_kek_generation_source"] = bytes.fromhex("485d45ad27c07c7e538c0183f90ee845") keys["master_key_0a"] = bytes.fromhex("37eed242e0f2ce6f8371e783c1a6a0ae") client = dauth.DAuthClient(keys) client.set_url("127.0.0.1:12345") client.set_system_version(1200) client.set_context(None) response = await client.device_token(client.BAAS) token = response["device_auth_token"] assert token == "device token"
test_dauth
identifier_name
test_dauth.py
from nintendo import dauth, switch from anynet import http import pytest CHALLENGE_REQUEST = \ "POST /v6/challenge HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 17\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "key_generation=11" TOKEN_REQUEST = \ "POST /v6/device_auth_token HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 211\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "challenge=vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=&" \ "client_id=8f849b5d34778d8e&ist=false&key_generation=11&" \ "system_version=CusHY#000c0000#C-BynYNPXdQJNBZjx02Hizi8lRUSIKLwPGa5p8EY1uo=&" \ "mac=xRB_6mgnNqrnF9DRsEpYMg" @pytest.mark.anyio async def test_dauth(): async def handler(client, request):
async with http.serve(handler, "127.0.0.1", 12345): keys = switch.KeySet() keys["aes_kek_generation_source"] = bytes.fromhex("485d45ad27c07c7e538c0183f90ee845") keys["master_key_0a"] = bytes.fromhex("37eed242e0f2ce6f8371e783c1a6a0ae") client = dauth.DAuthClient(keys) client.set_url("127.0.0.1:12345") client.set_system_version(1200) client.set_context(None) response = await client.device_token(client.BAAS) token = response["device_auth_token"] assert token == "device token"
if request.path == "/v6/challenge": assert request.encode().decode() == CHALLENGE_REQUEST response = http.HTTPResponse(200) response.json = { "challenge": "vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=", "data": "dlL7ZBNSLmYo1hUlKYZiUA==" } return response else: assert request.encode().decode() == TOKEN_REQUEST response = http.HTTPResponse(200) response.json = { "device_auth_token": "device token" } return response
identifier_body
util.py
#!/usr/bin/env python # encoding: utf-8 import random def
(brightness_offset=1): hex_color = "#%06x" % random.randint(0,0xFFFFFF) """ takes a color like #87c95f and produces a lighter or darker variant """ if len(hex_color) != 7: raise Exception("Passed %s into color_variant(), needs to be in #87c95f format." % hex_color) rgb_hex = [hex_color[x:x+2] for x in [1, 3, 5]] new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex] new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255 # hex() produces "0x88", we want just "88" return [hex_color, "#" + "".join([hex(i)[2:] for i in new_rgb_int])]
color_variant
identifier_name
util.py
#!/usr/bin/env python # encoding: utf-8 import random def color_variant(brightness_offset=1):
hex_color = "#%06x" % random.randint(0,0xFFFFFF) """ takes a color like #87c95f and produces a lighter or darker variant """ if len(hex_color) != 7: raise Exception("Passed %s into color_variant(), needs to be in #87c95f format." % hex_color) rgb_hex = [hex_color[x:x+2] for x in [1, 3, 5]] new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex] new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255 # hex() produces "0x88", we want just "88" return [hex_color, "#" + "".join([hex(i)[2:] for i in new_rgb_int])]
identifier_body
util.py
#!/usr/bin/env python # encoding: utf-8 import random def color_variant(brightness_offset=1): hex_color = "#%06x" % random.randint(0,0xFFFFFF) """ takes a color like #87c95f and produces a lighter or darker variant """ if len(hex_color) != 7:
rgb_hex = [hex_color[x:x+2] for x in [1, 3, 5]] new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex] new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255 # hex() produces "0x88", we want just "88" return [hex_color, "#" + "".join([hex(i)[2:] for i in new_rgb_int])]
raise Exception("Passed %s into color_variant(), needs to be in #87c95f format." % hex_color)
conditional_block
util.py
#!/usr/bin/env python # encoding: utf-8 import random def color_variant(brightness_offset=1): hex_color = "#%06x" % random.randint(0,0xFFFFFF)
new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex] new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255 # hex() produces "0x88", we want just "88" return [hex_color, "#" + "".join([hex(i)[2:] for i in new_rgb_int])]
""" takes a color like #87c95f and produces a lighter or darker variant """ if len(hex_color) != 7: raise Exception("Passed %s into color_variant(), needs to be in #87c95f format." % hex_color) rgb_hex = [hex_color[x:x+2] for x in [1, 3, 5]]
random_line_split
format-violation-test.ts
import { Result } from 'axe-core'; import { module, test } from 'qunit'; import formatViolation from 'ember-a11y-testing/test-support/format-violation'; module('Unit | Utils | formatViolation', function () { test('formats a well-formed violation and relevant html', function (assert) { let violation: Result = { id: 'test', impact: 'critical', help: 'it should be better', helpUrl: 'http://example.com', description: '', tags: [], nodes: [ { target: ['.some-class'], html: '<input type="text">', any: [], all: [], none: [], },
      ],
    };

    let message = formatViolation(violation, [violation.nodes[0].html]);
    let expected = `[critical]: it should be better \nViolated 1 time. Offending nodes are: \n<input type="text">\nhttp://example.com`;
    assert.strictEqual(message, expected);
  });

  test('formats a well-formed violation', function (assert) {
    let violation: Result = {
      id: 'test',
      impact: 'critical',
      help: 'it should be better',
      helpUrl: 'http://example.com',
      description: '',
      tags: [],
      nodes: [],
    };

    let message = formatViolation(violation, []);
    let expected = `[critical]: it should be better \nViolated 1 time.\nhttp://example.com`;
    assert.strictEqual(message, expected);
  });

  test('validates violation parameter structure', function (assert) {
    let violation: Result = {
      id: 'test',
      help: 'it should be better',
      helpUrl: 'http://example.com',
      description: '',
      tags: [],
      nodes: [
        {
          target: ['.some-class'],
          html: '<input type="text">',
          any: [],
          all: [],
          none: [],
        },
      ],
    };

    let expected = /formatViolation called with improper structure of parameter: violation. Required properties: impact, help, helpUrl./;

    assert.throws(function () {
      formatViolation(violation, [violation.nodes[0].html]);
    }, expected);
  });
});
random_line_split
go90.py
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    ExtractorError,
    int_or_none,
    parse_age_limit,
    parse_iso8601,
)


class Go90IE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)' _TEST = { 'url': 'https://www.go90.com/videos/84BUqjLpf9D', 'md5': 'efa7670dbbbf21a7b07b360652b24a32', 'info_dict': { 'id': '84BUqjLpf9D', 'ext': 'mp4', 'title': 'Daily VICE - Inside The Utah Coalition Against Pornography Convention', 'description': 'VICE\'s Karley Sciortino meets with activists who discuss the state\'s strong anti-porn stance. Then, VICE Sports explains NFL contracts.', 'timestamp': 1491868800, 'upload_date': '20170411', 'age_limit': 14, } } def _real_extract(self, url): video_id = self._match_id(url) video_data = self._download_json( 'https://www.go90.com/api/view/items/' + video_id, video_id, headers={ 'Content-Type': 'application/json; charset=utf-8', }, data=b'{"client":"web","device_type":"pc"}') if video_data.get('requires_drm'): raise ExtractorError('This video is DRM protected.', expected=True) main_video_asset = video_data['main_video_asset'] episode_number = int_or_none(video_data.get('episode_number')) series = None season = None season_id = None season_number = None for metadata in video_data.get('__children', {}).get('Item', {}).values(): if metadata.get('type') == 'show': series = metadata.get('title') elif metadata.get('type') == 'season': season = metadata.get('title') season_id = metadata.get('id') season_number = int_or_none(metadata.get('season_number')) title = episode = video_data.get('title') or series if series and series != title: title = '%s - %s' % (series, title) thumbnails = [] formats = [] subtitles = {} for asset in video_data.get('assets'): if asset.get('id') == main_video_asset: for source in asset.get('sources', []): source_location = source.get('location') if not source_location: continue source_type = source.get('type') if source_type == 'hls': m3u8_formats = self._extract_m3u8_formats( source_location, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False) for f in m3u8_formats: mobj = re.search(r'/hls-(\d+)-(\d+)K', f['url']) if mobj: height, tbr = mobj.groups() height = int_or_none(height) f.update({ 'height': f.get('height') or height, 'width': f.get('width') or int_or_none(height / 9.0 * 16.0 if height else None), 'tbr': f.get('tbr') or int_or_none(tbr), }) formats.extend(m3u8_formats) elif source_type == 'dash': formats.extend(self._extract_mpd_formats( source_location, video_id, mpd_id='dash', fatal=False)) else: formats.append({ 'format_id': source.get('name'), 'url': source_location, 'width': int_or_none(source.get('width')), 'height': int_or_none(source.get('height')), 'tbr': int_or_none(source.get('bitrate')), }) for caption in asset.get('caption_metadata', []): caption_url = caption.get('source_url') if not caption_url: continue subtitles.setdefault(caption.get('language', 'en'), []).append({ 'url': caption_url, 'ext': determine_ext(caption_url, 'vtt'), }) elif asset.get('type') == 'image': asset_location = asset.get('location') if not asset_location: continue thumbnails.append({ 'url': asset_location, 'width': int_or_none(asset.get('width')), 'height': int_or_none(asset.get('height')), }) self._sort_formats(formats) return { 'id': video_id, 'title': title, 'formats': formats, 'thumbnails': thumbnails, 'description': video_data.get('short_description'), 'like_count': int_or_none(video_data.get('like_count')), 'timestamp': parse_iso8601(video_data.get('released_at')), 'series': series, 'episode': episode, 'season': season, 'season_id': season_id, 'season_number': season_number, 'episode_number': episode_number, 'subtitles': subtitles, 'age_limit': 
parse_age_limit(video_data.get('rating')), }
identifier_body
go90.py
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    ExtractorError,
    int_or_none,
    parse_age_limit,
    parse_iso8601,
)


class
(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)' _TEST = { 'url': 'https://www.go90.com/videos/84BUqjLpf9D', 'md5': 'efa7670dbbbf21a7b07b360652b24a32', 'info_dict': { 'id': '84BUqjLpf9D', 'ext': 'mp4', 'title': 'Daily VICE - Inside The Utah Coalition Against Pornography Convention', 'description': 'VICE\'s Karley Sciortino meets with activists who discuss the state\'s strong anti-porn stance. Then, VICE Sports explains NFL contracts.', 'timestamp': 1491868800, 'upload_date': '20170411', 'age_limit': 14, } } def _real_extract(self, url): video_id = self._match_id(url) video_data = self._download_json( 'https://www.go90.com/api/view/items/' + video_id, video_id, headers={ 'Content-Type': 'application/json; charset=utf-8', }, data=b'{"client":"web","device_type":"pc"}') if video_data.get('requires_drm'): raise ExtractorError('This video is DRM protected.', expected=True) main_video_asset = video_data['main_video_asset'] episode_number = int_or_none(video_data.get('episode_number')) series = None season = None season_id = None season_number = None for metadata in video_data.get('__children', {}).get('Item', {}).values(): if metadata.get('type') == 'show': series = metadata.get('title') elif metadata.get('type') == 'season': season = metadata.get('title') season_id = metadata.get('id') season_number = int_or_none(metadata.get('season_number')) title = episode = video_data.get('title') or series if series and series != title: title = '%s - %s' % (series, title) thumbnails = [] formats = [] subtitles = {} for asset in video_data.get('assets'): if asset.get('id') == main_video_asset: for source in asset.get('sources', []): source_location = source.get('location') if not source_location: continue source_type = source.get('type') if source_type == 'hls': m3u8_formats = self._extract_m3u8_formats( source_location, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False) for f in m3u8_formats: mobj = re.search(r'/hls-(\d+)-(\d+)K', f['url']) if mobj: height, tbr = mobj.groups() height = int_or_none(height) f.update({ 'height': f.get('height') or height, 'width': f.get('width') or int_or_none(height / 9.0 * 16.0 if height else None), 'tbr': f.get('tbr') or int_or_none(tbr), }) formats.extend(m3u8_formats) elif source_type == 'dash': formats.extend(self._extract_mpd_formats( source_location, video_id, mpd_id='dash', fatal=False)) else: formats.append({ 'format_id': source.get('name'), 'url': source_location, 'width': int_or_none(source.get('width')), 'height': int_or_none(source.get('height')), 'tbr': int_or_none(source.get('bitrate')), }) for caption in asset.get('caption_metadata', []): caption_url = caption.get('source_url') if not caption_url: continue subtitles.setdefault(caption.get('language', 'en'), []).append({ 'url': caption_url, 'ext': determine_ext(caption_url, 'vtt'), }) elif asset.get('type') == 'image': asset_location = asset.get('location') if not asset_location: continue thumbnails.append({ 'url': asset_location, 'width': int_or_none(asset.get('width')), 'height': int_or_none(asset.get('height')), }) self._sort_formats(formats) return { 'id': video_id, 'title': title, 'formats': formats, 'thumbnails': thumbnails, 'description': video_data.get('short_description'), 'like_count': int_or_none(video_data.get('like_count')), 'timestamp': parse_iso8601(video_data.get('released_at')), 'series': series, 'episode': episode, 'season': season, 'season_id': season_id, 'season_number': season_number, 'episode_number': episode_number, 'subtitles': 
subtitles, 'age_limit': parse_age_limit(video_data.get('rating')), }
Go90IE
identifier_name
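Both go90.py records above revolve around the extractor's _VALID_URL pattern, which captures the video id from a go90.com URL. A minimal sketch of that first step in isolation (re.match against the same pattern, roughly what InfoExtractor._match_id does for this extractor):

import re

# The same pattern that appears as _VALID_URL above.
GO90_VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)'


def extract_video_id(url):
    # Return the <id> group, or raise if the URL does not look like a go90 video page.
    mobj = re.match(GO90_VALID_URL, url)
    if mobj is None:
        raise ValueError('not a go90 video URL: %r' % url)
    return mobj.group('id')


# The URL from the extractor's _TEST block should yield its own id.
assert extract_video_id('https://www.go90.com/videos/84BUqjLpf9D') == '84BUqjLpf9D'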
go90.py
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( determine_ext, ExtractorError, int_or_none, parse_age_limit, parse_iso8601, ) class Go90IE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)' _TEST = { 'url': 'https://www.go90.com/videos/84BUqjLpf9D', 'md5': 'efa7670dbbbf21a7b07b360652b24a32', 'info_dict': { 'id': '84BUqjLpf9D', 'ext': 'mp4', 'title': 'Daily VICE - Inside The Utah Coalition Against Pornography Convention', 'description': 'VICE\'s Karley Sciortino meets with activists who discuss the state\'s strong anti-porn stance. Then, VICE Sports explains NFL contracts.', 'timestamp': 1491868800, 'upload_date': '20170411', 'age_limit': 14, } } def _real_extract(self, url): video_id = self._match_id(url) video_data = self._download_json( 'https://www.go90.com/api/view/items/' + video_id, video_id, headers={ 'Content-Type': 'application/json; charset=utf-8', }, data=b'{"client":"web","device_type":"pc"}') if video_data.get('requires_drm'): raise ExtractorError('This video is DRM protected.', expected=True) main_video_asset = video_data['main_video_asset'] episode_number = int_or_none(video_data.get('episode_number')) series = None season = None season_id = None season_number = None for metadata in video_data.get('__children', {}).get('Item', {}).values(): if metadata.get('type') == 'show': series = metadata.get('title') elif metadata.get('type') == 'season': season = metadata.get('title') season_id = metadata.get('id') season_number = int_or_none(metadata.get('season_number')) title = episode = video_data.get('title') or series if series and series != title: title = '%s - %s' % (series, title) thumbnails = [] formats = [] subtitles = {} for asset in video_data.get('assets'): if asset.get('id') == main_video_asset: for source in asset.get('sources', []): source_location = source.get('location') if not source_location: continue source_type = source.get('type') if source_type == 'hls':
                    elif source_type == 'dash':
                        formats.extend(self._extract_mpd_formats(
                            source_location, video_id, mpd_id='dash', fatal=False))
                    else:
                        formats.append({
                            'format_id': source.get('name'),
                            'url': source_location,
                            'width': int_or_none(source.get('width')),
                            'height': int_or_none(source.get('height')),
                            'tbr': int_or_none(source.get('bitrate')),
                        })
                for caption in asset.get('caption_metadata', []):
                    caption_url = caption.get('source_url')
                    if not caption_url:
                        continue
                    subtitles.setdefault(caption.get('language', 'en'), []).append({
                        'url': caption_url,
                        'ext': determine_ext(caption_url, 'vtt'),
                    })
            elif asset.get('type') == 'image':
                asset_location = asset.get('location')
                if not asset_location:
                    continue
                thumbnails.append({
                    'url': asset_location,
                    'width': int_or_none(asset.get('width')),
                    'height': int_or_none(asset.get('height')),
                })
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnails': thumbnails,
            'description': video_data.get('short_description'),
            'like_count': int_or_none(video_data.get('like_count')),
            'timestamp': parse_iso8601(video_data.get('released_at')),
            'series': series,
            'episode': episode,
            'season': season,
            'season_id': season_id,
            'season_number': season_number,
            'episode_number': episode_number,
            'subtitles': subtitles,
            'age_limit': parse_age_limit(video_data.get('rating')),
        }
                        m3u8_formats = self._extract_m3u8_formats(
                            source_location, video_id, 'mp4',
                            'm3u8_native', m3u8_id='hls', fatal=False)
                        for f in m3u8_formats:
                            mobj = re.search(r'/hls-(\d+)-(\d+)K', f['url'])
                            if mobj:
                                height, tbr = mobj.groups()
                                height = int_or_none(height)
                                f.update({
                                    'height': f.get('height') or height,
                                    'width': f.get('width') or int_or_none(height / 9.0 * 16.0 if height else None),
                                    'tbr': f.get('tbr') or int_or_none(tbr),
                                })
                        formats.extend(m3u8_formats)
conditional_block
go90.py
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( determine_ext, ExtractorError, int_or_none, parse_age_limit, parse_iso8601, ) class Go90IE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)' _TEST = { 'url': 'https://www.go90.com/videos/84BUqjLpf9D', 'md5': 'efa7670dbbbf21a7b07b360652b24a32', 'info_dict': { 'id': '84BUqjLpf9D', 'ext': 'mp4', 'title': 'Daily VICE - Inside The Utah Coalition Against Pornography Convention', 'description': 'VICE\'s Karley Sciortino meets with activists who discuss the state\'s strong anti-porn stance. Then, VICE Sports explains NFL contracts.', 'timestamp': 1491868800, 'upload_date': '20170411', 'age_limit': 14, } } def _real_extract(self, url): video_id = self._match_id(url) video_data = self._download_json( 'https://www.go90.com/api/view/items/' + video_id, video_id, headers={ 'Content-Type': 'application/json; charset=utf-8', }, data=b'{"client":"web","device_type":"pc"}') if video_data.get('requires_drm'): raise ExtractorError('This video is DRM protected.', expected=True) main_video_asset = video_data['main_video_asset'] episode_number = int_or_none(video_data.get('episode_number')) series = None season = None season_id = None season_number = None for metadata in video_data.get('__children', {}).get('Item', {}).values(): if metadata.get('type') == 'show': series = metadata.get('title') elif metadata.get('type') == 'season': season = metadata.get('title') season_id = metadata.get('id') season_number = int_or_none(metadata.get('season_number')) title = episode = video_data.get('title') or series if series and series != title: title = '%s - %s' % (series, title) thumbnails = [] formats = [] subtitles = {} for asset in video_data.get('assets'): if asset.get('id') == main_video_asset: for source in asset.get('sources', []): source_location = source.get('location') if not source_location: continue source_type = source.get('type') if source_type == 'hls': m3u8_formats = self._extract_m3u8_formats( source_location, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False) for f in m3u8_formats: mobj = re.search(r'/hls-(\d+)-(\d+)K', f['url']) if mobj: height, tbr = mobj.groups() height = int_or_none(height) f.update({ 'height': f.get('height') or height, 'width': f.get('width') or int_or_none(height / 9.0 * 16.0 if height else None), 'tbr': f.get('tbr') or int_or_none(tbr), }) formats.extend(m3u8_formats) elif source_type == 'dash': formats.extend(self._extract_mpd_formats( source_location, video_id, mpd_id='dash', fatal=False)) else: formats.append({ 'format_id': source.get('name'), 'url': source_location, 'width': int_or_none(source.get('width')), 'height': int_or_none(source.get('height')), 'tbr': int_or_none(source.get('bitrate')), }) for caption in asset.get('caption_metadata', []): caption_url = caption.get('source_url') if not caption_url: continue subtitles.setdefault(caption.get('language', 'en'), []).append({ 'url': caption_url, 'ext': determine_ext(caption_url, 'vtt'), }) elif asset.get('type') == 'image': asset_location = asset.get('location') if not asset_location: continue thumbnails.append({ 'url': asset_location, 'width': int_or_none(asset.get('width')), 'height': int_or_none(asset.get('height')), }) self._sort_formats(formats) return { 'id': video_id, 'title': title, 'formats': formats, 'thumbnails': thumbnails,
            'description': video_data.get('short_description'),
            'like_count': int_or_none(video_data.get('like_count')),
            'timestamp': parse_iso8601(video_data.get('released_at')),
            'series': series,
            'episode': episode,
            'season': season,
            'season_id': season_id,
            'season_number': season_number,
            'episode_number': episode_number,
            'subtitles': subtitles,
            'age_limit': parse_age_limit(video_data.get('rating')),
        }
random_line_split
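The conditional_block record above isolates the HLS branch: when a variant URL contains a '/hls-<height>-<bitrate>K' segment, the extractor back-fills height, a 16:9 width, and tbr for that format. A standalone sketch of that fallback, with a made-up URL for illustration:

import re


def hls_variant_hints(url):
    # Pull (height, width, tbr) hints out of an HLS variant URL, mirroring the
    # regex fallback in the 'hls' branch above; width assumes a 16:9 frame.
    mobj = re.search(r'/hls-(\d+)-(\d+)K', url)
    if not mobj:
        return None, None, None
    height, tbr = (int(g) for g in mobj.groups())
    width = int(height / 9.0 * 16.0)
    return height, width, tbr


# Hypothetical variant URL, not one returned by the go90 API:
assert hls_variant_hints('https://cdn.example.com/hls-720-2000K/index.m3u8') == (720, 1280, 2000)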
volume_axes.py
from traits.api import Bool, Float, Tuple
from tvtk.api import tvtk

from .volume_scene_member import ABCVolumeSceneMember

# Convenience for the trait definitions below
FloatPair = Tuple(Float, Float)


class VolumeAxes(ABCVolumeSceneMember):
    """ An object which builds a CubeAxesActor for a scene containing a Volume. """

    # If True, show the minor tick marks on the CubeAxesActor
    show_axis_minor_ticks = Bool(False)

    # What are the physical value ranges for each axis?
    visible_axis_ranges = Tuple(FloatPair, FloatPair, FloatPair)

    # Which axes should have a scale shown?
    visible_axis_scales = Tuple(Bool, Bool, Bool)

    #--------------------------------------------------------------------------
    # ABCVolumeSceneMember interface
    #--------------------------------------------------------------------------

    def add_actors_to_scene(self, scene_model, volume_actor):
        # Some axes with ticks
    #--------------------------------------------------------------------------
    # Default values
    #--------------------------------------------------------------------------

    def _visible_axis_ranges_default(self):
        return ((0.0, 1.0), (0.0, 1.0), (0.0, 1.0))

    def _visible_axis_scales_default(self):
        return (False, False, False)
        if any(self.visible_axis_scales):
            bounds = volume_actor.bounds
            x_vis, y_vis, z_vis = self.visible_axis_scales
            x_range, y_range, z_range = self.visible_axis_ranges
            cube_axes = tvtk.CubeAxesActor(
                bounds=bounds,
                camera=scene_model.camera,
                tick_location='outside',
                x_title='', x_units='',
                y_title='', y_units='',
                z_title='', z_units='',
                x_axis_visibility=x_vis,
                y_axis_visibility=y_vis,
                z_axis_visibility=z_vis,
                x_axis_range=x_range,
                y_axis_range=y_range,
                z_axis_range=z_range,
                x_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                y_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                z_axis_minor_tick_visibility=self.show_axis_minor_ticks,
            )
            scene_model.renderer.add_actor(cube_axes)
identifier_body
volume_axes.py
from traits.api import Bool, Float, Tuple
from tvtk.api import tvtk

from .volume_scene_member import ABCVolumeSceneMember

# Convenience for the trait definitions below
FloatPair = Tuple(Float, Float)


class VolumeAxes(ABCVolumeSceneMember):
    """ An object which builds a CubeAxesActor for a scene containing a Volume. """

    # If True, show the minor tick marks on the CubeAxesActor
    show_axis_minor_ticks = Bool(False)

    # What are the physical value ranges for each axis?
    visible_axis_ranges = Tuple(FloatPair, FloatPair, FloatPair)

    # Which axes should have a scale shown?
    visible_axis_scales = Tuple(Bool, Bool, Bool)

    #--------------------------------------------------------------------------
    # ABCVolumeSceneMember interface
    #--------------------------------------------------------------------------

    def add_actors_to_scene(self, scene_model, volume_actor):
        # Some axes with ticks
        if any(self.visible_axis_scales):
            bounds = volume_actor.bounds
            x_vis, y_vis, z_vis = self.visible_axis_scales
            x_range, y_range, z_range = self.visible_axis_ranges
            cube_axes = tvtk.CubeAxesActor(
                bounds=bounds,
                camera=scene_model.camera,
                tick_location='outside',
                x_title='', x_units='',
                y_title='', y_units='',
                z_title='', z_units='',
                x_axis_visibility=x_vis,
                y_axis_visibility=y_vis,
                z_axis_visibility=z_vis,
                x_axis_range=x_range,
                y_axis_range=y_range,
                z_axis_range=z_range,
                x_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                y_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                z_axis_minor_tick_visibility=self.show_axis_minor_ticks,
            )
            scene_model.renderer.add_actor(cube_axes)

    #--------------------------------------------------------------------------
    # Default values
    #--------------------------------------------------------------------------

    def _visible_axis_ranges_default(self):
        return ((0.0, 1.0), (0.0, 1.0), (0.0, 1.0))

    def
(self):
        return (False, False, False)
_visible_axis_scales_default
identifier_name
volume_axes.py
from traits.api import Bool, Float, Tuple
from tvtk.api import tvtk

from .volume_scene_member import ABCVolumeSceneMember

# Convenience for the trait definitions below
FloatPair = Tuple(Float, Float)


class VolumeAxes(ABCVolumeSceneMember):
    """ An object which builds a CubeAxesActor for a scene containing a Volume. """

    # If True, show the minor tick marks on the CubeAxesActor
    show_axis_minor_ticks = Bool(False)

    # What are the physical value ranges for each axis?
    visible_axis_ranges = Tuple(FloatPair, FloatPair, FloatPair)

    # Which axes should have a scale shown?
    visible_axis_scales = Tuple(Bool, Bool, Bool)

    #--------------------------------------------------------------------------
    # ABCVolumeSceneMember interface
    #--------------------------------------------------------------------------

    def add_actors_to_scene(self, scene_model, volume_actor):
        # Some axes with ticks
        if any(self.visible_axis_scales):
    #--------------------------------------------------------------------------
    # Default values
    #--------------------------------------------------------------------------

    def _visible_axis_ranges_default(self):
        return ((0.0, 1.0), (0.0, 1.0), (0.0, 1.0))

    def _visible_axis_scales_default(self):
        return (False, False, False)
            bounds = volume_actor.bounds
            x_vis, y_vis, z_vis = self.visible_axis_scales
            x_range, y_range, z_range = self.visible_axis_ranges
            cube_axes = tvtk.CubeAxesActor(
                bounds=bounds,
                camera=scene_model.camera,
                tick_location='outside',
                x_title='', x_units='',
                y_title='', y_units='',
                z_title='', z_units='',
                x_axis_visibility=x_vis,
                y_axis_visibility=y_vis,
                z_axis_visibility=z_vis,
                x_axis_range=x_range,
                y_axis_range=y_range,
                z_axis_range=z_range,
                x_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                y_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                z_axis_minor_tick_visibility=self.show_axis_minor_ticks,
            )
            scene_model.renderer.add_actor(cube_axes)
conditional_block
volume_axes.py
from traits.api import Bool, Float, Tuple
from tvtk.api import tvtk

from .volume_scene_member import ABCVolumeSceneMember

# Convenience for the trait definitions below
FloatPair = Tuple(Float, Float)


class VolumeAxes(ABCVolumeSceneMember):
    """ An object which builds a CubeAxesActor for a scene containing a Volume. """

    # If True, show the minor tick marks on the CubeAxesActor
    show_axis_minor_ticks = Bool(False)

    # What are the physical value ranges for each axis?
    visible_axis_ranges = Tuple(FloatPair, FloatPair, FloatPair)

    # Which axes should have a scale shown?
    visible_axis_scales = Tuple(Bool, Bool, Bool)

    #--------------------------------------------------------------------------
    # ABCVolumeSceneMember interface
    #--------------------------------------------------------------------------

    def add_actors_to_scene(self, scene_model, volume_actor):
        # Some axes with ticks
        if any(self.visible_axis_scales):
            bounds = volume_actor.bounds
            x_vis, y_vis, z_vis = self.visible_axis_scales
            x_range, y_range, z_range = self.visible_axis_ranges
            cube_axes = tvtk.CubeAxesActor(
                bounds=bounds,
                camera=scene_model.camera,
                tick_location='outside',
                x_title='', x_units='',
                y_title='', y_units='',
                z_title='', z_units='',
                x_axis_visibility=x_vis,
                y_axis_visibility=y_vis,
                z_axis_visibility=z_vis,
                x_axis_range=x_range,
                y_axis_range=y_range,
                z_axis_range=z_range,
                x_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                y_axis_minor_tick_visibility=self.show_axis_minor_ticks,
                z_axis_minor_tick_visibility=self.show_axis_minor_ticks,
            )
    #--------------------------------------------------------------------------
    # Default values
    #--------------------------------------------------------------------------

    def _visible_axis_ranges_default(self):
        return ((0.0, 1.0), (0.0, 1.0), (0.0, 1.0))

    def _visible_axis_scales_default(self):
        return (False, False, False)
scene_model.renderer.add_actor(cube_axes)
random_line_split
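The volume_axes.py records above hinge on two things: Traits default-value hooks (the _<trait>_default methods) and a guard that only builds the CubeAxesActor when at least one axis scale is visible. A minimal sketch of just the Traits side, with the TVTK and scene dependencies stripped out (AxesConfig is a hypothetical stand-in, not part of the original module):

from traits.api import Bool, Float, HasTraits, Tuple

# Convenience pair trait, as in the original module.
FloatPair = Tuple(Float, Float)


class AxesConfig(HasTraits):
    # Same trait declarations as VolumeAxes above, minus the scene-facing method.
    show_axis_minor_ticks = Bool(False)
    visible_axis_ranges = Tuple(FloatPair, FloatPair, FloatPair)
    visible_axis_scales = Tuple(Bool, Bool, Bool)

    def _visible_axis_ranges_default(self):
        return ((0.0, 1.0), (0.0, 1.0), (0.0, 1.0))

    def _visible_axis_scales_default(self):
        return (False, False, False)


config = AxesConfig()
# With every scale flag False, the `if any(self.visible_axis_scales):` guard in
# add_actors_to_scene would skip building the CubeAxesActor entirely.
assert not any(config.visible_axis_scales)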