column      type           length / class stats
file_name   large_string   lengths 4 to 140
prefix      large_string   lengths 0 to 39k
suffix      large_string   lengths 0 to 36.1k
middle      large_string   lengths 0 to 29.4k
fim_type    large_string   4 classes
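Each record below lists its values in the column order above: the file_name, then the prefix, suffix, and middle strings (each flattened onto a single line), and finally the fim_type label. The four classes observed in this preview are random_line_split, conditional_block, identifier_name, and identifier_body, and in every record the original file is simply prefix + middle + suffix. The sketch below shows that reassembly plus a generic FIM prompt layout; the helper names and sentinel tokens are illustrative assumptions, not part of the dataset.

```python
# Minimal sketch: reassembling a fill-in-the-middle (FIM) record.
# Assumes each record is available as a plain dict keyed by the five columns;
# function names and sentinel strings are illustrative only.

def reassemble(record):
    """Concatenate prefix + middle + suffix to recover the original file text."""
    return record["prefix"] + record["middle"] + record["suffix"]

def as_fim_prompt(record):
    """Render the record in a generic FIM prompt layout.

    The sentinel tokens are placeholders; real models define their own.
    """
    return (
        "<fim_prefix>" + record["prefix"]
        + "<fim_suffix>" + record["suffix"]
        + "<fim_middle>"  # the model is asked to produce record["middle"]
    )

if __name__ == "__main__":
    example = {
        "file_name": "hello.py",
        "prefix": "def greet(name):\n    ",
        "middle": 'return f"Hello, {name}"',
        "suffix": "\n\nprint(greet('world'))\n",
        "fim_type": "random_line_split",
    }
    assert reassemble(example).startswith("def greet")
    print(as_fim_prompt(example))
```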
benchmarks.rs
/* * Copyright 2020 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at
* distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #[macro_use] extern crate bencher; extern crate flatbuffers; extern crate flexbuffers; mod flatbuffers_benchmarks; mod flexbuffers_benchmarks; #[allow(dead_code, unused_imports)] #[path = "../../include_test/include_test1_generated.rs"] pub mod include_test1_generated; #[allow(dead_code, unused_imports)] #[path = "../../include_test/sub/include_test2_generated.rs"] pub mod include_test2_generated; #[allow(dead_code, unused_imports)] #[path = "../../monster_test_generated.rs"] mod monster_test_generated; pub use monster_test_generated::my_game; benchmark_main!( flatbuffers_benchmarks::benches, flexbuffers_benchmarks::benches );
* * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software
random_line_split
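This record's middle is a run of whole lines from the Apache license header, and its fim_type is random_line_split, which suggests the file was cut at line boundaries chosen at random. The sketch below shows one way such a split could be produced; the function name and the choice of two random cut points are assumptions based only on the label and the shape of these rows, not the dataset's actual generation code.

```python
import random

def random_line_split(source, rng=None):
    """Split source text into prefix/middle/suffix at two random line boundaries.

    Illustrative only: the real dataset's splitting rules are not documented here.
    """
    rng = rng or random.Random()
    lines = source.splitlines(keepends=True)
    if len(lines) < 3:
        return {"prefix": source, "middle": "", "suffix": ""}
    i = rng.randrange(1, len(lines) - 1)  # first line of the middle span
    j = rng.randrange(i, len(lines))      # first line of the suffix
    return {
        "prefix": "".join(lines[:i]),
        "middle": "".join(lines[i:j]),
        "suffix": "".join(lines[j:]),
    }

# The three pieces always concatenate back to the original text.
src = "a = 1\nb = 2\nc = a + b\nprint(c)\n"
parts = random_line_split(src, random.Random(0))
assert parts["prefix"] + parts["middle"] + parts["suffix"] == src
```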
surveyRoutes.js
const _ = require("lodash"); const Path = require("path-parser"); const { URL } = require("url"); const mongoose = require("mongoose"); const requireSignIn = require("../middlewares/requireSignIn"); const requireCredits = require("../middlewares/requireCredits"); const Mailer = require("../services/Mailer"); const surveyTemplate = require("../services/templates/surveyTemplate"); const Survey = mongoose.model("surveys"); module.exports = app => { app.get("/api/surveys", requireSignIn, async (req, res) => { const surveys = await Survey.find({ _user: req.user.id }).select({ recipients: false }); res.send(surveys); }); app.get("/api/surveys/:surveyID/:choice", (req, res) => { res.send("Thank you for your response."); }); app.post("/api/surveys/webhooks", (req, res) => { const parsedURL = new Path("/api/surveys/:surveyID/:choice"); _.chain(req.body) .map(({ email, url }) => { //do not destructure because match can be null const match = parsedURL.test(new URL(url).pathname); if (match) { return { email, surveyID: match.surveyID, choice: match.choice }; } }) .compact() .uniqBy("email", "surveyID") .each(({ surveyID, email, choice }) => { Survey.updateOne( { _id: surveyID, recipients: { $elemMatch: { email: email, responded: false } } }, { $inc: { [choice]: 1 }, $set: { "recipients.$.responded": true }, lastResponded: new Date() } ).exec(); }) .value();
res.send({}); }); app.post("/api/surveys", requireSignIn, requireCredits, async (req, res) => { const { title, subject, body, recipients } = req.body; const survey = new Survey({ title, subject, body, recipients: recipients.split(",").map(email => ({ email: email.trim() })), _user: req.user.id, dateSent: Date.now() }); const mailer = new Mailer(survey, surveyTemplate(survey)); try { await mailer.send(); await survey.save(); req.user.credits -= 1; const user = await req.user.save(); res.send(user); } catch (err) { res.status(422).send(err); } }); };
random_line_split
surveyRoutes.js
const _ = require("lodash"); const Path = require("path-parser"); const { URL } = require("url"); const mongoose = require("mongoose"); const requireSignIn = require("../middlewares/requireSignIn"); const requireCredits = require("../middlewares/requireCredits"); const Mailer = require("../services/Mailer"); const surveyTemplate = require("../services/templates/surveyTemplate"); const Survey = mongoose.model("surveys"); module.exports = app => { app.get("/api/surveys", requireSignIn, async (req, res) => { const surveys = await Survey.find({ _user: req.user.id }).select({ recipients: false }); res.send(surveys); }); app.get("/api/surveys/:surveyID/:choice", (req, res) => { res.send("Thank you for your response."); }); app.post("/api/surveys/webhooks", (req, res) => { const parsedURL = new Path("/api/surveys/:surveyID/:choice"); _.chain(req.body) .map(({ email, url }) => { //do not destructure because match can be null const match = parsedURL.test(new URL(url).pathname); if (match)
}) .compact() .uniqBy("email", "surveyID") .each(({ surveyID, email, choice }) => { Survey.updateOne( { _id: surveyID, recipients: { $elemMatch: { email: email, responded: false } } }, { $inc: { [choice]: 1 }, $set: { "recipients.$.responded": true }, lastResponded: new Date() } ).exec(); }) .value(); res.send({}); }); app.post("/api/surveys", requireSignIn, requireCredits, async (req, res) => { const { title, subject, body, recipients } = req.body; const survey = new Survey({ title, subject, body, recipients: recipients.split(",").map(email => ({ email: email.trim() })), _user: req.user.id, dateSent: Date.now() }); const mailer = new Mailer(survey, surveyTemplate(survey)); try { await mailer.send(); await survey.save(); req.user.credits -= 1; const user = await req.user.save(); res.send(user); } catch (err) { res.status(422).send(err); } }); };
{ return { email, surveyID: match.surveyID, choice: match.choice }; }
conditional_block
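Here the prefix ends with `if (match)` and the middle is the brace-delimited block that follows it, so conditional_block appears to mask the body of a conditional statement. Below is a naive sketch of carving out such a block with a brace counter; it is an illustration under that assumption, not the dataset's extraction code, and it ignores braces inside strings and comments.

```python
def split_on_conditional_block(source):
    """Mask the first brace-delimited block that follows an `if (...)`.

    Naive brace counter; no handling of braces inside string literals or comments.
    """
    at = source.find("if")
    start = source.find("{", at)
    if at == -1 or start == -1:
        raise ValueError("no conditional block found")
    depth = 0
    for i in range(start, len(source)):
        if source[i] == "{":
            depth += 1
        elif source[i] == "}":
            depth -= 1
            if depth == 0:
                end = i + 1
                return {
                    "prefix": source[:start],
                    "middle": source[start:end],
                    "suffix": source[end:],
                }
    raise ValueError("unbalanced braces")

js = "if (match) { return { ok: true }; } else { fail(); }"
parts = split_on_conditional_block(js)
assert parts["middle"] == "{ return { ok: true }; }"
assert parts["prefix"] + parts["middle"] + parts["suffix"] == js
```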
TestGetDatasetMetadataHandler.py
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $ # # Unit testing for WebBrick library functions (Functions.py) # See http://pyunit.sourceforge.net/pyunit.html # import sys, unittest, logging, zipfile, re, StringIO, os, logging, cgi from os.path import normpath, abspath from rdflib import URIRef sys.path.append("..") sys.path.append("../cgi-bin") try: # Running Python 2.5 with simplejson? import simplejson as json except ImportError: import json as json import GetDatasetMetadataHandler, ManifestRDFUtils, SubmitDatasetUtils, TestConfig from MiscLib import TestUtils Logger = logging.getLogger("TestGetDatasetMetadataHandler") class TestGetDatasetMetadataHandler(unittest.TestCase): def setUp(self): return def tearDown(self): return # Tests # Test that the GetMetResponse def testGetDatasetMetadataResponse(self): outputStr = StringIO.StringIO() # Create a manifest file from mocked up form data ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary,TestConfig.ElementUriList, TestConfig.ElementValueList) # Invoke get metatadata submission program, passing faked dataset directory GetDatasetMetadataHandler.getDatasetMetadata(TestConfig.formdata, TestConfig.ManifestName, outputStr) outputStr.seek(0, os.SEEK_SET) firstLine = outputStr.readline() self.assertEqual( firstLine, "Content-type: application/JSON\n", "Expected Metadata as application/JSON") Logger.debug("Output String from output stream: " + outputStr.getvalue()) # Check retrieving metadata metadata = json.load(outputStr) Logger.debug("Metadata Length = "+ repr(len(metadata))) self.assertEquals(len(metadata), 4, "Expected 4 pairs of field-values to be returned") return def
(select="unit"): """ Get test suite select is one of the following: "unit" return suite of unit tests only "component" return suite of unit and component tests "all" return suite of unit, component and integration tests "pending" return suite of pending tests name a single named test to be run """ testdict = { "unit": [ #"testUnits" "testGetDatasetMetadataResponse" ], "component": [ #"testComponents" ], "integration": [ #"testIntegration" ], "pending": [ #"testPending" ] } return TestUtils.getTestSuite(TestGetDatasetMetadataHandler, testdict, select=select) if __name__ == "__main__": #logging.basicConfig(level=logging.DEBUG) TestConfig.setDatasetsBaseDir(".") TestUtils.runTests("TestGetDatasetMetadataHandler.log", getTestSuite, sys.argv)
getTestSuite
identifier_name
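In this record the prefix ends with `def` and the middle is just the name `getTestSuite`, so identifier_name appears to mask the identifier at a definition site. A minimal Python-only sketch of that idea follows; the regex and function name are my own assumptions, and the dataset's Rust and JavaScript examples use different definition syntax.

```python
import re

def split_on_identifier_name(source):
    """Mask the name of the first Python function definition (`def <name>`)."""
    m = re.search(r"\bdef\s+([A-Za-z_]\w*)", source)
    if m is None:
        raise ValueError("no function definition found")
    return {
        "prefix": source[: m.start(1)],
        "middle": m.group(1),
        "suffix": source[m.end(1):],
    }

src = "def getTestSuite(select='unit'):\n    return select\n"
parts = split_on_identifier_name(src)
assert parts["middle"] == "getTestSuite"
assert parts["prefix"] + parts["middle"] + parts["suffix"] == src
```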
TestGetDatasetMetadataHandler.py
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $ # # Unit testing for WebBrick library functions (Functions.py) # See http://pyunit.sourceforge.net/pyunit.html # import sys, unittest, logging, zipfile, re, StringIO, os, logging, cgi from os.path import normpath, abspath from rdflib import URIRef sys.path.append("..") sys.path.append("../cgi-bin") try: # Running Python 2.5 with simplejson? import simplejson as json except ImportError: import json as json import GetDatasetMetadataHandler, ManifestRDFUtils, SubmitDatasetUtils, TestConfig from MiscLib import TestUtils Logger = logging.getLogger("TestGetDatasetMetadataHandler") class TestGetDatasetMetadataHandler(unittest.TestCase): def setUp(self): return def tearDown(self): return # Tests # Test that the GetMetResponse def testGetDatasetMetadataResponse(self): outputStr = StringIO.StringIO() # Create a manifest file from mocked up form data ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary,TestConfig.ElementUriList, TestConfig.ElementValueList) # Invoke get metatadata submission program, passing faked dataset directory GetDatasetMetadataHandler.getDatasetMetadata(TestConfig.formdata, TestConfig.ManifestName, outputStr) outputStr.seek(0, os.SEEK_SET) firstLine = outputStr.readline() self.assertEqual( firstLine, "Content-type: application/JSON\n", "Expected Metadata as application/JSON") Logger.debug("Output String from output stream: " + outputStr.getvalue()) # Check retrieving metadata metadata = json.load(outputStr) Logger.debug("Metadata Length = "+ repr(len(metadata))) self.assertEquals(len(metadata), 4, "Expected 4 pairs of field-values to be returned") return def getTestSuite(select="unit"): """ Get test suite select is one of the following: "unit" return suite of unit tests only "component" return suite of unit and component tests "all" return suite of unit, component and integration tests "pending" return suite of pending tests name a single named test to be run """ testdict = { "unit": [ #"testUnits" "testGetDatasetMetadataResponse" ], "component": [ #"testComponents" ], "integration": [ #"testIntegration" ], "pending": [ #"testPending" ] } return TestUtils.getTestSuite(TestGetDatasetMetadataHandler, testdict, select=select) if __name__ == "__main__": #logging.basicConfig(level=logging.DEBUG)
TestConfig.setDatasetsBaseDir(".") TestUtils.runTests("TestGetDatasetMetadataHandler.log", getTestSuite, sys.argv)
conditional_block
TestGetDatasetMetadataHandler.py
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $ # # Unit testing for WebBrick library functions (Functions.py) # See http://pyunit.sourceforge.net/pyunit.html # import sys, unittest, logging, zipfile, re, StringIO, os, logging, cgi from os.path import normpath, abspath from rdflib import URIRef sys.path.append("..") sys.path.append("../cgi-bin") try: # Running Python 2.5 with simplejson?
import GetDatasetMetadataHandler, ManifestRDFUtils, SubmitDatasetUtils, TestConfig from MiscLib import TestUtils Logger = logging.getLogger("TestGetDatasetMetadataHandler") class TestGetDatasetMetadataHandler(unittest.TestCase): def setUp(self): return def tearDown(self): return # Tests # Test that the GetMetResponse def testGetDatasetMetadataResponse(self): outputStr = StringIO.StringIO() # Create a manifest file from mocked up form data ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary,TestConfig.ElementUriList, TestConfig.ElementValueList) # Invoke get metatadata submission program, passing faked dataset directory GetDatasetMetadataHandler.getDatasetMetadata(TestConfig.formdata, TestConfig.ManifestName, outputStr) outputStr.seek(0, os.SEEK_SET) firstLine = outputStr.readline() self.assertEqual( firstLine, "Content-type: application/JSON\n", "Expected Metadata as application/JSON") Logger.debug("Output String from output stream: " + outputStr.getvalue()) # Check retrieving metadata metadata = json.load(outputStr) Logger.debug("Metadata Length = "+ repr(len(metadata))) self.assertEquals(len(metadata), 4, "Expected 4 pairs of field-values to be returned") return def getTestSuite(select="unit"): """ Get test suite select is one of the following: "unit" return suite of unit tests only "component" return suite of unit and component tests "all" return suite of unit, component and integration tests "pending" return suite of pending tests name a single named test to be run """ testdict = { "unit": [ #"testUnits" "testGetDatasetMetadataResponse" ], "component": [ #"testComponents" ], "integration": [ #"testIntegration" ], "pending": [ #"testPending" ] } return TestUtils.getTestSuite(TestGetDatasetMetadataHandler, testdict, select=select) if __name__ == "__main__": #logging.basicConfig(level=logging.DEBUG) TestConfig.setDatasetsBaseDir(".") TestUtils.runTests("TestGetDatasetMetadataHandler.log", getTestSuite, sys.argv)
import simplejson as json except ImportError: import json as json
random_line_split
TestGetDatasetMetadataHandler.py
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $ # # Unit testing for WebBrick library functions (Functions.py) # See http://pyunit.sourceforge.net/pyunit.html # import sys, unittest, logging, zipfile, re, StringIO, os, logging, cgi from os.path import normpath, abspath from rdflib import URIRef sys.path.append("..") sys.path.append("../cgi-bin") try: # Running Python 2.5 with simplejson? import simplejson as json except ImportError: import json as json import GetDatasetMetadataHandler, ManifestRDFUtils, SubmitDatasetUtils, TestConfig from MiscLib import TestUtils Logger = logging.getLogger("TestGetDatasetMetadataHandler") class TestGetDatasetMetadataHandler(unittest.TestCase): def setUp(self):
def tearDown(self): return # Tests # Test that the GetMetResponse def testGetDatasetMetadataResponse(self): outputStr = StringIO.StringIO() # Create a manifest file from mocked up form data ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary,TestConfig.ElementUriList, TestConfig.ElementValueList) # Invoke get metatadata submission program, passing faked dataset directory GetDatasetMetadataHandler.getDatasetMetadata(TestConfig.formdata, TestConfig.ManifestName, outputStr) outputStr.seek(0, os.SEEK_SET) firstLine = outputStr.readline() self.assertEqual( firstLine, "Content-type: application/JSON\n", "Expected Metadata as application/JSON") Logger.debug("Output String from output stream: " + outputStr.getvalue()) # Check retrieving metadata metadata = json.load(outputStr) Logger.debug("Metadata Length = "+ repr(len(metadata))) self.assertEquals(len(metadata), 4, "Expected 4 pairs of field-values to be returned") return def getTestSuite(select="unit"): """ Get test suite select is one of the following: "unit" return suite of unit tests only "component" return suite of unit and component tests "all" return suite of unit, component and integration tests "pending" return suite of pending tests name a single named test to be run """ testdict = { "unit": [ #"testUnits" "testGetDatasetMetadataResponse" ], "component": [ #"testComponents" ], "integration": [ #"testIntegration" ], "pending": [ #"testPending" ] } return TestUtils.getTestSuite(TestGetDatasetMetadataHandler, testdict, select=select) if __name__ == "__main__": #logging.basicConfig(level=logging.DEBUG) TestConfig.setDatasetsBaseDir(".") TestUtils.runTests("TestGetDatasetMetadataHandler.log", getTestSuite, sys.argv)
return
identifier_body
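For this record the prefix ends at `def setUp(self):` and the middle is the body (`return`), so identifier_body appears to mask the body of a definition, mirroring the brace-delimited function bodies masked in the Rust and JavaScript records further down. The sketch below uses a simple indentation heuristic for Python sources only; it is an assumed illustration of the idea, not the dataset's actual logic.

```python
def split_on_identifier_body(source):
    """Mask the indented body of the first `def` in a Python source string."""
    lines = source.splitlines(keepends=True)
    for idx, line in enumerate(lines):
        if line.lstrip().startswith("def ") and line.rstrip().endswith(":"):
            header_indent = len(line) - len(line.lstrip())
            body_end = idx + 1
            # The body runs until the next non-blank line at or above the header's indent.
            while body_end < len(lines):
                nxt = lines[body_end]
                if nxt.strip() and (len(nxt) - len(nxt.lstrip())) <= header_indent:
                    break
                body_end += 1
            return {
                "prefix": "".join(lines[: idx + 1]),
                "middle": "".join(lines[idx + 1 : body_end]),
                "suffix": "".join(lines[body_end:]),
            }
    raise ValueError("no function definition found")

src = "class T:\n    def setUp(self):\n        return\n\n    def tearDown(self):\n        return\n"
parts = split_on_identifier_body(src)
assert parts["middle"] == "        return\n\n"
assert parts["prefix"] + parts["middle"] + parts["suffix"] == src
```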
lib.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A collection of numeric types and traits for Rust. //! //! This includes new types for big integers, rationals, and complex numbers, //! new traits for generic programming on numeric properties like `Integer`, //! and generic range iterators. //! //! ## Example //! //! This example uses the BigRational type and [Newton's method][newt] to //! approximate a square root to arbitrary precision: //! //! ``` //! extern crate num; //! # #[cfg(all(feature = "bigint", feature="rational"))] //! # mod test { //! //! use num::FromPrimitive; //! use num::bigint::BigInt; //! use num::rational::{Ratio, BigRational}; //! //! # pub //! fn approx_sqrt(number: u64, iterations: usize) -> BigRational { //! let start: Ratio<BigInt> = Ratio::from_integer(FromPrimitive::from_u64(number).unwrap()); //! let mut approx = start.clone(); //! //! for _ in 0..iterations { //! approx = (&approx + (&start / &approx)) / //! Ratio::from_integer(FromPrimitive::from_u64(2).unwrap()); //! } //! //! approx //! } //! # } //! # #[cfg(not(all(feature = "bigint", feature="rational")))] //! # mod test { pub fn approx_sqrt(n: u64, _: usize) -> u64 { n } } //! # use test::approx_sqrt; //! //! fn main() { //! println!("{}", approx_sqrt(10, 4)); // prints 4057691201/1283082416 //! } //! //! ``` //! //! [newt]: https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method #![doc(html_logo_url = "http://rust-num.github.io/num/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://rust-num.github.io/num/favicon.ico", html_root_url = "http://rust-num.github.io/num/", html_playground_url = "http://play.rust-lang.org/")] #[cfg(feature = "rustc-serialize")] extern crate rustc_serialize; // Some of the tests of non-RNG-based functionality are randomized using the // RNG-based functionality, so the RNG-based functionality needs to be enabled // for tests. #[cfg(any(feature = "rand", all(feature = "bigint", test)))] extern crate rand; #[cfg(feature = "bigint")] pub use bigint::{BigInt, BigUint}; #[cfg(feature = "rational")] pub use rational::Rational; #[cfg(all(feature = "rational", feature="bigint"))] pub use rational::BigRational; #[cfg(feature = "complex")] pub use complex::Complex; pub use integer::Integer; pub use iter::{range, range_inclusive, range_step, range_step_inclusive}; pub use traits::{Num, Zero, One, Signed, Unsigned, Bounded, Saturating, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, PrimInt, Float, ToPrimitive, FromPrimitive, NumCast, cast}; #[cfg(test)] use std::hash; use std::ops::{Mul}; #[cfg(feature = "bigint")] pub mod bigint; pub mod complex; pub mod integer; pub mod iter; pub mod traits; #[cfg(feature = "rational")] pub mod rational; /// Returns the additive identity, `0`. #[inline(always)] pub fn zero<T: Zero>() -> T { Zero::zero() } /// Returns the multiplicative identity, `1`. #[inline(always)] pub fn one<T: One>() -> T { One::one() } /// Computes the absolute value. /// /// For `f32` and `f64`, `NaN` will be returned if the number is `NaN` /// /// For signed integers, `::MIN` will be returned if the number is `::MIN`. 
#[inline(always)] pub fn abs<T: Signed>(value: T) -> T { value.abs() } /// The positive difference of two numbers. /// /// Returns zero if `x` is less than or equal to `y`, otherwise the difference /// between `x` and `y` is returned. #[inline(always)] pub fn abs_sub<T: Signed>(x: T, y: T) -> T { x.abs_sub(&y) } /// Returns the sign of the number. /// /// For `f32` and `f64`: /// /// * `1.0` if the number is positive, `+0.0` or `INFINITY` /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY` /// * `NaN` if the number is `NaN` /// /// For signed integers: /// /// * `0` if the number is zero /// * `1` if the number is positive /// * `-1` if the number is negative #[inline(always)] pub fn signum<T: Signed>(value: T) -> T { value.signum() } /// Raises a value to the power of exp, using exponentiation by squaring. /// /// # Example /// /// ```rust /// use num; /// /// assert_eq!(num::pow(2i8, 4), 16); /// assert_eq!(num::pow(6u8, 3), 216); /// ``` #[inline] pub fn pow<T: Clone + One + Mul<T, Output = T>>(mut base: T, mut exp: usize) -> T { if exp == 0 { return T::one() } while exp & 1 == 0 { base = base.clone() * base; exp >>= 1; } if exp == 1 { return base } let mut acc = base.clone(); while exp > 1 { exp >>= 1; base = base.clone() * base; if exp & 1 == 1 { acc = acc * base.clone(); } } acc } #[cfg(test)] fn hash<T: hash::Hash>(x: &T) -> u64
{ use std::hash::Hasher; let mut hasher = hash::SipHasher::new(); x.hash(&mut hasher); hasher.finish() }
identifier_body
lib.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A collection of numeric types and traits for Rust. //! //! This includes new types for big integers, rationals, and complex numbers, //! new traits for generic programming on numeric properties like `Integer`, //! and generic range iterators. //! //! ## Example //!
//! ``` //! extern crate num; //! # #[cfg(all(feature = "bigint", feature="rational"))] //! # mod test { //! //! use num::FromPrimitive; //! use num::bigint::BigInt; //! use num::rational::{Ratio, BigRational}; //! //! # pub //! fn approx_sqrt(number: u64, iterations: usize) -> BigRational { //! let start: Ratio<BigInt> = Ratio::from_integer(FromPrimitive::from_u64(number).unwrap()); //! let mut approx = start.clone(); //! //! for _ in 0..iterations { //! approx = (&approx + (&start / &approx)) / //! Ratio::from_integer(FromPrimitive::from_u64(2).unwrap()); //! } //! //! approx //! } //! # } //! # #[cfg(not(all(feature = "bigint", feature="rational")))] //! # mod test { pub fn approx_sqrt(n: u64, _: usize) -> u64 { n } } //! # use test::approx_sqrt; //! //! fn main() { //! println!("{}", approx_sqrt(10, 4)); // prints 4057691201/1283082416 //! } //! //! ``` //! //! [newt]: https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method #![doc(html_logo_url = "http://rust-num.github.io/num/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://rust-num.github.io/num/favicon.ico", html_root_url = "http://rust-num.github.io/num/", html_playground_url = "http://play.rust-lang.org/")] #[cfg(feature = "rustc-serialize")] extern crate rustc_serialize; // Some of the tests of non-RNG-based functionality are randomized using the // RNG-based functionality, so the RNG-based functionality needs to be enabled // for tests. #[cfg(any(feature = "rand", all(feature = "bigint", test)))] extern crate rand; #[cfg(feature = "bigint")] pub use bigint::{BigInt, BigUint}; #[cfg(feature = "rational")] pub use rational::Rational; #[cfg(all(feature = "rational", feature="bigint"))] pub use rational::BigRational; #[cfg(feature = "complex")] pub use complex::Complex; pub use integer::Integer; pub use iter::{range, range_inclusive, range_step, range_step_inclusive}; pub use traits::{Num, Zero, One, Signed, Unsigned, Bounded, Saturating, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, PrimInt, Float, ToPrimitive, FromPrimitive, NumCast, cast}; #[cfg(test)] use std::hash; use std::ops::{Mul}; #[cfg(feature = "bigint")] pub mod bigint; pub mod complex; pub mod integer; pub mod iter; pub mod traits; #[cfg(feature = "rational")] pub mod rational; /// Returns the additive identity, `0`. #[inline(always)] pub fn zero<T: Zero>() -> T { Zero::zero() } /// Returns the multiplicative identity, `1`. #[inline(always)] pub fn one<T: One>() -> T { One::one() } /// Computes the absolute value. /// /// For `f32` and `f64`, `NaN` will be returned if the number is `NaN` /// /// For signed integers, `::MIN` will be returned if the number is `::MIN`. #[inline(always)] pub fn abs<T: Signed>(value: T) -> T { value.abs() } /// The positive difference of two numbers. /// /// Returns zero if `x` is less than or equal to `y`, otherwise the difference /// between `x` and `y` is returned. #[inline(always)] pub fn abs_sub<T: Signed>(x: T, y: T) -> T { x.abs_sub(&y) } /// Returns the sign of the number. /// /// For `f32` and `f64`: /// /// * `1.0` if the number is positive, `+0.0` or `INFINITY` /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY` /// * `NaN` if the number is `NaN` /// /// For signed integers: /// /// * `0` if the number is zero /// * `1` if the number is positive /// * `-1` if the number is negative #[inline(always)] pub fn signum<T: Signed>(value: T) -> T { value.signum() } /// Raises a value to the power of exp, using exponentiation by squaring. 
/// /// # Example /// /// ```rust /// use num; /// /// assert_eq!(num::pow(2i8, 4), 16); /// assert_eq!(num::pow(6u8, 3), 216); /// ``` #[inline] pub fn pow<T: Clone + One + Mul<T, Output = T>>(mut base: T, mut exp: usize) -> T { if exp == 0 { return T::one() } while exp & 1 == 0 { base = base.clone() * base; exp >>= 1; } if exp == 1 { return base } let mut acc = base.clone(); while exp > 1 { exp >>= 1; base = base.clone() * base; if exp & 1 == 1 { acc = acc * base.clone(); } } acc } #[cfg(test)] fn hash<T: hash::Hash>(x: &T) -> u64 { use std::hash::Hasher; let mut hasher = hash::SipHasher::new(); x.hash(&mut hasher); hasher.finish() }
//! This example uses the BigRational type and [Newton's method][newt] to //! approximate a square root to arbitrary precision: //!
random_line_split
lib.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A collection of numeric types and traits for Rust. //! //! This includes new types for big integers, rationals, and complex numbers, //! new traits for generic programming on numeric properties like `Integer`, //! and generic range iterators. //! //! ## Example //! //! This example uses the BigRational type and [Newton's method][newt] to //! approximate a square root to arbitrary precision: //! //! ``` //! extern crate num; //! # #[cfg(all(feature = "bigint", feature="rational"))] //! # mod test { //! //! use num::FromPrimitive; //! use num::bigint::BigInt; //! use num::rational::{Ratio, BigRational}; //! //! # pub //! fn approx_sqrt(number: u64, iterations: usize) -> BigRational { //! let start: Ratio<BigInt> = Ratio::from_integer(FromPrimitive::from_u64(number).unwrap()); //! let mut approx = start.clone(); //! //! for _ in 0..iterations { //! approx = (&approx + (&start / &approx)) / //! Ratio::from_integer(FromPrimitive::from_u64(2).unwrap()); //! } //! //! approx //! } //! # } //! # #[cfg(not(all(feature = "bigint", feature="rational")))] //! # mod test { pub fn approx_sqrt(n: u64, _: usize) -> u64 { n } } //! # use test::approx_sqrt; //! //! fn main() { //! println!("{}", approx_sqrt(10, 4)); // prints 4057691201/1283082416 //! } //! //! ``` //! //! [newt]: https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method #![doc(html_logo_url = "http://rust-num.github.io/num/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://rust-num.github.io/num/favicon.ico", html_root_url = "http://rust-num.github.io/num/", html_playground_url = "http://play.rust-lang.org/")] #[cfg(feature = "rustc-serialize")] extern crate rustc_serialize; // Some of the tests of non-RNG-based functionality are randomized using the // RNG-based functionality, so the RNG-based functionality needs to be enabled // for tests. #[cfg(any(feature = "rand", all(feature = "bigint", test)))] extern crate rand; #[cfg(feature = "bigint")] pub use bigint::{BigInt, BigUint}; #[cfg(feature = "rational")] pub use rational::Rational; #[cfg(all(feature = "rational", feature="bigint"))] pub use rational::BigRational; #[cfg(feature = "complex")] pub use complex::Complex; pub use integer::Integer; pub use iter::{range, range_inclusive, range_step, range_step_inclusive}; pub use traits::{Num, Zero, One, Signed, Unsigned, Bounded, Saturating, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, PrimInt, Float, ToPrimitive, FromPrimitive, NumCast, cast}; #[cfg(test)] use std::hash; use std::ops::{Mul}; #[cfg(feature = "bigint")] pub mod bigint; pub mod complex; pub mod integer; pub mod iter; pub mod traits; #[cfg(feature = "rational")] pub mod rational; /// Returns the additive identity, `0`. #[inline(always)] pub fn zero<T: Zero>() -> T { Zero::zero() } /// Returns the multiplicative identity, `1`. #[inline(always)] pub fn one<T: One>() -> T { One::one() } /// Computes the absolute value. /// /// For `f32` and `f64`, `NaN` will be returned if the number is `NaN` /// /// For signed integers, `::MIN` will be returned if the number is `::MIN`. 
#[inline(always)] pub fn abs<T: Signed>(value: T) -> T { value.abs() } /// The positive difference of two numbers. /// /// Returns zero if `x` is less than or equal to `y`, otherwise the difference /// between `x` and `y` is returned. #[inline(always)] pub fn abs_sub<T: Signed>(x: T, y: T) -> T { x.abs_sub(&y) } /// Returns the sign of the number. /// /// For `f32` and `f64`: /// /// * `1.0` if the number is positive, `+0.0` or `INFINITY` /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY` /// * `NaN` if the number is `NaN` /// /// For signed integers: /// /// * `0` if the number is zero /// * `1` if the number is positive /// * `-1` if the number is negative #[inline(always)] pub fn signum<T: Signed>(value: T) -> T { value.signum() } /// Raises a value to the power of exp, using exponentiation by squaring. /// /// # Example /// /// ```rust /// use num; /// /// assert_eq!(num::pow(2i8, 4), 16); /// assert_eq!(num::pow(6u8, 3), 216); /// ``` #[inline] pub fn pow<T: Clone + One + Mul<T, Output = T>>(mut base: T, mut exp: usize) -> T { if exp == 0
while exp & 1 == 0 { base = base.clone() * base; exp >>= 1; } if exp == 1 { return base } let mut acc = base.clone(); while exp > 1 { exp >>= 1; base = base.clone() * base; if exp & 1 == 1 { acc = acc * base.clone(); } } acc } #[cfg(test)] fn hash<T: hash::Hash>(x: &T) -> u64 { use std::hash::Hasher; let mut hasher = hash::SipHasher::new(); x.hash(&mut hasher); hasher.finish() }
{ return T::one() }
conditional_block
lib.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A collection of numeric types and traits for Rust. //! //! This includes new types for big integers, rationals, and complex numbers, //! new traits for generic programming on numeric properties like `Integer`, //! and generic range iterators. //! //! ## Example //! //! This example uses the BigRational type and [Newton's method][newt] to //! approximate a square root to arbitrary precision: //! //! ``` //! extern crate num; //! # #[cfg(all(feature = "bigint", feature="rational"))] //! # mod test { //! //! use num::FromPrimitive; //! use num::bigint::BigInt; //! use num::rational::{Ratio, BigRational}; //! //! # pub //! fn approx_sqrt(number: u64, iterations: usize) -> BigRational { //! let start: Ratio<BigInt> = Ratio::from_integer(FromPrimitive::from_u64(number).unwrap()); //! let mut approx = start.clone(); //! //! for _ in 0..iterations { //! approx = (&approx + (&start / &approx)) / //! Ratio::from_integer(FromPrimitive::from_u64(2).unwrap()); //! } //! //! approx //! } //! # } //! # #[cfg(not(all(feature = "bigint", feature="rational")))] //! # mod test { pub fn approx_sqrt(n: u64, _: usize) -> u64 { n } } //! # use test::approx_sqrt; //! //! fn main() { //! println!("{}", approx_sqrt(10, 4)); // prints 4057691201/1283082416 //! } //! //! ``` //! //! [newt]: https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method #![doc(html_logo_url = "http://rust-num.github.io/num/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://rust-num.github.io/num/favicon.ico", html_root_url = "http://rust-num.github.io/num/", html_playground_url = "http://play.rust-lang.org/")] #[cfg(feature = "rustc-serialize")] extern crate rustc_serialize; // Some of the tests of non-RNG-based functionality are randomized using the // RNG-based functionality, so the RNG-based functionality needs to be enabled // for tests. #[cfg(any(feature = "rand", all(feature = "bigint", test)))] extern crate rand; #[cfg(feature = "bigint")] pub use bigint::{BigInt, BigUint}; #[cfg(feature = "rational")] pub use rational::Rational; #[cfg(all(feature = "rational", feature="bigint"))] pub use rational::BigRational; #[cfg(feature = "complex")] pub use complex::Complex; pub use integer::Integer; pub use iter::{range, range_inclusive, range_step, range_step_inclusive}; pub use traits::{Num, Zero, One, Signed, Unsigned, Bounded, Saturating, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, PrimInt, Float, ToPrimitive, FromPrimitive, NumCast, cast}; #[cfg(test)] use std::hash; use std::ops::{Mul}; #[cfg(feature = "bigint")] pub mod bigint; pub mod complex; pub mod integer; pub mod iter; pub mod traits; #[cfg(feature = "rational")] pub mod rational; /// Returns the additive identity, `0`. #[inline(always)] pub fn zero<T: Zero>() -> T { Zero::zero() } /// Returns the multiplicative identity, `1`. #[inline(always)] pub fn
<T: One>() -> T { One::one() } /// Computes the absolute value. /// /// For `f32` and `f64`, `NaN` will be returned if the number is `NaN` /// /// For signed integers, `::MIN` will be returned if the number is `::MIN`. #[inline(always)] pub fn abs<T: Signed>(value: T) -> T { value.abs() } /// The positive difference of two numbers. /// /// Returns zero if `x` is less than or equal to `y`, otherwise the difference /// between `x` and `y` is returned. #[inline(always)] pub fn abs_sub<T: Signed>(x: T, y: T) -> T { x.abs_sub(&y) } /// Returns the sign of the number. /// /// For `f32` and `f64`: /// /// * `1.0` if the number is positive, `+0.0` or `INFINITY` /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY` /// * `NaN` if the number is `NaN` /// /// For signed integers: /// /// * `0` if the number is zero /// * `1` if the number is positive /// * `-1` if the number is negative #[inline(always)] pub fn signum<T: Signed>(value: T) -> T { value.signum() } /// Raises a value to the power of exp, using exponentiation by squaring. /// /// # Example /// /// ```rust /// use num; /// /// assert_eq!(num::pow(2i8, 4), 16); /// assert_eq!(num::pow(6u8, 3), 216); /// ``` #[inline] pub fn pow<T: Clone + One + Mul<T, Output = T>>(mut base: T, mut exp: usize) -> T { if exp == 0 { return T::one() } while exp & 1 == 0 { base = base.clone() * base; exp >>= 1; } if exp == 1 { return base } let mut acc = base.clone(); while exp > 1 { exp >>= 1; base = base.clone() * base; if exp & 1 == 1 { acc = acc * base.clone(); } } acc } #[cfg(test)] fn hash<T: hash::Hash>(x: &T) -> u64 { use std::hash::Hasher; let mut hasher = hash::SipHasher::new(); x.hash(&mut hasher); hasher.finish() }
one
identifier_name
rison.js
// Uses CommonJS, AMD or browser globals to create a module. // Based on: https://github.com/umdjs/umd/blob/master/commonjsStrict.js (function (root, factory) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['exports'], factory); } else if (typeof exports === 'object') { // CommonJS factory(exports); } else {
var rison = exports; ////////////////////////////////////////////////// // // the stringifier is based on // http://json.org/json.js as of 2006-04-28 from json.org // the parser is based on // http://osteele.com/sources/openlaszlo/json // if (typeof rison == 'undefined') window.rison = {}; /** * rules for an uri encoder that is more tolerant than encodeURIComponent * * encodeURIComponent passes ~!*()-_.' * * we also allow ,:@$/ * */ rison.uri_ok = { // ok in url paths and in form query args '~': true, '!': true, '*': true, '(': true, ')': true, '-': true, '_': true, '.': true, ',': true, ':': true, '@': true, '$': true, "'": true, '/': true }; /* * we divide the uri-safe glyphs into three sets * <rison> - used by rison ' ! : ( ) , * <reserved> - not common in strings, reserved * @ $ & ; = * * we define <identifier> as anything that's not forbidden */ /** * punctuation characters that are legal inside ids. */ // this var isn't actually used //rison.idchar_punctuation = "_-./~"; (function () { var l = []; for (var hi = 0; hi < 16; hi++) { for (var lo = 0; lo < 16; lo++) { if (hi+lo == 0) continue; var c = String.fromCharCode(hi*16 + lo); if (! /\w|[-_.\/~]/.test(c)) l.push('\\u00' + hi.toString(16) + lo.toString(16)); } } /** * characters that are illegal inside ids. * <rison> and <reserved> classes are illegal in ids. * */ rison.not_idchar = l.join('') //idcrx = new RegExp('[' + rison.not_idchar + ']'); //console.log('NOT', (idcrx.test(' ')) ); })(); //rison.not_idchar = " \t\r\n\"<>[]{}'!=:(),*@$;&"; rison.not_idchar = " '!:(),*@$"; /** * characters that are illegal as the start of an id * this is so ids can't look like numbers. */ rison.not_idstart = "-0123456789"; (function () { var idrx = '[^' + rison.not_idstart + rison.not_idchar + '][^' + rison.not_idchar + ']*'; rison.id_ok = new RegExp('^' + idrx + '$'); // regexp to find the end of an id when parsing // g flag on the regexp is necessary for iterative regexp.exec() rison.next_id = new RegExp(idrx, 'g'); })(); /** * this is like encodeURIComponent() but quotes fewer characters. * * @see rison.uri_ok * * encodeURIComponent passes ~!*()-_.' * rison.quote also passes ,:@$/ * and quotes " " as "+" instead of "%20" */ rison.quote = function(x) { if (/^[-A-Za-z0-9~!*()_.',:@$\/]*$/.test(x)) return x; return encodeURIComponent(x) .replace('%2C', ',', 'g') .replace('%3A', ':', 'g') .replace('%40', '@', 'g') .replace('%24', '$', 'g') .replace('%2F', '/', 'g') .replace('%20', '+', 'g'); }; // // based on json.js 2006-04-28 from json.org // license: http://www.json.org/license.html // // hacked by nix for use in uris. // (function () { var sq = { // url-ok but quoted in strings "'": true, '!': true }, s = { array: function (x) { var a = ['!('], b, f, i, l = x.length, v; for (i = 0; i < l; i += 1) { v = x[i]; f = s[typeof v]; if (f) { v = f(v); if (typeof v == 'string') { if (b) { a[a.length] = ','; } a[a.length] = v; b = true; } } } a[a.length] = ')'; return a.join(''); }, 'boolean': function (x) { if (x) return '!t'; return '!f' }, 'null': function (x) { return "!n"; }, number: function (x) { if (!isFinite(x)) return '!n'; // strip '+' out of exponent, '-' is ok though return String(x).replace(/\+/,''); }, object: function (x) { if (x) { if (x instanceof Array) { return s.array(x); } // WILL: will this work on non-Firefox browsers? 
if (typeof x.__prototype__ === 'object' && typeof x.__prototype__.encode_rison !== 'undefined') return x.encode_rison(); var a = ['('], b, f, i, v, ki, ks=[]; for (i in x) ks[ks.length] = i; ks.sort(); for (ki = 0; ki < ks.length; ki++) { i = ks[ki]; v = x[i]; f = s[typeof v]; if (f) { v = f(v); if (typeof v == 'string') { if (b) { a[a.length] = ','; } a.push(s.string(i), ':', v); b = true; } } } a[a.length] = ')'; return a.join(''); } return '!n'; }, string: function (x) { if (x == '') return "''"; if (rison.id_ok.test(x)) return x; x = x.replace(/(['!])/g, function(a, b) { if (sq[b]) return '!'+b; return b; }); return "'" + x + "'"; }, undefined: function (x) { throw new Error("rison can't encode the undefined value"); } }; /** * rison-encode a javascript structure * * implemementation based on Douglas Crockford's json.js: * http://json.org/json.js as of 2006-04-28 from json.org * */ rison.encode = function (v) { return s[typeof v](v); }; /** * rison-encode a javascript object without surrounding parens * */ rison.encode_object = function (v) { if (typeof v != 'object' || v === null || v instanceof Array) throw new Error("rison.encode_object expects an object argument"); var r = s[typeof v](v); return r.substring(1, r.length-1); }; /** * rison-encode a javascript array without surrounding parens * */ rison.encode_array = function (v) { if (!(v instanceof Array)) throw new Error("rison.encode_array expects an array argument"); var r = s[typeof v](v); return r.substring(2, r.length-1); }; /** * rison-encode and uri-encode a javascript structure * */ rison.encode_uri = function (v) { return rison.quote(s[typeof v](v)); }; })(); // // based on openlaszlo-json and hacked by nix for use in uris. // // Author: Oliver Steele // Copyright: Copyright 2006 Oliver Steele. All rights reserved. // Homepage: http://osteele.com/sources/openlaszlo/json // License: MIT License. // Version: 1.0 /** * parse a rison string into a javascript structure. * * this is the simplest decoder entry point. * * based on Oliver Steele's OpenLaszlo-JSON * http://osteele.com/sources/openlaszlo/json */ rison.decode = function(r) { var errcb = function(e) { throw Error('rison decoder error: ' + e); }; var p = new rison.parser(errcb); return p.parse(r); }; /** * parse an o-rison string into a javascript structure. * * this simply adds parentheses around the string before parsing. */ rison.decode_object = function(r) { return rison.decode('('+r+')'); }; /** * parse an a-rison string into a javascript structure. * * this simply adds array markup around the string before parsing. */ rison.decode_array = function(r) { return rison.decode('!('+r+')'); }; /** * construct a new parser object for reuse. * * @constructor * @class A Rison parser class. You should probably * use rison.decode instead. * @see rison.decode */ rison.parser = function (errcb) { this.errorHandler = errcb; }; /** * a string containing acceptable whitespace characters. * by default the rison decoder tolerates no whitespace. * to accept whitespace set rison.parser.WHITESPACE = " \t\n\r\f"; */ rison.parser.WHITESPACE = ""; // expose this as-is? rison.parser.prototype.setOptions = function (options) { if (options['errorHandler']) this.errorHandler = options.errorHandler; }; /** * parse a rison string into a javascript structure. 
*/ rison.parser.prototype.parse = function (str) { this.string = str; this.index = 0; this.message = null; var value = this.readValue(); if (!this.message && this.next()) value = this.error("unable to parse string as rison: '" + rison.encode(str) + "'"); if (this.message && this.errorHandler) this.errorHandler(this.message, this.index); return value; }; rison.parser.prototype.error = function (message) { if (typeof(console) != 'undefined') console.log('rison parser error: ', message); this.message = message; return undefined; } rison.parser.prototype.readValue = function () { var c = this.next(); var fn = c && this.table[c]; if (fn) return fn.apply(this); // fell through table, parse as an id var s = this.string; var i = this.index-1; // Regexp.lastIndex may not work right in IE before 5.5? // g flag on the regexp is also necessary rison.next_id.lastIndex = i; var m = rison.next_id.exec(s); // console.log('matched id', i, r.lastIndex); if (m.length > 0) { var id = m[0]; this.index = i+id.length; return id; // a string } if (c) return this.error("invalid character: '" + c + "'"); return this.error("empty expression"); } rison.parser.parse_array = function (parser) { var ar = []; var c; while ((c = parser.next()) != ')') { if (!c) return parser.error("unmatched '!('"); if (ar.length) { if (c != ',') parser.error("missing ','"); } else if (c == ',') { return parser.error("extra ','"); } else --parser.index; var n = parser.readValue(); if (typeof n == "undefined") return undefined; ar.push(n); } return ar; }; rison.parser.bangs = { t: true, f: false, n: null, '(': rison.parser.parse_array } rison.parser.prototype.table = { '!': function () { var s = this.string; var c = s.charAt(this.index++); if (!c) return this.error('"!" at end of input'); var x = rison.parser.bangs[c]; if (typeof(x) == 'function') { return x.call(null, this); } else if (typeof(x) == 'undefined') { return this.error('unknown literal: "!' + c + '"'); } return x; }, '(': function () { var o = {}; var c; var count = 0; while ((c = this.next()) != ')') { if (count) { if (c != ',') this.error("missing ','"); } else if (c == ',') { return this.error("extra ','"); } else --this.index; var k = this.readValue(); if (typeof k == "undefined") return undefined; if (this.next() != ':') return this.error("missing ':'"); var v = this.readValue(); if (typeof v == "undefined") return undefined; o[k] = v; count++; } return o; }, "'": function () { var s = this.string; var i = this.index; var start = i; var segments = []; var c; while ((c = s.charAt(i++)) != "'") { //if (i == s.length) return this.error('unmatched "\'"'); if (!c) return this.error('unmatched "\'"'); if (c == '!') { if (start < i-1) segments.push(s.slice(start, i-1)); c = s.charAt(i++); if ("!'".indexOf(c) >= 0) { segments.push(c); } else { return this.error('invalid string escape: "!'+c+'"'); } start = i; } } if (start < i-1) segments.push(s.slice(start, i-1)); this.index = i; return segments.length == 1 ? segments[0] : segments.join(''); }, // Also any digit. The statement that follows this table // definition fills in the digits. 
'-': function () { var s = this.string; var i = this.index; var start = i-1; var state = 'int'; var permittedSigns = '-'; var transitions = { 'int+.': 'frac', 'int+e': 'exp', 'frac+e': 'exp' }; do { var c = s.charAt(i++); if (!c) break; if ('0' <= c && c <= '9') continue; if (permittedSigns.indexOf(c) >= 0) { permittedSigns = ''; continue; } state = transitions[state+'+'+c.toLowerCase()]; if (state == 'exp') permittedSigns = '-'; } while (state); this.index = --i; s = s.slice(start, i) if (s == '-') return this.error("invalid number"); return Number(s); } }; // copy table['-'] to each of table[i] | i <- '0'..'9': (function (table) { for (var i = 0; i <= 9; i++) table[String(i)] = table['-']; })(rison.parser.prototype.table); // return the next non-whitespace character, or undefined rison.parser.prototype.next = function () { var s = this.string; var i = this.index; do { if (i == s.length) return undefined; var c = s.charAt(i++); } while (rison.parser.WHITESPACE.indexOf(c) >= 0); this.index = i; return c; }; // End of UMD module wrapper }));
// Browser globals factory((root.rison = {})); } }(this, function (exports) {
random_line_split
rison.js
// Uses CommonJS, AMD or browser globals to create a module. // Based on: https://github.com/umdjs/umd/blob/master/commonjsStrict.js (function (root, factory) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['exports'], factory); } else if (typeof exports === 'object') { // CommonJS factory(exports); } else { // Browser globals factory((root.rison = {})); } }(this, function (exports) { var rison = exports; ////////////////////////////////////////////////// // // the stringifier is based on // http://json.org/json.js as of 2006-04-28 from json.org // the parser is based on // http://osteele.com/sources/openlaszlo/json // if (typeof rison == 'undefined') window.rison = {}; /** * rules for an uri encoder that is more tolerant than encodeURIComponent * * encodeURIComponent passes ~!*()-_.' * * we also allow ,:@$/ * */ rison.uri_ok = { // ok in url paths and in form query args '~': true, '!': true, '*': true, '(': true, ')': true, '-': true, '_': true, '.': true, ',': true, ':': true, '@': true, '$': true, "'": true, '/': true }; /* * we divide the uri-safe glyphs into three sets * <rison> - used by rison ' ! : ( ) , * <reserved> - not common in strings, reserved * @ $ & ; = * * we define <identifier> as anything that's not forbidden */ /** * punctuation characters that are legal inside ids. */ // this var isn't actually used //rison.idchar_punctuation = "_-./~"; (function () { var l = []; for (var hi = 0; hi < 16; hi++) { for (var lo = 0; lo < 16; lo++) { if (hi+lo == 0) continue; var c = String.fromCharCode(hi*16 + lo); if (! /\w|[-_.\/~]/.test(c)) l.push('\\u00' + hi.toString(16) + lo.toString(16)); } } /** * characters that are illegal inside ids. * <rison> and <reserved> classes are illegal in ids. * */ rison.not_idchar = l.join('') //idcrx = new RegExp('[' + rison.not_idchar + ']'); //console.log('NOT', (idcrx.test(' ')) ); })(); //rison.not_idchar = " \t\r\n\"<>[]{}'!=:(),*@$;&"; rison.not_idchar = " '!:(),*@$"; /** * characters that are illegal as the start of an id * this is so ids can't look like numbers. */ rison.not_idstart = "-0123456789"; (function () { var idrx = '[^' + rison.not_idstart + rison.not_idchar + '][^' + rison.not_idchar + ']*'; rison.id_ok = new RegExp('^' + idrx + '$'); // regexp to find the end of an id when parsing // g flag on the regexp is necessary for iterative regexp.exec() rison.next_id = new RegExp(idrx, 'g'); })(); /** * this is like encodeURIComponent() but quotes fewer characters. * * @see rison.uri_ok * * encodeURIComponent passes ~!*()-_.' * rison.quote also passes ,:@$/ * and quotes " " as "+" instead of "%20" */ rison.quote = function(x) { if (/^[-A-Za-z0-9~!*()_.',:@$\/]*$/.test(x)) return x; return encodeURIComponent(x) .replace('%2C', ',', 'g') .replace('%3A', ':', 'g') .replace('%40', '@', 'g') .replace('%24', '$', 'g') .replace('%2F', '/', 'g') .replace('%20', '+', 'g'); }; // // based on json.js 2006-04-28 from json.org // license: http://www.json.org/license.html // // hacked by nix for use in uris. 
// (function () { var sq = { // url-ok but quoted in strings "'": true, '!': true }, s = { array: function (x) { var a = ['!('], b, f, i, l = x.length, v; for (i = 0; i < l; i += 1) { v = x[i]; f = s[typeof v]; if (f) { v = f(v); if (typeof v == 'string') { if (b) { a[a.length] = ','; } a[a.length] = v; b = true; } } } a[a.length] = ')'; return a.join(''); }, 'boolean': function (x) { if (x) return '!t'; return '!f' }, 'null': function (x) { return "!n"; }, number: function (x) { if (!isFinite(x)) return '!n'; // strip '+' out of exponent, '-' is ok though return String(x).replace(/\+/,''); }, object: function (x) { if (x) { if (x instanceof Array) { return s.array(x); } // WILL: will this work on non-Firefox browsers? if (typeof x.__prototype__ === 'object' && typeof x.__prototype__.encode_rison !== 'undefined') return x.encode_rison(); var a = ['('], b, f, i, v, ki, ks=[]; for (i in x) ks[ks.length] = i; ks.sort(); for (ki = 0; ki < ks.length; ki++) { i = ks[ki]; v = x[i]; f = s[typeof v]; if (f) { v = f(v); if (typeof v == 'string') { if (b) { a[a.length] = ','; } a.push(s.string(i), ':', v); b = true; } } } a[a.length] = ')'; return a.join(''); } return '!n'; }, string: function (x) { if (x == '') return "''"; if (rison.id_ok.test(x)) return x; x = x.replace(/(['!])/g, function(a, b) { if (sq[b]) return '!'+b; return b; }); return "'" + x + "'"; }, undefined: function (x) { throw new Error("rison can't encode the undefined value"); } }; /** * rison-encode a javascript structure * * implemementation based on Douglas Crockford's json.js: * http://json.org/json.js as of 2006-04-28 from json.org * */ rison.encode = function (v) { return s[typeof v](v); }; /** * rison-encode a javascript object without surrounding parens * */ rison.encode_object = function (v) { if (typeof v != 'object' || v === null || v instanceof Array) throw new Error("rison.encode_object expects an object argument"); var r = s[typeof v](v); return r.substring(1, r.length-1); }; /** * rison-encode a javascript array without surrounding parens * */ rison.encode_array = function (v) { if (!(v instanceof Array)) throw new Error("rison.encode_array expects an array argument"); var r = s[typeof v](v); return r.substring(2, r.length-1); }; /** * rison-encode and uri-encode a javascript structure * */ rison.encode_uri = function (v) { return rison.quote(s[typeof v](v)); }; })(); // // based on openlaszlo-json and hacked by nix for use in uris. // // Author: Oliver Steele // Copyright: Copyright 2006 Oliver Steele. All rights reserved. // Homepage: http://osteele.com/sources/openlaszlo/json // License: MIT License. // Version: 1.0 /** * parse a rison string into a javascript structure. * * this is the simplest decoder entry point. * * based on Oliver Steele's OpenLaszlo-JSON * http://osteele.com/sources/openlaszlo/json */ rison.decode = function(r) { var errcb = function(e) { throw Error('rison decoder error: ' + e); }; var p = new rison.parser(errcb); return p.parse(r); }; /** * parse an o-rison string into a javascript structure. * * this simply adds parentheses around the string before parsing. */ rison.decode_object = function(r) { return rison.decode('('+r+')'); }; /** * parse an a-rison string into a javascript structure. * * this simply adds array markup around the string before parsing. */ rison.decode_array = function(r) { return rison.decode('!('+r+')'); }; /** * construct a new parser object for reuse. * * @constructor * @class A Rison parser class. You should probably * use rison.decode instead. 
* @see rison.decode */ rison.parser = function (errcb) { this.errorHandler = errcb; }; /** * a string containing acceptable whitespace characters. * by default the rison decoder tolerates no whitespace. * to accept whitespace set rison.parser.WHITESPACE = " \t\n\r\f"; */ rison.parser.WHITESPACE = ""; // expose this as-is? rison.parser.prototype.setOptions = function (options) { if (options['errorHandler']) this.errorHandler = options.errorHandler; }; /** * parse a rison string into a javascript structure. */ rison.parser.prototype.parse = function (str) { this.string = str; this.index = 0; this.message = null; var value = this.readValue(); if (!this.message && this.next()) value = this.error("unable to parse string as rison: '" + rison.encode(str) + "'"); if (this.message && this.errorHandler) this.errorHandler(this.message, this.index); return value; }; rison.parser.prototype.error = function (message) { if (typeof(console) != 'undefined') console.log('rison parser error: ', message); this.message = message; return undefined; } rison.parser.prototype.readValue = function () { var c = this.next(); var fn = c && this.table[c]; if (fn) return fn.apply(this); // fell through table, parse as an id var s = this.string; var i = this.index-1; // Regexp.lastIndex may not work right in IE before 5.5? // g flag on the regexp is also necessary rison.next_id.lastIndex = i; var m = rison.next_id.exec(s); // console.log('matched id', i, r.lastIndex); if (m.length > 0) { var id = m[0]; this.index = i+id.length; return id; // a string } if (c) return this.error("invalid character: '" + c + "'"); return this.error("empty expression"); } rison.parser.parse_array = function (parser) { var ar = []; var c; while ((c = parser.next()) != ')') { if (!c) return parser.error("unmatched '!('"); if (ar.length) { if (c != ',') parser.error("missing ','"); } else if (c == ',') { return parser.error("extra ','"); } else --parser.index; var n = parser.readValue(); if (typeof n == "undefined") return undefined; ar.push(n); } return ar; }; rison.parser.bangs = { t: true, f: false, n: null, '(': rison.parser.parse_array } rison.parser.prototype.table = { '!': function () { var s = this.string; var c = s.charAt(this.index++); if (!c) return this.error('"!" at end of input'); var x = rison.parser.bangs[c]; if (typeof(x) == 'function') { return x.call(null, this); } else if (typeof(x) == 'undefined') { return this.error('unknown literal: "!' + c + '"'); } return x; }, '(': function () { var o = {}; var c; var count = 0; while ((c = this.next()) != ')') { if (count) { if (c != ',') this.error("missing ','"); } else if (c == ',') { return this.error("extra ','"); } else --this.index; var k = this.readValue(); if (typeof k == "undefined") return undefined; if (this.next() != ':') return this.error("missing ':'"); var v = this.readValue(); if (typeof v == "undefined") return undefined; o[k] = v; count++; } return o; }, "'": function () { var s = this.string; var i = this.index; var start = i; var segments = []; var c; while ((c = s.charAt(i++)) != "'")
if (start < i-1) segments.push(s.slice(start, i-1)); this.index = i; return segments.length == 1 ? segments[0] : segments.join(''); }, // Also any digit. The statement that follows this table // definition fills in the digits. '-': function () { var s = this.string; var i = this.index; var start = i-1; var state = 'int'; var permittedSigns = '-'; var transitions = { 'int+.': 'frac', 'int+e': 'exp', 'frac+e': 'exp' }; do { var c = s.charAt(i++); if (!c) break; if ('0' <= c && c <= '9') continue; if (permittedSigns.indexOf(c) >= 0) { permittedSigns = ''; continue; } state = transitions[state+'+'+c.toLowerCase()]; if (state == 'exp') permittedSigns = '-'; } while (state); this.index = --i; s = s.slice(start, i) if (s == '-') return this.error("invalid number"); return Number(s); } }; // copy table['-'] to each of table[i] | i <- '0'..'9': (function (table) { for (var i = 0; i <= 9; i++) table[String(i)] = table['-']; })(rison.parser.prototype.table); // return the next non-whitespace character, or undefined rison.parser.prototype.next = function () { var s = this.string; var i = this.index; do { if (i == s.length) return undefined; var c = s.charAt(i++); } while (rison.parser.WHITESPACE.indexOf(c) >= 0); this.index = i; return c; }; // End of UMD module wrapper }));
{ //if (i == s.length) return this.error('unmatched "\'"'); if (!c) return this.error('unmatched "\'"'); if (c == '!') { if (start < i-1) segments.push(s.slice(start, i-1)); c = s.charAt(i++); if ("!'".indexOf(c) >= 0) { segments.push(c); } else { return this.error('invalid string escape: "!'+c+'"'); } start = i; } }
conditional_block
wait-for-element-to-be-removed.js
import {waitFor} from './wait-for' const isRemoved = result => !result || (Array.isArray(result) && !result.length) // Check if the element is not present. // As the name implies, waitForElementToBeRemoved should check `present` --> `removed` function initialCheck(elements) { if (isRemoved(elements)) {
throw new Error( 'The element(s) given to waitForElementToBeRemoved are already removed. waitForElementToBeRemoved requires that the element(s) exist(s) before waiting for removal.', ) } } async function waitForElementToBeRemoved(callback, options) { // created here so we get a nice stacktrace const timeoutError = new Error('Timed out in waitForElementToBeRemoved.') if (typeof callback !== 'function') { initialCheck(callback) const elements = Array.isArray(callback) ? callback : [callback] const getRemainingElements = elements.map(element => { let parent = element.parentElement if (parent === null) return () => null while (parent.parentElement) parent = parent.parentElement return () => (parent.contains(element) ? element : null) }) callback = () => getRemainingElements.map(c => c()).filter(Boolean) } initialCheck(callback()) return waitFor(() => { let result try { result = callback() } catch (error) { if (error.name === 'TestingLibraryElementError') { return undefined } throw error } if (!isRemoved(result)) { throw timeoutError } return undefined }, options) } export {waitForElementToBeRemoved} /* eslint require-await: "off" */
random_line_split
wait-for-element-to-be-removed.js
import {waitFor} from './wait-for' const isRemoved = result => !result || (Array.isArray(result) && !result.length) // Check if the element is not present. // As the name implies, waitForElementToBeRemoved should check `present` --> `removed` function initialCheck(elements)
async function waitForElementToBeRemoved(callback, options) { // created here so we get a nice stacktrace const timeoutError = new Error('Timed out in waitForElementToBeRemoved.') if (typeof callback !== 'function') { initialCheck(callback) const elements = Array.isArray(callback) ? callback : [callback] const getRemainingElements = elements.map(element => { let parent = element.parentElement if (parent === null) return () => null while (parent.parentElement) parent = parent.parentElement return () => (parent.contains(element) ? element : null) }) callback = () => getRemainingElements.map(c => c()).filter(Boolean) } initialCheck(callback()) return waitFor(() => { let result try { result = callback() } catch (error) { if (error.name === 'TestingLibraryElementError') { return undefined } throw error } if (!isRemoved(result)) { throw timeoutError } return undefined }, options) } export {waitForElementToBeRemoved} /* eslint require-await: "off" */
{ if (isRemoved(elements)) { throw new Error( 'The element(s) given to waitForElementToBeRemoved are already removed. waitForElementToBeRemoved requires that the element(s) exist(s) before waiting for removal.', ) } }
identifier_body
wait-for-element-to-be-removed.js
import {waitFor} from './wait-for' const isRemoved = result => !result || (Array.isArray(result) && !result.length) // Check if the element is not present. // As the name implies, waitForElementToBeRemoved should check `present` --> `removed` function
(elements) { if (isRemoved(elements)) { throw new Error( 'The element(s) given to waitForElementToBeRemoved are already removed. waitForElementToBeRemoved requires that the element(s) exist(s) before waiting for removal.', ) } } async function waitForElementToBeRemoved(callback, options) { // created here so we get a nice stacktrace const timeoutError = new Error('Timed out in waitForElementToBeRemoved.') if (typeof callback !== 'function') { initialCheck(callback) const elements = Array.isArray(callback) ? callback : [callback] const getRemainingElements = elements.map(element => { let parent = element.parentElement if (parent === null) return () => null while (parent.parentElement) parent = parent.parentElement return () => (parent.contains(element) ? element : null) }) callback = () => getRemainingElements.map(c => c()).filter(Boolean) } initialCheck(callback()) return waitFor(() => { let result try { result = callback() } catch (error) { if (error.name === 'TestingLibraryElementError') { return undefined } throw error } if (!isRemoved(result)) { throw timeoutError } return undefined }, options) } export {waitForElementToBeRemoved} /* eslint require-await: "off" */
initialCheck
identifier_name
wait-for-element-to-be-removed.js
import {waitFor} from './wait-for' const isRemoved = result => !result || (Array.isArray(result) && !result.length) // Check if the element is not present. // As the name implies, waitForElementToBeRemoved should check `present` --> `removed` function initialCheck(elements) { if (isRemoved(elements)) { throw new Error( 'The element(s) given to waitForElementToBeRemoved are already removed. waitForElementToBeRemoved requires that the element(s) exist(s) before waiting for removal.', ) } } async function waitForElementToBeRemoved(callback, options) { // created here so we get a nice stacktrace const timeoutError = new Error('Timed out in waitForElementToBeRemoved.') if (typeof callback !== 'function')
initialCheck(callback()) return waitFor(() => { let result try { result = callback() } catch (error) { if (error.name === 'TestingLibraryElementError') { return undefined } throw error } if (!isRemoved(result)) { throw timeoutError } return undefined }, options) } export {waitForElementToBeRemoved} /* eslint require-await: "off" */
{ initialCheck(callback) const elements = Array.isArray(callback) ? callback : [callback] const getRemainingElements = elements.map(element => { let parent = element.parentElement if (parent === null) return () => null while (parent.parentElement) parent = parent.parentElement return () => (parent.contains(element) ? element : null) }) callback = () => getRemainingElements.map(c => c()).filter(Boolean) }
conditional_block
Robot.py
import constants as c from gui.windows import VideoStream import socket import cv2 import urllib import numpy as np class Robot(object): def __init__(self, connection): self.connection = connection """ @type : Connections.ConnectionProcessEnd.RobotConnection """
self.socket = None self.stream = None self.bytes = None self.window = None self.psychopy_disabled = None self.stream_enabled = None self.target_to_command = None self.connection.waitMessages(self.start, self.exit, self.update, self.setup, self.sendMessage, poll=0) def start(self): while True: self.update() message = self.connection.receiveMessageInstant() if message is not None: if isinstance(message, int): self.sendMessage(self.target_to_command[message]) elif message in c.ROBOT_COMMANDS: self.sendMessage(message) elif isinstance(message, basestring): return message else: print("Robot message: " + str(message)) def updateVideo(self): if self.stream_enabled: if self.stream is not None: self.bytes += self.stream.read(1024) a = self.bytes.find('\xff\xd8') b = self.bytes.find('\xff\xd9') if a != -1 and b != -1: jpg = self.bytes[a:b+2] self.bytes = self.bytes[b+2:] i = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_COLOR) if self.psychopy_disabled is not None: if self.psychopy_disabled: self.window.updateStream(i) else: self.connection.sendMessage(i) def updateWindow(self): if self.window is not None: self.window.update() def exitWindow(self): if self.window is not None: self.window.exitFlag = True self.window.exit() def update(self): self.updateVideo() self.updateWindow() def exit(self): self.exitWindow() self.connection.close() def sendRobotMessage(self, message): try: # seems like PiTank closes the socket after receiving message robot_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) robot_socket.connect(("192.168.42.1", 12345)) robot_socket.send(message) except Exception, e: print("Could not send message to robot (did you click setup? Is PiTank switched on and computer connected to PiTank?): " + str(e)) def sendMessage(self, message): if message in c.ROBOT_COMMANDS: self.sendRobotMessage(message) else: print("Unknown message in Robot: " + str(message)) def psychopyDisabled(self, options): return options[c.DISABLE] == 1 def streamEnabled(self, options): return options[c.ROBOT_STREAM] == 1 def getTargetToCommand(self, options): return { options[c.ROBOT_OPTION_FORWARD]: c.MOVE_FORWARD, options[c.ROBOT_OPTION_BACKWARD]: c.MOVE_BACKWARD, options[c.ROBOT_OPTION_LEFT]: c.MOVE_LEFT, options[c.ROBOT_OPTION_RIGHT]: c.MOVE_RIGHT, options[c.ROBOT_OPTION_STOP]: c.MOVE_STOP } def setup(self): options = self.connection.receiveMessageBlock() self.exitWindow() self.stream_enabled = self.streamEnabled(options[c.DATA_ROBOT]) self.target_to_command = self.getTargetToCommand(options[c.DATA_ROBOT]) if self.stream_enabled: self.psychopy_disabled = self.psychopyDisabled(options[c.DATA_BACKGROUND]) if self.psychopy_disabled: self.window = VideoStream.StreamWindow() self.window.setup() else: self.window = None else: self.window = None try: self.stream = urllib.urlopen("http://192.168.42.1:8080/?action=stream") self.bytes = "" return c.SUCCESS_MESSAGE except Exception, e: print("Error: " + str(e)) return c.FAIL_MESSAGE
random_line_split
Robot.py
import constants as c from gui.windows import VideoStream import socket import cv2 import urllib import numpy as np class Robot(object): def __init__(self, connection): self.connection = connection """ @type : Connections.ConnectionProcessEnd.RobotConnection """ self.socket = None self.stream = None self.bytes = None self.window = None self.psychopy_disabled = None self.stream_enabled = None self.target_to_command = None self.connection.waitMessages(self.start, self.exit, self.update, self.setup, self.sendMessage, poll=0) def start(self): while True: self.update() message = self.connection.receiveMessageInstant() if message is not None: if isinstance(message, int): self.sendMessage(self.target_to_command[message]) elif message in c.ROBOT_COMMANDS: self.sendMessage(message) elif isinstance(message, basestring): return message else: print("Robot message: " + str(message)) def updateVideo(self): if self.stream_enabled: if self.stream is not None: self.bytes += self.stream.read(1024) a = self.bytes.find('\xff\xd8') b = self.bytes.find('\xff\xd9') if a != -1 and b != -1: jpg = self.bytes[a:b+2] self.bytes = self.bytes[b+2:] i = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_COLOR) if self.psychopy_disabled is not None: if self.psychopy_disabled: self.window.updateStream(i) else: self.connection.sendMessage(i) def updateWindow(self):
def exitWindow(self): if self.window is not None: self.window.exitFlag = True self.window.exit() def update(self): self.updateVideo() self.updateWindow() def exit(self): self.exitWindow() self.connection.close() def sendRobotMessage(self, message): try: # seems like PiTank closes the socket after receiving message robot_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) robot_socket.connect(("192.168.42.1", 12345)) robot_socket.send(message) except Exception, e: print("Could not send message to robot (did you click setup? Is PiTank switched on and computer connected to PiTank?): " + str(e)) def sendMessage(self, message): if message in c.ROBOT_COMMANDS: self.sendRobotMessage(message) else: print("Unknown message in Robot: " + str(message)) def psychopyDisabled(self, options): return options[c.DISABLE] == 1 def streamEnabled(self, options): return options[c.ROBOT_STREAM] == 1 def getTargetToCommand(self, options): return { options[c.ROBOT_OPTION_FORWARD]: c.MOVE_FORWARD, options[c.ROBOT_OPTION_BACKWARD]: c.MOVE_BACKWARD, options[c.ROBOT_OPTION_LEFT]: c.MOVE_LEFT, options[c.ROBOT_OPTION_RIGHT]: c.MOVE_RIGHT, options[c.ROBOT_OPTION_STOP]: c.MOVE_STOP } def setup(self): options = self.connection.receiveMessageBlock() self.exitWindow() self.stream_enabled = self.streamEnabled(options[c.DATA_ROBOT]) self.target_to_command = self.getTargetToCommand(options[c.DATA_ROBOT]) if self.stream_enabled: self.psychopy_disabled = self.psychopyDisabled(options[c.DATA_BACKGROUND]) if self.psychopy_disabled: self.window = VideoStream.StreamWindow() self.window.setup() else: self.window = None else: self.window = None try: self.stream = urllib.urlopen("http://192.168.42.1:8080/?action=stream") self.bytes = "" return c.SUCCESS_MESSAGE except Exception, e: print("Error: " + str(e)) return c.FAIL_MESSAGE
if self.window is not None: self.window.update()
identifier_body
Robot.py
import constants as c from gui.windows import VideoStream import socket import cv2 import urllib import numpy as np class Robot(object): def __init__(self, connection): self.connection = connection """ @type : Connections.ConnectionProcessEnd.RobotConnection """ self.socket = None self.stream = None self.bytes = None self.window = None self.psychopy_disabled = None self.stream_enabled = None self.target_to_command = None self.connection.waitMessages(self.start, self.exit, self.update, self.setup, self.sendMessage, poll=0) def start(self): while True: self.update() message = self.connection.receiveMessageInstant() if message is not None: if isinstance(message, int): self.sendMessage(self.target_to_command[message]) elif message in c.ROBOT_COMMANDS:
elif isinstance(message, basestring): return message else: print("Robot message: " + str(message)) def updateVideo(self): if self.stream_enabled: if self.stream is not None: self.bytes += self.stream.read(1024) a = self.bytes.find('\xff\xd8') b = self.bytes.find('\xff\xd9') if a != -1 and b != -1: jpg = self.bytes[a:b+2] self.bytes = self.bytes[b+2:] i = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_COLOR) if self.psychopy_disabled is not None: if self.psychopy_disabled: self.window.updateStream(i) else: self.connection.sendMessage(i) def updateWindow(self): if self.window is not None: self.window.update() def exitWindow(self): if self.window is not None: self.window.exitFlag = True self.window.exit() def update(self): self.updateVideo() self.updateWindow() def exit(self): self.exitWindow() self.connection.close() def sendRobotMessage(self, message): try: # seems like PiTank closes the socket after receiving message robot_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) robot_socket.connect(("192.168.42.1", 12345)) robot_socket.send(message) except Exception, e: print("Could not send message to robot (did you click setup? Is PiTank switched on and computer connected to PiTank?): " + str(e)) def sendMessage(self, message): if message in c.ROBOT_COMMANDS: self.sendRobotMessage(message) else: print("Unknown message in Robot: " + str(message)) def psychopyDisabled(self, options): return options[c.DISABLE] == 1 def streamEnabled(self, options): return options[c.ROBOT_STREAM] == 1 def getTargetToCommand(self, options): return { options[c.ROBOT_OPTION_FORWARD]: c.MOVE_FORWARD, options[c.ROBOT_OPTION_BACKWARD]: c.MOVE_BACKWARD, options[c.ROBOT_OPTION_LEFT]: c.MOVE_LEFT, options[c.ROBOT_OPTION_RIGHT]: c.MOVE_RIGHT, options[c.ROBOT_OPTION_STOP]: c.MOVE_STOP } def setup(self): options = self.connection.receiveMessageBlock() self.exitWindow() self.stream_enabled = self.streamEnabled(options[c.DATA_ROBOT]) self.target_to_command = self.getTargetToCommand(options[c.DATA_ROBOT]) if self.stream_enabled: self.psychopy_disabled = self.psychopyDisabled(options[c.DATA_BACKGROUND]) if self.psychopy_disabled: self.window = VideoStream.StreamWindow() self.window.setup() else: self.window = None else: self.window = None try: self.stream = urllib.urlopen("http://192.168.42.1:8080/?action=stream") self.bytes = "" return c.SUCCESS_MESSAGE except Exception, e: print("Error: " + str(e)) return c.FAIL_MESSAGE
self.sendMessage(message)
conditional_block
Robot.py
import constants as c from gui.windows import VideoStream import socket import cv2 import urllib import numpy as np class Robot(object): def __init__(self, connection): self.connection = connection """ @type : Connections.ConnectionProcessEnd.RobotConnection """ self.socket = None self.stream = None self.bytes = None self.window = None self.psychopy_disabled = None self.stream_enabled = None self.target_to_command = None self.connection.waitMessages(self.start, self.exit, self.update, self.setup, self.sendMessage, poll=0) def start(self): while True: self.update() message = self.connection.receiveMessageInstant() if message is not None: if isinstance(message, int): self.sendMessage(self.target_to_command[message]) elif message in c.ROBOT_COMMANDS: self.sendMessage(message) elif isinstance(message, basestring): return message else: print("Robot message: " + str(message)) def
(self): if self.stream_enabled: if self.stream is not None: self.bytes += self.stream.read(1024) a = self.bytes.find('\xff\xd8') b = self.bytes.find('\xff\xd9') if a != -1 and b != -1: jpg = self.bytes[a:b+2] self.bytes = self.bytes[b+2:] i = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_COLOR) if self.psychopy_disabled is not None: if self.psychopy_disabled: self.window.updateStream(i) else: self.connection.sendMessage(i) def updateWindow(self): if self.window is not None: self.window.update() def exitWindow(self): if self.window is not None: self.window.exitFlag = True self.window.exit() def update(self): self.updateVideo() self.updateWindow() def exit(self): self.exitWindow() self.connection.close() def sendRobotMessage(self, message): try: # seems like PiTank closes the socket after receiving message robot_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) robot_socket.connect(("192.168.42.1", 12345)) robot_socket.send(message) except Exception, e: print("Could not send message to robot (did you click setup? Is PiTank switched on and computer connected to PiTank?): " + str(e)) def sendMessage(self, message): if message in c.ROBOT_COMMANDS: self.sendRobotMessage(message) else: print("Unknown message in Robot: " + str(message)) def psychopyDisabled(self, options): return options[c.DISABLE] == 1 def streamEnabled(self, options): return options[c.ROBOT_STREAM] == 1 def getTargetToCommand(self, options): return { options[c.ROBOT_OPTION_FORWARD]: c.MOVE_FORWARD, options[c.ROBOT_OPTION_BACKWARD]: c.MOVE_BACKWARD, options[c.ROBOT_OPTION_LEFT]: c.MOVE_LEFT, options[c.ROBOT_OPTION_RIGHT]: c.MOVE_RIGHT, options[c.ROBOT_OPTION_STOP]: c.MOVE_STOP } def setup(self): options = self.connection.receiveMessageBlock() self.exitWindow() self.stream_enabled = self.streamEnabled(options[c.DATA_ROBOT]) self.target_to_command = self.getTargetToCommand(options[c.DATA_ROBOT]) if self.stream_enabled: self.psychopy_disabled = self.psychopyDisabled(options[c.DATA_BACKGROUND]) if self.psychopy_disabled: self.window = VideoStream.StreamWindow() self.window.setup() else: self.window = None else: self.window = None try: self.stream = urllib.urlopen("http://192.168.42.1:8080/?action=stream") self.bytes = "" return c.SUCCESS_MESSAGE except Exception, e: print("Error: " + str(e)) return c.FAIL_MESSAGE
updateVideo
identifier_name
main.rs
extern crate serenity; use serenity::prelude::*; use serenity::model::*; use std::env; struct Handler; impl EventHandler for Handler { // Set a handler for the `on_message` event - so that whenever a new message // is received - the closure (or function) passed will be called. // // Event handlers are dispatched through multi-threading, and so multiple // of a single event can be dispatched simultaneously. fn on_message(&self, _: Context, msg: Message) { if msg.content == "!ping"
} // Set a handler to be called on the `on_ready` event. This is called when a // shard is booted, and a READY payload is sent by Discord. This payload // contains data like the current user's guild Ids, current user data, // private channels, and more. // // In this case, just print what the current user's username is. fn on_ready(&self, _: Context, ready: Ready) { println!("{} is connected!", ready.user.name); } } fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment"); // Create a new instance of the Client, logging in as a bot. This will // automatically prepend your bot token with "Bot ", which is a requirement // by Discord for bot users. let mut client = Client::new(&token, Handler); // Finally, start a single shard, and start listening to events. // // Shards will automatically attempt to reconnect, and will perform // exponential backoff until it reconnects. if let Err(why) = client.start() { println!("Client error: {:?}", why); } }
{ // Sending a message can fail, due to a network error, an // authentication error, or lack of permissions to post in the // channel, so log to stdout when some error happens, with a // description of it. if let Err(why) = msg.channel_id.say("Pong!") { println!("Error sending message: {:?}", why); } }
conditional_block
main.rs
extern crate serenity; use serenity::prelude::*; use serenity::model::*; use std::env; struct Handler; impl EventHandler for Handler { // Set a handler for the `on_message` event - so that whenever a new message // is received - the closure (or function) passed will be called. // // Event handlers are dispatched through multi-threading, and so multiple // of a single event can be dispatched simultaneously. fn
(&self, _: Context, msg: Message) { if msg.content == "!ping" { // Sending a message can fail, due to a network error, an // authentication error, or lack of permissions to post in the // channel, so log to stdout when some error happens, with a // description of it. if let Err(why) = msg.channel_id.say("Pong!") { println!("Error sending message: {:?}", why); } } } // Set a handler to be called on the `on_ready` event. This is called when a // shard is booted, and a READY payload is sent by Discord. This payload // contains data like the current user's guild Ids, current user data, // private channels, and more. // // In this case, just print what the current user's username is. fn on_ready(&self, _: Context, ready: Ready) { println!("{} is connected!", ready.user.name); } } fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment"); // Create a new instance of the Client, logging in as a bot. This will // automatically prepend your bot token with "Bot ", which is a requirement // by Discord for bot users. let mut client = Client::new(&token, Handler); // Finally, start a single shard, and start listening to events. // // Shards will automatically attempt to reconnect, and will perform // exponential backoff until it reconnects. if let Err(why) = client.start() { println!("Client error: {:?}", why); } }
on_message
identifier_name
main.rs
extern crate serenity; use serenity::prelude::*; use serenity::model::*; use std::env; struct Handler; impl EventHandler for Handler { // Set a handler for the `on_message` event - so that whenever a new message // is received - the closure (or function) passed will be called. // // Event handlers are dispatched through multi-threading, and so multiple // of a single event can be dispatched simultaneously. fn on_message(&self, _: Context, msg: Message)
// Set a handler to be called on the `on_ready` event. This is called when a // shard is booted, and a READY payload is sent by Discord. This payload // contains data like the current user's guild Ids, current user data, // private channels, and more. // // In this case, just print what the current user's username is. fn on_ready(&self, _: Context, ready: Ready) { println!("{} is connected!", ready.user.name); } } fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment"); // Create a new instance of the Client, logging in as a bot. This will // automatically prepend your bot token with "Bot ", which is a requirement // by Discord for bot users. let mut client = Client::new(&token, Handler); // Finally, start a single shard, and start listening to events. // // Shards will automatically attempt to reconnect, and will perform // exponential backoff until it reconnects. if let Err(why) = client.start() { println!("Client error: {:?}", why); } }
{ if msg.content == "!ping" { // Sending a message can fail, due to a network error, an // authentication error, or lack of permissions to post in the // channel, so log to stdout when some error happens, with a // description of it. if let Err(why) = msg.channel_id.say("Pong!") { println!("Error sending message: {:?}", why); } } }
identifier_body
main.rs
extern crate serenity; use serenity::prelude::*; use serenity::model::*; use std::env; struct Handler; impl EventHandler for Handler { // Set a handler for the `on_message` event - so that whenever a new message // is received - the closure (or function) passed will be called. // // Event handlers are dispatched through multi-threading, and so multiple // of a single event can be dispatched simultaneously. fn on_message(&self, _: Context, msg: Message) { if msg.content == "!ping" { // Sending a message can fail, due to a network error, an // authentication error, or lack of permissions to post in the // channel, so log to stdout when some error happens, with a // description of it. if let Err(why) = msg.channel_id.say("Pong!") { println!("Error sending message: {:?}", why); } } }
// Set a handler to be called on the `on_ready` event. This is called when a // shard is booted, and a READY payload is sent by Discord. This payload // contains data like the current user's guild Ids, current user data, // private channels, and more. // // In this case, just print what the current user's username is. fn on_ready(&self, _: Context, ready: Ready) { println!("{} is connected!", ready.user.name); } } fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment"); // Create a new instance of the Client, logging in as a bot. This will // automatically prepend your bot token with "Bot ", which is a requirement // by Discord for bot users. let mut client = Client::new(&token, Handler); // Finally, start a single shard, and start listening to events. // // Shards will automatically attempt to reconnect, and will perform // exponential backoff until it reconnects. if let Err(why) = client.start() { println!("Client error: {:?}", why); } }
random_line_split
BestBuyWebService.js
export default class BestBuyWebService{ constructor(){ this.url =""; this.apiKey = ""; this.productData = null; this.products = null; } getData(theApp){ // theApp is a reference to the main app // we can pass information to it, including data // that is returned from this service let serviceChannel = new XMLHttpRequest(); let url = this.url; /* // *** To solve the issue of passing the data back to the main app... // *** and eventually, to catalogView // *** You could the addEventListener to call // *** a different function which will have both // *** the event object and dataPlaceHolder as parameters // *** see http://bit.ly/js-passmoreargsevent */ serviceChannel.addEventListener("readystatechange",this.resultsPreprocessor(theApp),false); serviceChannel.open("GET",url,true); serviceChannel.send(); } resultsPreprocessor(theApp){ /*the addEventListener function near line 29 requires a proper function (an event handler) to be returned so we can create one to be returned. */ let thisService = this; // a reference to the instance created from this class let eventHandler = function(evt){ thisService.results(evt,theApp); }; return eventHandler }; results(evt,theApp)
getProducts(){ // this method explicity gets the products property // from the JSON object. it assumes you have the JSON data if(this.productData!=null){ let jsonData = JSON.parse(this.productData); this.products = jsonData.products; return this.products; } return; // if we have no data, return nothing } }
{ if (evt.target.readyState == 4 && evt.target.status == 200){ // assign this instance's productData to be the responseText this.productData = evt.target.responseText; // assign the app's productData to be the responseText too theApp.productData = evt.target.responseText; // tell the app to prepare the catalog // there is another way to do it, with custom // events. but this will work for now. theApp.prepCatalog(); // console.log(evt.target.responseText); // return evt.target.responseText; } }
identifier_body
BestBuyWebService.js
export default class BestBuyWebService{ constructor(){ this.url =""; this.apiKey = ""; this.productData = null; this.products = null; } getData(theApp){ // theApp is a reference to the main app // we can pass information to it, including data // that is returned from this service let serviceChannel = new XMLHttpRequest(); let url = this.url; /* // *** To solve the issue of passing the data back to the main app... // *** and eventually, to catalogView // *** You could the addEventListener to call // *** a different function which will have both // *** the event object and dataPlaceHolder as parameters // *** see http://bit.ly/js-passmoreargsevent */ serviceChannel.addEventListener("readystatechange",this.resultsPreprocessor(theApp),false); serviceChannel.open("GET",url,true); serviceChannel.send(); }
(theApp){ /*the addEventListener function near line 29 requires a proper function (an event handler) to be returned so we can create one to be returned. */ let thisService = this; // a reference to the instance created from this class let eventHandler = function(evt){ thisService.results(evt,theApp); }; return eventHandler }; results(evt,theApp){ if (evt.target.readyState == 4 && evt.target.status == 200){ // assign this instance's productData to be the responseText this.productData = evt.target.responseText; // assign the app's productData to be the responseText too theApp.productData = evt.target.responseText; // tell the app to prepare the catalog // there is another way to do it, with custom // events. but this will work for now. theApp.prepCatalog(); // console.log(evt.target.responseText); // return evt.target.responseText; } } getProducts(){ // this method explicity gets the products property // from the JSON object. it assumes you have the JSON data if(this.productData!=null){ let jsonData = JSON.parse(this.productData); this.products = jsonData.products; return this.products; } return; // if we have no data, return nothing } }
resultsPreprocessor
identifier_name
BestBuyWebService.js
export default class BestBuyWebService{ constructor(){ this.url =""; this.apiKey = ""; this.productData = null; this.products = null; } getData(theApp){ // theApp is a reference to the main app // we can pass information to it, including data // that is returned from this service let serviceChannel = new XMLHttpRequest(); let url = this.url; /* // *** To solve the issue of passing the data back to the main app... // *** and eventually, to catalogView // *** You could the addEventListener to call // *** a different function which will have both // *** the event object and dataPlaceHolder as parameters // *** see http://bit.ly/js-passmoreargsevent */ serviceChannel.addEventListener("readystatechange",this.resultsPreprocessor(theApp),false); serviceChannel.open("GET",url,true); serviceChannel.send(); } resultsPreprocessor(theApp){ /*the addEventListener function near line 29 requires a proper function (an event handler) to be returned so we can create one to be returned. */ let thisService = this; // a reference to the instance created from this class let eventHandler = function(evt){ thisService.results(evt,theApp); }; return eventHandler }; results(evt,theApp){ if (evt.target.readyState == 4 && evt.target.status == 200){ // assign this instance's productData to be the responseText this.productData = evt.target.responseText; // assign the app's productData to be the responseText too theApp.productData = evt.target.responseText; // tell the app to prepare the catalog // there is another way to do it, with custom // events. but this will work for now. theApp.prepCatalog(); // console.log(evt.target.responseText); // return evt.target.responseText; } } getProducts(){ // this method explicity gets the products property // from the JSON object. it assumes you have the JSON data if(this.productData!=null)
return; // if we have no data, return nothing } }
{ let jsonData = JSON.parse(this.productData); this.products = jsonData.products; return this.products; }
conditional_block
BestBuyWebService.js
export default class BestBuyWebService{ constructor(){ this.url =""; this.apiKey = ""; this.productData = null; this.products = null; } getData(theApp){ // theApp is a reference to the main app // we can pass information to it, including data // that is returned from this service let serviceChannel = new XMLHttpRequest(); let url = this.url; /* // *** To solve the issue of passing the data back to the main app... // *** and eventually, to catalogView // *** You could the addEventListener to call // *** a different function which will have both // *** the event object and dataPlaceHolder as parameters // *** see http://bit.ly/js-passmoreargsevent */ serviceChannel.addEventListener("readystatechange",this.resultsPreprocessor(theApp),false); serviceChannel.open("GET",url,true); serviceChannel.send();
/*the addEventListener function near line 29 requires a proper function (an event handler) to be returned so we can create one to be returned. */ let thisService = this; // a reference to the instance created from this class let eventHandler = function(evt){ thisService.results(evt,theApp); }; return eventHandler }; results(evt,theApp){ if (evt.target.readyState == 4 && evt.target.status == 200){ // assign this instance's productData to be the responseText this.productData = evt.target.responseText; // assign the app's productData to be the responseText too theApp.productData = evt.target.responseText; // tell the app to prepare the catalog // there is another way to do it, with custom // events. but this will work for now. theApp.prepCatalog(); // console.log(evt.target.responseText); // return evt.target.responseText; } } getProducts(){ // this method explicity gets the products property // from the JSON object. it assumes you have the JSON data if(this.productData!=null){ let jsonData = JSON.parse(this.productData); this.products = jsonData.products; return this.products; } return; // if we have no data, return nothing } }
} resultsPreprocessor(theApp){
random_line_split
bin.rs
#[macro_use] extern crate malachite_base_test_util; extern crate malachite_nz; extern crate malachite_nz_test_util; extern crate malachite_q; extern crate serde; extern crate serde_json; use crate::demo_and_bench::register; use malachite_base_test_util::runner::cmd::read_command_line_arguments; use malachite_base_test_util::runner::Runner; // Examples: // // cargo run -- -l 10000 -m special_random -d demo_from_naturals -c "mean_bits_n 128 mean_bits_d 1" fn main() { let args = read_command_line_arguments("malachite-q test utils"); let mut runner = Runner::new(); register(&mut runner); if let Some(demo_key) = args.demo_key { runner.run_demo(&demo_key, args.generation_mode, args.config, args.limit); } else if let Some(bench_key) = args.bench_key { runner.run_bench( &bench_key, args.generation_mode, args.config, args.limit, &args.out, ); } else { panic!(); }
mod demo_and_bench;
}
random_line_split
bin.rs
#[macro_use] extern crate malachite_base_test_util; extern crate malachite_nz; extern crate malachite_nz_test_util; extern crate malachite_q; extern crate serde; extern crate serde_json; use crate::demo_and_bench::register; use malachite_base_test_util::runner::cmd::read_command_line_arguments; use malachite_base_test_util::runner::Runner; // Examples: // // cargo run -- -l 10000 -m special_random -d demo_from_naturals -c "mean_bits_n 128 mean_bits_d 1" fn
() { let args = read_command_line_arguments("malachite-q test utils"); let mut runner = Runner::new(); register(&mut runner); if let Some(demo_key) = args.demo_key { runner.run_demo(&demo_key, args.generation_mode, args.config, args.limit); } else if let Some(bench_key) = args.bench_key { runner.run_bench( &bench_key, args.generation_mode, args.config, args.limit, &args.out, ); } else { panic!(); } } mod demo_and_bench;
main
identifier_name
bin.rs
#[macro_use] extern crate malachite_base_test_util; extern crate malachite_nz; extern crate malachite_nz_test_util; extern crate malachite_q; extern crate serde; extern crate serde_json; use crate::demo_and_bench::register; use malachite_base_test_util::runner::cmd::read_command_line_arguments; use malachite_base_test_util::runner::Runner; // Examples: // // cargo run -- -l 10000 -m special_random -d demo_from_naturals -c "mean_bits_n 128 mean_bits_d 1" fn main() { let args = read_command_line_arguments("malachite-q test utils"); let mut runner = Runner::new(); register(&mut runner); if let Some(demo_key) = args.demo_key { runner.run_demo(&demo_key, args.generation_mode, args.config, args.limit); } else if let Some(bench_key) = args.bench_key
else { panic!(); } } mod demo_and_bench;
{ runner.run_bench( &bench_key, args.generation_mode, args.config, args.limit, &args.out, ); }
conditional_block
bin.rs
#[macro_use] extern crate malachite_base_test_util; extern crate malachite_nz; extern crate malachite_nz_test_util; extern crate malachite_q; extern crate serde; extern crate serde_json; use crate::demo_and_bench::register; use malachite_base_test_util::runner::cmd::read_command_line_arguments; use malachite_base_test_util::runner::Runner; // Examples: // // cargo run -- -l 10000 -m special_random -d demo_from_naturals -c "mean_bits_n 128 mean_bits_d 1" fn main()
mod demo_and_bench;
{ let args = read_command_line_arguments("malachite-q test utils"); let mut runner = Runner::new(); register(&mut runner); if let Some(demo_key) = args.demo_key { runner.run_demo(&demo_key, args.generation_mode, args.config, args.limit); } else if let Some(bench_key) = args.bench_key { runner.run_bench( &bench_key, args.generation_mode, args.config, args.limit, &args.out, ); } else { panic!(); } }
identifier_body
setup.py
import codecs import os from setuptools import setup, find_packages def read(filename):
setup( name='lemon-filebrowser', version='0.1.2', license='ISC', description="Fork of Patrick Kranzlmueller's django-filebrowser app.", url='https://github.com/trilan/lemon-filebrowser', author='Trilan Team', author_email='[email protected]', packages=find_packages(exclude=['tests', 'tests.*']), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read()
identifier_body
setup.py
import codecs import os from setuptools import setup, find_packages def
(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-filebrowser', version='0.1.2', license='ISC', description="Fork of Patrick Kranzlmueller's django-filebrowser app.", url='https://github.com/trilan/lemon-filebrowser', author='Trilan Team', author_email='[email protected]', packages=find_packages(exclude=['tests', 'tests.*']), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
read
identifier_name
setup.py
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read()
version='0.1.2', license='ISC', description="Fork of Patrick Kranzlmueller's django-filebrowser app.", url='https://github.com/trilan/lemon-filebrowser', author='Trilan Team', author_email='[email protected]', packages=find_packages(exclude=['tests', 'tests.*']), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
setup( name='lemon-filebrowser',
random_line_split
home.ts
import {Component, View, CORE_DIRECTIVES} from 'angular2/angular2'; import {ROUTER_DIRECTIVES} from 'angular2/router'; import UserRepo from '../../services/repositories/user_repo'; import User from '../../services/models/user'; import {Alert} from '../../ng2-bootstrap/alert/alert'; @Component({ selector: 'home' }) @View({ templateUrl: './components/home/home.html?v=<%= VERSION %>', directives: [CORE_DIRECTIVES, ROUTER_DIRECTIVES, Alert] }) export class Home { private loading:boolean; //Bootstrap alert private alertOpened:boolean = true; userName:string =''; constructor(private repo:UserRepo) { console.log(UserRepo); } addUser(currentUser) { this.loading = true; this.repo.getUser(currentUser.value) .then(u => { this.loading = false; this.userName = u.name; }); currentUser.value = ''; } getAll():User[] { return this.repo.getAll(); } remove(user:User) { this.repo.remove(user); } //Bootstrap Alert close(evt:MouseEvent) { evt.preventDefault(); this.alertOpened = false; } log(msg:string) { console.log(msg); } toggle() { this.alertOpened = !this.alertOpened; }
}
random_line_split
home.ts
import {Component, View, CORE_DIRECTIVES} from 'angular2/angular2'; import {ROUTER_DIRECTIVES} from 'angular2/router'; import UserRepo from '../../services/repositories/user_repo'; import User from '../../services/models/user'; import {Alert} from '../../ng2-bootstrap/alert/alert'; @Component({ selector: 'home' }) @View({ templateUrl: './components/home/home.html?v=<%= VERSION %>', directives: [CORE_DIRECTIVES, ROUTER_DIRECTIVES, Alert] }) export class Home { private loading:boolean; //Bootstrap alert private alertOpened:boolean = true; userName:string =''; constructor(private repo:UserRepo) { console.log(UserRepo); } addUser(currentUser) { this.loading = true; this.repo.getUser(currentUser.value) .then(u => { this.loading = false; this.userName = u.name; }); currentUser.value = ''; } getAll():User[] { return this.repo.getAll(); } remove(user:User) { this.repo.remove(user); } //Bootstrap Alert close(evt:MouseEvent) { evt.preventDefault(); this.alertOpened = false; } log(msg:string) { console.log(msg); }
() { this.alertOpened = !this.alertOpened; } }
toggle
identifier_name
home.ts
import {Component, View, CORE_DIRECTIVES} from 'angular2/angular2'; import {ROUTER_DIRECTIVES} from 'angular2/router'; import UserRepo from '../../services/repositories/user_repo'; import User from '../../services/models/user'; import {Alert} from '../../ng2-bootstrap/alert/alert'; @Component({ selector: 'home' }) @View({ templateUrl: './components/home/home.html?v=<%= VERSION %>', directives: [CORE_DIRECTIVES, ROUTER_DIRECTIVES, Alert] }) export class Home { private loading:boolean; //Bootstrap alert private alertOpened:boolean = true; userName:string =''; constructor(private repo:UserRepo) { console.log(UserRepo); } addUser(currentUser) { this.loading = true; this.repo.getUser(currentUser.value) .then(u => { this.loading = false; this.userName = u.name; }); currentUser.value = ''; } getAll():User[] { return this.repo.getAll(); } remove(user:User) { this.repo.remove(user); } //Bootstrap Alert close(evt:MouseEvent) { evt.preventDefault(); this.alertOpened = false; } log(msg:string) { console.log(msg); } toggle()
}
{ this.alertOpened = !this.alertOpened; }
identifier_body
1_7_active_tag_2b29f533fdfc.py
"""1.7 : Add active tags to expense types and tva Revision ID: 2b29f533fdfc Revises: 4ce6b915de98 Create Date: 2013-09-03 16:05:22.824684 """ # revision identifiers, used by Alembic. revision = '2b29f533fdfc' down_revision = '4ce6b915de98' from alembic import op import sqlalchemy as sa def upgrade(): try: col = sa.Column('active', sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("expense_type", col) except: print "The column already exists" col = sa.Column('active',
def downgrade(): op.drop_column("expense_type", "active") op.drop_column("tva", "active")
sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("tva", col)
random_line_split
1_7_active_tag_2b29f533fdfc.py
"""1.7 : Add active tags to expense types and tva Revision ID: 2b29f533fdfc Revises: 4ce6b915de98 Create Date: 2013-09-03 16:05:22.824684 """ # revision identifiers, used by Alembic. revision = '2b29f533fdfc' down_revision = '4ce6b915de98' from alembic import op import sqlalchemy as sa def upgrade():
def downgrade(): op.drop_column("expense_type", "active") op.drop_column("tva", "active")
try: col = sa.Column('active', sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("expense_type", col) except: print "The column already exists" col = sa.Column('active', sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("tva", col)
identifier_body
1_7_active_tag_2b29f533fdfc.py
"""1.7 : Add active tags to expense types and tva Revision ID: 2b29f533fdfc Revises: 4ce6b915de98 Create Date: 2013-09-03 16:05:22.824684 """ # revision identifiers, used by Alembic. revision = '2b29f533fdfc' down_revision = '4ce6b915de98' from alembic import op import sqlalchemy as sa def
(): try: col = sa.Column('active', sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("expense_type", col) except: print "The column already exists" col = sa.Column('active', sa.Boolean(), default=True, server_default=sa.sql.expression.true()) op.add_column("tva", col) def downgrade(): op.drop_column("expense_type", "active") op.drop_column("tva", "active")
upgrade
identifier_name
main.py
#------------------------------------------------------------------------------- # Name: Main.py # Purpose: This script creates chainages from a single or mutile line # # Author: smithc5 # # Created: 10/02/2015 # Copyright: (c) smithc5 2015 # Licence: <your licence> #------------------------------------------------------------------------------ import os import arcpy import sys import traceback from modules import create_chainages source_align_location = arcpy.GetParameterAsText(0) # Variable to store the location of the original source alignment. database_location = arcpy.GetParameterAsText(1) # Variable to store the location where the database is created to store the. # feature classes. chainage_distance = arcpy.GetParameterAsText(2) new_fc_name = os.path.basename(source_align_location[:-4]) # New name for the copied feature class. Original name minus file extension database_name = "{}.gdb".format(new_fc_name) # Variable to store the name of the .gdb to store the feature classes. DATABASE_FLIEPATH = os.path.join(database_location, database_name) new_fc_filepath = os.path.join(DATABASE_FLIEPATH, new_fc_name) # New file path to the copied feature class new_fc_filepath_with_m = "{0}_M".format(new_fc_filepath) # New file path to the copied feature class chainage_feature_class = "{0}_Chainages".format(new_fc_filepath) # This is the output feature class to store the chainages. def
(): try: create_chainages.check_if_gdb_exist(DATABASE_FLIEPATH) create_chainages.create_gdb(database_location, database_name) create_chainages.copy_features(source_align_location, new_fc_filepath) create_chainages.create_route(new_fc_filepath, "Name", new_fc_filepath_with_m) create_chainages.create_chainages(new_fc_filepath_with_m, chainage_distance, database_location, new_fc_filepath_with_m, DATABASE_FLIEPATH, chainage_feature_class) except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] pymsg = "PYTHON ERRORS:\nTraceback Info:\n{0}\nError Info:\n {1}: {2}\n".format(tbinfo, str(sys.exc_type), str(sys.exc_value)) msgs = "ARCPY ERRORS:\n{}\n".format(arcpy.GetMessages(2)) arcpy.AddError(msgs) arcpy.AddError(pymsg) print msgs print pymsg arcpy.AddMessage(arcpy.GetMessages(1)) print arcpy.GetMessages(1) if __name__ == '__main__': main()
main
identifier_name
main.py
#------------------------------------------------------------------------------- # Name: Main.py # Purpose: This script creates chainages from a single or mutile line # # Author: smithc5 # # Created: 10/02/2015 # Copyright: (c) smithc5 2015 # Licence: <your licence> #------------------------------------------------------------------------------ import os import arcpy import sys import traceback from modules import create_chainages source_align_location = arcpy.GetParameterAsText(0) # Variable to store the location of the original source alignment. database_location = arcpy.GetParameterAsText(1) # Variable to store the location where the database is created to store the. # feature classes. chainage_distance = arcpy.GetParameterAsText(2) new_fc_name = os.path.basename(source_align_location[:-4]) # New name for the copied feature class. Original name minus file extension database_name = "{}.gdb".format(new_fc_name) # Variable to store the name of the .gdb to store the feature classes. DATABASE_FLIEPATH = os.path.join(database_location, database_name) new_fc_filepath = os.path.join(DATABASE_FLIEPATH, new_fc_name) # New file path to the copied feature class new_fc_filepath_with_m = "{0}_M".format(new_fc_filepath) # New file path to the copied feature class
def main(): try: create_chainages.check_if_gdb_exist(DATABASE_FLIEPATH) create_chainages.create_gdb(database_location, database_name) create_chainages.copy_features(source_align_location, new_fc_filepath) create_chainages.create_route(new_fc_filepath, "Name", new_fc_filepath_with_m) create_chainages.create_chainages(new_fc_filepath_with_m, chainage_distance, database_location, new_fc_filepath_with_m, DATABASE_FLIEPATH, chainage_feature_class) except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] pymsg = "PYTHON ERRORS:\nTraceback Info:\n{0}\nError Info:\n {1}: {2}\n".format(tbinfo, str(sys.exc_type), str(sys.exc_value)) msgs = "ARCPY ERRORS:\n{}\n".format(arcpy.GetMessages(2)) arcpy.AddError(msgs) arcpy.AddError(pymsg) print msgs print pymsg arcpy.AddMessage(arcpy.GetMessages(1)) print arcpy.GetMessages(1) if __name__ == '__main__': main()
chainage_feature_class = "{0}_Chainages".format(new_fc_filepath) # This is the output feature class to store the chainages.
random_line_split
main.py
#------------------------------------------------------------------------------- # Name: Main.py # Purpose: This script creates chainages from a single or mutile line # # Author: smithc5 # # Created: 10/02/2015 # Copyright: (c) smithc5 2015 # Licence: <your licence> #------------------------------------------------------------------------------ import os import arcpy import sys import traceback from modules import create_chainages source_align_location = arcpy.GetParameterAsText(0) # Variable to store the location of the original source alignment. database_location = arcpy.GetParameterAsText(1) # Variable to store the location where the database is created to store the. # feature classes. chainage_distance = arcpy.GetParameterAsText(2) new_fc_name = os.path.basename(source_align_location[:-4]) # New name for the copied feature class. Original name minus file extension database_name = "{}.gdb".format(new_fc_name) # Variable to store the name of the .gdb to store the feature classes. DATABASE_FLIEPATH = os.path.join(database_location, database_name) new_fc_filepath = os.path.join(DATABASE_FLIEPATH, new_fc_name) # New file path to the copied feature class new_fc_filepath_with_m = "{0}_M".format(new_fc_filepath) # New file path to the copied feature class chainage_feature_class = "{0}_Chainages".format(new_fc_filepath) # This is the output feature class to store the chainages. def main(): try: create_chainages.check_if_gdb_exist(DATABASE_FLIEPATH) create_chainages.create_gdb(database_location, database_name) create_chainages.copy_features(source_align_location, new_fc_filepath) create_chainages.create_route(new_fc_filepath, "Name", new_fc_filepath_with_m) create_chainages.create_chainages(new_fc_filepath_with_m, chainage_distance, database_location, new_fc_filepath_with_m, DATABASE_FLIEPATH, chainage_feature_class) except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] pymsg = "PYTHON ERRORS:\nTraceback Info:\n{0}\nError Info:\n {1}: {2}\n".format(tbinfo, str(sys.exc_type), str(sys.exc_value)) msgs = "ARCPY ERRORS:\n{}\n".format(arcpy.GetMessages(2)) arcpy.AddError(msgs) arcpy.AddError(pymsg) print msgs print pymsg arcpy.AddMessage(arcpy.GetMessages(1)) print arcpy.GetMessages(1) if __name__ == '__main__':
main()
conditional_block
main.py
#------------------------------------------------------------------------------- # Name: Main.py # Purpose: This script creates chainages from a single or mutile line # # Author: smithc5 # # Created: 10/02/2015 # Copyright: (c) smithc5 2015 # Licence: <your licence> #------------------------------------------------------------------------------ import os import arcpy import sys import traceback from modules import create_chainages source_align_location = arcpy.GetParameterAsText(0) # Variable to store the location of the original source alignment. database_location = arcpy.GetParameterAsText(1) # Variable to store the location where the database is created to store the. # feature classes. chainage_distance = arcpy.GetParameterAsText(2) new_fc_name = os.path.basename(source_align_location[:-4]) # New name for the copied feature class. Original name minus file extension database_name = "{}.gdb".format(new_fc_name) # Variable to store the name of the .gdb to store the feature classes. DATABASE_FLIEPATH = os.path.join(database_location, database_name) new_fc_filepath = os.path.join(DATABASE_FLIEPATH, new_fc_name) # New file path to the copied feature class new_fc_filepath_with_m = "{0}_M".format(new_fc_filepath) # New file path to the copied feature class chainage_feature_class = "{0}_Chainages".format(new_fc_filepath) # This is the output feature class to store the chainages. def main():
if __name__ == '__main__': main()
try: create_chainages.check_if_gdb_exist(DATABASE_FLIEPATH) create_chainages.create_gdb(database_location, database_name) create_chainages.copy_features(source_align_location, new_fc_filepath) create_chainages.create_route(new_fc_filepath, "Name", new_fc_filepath_with_m) create_chainages.create_chainages(new_fc_filepath_with_m, chainage_distance, database_location, new_fc_filepath_with_m, DATABASE_FLIEPATH, chainage_feature_class) except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] pymsg = "PYTHON ERRORS:\nTraceback Info:\n{0}\nError Info:\n {1}: {2}\n".format(tbinfo, str(sys.exc_type), str(sys.exc_value)) msgs = "ARCPY ERRORS:\n{}\n".format(arcpy.GetMessages(2)) arcpy.AddError(msgs) arcpy.AddError(pymsg) print msgs print pymsg arcpy.AddMessage(arcpy.GetMessages(1)) print arcpy.GetMessages(1)
identifier_body
calendar.js
dhtmlXForm.prototype.items.calendar = { render: function(item, data) { var t = this; item._type = "calendar"; item._enabled = true; this.doAddLabel(item, data); this.doAddInput(item, data, "INPUT", "TEXT", true, true, "dhxform_textarea"); item.childNodes[item._ll?1:0].childNodes[0]._idd = item._idd; item._f = (data.dateFormat||"%d-%m-%Y"); // formats item._f0 = (data.serverDateFormat||item._f); // formats for save-load, if set - use them for saving and loading only item._c = new dhtmlXCalendarObject(item.childNodes[item._ll?1:0].childNodes[0], data.skin||item.getForm().skin||"dhx_skyblue"); item._c._nullInInput = true; // allow null value from input item._c.enableListener(item.childNodes[item._ll?1:0].childNodes[0]); item._c.setDateFormat(item._f); if (!data.enableTime) item._c.hideTime(); if (!isNaN(data.weekStart)) item._c.setWeekStartDay(data.weekStart); if (typeof(data.calendarPosition) != "undefined") item._c.setPosition(data.calendarPosition); item._c._itemIdd = item._idd; item._c.attachEvent("onBeforeChange", function(d) { if (item._value != d) { // call some events if (item.checkEvent("onBeforeChange")) { if (item.callEvent("onBeforeChange",[item._idd, item._value, d]) !== true)
} // accepted item._value = d; t.setValue(item, d); item.callEvent("onChange", [this._itemIdd, item._value]); } return true; }); this.setValue(item, data.value); return this; }, getCalendar: function(item) { return item._c; }, setSkin: function(item, skin) { item._c.setSkin(skin); }, setValue: function(item, value) { if (!value || value == null || typeof(value) == "undefined" || value == "") { item._value = null; item.childNodes[item._ll?1:0].childNodes[0].value = ""; } else { item._value = (value instanceof Date ? value : item._c._strToDate(value, item._f0)); item.childNodes[item._ll?1:0].childNodes[0].value = item._c._dateToStr(item._value, item._f); } item._c.setDate(item._value); window.dhtmlXFormLs[item.getForm()._rId].vals[item._idd] = item.childNodes[item._ll?1:0].childNodes[0].value; }, getValue: function(item, asString) { var d = item._c.getDate(); if (asString===true && d == null) return ""; return (asString===true?item._c._dateToStr(d,item._f0):d); }, destruct: function(item) { // unload calendar instance item._c.unload(); item._c = null; try {delete item._c;} catch(e){} item._f = null; try {delete item._f;} catch(e){} item._f0 = null; try {delete item._f0;} catch(e){} // remove custom events/objects item.childNodes[item._ll?1:0].childNodes[0]._idd = null; // unload item this.d2(item); item = null; } }; (function(){ for (var a in {doAddLabel:1,doAddInput:1,doUnloadNestedLists:1,setText:1,getText:1,enable:1,disable:1,isEnabled:1,setWidth:1,setReadonly:1,isReadonly:1,setFocus:1,getInput:1}) dhtmlXForm.prototype.items.calendar[a] = dhtmlXForm.prototype.items.input[a]; })(); dhtmlXForm.prototype.items.calendar.d2 = dhtmlXForm.prototype.items.input.destruct; dhtmlXForm.prototype.getCalendar = function(name) { return this.doWithItem(name, "getCalendar"); };
{ return false; }
conditional_block
calendar.js
dhtmlXForm.prototype.items.calendar = { render: function(item, data) { var t = this; item._type = "calendar"; item._enabled = true; this.doAddLabel(item, data); this.doAddInput(item, data, "INPUT", "TEXT", true, true, "dhxform_textarea"); item.childNodes[item._ll?1:0].childNodes[0]._idd = item._idd; item._f = (data.dateFormat||"%d-%m-%Y"); // formats item._f0 = (data.serverDateFormat||item._f); // formats for save-load, if set - use them for saving and loading only item._c = new dhtmlXCalendarObject(item.childNodes[item._ll?1:0].childNodes[0], data.skin||item.getForm().skin||"dhx_skyblue"); item._c._nullInInput = true; // allow null value from input item._c.enableListener(item.childNodes[item._ll?1:0].childNodes[0]); item._c.setDateFormat(item._f); if (!data.enableTime) item._c.hideTime(); if (!isNaN(data.weekStart)) item._c.setWeekStartDay(data.weekStart); if (typeof(data.calendarPosition) != "undefined") item._c.setPosition(data.calendarPosition); item._c._itemIdd = item._idd; item._c.attachEvent("onBeforeChange", function(d) { if (item._value != d) { // call some events if (item.checkEvent("onBeforeChange")) { if (item.callEvent("onBeforeChange",[item._idd, item._value, d]) !== true) { return false; } } // accepted item._value = d; t.setValue(item, d); item.callEvent("onChange", [this._itemIdd, item._value]); } return true; }); this.setValue(item, data.value); return this; }, getCalendar: function(item) { return item._c; }, setSkin: function(item, skin) { item._c.setSkin(skin); }, setValue: function(item, value) { if (!value || value == null || typeof(value) == "undefined" || value == "") { item._value = null; item.childNodes[item._ll?1:0].childNodes[0].value = ""; } else { item._value = (value instanceof Date ? value : item._c._strToDate(value, item._f0)); item.childNodes[item._ll?1:0].childNodes[0].value = item._c._dateToStr(item._value, item._f); } item._c.setDate(item._value); window.dhtmlXFormLs[item.getForm()._rId].vals[item._idd] = item.childNodes[item._ll?1:0].childNodes[0].value; }, getValue: function(item, asString) { var d = item._c.getDate(); if (asString===true && d == null) return ""; return (asString===true?item._c._dateToStr(d,item._f0):d); }, destruct: function(item) { // unload calendar instance
item._f = null; try {delete item._f;} catch(e){} item._f0 = null; try {delete item._f0;} catch(e){} // remove custom events/objects item.childNodes[item._ll?1:0].childNodes[0]._idd = null; // unload item this.d2(item); item = null; } }; (function(){ for (var a in {doAddLabel:1,doAddInput:1,doUnloadNestedLists:1,setText:1,getText:1,enable:1,disable:1,isEnabled:1,setWidth:1,setReadonly:1,isReadonly:1,setFocus:1,getInput:1}) dhtmlXForm.prototype.items.calendar[a] = dhtmlXForm.prototype.items.input[a]; })(); dhtmlXForm.prototype.items.calendar.d2 = dhtmlXForm.prototype.items.input.destruct; dhtmlXForm.prototype.getCalendar = function(name) { return this.doWithItem(name, "getCalendar"); };
item._c.unload(); item._c = null; try {delete item._c;} catch(e){}
random_line_split
length_expr.rs
// Copyright (c) 2015 Robert Clipsham <[email protected]>
// option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: Only field names, constants, integers, basic arithmetic expressions (+ - * / %) and parentheses are allowed in the "length" attribute #![feature(custom_attribute, plugin)] #![plugin(pnet_macros)] extern crate pnet; #[packet] pub struct PacketWithPayload { banana: u8, #[length = "banana + 7.5"] var_length: Vec<u8>, #[payload] payload: Vec<u8> } fn main() {}
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
random_line_split
length_expr.rs
// Copyright (c) 2015 Robert Clipsham <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: Only field names, constants, integers, basic arithmetic expressions (+ - * / %) and parentheses are allowed in the "length" attribute #![feature(custom_attribute, plugin)] #![plugin(pnet_macros)] extern crate pnet; #[packet] pub struct PacketWithPayload { banana: u8, #[length = "banana + 7.5"] var_length: Vec<u8>, #[payload] payload: Vec<u8> } fn main()
{}
identifier_body
length_expr.rs
// Copyright (c) 2015 Robert Clipsham <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: Only field names, constants, integers, basic arithmetic expressions (+ - * / %) and parentheses are allowed in the "length" attribute #![feature(custom_attribute, plugin)] #![plugin(pnet_macros)] extern crate pnet; #[packet] pub struct PacketWithPayload { banana: u8, #[length = "banana + 7.5"] var_length: Vec<u8>, #[payload] payload: Vec<u8> } fn
() {}
main
identifier_name
string_renderer.ts
// // Copyright 2017-21 Volker Sorge // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @file A simple audio renderer that ignores all prosody. * @author [email protected] (Volker Sorge) */ import { AbstractAudioRenderer } from './abstract_audio_renderer'; import { personalityMarkup } from './audio_util'; import { AuditoryDescription } from './auditory_description'; export class StringRenderer extends AbstractAudioRenderer { /** * @override */ public
(descrs: AuditoryDescription[]) { let str = ''; const markup = personalityMarkup(descrs); const clean = markup.filter((x) => x.span); if (!clean.length) { return str; } const len = clean.length - 1; for (let i = 0, descr; (descr = clean[i]); i++) { if (descr.span) { str += this.merge(descr.span); } if (i >= len) { continue; } const join = descr.join; str += typeof join === 'undefined' ? this.getSeparator() : join; } return str; } }
markup
identifier_name
string_renderer.ts
// // Copyright 2017-21 Volker Sorge // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @file A simple audio renderer that ignores all prosody. * @author [email protected] (Volker Sorge) */ import { AbstractAudioRenderer } from './abstract_audio_renderer'; import { personalityMarkup } from './audio_util'; import { AuditoryDescription } from './auditory_description'; export class StringRenderer extends AbstractAudioRenderer { /** * @override */ public markup(descrs: AuditoryDescription[]) { let str = ''; const markup = personalityMarkup(descrs); const clean = markup.filter((x) => x.span); if (!clean.length) { return str; } const len = clean.length - 1; for (let i = 0, descr; (descr = clean[i]); i++) { if (descr.span) { str += this.merge(descr.span); } if (i >= len) {
} return str; } }
continue; } const join = descr.join; str += typeof join === 'undefined' ? this.getSeparator() : join;
random_line_split
string_renderer.ts
// // Copyright 2017-21 Volker Sorge // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @file A simple audio renderer that ignores all prosody. * @author [email protected] (Volker Sorge) */ import { AbstractAudioRenderer } from './abstract_audio_renderer'; import { personalityMarkup } from './audio_util'; import { AuditoryDescription } from './auditory_description'; export class StringRenderer extends AbstractAudioRenderer { /** * @override */ public markup(descrs: AuditoryDescription[]) { let str = ''; const markup = personalityMarkup(descrs); const clean = markup.filter((x) => x.span); if (!clean.length) { return str; } const len = clean.length - 1; for (let i = 0, descr; (descr = clean[i]); i++) { if (descr.span)
if (i >= len) { continue; } const join = descr.join; str += typeof join === 'undefined' ? this.getSeparator() : join; } return str; } }
{ str += this.merge(descr.span); }
conditional_block
heroes.component.ts
import { Component, OnInit } from '@angular/core'; import { Router } from '@angular/router'; import { Hero } from './hero'; import { HeroService } from './hero.service'; @Component ({ moduleId: module.id, selector: 'my-heroes', templateUrl: 'heroes.component.html', styleUrls: [ 'heroes.component.css' ] }) export class HeroesComponent implements OnInit { heroes : Hero[]; selectedHero: Hero; constructor( private router: Router, private heroService: HeroService) { } getHeroes(): void { this.heroService.getHeroes().then(heroes => this.heroes = heroes); } ngOnInit(): void { this.getHeroes(); } onSelect(hero: Hero): void { this.selectedHero = hero; }
this.router.navigate(['/detail', this.selectedHero.id]); } add(name: string): void { name = name.trim(); if (!name) { return; } this.heroService.create(name) .then(hero => { this.heroes.push(hero); this.selectedHero = null; }); } delete(hero: Hero): void { this.heroService .delete(hero.id) .then(() => { this.heroes = this.heroes.filter(h => h !== hero); if (this.selectedHero === hero) { this.selectedHero = null; } }); } }
gotoDetail(): void {
random_line_split
heroes.component.ts
import { Component, OnInit } from '@angular/core'; import { Router } from '@angular/router'; import { Hero } from './hero'; import { HeroService } from './hero.service'; @Component ({ moduleId: module.id, selector: 'my-heroes', templateUrl: 'heroes.component.html', styleUrls: [ 'heroes.component.css' ] }) export class HeroesComponent implements OnInit { heroes : Hero[]; selectedHero: Hero; constructor( private router: Router, private heroService: HeroService) { } getHeroes(): void { this.heroService.getHeroes().then(heroes => this.heroes = heroes); } ngOnInit(): void { this.getHeroes(); } onSelect(hero: Hero): void { this.selectedHero = hero; } gotoDetail(): void { this.router.navigate(['/detail', this.selectedHero.id]); } add(name: string): void { name = name.trim(); if (!name) { return; } this.heroService.create(name) .then(hero => { this.heroes.push(hero); this.selectedHero = null; }); } delete(hero: Hero): void { this.heroService .delete(hero.id) .then(() => { this.heroes = this.heroes.filter(h => h !== hero); if (this.selectedHero === hero)
}); } }
{ this.selectedHero = null; }
conditional_block
heroes.component.ts
import { Component, OnInit } from '@angular/core'; import { Router } from '@angular/router'; import { Hero } from './hero'; import { HeroService } from './hero.service'; @Component ({ moduleId: module.id, selector: 'my-heroes', templateUrl: 'heroes.component.html', styleUrls: [ 'heroes.component.css' ] }) export class HeroesComponent implements OnInit { heroes : Hero[]; selectedHero: Hero;
( private router: Router, private heroService: HeroService) { } getHeroes(): void { this.heroService.getHeroes().then(heroes => this.heroes = heroes); } ngOnInit(): void { this.getHeroes(); } onSelect(hero: Hero): void { this.selectedHero = hero; } gotoDetail(): void { this.router.navigate(['/detail', this.selectedHero.id]); } add(name: string): void { name = name.trim(); if (!name) { return; } this.heroService.create(name) .then(hero => { this.heroes.push(hero); this.selectedHero = null; }); } delete(hero: Hero): void { this.heroService .delete(hero.id) .then(() => { this.heroes = this.heroes.filter(h => h !== hero); if (this.selectedHero === hero) { this.selectedHero = null; } }); } }
constructor
identifier_name
heroes.component.ts
import { Component, OnInit } from '@angular/core'; import { Router } from '@angular/router'; import { Hero } from './hero'; import { HeroService } from './hero.service'; @Component ({ moduleId: module.id, selector: 'my-heroes', templateUrl: 'heroes.component.html', styleUrls: [ 'heroes.component.css' ] }) export class HeroesComponent implements OnInit { heroes : Hero[]; selectedHero: Hero; constructor( private router: Router, private heroService: HeroService) { } getHeroes(): void { this.heroService.getHeroes().then(heroes => this.heroes = heroes); } ngOnInit(): void { this.getHeroes(); } onSelect(hero: Hero): void { this.selectedHero = hero; } gotoDetail(): void { this.router.navigate(['/detail', this.selectedHero.id]); } add(name: string): void
delete(hero: Hero): void { this.heroService .delete(hero.id) .then(() => { this.heroes = this.heroes.filter(h => h !== hero); if (this.selectedHero === hero) { this.selectedHero = null; } }); } }
{ name = name.trim(); if (!name) { return; } this.heroService.create(name) .then(hero => { this.heroes.push(hero); this.selectedHero = null; }); }
identifier_body
org-groups.js
import React, { Component, Fragment } from 'react'; import { navigate } from '@reach/router'; import PropTypes from 'prop-types'; import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap'; import { siteRoot, gettext, orgID } from '../../utils/constants'; import { seafileAPI } from '../../utils/seafile-api'; import { Utils } from '../../utils/utils'; import toaster from '../../components/toast'; import OrgGroupInfo from '../../models/org-group'; import MainPanelTopbar from './main-panel-topbar'; class Search extends React.Component { constructor(props) { super(props); this.state = { value: '' }; } handleInputChange = (e) => { this.setState({ value: e.target.value }); } handleKeyPress = (e) => { if (e.key == 'Enter') { e.preventDefault(); this.handleSubmit(); } } handleSubmit = () => { const value = this.state.value.trim(); if (!value) { return false; } this.props.submit(value); } render() { return ( <div className="input-icon"> <i className="d-flex input-icon-addon fas fa-search"></i> <input type="text" className="form-control search-input h-6 mr-1" style={{width: '15rem'}} placeholder={this.props.placeholder} value={this.state.value} onChange={this.handleInputChange} onKeyPress={this.handleKeyPress} autoComplete="off" /> </div> ); } } class
extends Component { constructor(props) { super(props); this.state = { page: 1, pageNext: false, orgGroups: [], isItemFreezed: false }; } componentDidMount() { let page = this.state.page; this.initData(page); } initData = (page) => { seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => { let orgGroups = res.data.groups.map(item => { return new OrgGroupInfo(item); }); this.setState({ orgGroups: orgGroups, pageNext: res.data.page_next, page: res.data.page, }); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } onChangePageNum = (e, num) => { e.preventDefault(); let page = this.state.page; if (num == 1) { page = page + 1; } else { page = page - 1; } this.initData(page); } onFreezedItem = () => { this.setState({isItemFreezed: true}); } onUnfreezedItem = () => { this.setState({isItemFreezed: false}); } deleteGroupItem = (group) => { seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => { this.setState({ orgGroups: this.state.orgGroups.filter(item => item.id != group.id) }); let msg = gettext('Successfully deleted {name}'); msg = msg.replace('{name}', group.groupName); toaster.success(msg); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } searchItems = (keyword) => { navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`); } getSearch = () => { return <Search placeholder={gettext('Search groups by name')} submit={this.searchItems} />; } render() { let groups = this.state.orgGroups; return ( <Fragment> <MainPanelTopbar search={this.getSearch()}/> <div className="main-panel-center flex-row"> <div className="cur-view-container"> <div className="cur-view-path"> <h3 className="sf-heading">{gettext('All Groups')}</h3> </div> <div className="cur-view-content"> <table> <thead> <tr> <th width="30%">{gettext('Name')}</th> <th width="35%">{gettext('Creator')}</th> <th width="23%">{gettext('Created At')}</th> <th width="12%" className="text-center">{gettext('Operations')}</th> </tr> </thead> <tbody> {groups.map(item => { return ( <GroupItem key={item.id} group={item} isItemFreezed={this.state.isItemFreezed} onFreezedItem={this.onFreezedItem} onUnfreezedItem={this.onUnfreezedItem} deleteGroupItem={this.deleteGroupItem} /> ); })} </tbody> </table> <div className="paginator"> {this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>} {(this.state.page != 1 && this.state.pageNext) && <span> | </span>} {this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>} </div> </div> </div> </div> </Fragment> ); } } const GroupItemPropTypes = { group: PropTypes.object.isRequired, isItemFreezed: PropTypes.bool.isRequired, onFreezedItem: PropTypes.func.isRequired, onUnfreezedItem: PropTypes.func.isRequired, deleteGroupItem: PropTypes.func.isRequired, }; class GroupItem extends React.Component { constructor(props) { super(props); this.state = { highlight: false, showMenu: false, isItemMenuShow: false }; } onMouseEnter = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: true, highlight: true, }); } } onMouseLeave = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: false, highlight: false }); } } onDropdownToggleClick = (e) => { e.preventDefault(); this.toggleOperationMenu(e); } toggleOperationMenu = (e) => { e.stopPropagation(); this.setState( {isItemMenuShow: !this.state.isItemMenuShow }, () => { if (this.state.isItemMenuShow) { this.props.onFreezedItem(); } 
else { this.setState({ highlight: false, showMenu: false, }); this.props.onUnfreezedItem(); } } ); } toggleDelete = () => { this.props.deleteGroupItem(this.props.group); } renderGroupHref = (group) => { let groupInfoHref; if (group.creatorName == 'system admin') { groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/'; } else { groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/'; } return groupInfoHref; } renderGroupCreator = (group) => { let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/'; if (group.creatorName == 'system admin') { return ( <td> -- </td> ); } else { return( <td> <a href={userInfoHref} className="font-weight-normal">{group.creatorName}</a> </td> ); } } render() { let { group } = this.props; let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu; return ( <tr className={this.state.highlight ? 'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}> <td> <a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a> </td> {this.renderGroupCreator(group)} <td>{group.ctime}</td> <td className="text-center cursor-pointer"> {isOperationMenuShow && <Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}> <DropdownToggle tag="a" className="attr-action-icon fas fa-ellipsis-v" title={gettext('More Operations')} data-toggle="dropdown" aria-expanded={this.state.isItemMenuShow} onClick={this.onDropdownToggleClick} /> <DropdownMenu> <DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem> </DropdownMenu> </Dropdown> } </td> </tr> ); } } GroupItem.propTypes = GroupItemPropTypes; export default OrgGroups;
OrgGroups
identifier_name
org-groups.js
import React, { Component, Fragment } from 'react'; import { navigate } from '@reach/router'; import PropTypes from 'prop-types'; import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap'; import { siteRoot, gettext, orgID } from '../../utils/constants'; import { seafileAPI } from '../../utils/seafile-api'; import { Utils } from '../../utils/utils'; import toaster from '../../components/toast'; import OrgGroupInfo from '../../models/org-group'; import MainPanelTopbar from './main-panel-topbar'; class Search extends React.Component { constructor(props) { super(props); this.state = { value: '' }; } handleInputChange = (e) => { this.setState({ value: e.target.value }); } handleKeyPress = (e) => { if (e.key == 'Enter') { e.preventDefault(); this.handleSubmit(); } } handleSubmit = () => { const value = this.state.value.trim(); if (!value) { return false; } this.props.submit(value); } render() { return ( <div className="input-icon">
placeholder={this.props.placeholder} value={this.state.value} onChange={this.handleInputChange} onKeyPress={this.handleKeyPress} autoComplete="off" /> </div> ); } } class OrgGroups extends Component { constructor(props) { super(props); this.state = { page: 1, pageNext: false, orgGroups: [], isItemFreezed: false }; } componentDidMount() { let page = this.state.page; this.initData(page); } initData = (page) => { seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => { let orgGroups = res.data.groups.map(item => { return new OrgGroupInfo(item); }); this.setState({ orgGroups: orgGroups, pageNext: res.data.page_next, page: res.data.page, }); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } onChangePageNum = (e, num) => { e.preventDefault(); let page = this.state.page; if (num == 1) { page = page + 1; } else { page = page - 1; } this.initData(page); } onFreezedItem = () => { this.setState({isItemFreezed: true}); } onUnfreezedItem = () => { this.setState({isItemFreezed: false}); } deleteGroupItem = (group) => { seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => { this.setState({ orgGroups: this.state.orgGroups.filter(item => item.id != group.id) }); let msg = gettext('Successfully deleted {name}'); msg = msg.replace('{name}', group.groupName); toaster.success(msg); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } searchItems = (keyword) => { navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`); } getSearch = () => { return <Search placeholder={gettext('Search groups by name')} submit={this.searchItems} />; } render() { let groups = this.state.orgGroups; return ( <Fragment> <MainPanelTopbar search={this.getSearch()}/> <div className="main-panel-center flex-row"> <div className="cur-view-container"> <div className="cur-view-path"> <h3 className="sf-heading">{gettext('All Groups')}</h3> </div> <div className="cur-view-content"> <table> <thead> <tr> <th width="30%">{gettext('Name')}</th> <th width="35%">{gettext('Creator')}</th> <th width="23%">{gettext('Created At')}</th> <th width="12%" className="text-center">{gettext('Operations')}</th> </tr> </thead> <tbody> {groups.map(item => { return ( <GroupItem key={item.id} group={item} isItemFreezed={this.state.isItemFreezed} onFreezedItem={this.onFreezedItem} onUnfreezedItem={this.onUnfreezedItem} deleteGroupItem={this.deleteGroupItem} /> ); })} </tbody> </table> <div className="paginator"> {this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>} {(this.state.page != 1 && this.state.pageNext) && <span> | </span>} {this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>} </div> </div> </div> </div> </Fragment> ); } } const GroupItemPropTypes = { group: PropTypes.object.isRequired, isItemFreezed: PropTypes.bool.isRequired, onFreezedItem: PropTypes.func.isRequired, onUnfreezedItem: PropTypes.func.isRequired, deleteGroupItem: PropTypes.func.isRequired, }; class GroupItem extends React.Component { constructor(props) { super(props); this.state = { highlight: false, showMenu: false, isItemMenuShow: false }; } onMouseEnter = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: true, highlight: true, }); } } onMouseLeave = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: false, highlight: false }); } } onDropdownToggleClick = (e) => { e.preventDefault(); this.toggleOperationMenu(e); } 
toggleOperationMenu = (e) => { e.stopPropagation(); this.setState( {isItemMenuShow: !this.state.isItemMenuShow }, () => { if (this.state.isItemMenuShow) { this.props.onFreezedItem(); } else { this.setState({ highlight: false, showMenu: false, }); this.props.onUnfreezedItem(); } } ); } toggleDelete = () => { this.props.deleteGroupItem(this.props.group); } renderGroupHref = (group) => { let groupInfoHref; if (group.creatorName == 'system admin') { groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/'; } else { groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/'; } return groupInfoHref; } renderGroupCreator = (group) => { let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/'; if (group.creatorName == 'system admin') { return ( <td> -- </td> ); } else { return( <td> <a href={userInfoHref} className="font-weight-normal">{group.creatorName}</a> </td> ); } } render() { let { group } = this.props; let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu; return ( <tr className={this.state.highlight ? 'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}> <td> <a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a> </td> {this.renderGroupCreator(group)} <td>{group.ctime}</td> <td className="text-center cursor-pointer"> {isOperationMenuShow && <Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}> <DropdownToggle tag="a" className="attr-action-icon fas fa-ellipsis-v" title={gettext('More Operations')} data-toggle="dropdown" aria-expanded={this.state.isItemMenuShow} onClick={this.onDropdownToggleClick} /> <DropdownMenu> <DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem> </DropdownMenu> </Dropdown> } </td> </tr> ); } } GroupItem.propTypes = GroupItemPropTypes; export default OrgGroups;
<i className="d-flex input-icon-addon fas fa-search"></i> <input type="text" className="form-control search-input h-6 mr-1" style={{width: '15rem'}}
random_line_split
org-groups.js
import React, { Component, Fragment } from 'react'; import { navigate } from '@reach/router'; import PropTypes from 'prop-types'; import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap'; import { siteRoot, gettext, orgID } from '../../utils/constants'; import { seafileAPI } from '../../utils/seafile-api'; import { Utils } from '../../utils/utils'; import toaster from '../../components/toast'; import OrgGroupInfo from '../../models/org-group'; import MainPanelTopbar from './main-panel-topbar'; class Search extends React.Component { constructor(props) { super(props); this.state = { value: '' }; } handleInputChange = (e) => { this.setState({ value: e.target.value }); } handleKeyPress = (e) => { if (e.key == 'Enter') { e.preventDefault(); this.handleSubmit(); } } handleSubmit = () => { const value = this.state.value.trim(); if (!value) { return false; } this.props.submit(value); } render() { return ( <div className="input-icon"> <i className="d-flex input-icon-addon fas fa-search"></i> <input type="text" className="form-control search-input h-6 mr-1" style={{width: '15rem'}} placeholder={this.props.placeholder} value={this.state.value} onChange={this.handleInputChange} onKeyPress={this.handleKeyPress} autoComplete="off" /> </div> ); } } class OrgGroups extends Component { constructor(props) { super(props); this.state = { page: 1, pageNext: false, orgGroups: [], isItemFreezed: false }; } componentDidMount() { let page = this.state.page; this.initData(page); } initData = (page) => { seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => { let orgGroups = res.data.groups.map(item => { return new OrgGroupInfo(item); }); this.setState({ orgGroups: orgGroups, pageNext: res.data.page_next, page: res.data.page, }); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } onChangePageNum = (e, num) => { e.preventDefault(); let page = this.state.page; if (num == 1) { page = page + 1; } else
this.initData(page); } onFreezedItem = () => { this.setState({isItemFreezed: true}); } onUnfreezedItem = () => { this.setState({isItemFreezed: false}); } deleteGroupItem = (group) => { seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => { this.setState({ orgGroups: this.state.orgGroups.filter(item => item.id != group.id) }); let msg = gettext('Successfully deleted {name}'); msg = msg.replace('{name}', group.groupName); toaster.success(msg); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } searchItems = (keyword) => { navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`); } getSearch = () => { return <Search placeholder={gettext('Search groups by name')} submit={this.searchItems} />; } render() { let groups = this.state.orgGroups; return ( <Fragment> <MainPanelTopbar search={this.getSearch()}/> <div className="main-panel-center flex-row"> <div className="cur-view-container"> <div className="cur-view-path"> <h3 className="sf-heading">{gettext('All Groups')}</h3> </div> <div className="cur-view-content"> <table> <thead> <tr> <th width="30%">{gettext('Name')}</th> <th width="35%">{gettext('Creator')}</th> <th width="23%">{gettext('Created At')}</th> <th width="12%" className="text-center">{gettext('Operations')}</th> </tr> </thead> <tbody> {groups.map(item => { return ( <GroupItem key={item.id} group={item} isItemFreezed={this.state.isItemFreezed} onFreezedItem={this.onFreezedItem} onUnfreezedItem={this.onUnfreezedItem} deleteGroupItem={this.deleteGroupItem} /> ); })} </tbody> </table> <div className="paginator"> {this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>} {(this.state.page != 1 && this.state.pageNext) && <span> | </span>} {this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>} </div> </div> </div> </div> </Fragment> ); } } const GroupItemPropTypes = { group: PropTypes.object.isRequired, isItemFreezed: PropTypes.bool.isRequired, onFreezedItem: PropTypes.func.isRequired, onUnfreezedItem: PropTypes.func.isRequired, deleteGroupItem: PropTypes.func.isRequired, }; class GroupItem extends React.Component { constructor(props) { super(props); this.state = { highlight: false, showMenu: false, isItemMenuShow: false }; } onMouseEnter = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: true, highlight: true, }); } } onMouseLeave = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: false, highlight: false }); } } onDropdownToggleClick = (e) => { e.preventDefault(); this.toggleOperationMenu(e); } toggleOperationMenu = (e) => { e.stopPropagation(); this.setState( {isItemMenuShow: !this.state.isItemMenuShow }, () => { if (this.state.isItemMenuShow) { this.props.onFreezedItem(); } else { this.setState({ highlight: false, showMenu: false, }); this.props.onUnfreezedItem(); } } ); } toggleDelete = () => { this.props.deleteGroupItem(this.props.group); } renderGroupHref = (group) => { let groupInfoHref; if (group.creatorName == 'system admin') { groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/'; } else { groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/'; } return groupInfoHref; } renderGroupCreator = (group) => { let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/'; if (group.creatorName == 'system admin') { return ( <td> -- </td> ); } else { return( <td> <a href={userInfoHref} 
className="font-weight-normal">{group.creatorName}</a> </td> ); } } render() { let { group } = this.props; let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu; return ( <tr className={this.state.highlight ? 'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}> <td> <a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a> </td> {this.renderGroupCreator(group)} <td>{group.ctime}</td> <td className="text-center cursor-pointer"> {isOperationMenuShow && <Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}> <DropdownToggle tag="a" className="attr-action-icon fas fa-ellipsis-v" title={gettext('More Operations')} data-toggle="dropdown" aria-expanded={this.state.isItemMenuShow} onClick={this.onDropdownToggleClick} /> <DropdownMenu> <DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem> </DropdownMenu> </Dropdown> } </td> </tr> ); } } GroupItem.propTypes = GroupItemPropTypes; export default OrgGroups;
{ page = page - 1; }
conditional_block
lib.rs
// ================================================================= // // * WARNING * // // This file is generated! // // Changes made to this file will be overwritten. If changes are
// must be updated to generate the changes. // // ================================================================= #![doc(html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png")] //! <p>Use the AWS Elemental MediaTailor SDK to configure scalable ad insertion for your live and VOD content. With AWS Elemental MediaTailor, you can serve targeted ads to viewers while maintaining broadcast quality in over-the-top (OTT) video applications. For information about using the service, including detailed information about the settings covered in this guide, see the AWS Elemental MediaTailor User Guide.<p>Through the SDK, you manage AWS Elemental MediaTailor configurations the same as you do through the console. For example, you specify ad insertion behavior and mapping information for the origin server and the ad decision server (ADS).</p> //! //! If you're using the service, you're probably looking for [MediaTailorClient](struct.MediaTailorClient.html) and [MediaTailor](trait.MediaTailor.html). extern crate futures; #[macro_use] extern crate log; extern crate rusoto_core; extern crate serde; #[macro_use] extern crate serde_derive; extern crate serde_json; mod generated; mod custom; pub use generated::*; pub use custom::*;
// required to the generated code, the service_crategen project
random_line_split
run.py
""" Current driven domain-wall motion with constant current and spin accumulation. """ # Copyright (C) 2011-2015 Claas Abert # # This file is part of magnum.fe. # # magnum.fe is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # magnum.fe is distributed in the hope that it will be useful,
# You should have received a copy of the GNU Lesser General Public License # along with magnum.fe. If not, see <http://www.gnu.org/licenses/>. # # Last modified by Claas Abert, 2015-02-16 from magnumfe import * ####################################### #### DEFINE MESH, STATE AND MATERIAL ####################################### mesh = BoxMesh(-600.0/2, -100.0/2, -10.0/2, 600.0/2, 100.0/2, 10.0/2, 120, 20, 1) state = State(mesh, scale = 1e-9, material = Material( alpha = 0.1, ms = 8e5, Aex = 1.3e-11, D0 = 1e-3, beta = 0.9, beta_prime = 0.8, lambda_sf = 10e-9, lambda_j = 4e-9, c = 3.125e-3 ), m = Expression(('1.0 - 2*(x[0] < 0.0)', 'x[0] > -10.0 && x[0] < 10.0', '0.0')), s = Constant((0.0, 0.0, 0.0)), j = Constant((0.0, 0.0, 0.0)) ) # normalize since initial configuration is not normalized state.m.normalize() # setup integrators llg = LLGAlougesProject([ ExchangeField(), DemagField("FK"), SpinTorque() ]) spindiff = SpinDiffusion() # relax for j in range(200): state.step(llg, 1e-12) # apply constant current state.j = Constant((3e12, 0, 0)) state.t = 0.0 # prepare log files mfile = File("data/m.pvd") sfile = File("data/s.pvd") for j in range(1000): # save fields every 10th step if j % 10 == 0: mfile << (state.m, state.t) sfile << (state.s, state.t) # calculate next step state.step([llg, spindiff], 1e-12)
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. #
random_line_split
run.py
""" Current driven domain-wall motion with constant current and spin accumulation. """ # Copyright (C) 2011-2015 Claas Abert # # This file is part of magnum.fe. # # magnum.fe is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # magnum.fe is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with magnum.fe. If not, see <http://www.gnu.org/licenses/>. # # Last modified by Claas Abert, 2015-02-16 from magnumfe import * ####################################### #### DEFINE MESH, STATE AND MATERIAL ####################################### mesh = BoxMesh(-600.0/2, -100.0/2, -10.0/2, 600.0/2, 100.0/2, 10.0/2, 120, 20, 1) state = State(mesh, scale = 1e-9, material = Material( alpha = 0.1, ms = 8e5, Aex = 1.3e-11, D0 = 1e-3, beta = 0.9, beta_prime = 0.8, lambda_sf = 10e-9, lambda_j = 4e-9, c = 3.125e-3 ), m = Expression(('1.0 - 2*(x[0] < 0.0)', 'x[0] > -10.0 && x[0] < 10.0', '0.0')), s = Constant((0.0, 0.0, 0.0)), j = Constant((0.0, 0.0, 0.0)) ) # normalize since initial configuration is not normalized state.m.normalize() # setup integrators llg = LLGAlougesProject([ ExchangeField(), DemagField("FK"), SpinTorque() ]) spindiff = SpinDiffusion() # relax for j in range(200): state.step(llg, 1e-12) # apply constant current state.j = Constant((3e12, 0, 0)) state.t = 0.0 # prepare log files mfile = File("data/m.pvd") sfile = File("data/s.pvd") for j in range(1000): # save fields every 10th step
if j % 10 == 0: mfile << (state.m, state.t) sfile << (state.s, state.t) # calculate next step state.step([llg, spindiff], 1e-12)
conditional_block
useQuietPeriod.hook.ts
import { REST } from '../../api'; import { useLatestPromise } from '../../presentation'; // Shape from back end interface IQuietPeriodConfig { startTime: number; endTime: number; enabled: boolean; // Do not use in UI -- point-in-time data from back-end inQuietPeriod: boolean; } class QuietPeriodService { private static _quietPeriodConfig: PromiseLike<IQuietPeriodConfig>; static async quietPeriodConfig(): Promise<IQuietPeriodConfig> { this._quietPeriodConfig = this._quietPeriodConfig ?? REST('/capabilities/quietPeriod').get<IQuietPeriodConfig>(); return await this._quietPeriodConfig; } } interface IQuietPeriod { currentStatus: 'UNKNOWN' | 'BEFORE_QUIET_PERIOD' | 'DURING_QUIET_PERIOD' | 'AFTER_QUIET_PERIOD' | 'NO_QUIET_PERIOD'; startTime: number; endTime: number; } export function useQuietPeriod(): IQuietPeriod { const result = useLatestPromise(() => QuietPeriodService.quietPeriodConfig(), []);
} const { startTime, endTime, enabled } = result.result; if (!enabled || !startTime || startTime < 0 || !endTime || endTime < 0) { return { currentStatus: 'NO_QUIET_PERIOD', startTime: undefined, endTime: undefined }; } const now = Date.now(); const currentStatus = now < startTime ? 'BEFORE_QUIET_PERIOD' : now > endTime ? 'AFTER_QUIET_PERIOD' : 'DURING_QUIET_PERIOD'; return { currentStatus, startTime, endTime }; }
if (result.status !== 'RESOLVED') { return { currentStatus: 'UNKNOWN', startTime: undefined, endTime: undefined };
random_line_split
useQuietPeriod.hook.ts
import { REST } from '../../api'; import { useLatestPromise } from '../../presentation'; // Shape from back end interface IQuietPeriodConfig { startTime: number; endTime: number; enabled: boolean; // Do not use in UI -- point-in-time data from back-end inQuietPeriod: boolean; } class QuietPeriodService { private static _quietPeriodConfig: PromiseLike<IQuietPeriodConfig>; static async quietPeriodConfig(): Promise<IQuietPeriodConfig> { this._quietPeriodConfig = this._quietPeriodConfig ?? REST('/capabilities/quietPeriod').get<IQuietPeriodConfig>(); return await this._quietPeriodConfig; } } interface IQuietPeriod { currentStatus: 'UNKNOWN' | 'BEFORE_QUIET_PERIOD' | 'DURING_QUIET_PERIOD' | 'AFTER_QUIET_PERIOD' | 'NO_QUIET_PERIOD'; startTime: number; endTime: number; } export function
(): IQuietPeriod { const result = useLatestPromise(() => QuietPeriodService.quietPeriodConfig(), []); if (result.status !== 'RESOLVED') { return { currentStatus: 'UNKNOWN', startTime: undefined, endTime: undefined }; } const { startTime, endTime, enabled } = result.result; if (!enabled || !startTime || startTime < 0 || !endTime || endTime < 0) { return { currentStatus: 'NO_QUIET_PERIOD', startTime: undefined, endTime: undefined }; } const now = Date.now(); const currentStatus = now < startTime ? 'BEFORE_QUIET_PERIOD' : now > endTime ? 'AFTER_QUIET_PERIOD' : 'DURING_QUIET_PERIOD'; return { currentStatus, startTime, endTime }; }
useQuietPeriod
identifier_name
useQuietPeriod.hook.ts
import { REST } from '../../api'; import { useLatestPromise } from '../../presentation'; // Shape from back end interface IQuietPeriodConfig { startTime: number; endTime: number; enabled: boolean; // Do not use in UI -- point-in-time data from back-end inQuietPeriod: boolean; } class QuietPeriodService { private static _quietPeriodConfig: PromiseLike<IQuietPeriodConfig>; static async quietPeriodConfig(): Promise<IQuietPeriodConfig> { this._quietPeriodConfig = this._quietPeriodConfig ?? REST('/capabilities/quietPeriod').get<IQuietPeriodConfig>(); return await this._quietPeriodConfig; } } interface IQuietPeriod { currentStatus: 'UNKNOWN' | 'BEFORE_QUIET_PERIOD' | 'DURING_QUIET_PERIOD' | 'AFTER_QUIET_PERIOD' | 'NO_QUIET_PERIOD'; startTime: number; endTime: number; } export function useQuietPeriod(): IQuietPeriod
{ const result = useLatestPromise(() => QuietPeriodService.quietPeriodConfig(), []); if (result.status !== 'RESOLVED') { return { currentStatus: 'UNKNOWN', startTime: undefined, endTime: undefined }; } const { startTime, endTime, enabled } = result.result; if (!enabled || !startTime || startTime < 0 || !endTime || endTime < 0) { return { currentStatus: 'NO_QUIET_PERIOD', startTime: undefined, endTime: undefined }; } const now = Date.now(); const currentStatus = now < startTime ? 'BEFORE_QUIET_PERIOD' : now > endTime ? 'AFTER_QUIET_PERIOD' : 'DURING_QUIET_PERIOD'; return { currentStatus, startTime, endTime }; }
identifier_body
useQuietPeriod.hook.ts
import { REST } from '../../api'; import { useLatestPromise } from '../../presentation'; // Shape from back end interface IQuietPeriodConfig { startTime: number; endTime: number; enabled: boolean; // Do not use in UI -- point-in-time data from back-end inQuietPeriod: boolean; } class QuietPeriodService { private static _quietPeriodConfig: PromiseLike<IQuietPeriodConfig>; static async quietPeriodConfig(): Promise<IQuietPeriodConfig> { this._quietPeriodConfig = this._quietPeriodConfig ?? REST('/capabilities/quietPeriod').get<IQuietPeriodConfig>(); return await this._quietPeriodConfig; } } interface IQuietPeriod { currentStatus: 'UNKNOWN' | 'BEFORE_QUIET_PERIOD' | 'DURING_QUIET_PERIOD' | 'AFTER_QUIET_PERIOD' | 'NO_QUIET_PERIOD'; startTime: number; endTime: number; } export function useQuietPeriod(): IQuietPeriod { const result = useLatestPromise(() => QuietPeriodService.quietPeriodConfig(), []); if (result.status !== 'RESOLVED') { return { currentStatus: 'UNKNOWN', startTime: undefined, endTime: undefined }; } const { startTime, endTime, enabled } = result.result; if (!enabled || !startTime || startTime < 0 || !endTime || endTime < 0)
const now = Date.now(); const currentStatus = now < startTime ? 'BEFORE_QUIET_PERIOD' : now > endTime ? 'AFTER_QUIET_PERIOD' : 'DURING_QUIET_PERIOD'; return { currentStatus, startTime, endTime }; }
{ return { currentStatus: 'NO_QUIET_PERIOD', startTime: undefined, endTime: undefined }; }
conditional_block
issue-17913.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
// option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: too big for the current architecture #![feature(box_syntax)] #[cfg(target_pointer_width = "64")] fn main() { let n = 0_usize; let a: Box<_> = box [&n; 0xF000000000000000_usize]; println!("{}", a[0xFFFFFF_usize]); } #[cfg(target_pointer_width = "32")] fn main() { let n = 0_usize; let a: Box<_> = box [&n; 0xFFFFFFFF_usize]; println!("{}", a[0xFFFFFF_usize]); }
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
random_line_split
issue-17913.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: too big for the current architecture #![feature(box_syntax)] #[cfg(target_pointer_width = "64")] fn main()
#[cfg(target_pointer_width = "32")] fn main() { let n = 0_usize; let a: Box<_> = box [&n; 0xFFFFFFFF_usize]; println!("{}", a[0xFFFFFF_usize]); }
{ let n = 0_usize; let a: Box<_> = box [&n; 0xF000000000000000_usize]; println!("{}", a[0xFFFFFF_usize]); }
identifier_body
issue-17913.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: too big for the current architecture #![feature(box_syntax)] #[cfg(target_pointer_width = "64")] fn
() { let n = 0_usize; let a: Box<_> = box [&n; 0xF000000000000000_usize]; println!("{}", a[0xFFFFFF_usize]); } #[cfg(target_pointer_width = "32")] fn main() { let n = 0_usize; let a: Box<_> = box [&n; 0xFFFFFFFF_usize]; println!("{}", a[0xFFFFFF_usize]); }
main
identifier_name
ganeti.mcpu_unittest.py
#!/usr/bin/python # # Copyright (C) 2009, 2011 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Script for unittesting the mcpu module""" import unittest import itertools from ganeti import compat from ganeti import mcpu from ganeti import opcodes from ganeti import cmdlib from ganeti import locking from ganeti import constants from ganeti.constants import \ LOCK_ATTEMPTS_TIMEOUT, \ LOCK_ATTEMPTS_MAXWAIT, \ LOCK_ATTEMPTS_MINWAIT import testutils REQ_BGL_WHITELIST = compat.UniqueFrozenset([ opcodes.OpClusterActivateMasterIp, opcodes.OpClusterDeactivateMasterIp, opcodes.OpClusterDestroy, opcodes.OpClusterPostInit, opcodes.OpClusterRename, opcodes.OpInstanceRename, opcodes.OpNodeAdd, opcodes.OpNodeRemove, opcodes.OpTestAllocator, ]) class TestLockAttemptTimeoutStrategy(unittest.TestCase): def testConstants(self): tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT) self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT) self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800, msg="Waiting less than half an hour per priority") self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600, msg="Waiting more than an hour per priority") def testSimple(self): strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5, _time_fn=lambda: 0.0) prev = None for i in range(len(strat._TIMEOUT_PER_ATTEMPT)): timeout = strat.NextAttempt() self.assert_(timeout is not None) self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT) self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT) self.assert_(prev is None or timeout >= prev) prev = timeout for _ in range(10): self.assert_(strat.NextAttempt() is None) class TestDispatchTable(unittest.TestCase): def test(self): for opcls in opcodes.OP_MAPPING.values(): if not opcls.WITH_LU: continue self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE, msg="%s missing handler class" % opcls) # Check against BGL whitelist lucls = mcpu.Processor.DISPATCH_TABLE[opcls] if lucls.REQ_BGL: self.assertTrue(opcls in REQ_BGL_WHITELIST, msg=("%s not whitelisted for BGL" % opcls.OP_ID)) else: self.assertFalse(opcls in REQ_BGL_WHITELIST, msg=("%s whitelisted for BGL, but doesn't use it" % opcls.OP_ID)) class TestProcessResult(unittest.TestCase): def setUp(self): self._submitted = [] self._count = 
itertools.count(200) def _Submit(self, jobs): job_ids = [self._count.next() for _ in jobs] self._submitted.extend(zip(job_ids, jobs)) return job_ids def testNoJobs(self): for i in [object(), [], False, True, None, 1, 929, {}]: self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i), i) def testDefaults(self): src = opcodes.OpTestDummy() res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(), opcodes.OpTestDelay(), ], [ opcodes.OpTestDelay(), ]])) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], }) (_, (op1, op2)) = self._submitted.pop(0) (_, (op3, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) for op in [op1, op2, op3]: self.assertTrue("OP_TEST_DUMMY" in op.comment) self.assertFalse(hasattr(op, "priority")) self.assertFalse(hasattr(op, "debug_level")) def testParams(self): src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH, debug_level=3) res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW), ], [ opcodes.OpTestDelay(comment="foobar", debug_level=10), ]], other=True, value=range(10))) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], "other": True, "value": range(10), }) (_, (op1, )) = self._submitted.pop(0) (_, (op2, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) self.assertEqual(op1.priority, constants.OP_PRIO_LOW) self.assertTrue("OP_TEST_DUMMY" in op1.comment) self.assertEqual(op1.debug_level, 3) self.assertEqual(op2.priority, constants.OP_PRIO_HIGH) self.assertEqual(op2.comment, "foobar") self.assertEqual(op2.debug_level, 3) class _FakeLuWithLocks: def __init__(self, needed_locks, share_locks): self.needed_locks = needed_locks self.share_locks = share_locks class _FakeGlm: def __init__(self, owning_nal): self._owning_nal = owning_nal def check_owned(self, level, names): assert level == locking.LEVEL_NODE_ALLOC assert names == locking.NAL return self._owning_nal def owning_all(self, level): return False class TestVerifyLocks(unittest.TestCase): def testNoLocks(self): lu = _FakeLuWithLocks({}, {}) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=NotImplemented, _nal_whitelist=NotImplemented) def testNotAllSameMode(self):
def testDifferentMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("using the same mode as nodes" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) def testSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 1, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("whitelisted to use different modes" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testAllWithoutAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("allocation lock must be used if" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) def testAllWithAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) except AssertionError, err: self.assertTrue("whitelisted for not acquiring" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) if __name__ == "__main__": testutils.GanetiTestProgram()
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
identifier_body
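Each record in this dump appears to follow the same layout: a source file name, the code leading up to a masked span, the code that follows it, the masked span itself, and a label such as identifier_body, identifier_name, random_line_split, or conditional_block describing how the span was cut — i.e. what looks like a fill-in-the-middle style record. A minimal sketch of how such a record could be stitched back together, with hypothetical field names (this helper is illustrative and not one of the dataset records):

```python
# Illustrative only -- the field names ("before", "gap", "after", "label")
# are assumptions, not the dataset's real column names.
def reassemble(record):
    """Rebuild the original file text from a fill-in-the-middle record."""
    return record["before"] + record["gap"] + record["after"]

# Hypothetical record mirroring the identifier_name example below, where the
# masked span is the method name "owning_all".
example = {
    "file": "ganeti.mcpu_unittest.py",
    "before": "  def ",
    "gap": "owning_all",
    "after": "(self, level):\n    return False\n",
    "label": "identifier_name",
}

assert "def owning_all(self, level):" in reassemble(example)
```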
ganeti.mcpu_unittest.py
#!/usr/bin/python # # Copyright (C) 2009, 2011 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Script for unittesting the mcpu module""" import unittest import itertools from ganeti import compat from ganeti import mcpu from ganeti import opcodes from ganeti import cmdlib from ganeti import locking from ganeti import constants from ganeti.constants import \ LOCK_ATTEMPTS_TIMEOUT, \ LOCK_ATTEMPTS_MAXWAIT, \ LOCK_ATTEMPTS_MINWAIT import testutils REQ_BGL_WHITELIST = compat.UniqueFrozenset([ opcodes.OpClusterActivateMasterIp, opcodes.OpClusterDeactivateMasterIp, opcodes.OpClusterDestroy, opcodes.OpClusterPostInit, opcodes.OpClusterRename, opcodes.OpInstanceRename, opcodes.OpNodeAdd, opcodes.OpNodeRemove, opcodes.OpTestAllocator, ]) class TestLockAttemptTimeoutStrategy(unittest.TestCase): def testConstants(self): tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT) self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT) self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800, msg="Waiting less than half an hour per priority") self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600, msg="Waiting more than an hour per priority") def testSimple(self): strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5, _time_fn=lambda: 0.0) prev = None for i in range(len(strat._TIMEOUT_PER_ATTEMPT)): timeout = strat.NextAttempt() self.assert_(timeout is not None) self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT) self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT) self.assert_(prev is None or timeout >= prev) prev = timeout for _ in range(10): self.assert_(strat.NextAttempt() is None) class TestDispatchTable(unittest.TestCase): def test(self): for opcls in opcodes.OP_MAPPING.values(): if not opcls.WITH_LU: continue self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE, msg="%s missing handler class" % opcls) # Check against BGL whitelist lucls = mcpu.Processor.DISPATCH_TABLE[opcls] if lucls.REQ_BGL: self.assertTrue(opcls in REQ_BGL_WHITELIST, msg=("%s not whitelisted for BGL" % opcls.OP_ID)) else: self.assertFalse(opcls in REQ_BGL_WHITELIST, msg=("%s whitelisted for BGL, but doesn't use it" % opcls.OP_ID)) class TestProcessResult(unittest.TestCase): def setUp(self): self._submitted = [] self._count = 
itertools.count(200) def _Submit(self, jobs): job_ids = [self._count.next() for _ in jobs] self._submitted.extend(zip(job_ids, jobs)) return job_ids def testNoJobs(self): for i in [object(), [], False, True, None, 1, 929, {}]: self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i), i) def testDefaults(self): src = opcodes.OpTestDummy() res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(), opcodes.OpTestDelay(), ], [ opcodes.OpTestDelay(), ]])) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], }) (_, (op1, op2)) = self._submitted.pop(0) (_, (op3, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) for op in [op1, op2, op3]: self.assertTrue("OP_TEST_DUMMY" in op.comment) self.assertFalse(hasattr(op, "priority")) self.assertFalse(hasattr(op, "debug_level")) def testParams(self): src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH, debug_level=3) res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW), ], [ opcodes.OpTestDelay(comment="foobar", debug_level=10), ]], other=True, value=range(10))) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], "other": True, "value": range(10), }) (_, (op1, )) = self._submitted.pop(0) (_, (op2, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) self.assertEqual(op1.priority, constants.OP_PRIO_LOW) self.assertTrue("OP_TEST_DUMMY" in op1.comment) self.assertEqual(op1.debug_level, 3) self.assertEqual(op2.priority, constants.OP_PRIO_HIGH) self.assertEqual(op2.comment, "foobar") self.assertEqual(op2.debug_level, 3) class _FakeLuWithLocks: def __init__(self, needed_locks, share_locks): self.needed_locks = needed_locks self.share_locks = share_locks class _FakeGlm: def __init__(self, owning_nal): self._owning_nal = owning_nal def check_owned(self, level, names): assert level == locking.LEVEL_NODE_ALLOC assert names == locking.NAL return self._owning_nal def
(self, level): return False class TestVerifyLocks(unittest.TestCase): def testNoLocks(self): lu = _FakeLuWithLocks({}, {}) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=NotImplemented, _nal_whitelist=NotImplemented) def testNotAllSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testDifferentMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("using the same mode as nodes" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) def testSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 1, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("whitelisted to use different modes" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testAllWithoutAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("allocation lock must be used if" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) def testAllWithAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) except AssertionError, err: self.assertTrue("whitelisted for not acquiring" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) if __name__ == "__main__": testutils.GanetiTestProgram()
owning_all
identifier_name
ganeti.mcpu_unittest.py
#!/usr/bin/python # # Copyright (C) 2009, 2011 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Script for unittesting the mcpu module""" import unittest import itertools from ganeti import compat from ganeti import mcpu from ganeti import opcodes from ganeti import cmdlib from ganeti import locking from ganeti import constants from ganeti.constants import \ LOCK_ATTEMPTS_TIMEOUT, \ LOCK_ATTEMPTS_MAXWAIT, \ LOCK_ATTEMPTS_MINWAIT import testutils REQ_BGL_WHITELIST = compat.UniqueFrozenset([ opcodes.OpClusterActivateMasterIp, opcodes.OpClusterDeactivateMasterIp, opcodes.OpClusterDestroy, opcodes.OpClusterPostInit, opcodes.OpClusterRename, opcodes.OpInstanceRename, opcodes.OpNodeAdd, opcodes.OpNodeRemove, opcodes.OpTestAllocator, ]) class TestLockAttemptTimeoutStrategy(unittest.TestCase): def testConstants(self): tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT) self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT) self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800, msg="Waiting less than half an hour per priority") self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600, msg="Waiting more than an hour per priority") def testSimple(self): strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5, _time_fn=lambda: 0.0) prev = None for i in range(len(strat._TIMEOUT_PER_ATTEMPT)): timeout = strat.NextAttempt() self.assert_(timeout is not None) self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT) self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT) self.assert_(prev is None or timeout >= prev) prev = timeout for _ in range(10): self.assert_(strat.NextAttempt() is None) class TestDispatchTable(unittest.TestCase): def test(self): for opcls in opcodes.OP_MAPPING.values(): if not opcls.WITH_LU: continue self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE, msg="%s missing handler class" % opcls) # Check against BGL whitelist lucls = mcpu.Processor.DISPATCH_TABLE[opcls] if lucls.REQ_BGL: self.assertTrue(opcls in REQ_BGL_WHITELIST, msg=("%s not whitelisted for BGL" % opcls.OP_ID)) else: self.assertFalse(opcls in REQ_BGL_WHITELIST, msg=("%s whitelisted for BGL, but doesn't use it" % opcls.OP_ID)) class TestProcessResult(unittest.TestCase): def setUp(self): self._submitted = [] self._count = 
itertools.count(200) def _Submit(self, jobs): job_ids = [self._count.next() for _ in jobs] self._submitted.extend(zip(job_ids, jobs)) return job_ids def testNoJobs(self): for i in [object(), [], False, True, None, 1, 929, {}]: self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i), i) def testDefaults(self): src = opcodes.OpTestDummy() res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(), opcodes.OpTestDelay(), ], [ opcodes.OpTestDelay(), ]])) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], }) (_, (op1, op2)) = self._submitted.pop(0) (_, (op3, )) = self._submitted.pop(0)
self.assertTrue("OP_TEST_DUMMY" in op.comment) self.assertFalse(hasattr(op, "priority")) self.assertFalse(hasattr(op, "debug_level")) def testParams(self): src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH, debug_level=3) res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW), ], [ opcodes.OpTestDelay(comment="foobar", debug_level=10), ]], other=True, value=range(10))) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], "other": True, "value": range(10), }) (_, (op1, )) = self._submitted.pop(0) (_, (op2, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) self.assertEqual(op1.priority, constants.OP_PRIO_LOW) self.assertTrue("OP_TEST_DUMMY" in op1.comment) self.assertEqual(op1.debug_level, 3) self.assertEqual(op2.priority, constants.OP_PRIO_HIGH) self.assertEqual(op2.comment, "foobar") self.assertEqual(op2.debug_level, 3) class _FakeLuWithLocks: def __init__(self, needed_locks, share_locks): self.needed_locks = needed_locks self.share_locks = share_locks class _FakeGlm: def __init__(self, owning_nal): self._owning_nal = owning_nal def check_owned(self, level, names): assert level == locking.LEVEL_NODE_ALLOC assert names == locking.NAL return self._owning_nal def owning_all(self, level): return False class TestVerifyLocks(unittest.TestCase): def testNoLocks(self): lu = _FakeLuWithLocks({}, {}) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=NotImplemented, _nal_whitelist=NotImplemented) def testNotAllSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testDifferentMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("using the same mode as nodes" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) def testSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 1, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("whitelisted to use different modes" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testAllWithoutAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("allocation lock must be used if" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) def testAllWithAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, locking.LEVEL_NODE_ALLOC: 
locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) except AssertionError, err: self.assertTrue("whitelisted for not acquiring" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) if __name__ == "__main__": testutils.GanetiTestProgram()
self.assertRaises(IndexError, self._submitted.pop) for op in [op1, op2, op3]:
random_line_split
ganeti.mcpu_unittest.py
#!/usr/bin/python # # Copyright (C) 2009, 2011 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Script for unittesting the mcpu module""" import unittest import itertools from ganeti import compat from ganeti import mcpu from ganeti import opcodes from ganeti import cmdlib from ganeti import locking from ganeti import constants from ganeti.constants import \ LOCK_ATTEMPTS_TIMEOUT, \ LOCK_ATTEMPTS_MAXWAIT, \ LOCK_ATTEMPTS_MINWAIT import testutils REQ_BGL_WHITELIST = compat.UniqueFrozenset([ opcodes.OpClusterActivateMasterIp, opcodes.OpClusterDeactivateMasterIp, opcodes.OpClusterDestroy, opcodes.OpClusterPostInit, opcodes.OpClusterRename, opcodes.OpInstanceRename, opcodes.OpNodeAdd, opcodes.OpNodeRemove, opcodes.OpTestAllocator, ]) class TestLockAttemptTimeoutStrategy(unittest.TestCase): def testConstants(self): tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT) self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT) self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800, msg="Waiting less than half an hour per priority") self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600, msg="Waiting more than an hour per priority") def testSimple(self): strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5, _time_fn=lambda: 0.0) prev = None for i in range(len(strat._TIMEOUT_PER_ATTEMPT)): timeout = strat.NextAttempt() self.assert_(timeout is not None) self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT) self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT) self.assert_(prev is None or timeout >= prev) prev = timeout for _ in range(10): self.assert_(strat.NextAttempt() is None) class TestDispatchTable(unittest.TestCase): def test(self): for opcls in opcodes.OP_MAPPING.values(): if not opcls.WITH_LU:
self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE, msg="%s missing handler class" % opcls) # Check against BGL whitelist lucls = mcpu.Processor.DISPATCH_TABLE[opcls] if lucls.REQ_BGL: self.assertTrue(opcls in REQ_BGL_WHITELIST, msg=("%s not whitelisted for BGL" % opcls.OP_ID)) else: self.assertFalse(opcls in REQ_BGL_WHITELIST, msg=("%s whitelisted for BGL, but doesn't use it" % opcls.OP_ID)) class TestProcessResult(unittest.TestCase): def setUp(self): self._submitted = [] self._count = itertools.count(200) def _Submit(self, jobs): job_ids = [self._count.next() for _ in jobs] self._submitted.extend(zip(job_ids, jobs)) return job_ids def testNoJobs(self): for i in [object(), [], False, True, None, 1, 929, {}]: self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i), i) def testDefaults(self): src = opcodes.OpTestDummy() res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(), opcodes.OpTestDelay(), ], [ opcodes.OpTestDelay(), ]])) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], }) (_, (op1, op2)) = self._submitted.pop(0) (_, (op3, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) for op in [op1, op2, op3]: self.assertTrue("OP_TEST_DUMMY" in op.comment) self.assertFalse(hasattr(op, "priority")) self.assertFalse(hasattr(op, "debug_level")) def testParams(self): src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH, debug_level=3) res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[ opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW), ], [ opcodes.OpTestDelay(comment="foobar", debug_level=10), ]], other=True, value=range(10))) self.assertEqual(res, { constants.JOB_IDS_KEY: [200, 201], "other": True, "value": range(10), }) (_, (op1, )) = self._submitted.pop(0) (_, (op2, )) = self._submitted.pop(0) self.assertRaises(IndexError, self._submitted.pop) self.assertEqual(op1.priority, constants.OP_PRIO_LOW) self.assertTrue("OP_TEST_DUMMY" in op1.comment) self.assertEqual(op1.debug_level, 3) self.assertEqual(op2.priority, constants.OP_PRIO_HIGH) self.assertEqual(op2.comment, "foobar") self.assertEqual(op2.debug_level, 3) class _FakeLuWithLocks: def __init__(self, needed_locks, share_locks): self.needed_locks = needed_locks self.share_locks = share_locks class _FakeGlm: def __init__(self, owning_nal): self._owning_nal = owning_nal def check_owned(self, level, names): assert level == locking.LEVEL_NODE_ALLOC assert names == locking.NAL return self._owning_nal def owning_all(self, level): return False class TestVerifyLocks(unittest.TestCase): def testNoLocks(self): lu = _FakeLuWithLocks({}, {}) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=NotImplemented, _nal_whitelist=NotImplemented) def testNotAllSameMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testDifferentMode(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], }, { level: 0, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("using the same mode as nodes" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) def testSameMode(self): for 
level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: ["foo"], locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 1, locking.LEVEL_NODE_ALLOC: 1, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("whitelisted to use different modes" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) def testAllWithoutAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(False) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) except AssertionError, err: self.assertTrue("allocation lock must be used if" in str(err)) else: self.fail("Exception not raised") # Once more with the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) def testAllWithAllocLock(self): for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]: lu = _FakeLuWithLocks({ level: locking.ALL_SET, locking.LEVEL_NODE_ALLOC: locking.ALL_SET, }, { level: 0, locking.LEVEL_NODE_ALLOC: 0, }) glm = _FakeGlm(True) try: mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[_FakeLuWithLocks]) except AssertionError, err: self.assertTrue("whitelisted for not acquiring" in str(err)) else: self.fail("Exception not raised") # Once more without the whitelist mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[]) if __name__ == "__main__": testutils.GanetiTestProgram()
continue
conditional_block
pilconvert.py
#!C:\Users\DMoran\Downloads\WinPython-64bit-2.7.13.1Zero\python-2.7.13.amd64\python.exe # # The Python Imaging Library. # $Id$ # # convert image files # # History: # 0.1 96-04-20 fl Created # 0.2 96-10-04 fl Use draft mode when converting images # 0.3 96-12-30 fl Optimize output (PNG, JPEG) # 0.4 97-01-18 fl Made optimize an option (PNG, JPEG) # 0.5 98-12-30 fl Fixed -f option (from Anthony Baxter) # from __future__ import print_function import getopt import string import sys from PIL import Image def usage():
if len(sys.argv) == 1: usage() try: opt, argv = getopt.getopt(sys.argv[1:], "c:dfgopq:r") except getopt.error as v: print(v) sys.exit(1) output_format = None convert = None options = {} for o, a in opt: if o == "-f": Image.init() id = sorted(Image.ID) print("Supported formats (* indicates output format):") for i in id: if i in Image.SAVE: print(i+"*", end=' ') else: print(i, end=' ') sys.exit(1) elif o == "-c": output_format = a if o == "-g": convert = "L" elif o == "-p": convert = "P" elif o == "-r": convert = "RGB" elif o == "-o": options["optimize"] = 1 elif o == "-q": options["quality"] = string.atoi(a) if len(argv) != 2: usage() try: im = Image.open(argv[0]) if convert and im.mode != convert: im.draft(convert, im.size) im = im.convert(convert) if output_format: im.save(argv[1], output_format, **options) else: im.save(argv[1], **options) except: print("cannot convert image", end=' ') print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
print("PIL Convert 0.5/1998-12-30 -- convert image files") print("Usage: pilconvert [option] infile outfile") print() print("Options:") print() print(" -c <format> convert to format (default is given by extension)") print() print(" -g convert to greyscale") print(" -p convert to palette image (using standard palette)") print(" -r convert to rgb") print() print(" -o optimize output (trade speed for size)") print(" -q <value> set compression quality (0-100, JPEG only)") print() print(" -f list supported file formats") sys.exit(1)
identifier_body
pilconvert.py
#!C:\Users\DMoran\Downloads\WinPython-64bit-2.7.13.1Zero\python-2.7.13.amd64\python.exe # # The Python Imaging Library. # $Id$ # # convert image files # # History: # 0.1 96-04-20 fl Created # 0.2 96-10-04 fl Use draft mode when converting images # 0.3 96-12-30 fl Optimize output (PNG, JPEG) # 0.4 97-01-18 fl Made optimize an option (PNG, JPEG) # 0.5 98-12-30 fl Fixed -f option (from Anthony Baxter) # from __future__ import print_function import getopt import string import sys from PIL import Image def usage(): print("PIL Convert 0.5/1998-12-30 -- convert image files") print("Usage: pilconvert [option] infile outfile") print() print("Options:") print() print(" -c <format> convert to format (default is given by extension)") print() print(" -g convert to greyscale") print(" -p convert to palette image (using standard palette)") print(" -r convert to rgb") print() print(" -o optimize output (trade speed for size)") print(" -q <value> set compression quality (0-100, JPEG only)") print() print(" -f list supported file formats") sys.exit(1) if len(sys.argv) == 1: usage() try: opt, argv = getopt.getopt(sys.argv[1:], "c:dfgopq:r") except getopt.error as v: print(v) sys.exit(1) output_format = None convert = None options = {} for o, a in opt: if o == "-f": Image.init() id = sorted(Image.ID) print("Supported formats (* indicates output format):") for i in id: if i in Image.SAVE: print(i+"*", end=' ') else: print(i, end=' ') sys.exit(1) elif o == "-c": output_format = a if o == "-g": convert = "L" elif o == "-p": convert = "P" elif o == "-r": convert = "RGB" elif o == "-o": options["optimize"] = 1 elif o == "-q": options["quality"] = string.atoi(a) if len(argv) != 2: usage() try: im = Image.open(argv[0]) if convert and im.mode != convert: im.draft(convert, im.size) im = im.convert(convert)
print("cannot convert image", end=' ') print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
if output_format: im.save(argv[1], output_format, **options) else: im.save(argv[1], **options) except:
random_line_split
pilconvert.py
#!C:\Users\DMoran\Downloads\WinPython-64bit-2.7.13.1Zero\python-2.7.13.amd64\python.exe # # The Python Imaging Library. # $Id$ # # convert image files # # History: # 0.1 96-04-20 fl Created # 0.2 96-10-04 fl Use draft mode when converting images # 0.3 96-12-30 fl Optimize output (PNG, JPEG) # 0.4 97-01-18 fl Made optimize an option (PNG, JPEG) # 0.5 98-12-30 fl Fixed -f option (from Anthony Baxter) # from __future__ import print_function import getopt import string import sys from PIL import Image def
(): print("PIL Convert 0.5/1998-12-30 -- convert image files") print("Usage: pilconvert [option] infile outfile") print() print("Options:") print() print(" -c <format> convert to format (default is given by extension)") print() print(" -g convert to greyscale") print(" -p convert to palette image (using standard palette)") print(" -r convert to rgb") print() print(" -o optimize output (trade speed for size)") print(" -q <value> set compression quality (0-100, JPEG only)") print() print(" -f list supported file formats") sys.exit(1) if len(sys.argv) == 1: usage() try: opt, argv = getopt.getopt(sys.argv[1:], "c:dfgopq:r") except getopt.error as v: print(v) sys.exit(1) output_format = None convert = None options = {} for o, a in opt: if o == "-f": Image.init() id = sorted(Image.ID) print("Supported formats (* indicates output format):") for i in id: if i in Image.SAVE: print(i+"*", end=' ') else: print(i, end=' ') sys.exit(1) elif o == "-c": output_format = a if o == "-g": convert = "L" elif o == "-p": convert = "P" elif o == "-r": convert = "RGB" elif o == "-o": options["optimize"] = 1 elif o == "-q": options["quality"] = string.atoi(a) if len(argv) != 2: usage() try: im = Image.open(argv[0]) if convert and im.mode != convert: im.draft(convert, im.size) im = im.convert(convert) if output_format: im.save(argv[1], output_format, **options) else: im.save(argv[1], **options) except: print("cannot convert image", end=' ') print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
usage
identifier_name
pilconvert.py
#!C:\Users\DMoran\Downloads\WinPython-64bit-2.7.13.1Zero\python-2.7.13.amd64\python.exe # # The Python Imaging Library. # $Id$ # # convert image files # # History: # 0.1 96-04-20 fl Created # 0.2 96-10-04 fl Use draft mode when converting images # 0.3 96-12-30 fl Optimize output (PNG, JPEG) # 0.4 97-01-18 fl Made optimize an option (PNG, JPEG) # 0.5 98-12-30 fl Fixed -f option (from Anthony Baxter) # from __future__ import print_function import getopt import string import sys from PIL import Image def usage(): print("PIL Convert 0.5/1998-12-30 -- convert image files") print("Usage: pilconvert [option] infile outfile") print() print("Options:") print() print(" -c <format> convert to format (default is given by extension)") print() print(" -g convert to greyscale") print(" -p convert to palette image (using standard palette)") print(" -r convert to rgb") print() print(" -o optimize output (trade speed for size)") print(" -q <value> set compression quality (0-100, JPEG only)") print() print(" -f list supported file formats") sys.exit(1) if len(sys.argv) == 1: usage() try: opt, argv = getopt.getopt(sys.argv[1:], "c:dfgopq:r") except getopt.error as v: print(v) sys.exit(1) output_format = None convert = None options = {} for o, a in opt: if o == "-f": Image.init() id = sorted(Image.ID) print("Supported formats (* indicates output format):") for i in id: if i in Image.SAVE: print(i+"*", end=' ') else: print(i, end=' ') sys.exit(1) elif o == "-c": output_format = a if o == "-g":
elif o == "-p": convert = "P" elif o == "-r": convert = "RGB" elif o == "-o": options["optimize"] = 1 elif o == "-q": options["quality"] = string.atoi(a) if len(argv) != 2: usage() try: im = Image.open(argv[0]) if convert and im.mode != convert: im.draft(convert, im.size) im = im.convert(convert) if output_format: im.save(argv[1], output_format, **options) else: im.save(argv[1], **options) except: print("cannot convert image", end=' ') print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
convert = "L"
conditional_block
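The four pilconvert.py records above all mask different spans of the same command-line script, whose core logic is: parse options, open the input image, optionally draft and convert its mode, then save in the requested format. A minimal sketch of that flow as a plain function, assuming the Pillow/PIL package is available (convert_image is a hypothetical wrapper for illustration, not part of the script):

```python
# Illustrative sketch of the open -> draft -> convert -> save flow used by
# pilconvert.py; not a drop-in replacement for the script itself.
from PIL import Image

def convert_image(infile, outfile, mode=None, output_format=None, **options):
    im = Image.open(infile)
    if mode and im.mode != mode:
        # draft() lets the decoder pre-convert cheaply (e.g. JPEG -> "L")
        # before the full convert() call.
        im.draft(mode, im.size)
        im = im.convert(mode)
    if output_format:
        im.save(outfile, output_format, **options)
    else:
        im.save(outfile, **options)

# e.g. convert_image("photo.jpg", "photo.png", mode="L", optimize=1)
```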
traversal.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Traversals over the DOM and flow trees, running the layout computations. use construct::FlowConstructor; use context::LayoutContext; use css::matching::{ApplicableDeclarations, MatchMethods, StyleSharingResult}; use flow::{MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal}; use flow::{self, Flow}; use incremental::{self, BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage}; use script::layout_interface::ReflowGoal; use selectors::bloom::BloomFilter; use std::cell::RefCell; use std::mem; use util::opts; use util::tid::tid; use wrapper::{LayoutNode, layout_node_to_unsafe_layout_node}; use wrapper::{ThreadSafeLayoutNode, UnsafeLayoutNode}; /// Every time we do another layout, the old bloom filters are invalid. This is /// detected by ticking a generation number every layout. type Generation = u32; /// A pair of the bloom filter used for css selector matching, and the node to /// which it applies. This is used to efficiently do `Descendant` selector /// matches. Thanks to the bloom filter, we can avoid walking up the tree /// looking for ancestors that aren't there in the majority of cases. /// /// As we walk down the DOM tree a task-local bloom filter is built of all the /// CSS `SimpleSelector`s which are part of a `Descendant` compound selector /// (i.e. paired with a `Descendant` combinator, in the `next` field of a /// `CompoundSelector`. /// /// Before a `Descendant` selector match is tried, it's compared against the /// bloom filter. If the bloom filter can exclude it, the selector is quickly /// rejected. /// /// When done styling a node, all selectors previously inserted into the filter /// are removed. /// /// Since a work-stealing queue is used for styling, sometimes, the bloom filter /// will no longer be the for the parent of the node we're currently on. When /// this happens, the task local bloom filter will be thrown away and rebuilt. thread_local!( static STYLE_BLOOM: RefCell<Option<(Box<BloomFilter>, UnsafeLayoutNode, Generation)>> = RefCell::new(None)); /// Returns the task local bloom filter. /// /// If one does not exist, a new one will be made for you. If it is out of date, /// it will be cleared and reused. fn take_task_local_bloom_filter(parent_node: Option<LayoutNode>, layout_context: &LayoutContext) -> Box<BloomFilter> { STYLE_BLOOM.with(|style_bloom| { match (parent_node, style_bloom.borrow_mut().take()) { // Root node. Needs new bloom filter. (None, _ ) => { debug!("[{}] No parent, but new bloom filter!", tid()); box BloomFilter::new() } // No bloom filter for this thread yet. (Some(parent), None) => { let mut bloom_filter = box BloomFilter::new(); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context); bloom_filter } // Found cached bloom filter. (Some(parent), Some((mut bloom_filter, old_node, old_generation))) => { if old_node == layout_node_to_unsafe_layout_node(&parent) && old_generation == layout_context.shared.generation { // Hey, the cached parent is our parent! We can reuse the bloom filter. debug!("[{}] Parent matches (={}). Reusing bloom filter.", tid(), old_node.0); } else { // Oh no. the cached parent is stale. I guess we need a new one. Reuse the existing // allocation to avoid malloc churn. 
bloom_filter.clear(); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context); } bloom_filter }, } }) } fn put_task_local_bloom_filter(bf: Box<BloomFilter>, unsafe_node: &UnsafeLayoutNode, layout_context: &LayoutContext) { STYLE_BLOOM.with(move |style_bloom| { assert!(style_bloom.borrow().is_none(), "Putting into a never-taken task-local bloom filter"); *style_bloom.borrow_mut() = Some((bf, *unsafe_node, layout_context.shared.generation)); }) } /// "Ancestors" in this context is inclusive of ourselves. fn insert_ancestors_into_bloom_filter(bf: &mut Box<BloomFilter>, mut n: LayoutNode, layout_context: &LayoutContext) { debug!("[{}] Inserting ancestors.", tid()); let mut ancestors = 0; loop { ancestors += 1; n.insert_into_bloom_filter(&mut **bf); n = match n.layout_parent_node(layout_context.shared) { None => break, Some(p) => p, }; } debug!("[{}] Inserted {} ancestors.", tid(), ancestors); } /// A top-down traversal. pub trait PreorderDomTraversal { /// The operation to perform. Return true to continue or false to stop. fn process(&self, node: LayoutNode); } /// A bottom-up traversal, with a optional in-order pass. pub trait PostorderDomTraversal { /// The operation to perform. Return true to continue or false to stop. fn process(&self, node: LayoutNode); } /// A bottom-up, parallelizable traversal. pub trait PostorderNodeMutTraversal { /// The operation to perform. Return true to continue or false to stop. fn process<'a>(&'a mut self, node: &ThreadSafeLayoutNode<'a>) -> bool; /// Returns true if this node should be pruned. If this returns true, we skip the operation /// entirely and do not process any descendant nodes. This is called *before* child nodes are /// visited. The default implementation never prunes any nodes. fn should_prune<'a>(&'a self, _node: &ThreadSafeLayoutNode<'a>) -> bool { false } } /// The recalc-style-for-node traversal, which styles each node and must run before /// layout computation. This computes the styles applied to each node. #[derive(Copy, Clone)] pub struct RecalcStyleForNode<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> { #[inline] #[allow(unsafe_code)] fn
(&self, node: LayoutNode) { // Initialize layout data. // // FIXME(pcwalton): Stop allocating here. Ideally this should just be done by the HTML // parser. node.initialize_layout_data(); // Get the parent node. let parent_opt = node.layout_parent_node(self.layout_context.shared); // Get the style bloom filter. let mut bf = take_task_local_bloom_filter(parent_opt, self.layout_context); let nonincremental_layout = opts::get().nonincremental_layout; if nonincremental_layout || node.is_dirty() { // Remove existing CSS styles from nodes whose content has changed (e.g. text changed), // to force non-incremental reflow. if node.has_changed() { let node = ThreadSafeLayoutNode::new(&node); node.unstyle(); } // Check to see whether we can share a style with someone. let style_sharing_candidate_cache = &mut self.layout_context.style_sharing_candidate_cache(); let sharing_result = unsafe { node.share_style_if_possible(style_sharing_candidate_cache, parent_opt.clone()) }; // Otherwise, match and cascade selectors. match sharing_result { StyleSharingResult::CannotShare(mut shareable) => { let mut applicable_declarations = ApplicableDeclarations::new(); if node.as_element().is_some() { // Perform the CSS selector matching. let stylist = unsafe { &*self.layout_context.shared.stylist }; node.match_node(stylist, Some(&*bf), &mut applicable_declarations, &mut shareable); } else if node.has_changed() { ThreadSafeLayoutNode::new(&node).set_restyle_damage( incremental::rebuild_and_reflow()) } // Perform the CSS cascade. unsafe { node.cascade_node(self.layout_context.shared, parent_opt, &applicable_declarations, &mut self.layout_context.applicable_declarations_cache(), &self.layout_context.shared.new_animations_sender); } // Add ourselves to the LRU cache. if shareable { if let Some(element) = node.as_element() { style_sharing_candidate_cache.insert_if_possible(&element); } } } StyleSharingResult::StyleWasShared(index, damage) => { style_sharing_candidate_cache.touch(index); ThreadSafeLayoutNode::new(&node).set_restyle_damage(damage); } } } let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node); // Before running the children, we need to insert our nodes into the bloom // filter. debug!("[{}] + {:X}", tid(), unsafe_layout_node.0); node.insert_into_bloom_filter(&mut *bf); // NB: flow construction updates the bloom filter on the way up. put_task_local_bloom_filter(bf, &unsafe_layout_node, self.layout_context); } } /// The flow construction traversal, which builds flows for styled nodes. #[derive(Copy, Clone)] pub struct ConstructFlows<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderDomTraversal for ConstructFlows<'a> { #[inline] #[allow(unsafe_code)] fn process(&self, node: LayoutNode) { // Construct flows for this node. { let tnode = ThreadSafeLayoutNode::new(&node); // Always reconstruct if incremental layout is turned off. let nonincremental_layout = opts::get().nonincremental_layout; if nonincremental_layout || node.has_dirty_descendants() { let mut flow_constructor = FlowConstructor::new(self.layout_context); if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) { flow_constructor.process(&tnode); debug!("Constructed flow for {:x}: {:x}", tnode.debug_id(), tnode.flow_debug_id()); } } // Reset the layout damage in this node. It's been propagated to the // flow by the flow constructor. 
tnode.set_restyle_damage(RestyleDamage::empty()); } unsafe { node.set_changed(false); node.set_dirty(false); node.set_dirty_siblings(false); node.set_dirty_descendants(false); } let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node); let (mut bf, old_node, old_generation) = STYLE_BLOOM.with(|style_bloom| { mem::replace(&mut *style_bloom.borrow_mut(), None) .expect("The bloom filter should have been set by style recalc.") }); assert_eq!(old_node, unsafe_layout_node); assert_eq!(old_generation, self.layout_context.shared.generation); match node.layout_parent_node(self.layout_context.shared) { None => { debug!("[{}] - {:X}, and deleting BF.", tid(), unsafe_layout_node.0); // If this is the reflow root, eat the task-local bloom filter. } Some(parent) => { // Otherwise, put it back, but remove this node. node.remove_from_bloom_filter(&mut *bf); let unsafe_parent = layout_node_to_unsafe_layout_node(&parent); put_task_local_bloom_filter(bf, &unsafe_parent, self.layout_context); }, }; } } /// The bubble-inline-sizes traversal, the first part of layout computation. This computes /// preferred and intrinsic inline-sizes and bubbles them up the tree. pub struct BubbleISizes<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for BubbleISizes<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.bubble_inline_sizes(); flow::mut_base(flow).restyle_damage.remove(BUBBLE_ISIZES); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.contains(BUBBLE_ISIZES) } } /// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`. #[derive(Copy, Clone)] pub struct AssignISizes<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderFlowTraversal for AssignISizes<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.assign_inline_sizes(self.layout_context); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) } } /// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of /// layout computation. Determines the final block-sizes for all layout objects, computes /// positions, and computes overflow regions. In Gecko this corresponds to `Reflow` and /// `FinishAndStoreOverflow`. #[derive(Copy, Clone)] pub struct AssignBSizesAndStoreOverflow<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for AssignBSizesAndStoreOverflow<'a> { #[inline] fn process(&self, flow: &mut Flow) { // Can't do anything with flows impacted by floats until we reach their inorder parent. // NB: We must return without resetting the restyle bits for these, as we haven't actually // reflowed anything! 
if flow::base(flow).flags.impacted_by_floats() { return } flow.assign_block_size(self.layout_context); flow.early_store_overflow(self.layout_context); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) } } #[derive(Copy, Clone)] pub struct ComputeAbsolutePositions<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderFlowTraversal for ComputeAbsolutePositions<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.compute_absolute_position(self.layout_context); } } #[derive(Copy, Clone)] pub struct BuildDisplayList<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for BuildDisplayList<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.build_display_list(self.layout_context); } #[inline] fn should_process(&self, _: &mut Flow) -> bool { self.layout_context.shared.goal == ReflowGoal::ForDisplay } }
process
identifier_name
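The doc comments in the traversal.rs record above describe a task-local bloom filter that is reused across the style traversal: it is kept only while it still belongs to the current parent node and the current layout generation, and is cleared and rebuilt otherwise. A minimal sketch of that caching rule, in Python for consistency with the other sketches here; the function names and the set-based stand-in for a real bloom filter are assumptions, not the actual implementation:

```python
# Illustrative only: generation-checked reuse of a thread-local filter,
# mirroring the take/put pattern described in the comments above.
import threading

_tls = threading.local()

def take_filter(parent, generation, insert_ancestors):
    cached = getattr(_tls, "cached", None)
    _tls.cached = None                        # the filter is "taken"
    if parent is None:
        return set()                          # root node: fresh, empty filter
    if cached is not None:
        bf, old_parent, old_generation = cached
        if old_parent == parent and old_generation == generation:
            return bf                         # cache hit: reuse as-is
        bf.clear()                            # stale: reuse allocation, rebuild
    else:
        bf = set()                            # no cached filter on this thread
    insert_ancestors(bf, parent)              # seed with parent and ancestors
    return bf

def put_filter(bf, node, generation):
    # Stash the filter for the next node processed on this thread.
    _tls.cached = (bf, node, generation)
```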
traversal.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Traversals over the DOM and flow trees, running the layout computations. use construct::FlowConstructor; use context::LayoutContext; use css::matching::{ApplicableDeclarations, MatchMethods, StyleSharingResult}; use flow::{MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal}; use flow::{self, Flow}; use incremental::{self, BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage}; use script::layout_interface::ReflowGoal; use selectors::bloom::BloomFilter; use std::cell::RefCell; use std::mem; use util::opts; use util::tid::tid; use wrapper::{LayoutNode, layout_node_to_unsafe_layout_node}; use wrapper::{ThreadSafeLayoutNode, UnsafeLayoutNode}; /// Every time we do another layout, the old bloom filters are invalid. This is /// detected by ticking a generation number every layout. type Generation = u32; /// A pair of the bloom filter used for css selector matching, and the node to /// which it applies. This is used to efficiently do `Descendant` selector /// matches. Thanks to the bloom filter, we can avoid walking up the tree /// looking for ancestors that aren't there in the majority of cases. /// /// As we walk down the DOM tree a task-local bloom filter is built of all the /// CSS `SimpleSelector`s which are part of a `Descendant` compound selector /// (i.e. paired with a `Descendant` combinator, in the `next` field of a /// `CompoundSelector`. /// /// Before a `Descendant` selector match is tried, it's compared against the /// bloom filter. If the bloom filter can exclude it, the selector is quickly /// rejected. /// /// When done styling a node, all selectors previously inserted into the filter /// are removed. /// /// Since a work-stealing queue is used for styling, sometimes, the bloom filter /// will no longer be the for the parent of the node we're currently on. When /// this happens, the task local bloom filter will be thrown away and rebuilt. thread_local!( static STYLE_BLOOM: RefCell<Option<(Box<BloomFilter>, UnsafeLayoutNode, Generation)>> = RefCell::new(None)); /// Returns the task local bloom filter. /// /// If one does not exist, a new one will be made for you. If it is out of date, /// it will be cleared and reused. fn take_task_local_bloom_filter(parent_node: Option<LayoutNode>, layout_context: &LayoutContext) -> Box<BloomFilter> { STYLE_BLOOM.with(|style_bloom| { match (parent_node, style_bloom.borrow_mut().take()) { // Root node. Needs new bloom filter. (None, _ ) => { debug!("[{}] No parent, but new bloom filter!", tid()); box BloomFilter::new() } // No bloom filter for this thread yet. (Some(parent), None) => { let mut bloom_filter = box BloomFilter::new(); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context); bloom_filter } // Found cached bloom filter. (Some(parent), Some((mut bloom_filter, old_node, old_generation))) => {
old_generation == layout_context.shared.generation { // Hey, the cached parent is our parent! We can reuse the bloom filter. debug!("[{}] Parent matches (={}). Reusing bloom filter.", tid(), old_node.0); } else { // Oh no. the cached parent is stale. I guess we need a new one. Reuse the existing // allocation to avoid malloc churn. bloom_filter.clear(); insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context); } bloom_filter }, } }) } fn put_task_local_bloom_filter(bf: Box<BloomFilter>, unsafe_node: &UnsafeLayoutNode, layout_context: &LayoutContext) { STYLE_BLOOM.with(move |style_bloom| { assert!(style_bloom.borrow().is_none(), "Putting into a never-taken task-local bloom filter"); *style_bloom.borrow_mut() = Some((bf, *unsafe_node, layout_context.shared.generation)); }) } /// "Ancestors" in this context is inclusive of ourselves. fn insert_ancestors_into_bloom_filter(bf: &mut Box<BloomFilter>, mut n: LayoutNode, layout_context: &LayoutContext) { debug!("[{}] Inserting ancestors.", tid()); let mut ancestors = 0; loop { ancestors += 1; n.insert_into_bloom_filter(&mut **bf); n = match n.layout_parent_node(layout_context.shared) { None => break, Some(p) => p, }; } debug!("[{}] Inserted {} ancestors.", tid(), ancestors); } /// A top-down traversal. pub trait PreorderDomTraversal { /// The operation to perform. Return true to continue or false to stop. fn process(&self, node: LayoutNode); } /// A bottom-up traversal, with a optional in-order pass. pub trait PostorderDomTraversal { /// The operation to perform. Return true to continue or false to stop. fn process(&self, node: LayoutNode); } /// A bottom-up, parallelizable traversal. pub trait PostorderNodeMutTraversal { /// The operation to perform. Return true to continue or false to stop. fn process<'a>(&'a mut self, node: &ThreadSafeLayoutNode<'a>) -> bool; /// Returns true if this node should be pruned. If this returns true, we skip the operation /// entirely and do not process any descendant nodes. This is called *before* child nodes are /// visited. The default implementation never prunes any nodes. fn should_prune<'a>(&'a self, _node: &ThreadSafeLayoutNode<'a>) -> bool { false } } /// The recalc-style-for-node traversal, which styles each node and must run before /// layout computation. This computes the styles applied to each node. #[derive(Copy, Clone)] pub struct RecalcStyleForNode<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> { #[inline] #[allow(unsafe_code)] fn process(&self, node: LayoutNode) { // Initialize layout data. // // FIXME(pcwalton): Stop allocating here. Ideally this should just be done by the HTML // parser. node.initialize_layout_data(); // Get the parent node. let parent_opt = node.layout_parent_node(self.layout_context.shared); // Get the style bloom filter. let mut bf = take_task_local_bloom_filter(parent_opt, self.layout_context); let nonincremental_layout = opts::get().nonincremental_layout; if nonincremental_layout || node.is_dirty() { // Remove existing CSS styles from nodes whose content has changed (e.g. text changed), // to force non-incremental reflow. if node.has_changed() { let node = ThreadSafeLayoutNode::new(&node); node.unstyle(); } // Check to see whether we can share a style with someone. 
let style_sharing_candidate_cache = &mut self.layout_context.style_sharing_candidate_cache(); let sharing_result = unsafe { node.share_style_if_possible(style_sharing_candidate_cache, parent_opt.clone()) }; // Otherwise, match and cascade selectors. match sharing_result { StyleSharingResult::CannotShare(mut shareable) => { let mut applicable_declarations = ApplicableDeclarations::new(); if node.as_element().is_some() { // Perform the CSS selector matching. let stylist = unsafe { &*self.layout_context.shared.stylist }; node.match_node(stylist, Some(&*bf), &mut applicable_declarations, &mut shareable); } else if node.has_changed() { ThreadSafeLayoutNode::new(&node).set_restyle_damage( incremental::rebuild_and_reflow()) } // Perform the CSS cascade. unsafe { node.cascade_node(self.layout_context.shared, parent_opt, &applicable_declarations, &mut self.layout_context.applicable_declarations_cache(), &self.layout_context.shared.new_animations_sender); } // Add ourselves to the LRU cache. if shareable { if let Some(element) = node.as_element() { style_sharing_candidate_cache.insert_if_possible(&element); } } } StyleSharingResult::StyleWasShared(index, damage) => { style_sharing_candidate_cache.touch(index); ThreadSafeLayoutNode::new(&node).set_restyle_damage(damage); } } } let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node); // Before running the children, we need to insert our nodes into the bloom // filter. debug!("[{}] + {:X}", tid(), unsafe_layout_node.0); node.insert_into_bloom_filter(&mut *bf); // NB: flow construction updates the bloom filter on the way up. put_task_local_bloom_filter(bf, &unsafe_layout_node, self.layout_context); } } /// The flow construction traversal, which builds flows for styled nodes. #[derive(Copy, Clone)] pub struct ConstructFlows<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderDomTraversal for ConstructFlows<'a> { #[inline] #[allow(unsafe_code)] fn process(&self, node: LayoutNode) { // Construct flows for this node. { let tnode = ThreadSafeLayoutNode::new(&node); // Always reconstruct if incremental layout is turned off. let nonincremental_layout = opts::get().nonincremental_layout; if nonincremental_layout || node.has_dirty_descendants() { let mut flow_constructor = FlowConstructor::new(self.layout_context); if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) { flow_constructor.process(&tnode); debug!("Constructed flow for {:x}: {:x}", tnode.debug_id(), tnode.flow_debug_id()); } } // Reset the layout damage in this node. It's been propagated to the // flow by the flow constructor. tnode.set_restyle_damage(RestyleDamage::empty()); } unsafe { node.set_changed(false); node.set_dirty(false); node.set_dirty_siblings(false); node.set_dirty_descendants(false); } let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node); let (mut bf, old_node, old_generation) = STYLE_BLOOM.with(|style_bloom| { mem::replace(&mut *style_bloom.borrow_mut(), None) .expect("The bloom filter should have been set by style recalc.") }); assert_eq!(old_node, unsafe_layout_node); assert_eq!(old_generation, self.layout_context.shared.generation); match node.layout_parent_node(self.layout_context.shared) { None => { debug!("[{}] - {:X}, and deleting BF.", tid(), unsafe_layout_node.0); // If this is the reflow root, eat the task-local bloom filter. } Some(parent) => { // Otherwise, put it back, but remove this node. 
node.remove_from_bloom_filter(&mut *bf); let unsafe_parent = layout_node_to_unsafe_layout_node(&parent); put_task_local_bloom_filter(bf, &unsafe_parent, self.layout_context); }, }; } } /// The bubble-inline-sizes traversal, the first part of layout computation. This computes /// preferred and intrinsic inline-sizes and bubbles them up the tree. pub struct BubbleISizes<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for BubbleISizes<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.bubble_inline_sizes(); flow::mut_base(flow).restyle_damage.remove(BUBBLE_ISIZES); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.contains(BUBBLE_ISIZES) } } /// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`. #[derive(Copy, Clone)] pub struct AssignISizes<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderFlowTraversal for AssignISizes<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.assign_inline_sizes(self.layout_context); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) } } /// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of /// layout computation. Determines the final block-sizes for all layout objects, computes /// positions, and computes overflow regions. In Gecko this corresponds to `Reflow` and /// `FinishAndStoreOverflow`. #[derive(Copy, Clone)] pub struct AssignBSizesAndStoreOverflow<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for AssignBSizesAndStoreOverflow<'a> { #[inline] fn process(&self, flow: &mut Flow) { // Can't do anything with flows impacted by floats until we reach their inorder parent. // NB: We must return without resetting the restyle bits for these, as we haven't actually // reflowed anything! if flow::base(flow).flags.impacted_by_floats() { return } flow.assign_block_size(self.layout_context); flow.early_store_overflow(self.layout_context); } #[inline] fn should_process(&self, flow: &mut Flow) -> bool { flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) } } #[derive(Copy, Clone)] pub struct ComputeAbsolutePositions<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PreorderFlowTraversal for ComputeAbsolutePositions<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.compute_absolute_position(self.layout_context); } } #[derive(Copy, Clone)] pub struct BuildDisplayList<'a> { pub layout_context: &'a LayoutContext<'a>, } impl<'a> PostorderFlowTraversal for BuildDisplayList<'a> { #[inline] fn process(&self, flow: &mut Flow) { flow.build_display_list(self.layout_context); } #[inline] fn should_process(&self, _: &mut Flow) -> bool { self.layout_context.shared.goal == ReflowGoal::ForDisplay } }
if old_node == layout_node_to_unsafe_layout_node(&parent) &&
random_line_split
issue-20727-2.rs
// aux-build:issue-20727.rs // ignore-cross-compile extern crate issue_20727;
// @has - '//*[@class="rust trait"]' 'trait Add<RHS = Self> {' // @has - '//*[@class="rust trait"]' 'type Output;' type Output; // @has - '//*[@class="rust trait"]' 'fn add(self, rhs: RHS) -> Self::Output;' fn add(self, rhs: RHS) -> Self::Output; } // @has issue_20727_2/reexport/trait.Add.html pub mod reexport { // @has - '//*[@class="rust trait"]' 'trait Add<RHS = Self> {' // @has - '//*[@class="rust trait"]' 'type Output;' // @has - '//*[@class="rust trait"]' 'fn add(self, rhs: RHS) -> Self::Output;' pub use issue_20727::Add; }
// @has issue_20727_2/trait.Add.html pub trait Add<RHS = Self> {
random_line_split
es5ModuleInternalNamedImports.ts
// @target: ES5 // @module: AMD export module M { // variable export var M_V = 0; // interface export interface M_I { } // class export class
{ } // instantiated module export module M_M { var x; } // uninstantiated module export module M_MU { } // function export function M_F() { } // enum export enum M_E { } // type export type M_T = number; // alias export import M_A = M_M; // Reexports export {M_V as v}; export {M_I as i}; export {M_C as c}; export {M_M as m}; export {M_MU as mu}; export {M_F as f}; export {M_E as e}; export {M_A as a}; import * as M2 from "M2"; import M4 from "M4"; export import M5 = require("M5"); } import M3 from "M3";
M_C
identifier_name
es5ModuleInternalNamedImports.ts
// @target: ES5 // @module: AMD export module M { // variable export var M_V = 0; // interface
export module M_M { var x; } // uninstantiated module export module M_MU { } // function export function M_F() { } // enum export enum M_E { } // type export type M_T = number; // alias export import M_A = M_M; // Reexports export {M_V as v}; export {M_I as i}; export {M_C as c}; export {M_M as m}; export {M_MU as mu}; export {M_F as f}; export {M_E as e}; export {M_A as a}; import * as M2 from "M2"; import M4 from "M4"; export import M5 = require("M5"); } import M3 from "M3";
export interface M_I { } // class export class M_C { } // instantiated module
random_line_split
es5ModuleInternalNamedImports.ts
// @target: ES5 // @module: AMD export module M { // variable export var M_V = 0; // interface export interface M_I { } // class export class M_C { } // instantiated module export module M_M { var x; } // uninstantiated module export module M_MU { } // function export function M_F()
// enum export enum M_E { } // type export type M_T = number; // alias export import M_A = M_M; // Reexports export {M_V as v}; export {M_I as i}; export {M_C as c}; export {M_M as m}; export {M_MU as mu}; export {M_F as f}; export {M_E as e}; export {M_A as a}; import * as M2 from "M2"; import M4 from "M4"; export import M5 = require("M5"); } import M3 from "M3";
{ }
identifier_body
menu_link_weight.js
/** * @file * Menu Link Weight Javascript functionality. */
*/ Drupal.behaviors.menuLinkWeightAutomaticTitle = { attach: function (context) { $('fieldset.menu-link-form', context).each(function () { var $checkbox = $('.form-item-menu-enabled input', this); var $link_title = $('.form-item-menu-link-title input', context); var $current_selection = $('.menu-link-weight-link-current', context); var $node_title = $(this).closest('form').find('.form-item-title input'); // If there is no title, take over the title of the link. if ($current_selection.html() == '') { $current_selection.html($link_title.val().substring(0, 30)); } // Take over any link title change. $link_title.keyup(function () { $current_selection.html($link_title.val().substring(0, 30)); }); // Also update on node title change, as this may update the link title. $node_title.keyup(function () { $current_selection.html($link_title.val().substring(0, 30)); }); }); } }; })(jQuery);
(function ($) { /** * Automatically update the current link title in the menu link weight list.
random_line_split
menu_link_weight.js
/** * @file * Menu Link Weight Javascript functionality. */ (function ($) { /** * Automatically update the current link title in the menu link weight list. */ Drupal.behaviors.menuLinkWeightAutomaticTitle = { attach: function (context) { $('fieldset.menu-link-form', context).each(function () { var $checkbox = $('.form-item-menu-enabled input', this); var $link_title = $('.form-item-menu-link-title input', context); var $current_selection = $('.menu-link-weight-link-current', context); var $node_title = $(this).closest('form').find('.form-item-title input'); // If there is no title, take over the title of the link. if ($current_selection.html() == '')
// Take over any link title change. $link_title.keyup(function () { $current_selection.html($link_title.val().substring(0, 30)); }); // Also update on node title change, as this may update the link title. $node_title.keyup(function () { $current_selection.html($link_title.val().substring(0, 30)); }); }); } }; })(jQuery);
{ $current_selection.html($link_title.val().substring(0, 30)); }
conditional_block
webkit.py
from .base import Browser, ExecutorBrowser, require_arg from .base import get_timeout_multiplier # noqa: F401 from ..executors import executor_kwargs as base_executor_kwargs from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401 WebDriverRefTestExecutor) # noqa: F401 from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401 from ..webdriver_server import WebKitDriverServer __wptrunner__ = {"product": "webkit", "check_args": "check_args", "browser": "WebKitBrowser", "browser_kwargs": "browser_kwargs", "executor": {"testharness": "WebDriverTestharnessExecutor", "reftest": "WebDriverRefTestExecutor", "wdspec": "WebKitDriverWdspecExecutor"},
"env_options": "env_options", "run_info_extras": "run_info_extras", "timeout_multiplier": "get_timeout_multiplier"} def check_args(**kwargs): require_arg(kwargs, "binary") require_arg(kwargs, "webdriver_binary") require_arg(kwargs, "webkit_port") def browser_kwargs(test_type, run_info_data, config, **kwargs): return {"binary": kwargs["binary"], "webdriver_binary": kwargs["webdriver_binary"], "webdriver_args": kwargs.get("webdriver_args")} def capabilities_for_port(server_config, **kwargs): port_name = kwargs["webkit_port"] if port_name in ["gtk", "wpe"]: port_key_map = {"gtk": "webkitgtk"} browser_options_port = port_key_map.get(port_name, port_name) browser_options_key = "%s:browserOptions" % browser_options_port return { "browserName": "MiniBrowser", "browserVersion": "2.20", "platformName": "ANY", browser_options_key: { "binary": kwargs["binary"], "args": kwargs.get("binary_args", []), "certificates": [ {"host": server_config["browser_host"], "certificateFile": kwargs["host_cert_path"]}]}} return {} def executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs): executor_kwargs = base_executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs) executor_kwargs["close_after_done"] = True executor_kwargs["capabilities"] = capabilities_for_port(server_config, **kwargs) return executor_kwargs def env_extras(**kwargs): return [] def env_options(): return {} def run_info_extras(**kwargs): return {"webkit_port": kwargs["webkit_port"]} class WebKitBrowser(Browser): """Generic WebKit browser is backed by WebKit's WebDriver implementation, which is supplied through ``wptrunner.webdriver.WebKitDriverServer``. """ def __init__(self, logger, binary, webdriver_binary=None, webdriver_args=None): Browser.__init__(self, logger) self.binary = binary self.server = WebKitDriverServer(self.logger, binary=webdriver_binary, args=webdriver_args) def start(self, **kwargs): self.server.start(block=False) def stop(self, force=False): self.server.stop(force=force) def pid(self): return self.server.pid def is_alive(self): # TODO(ato): This only indicates the driver is alive, # and doesn't say anything about whether a browser session # is active. return self.server.is_alive def cleanup(self): self.stop() def executor_browser(self): return ExecutorBrowser, {"webdriver_url": self.server.url}
"executor_kwargs": "executor_kwargs", "env_extras": "env_extras",
random_line_split
webkit.py
from .base import Browser, ExecutorBrowser, require_arg from .base import get_timeout_multiplier # noqa: F401 from ..executors import executor_kwargs as base_executor_kwargs from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401 WebDriverRefTestExecutor) # noqa: F401 from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401 from ..webdriver_server import WebKitDriverServer __wptrunner__ = {"product": "webkit", "check_args": "check_args", "browser": "WebKitBrowser", "browser_kwargs": "browser_kwargs", "executor": {"testharness": "WebDriverTestharnessExecutor", "reftest": "WebDriverRefTestExecutor", "wdspec": "WebKitDriverWdspecExecutor"}, "executor_kwargs": "executor_kwargs", "env_extras": "env_extras", "env_options": "env_options", "run_info_extras": "run_info_extras", "timeout_multiplier": "get_timeout_multiplier"} def check_args(**kwargs): require_arg(kwargs, "binary") require_arg(kwargs, "webdriver_binary") require_arg(kwargs, "webkit_port") def browser_kwargs(test_type, run_info_data, config, **kwargs): return {"binary": kwargs["binary"], "webdriver_binary": kwargs["webdriver_binary"], "webdriver_args": kwargs.get("webdriver_args")} def capabilities_for_port(server_config, **kwargs): port_name = kwargs["webkit_port"] if port_name in ["gtk", "wpe"]:
return {} def executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs): executor_kwargs = base_executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs) executor_kwargs["close_after_done"] = True executor_kwargs["capabilities"] = capabilities_for_port(server_config, **kwargs) return executor_kwargs def env_extras(**kwargs): return [] def env_options(): return {} def run_info_extras(**kwargs): return {"webkit_port": kwargs["webkit_port"]} class WebKitBrowser(Browser): """Generic WebKit browser is backed by WebKit's WebDriver implementation, which is supplied through ``wptrunner.webdriver.WebKitDriverServer``. """ def __init__(self, logger, binary, webdriver_binary=None, webdriver_args=None): Browser.__init__(self, logger) self.binary = binary self.server = WebKitDriverServer(self.logger, binary=webdriver_binary, args=webdriver_args) def start(self, **kwargs): self.server.start(block=False) def stop(self, force=False): self.server.stop(force=force) def pid(self): return self.server.pid def is_alive(self): # TODO(ato): This only indicates the driver is alive, # and doesn't say anything about whether a browser session # is active. return self.server.is_alive def cleanup(self): self.stop() def executor_browser(self): return ExecutorBrowser, {"webdriver_url": self.server.url}
port_key_map = {"gtk": "webkitgtk"} browser_options_port = port_key_map.get(port_name, port_name) browser_options_key = "%s:browserOptions" % browser_options_port return { "browserName": "MiniBrowser", "browserVersion": "2.20", "platformName": "ANY", browser_options_key: { "binary": kwargs["binary"], "args": kwargs.get("binary_args", []), "certificates": [ {"host": server_config["browser_host"], "certificateFile": kwargs["host_cert_path"]}]}}
conditional_block
webkit.py
from .base import Browser, ExecutorBrowser, require_arg from .base import get_timeout_multiplier # noqa: F401 from ..executors import executor_kwargs as base_executor_kwargs from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401 WebDriverRefTestExecutor) # noqa: F401 from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401 from ..webdriver_server import WebKitDriverServer __wptrunner__ = {"product": "webkit", "check_args": "check_args", "browser": "WebKitBrowser", "browser_kwargs": "browser_kwargs", "executor": {"testharness": "WebDriverTestharnessExecutor", "reftest": "WebDriverRefTestExecutor", "wdspec": "WebKitDriverWdspecExecutor"}, "executor_kwargs": "executor_kwargs", "env_extras": "env_extras", "env_options": "env_options", "run_info_extras": "run_info_extras", "timeout_multiplier": "get_timeout_multiplier"} def check_args(**kwargs): require_arg(kwargs, "binary") require_arg(kwargs, "webdriver_binary") require_arg(kwargs, "webkit_port") def browser_kwargs(test_type, run_info_data, config, **kwargs): return {"binary": kwargs["binary"], "webdriver_binary": kwargs["webdriver_binary"], "webdriver_args": kwargs.get("webdriver_args")} def capabilities_for_port(server_config, **kwargs): port_name = kwargs["webkit_port"] if port_name in ["gtk", "wpe"]: port_key_map = {"gtk": "webkitgtk"} browser_options_port = port_key_map.get(port_name, port_name) browser_options_key = "%s:browserOptions" % browser_options_port return { "browserName": "MiniBrowser", "browserVersion": "2.20", "platformName": "ANY", browser_options_key: { "binary": kwargs["binary"], "args": kwargs.get("binary_args", []), "certificates": [ {"host": server_config["browser_host"], "certificateFile": kwargs["host_cert_path"]}]}} return {} def executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs): executor_kwargs = base_executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs) executor_kwargs["close_after_done"] = True executor_kwargs["capabilities"] = capabilities_for_port(server_config, **kwargs) return executor_kwargs def env_extras(**kwargs): return [] def env_options(): return {} def run_info_extras(**kwargs): return {"webkit_port": kwargs["webkit_port"]} class WebKitBrowser(Browser): """Generic WebKit browser is backed by WebKit's WebDriver implementation, which is supplied through ``wptrunner.webdriver.WebKitDriverServer``. """ def __init__(self, logger, binary, webdriver_binary=None, webdriver_args=None): Browser.__init__(self, logger) self.binary = binary self.server = WebKitDriverServer(self.logger, binary=webdriver_binary, args=webdriver_args) def start(self, **kwargs): self.server.start(block=False) def stop(self, force=False): self.server.stop(force=force) def pid(self): return self.server.pid def
(self): # TODO(ato): This only indicates the driver is alive, # and doesn't say anything about whether a browser session # is active. return self.server.is_alive def cleanup(self): self.stop() def executor_browser(self): return ExecutorBrowser, {"webdriver_url": self.server.url}
is_alive
identifier_name
webkit.py
from .base import Browser, ExecutorBrowser, require_arg from .base import get_timeout_multiplier # noqa: F401 from ..executors import executor_kwargs as base_executor_kwargs from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401 WebDriverRefTestExecutor) # noqa: F401 from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401 from ..webdriver_server import WebKitDriverServer __wptrunner__ = {"product": "webkit", "check_args": "check_args", "browser": "WebKitBrowser", "browser_kwargs": "browser_kwargs", "executor": {"testharness": "WebDriverTestharnessExecutor", "reftest": "WebDriverRefTestExecutor", "wdspec": "WebKitDriverWdspecExecutor"}, "executor_kwargs": "executor_kwargs", "env_extras": "env_extras", "env_options": "env_options", "run_info_extras": "run_info_extras", "timeout_multiplier": "get_timeout_multiplier"} def check_args(**kwargs): require_arg(kwargs, "binary") require_arg(kwargs, "webdriver_binary") require_arg(kwargs, "webkit_port") def browser_kwargs(test_type, run_info_data, config, **kwargs): return {"binary": kwargs["binary"], "webdriver_binary": kwargs["webdriver_binary"], "webdriver_args": kwargs.get("webdriver_args")} def capabilities_for_port(server_config, **kwargs): port_name = kwargs["webkit_port"] if port_name in ["gtk", "wpe"]: port_key_map = {"gtk": "webkitgtk"} browser_options_port = port_key_map.get(port_name, port_name) browser_options_key = "%s:browserOptions" % browser_options_port return { "browserName": "MiniBrowser", "browserVersion": "2.20", "platformName": "ANY", browser_options_key: { "binary": kwargs["binary"], "args": kwargs.get("binary_args", []), "certificates": [ {"host": server_config["browser_host"], "certificateFile": kwargs["host_cert_path"]}]}} return {} def executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs): executor_kwargs = base_executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs) executor_kwargs["close_after_done"] = True executor_kwargs["capabilities"] = capabilities_for_port(server_config, **kwargs) return executor_kwargs def env_extras(**kwargs): return [] def env_options():
def run_info_extras(**kwargs): return {"webkit_port": kwargs["webkit_port"]} class WebKitBrowser(Browser): """Generic WebKit browser is backed by WebKit's WebDriver implementation, which is supplied through ``wptrunner.webdriver.WebKitDriverServer``. """ def __init__(self, logger, binary, webdriver_binary=None, webdriver_args=None): Browser.__init__(self, logger) self.binary = binary self.server = WebKitDriverServer(self.logger, binary=webdriver_binary, args=webdriver_args) def start(self, **kwargs): self.server.start(block=False) def stop(self, force=False): self.server.stop(force=force) def pid(self): return self.server.pid def is_alive(self): # TODO(ato): This only indicates the driver is alive, # and doesn't say anything about whether a browser session # is active. return self.server.is_alive def cleanup(self): self.stop() def executor_browser(self): return ExecutorBrowser, {"webdriver_url": self.server.url}
return {}
identifier_body
big.js-global-tests.ts
/* This file contains tests for the globally exported big.js definitions. Use the global Big constructor and the types in the global BigJs namespace. Do not import anything from 'big.js' Tests include code from http://mikemcl.github.io/big.js/ Minor changes have been made such as adding variable definitions where required. */ function constructorTests() { const x = new Big(9); // '9' const y = new Big(x); // '9' const d = Big(435.345); // 'new' is optional const e = Big('435.345'); // 'new' is optional const a = new Big('5032485723458348569331745.33434346346912144534543'); const b = new Big('4.321e+4'); // '43210' const c = new Big('-735.0918e-430'); // '-7.350918e-428' } function staticPropertiesTests() { Big.DP = 40; Big.RM = 3; Big.RM = BigJs.RoundingMode.RoundUp; } function absTests() { const x = new Big(-0.8); x.abs(); // '0.8' } function cmpTests() { const x = new Big(6); const y = new Big(5); x.cmp(y); // 1 y.cmp(x.minus(1)); // 0 } function divTests() { const x = new Big(355); const y = new Big(113); x.div(y); // '3.14159292035398230088' Big.DP = 2; x.div(y); // '3.14' x.div(5); // '71' } function eqTests() { 0 === 1e-324; // true const x = new Big(0); x.eq('1e-324'); // false Big(-0).eq(x); // true ( -0 === 0 ) } function gtTests() { 0.1 > 0.3 - 0.2; // true const x = new Big(0.1); x.gt(Big(0.3).minus(0.2)); // false Big(0).gt(x); // false } function gteTests() { 0.3 - 0.2 >= 0.1; // false const x = new Big(0.3).minus(0.2); x.gte(0.1); // true Big(1).gte(x); // true } function ltTests() { 0.3 - 0.2 < 0.1; // true const x = new Big(0.3).minus(0.2); x.lt(0.1); // false Big(0).lt(x); // true } function lteTests() { 0.1 <= 0.3 - 0.2; // false const x = new Big(0.1); x.lte(Big(0.3).minus(0.2)); // true Big(-1).lte(x); // true } function minusTests() { 0.3 - 0.1; // 0.19999999999999998 const x = new Big(0.3); x.minus(0.1); // '0.2' } function modTests() { 1 % 0.9; // 0.09999999999999998 const x = Big(1); x.mod(0.9); // '0.1' } function plusTests() { 0.1 + 0.2; // 0.30000000000000004 const x = new Big(0.1); const y = x.plus(0.2); // '0.3' Big(0.7).plus(x).plus(y); // '1' } function powTests() { Math.pow(0.7, 2); // 0.48999999999999994 const x = new Big(0.7); x.pow(2); // '0.49' Big.DP = 20; Big(3).pow(-2); // '0.11111111111111111111' new Big(123.456).pow(1000).toString().length; // 5099 new Big(2).pow(1e+6); // Time taken (Node.js): 9 minutes 34 secs. 
} function roundTests() { const x = 123.45; Math.round(x); // 123 const y = new Big(x); y.round(); // '123' y.round(2); // '123.45' y.round(10); // '123.45' y.round(1, 0); // '123.4' y.round(1, 1); // '123.5' y.round(1, 2); // '123.4' y.round(1, 3); // '123.5' y; // '123.45' } function sqrtTests() { const x = new Big(16); x.sqrt(); // '4' const y = new Big(3); y.sqrt(); // '1.73205080756887729353' } function timesTests() { 0.6 * 3; // 1.7999999999999998 const x = new Big(0.6); const y = x.times(3); // '1.8' Big('7e+500').times(y); // '1.26e+501' } function toExponentialTests() { const x = 45.6; const y = new Big(x); x.toExponential(); // '4.56e+1' y.toExponential(); // '4.56e+1' x.toExponential(0); // '5e+1' y.toExponential(0); // '5e+1' x.toExponential(1); // '4.6e+1' y.toExponential(1); // '4.6e+1' x.toExponential(3); // '4.560e+1' y.toExponential(3); // '4.560e+1' } function toFixedTests() { const x = 45.6; const y = new Big(x); x.toFixed(); // '46' y.toFixed(); // '45.6' y.toFixed(0); // '46' x.toFixed(3); // '45.600' y.toFixed(3); // '45.600' } function toPrecisionTests() { const x = 45.6; const y = new Big(x); x.toPrecision(); // '45.6' y.toPrecision(); // '45.6' x.toPrecision(1); // '5e+1' y.toPrecision(1); // '5e+1' x.toPrecision(5); // '45.600' y.toPrecision(5); // '45.600' } function toStringTests() { const x = new Big('9.99e+20'); x.toString(); // '999000000000000000000' const y = new Big('1E21'); x.toString(); // '1e+21' } function valueOfTests()
function toJSONTests() { const x = new Big('177.7e+457'); const y = new Big(235.4325); const z = new Big('0.0098074'); const str = JSON.stringify([x, y, z]); const a = new Big('123').toJSON(); JSON.parse(str, (k, v) => k === '' ? v : new Big(v)); // Returns an array of three Big numbers. } // see http://mikemcl.github.io/big.js/#faq // "How can I simultaneously use different decimal places and/or rounding mode settings for different Big numbers?" function testMultipleConstructors() { const Big10 = Big(); // Set the decimal places of division operations for each constructor. Big.DP = 3; Big10.DP = 10; const x = Big(5); const y = Big10(5); x.div(3); // 1.667 y.div(3); // 1.6666666667 } function multipleTypesAccepted(n: number | BigJs.Big | string) { const y = Big(n) .minus(n) .mod(n) .plus(n) .times(n); y.cmp(n); y.eq(n); y.gt(n); y.gte(n); y.lt(n); y.lte(n); y.div(n); }
{ const x = new Big('177.7e+457'); x.valueOf(); // '1.777e+459' }
identifier_body
big.js-global-tests.ts
/* This file contains tests for the globally exported big.js definitions. Use the global Big constructor and the types in the global BigJs namespace. Do not import anything from 'big.js' Tests include code from http://mikemcl.github.io/big.js/ Minor changes have been made such as adding variable definitions where required. */ function constructorTests() { const x = new Big(9); // '9' const y = new Big(x); // '9' const d = Big(435.345); // 'new' is optional const e = Big('435.345'); // 'new' is optional const a = new Big('5032485723458348569331745.33434346346912144534543'); const b = new Big('4.321e+4'); // '43210' const c = new Big('-735.0918e-430'); // '-7.350918e-428' } function staticPropertiesTests() { Big.DP = 40; Big.RM = 3; Big.RM = BigJs.RoundingMode.RoundUp; } function absTests() { const x = new Big(-0.8); x.abs(); // '0.8' } function cmpTests() { const x = new Big(6); const y = new Big(5); x.cmp(y); // 1 y.cmp(x.minus(1)); // 0 } function divTests() { const x = new Big(355); const y = new Big(113); x.div(y); // '3.14159292035398230088' Big.DP = 2; x.div(y); // '3.14' x.div(5); // '71' } function eqTests() { 0 === 1e-324; // true const x = new Big(0); x.eq('1e-324'); // false Big(-0).eq(x); // true ( -0 === 0 ) } function gtTests() { 0.1 > 0.3 - 0.2; // true const x = new Big(0.1); x.gt(Big(0.3).minus(0.2)); // false
0.3 - 0.2 >= 0.1; // false const x = new Big(0.3).minus(0.2); x.gte(0.1); // true Big(1).gte(x); // true } function ltTests() { 0.3 - 0.2 < 0.1; // true const x = new Big(0.3).minus(0.2); x.lt(0.1); // false Big(0).lt(x); // true } function lteTests() { 0.1 <= 0.3 - 0.2; // false const x = new Big(0.1); x.lte(Big(0.3).minus(0.2)); // true Big(-1).lte(x); // true } function minusTests() { 0.3 - 0.1; // 0.19999999999999998 const x = new Big(0.3); x.minus(0.1); // '0.2' } function modTests() { 1 % 0.9; // 0.09999999999999998 const x = Big(1); x.mod(0.9); // '0.1' } function plusTests() { 0.1 + 0.2; // 0.30000000000000004 const x = new Big(0.1); const y = x.plus(0.2); // '0.3' Big(0.7).plus(x).plus(y); // '1' } function powTests() { Math.pow(0.7, 2); // 0.48999999999999994 const x = new Big(0.7); x.pow(2); // '0.49' Big.DP = 20; Big(3).pow(-2); // '0.11111111111111111111' new Big(123.456).pow(1000).toString().length; // 5099 new Big(2).pow(1e+6); // Time taken (Node.js): 9 minutes 34 secs. } function roundTests() { const x = 123.45; Math.round(x); // 123 const y = new Big(x); y.round(); // '123' y.round(2); // '123.45' y.round(10); // '123.45' y.round(1, 0); // '123.4' y.round(1, 1); // '123.5' y.round(1, 2); // '123.4' y.round(1, 3); // '123.5' y; // '123.45' } function sqrtTests() { const x = new Big(16); x.sqrt(); // '4' const y = new Big(3); y.sqrt(); // '1.73205080756887729353' } function timesTests() { 0.6 * 3; // 1.7999999999999998 const x = new Big(0.6); const y = x.times(3); // '1.8' Big('7e+500').times(y); // '1.26e+501' } function toExponentialTests() { const x = 45.6; const y = new Big(x); x.toExponential(); // '4.56e+1' y.toExponential(); // '4.56e+1' x.toExponential(0); // '5e+1' y.toExponential(0); // '5e+1' x.toExponential(1); // '4.6e+1' y.toExponential(1); // '4.6e+1' x.toExponential(3); // '4.560e+1' y.toExponential(3); // '4.560e+1' } function toFixedTests() { const x = 45.6; const y = new Big(x); x.toFixed(); // '46' y.toFixed(); // '45.6' y.toFixed(0); // '46' x.toFixed(3); // '45.600' y.toFixed(3); // '45.600' } function toPrecisionTests() { const x = 45.6; const y = new Big(x); x.toPrecision(); // '45.6' y.toPrecision(); // '45.6' x.toPrecision(1); // '5e+1' y.toPrecision(1); // '5e+1' x.toPrecision(5); // '45.600' y.toPrecision(5); // '45.600' } function toStringTests() { const x = new Big('9.99e+20'); x.toString(); // '999000000000000000000' const y = new Big('1E21'); x.toString(); // '1e+21' } function valueOfTests() { const x = new Big('177.7e+457'); x.valueOf(); // '1.777e+459' } function toJSONTests() { const x = new Big('177.7e+457'); const y = new Big(235.4325); const z = new Big('0.0098074'); const str = JSON.stringify([x, y, z]); const a = new Big('123').toJSON(); JSON.parse(str, (k, v) => k === '' ? v : new Big(v)); // Returns an array of three Big numbers. } // see http://mikemcl.github.io/big.js/#faq // "How can I simultaneously use different decimal places and/or rounding mode settings for different Big numbers?" function testMultipleConstructors() { const Big10 = Big(); // Set the decimal places of division operations for each constructor. Big.DP = 3; Big10.DP = 10; const x = Big(5); const y = Big10(5); x.div(3); // 1.667 y.div(3); // 1.6666666667 } function multipleTypesAccepted(n: number | BigJs.Big | string) { const y = Big(n) .minus(n) .mod(n) .plus(n) .times(n); y.cmp(n); y.eq(n); y.gt(n); y.gte(n); y.lt(n); y.lte(n); y.div(n); }
Big(0).gt(x); // false } function gteTests() {
random_line_split
big.js-global-tests.ts
/* This file contains tests for the globally exported big.js definitions. Use the global Big constructor and the types in the global BigJs namespace. Do not import anything from 'big.js' Tests include code from http://mikemcl.github.io/big.js/ Minor changes have been made such as adding variable definitions where required. */ function constructorTests() { const x = new Big(9); // '9' const y = new Big(x); // '9' const d = Big(435.345); // 'new' is optional const e = Big('435.345'); // 'new' is optional const a = new Big('5032485723458348569331745.33434346346912144534543'); const b = new Big('4.321e+4'); // '43210' const c = new Big('-735.0918e-430'); // '-7.350918e-428' } function staticPropertiesTests() { Big.DP = 40; Big.RM = 3; Big.RM = BigJs.RoundingMode.RoundUp; } function absTests() { const x = new Big(-0.8); x.abs(); // '0.8' } function cmpTests() { const x = new Big(6); const y = new Big(5); x.cmp(y); // 1 y.cmp(x.minus(1)); // 0 } function divTests() { const x = new Big(355); const y = new Big(113); x.div(y); // '3.14159292035398230088' Big.DP = 2; x.div(y); // '3.14' x.div(5); // '71' } function eqTests() { 0 === 1e-324; // true const x = new Big(0); x.eq('1e-324'); // false Big(-0).eq(x); // true ( -0 === 0 ) } function gtTests() { 0.1 > 0.3 - 0.2; // true const x = new Big(0.1); x.gt(Big(0.3).minus(0.2)); // false Big(0).gt(x); // false } function gteTests() { 0.3 - 0.2 >= 0.1; // false const x = new Big(0.3).minus(0.2); x.gte(0.1); // true Big(1).gte(x); // true } function ltTests() { 0.3 - 0.2 < 0.1; // true const x = new Big(0.3).minus(0.2); x.lt(0.1); // false Big(0).lt(x); // true } function lteTests() { 0.1 <= 0.3 - 0.2; // false const x = new Big(0.1); x.lte(Big(0.3).minus(0.2)); // true Big(-1).lte(x); // true } function minusTests() { 0.3 - 0.1; // 0.19999999999999998 const x = new Big(0.3); x.minus(0.1); // '0.2' } function modTests() { 1 % 0.9; // 0.09999999999999998 const x = Big(1); x.mod(0.9); // '0.1' } function plusTests() { 0.1 + 0.2; // 0.30000000000000004 const x = new Big(0.1); const y = x.plus(0.2); // '0.3' Big(0.7).plus(x).plus(y); // '1' } function powTests() { Math.pow(0.7, 2); // 0.48999999999999994 const x = new Big(0.7); x.pow(2); // '0.49' Big.DP = 20; Big(3).pow(-2); // '0.11111111111111111111' new Big(123.456).pow(1000).toString().length; // 5099 new Big(2).pow(1e+6); // Time taken (Node.js): 9 minutes 34 secs. } function roundTests() { const x = 123.45; Math.round(x); // 123 const y = new Big(x); y.round(); // '123' y.round(2); // '123.45' y.round(10); // '123.45' y.round(1, 0); // '123.4' y.round(1, 1); // '123.5' y.round(1, 2); // '123.4' y.round(1, 3); // '123.5' y; // '123.45' } function sqrtTests() { const x = new Big(16); x.sqrt(); // '4' const y = new Big(3); y.sqrt(); // '1.73205080756887729353' } function timesTests() { 0.6 * 3; // 1.7999999999999998 const x = new Big(0.6); const y = x.times(3); // '1.8' Big('7e+500').times(y); // '1.26e+501' } function
() { const x = 45.6; const y = new Big(x); x.toExponential(); // '4.56e+1' y.toExponential(); // '4.56e+1' x.toExponential(0); // '5e+1' y.toExponential(0); // '5e+1' x.toExponential(1); // '4.6e+1' y.toExponential(1); // '4.6e+1' x.toExponential(3); // '4.560e+1' y.toExponential(3); // '4.560e+1' } function toFixedTests() { const x = 45.6; const y = new Big(x); x.toFixed(); // '46' y.toFixed(); // '45.6' y.toFixed(0); // '46' x.toFixed(3); // '45.600' y.toFixed(3); // '45.600' } function toPrecisionTests() { const x = 45.6; const y = new Big(x); x.toPrecision(); // '45.6' y.toPrecision(); // '45.6' x.toPrecision(1); // '5e+1' y.toPrecision(1); // '5e+1' x.toPrecision(5); // '45.600' y.toPrecision(5); // '45.600' } function toStringTests() { const x = new Big('9.99e+20'); x.toString(); // '999000000000000000000' const y = new Big('1E21'); x.toString(); // '1e+21' } function valueOfTests() { const x = new Big('177.7e+457'); x.valueOf(); // '1.777e+459' } function toJSONTests() { const x = new Big('177.7e+457'); const y = new Big(235.4325); const z = new Big('0.0098074'); const str = JSON.stringify([x, y, z]); const a = new Big('123').toJSON(); JSON.parse(str, (k, v) => k === '' ? v : new Big(v)); // Returns an array of three Big numbers. } // see http://mikemcl.github.io/big.js/#faq // "How can I simultaneously use different decimal places and/or rounding mode settings for different Big numbers?" function testMultipleConstructors() { const Big10 = Big(); // Set the decimal places of division operations for each constructor. Big.DP = 3; Big10.DP = 10; const x = Big(5); const y = Big10(5); x.div(3); // 1.667 y.div(3); // 1.6666666667 } function multipleTypesAccepted(n: number | BigJs.Big | string) { const y = Big(n) .minus(n) .mod(n) .plus(n) .times(n); y.cmp(n); y.eq(n); y.gt(n); y.gte(n); y.lt(n); y.lte(n); y.div(n); }
toExponentialTests
identifier_name
text_box.rs
use super::behaviors::{TextAction, TextBehavior}; use crate::{api::prelude::*, prelude::*, proc_macros::*, themes::theme_orbtk::*}; // --- KEYS -- pub static STYLE_TEXT_BOX: &str = "text_box"; static ID_CURSOR: &str = "id_cursor"; // --- KEYS -- widget!( /// The `TextBox` widget represents a single line text input widget. /// /// * style: `text_box` TextBox: ActivateHandler, KeyDownHandler, TextInputHandler { /// Sets or shares the text property. text: String, /// Sets or shares the water_mark text property. water_mark: String, /// Sets or shares the text selection property. selection: TextSelection, /// Sets or shares the foreground property. foreground: Brush, /// Sets or shares the font size property. font_size: f64, /// Sets or shares the font property. font: String, /// Sets or shares the background property. background: Brush, /// Sets or shares the border radius property. border_radius: f64, /// Sets or shares the border thickness property. border_width: Thickness, /// Sets or shares the border brush property. border_brush: Brush, /// Sets or shares the padding property. padding: Thickness, /// Sets or shares the focused property. focused: bool, /// Sets or shares a value that describes if the TextBox should lose focus on activation (enter). lose_focus_on_activation: bool, /// Used to request focus from outside. Set to `true` to request focus. request_focus: bool, /// If set to `true` all characters will be focused when the widget gets focus. Default is `true` select_all_on_focus: bool, /// Indicates if the widget is hovered by the mouse cursor. hover: bool } ); impl Template for TextBox { fn
(self, id: Entity, ctx: &mut BuildContext) -> Self { let text_block = TextBlock::new() .v_align("center") .h_align("start") .foreground(id) .text(id) .water_mark(id) .font(id) .font_size(id) .localizable(false) .build(ctx); let cursor = Cursor::new().id(ID_CURSOR).selection(id).build(ctx); let text_behavior = TextBehavior::new() .cursor(cursor.0) .target(id.0) .text_block(text_block.0) .focused(id) .font(id) .font_size(id) .lose_focus_on_activation(id) .select_all_on_focus(id) .request_focus(id) .text(id) .selection(id) .build(ctx); self.name("TextBox") .style(STYLE_TEXT_BOX) .background(colors::LYNCH_COLOR) .border_brush("transparent") .border_width(0.0) .border_radius(2.0) .focused(false) .font_size(orbtk_fonts::FONT_SIZE_12) .font("Roboto-Regular") .foreground(colors::LINK_WATER_COLOR) .height(32.0) .lose_focus_on_activation(true) .min_width(128.0) .padding(4.0) .select_all_on_focus(true) .selection(TextSelection::default()) .text("") .child(text_behavior) .child( Container::new() .background(id) .border_radius(id) .border_width(id) .border_brush(id) .padding(id) .child( Grid::new() .clip(true) .child(cursor) .child(text_block) .build(ctx), ) .build(ctx), ) .on_changed("text", move |ctx, _| { ctx.send_message(TextAction::ForceUpdate(false), text_behavior); }) } }
template
identifier_name
text_box.rs
use super::behaviors::{TextAction, TextBehavior}; use crate::{api::prelude::*, prelude::*, proc_macros::*, themes::theme_orbtk::*}; // --- KEYS -- pub static STYLE_TEXT_BOX: &str = "text_box"; static ID_CURSOR: &str = "id_cursor"; // --- KEYS -- widget!( /// The `TextBox` widget represents a single line text input widget. /// /// * style: `text_box` TextBox: ActivateHandler, KeyDownHandler, TextInputHandler { /// Sets or shares the text property. text: String, /// Sets or shares the water_mark text property. water_mark: String, /// Sets or shares the text selection property. selection: TextSelection, /// Sets or shares the foreground property. foreground: Brush, /// Sets or shares the font size property. font_size: f64, /// Sets or shares the font property. font: String, /// Sets or shares the background property. background: Brush, /// Sets or shares the border radius property. border_radius: f64, /// Sets or shares the border thickness property. border_width: Thickness, /// Sets or shares the border brush property. border_brush: Brush, /// Sets or shares the padding property. padding: Thickness, /// Sets or shares the focused property. focused: bool, /// Sets or shares a value that describes if the TextBox should lose focus on activation (enter). lose_focus_on_activation: bool, /// Used to request focus from outside. Set to `true` to request focus. request_focus: bool, /// If set to `true` all characters will be focused when the widget gets focus. Default is `true` select_all_on_focus: bool, /// Indicates if the widget is hovered by the mouse cursor. hover: bool } ); impl Template for TextBox {
.v_align("center") .h_align("start") .foreground(id) .text(id) .water_mark(id) .font(id) .font_size(id) .localizable(false) .build(ctx); let cursor = Cursor::new().id(ID_CURSOR).selection(id).build(ctx); let text_behavior = TextBehavior::new() .cursor(cursor.0) .target(id.0) .text_block(text_block.0) .focused(id) .font(id) .font_size(id) .lose_focus_on_activation(id) .select_all_on_focus(id) .request_focus(id) .text(id) .selection(id) .build(ctx); self.name("TextBox") .style(STYLE_TEXT_BOX) .background(colors::LYNCH_COLOR) .border_brush("transparent") .border_width(0.0) .border_radius(2.0) .focused(false) .font_size(orbtk_fonts::FONT_SIZE_12) .font("Roboto-Regular") .foreground(colors::LINK_WATER_COLOR) .height(32.0) .lose_focus_on_activation(true) .min_width(128.0) .padding(4.0) .select_all_on_focus(true) .selection(TextSelection::default()) .text("") .child(text_behavior) .child( Container::new() .background(id) .border_radius(id) .border_width(id) .border_brush(id) .padding(id) .child( Grid::new() .clip(true) .child(cursor) .child(text_block) .build(ctx), ) .build(ctx), ) .on_changed("text", move |ctx, _| { ctx.send_message(TextAction::ForceUpdate(false), text_behavior); }) } }
fn template(self, id: Entity, ctx: &mut BuildContext) -> Self { let text_block = TextBlock::new()
random_line_split
text_box.rs
use super::behaviors::{TextAction, TextBehavior}; use crate::{api::prelude::*, prelude::*, proc_macros::*, themes::theme_orbtk::*}; // --- KEYS -- pub static STYLE_TEXT_BOX: &str = "text_box"; static ID_CURSOR: &str = "id_cursor"; // --- KEYS -- widget!( /// The `TextBox` widget represents a single line text input widget. /// /// * style: `text_box` TextBox: ActivateHandler, KeyDownHandler, TextInputHandler { /// Sets or shares the text property. text: String, /// Sets or shares the water_mark text property. water_mark: String, /// Sets or shares the text selection property. selection: TextSelection, /// Sets or shares the foreground property. foreground: Brush, /// Sets or shares the font size property. font_size: f64, /// Sets or shares the font property. font: String, /// Sets or shares the background property. background: Brush, /// Sets or shares the border radius property. border_radius: f64, /// Sets or shares the border thickness property. border_width: Thickness, /// Sets or shares the border brush property. border_brush: Brush, /// Sets or shares the padding property. padding: Thickness, /// Sets or shares the focused property. focused: bool, /// Sets or shares a value that describes if the TextBox should lose focus on activation (enter). lose_focus_on_activation: bool, /// Used to request focus from outside. Set to `true` to request focus. request_focus: bool, /// If set to `true` all characters will be focused when the widget gets focus. Default is `true` select_all_on_focus: bool, /// Indicates if the widget is hovered by the mouse cursor. hover: bool } ); impl Template for TextBox { fn template(self, id: Entity, ctx: &mut BuildContext) -> Self
}
{ let text_block = TextBlock::new() .v_align("center") .h_align("start") .foreground(id) .text(id) .water_mark(id) .font(id) .font_size(id) .localizable(false) .build(ctx); let cursor = Cursor::new().id(ID_CURSOR).selection(id).build(ctx); let text_behavior = TextBehavior::new() .cursor(cursor.0) .target(id.0) .text_block(text_block.0) .focused(id) .font(id) .font_size(id) .lose_focus_on_activation(id) .select_all_on_focus(id) .request_focus(id) .text(id) .selection(id) .build(ctx); self.name("TextBox") .style(STYLE_TEXT_BOX) .background(colors::LYNCH_COLOR) .border_brush("transparent") .border_width(0.0) .border_radius(2.0) .focused(false) .font_size(orbtk_fonts::FONT_SIZE_12) .font("Roboto-Regular") .foreground(colors::LINK_WATER_COLOR) .height(32.0) .lose_focus_on_activation(true) .min_width(128.0) .padding(4.0) .select_all_on_focus(true) .selection(TextSelection::default()) .text("") .child(text_behavior) .child( Container::new() .background(id) .border_radius(id) .border_width(id) .border_brush(id) .padding(id) .child( Grid::new() .clip(true) .child(cursor) .child(text_block) .build(ctx), ) .build(ctx), ) .on_changed("text", move |ctx, _| { ctx.send_message(TextAction::ForceUpdate(false), text_behavior); }) }
identifier_body
config.js
// # Ghost Configuration // Setup your Ghost install for various environments var path = require('path'), config; config = { // ### Development **(default)** development: { // The url to use when providing links to the site, E.g. in RSS and email. url: 'http://objectiveclem.local', // Example mail config // Visit http://docs.ghost.org/mail for instructions // ``` // mail: { // transport: 'SMTP', // options: { // service: 'Mailgun', // auth: { // user: '', // mailgun username // pass: '' // mailgun password // } // } // }, // ``` database: { client: 'sqlite3', connection: { filename: path.join(__dirname, '/content/data/ghost-dev.db') }, debug: false }, server: { // Host to be passed to node's `net.Server#listen()` host: '127.0.0.1', // Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT` port: '2368' } }, // ### Production // When running Ghost in the wild, use the production environment // Configure your URL and mail settings here production: { url: 'objective-clem.azurewebsites.net', mail: {}, database: { client: 'sqlite3', connection: { filename: path.join(__dirname, '/content/data/ghost.db') }, debug: false }, server: { // Host to be passed to node's `net.Server#listen()` host: '127.0.0.1', // Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT` port: process.env.PORT } }, // **Developers only need to edit below here** // ### Testing // Used when developing Ghost to run tests and check the health of Ghost // Uses a different port number testing: { url: 'http://127.0.0.1:2369',
} }, server: { host: '127.0.0.1', port: '2369' } }, // ### Travis // Automated testing run through Github travis: { url: 'http://127.0.0.1:2368', database: { client: 'sqlite3', connection: { filename: path.join(__dirname, '/content/data/ghost-travis.db') } }, server: { host: '127.0.0.1', port: '2368' } } }; // Export config module.exports = config;
database: { client: 'sqlite3', connection: { filename: path.join(__dirname, '/content/data/ghost-test.db')
random_line_split
foundation.d.ts
/** * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { MDCFoundation } from 'material__base';
export class MDCDialogFoundation extends MDCFoundation<MSDDialogAdapter> { static readonly cssClasses: cssClasses; static readonly strings: strings; static readonly defaultAdapter: MSDDialogAdapter; open(): void; close(): void; isOpen(): boolean; accept(shouldNotify: boolean): void; cancel(shouldNotify: boolean): void; } export default MDCDialogFoundation;
import { cssClasses, strings } from './constants'; import { MSDDialogAdapter } from './adapter';
random_line_split
foundation.d.ts
/** * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { MDCFoundation } from 'material__base'; import { cssClasses, strings } from './constants'; import { MSDDialogAdapter } from './adapter'; export class
extends MDCFoundation<MSDDialogAdapter> { static readonly cssClasses: cssClasses; static readonly strings: strings; static readonly defaultAdapter: MSDDialogAdapter; open(): void; close(): void; isOpen(): boolean; accept(shouldNotify: boolean): void; cancel(shouldNotify: boolean): void; } export default MDCDialogFoundation;
MDCDialogFoundation
identifier_name