file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
datastructures.rs
|
use std::iter;
use std::ops::{Index, IndexMut};
pub type Idx = [u32; 2];
#[derive(Debug)]
pub struct Matrix<T> {
shape: Idx,
data: Vec<Vec<T>>
}
impl<T: Copy> Matrix<T> {
pub fn fill(shape: Idx, value: T) -> Matrix<T>
|
pub fn iter<'a>(&'a self) -> Box<Iterator<Item=(Idx, T)> + 'a> {
Box::new((0..self.height()).flat_map(move |y| {
(0..self.width()).map(move |x| ([x, y], self[[x, y]]))
}))
}
}
impl<T> Matrix<T> {
pub fn width(&self) -> u32 {
self.shape[0]
}
pub fn height(&self) -> u32 {
self.shape[1]
}
}
impl<T> Index<Idx> for Matrix<T> {
type Output = T;
fn index(&self, index: Idx) -> &T {
let (x, y) = (index[0], index[1]);
assert!(x < self.width() && y < self.height());
&self.data[x as usize][y as usize]
}
}
impl<T> IndexMut<Idx> for Matrix<T> {
fn index_mut(&mut self, index: Idx) -> &mut T {
let (x, y) = (index[0], index[1]);
assert!(x < self.width() && y < self.height());
&mut self.data[x as usize][y as usize]
}
}
|
{
let data = (0..shape[0]).map(|_| {
iter::repeat(value).take(shape[1] as usize)
.collect::<Vec<_>>()
}).collect::<Vec<_>>();
Matrix {
shape: shape,
data: data
}
}
|
identifier_body
|
partial_cmp_natural.rs
|
use malachite_base::num::arithmetic::traits::Sign;
use malachite_base::num::basic::traits::One;
use malachite_base::num::logic::traits::SignificantBits;
use malachite_nz::natural::Natural;
use std::cmp::Ordering;
use Rational;
impl PartialOrd<Natural> for Rational {
/// Compares a `Rational` to a `Natural`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Rational::from_str("22/7").unwrap() > Natural::from(3u32));
/// assert!(Rational::from_str("22/7").unwrap() < Natural::from(4u32));
/// ```
fn partial_cmp(&self, other: &Natural) -> Option<Ordering> {
// First check signs
let self_sign = self.sign();
let other_sign = other.sign();
let sign_cmp = self_sign.cmp(&other_sign);
if sign_cmp != Ordering::Equal || self_sign == Ordering::Equal {
return Some(sign_cmp);
}
// Then check if one is < 1 and the other is > 1
let self_cmp_one = self.numerator.cmp(&self.denominator);
let other_cmp_one = other.cmp(&Natural::ONE);
let one_cmp = self_cmp_one.cmp(&other_cmp_one);
if one_cmp != Ordering::Equal {
return Some(one_cmp);
}
// Then compare numerators and denominators
let n_cmp = self.numerator.cmp(other);
let d_cmp = self.denominator.cmp(&Natural::ONE);
if n_cmp == Ordering::Equal && d_cmp == Ordering::Equal {
return Some(Ordering::Equal);
} else {
let nd_cmp = n_cmp.cmp(&d_cmp);
if nd_cmp != Ordering::Equal {
return Some(nd_cmp);
}
}
let first_prod_bits = self.numerator.significant_bits();
let second_prod_bits = self.denominator.significant_bits() + other.significant_bits();
if first_prod_bits < second_prod_bits - 1 {
return Some(Ordering::Less);
} else if first_prod_bits > second_prod_bits {
return Some(Ordering::Greater);
}
// Finally, cross-multiply.
Some(self.numerator.cmp(&(&self.denominator * other)))
}
}
impl PartialOrd<Rational> for Natural {
/// Compares a `Natural` to a `Rational`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Natural::from(3u32) < Rational::from_str("22/7").unwrap());
/// assert!(Natural::from(4u32) > Rational::from_str("22/7").unwrap());
/// ```
#[inline]
fn
|
(&self, other: &Rational) -> Option<Ordering> {
other.partial_cmp(self).map(Ordering::reverse)
}
}
|
partial_cmp
|
identifier_name
|
partial_cmp_natural.rs
|
use malachite_base::num::arithmetic::traits::Sign;
use malachite_base::num::basic::traits::One;
use malachite_base::num::logic::traits::SignificantBits;
use malachite_nz::natural::Natural;
use std::cmp::Ordering;
use Rational;
impl PartialOrd<Natural> for Rational {
/// Compares a `Rational` to a `Natural`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Rational::from_str("22/7").unwrap() > Natural::from(3u32));
/// assert!(Rational::from_str("22/7").unwrap() < Natural::from(4u32));
/// ```
fn partial_cmp(&self, other: &Natural) -> Option<Ordering> {
// First check signs
let self_sign = self.sign();
|
let sign_cmp = self_sign.cmp(&other_sign);
if sign_cmp != Ordering::Equal || self_sign == Ordering::Equal {
return Some(sign_cmp);
}
// Then check if one is < 1 and the other is > 1
let self_cmp_one = self.numerator.cmp(&self.denominator);
let other_cmp_one = other.cmp(&Natural::ONE);
let one_cmp = self_cmp_one.cmp(&other_cmp_one);
if one_cmp != Ordering::Equal {
return Some(one_cmp);
}
// Then compare numerators and denominators
let n_cmp = self.numerator.cmp(other);
let d_cmp = self.denominator.cmp(&Natural::ONE);
if n_cmp == Ordering::Equal && d_cmp == Ordering::Equal {
return Some(Ordering::Equal);
} else {
let nd_cmp = n_cmp.cmp(&d_cmp);
if nd_cmp != Ordering::Equal {
return Some(nd_cmp);
}
}
let first_prod_bits = self.numerator.significant_bits();
let second_prod_bits = self.denominator.significant_bits() + other.significant_bits();
if first_prod_bits < second_prod_bits - 1 {
return Some(Ordering::Less);
} else if first_prod_bits > second_prod_bits {
return Some(Ordering::Greater);
}
// Finally, cross-multiply.
Some(self.numerator.cmp(&(&self.denominator * other)))
}
}
impl PartialOrd<Rational> for Natural {
/// Compares a `Natural` to a `Rational`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Natural::from(3u32) < Rational::from_str("22/7").unwrap());
/// assert!(Natural::from(4u32) > Rational::from_str("22/7").unwrap());
/// ```
#[inline]
fn partial_cmp(&self, other: &Rational) -> Option<Ordering> {
other.partial_cmp(self).map(Ordering::reverse)
}
}
|
let other_sign = other.sign();
|
random_line_split
|
partial_cmp_natural.rs
|
use malachite_base::num::arithmetic::traits::Sign;
use malachite_base::num::basic::traits::One;
use malachite_base::num::logic::traits::SignificantBits;
use malachite_nz::natural::Natural;
use std::cmp::Ordering;
use Rational;
impl PartialOrd<Natural> for Rational {
/// Compares a `Rational` to a `Natural`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Rational::from_str("22/7").unwrap() > Natural::from(3u32));
/// assert!(Rational::from_str("22/7").unwrap() < Natural::from(4u32));
/// ```
fn partial_cmp(&self, other: &Natural) -> Option<Ordering> {
// First check signs
let self_sign = self.sign();
let other_sign = other.sign();
let sign_cmp = self_sign.cmp(&other_sign);
if sign_cmp != Ordering::Equal || self_sign == Ordering::Equal {
return Some(sign_cmp);
}
// Then check if one is < 1 and the other is > 1
let self_cmp_one = self.numerator.cmp(&self.denominator);
let other_cmp_one = other.cmp(&Natural::ONE);
let one_cmp = self_cmp_one.cmp(&other_cmp_one);
if one_cmp != Ordering::Equal {
return Some(one_cmp);
}
// Then compare numerators and denominators
let n_cmp = self.numerator.cmp(other);
let d_cmp = self.denominator.cmp(&Natural::ONE);
if n_cmp == Ordering::Equal && d_cmp == Ordering::Equal {
return Some(Ordering::Equal);
} else
|
let first_prod_bits = self.numerator.significant_bits();
let second_prod_bits = self.denominator.significant_bits() + other.significant_bits();
if first_prod_bits < second_prod_bits - 1 {
return Some(Ordering::Less);
} else if first_prod_bits > second_prod_bits {
return Some(Ordering::Greater);
}
// Finally, cross-multiply.
Some(self.numerator.cmp(&(&self.denominator * other)))
}
}
impl PartialOrd<Rational> for Natural {
/// Compares a `Natural` to a `Rational`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Natural::from(3u32) < Rational::from_str("22/7").unwrap());
/// assert!(Natural::from(4u32) > Rational::from_str("22/7").unwrap());
/// ```
#[inline]
fn partial_cmp(&self, other: &Rational) -> Option<Ordering> {
other.partial_cmp(self).map(Ordering::reverse)
}
}
|
{
let nd_cmp = n_cmp.cmp(&d_cmp);
if nd_cmp != Ordering::Equal {
return Some(nd_cmp);
}
}
|
conditional_block
|
partial_cmp_natural.rs
|
use malachite_base::num::arithmetic::traits::Sign;
use malachite_base::num::basic::traits::One;
use malachite_base::num::logic::traits::SignificantBits;
use malachite_nz::natural::Natural;
use std::cmp::Ordering;
use Rational;
impl PartialOrd<Natural> for Rational {
/// Compares a `Rational` to a `Natural`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Rational::from_str("22/7").unwrap() > Natural::from(3u32));
/// assert!(Rational::from_str("22/7").unwrap() < Natural::from(4u32));
/// ```
fn partial_cmp(&self, other: &Natural) -> Option<Ordering>
|
}
impl PartialOrd<Rational> for Natural {
/// Compares a `Natural` to a `Rational`.
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_nz;
///
/// use malachite_nz::natural::Natural;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert!(Natural::from(3u32) < Rational::from_str("22/7").unwrap());
/// assert!(Natural::from(4u32) > Rational::from_str("22/7").unwrap());
/// ```
#[inline]
fn partial_cmp(&self, other: &Rational) -> Option<Ordering> {
other.partial_cmp(self).map(Ordering::reverse)
}
}
|
{
// First check signs
let self_sign = self.sign();
let other_sign = other.sign();
let sign_cmp = self_sign.cmp(&other_sign);
if sign_cmp != Ordering::Equal || self_sign == Ordering::Equal {
return Some(sign_cmp);
}
// Then check if one is < 1 and the other is > 1
let self_cmp_one = self.numerator.cmp(&self.denominator);
let other_cmp_one = other.cmp(&Natural::ONE);
let one_cmp = self_cmp_one.cmp(&other_cmp_one);
if one_cmp != Ordering::Equal {
return Some(one_cmp);
}
// Then compare numerators and denominators
let n_cmp = self.numerator.cmp(other);
let d_cmp = self.denominator.cmp(&Natural::ONE);
if n_cmp == Ordering::Equal && d_cmp == Ordering::Equal {
return Some(Ordering::Equal);
} else {
let nd_cmp = n_cmp.cmp(&d_cmp);
if nd_cmp != Ordering::Equal {
return Some(nd_cmp);
}
}
let first_prod_bits = self.numerator.significant_bits();
let second_prod_bits = self.denominator.significant_bits() + other.significant_bits();
if first_prod_bits < second_prod_bits - 1 {
return Some(Ordering::Less);
} else if first_prod_bits > second_prod_bits {
return Some(Ordering::Greater);
}
// Finally, cross-multiply.
Some(self.numerator.cmp(&(&self.denominator * other)))
}
|
identifier_body
|
cheese.py
|
"""
Contains CheesePreprocessor
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from ...preprocessors.base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class CheesePreprocessor(Preprocessor):
"""
Adds a cheese tag to the resources object
"""
def
|
(self, **kw):
"""
Public constructor
"""
super(CheesePreprocessor, self).__init__(**kw)
def preprocess(self, nb, resources):
"""
Sphinx preprocessing to apply on each notebook.
Parameters
----------
nb : NotebookNode
Notebook being converted
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
"""
resources['cheese'] = 'real'
return nb, resources
|
__init__
|
identifier_name
|
cheese.py
|
Contains CheesePreprocessor
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from ...preprocessors.base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class CheesePreprocessor(Preprocessor):
"""
Adds a cheese tag to the resources object
"""
def __init__(self, **kw):
"""
Public constructor
"""
super(CheesePreprocessor, self).__init__(**kw)
def preprocess(self, nb, resources):
"""
Sphinx preprocessing to apply on each notebook.
Parameters
----------
nb : NotebookNode
Notebook being converted
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
"""
resources['cheese'] = 'real'
return nb, resources
|
"""
|
random_line_split
|
|
cheese.py
|
"""
Contains CheesePreprocessor
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from ...preprocessors.base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class CheesePreprocessor(Preprocessor):
|
"""
Adds a cheese tag to the resources object
"""
def __init__(self, **kw):
"""
Public constructor
"""
super(CheesePreprocessor, self).__init__(**kw)
def preprocess(self, nb, resources):
"""
Sphinx preprocessing to apply on each notebook.
Parameters
----------
nb : NotebookNode
Notebook being converted
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
"""
resources['cheese'] = 'real'
return nb, resources
|
identifier_body
|
|
0_setup.py
|
#!/usr/bin/python
#
# \file 0_setup.py
# \brief Setup rbank
# \date 2009-03-10-22-43-GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Setup rbank
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
if os.path.isfile("log.log"):
os.remove("log.log")
|
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Setup rbank")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
# Setup source directories
printLog(log, ">>> Setup source directories <<<")
for dir in RBankCmbSourceDirectories:
mkPath(log, DatabaseDirectory + "/" + dir)
mkPath(log, LeveldesignWorldDirectory)
# Setup export directories
printLog(log, ">>> Setup export directories <<<")
mkPath(log, ExportBuildDirectory + "/" + RBankCmbExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + RBankCmbTagExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + SmallbankExportDirectory)
# Setup build directories
printLog(log, ">>> Setup build directories <<<")
mkPath(log, ExportBuildDirectory + "/" + ZoneWeldBuildDirectory)
for dir in IgLookupDirectories:
mkPath(log, ExportBuildDirectory + "/" + dir)
for dir in ShapeLookupDirectories:
mkPath(log, ExportBuildDirectory + "/" + dir)
mkPath(log, ExportBuildDirectory + "/" + RbankBboxBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgLandBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgOtherBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankTessellationBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankSmoothBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRawBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankPreprocBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRetrieversBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankOutputBuildDirectory)
# Setup client directories
printLog(log, ">>> Setup client directories <<<")
mkPath(log, InstallDirectory + "/" + PacsInstallDirectory)
log.close()
# end of file
|
log = open("log.log", "w")
|
random_line_split
|
0_setup.py
|
#!/usr/bin/python
#
# \file 0_setup.py
# \brief Setup rbank
# \date 2009-03-10-22-43-GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Setup rbank
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
if os.path.isfile("log.log"):
os.remove("log.log")
log = open("log.log", "w")
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Setup rbank")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
# Setup source directories
printLog(log, ">>> Setup source directories <<<")
for dir in RBankCmbSourceDirectories:
|
mkPath(log, LeveldesignWorldDirectory)
# Setup export directories
printLog(log, ">>> Setup export directories <<<")
mkPath(log, ExportBuildDirectory + "/" + RBankCmbExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + RBankCmbTagExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + SmallbankExportDirectory)
# Setup build directories
printLog(log, ">>> Setup build directories <<<")
mkPath(log, ExportBuildDirectory + "/" + ZoneWeldBuildDirectory)
for dir in IgLookupDirectories:
mkPath(log, ExportBuildDirectory + "/" + dir)
for dir in ShapeLookupDirectories:
mkPath(log, ExportBuildDirectory + "/" + dir)
mkPath(log, ExportBuildDirectory + "/" + RbankBboxBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgLandBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgOtherBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankTessellationBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankSmoothBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRawBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankPreprocBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRetrieversBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankOutputBuildDirectory)
# Setup client directories
printLog(log, ">>> Setup client directories <<<")
mkPath(log, InstallDirectory + "/" + PacsInstallDirectory)
log.close()
# end of file
|
mkPath(log, DatabaseDirectory + "/" + dir)
|
conditional_block
|
rpi_pfio.py
|
"""
Allows to configure a switch using the PiFace Digital I/O module on a RPi.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.rpi_pfio/
"""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
import homeassistant.components.rpi_pfio as rpi_pfio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['rpi_pfio']
ATTR_INVERT_LOGIC = 'invert_logic'
ATTR_NAME = 'name'
CONF_PORTS = 'ports'
DEFAULT_INVERT_LOGIC = False
PORT_SCHEMA = vol.Schema({
vol.Optional(ATTR_NAME, default=None): cv.string,
vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_PORTS, default={}): vol.Schema({
cv.positive_int: PORT_SCHEMA
})
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the PiFace Digital Output devices."""
switches = []
ports = config.get(CONF_PORTS)
for port, port_entity in ports.items():
name = port_entity[ATTR_NAME]
invert_logic = port_entity[ATTR_INVERT_LOGIC]
switches.append(RPiPFIOSwitch(port, name, invert_logic))
add_devices(switches)
class RPiPFIOSwitch(ToggleEntity):
"""Representation of a PiFace Digital Output."""
def __init__(self, port, name, invert_logic):
"""Initialize the pin."""
self._port = port
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._state = False
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self):
"""Turn the device on."""
rpi_pfio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def
|
(self):
"""Turn the device off."""
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
turn_off
|
identifier_name
|
rpi_pfio.py
|
"""
Allows to configure a switch using the PiFace Digital I/O module on a RPi.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.rpi_pfio/
"""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
import homeassistant.components.rpi_pfio as rpi_pfio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['rpi_pfio']
ATTR_INVERT_LOGIC = 'invert_logic'
ATTR_NAME = 'name'
CONF_PORTS = 'ports'
DEFAULT_INVERT_LOGIC = False
PORT_SCHEMA = vol.Schema({
vol.Optional(ATTR_NAME, default=None): cv.string,
vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_PORTS, default={}): vol.Schema({
cv.positive_int: PORT_SCHEMA
})
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the PiFace Digital Output devices."""
switches = []
ports = config.get(CONF_PORTS)
for port, port_entity in ports.items():
name = port_entity[ATTR_NAME]
invert_logic = port_entity[ATTR_INVERT_LOGIC]
switches.append(RPiPFIOSwitch(port, name, invert_logic))
|
def __init__(self, port, name, invert_logic):
"""Initialize the pin."""
self._port = port
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._state = False
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self):
"""Turn the device on."""
rpi_pfio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self):
"""Turn the device off."""
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
add_devices(switches)
class RPiPFIOSwitch(ToggleEntity):
"""Representation of a PiFace Digital Output."""
|
random_line_split
|
rpi_pfio.py
|
"""
Allows to configure a switch using the PiFace Digital I/O module on a RPi.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.rpi_pfio/
"""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
import homeassistant.components.rpi_pfio as rpi_pfio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['rpi_pfio']
ATTR_INVERT_LOGIC = 'invert_logic'
ATTR_NAME = 'name'
CONF_PORTS = 'ports'
DEFAULT_INVERT_LOGIC = False
PORT_SCHEMA = vol.Schema({
vol.Optional(ATTR_NAME, default=None): cv.string,
vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_PORTS, default={}): vol.Schema({
cv.positive_int: PORT_SCHEMA
})
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the PiFace Digital Output devices."""
switches = []
ports = config.get(CONF_PORTS)
for port, port_entity in ports.items():
name = port_entity[ATTR_NAME]
invert_logic = port_entity[ATTR_INVERT_LOGIC]
switches.append(RPiPFIOSwitch(port, name, invert_logic))
add_devices(switches)
class RPiPFIOSwitch(ToggleEntity):
"""Representation of a PiFace Digital Output."""
def __init__(self, port, name, invert_logic):
"""Initialize the pin."""
self._port = port
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._state = False
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self):
"""Turn the device on."""
rpi_pfio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self):
|
"""Turn the device off."""
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
identifier_body
|
|
rpi_pfio.py
|
"""
Allows to configure a switch using the PiFace Digital I/O module on a RPi.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.rpi_pfio/
"""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
import homeassistant.components.rpi_pfio as rpi_pfio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['rpi_pfio']
ATTR_INVERT_LOGIC = 'invert_logic'
ATTR_NAME = 'name'
CONF_PORTS = 'ports'
DEFAULT_INVERT_LOGIC = False
PORT_SCHEMA = vol.Schema({
vol.Optional(ATTR_NAME, default=None): cv.string,
vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_PORTS, default={}): vol.Schema({
cv.positive_int: PORT_SCHEMA
})
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the PiFace Digital Output devices."""
switches = []
ports = config.get(CONF_PORTS)
for port, port_entity in ports.items():
|
add_devices(switches)
class RPiPFIOSwitch(ToggleEntity):
"""Representation of a PiFace Digital Output."""
def __init__(self, port, name, invert_logic):
"""Initialize the pin."""
self._port = port
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._state = False
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self):
"""Turn the device on."""
rpi_pfio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self):
"""Turn the device off."""
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
name = port_entity[ATTR_NAME]
invert_logic = port_entity[ATTR_INVERT_LOGIC]
switches.append(RPiPFIOSwitch(port, name, invert_logic))
|
conditional_block
|
svh-a-change-trait-bound.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
macro_rules! three {
() => { 3 }
}
pub trait U {}
pub trait V {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn foo<T:V>(_: int) -> int {
3
}
pub fn an_unused_name() -> int {
4
}
|
random_line_split
|
|
svh-a-change-trait-bound.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
macro_rules! three {
() => { 3 }
}
pub trait U {}
pub trait V {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn
|
<T:V>(_: int) -> int {
3
}
pub fn an_unused_name() -> int {
4
}
|
foo
|
identifier_name
|
svh-a-change-trait-bound.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
macro_rules! three {
() => { 3 }
}
pub trait U {}
pub trait V {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn foo<T:V>(_: int) -> int {
3
}
pub fn an_unused_name() -> int
|
{
4
}
|
identifier_body
|
|
plot_self_training_varying_threshold.py
|
"""
=============================================
Effect of varying threshold for self-training
=============================================
This example illustrates the effect of a varying threshold on self-training.
The `breast_cancer` dataset is loaded, and labels are deleted such that only 50
out of 569 samples have labels. A `SelfTrainingClassifier` is fitted on this
dataset, with varying thresholds.
The upper graph shows the amount of labeled samples that the classifier has
available by the end of fit, and the accuracy of the classifier. The lower
graph shows the last iteration in which a sample was labeled. All values are
cross validated with 3 folds.
At low thresholds (in [0.4, 0.5]), the classifier learns from samples that were
labeled with a low confidence. These low-confidence samples are likely have
incorrect predicted labels, and as a result, fitting on these incorrect labels
produces a poor accuracy. Note that the classifier labels almost all of the
samples, and only takes one iteration.
For very high thresholds (in [0.9, 1)) we observe that the classifier does not
augment its dataset (the amount of self-labeled samples is 0). As a result, the
accuracy achieved with a threshold of 0.9999 is the same as a normal supervised
classifier would achieve.
The optimal accuracy lies in between both of these extremes at a threshold of
around 0.7.
"""
# Authors: Oliver Rausch <[email protected]>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.semi_supervised import SelfTrainingClassifier
from sklearn.metrics import accuracy_score
from sklearn.utils import shuffle
n_splits = 3
X, y = datasets.load_breast_cancer(return_X_y=True)
X, y = shuffle(X, y, random_state=42)
y_true = y.copy()
y[50:] = -1
total_samples = y.shape[0]
base_classifier = SVC(probability=True, gamma=0.001, random_state=42)
x_values = np.arange(0.4, 1.05, 0.05)
x_values = np.append(x_values, 0.99999)
scores = np.empty((x_values.shape[0], n_splits))
amount_labeled = np.empty((x_values.shape[0], n_splits))
amount_iterations = np.empty((x_values.shape[0], n_splits))
for (i, threshold) in enumerate(x_values):
self_training_clf = SelfTrainingClassifier(base_classifier, threshold=threshold)
# We need manual cross validation so that we don't treat -1 as a separate
# class when computing accuracy
skfolds = StratifiedKFold(n_splits=n_splits)
for fold, (train_index, test_index) in enumerate(skfolds.split(X, y)):
|
ax1 = plt.subplot(211)
ax1.errorbar(
x_values, scores.mean(axis=1), yerr=scores.std(axis=1), capsize=2, color="b"
)
ax1.set_ylabel("Accuracy", color="b")
ax1.tick_params("y", colors="b")
ax2 = ax1.twinx()
ax2.errorbar(
x_values,
amount_labeled.mean(axis=1),
yerr=amount_labeled.std(axis=1),
capsize=2,
color="g",
)
ax2.set_ylim(bottom=0)
ax2.set_ylabel("Amount of labeled samples", color="g")
ax2.tick_params("y", colors="g")
ax3 = plt.subplot(212, sharex=ax1)
ax3.errorbar(
x_values,
amount_iterations.mean(axis=1),
yerr=amount_iterations.std(axis=1),
capsize=2,
color="b",
)
ax3.set_ylim(bottom=0)
ax3.set_ylabel("Amount of iterations")
ax3.set_xlabel("Threshold")
plt.show()
|
X_train = X[train_index]
y_train = y[train_index]
X_test = X[test_index]
y_test = y[test_index]
y_test_true = y_true[test_index]
self_training_clf.fit(X_train, y_train)
# The amount of labeled samples that at the end of fitting
amount_labeled[i, fold] = (
total_samples
- np.unique(self_training_clf.labeled_iter_, return_counts=True)[1][0]
)
# The last iteration the classifier labeled a sample in
amount_iterations[i, fold] = np.max(self_training_clf.labeled_iter_)
y_pred = self_training_clf.predict(X_test)
scores[i, fold] = accuracy_score(y_test_true, y_pred)
|
conditional_block
|
plot_self_training_varying_threshold.py
|
"""
=============================================
Effect of varying threshold for self-training
=============================================
This example illustrates the effect of a varying threshold on self-training.
The `breast_cancer` dataset is loaded, and labels are deleted such that only 50
out of 569 samples have labels. A `SelfTrainingClassifier` is fitted on this
dataset, with varying thresholds.
The upper graph shows the amount of labeled samples that the classifier has
available by the end of fit, and the accuracy of the classifier. The lower
graph shows the last iteration in which a sample was labeled. All values are
cross validated with 3 folds.
At low thresholds (in [0.4, 0.5]), the classifier learns from samples that were
labeled with a low confidence. These low-confidence samples are likely have
incorrect predicted labels, and as a result, fitting on these incorrect labels
produces a poor accuracy. Note that the classifier labels almost all of the
samples, and only takes one iteration.
For very high thresholds (in [0.9, 1)) we observe that the classifier does not
augment its dataset (the amount of self-labeled samples is 0). As a result, the
accuracy achieved with a threshold of 0.9999 is the same as a normal supervised
classifier would achieve.
The optimal accuracy lies in between both of these extremes at a threshold of
around 0.7.
"""
# Authors: Oliver Rausch <[email protected]>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.semi_supervised import SelfTrainingClassifier
from sklearn.metrics import accuracy_score
from sklearn.utils import shuffle
n_splits = 3
X, y = datasets.load_breast_cancer(return_X_y=True)
X, y = shuffle(X, y, random_state=42)
y_true = y.copy()
y[50:] = -1
total_samples = y.shape[0]
base_classifier = SVC(probability=True, gamma=0.001, random_state=42)
x_values = np.arange(0.4, 1.05, 0.05)
x_values = np.append(x_values, 0.99999)
scores = np.empty((x_values.shape[0], n_splits))
amount_labeled = np.empty((x_values.shape[0], n_splits))
amount_iterations = np.empty((x_values.shape[0], n_splits))
for (i, threshold) in enumerate(x_values):
self_training_clf = SelfTrainingClassifier(base_classifier, threshold=threshold)
# We need manual cross validation so that we don't treat -1 as a separate
# class when computing accuracy
skfolds = StratifiedKFold(n_splits=n_splits)
for fold, (train_index, test_index) in enumerate(skfolds.split(X, y)):
X_train = X[train_index]
y_train = y[train_index]
X_test = X[test_index]
y_test = y[test_index]
y_test_true = y_true[test_index]
self_training_clf.fit(X_train, y_train)
# The amount of labeled samples that at the end of fitting
amount_labeled[i, fold] = (
total_samples
- np.unique(self_training_clf.labeled_iter_, return_counts=True)[1][0]
)
# The last iteration the classifier labeled a sample in
amount_iterations[i, fold] = np.max(self_training_clf.labeled_iter_)
y_pred = self_training_clf.predict(X_test)
scores[i, fold] = accuracy_score(y_test_true, y_pred)
ax1 = plt.subplot(211)
ax1.errorbar(
x_values, scores.mean(axis=1), yerr=scores.std(axis=1), capsize=2, color="b"
)
ax1.set_ylabel("Accuracy", color="b")
ax1.tick_params("y", colors="b")
ax2 = ax1.twinx()
ax2.errorbar(
x_values,
amount_labeled.mean(axis=1),
yerr=amount_labeled.std(axis=1),
capsize=2,
color="g",
)
ax2.set_ylim(bottom=0)
ax2.set_ylabel("Amount of labeled samples", color="g")
ax2.tick_params("y", colors="g")
ax3 = plt.subplot(212, sharex=ax1)
ax3.errorbar(
x_values,
amount_iterations.mean(axis=1),
yerr=amount_iterations.std(axis=1),
capsize=2,
color="b",
)
ax3.set_ylim(bottom=0)
ax3.set_ylabel("Amount of iterations")
|
plt.show()
|
ax3.set_xlabel("Threshold")
|
random_line_split
|
themes.js
|
module.exports = {
light: {
background1: 'rgba(227,227,227,.95)',
background2: 'rgba(204,204,204,.95)',
background2hover: 'rgba(208,208,208,.95)',
foreground1: 'rgba(105,105,105,.95)',
text1: 'rgba(36,36,36,.95)',
text2: 'rgba(87,87,87,.95)'
},
|
background2hover: 'rgba(58,58,58,.95)',
foreground1: 'rgba(112,112,112,.95)',
text1: 'rgba(235,235,235,.95)',
text2: 'rgba(161,161,161,.95)'
}
}
|
dark: {
background1: 'rgba(35,35,35,.95)',
background2: 'rgba(54,54,54,.95)',
|
random_line_split
|
ladders-challenges.ts
|
import type {ChallengeType} from './room-battle';
/**
* A bundle of:
- a ID
* - a battle format
* - a valid team for that format
* - misc other preferences for the battle
*
* To start a battle, you need one of these for every player.
*/
export class BattleReady {
readonly userid: ID;
readonly formatid: string;
readonly settings: User['battleSettings'];
readonly rating: number;
readonly challengeType: ChallengeType;
readonly time: number;
constructor(
userid: ID,
formatid: string,
settings: User['battleSettings'],
rating = 0,
challengeType: ChallengeType = 'challenge'
) {
this.userid = userid;
this.formatid = formatid;
this.settings = settings;
this.rating = rating;
this.challengeType = challengeType;
this.time = Date.now();
}
}
export abstract class AbstractChallenge {
from: ID;
to: ID;
ready: BattleReady | null;
format: string;
acceptCommand: string | null;
message: string;
acceptButton: string;
rejectButton: string;
roomid: RoomID;
constructor(from: ID, to: ID, ready: BattleReady | string, options: {
acceptCommand?: string, rejectCommand?: string, roomid?: RoomID,
message?: string, acceptButton?: string, rejectButton?: string,
} = {}) {
this.from = from;
this.to = to;
this.ready = typeof ready === 'string' ? null : ready;
this.format = typeof ready === 'string' ? ready : ready.formatid;
this.acceptCommand = options.acceptCommand || null;
this.message = options.message || '';
this.roomid = options.roomid || '';
this.acceptButton = options.acceptButton || '';
this.rejectButton = options.rejectButton || '';
}
destroy(accepted?: boolean) {}
}
/**
* As a regular battle challenge, acceptCommand will be null, but you
* can set acceptCommand to use this for custom requests wanting a
* team for something.
*/
export class BattleChallenge extends AbstractChallenge {
declare ready: BattleReady;
declare acceptCommand: string | null;
}
export class GameChallenge extends AbstractChallenge {
declare ready: null;
declare acceptCommand: string;
}
/**
* Invites for `/importinputlog` (`ready: null`) or 4-player battles
* (`ready: BattleReady`)
*/
export class BattleInvite extends AbstractChallenge {
declare acceptCommand: string;
|
(accepted?: boolean) {
if (accepted) return;
const room = Rooms.get(this.roomid);
if (!room) return; // room expired?
const battle = room.battle!;
let invitesFull = true;
for (const player of battle.players) {
if (!player.invite && !player.id) invitesFull = false;
if (player.invite === this.to) player.invite = '';
}
if (invitesFull) battle.sendInviteForm(true);
}
}
/**
* The defining difference between a BattleChallenge and a GameChallenge is
* that a BattleChallenge has a Ready (and is for a RoomBattle format) and
* a GameChallenge doesn't (and is for a RoomGame).
*
* But remember that both can have a custom acceptCommand.
*/
export type Challenge = BattleChallenge | GameChallenge;
/**
* Lists outgoing and incoming challenges for each user ID.
*/
export class Challenges extends Map<ID, Challenge[]> {
getOrCreate(userid: ID): Challenge[] {
let challenges = this.get(userid);
if (challenges) return challenges;
challenges = [];
this.set(userid, challenges);
return challenges;
}
/** Throws Chat.ErrorMessage if a challenge between these users is already in the table */
add(challenge: Challenge): true {
const oldChallenge = this.search(challenge.to, challenge.from);
if (oldChallenge) {
throw new Chat.ErrorMessage(`There is already a challenge (${challenge.format}) between ${challenge.to} and ${challenge.from}!`);
}
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
to.push(challenge);
from.push(challenge);
this.update(challenge.to, challenge.from);
return true;
}
/** Returns false if the challenge isn't in the table */
remove(challenge: Challenge, accepted?: boolean): boolean {
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
const toIndex = to.indexOf(challenge);
let success = false;
if (toIndex >= 0) {
to.splice(toIndex, 1);
if (!to.length) this.delete(challenge.to);
success = true;
}
const fromIndex = from.indexOf(challenge);
if (fromIndex >= 0) {
from.splice(fromIndex, 1);
if (!from.length) this.delete(challenge.from);
}
if (success) {
this.update(challenge.to, challenge.from);
challenge.destroy(accepted);
}
return success;
}
search(userid1: ID, userid2: ID): Challenge | null {
const challenges = this.get(userid1);
if (!challenges) return null;
for (const challenge of challenges) {
if (
(challenge.to === userid1 && challenge.from === userid2) ||
(challenge.to === userid2 && challenge.from === userid1)
) {
return challenge;
}
}
return null;
}
searchByRoom(userid: ID, roomid: RoomID) {
const challenges = this.get(userid);
if (!challenges) return null;
for (const challenge of challenges) {
if (challenge.roomid === roomid) return challenge;
}
return null;
}
/**
* Try to accept a custom challenge, throwing `Chat.ErrorMessage` on failure,
* and returning the user the challenge was from on a success.
*/
resolveAcceptCommand(context: Chat.CommandContext) {
const targetid = context.target as ID;
const chall = this.search(context.user.id, targetid);
if (!chall || chall.to !== context.user.id || chall.acceptCommand !== context.message) {
throw new Chat.ErrorMessage(`Challenge not found. You are using the wrong command. Challenges should be accepted with /accept`);
}
return chall;
}
accept(context: Chat.CommandContext) {
const chall = this.resolveAcceptCommand(context);
this.remove(chall, true);
const fromUser = Users.get(chall.from);
if (!fromUser) throw new Chat.ErrorMessage(`User "${chall.from}" is not available right now.`);
return fromUser;
}
clearFor(userid: ID, reason?: string): number {
const user = Users.get(userid);
const userIdentity = user ? user.getIdentity() : ` ${userid}`;
const challenges = this.get(userid);
if (!challenges) return 0;
for (const challenge of challenges) {
const otherid = challenge.to === userid ? challenge.from : challenge.to;
const otherUser = Users.get(otherid);
const otherIdentity = otherUser ? otherUser.getIdentity() : ` ${otherid}`;
const otherChallenges = this.get(otherid)!;
const otherIndex = otherChallenges.indexOf(challenge);
if (otherIndex >= 0) otherChallenges.splice(otherIndex, 1);
if (otherChallenges.length === 0) this.delete(otherid);
if (!user && !otherUser) continue;
const header = `|pm|${userIdentity}|${otherIdentity}|`;
let message = `${header}/challenge`;
if (reason) message = `${header}/text Challenge cancelled because ${reason}.\n${message}`;
user?.send(message);
otherUser?.send(message);
}
this.delete(userid);
return challenges.length;
}
getUpdate(challenge: Challenge | null) {
if (!challenge) return `/challenge`;
const teambuilderFormat = challenge.ready ? challenge.ready.formatid : '';
return `/challenge ${challenge.format}|${teambuilderFormat}|${challenge.message}|${challenge.acceptButton}|${challenge.rejectButton}`;
}
update(userid1: ID, userid2: ID) {
const challenge = this.search(userid1, userid2);
userid1 = challenge ? challenge.from : userid1;
userid2 = challenge ? challenge.to : userid2;
this.send(userid1, userid2, this.getUpdate(challenge));
}
send(userid1: ID, userid2: ID, message: string) {
const user1 = Users.get(userid1);
const user2 = Users.get(userid2);
const user1Identity = user1 ? user1.getIdentity() : ` ${userid1}`;
const user2Identity = user2 ? user2.getIdentity() : ` ${userid2}`;
const fullMessage = `|pm|${user1Identity}|${user2Identity}|${message}`;
user1?.send(fullMessage);
user2?.send(fullMessage);
}
updateFor(connection: Connection | User) {
const user = connection.user;
const challenges = this.get(user.id);
if (!challenges) return;
const userIdentity = user.getIdentity();
let messages = '';
for (const challenge of challenges) {
let fromIdentity, toIdentity;
if (challenge.from === user.id) {
fromIdentity = userIdentity;
const toUser = Users.get(challenge.to);
toIdentity = toUser ? toUser.getIdentity() : ` ${challenge.to}`;
} else {
const fromUser = Users.get(challenge.from);
fromIdentity = fromUser ? fromUser.getIdentity() : ` ${challenge.from}`;
toIdentity = userIdentity;
}
messages += `|pm|${fromIdentity}|${toIdentity}|${this.getUpdate(challenge)}\n`;
}
connection.send(messages);
}
}
export const challenges = new Challenges();
|
destroy
|
identifier_name
|
ladders-challenges.ts
|
import type {ChallengeType} from './room-battle';
/**
* A bundle of:
- a ID
* - a battle format
* - a valid team for that format
* - misc other preferences for the battle
*
* To start a battle, you need one of these for every player.
*/
export class BattleReady {
readonly userid: ID;
readonly formatid: string;
readonly settings: User['battleSettings'];
readonly rating: number;
readonly challengeType: ChallengeType;
readonly time: number;
constructor(
userid: ID,
formatid: string,
settings: User['battleSettings'],
rating = 0,
challengeType: ChallengeType = 'challenge'
) {
this.userid = userid;
this.formatid = formatid;
this.settings = settings;
this.rating = rating;
this.challengeType = challengeType;
this.time = Date.now();
}
}
export abstract class AbstractChallenge {
from: ID;
to: ID;
ready: BattleReady | null;
format: string;
acceptCommand: string | null;
message: string;
acceptButton: string;
rejectButton: string;
roomid: RoomID;
constructor(from: ID, to: ID, ready: BattleReady | string, options: {
acceptCommand?: string, rejectCommand?: string, roomid?: RoomID,
message?: string, acceptButton?: string, rejectButton?: string,
} = {}) {
this.from = from;
this.to = to;
this.ready = typeof ready === 'string' ? null : ready;
this.format = typeof ready === 'string' ? ready : ready.formatid;
this.acceptCommand = options.acceptCommand || null;
this.message = options.message || '';
this.roomid = options.roomid || '';
this.acceptButton = options.acceptButton || '';
this.rejectButton = options.rejectButton || '';
}
destroy(accepted?: boolean) {}
}
/**
* As a regular battle challenge, acceptCommand will be null, but you
* can set acceptCommand to use this for custom requests wanting a
* team for something.
*/
export class BattleChallenge extends AbstractChallenge {
declare ready: BattleReady;
declare acceptCommand: string | null;
}
export class GameChallenge extends AbstractChallenge {
declare ready: null;
declare acceptCommand: string;
}
/**
* Invites for `/importinputlog` (`ready: null`) or 4-player battles
* (`ready: BattleReady`)
*/
export class BattleInvite extends AbstractChallenge {
declare acceptCommand: string;
destroy(accepted?: boolean) {
if (accepted) return;
const room = Rooms.get(this.roomid);
if (!room) return; // room expired?
const battle = room.battle!;
let invitesFull = true;
for (const player of battle.players) {
if (!player.invite && !player.id) invitesFull = false;
if (player.invite === this.to) player.invite = '';
}
if (invitesFull) battle.sendInviteForm(true);
}
}
/**
* The defining difference between a BattleChallenge and a GameChallenge is
* that a BattleChallenge has a Ready (and is for a RoomBattle format) and
* a GameChallenge doesn't (and is for a RoomGame).
*
* But remember that both can have a custom acceptCommand.
*/
export type Challenge = BattleChallenge | GameChallenge;
/**
* Lists outgoing and incoming challenges for each user ID.
*/
export class Challenges extends Map<ID, Challenge[]> {
getOrCreate(userid: ID): Challenge[] {
let challenges = this.get(userid);
if (challenges) return challenges;
challenges = [];
this.set(userid, challenges);
return challenges;
}
/** Throws Chat.ErrorMessage if a challenge between these users is already in the table */
add(challenge: Challenge): true {
const oldChallenge = this.search(challenge.to, challenge.from);
if (oldChallenge) {
throw new Chat.ErrorMessage(`There is already a challenge (${challenge.format}) between ${challenge.to} and ${challenge.from}!`);
|
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
to.push(challenge);
from.push(challenge);
this.update(challenge.to, challenge.from);
return true;
}
/** Returns false if the challenge isn't in the table */
remove(challenge: Challenge, accepted?: boolean): boolean {
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
const toIndex = to.indexOf(challenge);
let success = false;
if (toIndex >= 0) {
to.splice(toIndex, 1);
if (!to.length) this.delete(challenge.to);
success = true;
}
const fromIndex = from.indexOf(challenge);
if (fromIndex >= 0) {
from.splice(fromIndex, 1);
if (!from.length) this.delete(challenge.from);
}
if (success) {
this.update(challenge.to, challenge.from);
challenge.destroy(accepted);
}
return success;
}
search(userid1: ID, userid2: ID): Challenge | null {
const challenges = this.get(userid1);
if (!challenges) return null;
for (const challenge of challenges) {
if (
(challenge.to === userid1 && challenge.from === userid2) ||
(challenge.to === userid2 && challenge.from === userid1)
) {
return challenge;
}
}
return null;
}
searchByRoom(userid: ID, roomid: RoomID) {
const challenges = this.get(userid);
if (!challenges) return null;
for (const challenge of challenges) {
if (challenge.roomid === roomid) return challenge;
}
return null;
}
/**
* Try to accept a custom challenge, throwing `Chat.ErrorMessage` on failure,
* and returning the user the challenge was from on a success.
*/
resolveAcceptCommand(context: Chat.CommandContext) {
const targetid = context.target as ID;
const chall = this.search(context.user.id, targetid);
if (!chall || chall.to !== context.user.id || chall.acceptCommand !== context.message) {
throw new Chat.ErrorMessage(`Challenge not found. You are using the wrong command. Challenges should be accepted with /accept`);
}
return chall;
}
accept(context: Chat.CommandContext) {
const chall = this.resolveAcceptCommand(context);
this.remove(chall, true);
const fromUser = Users.get(chall.from);
if (!fromUser) throw new Chat.ErrorMessage(`User "${chall.from}" is not available right now.`);
return fromUser;
}
clearFor(userid: ID, reason?: string): number {
const user = Users.get(userid);
const userIdentity = user ? user.getIdentity() : ` ${userid}`;
const challenges = this.get(userid);
if (!challenges) return 0;
for (const challenge of challenges) {
const otherid = challenge.to === userid ? challenge.from : challenge.to;
const otherUser = Users.get(otherid);
const otherIdentity = otherUser ? otherUser.getIdentity() : ` ${otherid}`;
const otherChallenges = this.get(otherid)!;
const otherIndex = otherChallenges.indexOf(challenge);
if (otherIndex >= 0) otherChallenges.splice(otherIndex, 1);
if (otherChallenges.length === 0) this.delete(otherid);
if (!user && !otherUser) continue;
const header = `|pm|${userIdentity}|${otherIdentity}|`;
let message = `${header}/challenge`;
if (reason) message = `${header}/text Challenge cancelled because ${reason}.\n${message}`;
user?.send(message);
otherUser?.send(message);
}
this.delete(userid);
return challenges.length;
}
getUpdate(challenge: Challenge | null) {
if (!challenge) return `/challenge`;
const teambuilderFormat = challenge.ready ? challenge.ready.formatid : '';
return `/challenge ${challenge.format}|${teambuilderFormat}|${challenge.message}|${challenge.acceptButton}|${challenge.rejectButton}`;
}
update(userid1: ID, userid2: ID) {
const challenge = this.search(userid1, userid2);
userid1 = challenge ? challenge.from : userid1;
userid2 = challenge ? challenge.to : userid2;
this.send(userid1, userid2, this.getUpdate(challenge));
}
send(userid1: ID, userid2: ID, message: string) {
const user1 = Users.get(userid1);
const user2 = Users.get(userid2);
const user1Identity = user1 ? user1.getIdentity() : ` ${userid1}`;
const user2Identity = user2 ? user2.getIdentity() : ` ${userid2}`;
const fullMessage = `|pm|${user1Identity}|${user2Identity}|${message}`;
user1?.send(fullMessage);
user2?.send(fullMessage);
}
updateFor(connection: Connection | User) {
const user = connection.user;
const challenges = this.get(user.id);
if (!challenges) return;
const userIdentity = user.getIdentity();
let messages = '';
for (const challenge of challenges) {
let fromIdentity, toIdentity;
if (challenge.from === user.id) {
fromIdentity = userIdentity;
const toUser = Users.get(challenge.to);
toIdentity = toUser ? toUser.getIdentity() : ` ${challenge.to}`;
} else {
const fromUser = Users.get(challenge.from);
fromIdentity = fromUser ? fromUser.getIdentity() : ` ${challenge.from}`;
toIdentity = userIdentity;
}
messages += `|pm|${fromIdentity}|${toIdentity}|${this.getUpdate(challenge)}\n`;
}
connection.send(messages);
}
}
export const challenges = new Challenges();
|
}
|
random_line_split
|
ladders-challenges.ts
|
import type {ChallengeType} from './room-battle';
/**
* A bundle of:
- a ID
* - a battle format
* - a valid team for that format
* - misc other preferences for the battle
*
* To start a battle, you need one of these for every player.
*/
export class BattleReady {
readonly userid: ID;
readonly formatid: string;
readonly settings: User['battleSettings'];
readonly rating: number;
readonly challengeType: ChallengeType;
readonly time: number;
constructor(
userid: ID,
formatid: string,
settings: User['battleSettings'],
rating = 0,
challengeType: ChallengeType = 'challenge'
) {
this.userid = userid;
this.formatid = formatid;
this.settings = settings;
this.rating = rating;
this.challengeType = challengeType;
this.time = Date.now();
}
}
export abstract class AbstractChallenge {
from: ID;
to: ID;
ready: BattleReady | null;
format: string;
acceptCommand: string | null;
message: string;
acceptButton: string;
rejectButton: string;
roomid: RoomID;
constructor(from: ID, to: ID, ready: BattleReady | string, options: {
acceptCommand?: string, rejectCommand?: string, roomid?: RoomID,
message?: string, acceptButton?: string, rejectButton?: string,
} = {}) {
this.from = from;
this.to = to;
this.ready = typeof ready === 'string' ? null : ready;
this.format = typeof ready === 'string' ? ready : ready.formatid;
this.acceptCommand = options.acceptCommand || null;
this.message = options.message || '';
this.roomid = options.roomid || '';
this.acceptButton = options.acceptButton || '';
this.rejectButton = options.rejectButton || '';
}
destroy(accepted?: boolean) {}
}
/**
* As a regular battle challenge, acceptCommand will be null, but you
* can set acceptCommand to use this for custom requests wanting a
* team for something.
*/
export class BattleChallenge extends AbstractChallenge {
declare ready: BattleReady;
declare acceptCommand: string | null;
}
export class GameChallenge extends AbstractChallenge {
declare ready: null;
declare acceptCommand: string;
}
/**
* Invites for `/importinputlog` (`ready: null`) or 4-player battles
* (`ready: BattleReady`)
*/
export class BattleInvite extends AbstractChallenge {
declare acceptCommand: string;
destroy(accepted?: boolean) {
if (accepted) return;
const room = Rooms.get(this.roomid);
if (!room) return; // room expired?
const battle = room.battle!;
let invitesFull = true;
for (const player of battle.players) {
if (!player.invite && !player.id) invitesFull = false;
if (player.invite === this.to) player.invite = '';
}
if (invitesFull) battle.sendInviteForm(true);
}
}
/**
* The defining difference between a BattleChallenge and a GameChallenge is
* that a BattleChallenge has a Ready (and is for a RoomBattle format) and
* a GameChallenge doesn't (and is for a RoomGame).
*
* But remember that both can have a custom acceptCommand.
*/
export type Challenge = BattleChallenge | GameChallenge;
/**
* Lists outgoing and incoming challenges for each user ID.
*/
export class Challenges extends Map<ID, Challenge[]> {
getOrCreate(userid: ID): Challenge[] {
let challenges = this.get(userid);
if (challenges) return challenges;
challenges = [];
this.set(userid, challenges);
return challenges;
}
/** Throws Chat.ErrorMessage if a challenge between these users is already in the table */
add(challenge: Challenge): true {
const oldChallenge = this.search(challenge.to, challenge.from);
if (oldChallenge) {
throw new Chat.ErrorMessage(`There is already a challenge (${challenge.format}) between ${challenge.to} and ${challenge.from}!`);
}
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
to.push(challenge);
from.push(challenge);
this.update(challenge.to, challenge.from);
return true;
}
/** Returns false if the challenge isn't in the table */
remove(challenge: Challenge, accepted?: boolean): boolean {
const to = this.getOrCreate(challenge.to);
const from = this.getOrCreate(challenge.from);
const toIndex = to.indexOf(challenge);
let success = false;
if (toIndex >= 0) {
to.splice(toIndex, 1);
if (!to.length) this.delete(challenge.to);
success = true;
}
const fromIndex = from.indexOf(challenge);
if (fromIndex >= 0) {
from.splice(fromIndex, 1);
if (!from.length) this.delete(challenge.from);
}
if (success) {
this.update(challenge.to, challenge.from);
challenge.destroy(accepted);
}
return success;
}
search(userid1: ID, userid2: ID): Challenge | null
|
searchByRoom(userid: ID, roomid: RoomID) {
const challenges = this.get(userid);
if (!challenges) return null;
for (const challenge of challenges) {
if (challenge.roomid === roomid) return challenge;
}
return null;
}
/**
* Try to accept a custom challenge, throwing `Chat.ErrorMessage` on failure,
* and returning the user the challenge was from on a success.
*/
resolveAcceptCommand(context: Chat.CommandContext) {
const targetid = context.target as ID;
const chall = this.search(context.user.id, targetid);
if (!chall || chall.to !== context.user.id || chall.acceptCommand !== context.message) {
throw new Chat.ErrorMessage(`Challenge not found. You are using the wrong command. Challenges should be accepted with /accept`);
}
return chall;
}
accept(context: Chat.CommandContext) {
const chall = this.resolveAcceptCommand(context);
this.remove(chall, true);
const fromUser = Users.get(chall.from);
if (!fromUser) throw new Chat.ErrorMessage(`User "${chall.from}" is not available right now.`);
return fromUser;
}
clearFor(userid: ID, reason?: string): number {
const user = Users.get(userid);
const userIdentity = user ? user.getIdentity() : ` ${userid}`;
const challenges = this.get(userid);
if (!challenges) return 0;
for (const challenge of challenges) {
const otherid = challenge.to === userid ? challenge.from : challenge.to;
const otherUser = Users.get(otherid);
const otherIdentity = otherUser ? otherUser.getIdentity() : ` ${otherid}`;
const otherChallenges = this.get(otherid)!;
const otherIndex = otherChallenges.indexOf(challenge);
if (otherIndex >= 0) otherChallenges.splice(otherIndex, 1);
if (otherChallenges.length === 0) this.delete(otherid);
if (!user && !otherUser) continue;
const header = `|pm|${userIdentity}|${otherIdentity}|`;
let message = `${header}/challenge`;
if (reason) message = `${header}/text Challenge cancelled because ${reason}.\n${message}`;
user?.send(message);
otherUser?.send(message);
}
this.delete(userid);
return challenges.length;
}
getUpdate(challenge: Challenge | null) {
if (!challenge) return `/challenge`;
const teambuilderFormat = challenge.ready ? challenge.ready.formatid : '';
return `/challenge ${challenge.format}|${teambuilderFormat}|${challenge.message}|${challenge.acceptButton}|${challenge.rejectButton}`;
}
update(userid1: ID, userid2: ID) {
const challenge = this.search(userid1, userid2);
userid1 = challenge ? challenge.from : userid1;
userid2 = challenge ? challenge.to : userid2;
this.send(userid1, userid2, this.getUpdate(challenge));
}
send(userid1: ID, userid2: ID, message: string) {
const user1 = Users.get(userid1);
const user2 = Users.get(userid2);
const user1Identity = user1 ? user1.getIdentity() : ` ${userid1}`;
const user2Identity = user2 ? user2.getIdentity() : ` ${userid2}`;
const fullMessage = `|pm|${user1Identity}|${user2Identity}|${message}`;
user1?.send(fullMessage);
user2?.send(fullMessage);
}
updateFor(connection: Connection | User) {
const user = connection.user;
const challenges = this.get(user.id);
if (!challenges) return;
const userIdentity = user.getIdentity();
let messages = '';
for (const challenge of challenges) {
let fromIdentity, toIdentity;
if (challenge.from === user.id) {
fromIdentity = userIdentity;
const toUser = Users.get(challenge.to);
toIdentity = toUser ? toUser.getIdentity() : ` ${challenge.to}`;
} else {
const fromUser = Users.get(challenge.from);
fromIdentity = fromUser ? fromUser.getIdentity() : ` ${challenge.from}`;
toIdentity = userIdentity;
}
messages += `|pm|${fromIdentity}|${toIdentity}|${this.getUpdate(challenge)}\n`;
}
connection.send(messages);
}
}
export const challenges = new Challenges();
|
{
const challenges = this.get(userid1);
if (!challenges) return null;
for (const challenge of challenges) {
if (
(challenge.to === userid1 && challenge.from === userid2) ||
(challenge.to === userid2 && challenge.from === userid1)
) {
return challenge;
}
}
return null;
}
|
identifier_body
|
select-demo.ts
|
import {Component} from '@angular/core';
import {FormControl} from '@angular/forms';
import {MdSelectChange} from '@angular/material';
@Component({
moduleId: module.id,
selector: 'select-demo',
templateUrl: 'select-demo.html',
styleUrls: ['select-demo.css'],
})
export class SelectDemo {
drinksRequired = false;
pokemonRequired = false;
drinksDisabled = false;
pokemonDisabled = false;
showSelect = false;
currentDrink: string;
currentPokemon: string[];
latestChangeEvent: MdSelectChange;
floatPlaceholder: string = 'auto';
foodControl = new FormControl('pizza-1');
drinksTheme = 'primary';
pokemonTheme = 'primary';
foods = [
{value: null, viewValue: 'None'},
{value: 'steak-0', viewValue: 'Steak'},
{value: 'pizza-1', viewValue: 'Pizza'},
{value: 'tacos-2', viewValue: 'Tacos'}
];
drinks = [
{value: 'coke-0', viewValue: 'Coke'},
{value: 'long-name-1', viewValue: 'Decaf Chocolate Brownie Vanilla Gingerbread Frappuccino'},
|
{value: 'coffee-4', viewValue: 'Coffee'},
{value: 'tea-5', viewValue: 'Tea'},
{value: 'juice-6', viewValue: 'Orange juice'},
{value: 'wine-7', viewValue: 'Wine'},
{value: 'milk-8', viewValue: 'Milk'},
];
pokemon = [
{value: 'bulbasaur-0', viewValue: 'Bulbasaur'},
{value: 'charizard-1', viewValue: 'Charizard'},
{value: 'squirtle-2', viewValue: 'Squirtle'},
{value: 'pikachu-3', viewValue: 'Pikachu'},
{value: 'eevee-4', viewValue: 'Eevee'},
{value: 'ditto-5', viewValue: 'Ditto'},
{value: 'psyduck-6', viewValue: 'Psyduck'},
];
availableThemes = [
{value: 'primary', name: 'Primary' },
{value: 'accent', name: 'Accent' },
{value: 'warn', name: 'Warn' }
];
toggleDisabled() {
this.foodControl.enabled ? this.foodControl.disable() : this.foodControl.enable();
}
setPokemonValue() {
this.currentPokemon = ['eevee-4', 'psyduck-6'];
}
}
|
{value: 'water-2', viewValue: 'Water'},
{value: 'pepper-3', viewValue: 'Dr. Pepper'},
|
random_line_split
|
select-demo.ts
|
import {Component} from '@angular/core';
import {FormControl} from '@angular/forms';
import {MdSelectChange} from '@angular/material';
@Component({
moduleId: module.id,
selector: 'select-demo',
templateUrl: 'select-demo.html',
styleUrls: ['select-demo.css'],
})
export class
|
{
drinksRequired = false;
pokemonRequired = false;
drinksDisabled = false;
pokemonDisabled = false;
showSelect = false;
currentDrink: string;
currentPokemon: string[];
latestChangeEvent: MdSelectChange;
floatPlaceholder: string = 'auto';
foodControl = new FormControl('pizza-1');
drinksTheme = 'primary';
pokemonTheme = 'primary';
foods = [
{value: null, viewValue: 'None'},
{value: 'steak-0', viewValue: 'Steak'},
{value: 'pizza-1', viewValue: 'Pizza'},
{value: 'tacos-2', viewValue: 'Tacos'}
];
drinks = [
{value: 'coke-0', viewValue: 'Coke'},
{value: 'long-name-1', viewValue: 'Decaf Chocolate Brownie Vanilla Gingerbread Frappuccino'},
{value: 'water-2', viewValue: 'Water'},
{value: 'pepper-3', viewValue: 'Dr. Pepper'},
{value: 'coffee-4', viewValue: 'Coffee'},
{value: 'tea-5', viewValue: 'Tea'},
{value: 'juice-6', viewValue: 'Orange juice'},
{value: 'wine-7', viewValue: 'Wine'},
{value: 'milk-8', viewValue: 'Milk'},
];
pokemon = [
{value: 'bulbasaur-0', viewValue: 'Bulbasaur'},
{value: 'charizard-1', viewValue: 'Charizard'},
{value: 'squirtle-2', viewValue: 'Squirtle'},
{value: 'pikachu-3', viewValue: 'Pikachu'},
{value: 'eevee-4', viewValue: 'Eevee'},
{value: 'ditto-5', viewValue: 'Ditto'},
{value: 'psyduck-6', viewValue: 'Psyduck'},
];
availableThemes = [
{value: 'primary', name: 'Primary' },
{value: 'accent', name: 'Accent' },
{value: 'warn', name: 'Warn' }
];
toggleDisabled() {
this.foodControl.enabled ? this.foodControl.disable() : this.foodControl.enable();
}
setPokemonValue() {
this.currentPokemon = ['eevee-4', 'psyduck-6'];
}
}
|
SelectDemo
|
identifier_name
|
select-demo.ts
|
import {Component} from '@angular/core';
import {FormControl} from '@angular/forms';
import {MdSelectChange} from '@angular/material';
@Component({
moduleId: module.id,
selector: 'select-demo',
templateUrl: 'select-demo.html',
styleUrls: ['select-demo.css'],
})
export class SelectDemo {
drinksRequired = false;
pokemonRequired = false;
drinksDisabled = false;
pokemonDisabled = false;
showSelect = false;
currentDrink: string;
currentPokemon: string[];
latestChangeEvent: MdSelectChange;
floatPlaceholder: string = 'auto';
foodControl = new FormControl('pizza-1');
drinksTheme = 'primary';
pokemonTheme = 'primary';
foods = [
{value: null, viewValue: 'None'},
{value: 'steak-0', viewValue: 'Steak'},
{value: 'pizza-1', viewValue: 'Pizza'},
{value: 'tacos-2', viewValue: 'Tacos'}
];
drinks = [
{value: 'coke-0', viewValue: 'Coke'},
{value: 'long-name-1', viewValue: 'Decaf Chocolate Brownie Vanilla Gingerbread Frappuccino'},
{value: 'water-2', viewValue: 'Water'},
{value: 'pepper-3', viewValue: 'Dr. Pepper'},
{value: 'coffee-4', viewValue: 'Coffee'},
{value: 'tea-5', viewValue: 'Tea'},
{value: 'juice-6', viewValue: 'Orange juice'},
{value: 'wine-7', viewValue: 'Wine'},
{value: 'milk-8', viewValue: 'Milk'},
];
pokemon = [
{value: 'bulbasaur-0', viewValue: 'Bulbasaur'},
{value: 'charizard-1', viewValue: 'Charizard'},
{value: 'squirtle-2', viewValue: 'Squirtle'},
{value: 'pikachu-3', viewValue: 'Pikachu'},
{value: 'eevee-4', viewValue: 'Eevee'},
{value: 'ditto-5', viewValue: 'Ditto'},
{value: 'psyduck-6', viewValue: 'Psyduck'},
];
availableThemes = [
{value: 'primary', name: 'Primary' },
{value: 'accent', name: 'Accent' },
{value: 'warn', name: 'Warn' }
];
toggleDisabled()
|
setPokemonValue() {
this.currentPokemon = ['eevee-4', 'psyduck-6'];
}
}
|
{
this.foodControl.enabled ? this.foodControl.disable() : this.foodControl.enable();
}
|
identifier_body
|
compare.py
|
#!/usr/bin/env python
import argparse
import json
import csv
import sys
sys.path.append('python')
import plotting
import utils
from opener import opener
parser = argparse.ArgumentParser()
parser.add_argument('-b', action='store_true') # passed on to ROOT when plotting
parser.add_argument('--outdir', required=True)
parser.add_argument('--plotdirs', required=True)
parser.add_argument('--names', required=True)
parser.add_argument('--stats', default='')
parser.add_argument('--no-errors', action='store_true')
parser.add_argument('--plot-performance', action='store_true')
parser.add_argument('--scale-errors')
parser.add_argument('--rebin', type=int)
parser.add_argument('--colors')
parser.add_argument('--linestyles')
parser.add_argument('--datadir', default='data/imgt')
parser.add_argument('--leaves-per-tree')
parser.add_argument('--linewidths')
parser.add_argument('--markersizes')
parser.add_argument('--dont-calculate-mean-info', action='store_true')
parser.add_argument('--normalize', action='store_true')
parser.add_argument('--graphify', action='store_true')
parser.add_argument('--strings-to-ignore') # remove this string from the plot names in each dir (e.g. '-mean-bins') NOTE replaces '_' with '-'
args = parser.parse_args()
if args.strings_to_ignore is not None:
args.strings_to_ignore = args.strings_to_ignore.replace('_', '-')
args.plotdirs = utils.get_arg_list(args.plotdirs)
args.scale_errors = utils.get_arg_list(args.scale_errors)
args.colors = utils.get_arg_list(args.colors, intify=True)
args.linestyles = utils.get_arg_list(args.linestyles, intify=True)
args.names = utils.get_arg_list(args.names)
args.leaves_per_tree = utils.get_arg_list(args.leaves_per_tree, intify=True)
args.strings_to_ignore = utils.get_arg_list(args.strings_to_ignore)
args.markersizes = utils.get_arg_list(args.markersizes, intify=True)
args.linewidths = utils.get_arg_list(args.linewidths, intify=True)
for iname in range(len(args.names)):
|
assert len(args.plotdirs) == len(args.names)
with opener('r')(args.datadir + '/v-meta.json') as json_file: # get location of <begin> cysteine in each v region
args.cyst_positions = json.load(json_file)
with opener('r')(args.datadir + '/j_tryp.csv') as csv_file: # get location of <end> tryptophan in each j region (TGG)
tryp_reader = csv.reader(csv_file)
args.tryp_positions = {row[0]:row[1] for row in tryp_reader} # WARNING: this doesn't filter out the header line
plotting.compare_directories(args)
|
args.names[iname] = args.names[iname].replace('@', ' ')
|
conditional_block
|
compare.py
|
#!/usr/bin/env python
import argparse
import json
import csv
import sys
sys.path.append('python')
|
import utils
from opener import opener
parser = argparse.ArgumentParser()
parser.add_argument('-b', action='store_true') # passed on to ROOT when plotting
parser.add_argument('--outdir', required=True)
parser.add_argument('--plotdirs', required=True)
parser.add_argument('--names', required=True)
parser.add_argument('--stats', default='')
parser.add_argument('--no-errors', action='store_true')
parser.add_argument('--plot-performance', action='store_true')
parser.add_argument('--scale-errors')
parser.add_argument('--rebin', type=int)
parser.add_argument('--colors')
parser.add_argument('--linestyles')
parser.add_argument('--datadir', default='data/imgt')
parser.add_argument('--leaves-per-tree')
parser.add_argument('--linewidths')
parser.add_argument('--markersizes')
parser.add_argument('--dont-calculate-mean-info', action='store_true')
parser.add_argument('--normalize', action='store_true')
parser.add_argument('--graphify', action='store_true')
parser.add_argument('--strings-to-ignore') # remove this string from the plot names in each dir (e.g. '-mean-bins') NOTE replaces '_' with '-'
args = parser.parse_args()
if args.strings_to_ignore is not None:
args.strings_to_ignore = args.strings_to_ignore.replace('_', '-')
args.plotdirs = utils.get_arg_list(args.plotdirs)
args.scale_errors = utils.get_arg_list(args.scale_errors)
args.colors = utils.get_arg_list(args.colors, intify=True)
args.linestyles = utils.get_arg_list(args.linestyles, intify=True)
args.names = utils.get_arg_list(args.names)
args.leaves_per_tree = utils.get_arg_list(args.leaves_per_tree, intify=True)
args.strings_to_ignore = utils.get_arg_list(args.strings_to_ignore)
args.markersizes = utils.get_arg_list(args.markersizes, intify=True)
args.linewidths = utils.get_arg_list(args.linewidths, intify=True)
for iname in range(len(args.names)):
args.names[iname] = args.names[iname].replace('@', ' ')
assert len(args.plotdirs) == len(args.names)
with opener('r')(args.datadir + '/v-meta.json') as json_file: # get location of <begin> cysteine in each v region
args.cyst_positions = json.load(json_file)
with opener('r')(args.datadir + '/j_tryp.csv') as csv_file: # get location of <end> tryptophan in each j region (TGG)
tryp_reader = csv.reader(csv_file)
args.tryp_positions = {row[0]:row[1] for row in tryp_reader} # WARNING: this doesn't filter out the header line
plotting.compare_directories(args)
|
import plotting
|
random_line_split
|
_selinux.py
|
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
# the whole _selinux module itself will be wrapped.
import os
import shutil
import portage
from portage import _encodings
|
import selinux
from selinux import is_selinux_enabled
def copyfile(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
shutil.copyfile(src, dest)
finally:
setfscreate()
def getcontext():
(rc, ctx) = selinux.getcon()
if rc < 0:
raise OSError(_("getcontext: Failed getting current process context."))
return ctx
def mkdir(target, refdir):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.getfilecon(refdir)
if rc < 0:
refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("mkdir: Failed getting context of reference directory \"%s\".") \
% refdir)
setfscreate(ctx)
try:
os.mkdir(target)
finally:
setfscreate()
def rename(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("rename: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
os.rename(src,dest)
finally:
setfscreate()
def settype(newtype):
ret = getcontext().split(":")
ret[2] = newtype
return ":".join(ret)
def setexec(ctx="\n"):
ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
if selinux.setexeccon(ctx) < 0:
ctx = _unicode_decode(ctx, encoding=_encodings['content'],
errors='replace')
if selinux.security_getenforce() == 1:
raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
else:
portage.writemsg("!!! " + \
_("Failed setting exec() context \"%s\".") % ctx, \
noiselevel=-1)
def setfscreate(ctx="\n"):
ctx = _unicode_encode(ctx,
encoding=_encodings['content'], errors='strict')
if selinux.setfscreatecon(ctx) < 0:
ctx = _unicode_decode(ctx,
encoding=_encodings['content'], errors='replace')
raise OSError(
_("setfscreate: Failed setting fs create context \"%s\".") % ctx)
def spawn_wrapper(spawn_func, selinux_type):
selinux_type = _unicode_encode(selinux_type,
encoding=_encodings['content'], errors='strict')
def wrapper_func(*args, **kwargs):
con = settype(selinux_type)
setexec(con)
try:
return spawn_func(*args, **kwargs)
finally:
setexec()
return wrapper_func
def symlink(target, link, reflnk):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(reflnk)
if rc < 0:
reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("symlink: Failed getting context of reference symlink \"%s\".") \
% reflnk)
setfscreate(ctx)
try:
os.symlink(target, link)
finally:
setfscreate()
|
from portage import _unicode_decode
from portage import _unicode_encode
from portage.localization import _
|
random_line_split
|
_selinux.py
|
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
# the whole _selinux module itself will be wrapped.
import os
import shutil
import portage
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.localization import _
import selinux
from selinux import is_selinux_enabled
def copyfile(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
shutil.copyfile(src, dest)
finally:
setfscreate()
def getcontext():
(rc, ctx) = selinux.getcon()
if rc < 0:
raise OSError(_("getcontext: Failed getting current process context."))
return ctx
def mkdir(target, refdir):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.getfilecon(refdir)
if rc < 0:
refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("mkdir: Failed getting context of reference directory \"%s\".") \
% refdir)
setfscreate(ctx)
try:
os.mkdir(target)
finally:
setfscreate()
def rename(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("rename: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
os.rename(src,dest)
finally:
setfscreate()
def settype(newtype):
ret = getcontext().split(":")
ret[2] = newtype
return ":".join(ret)
def setexec(ctx="\n"):
ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
if selinux.setexeccon(ctx) < 0:
ctx = _unicode_decode(ctx, encoding=_encodings['content'],
errors='replace')
if selinux.security_getenforce() == 1:
raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
else:
portage.writemsg("!!! " + \
_("Failed setting exec() context \"%s\".") % ctx, \
noiselevel=-1)
def setfscreate(ctx="\n"):
ctx = _unicode_encode(ctx,
encoding=_encodings['content'], errors='strict')
if selinux.setfscreatecon(ctx) < 0:
ctx = _unicode_decode(ctx,
encoding=_encodings['content'], errors='replace')
raise OSError(
_("setfscreate: Failed setting fs create context \"%s\".") % ctx)
def spawn_wrapper(spawn_func, selinux_type):
|
def symlink(target, link, reflnk):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(reflnk)
if rc < 0:
reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("symlink: Failed getting context of reference symlink \"%s\".") \
% reflnk)
setfscreate(ctx)
try:
os.symlink(target, link)
finally:
setfscreate()
|
selinux_type = _unicode_encode(selinux_type,
encoding=_encodings['content'], errors='strict')
def wrapper_func(*args, **kwargs):
con = settype(selinux_type)
setexec(con)
try:
return spawn_func(*args, **kwargs)
finally:
setexec()
return wrapper_func
|
identifier_body
|
_selinux.py
|
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
# the whole _selinux module itself will be wrapped.
import os
import shutil
import portage
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.localization import _
import selinux
from selinux import is_selinux_enabled
def copyfile(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
shutil.copyfile(src, dest)
finally:
setfscreate()
def getcontext():
(rc, ctx) = selinux.getcon()
if rc < 0:
raise OSError(_("getcontext: Failed getting current process context."))
return ctx
def mkdir(target, refdir):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.getfilecon(refdir)
if rc < 0:
refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("mkdir: Failed getting context of reference directory \"%s\".") \
% refdir)
setfscreate(ctx)
try:
os.mkdir(target)
finally:
setfscreate()
def rename(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("rename: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
os.rename(src,dest)
finally:
setfscreate()
def settype(newtype):
ret = getcontext().split(":")
ret[2] = newtype
return ":".join(ret)
def setexec(ctx="\n"):
ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
if selinux.setexeccon(ctx) < 0:
ctx = _unicode_decode(ctx, encoding=_encodings['content'],
errors='replace')
if selinux.security_getenforce() == 1:
raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
else:
portage.writemsg("!!! " + \
_("Failed setting exec() context \"%s\".") % ctx, \
noiselevel=-1)
def setfscreate(ctx="\n"):
ctx = _unicode_encode(ctx,
encoding=_encodings['content'], errors='strict')
if selinux.setfscreatecon(ctx) < 0:
|
def spawn_wrapper(spawn_func, selinux_type):
selinux_type = _unicode_encode(selinux_type,
encoding=_encodings['content'], errors='strict')
def wrapper_func(*args, **kwargs):
con = settype(selinux_type)
setexec(con)
try:
return spawn_func(*args, **kwargs)
finally:
setexec()
return wrapper_func
def symlink(target, link, reflnk):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(reflnk)
if rc < 0:
reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("symlink: Failed getting context of reference symlink \"%s\".") \
% reflnk)
setfscreate(ctx)
try:
os.symlink(target, link)
finally:
setfscreate()
|
ctx = _unicode_decode(ctx,
encoding=_encodings['content'], errors='replace')
raise OSError(
_("setfscreate: Failed setting fs create context \"%s\".") % ctx)
|
conditional_block
|
_selinux.py
|
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
# the whole _selinux module itself will be wrapped.
import os
import shutil
import portage
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.localization import _
import selinux
from selinux import is_selinux_enabled
def copyfile(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
shutil.copyfile(src, dest)
finally:
setfscreate()
def getcontext():
(rc, ctx) = selinux.getcon()
if rc < 0:
raise OSError(_("getcontext: Failed getting current process context."))
return ctx
def mkdir(target, refdir):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.getfilecon(refdir)
if rc < 0:
refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("mkdir: Failed getting context of reference directory \"%s\".") \
% refdir)
setfscreate(ctx)
try:
os.mkdir(target)
finally:
setfscreate()
def rename(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("rename: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
os.rename(src,dest)
finally:
setfscreate()
def settype(newtype):
ret = getcontext().split(":")
ret[2] = newtype
return ":".join(ret)
def setexec(ctx="\n"):
ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
if selinux.setexeccon(ctx) < 0:
ctx = _unicode_decode(ctx, encoding=_encodings['content'],
errors='replace')
if selinux.security_getenforce() == 1:
raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
else:
portage.writemsg("!!! " + \
_("Failed setting exec() context \"%s\".") % ctx, \
noiselevel=-1)
def setfscreate(ctx="\n"):
ctx = _unicode_encode(ctx,
encoding=_encodings['content'], errors='strict')
if selinux.setfscreatecon(ctx) < 0:
ctx = _unicode_decode(ctx,
encoding=_encodings['content'], errors='replace')
raise OSError(
_("setfscreate: Failed setting fs create context \"%s\".") % ctx)
def spawn_wrapper(spawn_func, selinux_type):
selinux_type = _unicode_encode(selinux_type,
encoding=_encodings['content'], errors='strict')
def wrapper_func(*args, **kwargs):
con = settype(selinux_type)
setexec(con)
try:
return spawn_func(*args, **kwargs)
finally:
setexec()
return wrapper_func
def
|
(target, link, reflnk):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(reflnk)
if rc < 0:
reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("symlink: Failed getting context of reference symlink \"%s\".") \
% reflnk)
setfscreate(ctx)
try:
os.symlink(target, link)
finally:
setfscreate()
|
symlink
|
identifier_name
|
TestServer.ts
|
import { Type } from '@tsed/core';
import { ServerLoader, IServerSettings, OverrideService, ExpressApplication, ServerSettings } from '@tsed/common';
import { ProjectMapper, MutationTestingReportService } from '@stryker-mutator/dashboard-data-access';
import { bootstrap, inject, TestContext } from '@tsed/testing';
import Configuration from '../../src/services/Configuration';
import supertest from 'supertest';
import { SuperTest, Test } from 'supertest';
import DataAccess from '../../src/services/DataAccess';
import GithubRepositoryService from '../../src/services/GithubRepositoryService';
import sinon = require('sinon');
import bodyParser = require('body-parser');
import { Authentication } from '../../src/github/models';
import { createToken } from '../../src/middleware/securityMiddleware';
@OverrideService(Configuration)
class ConfigurationStub implements Configuration {
public static githubClientId: string;
get githubClientId() { return ConfigurationStub.githubClientId; }
public static githubSecret: string;
get githubSecret() { return ConfigurationStub.githubSecret; }
public static baseUrl: string;
get baseUrl() { return ConfigurationStub.baseUrl; }
public static jwtSecret: string;
get jwtSecret() { return ConfigurationStub.jwtSecret; }
public static isDevelopment: boolean;
get isDevelopment() { return ConfigurationStub.isDevelopment; }
}
@OverrideService(DataAccess)
export class DataAccessStub implements DataAccess {
public static repositoryMapper: sinon.SinonStubbedInstance<ProjectMapper>;
public static mutationTestingReportService: sinon.SinonStubbedInstance<MutationTestingReportService>;
public get repositoryMapper(): ProjectMapper {
return DataAccessStub.repositoryMapper as any;
}
public get mutationTestingReportService(): MutationTestingReportService {
return DataAccessStub.mutationTestingReportService as any;
}
}
export async function createAuthToken(user: Authentication) {
const token = await createToken(user, ConfigurationStub.jwtSecret);
return `Bearer ${token}`;
}
@OverrideService(GithubRepositoryService)
export class RepositoryServiceStub {
public static getAllForUser: sinon.SinonStub;
public static getAllForOrganization: sinon.SinonStub;
public static update: sinon.SinonStub;
public get getAllForUser() {
return RepositoryServiceStub.getAllForUser;
}
public get getAllForOrganization() {
return RepositoryServiceStub.getAllForOrganization;
}
public get update() {
return RepositoryServiceStub.update;
}
}
beforeEach(() => {
ConfigurationStub.githubClientId = 'github client id';
ConfigurationStub.githubSecret = 'github secret';
ConfigurationStub.jwtSecret = 'jwt secret';
ConfigurationStub.baseUrl = 'base url';
ConfigurationStub.isDevelopment = true;
DataAccessStub.repositoryMapper = {
createStorageIfNotExists: sinon.stub(),
findAll: sinon.stub(),
insertOrMerge: sinon.stub(),
findOne: sinon.stub(),
insert: sinon.stub(),
replace: sinon.stub()
};
DataAccessStub.mutationTestingReportService = {
createStorageIfNotExists: sinon.stub(),
saveReport: sinon.stub(),
findOne: sinon.stub(),
};
RepositoryServiceStub.getAllForOrganization = sinon.stub();
RepositoryServiceStub.getAllForUser = sinon.stub();
RepositoryServiceStub.update = sinon.stub();
});
afterEach(async () => {
TestContext.reset();
sinon.restore();
});
export default async function testServer<TController>(Controller: Type<TController>, ...middlewares: import('express').RequestHandler[])
: Promise<SuperTest<Test>> {
let request: SuperTest<Test> = null as any;
@ServerSettings({
logger: {
level: 'off' as any
}
})
class TestServer extends ServerLoader {
constructor() {
super();
const resetSettings: Partial<IServerSettings> = {
componentsScan: [],
mount: {}
};
this.setSettings(resetSettings);
this.addComponents([
ConfigurationStub,
DataAccessStub,
RepositoryServiceStub
]);
this.addControllers('/', [Controller]);
}
public $beforeRoutesInit() {
if (middlewares.length)
|
this.use(bodyParser.json());
}
}
await bootstrap(TestServer)();
await inject([ExpressApplication], (app: ExpressApplication) => {
request = supertest(app);
})();
return request;
}
|
{
this.use(...middlewares);
}
|
conditional_block
|
TestServer.ts
|
import { Type } from '@tsed/core';
import { ServerLoader, IServerSettings, OverrideService, ExpressApplication, ServerSettings } from '@tsed/common';
import { ProjectMapper, MutationTestingReportService } from '@stryker-mutator/dashboard-data-access';
import { bootstrap, inject, TestContext } from '@tsed/testing';
import Configuration from '../../src/services/Configuration';
import supertest from 'supertest';
import { SuperTest, Test } from 'supertest';
import DataAccess from '../../src/services/DataAccess';
import GithubRepositoryService from '../../src/services/GithubRepositoryService';
import sinon = require('sinon');
import bodyParser = require('body-parser');
import { Authentication } from '../../src/github/models';
import { createToken } from '../../src/middleware/securityMiddleware';
@OverrideService(Configuration)
class ConfigurationStub implements Configuration {
public static githubClientId: string;
get githubClientId() { return ConfigurationStub.githubClientId; }
public static githubSecret: string;
get githubSecret() { return ConfigurationStub.githubSecret; }
public static baseUrl: string;
get baseUrl() { return ConfigurationStub.baseUrl; }
public static jwtSecret: string;
get jwtSecret() { return ConfigurationStub.jwtSecret; }
public static isDevelopment: boolean;
get isDevelopment() { return ConfigurationStub.isDevelopment; }
}
@OverrideService(DataAccess)
export class DataAccessStub implements DataAccess {
public static repositoryMapper: sinon.SinonStubbedInstance<ProjectMapper>;
public static mutationTestingReportService: sinon.SinonStubbedInstance<MutationTestingReportService>;
public get repositoryMapper(): ProjectMapper {
return DataAccessStub.repositoryMapper as any;
}
public get mutationTestingReportService(): MutationTestingReportService {
return DataAccessStub.mutationTestingReportService as any;
}
}
export async function createAuthToken(user: Authentication) {
const token = await createToken(user, ConfigurationStub.jwtSecret);
return `Bearer ${token}`;
}
@OverrideService(GithubRepositoryService)
export class RepositoryServiceStub {
public static getAllForUser: sinon.SinonStub;
public static getAllForOrganization: sinon.SinonStub;
public static update: sinon.SinonStub;
public get getAllForUser() {
return RepositoryServiceStub.getAllForUser;
|
}
public get getAllForOrganization() {
return RepositoryServiceStub.getAllForOrganization;
}
public get update() {
return RepositoryServiceStub.update;
}
}
beforeEach(() => {
ConfigurationStub.githubClientId = 'github client id';
ConfigurationStub.githubSecret = 'github secret';
ConfigurationStub.jwtSecret = 'jwt secret';
ConfigurationStub.baseUrl = 'base url';
ConfigurationStub.isDevelopment = true;
DataAccessStub.repositoryMapper = {
createStorageIfNotExists: sinon.stub(),
findAll: sinon.stub(),
insertOrMerge: sinon.stub(),
findOne: sinon.stub(),
insert: sinon.stub(),
replace: sinon.stub()
};
DataAccessStub.mutationTestingReportService = {
createStorageIfNotExists: sinon.stub(),
saveReport: sinon.stub(),
findOne: sinon.stub(),
};
RepositoryServiceStub.getAllForOrganization = sinon.stub();
RepositoryServiceStub.getAllForUser = sinon.stub();
RepositoryServiceStub.update = sinon.stub();
});
afterEach(async () => {
TestContext.reset();
sinon.restore();
});
export default async function testServer<TController>(Controller: Type<TController>, ...middlewares: import('express').RequestHandler[])
: Promise<SuperTest<Test>> {
let request: SuperTest<Test> = null as any;
@ServerSettings({
logger: {
level: 'off' as any
}
})
class TestServer extends ServerLoader {
constructor() {
super();
const resetSettings: Partial<IServerSettings> = {
componentsScan: [],
mount: {}
};
this.setSettings(resetSettings);
this.addComponents([
ConfigurationStub,
DataAccessStub,
RepositoryServiceStub
]);
this.addControllers('/', [Controller]);
}
public $beforeRoutesInit() {
if (middlewares.length) {
this.use(...middlewares);
}
this.use(bodyParser.json());
}
}
await bootstrap(TestServer)();
await inject([ExpressApplication], (app: ExpressApplication) => {
request = supertest(app);
})();
return request;
}
|
random_line_split
|
|
TestServer.ts
|
import { Type } from '@tsed/core';
import { ServerLoader, IServerSettings, OverrideService, ExpressApplication, ServerSettings } from '@tsed/common';
import { ProjectMapper, MutationTestingReportService } from '@stryker-mutator/dashboard-data-access';
import { bootstrap, inject, TestContext } from '@tsed/testing';
import Configuration from '../../src/services/Configuration';
import supertest from 'supertest';
import { SuperTest, Test } from 'supertest';
import DataAccess from '../../src/services/DataAccess';
import GithubRepositoryService from '../../src/services/GithubRepositoryService';
import sinon = require('sinon');
import bodyParser = require('body-parser');
import { Authentication } from '../../src/github/models';
import { createToken } from '../../src/middleware/securityMiddleware';
@OverrideService(Configuration)
class ConfigurationStub implements Configuration {
public static githubClientId: string;
get githubClientId() { return ConfigurationStub.githubClientId; }
public static githubSecret: string;
get githubSecret() { return ConfigurationStub.githubSecret; }
public static baseUrl: string;
get baseUrl() { return ConfigurationStub.baseUrl; }
public static jwtSecret: string;
get jwtSecret() { return ConfigurationStub.jwtSecret; }
public static isDevelopment: boolean;
get isDevelopment() { return ConfigurationStub.isDevelopment; }
}
@OverrideService(DataAccess)
export class DataAccessStub implements DataAccess {
public static repositoryMapper: sinon.SinonStubbedInstance<ProjectMapper>;
public static mutationTestingReportService: sinon.SinonStubbedInstance<MutationTestingReportService>;
public get repositoryMapper(): ProjectMapper {
return DataAccessStub.repositoryMapper as any;
}
public get mutationTestingReportService(): MutationTestingReportService {
return DataAccessStub.mutationTestingReportService as any;
}
}
export async function createAuthToken(user: Authentication)
|
@OverrideService(GithubRepositoryService)
export class RepositoryServiceStub {
public static getAllForUser: sinon.SinonStub;
public static getAllForOrganization: sinon.SinonStub;
public static update: sinon.SinonStub;
public get getAllForUser() {
return RepositoryServiceStub.getAllForUser;
}
public get getAllForOrganization() {
return RepositoryServiceStub.getAllForOrganization;
}
public get update() {
return RepositoryServiceStub.update;
}
}
beforeEach(() => {
ConfigurationStub.githubClientId = 'github client id';
ConfigurationStub.githubSecret = 'github secret';
ConfigurationStub.jwtSecret = 'jwt secret';
ConfigurationStub.baseUrl = 'base url';
ConfigurationStub.isDevelopment = true;
DataAccessStub.repositoryMapper = {
createStorageIfNotExists: sinon.stub(),
findAll: sinon.stub(),
insertOrMerge: sinon.stub(),
findOne: sinon.stub(),
insert: sinon.stub(),
replace: sinon.stub()
};
DataAccessStub.mutationTestingReportService = {
createStorageIfNotExists: sinon.stub(),
saveReport: sinon.stub(),
findOne: sinon.stub(),
};
RepositoryServiceStub.getAllForOrganization = sinon.stub();
RepositoryServiceStub.getAllForUser = sinon.stub();
RepositoryServiceStub.update = sinon.stub();
});
afterEach(async () => {
TestContext.reset();
sinon.restore();
});
export default async function testServer<TController>(Controller: Type<TController>, ...middlewares: import('express').RequestHandler[])
: Promise<SuperTest<Test>> {
let request: SuperTest<Test> = null as any;
@ServerSettings({
logger: {
level: 'off' as any
}
})
class TestServer extends ServerLoader {
constructor() {
super();
const resetSettings: Partial<IServerSettings> = {
componentsScan: [],
mount: {}
};
this.setSettings(resetSettings);
this.addComponents([
ConfigurationStub,
DataAccessStub,
RepositoryServiceStub
]);
this.addControllers('/', [Controller]);
}
public $beforeRoutesInit() {
if (middlewares.length) {
this.use(...middlewares);
}
this.use(bodyParser.json());
}
}
await bootstrap(TestServer)();
await inject([ExpressApplication], (app: ExpressApplication) => {
request = supertest(app);
})();
return request;
}
|
{
const token = await createToken(user, ConfigurationStub.jwtSecret);
return `Bearer ${token}`;
}
|
identifier_body
|
TestServer.ts
|
import { Type } from '@tsed/core';
import { ServerLoader, IServerSettings, OverrideService, ExpressApplication, ServerSettings } from '@tsed/common';
import { ProjectMapper, MutationTestingReportService } from '@stryker-mutator/dashboard-data-access';
import { bootstrap, inject, TestContext } from '@tsed/testing';
import Configuration from '../../src/services/Configuration';
import supertest from 'supertest';
import { SuperTest, Test } from 'supertest';
import DataAccess from '../../src/services/DataAccess';
import GithubRepositoryService from '../../src/services/GithubRepositoryService';
import sinon = require('sinon');
import bodyParser = require('body-parser');
import { Authentication } from '../../src/github/models';
import { createToken } from '../../src/middleware/securityMiddleware';
@OverrideService(Configuration)
class ConfigurationStub implements Configuration {
public static githubClientId: string;
get githubClientId() { return ConfigurationStub.githubClientId; }
public static githubSecret: string;
get githubSecret() { return ConfigurationStub.githubSecret; }
public static baseUrl: string;
get baseUrl() { return ConfigurationStub.baseUrl; }
public static jwtSecret: string;
get jwtSecret() { return ConfigurationStub.jwtSecret; }
public static isDevelopment: boolean;
get isDevelopment() { return ConfigurationStub.isDevelopment; }
}
@OverrideService(DataAccess)
export class DataAccessStub implements DataAccess {
public static repositoryMapper: sinon.SinonStubbedInstance<ProjectMapper>;
public static mutationTestingReportService: sinon.SinonStubbedInstance<MutationTestingReportService>;
public get repositoryMapper(): ProjectMapper {
return DataAccessStub.repositoryMapper as any;
}
public get mutationTestingReportService(): MutationTestingReportService {
return DataAccessStub.mutationTestingReportService as any;
}
}
export async function createAuthToken(user: Authentication) {
const token = await createToken(user, ConfigurationStub.jwtSecret);
return `Bearer ${token}`;
}
@OverrideService(GithubRepositoryService)
export class RepositoryServiceStub {
public static getAllForUser: sinon.SinonStub;
public static getAllForOrganization: sinon.SinonStub;
public static update: sinon.SinonStub;
public get getAllForUser() {
return RepositoryServiceStub.getAllForUser;
}
public get getAllForOrganization() {
return RepositoryServiceStub.getAllForOrganization;
}
public get update() {
return RepositoryServiceStub.update;
}
}
beforeEach(() => {
ConfigurationStub.githubClientId = 'github client id';
ConfigurationStub.githubSecret = 'github secret';
ConfigurationStub.jwtSecret = 'jwt secret';
ConfigurationStub.baseUrl = 'base url';
ConfigurationStub.isDevelopment = true;
DataAccessStub.repositoryMapper = {
createStorageIfNotExists: sinon.stub(),
findAll: sinon.stub(),
insertOrMerge: sinon.stub(),
findOne: sinon.stub(),
insert: sinon.stub(),
replace: sinon.stub()
};
DataAccessStub.mutationTestingReportService = {
createStorageIfNotExists: sinon.stub(),
saveReport: sinon.stub(),
findOne: sinon.stub(),
};
RepositoryServiceStub.getAllForOrganization = sinon.stub();
RepositoryServiceStub.getAllForUser = sinon.stub();
RepositoryServiceStub.update = sinon.stub();
});
afterEach(async () => {
TestContext.reset();
sinon.restore();
});
export default async function testServer<TController>(Controller: Type<TController>, ...middlewares: import('express').RequestHandler[])
: Promise<SuperTest<Test>> {
let request: SuperTest<Test> = null as any;
@ServerSettings({
logger: {
level: 'off' as any
}
})
class TestServer extends ServerLoader {
constructor() {
super();
const resetSettings: Partial<IServerSettings> = {
componentsScan: [],
mount: {}
};
this.setSettings(resetSettings);
this.addComponents([
ConfigurationStub,
DataAccessStub,
RepositoryServiceStub
]);
this.addControllers('/', [Controller]);
}
public
|
() {
if (middlewares.length) {
this.use(...middlewares);
}
this.use(bodyParser.json());
}
}
await bootstrap(TestServer)();
await inject([ExpressApplication], (app: ExpressApplication) => {
request = supertest(app);
})();
return request;
}
|
$beforeRoutesInit
|
identifier_name
|
main.rs
|
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
mod airport_data;
mod api;
mod path;
use airport_data::our_airports::OurAirports;
|
use anyhow::{Context, Result};
use rocket::config::{Config, Environment};
use rocket_contrib::serve::StaticFiles;
#[rocket::main]
async fn main() -> Result<()> {
let config = {
let env = Environment::active().context("failed to get Rocket config")?;
Config::build(env)
.workers(1)
.finalize()
.context("failed to build Rocket config")?
};
let mut airports_source = OurAirports::init().context("failed to init OurAirports data")?;
if airports_source.is_up_to_date() {
println!("loading OurAirports data..");
} else {
println!("updating OurAirports data..");
airports_source.update().context("update failed")?;
};
let airports = airports_source
.load()
.context("failed to load OurAirports data")?;
println!("finished loading OurAirports data");
rocket::custom(config)
.manage(airports)
.mount("/", StaticFiles::from("frontend/public/"))
.mount("/api", routes![api::search_routes::search_routes])
.launch()
.await
.context("failed to initialize Rocket")
}
|
use airport_data::AirportData;
|
random_line_split
|
main.rs
|
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
mod airport_data;
mod api;
mod path;
use airport_data::our_airports::OurAirports;
use airport_data::AirportData;
use anyhow::{Context, Result};
use rocket::config::{Config, Environment};
use rocket_contrib::serve::StaticFiles;
#[rocket::main]
async fn
|
() -> Result<()> {
let config = {
let env = Environment::active().context("failed to get Rocket config")?;
Config::build(env)
.workers(1)
.finalize()
.context("failed to build Rocket config")?
};
let mut airports_source = OurAirports::init().context("failed to init OurAirports data")?;
if airports_source.is_up_to_date() {
println!("loading OurAirports data..");
} else {
println!("updating OurAirports data..");
airports_source.update().context("update failed")?;
};
let airports = airports_source
.load()
.context("failed to load OurAirports data")?;
println!("finished loading OurAirports data");
rocket::custom(config)
.manage(airports)
.mount("/", StaticFiles::from("frontend/public/"))
.mount("/api", routes![api::search_routes::search_routes])
.launch()
.await
.context("failed to initialize Rocket")
}
|
main
|
identifier_name
|
main.rs
|
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
mod airport_data;
mod api;
mod path;
use airport_data::our_airports::OurAirports;
use airport_data::AirportData;
use anyhow::{Context, Result};
use rocket::config::{Config, Environment};
use rocket_contrib::serve::StaticFiles;
#[rocket::main]
async fn main() -> Result<()>
|
{
let config = {
let env = Environment::active().context("failed to get Rocket config")?;
Config::build(env)
.workers(1)
.finalize()
.context("failed to build Rocket config")?
};
let mut airports_source = OurAirports::init().context("failed to init OurAirports data")?;
if airports_source.is_up_to_date() {
println!("loading OurAirports data..");
} else {
println!("updating OurAirports data..");
airports_source.update().context("update failed")?;
};
let airports = airports_source
.load()
.context("failed to load OurAirports data")?;
println!("finished loading OurAirports data");
rocket::custom(config)
.manage(airports)
.mount("/", StaticFiles::from("frontend/public/"))
.mount("/api", routes![api::search_routes::search_routes])
.launch()
.await
.context("failed to initialize Rocket")
}
|
identifier_body
|
|
main.rs
|
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
mod airport_data;
mod api;
mod path;
use airport_data::our_airports::OurAirports;
use airport_data::AirportData;
use anyhow::{Context, Result};
use rocket::config::{Config, Environment};
use rocket_contrib::serve::StaticFiles;
#[rocket::main]
async fn main() -> Result<()> {
let config = {
let env = Environment::active().context("failed to get Rocket config")?;
Config::build(env)
.workers(1)
.finalize()
.context("failed to build Rocket config")?
};
let mut airports_source = OurAirports::init().context("failed to init OurAirports data")?;
if airports_source.is_up_to_date() {
println!("loading OurAirports data..");
} else
|
;
let airports = airports_source
.load()
.context("failed to load OurAirports data")?;
println!("finished loading OurAirports data");
rocket::custom(config)
.manage(airports)
.mount("/", StaticFiles::from("frontend/public/"))
.mount("/api", routes![api::search_routes::search_routes])
.launch()
.await
.context("failed to initialize Rocket")
}
|
{
println!("updating OurAirports data..");
airports_source.update().context("update failed")?;
}
|
conditional_block
|
smoke_test.py
|
#
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
import os
import threading
import time
import psutil
import requests
from tempdir import TempDir
from mock import patch
from pixelated.client.dispatcher_api_client import PixelatedDispatcherClient
from pixelated.proxy import DispatcherProxy
from pixelated.manager import DispatcherManager, SSLConfig, DEFAULT_PORT
from pixelated.test.util import EnforceTLSv1Adapter, cafile, certfile, keyfile
__author__ = 'fbernitt'
INHERIT = None
class SmokeTest(unittest.TestCase):
__slots__ = ('_run_method', '_shutdown_method', '_thread_name', '_thread')
class
|
(object):
def __init__(self, run_method, shutdown_method, thread_name=None):
self._run_method = run_method
self._shutdown_method = shutdown_method
self._thread_name = thread_name
self._thread = None
def _start_server(self):
self._thread = threading.Thread(target=self._run_method)
self._thread.setDaemon(True)
if self._thread_name:
self._thread.setName(self._thread_name)
self._thread.start()
def __enter__(self):
self._start_server()
time.sleep(0.3) # let server start
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._shutdown_method()
self._thread.join()
self._kill_subprocesses()
def _kill_subprocesses(self):
for child in psutil.Process(os.getpid()).children():
try:
p = psutil.Process(child.pid)
p.kill()
except psutil.Error:
pass
def setUp(self):
self._tmpdir = TempDir()
self.ssl_request = requests.Session()
self.ssl_request.mount('https://', EnforceTLSv1Adapter())
def tearDown(self):
self._tmpdir.dissolve()
def _dispatcher_manager(self):
fake_mailpile = os.path.join(os.path.dirname(__file__), 'fake_mailpile.py')
ssl_config = SSLConfig(certfile(), keyfile())
provider_ca = None
server = DispatcherManager(self._tmpdir.name, fake_mailpile, ssl_config, 'leap provider hostname', provider_ca, mailpile_virtualenv=INHERIT)
return SmokeTest.Server(server.serve_forever, server.shutdown, thread_name='PixelatedServerManager')
def _dispatcher_proxy(self):
dispatcher = DispatcherProxy(PixelatedDispatcherClient('localhost', DEFAULT_PORT, cacert=cafile(), assert_hostname=False), port=12345, certfile=certfile(),
keyfile=keyfile())
return SmokeTest.Server(dispatcher.serve_forever, dispatcher.shutdown, thread_name='PixelatedDispatcherProxy')
def _method(self, method, url, form_data=None, json_data=None, timeout=5.0):
if json_data:
headers = {'content-type': 'application/json'}
data = json.dumps(json_data)
cookies = None
else:
cookies = {'_xsrf': '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'}
headers = None
data = form_data.copy()
data['_xsrf'] = '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'
return method(url, data=data, headers=headers, cookies=cookies, timeout=timeout, verify=cafile())
def get(self, url):
return self.ssl_request.get(url, verify=cafile())
def put(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.put, url, form_data=form_data, json_data=json_data)
def post(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.post, url, form_data=form_data, json_data=json_data)
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_dispatcher_run(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
self.assertSuccess(self.get('https://localhost:4443/agents'), json_body={
'agents': [{'name': 'test', 'state': 'stopped', 'uri': 'http://localhost:4443/agents/test'}]})
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
self.assertSuccess(self.get('https://localhost:4443/agents/test/runtime'),
json_body={'state': 'running', 'port': 5000})
time.sleep(2) # let mailpile start
self.assertSuccess(self.get('http://localhost:5000/'))
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'}))
def test_dispatcher_starts(self):
with self._dispatcher_proxy():
self.assertSuccess(self.get('https://localhost:12345/auth/login'))
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_server_dispatcher_combination(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
with self._dispatcher_proxy():
# add user
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
# try to login with agent down
# self.assertError(302, self.post('https://localhost:12345/auth/login',
# form_data={'username': 'test', 'password': 'test'}))
# start agent
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
# let mailpile start
time.sleep(1)
self.assertMemoryUsage(
self.get('https://localhost:4443/stats/memory_usage'))
try:
# try to login with agent up
self.assertSuccess(self.post('https://localhost:12345/auth/login',
form_data={'username': 'test', 'password': 'some password'}),
body='Hello World!')
finally:
# shutdown mailple
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'})
def assertSuccess(self, response, body=None, json_body=None):
status = response.status_code
self.assertTrue(200 <= status < 300, msg='%d: %s' % (response.status_code, response.reason))
if body:
self.assertEqual(body, response.content)
if json_body:
self.assertEqual(json_body, response.json())
def assertError(self, error_code, response):
self.assertEqual(error_code, response.status_code,
'Expected status code %d but got %d' % (error_code, response.status_code))
def assertMemoryUsage(self, response):
self.assertSuccess(response)
usage = response.json()
self.assertEqual(1, len(usage['agents']))
|
Server
|
identifier_name
|
smoke_test.py
|
#
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
|
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
import os
import threading
import time
import psutil
import requests
from tempdir import TempDir
from mock import patch
from pixelated.client.dispatcher_api_client import PixelatedDispatcherClient
from pixelated.proxy import DispatcherProxy
from pixelated.manager import DispatcherManager, SSLConfig, DEFAULT_PORT
from pixelated.test.util import EnforceTLSv1Adapter, cafile, certfile, keyfile
__author__ = 'fbernitt'
INHERIT = None
class SmokeTest(unittest.TestCase):
__slots__ = ('_run_method', '_shutdown_method', '_thread_name', '_thread')
class Server(object):
def __init__(self, run_method, shutdown_method, thread_name=None):
self._run_method = run_method
self._shutdown_method = shutdown_method
self._thread_name = thread_name
self._thread = None
def _start_server(self):
self._thread = threading.Thread(target=self._run_method)
self._thread.setDaemon(True)
if self._thread_name:
self._thread.setName(self._thread_name)
self._thread.start()
def __enter__(self):
self._start_server()
time.sleep(0.3) # let server start
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._shutdown_method()
self._thread.join()
self._kill_subprocesses()
def _kill_subprocesses(self):
for child in psutil.Process(os.getpid()).children():
try:
p = psutil.Process(child.pid)
p.kill()
except psutil.Error:
pass
def setUp(self):
self._tmpdir = TempDir()
self.ssl_request = requests.Session()
self.ssl_request.mount('https://', EnforceTLSv1Adapter())
def tearDown(self):
self._tmpdir.dissolve()
def _dispatcher_manager(self):
fake_mailpile = os.path.join(os.path.dirname(__file__), 'fake_mailpile.py')
ssl_config = SSLConfig(certfile(), keyfile())
provider_ca = None
server = DispatcherManager(self._tmpdir.name, fake_mailpile, ssl_config, 'leap provider hostname', provider_ca, mailpile_virtualenv=INHERIT)
return SmokeTest.Server(server.serve_forever, server.shutdown, thread_name='PixelatedServerManager')
def _dispatcher_proxy(self):
dispatcher = DispatcherProxy(PixelatedDispatcherClient('localhost', DEFAULT_PORT, cacert=cafile(), assert_hostname=False), port=12345, certfile=certfile(),
keyfile=keyfile())
return SmokeTest.Server(dispatcher.serve_forever, dispatcher.shutdown, thread_name='PixelatedDispatcherProxy')
def _method(self, method, url, form_data=None, json_data=None, timeout=5.0):
if json_data:
headers = {'content-type': 'application/json'}
data = json.dumps(json_data)
cookies = None
else:
cookies = {'_xsrf': '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'}
headers = None
data = form_data.copy()
data['_xsrf'] = '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'
return method(url, data=data, headers=headers, cookies=cookies, timeout=timeout, verify=cafile())
def get(self, url):
return self.ssl_request.get(url, verify=cafile())
def put(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.put, url, form_data=form_data, json_data=json_data)
def post(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.post, url, form_data=form_data, json_data=json_data)
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_dispatcher_run(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
self.assertSuccess(self.get('https://localhost:4443/agents'), json_body={
'agents': [{'name': 'test', 'state': 'stopped', 'uri': 'http://localhost:4443/agents/test'}]})
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
self.assertSuccess(self.get('https://localhost:4443/agents/test/runtime'),
json_body={'state': 'running', 'port': 5000})
time.sleep(2) # let mailpile start
self.assertSuccess(self.get('http://localhost:5000/'))
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'}))
def test_dispatcher_starts(self):
with self._dispatcher_proxy():
self.assertSuccess(self.get('https://localhost:12345/auth/login'))
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_server_dispatcher_combination(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
with self._dispatcher_proxy():
# add user
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
# try to login with agent down
# self.assertError(302, self.post('https://localhost:12345/auth/login',
# form_data={'username': 'test', 'password': 'test'}))
# start agent
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
# let mailpile start
time.sleep(1)
self.assertMemoryUsage(
self.get('https://localhost:4443/stats/memory_usage'))
try:
# try to login with agent up
self.assertSuccess(self.post('https://localhost:12345/auth/login',
form_data={'username': 'test', 'password': 'some password'}),
body='Hello World!')
finally:
# shutdown mailple
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'})
def assertSuccess(self, response, body=None, json_body=None):
status = response.status_code
self.assertTrue(200 <= status < 300, msg='%d: %s' % (response.status_code, response.reason))
if body:
self.assertEqual(body, response.content)
if json_body:
self.assertEqual(json_body, response.json())
def assertError(self, error_code, response):
self.assertEqual(error_code, response.status_code,
'Expected status code %d but got %d' % (error_code, response.status_code))
def assertMemoryUsage(self, response):
self.assertSuccess(response)
usage = response.json()
self.assertEqual(1, len(usage['agents']))
|
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
|
random_line_split
|
smoke_test.py
|
#
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
import os
import threading
import time
import psutil
import requests
from tempdir import TempDir
from mock import patch
from pixelated.client.dispatcher_api_client import PixelatedDispatcherClient
from pixelated.proxy import DispatcherProxy
from pixelated.manager import DispatcherManager, SSLConfig, DEFAULT_PORT
from pixelated.test.util import EnforceTLSv1Adapter, cafile, certfile, keyfile
__author__ = 'fbernitt'
INHERIT = None
class SmokeTest(unittest.TestCase):
__slots__ = ('_run_method', '_shutdown_method', '_thread_name', '_thread')
class Server(object):
def __init__(self, run_method, shutdown_method, thread_name=None):
self._run_method = run_method
self._shutdown_method = shutdown_method
self._thread_name = thread_name
self._thread = None
def _start_server(self):
self._thread = threading.Thread(target=self._run_method)
self._thread.setDaemon(True)
if self._thread_name:
self._thread.setName(self._thread_name)
self._thread.start()
def __enter__(self):
self._start_server()
time.sleep(0.3) # let server start
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._shutdown_method()
self._thread.join()
self._kill_subprocesses()
def _kill_subprocesses(self):
for child in psutil.Process(os.getpid()).children():
try:
p = psutil.Process(child.pid)
p.kill()
except psutil.Error:
pass
def setUp(self):
self._tmpdir = TempDir()
self.ssl_request = requests.Session()
self.ssl_request.mount('https://', EnforceTLSv1Adapter())
def tearDown(self):
self._tmpdir.dissolve()
def _dispatcher_manager(self):
fake_mailpile = os.path.join(os.path.dirname(__file__), 'fake_mailpile.py')
ssl_config = SSLConfig(certfile(), keyfile())
provider_ca = None
server = DispatcherManager(self._tmpdir.name, fake_mailpile, ssl_config, 'leap provider hostname', provider_ca, mailpile_virtualenv=INHERIT)
return SmokeTest.Server(server.serve_forever, server.shutdown, thread_name='PixelatedServerManager')
def _dispatcher_proxy(self):
dispatcher = DispatcherProxy(PixelatedDispatcherClient('localhost', DEFAULT_PORT, cacert=cafile(), assert_hostname=False), port=12345, certfile=certfile(),
keyfile=keyfile())
return SmokeTest.Server(dispatcher.serve_forever, dispatcher.shutdown, thread_name='PixelatedDispatcherProxy')
def _method(self, method, url, form_data=None, json_data=None, timeout=5.0):
if json_data:
headers = {'content-type': 'application/json'}
data = json.dumps(json_data)
cookies = None
else:
cookies = {'_xsrf': '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'}
headers = None
data = form_data.copy()
data['_xsrf'] = '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'
return method(url, data=data, headers=headers, cookies=cookies, timeout=timeout, verify=cafile())
def get(self, url):
return self.ssl_request.get(url, verify=cafile())
def put(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.put, url, form_data=form_data, json_data=json_data)
def post(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.post, url, form_data=form_data, json_data=json_data)
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_dispatcher_run(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
self.assertSuccess(self.get('https://localhost:4443/agents'), json_body={
'agents': [{'name': 'test', 'state': 'stopped', 'uri': 'http://localhost:4443/agents/test'}]})
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
self.assertSuccess(self.get('https://localhost:4443/agents/test/runtime'),
json_body={'state': 'running', 'port': 5000})
time.sleep(2) # let mailpile start
self.assertSuccess(self.get('http://localhost:5000/'))
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'}))
def test_dispatcher_starts(self):
with self._dispatcher_proxy():
self.assertSuccess(self.get('https://localhost:12345/auth/login'))
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_server_dispatcher_combination(self, leap_provider_mock, leap_certificate_mock):
|
def assertSuccess(self, response, body=None, json_body=None):
status = response.status_code
self.assertTrue(200 <= status < 300, msg='%d: %s' % (response.status_code, response.reason))
if body:
self.assertEqual(body, response.content)
if json_body:
self.assertEqual(json_body, response.json())
def assertError(self, error_code, response):
self.assertEqual(error_code, response.status_code,
'Expected status code %d but got %d' % (error_code, response.status_code))
def assertMemoryUsage(self, response):
self.assertSuccess(response)
usage = response.json()
self.assertEqual(1, len(usage['agents']))
|
with self._dispatcher_manager():
with self._dispatcher_proxy():
# add user
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
# try to login with agent down
# self.assertError(302, self.post('https://localhost:12345/auth/login',
# form_data={'username': 'test', 'password': 'test'}))
# start agent
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
# let mailpile start
time.sleep(1)
self.assertMemoryUsage(
self.get('https://localhost:4443/stats/memory_usage'))
try:
# try to login with agent up
self.assertSuccess(self.post('https://localhost:12345/auth/login',
form_data={'username': 'test', 'password': 'some password'}),
body='Hello World!')
finally:
# shutdown mailple
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'})
|
identifier_body
|
smoke_test.py
|
#
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
import os
import threading
import time
import psutil
import requests
from tempdir import TempDir
from mock import patch
from pixelated.client.dispatcher_api_client import PixelatedDispatcherClient
from pixelated.proxy import DispatcherProxy
from pixelated.manager import DispatcherManager, SSLConfig, DEFAULT_PORT
from pixelated.test.util import EnforceTLSv1Adapter, cafile, certfile, keyfile
__author__ = 'fbernitt'
INHERIT = None
class SmokeTest(unittest.TestCase):
__slots__ = ('_run_method', '_shutdown_method', '_thread_name', '_thread')
class Server(object):
def __init__(self, run_method, shutdown_method, thread_name=None):
self._run_method = run_method
self._shutdown_method = shutdown_method
self._thread_name = thread_name
self._thread = None
def _start_server(self):
self._thread = threading.Thread(target=self._run_method)
self._thread.setDaemon(True)
if self._thread_name:
self._thread.setName(self._thread_name)
self._thread.start()
def __enter__(self):
self._start_server()
time.sleep(0.3) # let server start
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._shutdown_method()
self._thread.join()
self._kill_subprocesses()
def _kill_subprocesses(self):
for child in psutil.Process(os.getpid()).children():
try:
p = psutil.Process(child.pid)
p.kill()
except psutil.Error:
pass
def setUp(self):
self._tmpdir = TempDir()
self.ssl_request = requests.Session()
self.ssl_request.mount('https://', EnforceTLSv1Adapter())
def tearDown(self):
self._tmpdir.dissolve()
def _dispatcher_manager(self):
fake_mailpile = os.path.join(os.path.dirname(__file__), 'fake_mailpile.py')
ssl_config = SSLConfig(certfile(), keyfile())
provider_ca = None
server = DispatcherManager(self._tmpdir.name, fake_mailpile, ssl_config, 'leap provider hostname', provider_ca, mailpile_virtualenv=INHERIT)
return SmokeTest.Server(server.serve_forever, server.shutdown, thread_name='PixelatedServerManager')
def _dispatcher_proxy(self):
dispatcher = DispatcherProxy(PixelatedDispatcherClient('localhost', DEFAULT_PORT, cacert=cafile(), assert_hostname=False), port=12345, certfile=certfile(),
keyfile=keyfile())
return SmokeTest.Server(dispatcher.serve_forever, dispatcher.shutdown, thread_name='PixelatedDispatcherProxy')
def _method(self, method, url, form_data=None, json_data=None, timeout=5.0):
if json_data:
|
else:
cookies = {'_xsrf': '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'}
headers = None
data = form_data.copy()
data['_xsrf'] = '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'
return method(url, data=data, headers=headers, cookies=cookies, timeout=timeout, verify=cafile())
def get(self, url):
return self.ssl_request.get(url, verify=cafile())
def put(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.put, url, form_data=form_data, json_data=json_data)
def post(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.post, url, form_data=form_data, json_data=json_data)
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_dispatcher_run(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
self.assertSuccess(self.get('https://localhost:4443/agents'), json_body={
'agents': [{'name': 'test', 'state': 'stopped', 'uri': 'http://localhost:4443/agents/test'}]})
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
self.assertSuccess(self.get('https://localhost:4443/agents/test/runtime'),
json_body={'state': 'running', 'port': 5000})
time.sleep(2) # let mailpile start
self.assertSuccess(self.get('http://localhost:5000/'))
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'}))
def test_dispatcher_starts(self):
with self._dispatcher_proxy():
self.assertSuccess(self.get('https://localhost:12345/auth/login'))
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_server_dispatcher_combination(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
with self._dispatcher_proxy():
# add user
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
# try to login with agent down
# self.assertError(302, self.post('https://localhost:12345/auth/login',
# form_data={'username': 'test', 'password': 'test'}))
# start agent
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
# let mailpile start
time.sleep(1)
self.assertMemoryUsage(
self.get('https://localhost:4443/stats/memory_usage'))
try:
# try to login with agent up
self.assertSuccess(self.post('https://localhost:12345/auth/login',
form_data={'username': 'test', 'password': 'some password'}),
body='Hello World!')
finally:
# shutdown mailple
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'})
def assertSuccess(self, response, body=None, json_body=None):
status = response.status_code
self.assertTrue(200 <= status < 300, msg='%d: %s' % (response.status_code, response.reason))
if body:
self.assertEqual(body, response.content)
if json_body:
self.assertEqual(json_body, response.json())
def assertError(self, error_code, response):
self.assertEqual(error_code, response.status_code,
'Expected status code %d but got %d' % (error_code, response.status_code))
def assertMemoryUsage(self, response):
self.assertSuccess(response)
usage = response.json()
self.assertEqual(1, len(usage['agents']))
|
headers = {'content-type': 'application/json'}
data = json.dumps(json_data)
cookies = None
|
conditional_block
|
config.rs
|
// Copyright (c) 2016 Nikita Pekin and the smexybot contributors
// See the README.md file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use error::Result;
use serde_json;
use std::collections::HashSet;
use std::fs::File;
use std::io::{ErrorKind, Read};
#[cfg(feature = "nightly")]
include!("config.in.rs");
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/config.rs"));
impl Config {
pub fn
|
(name: Option<&str>) -> Self {
if let Some(name) = name {
match Config::load_from_file(name) {
Ok(config) => {
return config;
},
Err(err) => warn!("Failed for load config from \"{}\": {}", name, err),
}
}
info!("Using default config");
Default::default()
}
pub fn load_from_file(name: &str) -> Result<Self> {
let mut file = match File::open(name) {
Ok(file) => file,
// If no file is present, assume this is a fresh config.
Err(ref err) if err.kind() == ErrorKind::NotFound => return Ok(Default::default()),
Err(_) => panic!("Failed to open file: {}", name),
};
let mut config = String::new();
file.read_to_string(&mut config)
.expect(&format!("Failed to read from file: {}", name));
let config = serde_json::from_str(&config).expect("Failed to deserialize Config");
info!("Loaded config from: \"{}\"", name);
Ok(config)
}
}
impl Default for Config {
fn default() -> Config {
Config {
bot_name: "smexybot".to_owned(),
command_prefix: ";".to_owned(),
owners: HashSet::new(),
source_url: "https://github.com/indiv0/smexybot".to_owned(),
}
}
}
|
new
|
identifier_name
|
config.rs
|
// Copyright (c) 2016 Nikita Pekin and the smexybot contributors
// See the README.md file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use error::Result;
use serde_json;
use std::collections::HashSet;
use std::fs::File;
use std::io::{ErrorKind, Read};
#[cfg(feature = "nightly")]
include!("config.in.rs");
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/config.rs"));
impl Config {
pub fn new(name: Option<&str>) -> Self {
if let Some(name) = name {
match Config::load_from_file(name) {
Ok(config) => {
return config;
},
Err(err) => warn!("Failed for load config from \"{}\": {}", name, err),
}
}
info!("Using default config");
Default::default()
}
pub fn load_from_file(name: &str) -> Result<Self> {
let mut file = match File::open(name) {
Ok(file) => file,
// If no file is present, assume this is a fresh config.
Err(ref err) if err.kind() == ErrorKind::NotFound => return Ok(Default::default()),
Err(_) => panic!("Failed to open file: {}", name),
};
let mut config = String::new();
file.read_to_string(&mut config)
.expect(&format!("Failed to read from file: {}", name));
let config = serde_json::from_str(&config).expect("Failed to deserialize Config");
info!("Loaded config from: \"{}\"", name);
Ok(config)
}
}
impl Default for Config {
fn default() -> Config
|
}
|
{
Config {
bot_name: "smexybot".to_owned(),
command_prefix: ";".to_owned(),
owners: HashSet::new(),
source_url: "https://github.com/indiv0/smexybot".to_owned(),
}
}
|
identifier_body
|
config.rs
|
// Copyright (c) 2016 Nikita Pekin and the smexybot contributors
// See the README.md file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use error::Result;
use serde_json;
use std::collections::HashSet;
use std::fs::File;
use std::io::{ErrorKind, Read};
#[cfg(feature = "nightly")]
include!("config.in.rs");
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/config.rs"));
impl Config {
pub fn new(name: Option<&str>) -> Self {
if let Some(name) = name {
match Config::load_from_file(name) {
Ok(config) => {
return config;
},
Err(err) => warn!("Failed for load config from \"{}\": {}", name, err),
}
}
info!("Using default config");
Default::default()
}
pub fn load_from_file(name: &str) -> Result<Self> {
let mut file = match File::open(name) {
Ok(file) => file,
// If no file is present, assume this is a fresh config.
Err(ref err) if err.kind() == ErrorKind::NotFound => return Ok(Default::default()),
Err(_) => panic!("Failed to open file: {}", name),
};
let mut config = String::new();
file.read_to_string(&mut config)
.expect(&format!("Failed to read from file: {}", name));
let config = serde_json::from_str(&config).expect("Failed to deserialize Config");
info!("Loaded config from: \"{}\"", name);
Ok(config)
}
}
|
command_prefix: ";".to_owned(),
owners: HashSet::new(),
source_url: "https://github.com/indiv0/smexybot".to_owned(),
}
}
}
|
impl Default for Config {
fn default() -> Config {
Config {
bot_name: "smexybot".to_owned(),
|
random_line_split
|
coerce-unify-return.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that coercions unify the expected return type of a polymorphic
// function call, instead of leaving the type variables as they were.
// pretty-expanded FIXME #23616
struct
|
;
impl Foo {
fn foo<T>(self, x: T) -> Option<T> { Some(x) }
}
pub fn main() {
let _: Option<fn()> = Some(main);
let _: Option<fn()> = Foo.foo(main);
// The same two cases, with implicit type variables made explicit.
let _: Option<fn()> = Some::<_>(main);
let _: Option<fn()> = Foo.foo::<_>(main);
}
|
Foo
|
identifier_name
|
coerce-unify-return.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that coercions unify the expected return type of a polymorphic
// function call, instead of leaving the type variables as they were.
// pretty-expanded FIXME #23616
struct Foo;
impl Foo {
fn foo<T>(self, x: T) -> Option<T> { Some(x) }
}
pub fn main() {
|
let _: Option<fn()> = Foo.foo(main);
// The same two cases, with implicit type variables made explicit.
let _: Option<fn()> = Some::<_>(main);
let _: Option<fn()> = Foo.foo::<_>(main);
}
|
let _: Option<fn()> = Some(main);
|
random_line_split
|
coerce-unify-return.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that coercions unify the expected return type of a polymorphic
// function call, instead of leaving the type variables as they were.
// pretty-expanded FIXME #23616
struct Foo;
impl Foo {
fn foo<T>(self, x: T) -> Option<T>
|
}
pub fn main() {
let _: Option<fn()> = Some(main);
let _: Option<fn()> = Foo.foo(main);
// The same two cases, with implicit type variables made explicit.
let _: Option<fn()> = Some::<_>(main);
let _: Option<fn()> = Foo.foo::<_>(main);
}
|
{ Some(x) }
|
identifier_body
|
Elements.ts
|
/**
* Copyright (c) 2017 ~ present NAVER Corp.
* billboard.js project is licensed under the MIT license
*/
// @ts-nocheck
import CLASS from "./classes";
import {getCentroid, isString, parseDate} from "./util";
/**
* Stanford diagram plugin element class
* @class ColorScale
* @param {Stanford} owner Stanford instance
* @private
*/
export default class Elements {
private owner;
constructor(owner) {
this.owner = owner;
// MEMO: Avoid blocking eventRect
const elements = owner.$$.$el.main.select(".bb-chart")
.append("g")
.attr("class", CLASS.stanfordElements);
elements.append("g").attr("class", CLASS.stanfordLines);
elements.append("g").attr("class", CLASS.stanfordRegions);
}
updateStanfordLines(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
// Stanford-Lines
const stanfordLine = main.select(`.${CLASS.stanfordLines}`)
.style("shape-rendering", "geometricprecision")
.selectAll(`.${CLASS.stanfordLine}`)
.data(this.owner.config.lines);
// exit
stanfordLine.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordLineEnter = stanfordLine.enter().append("g");
stanfordLineEnter.append("line")
.style("opacity", "0");
stanfordLineEnter
.merge(stanfordLine)
.attr("class", d => CLASS.stanfordLine + (d.class ? ` ${d.class}` : ""))
.select("line")
.transition()
.duration(duration)
.attr("x1", d => (isRotated ? yvCustom(d, "y1") : xvCustom(d, "x1")))
.attr("x2", d => (isRotated ? yvCustom(d, "y2") : xvCustom(d, "x2")))
.attr("y1", d => (isRotated ? xvCustom(d, "x1") : yvCustom(d, "y1")))
.attr("y2", d => (isRotated ? xvCustom(d, "x2") : yvCustom(d, "y2")))
.transition()
.style("opacity", null);
}
updateStanfordRegions(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
const countPointsInRegion = this.owner.countEpochsInRegion.bind($$);
// Stanford-Regions
let stanfordRegion = main.select(`.${CLASS.stanfordRegions}`)
.selectAll(`.${CLASS.stanfordRegion}`)
.data(this.owner.config.regions);
// exit
stanfordRegion.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordRegionEnter = stanfordRegion.enter().append("g");
stanfordRegionEnter.append("polygon")
.style("opacity", "0");
stanfordRegionEnter.append("text")
.attr("transform", isRotated ? "rotate(-90)" : "")
.style("opacity", "0");
stanfordRegion = stanfordRegionEnter.merge(stanfordRegion);
// update
stanfordRegion
.attr("class", d => CLASS.stanfordRegion + (d.class ? ` ${d.class}` : ""))
.select("polygon")
.transition()
.duration(duration)
.attr("points", d => d.points.map(value => [
isRotated ? yvCustom(value, "y") : xvCustom(value, "x"),
isRotated ? xvCustom(value, "x") : yvCustom(value, "y")
].join(",")).join(" "))
.transition()
.style("opacity", d => String(d.opacity ? d.opacity : 0.2));
stanfordRegion.select("text")
.transition()
.duration(duration)
.attr("x", d => (isRotated ? yvCustom(getCentroid(d.points), "y") : xvCustom(getCentroid(d.points), "x")))
.attr("y", d => (isRotated ? xvCustom(getCentroid(d.points), "x") : yvCustom(getCentroid(d.points), "y")))
.text(d => {
if (d.text) {
const {value, percentage} = countPointsInRegion(d.points);
return d.text(value, percentage);
}
return "";
})
.attr("text-anchor", "middle")
.attr("dominant-baseline", "middle")
.transition()
.style("opacity", null);
}
updateStanfordElements(duration = 0): void {
this.updateStanfordLines(duration);
this.updateStanfordRegions(duration);
}
xvCustom(d, xyValue): number
|
yvCustom(d, xyValue): number {
const $$ = this;
const yScale = d.axis && d.axis === "y2" ? $$.scale.y2 : $$.scale.y;
const value = xyValue ? d[xyValue] : $$.getBaseValue(d);
return Math.ceil(yScale(value));
}
}
|
{
const $$ = this;
const {axis, config} = $$;
let value = xyValue ? d[xyValue] : $$.getBaseValue(d);
if (axis.isTimeSeries()) {
value = parseDate.call($$, value);
} else if (axis.isCategorized() && isString(value)) {
value = config.axis_x_categories.indexOf(d.value);
}
return Math.ceil($$.scale.x(value));
}
|
identifier_body
|
Elements.ts
|
/**
* Copyright (c) 2017 ~ present NAVER Corp.
* billboard.js project is licensed under the MIT license
*/
// @ts-nocheck
import CLASS from "./classes";
import {getCentroid, isString, parseDate} from "./util";
/**
* Stanford diagram plugin element class
* @class ColorScale
* @param {Stanford} owner Stanford instance
* @private
*/
export default class Elements {
private owner;
constructor(owner) {
this.owner = owner;
// MEMO: Avoid blocking eventRect
const elements = owner.$$.$el.main.select(".bb-chart")
.append("g")
.attr("class", CLASS.stanfordElements);
elements.append("g").attr("class", CLASS.stanfordLines);
elements.append("g").attr("class", CLASS.stanfordRegions);
}
updateStanfordLines(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
// Stanford-Lines
const stanfordLine = main.select(`.${CLASS.stanfordLines}`)
.style("shape-rendering", "geometricprecision")
.selectAll(`.${CLASS.stanfordLine}`)
.data(this.owner.config.lines);
// exit
stanfordLine.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordLineEnter = stanfordLine.enter().append("g");
stanfordLineEnter.append("line")
.style("opacity", "0");
stanfordLineEnter
.merge(stanfordLine)
.attr("class", d => CLASS.stanfordLine + (d.class ? ` ${d.class}` : ""))
.select("line")
.transition()
.duration(duration)
.attr("x1", d => (isRotated ? yvCustom(d, "y1") : xvCustom(d, "x1")))
.attr("x2", d => (isRotated ? yvCustom(d, "y2") : xvCustom(d, "x2")))
.attr("y1", d => (isRotated ? xvCustom(d, "x1") : yvCustom(d, "y1")))
.attr("y2", d => (isRotated ? xvCustom(d, "x2") : yvCustom(d, "y2")))
.transition()
.style("opacity", null);
}
updateStanfordRegions(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
const countPointsInRegion = this.owner.countEpochsInRegion.bind($$);
// Stanford-Regions
let stanfordRegion = main.select(`.${CLASS.stanfordRegions}`)
.selectAll(`.${CLASS.stanfordRegion}`)
.data(this.owner.config.regions);
// exit
stanfordRegion.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordRegionEnter = stanfordRegion.enter().append("g");
stanfordRegionEnter.append("polygon")
.style("opacity", "0");
stanfordRegionEnter.append("text")
.attr("transform", isRotated ? "rotate(-90)" : "")
.style("opacity", "0");
stanfordRegion = stanfordRegionEnter.merge(stanfordRegion);
// update
stanfordRegion
.attr("class", d => CLASS.stanfordRegion + (d.class ? ` ${d.class}` : ""))
.select("polygon")
.transition()
.duration(duration)
.attr("points", d => d.points.map(value => [
isRotated ? yvCustom(value, "y") : xvCustom(value, "x"),
isRotated ? xvCustom(value, "x") : yvCustom(value, "y")
].join(",")).join(" "))
.transition()
.style("opacity", d => String(d.opacity ? d.opacity : 0.2));
stanfordRegion.select("text")
.transition()
.duration(duration)
.attr("x", d => (isRotated ? yvCustom(getCentroid(d.points), "y") : xvCustom(getCentroid(d.points), "x")))
.attr("y", d => (isRotated ? xvCustom(getCentroid(d.points), "x") : yvCustom(getCentroid(d.points), "y")))
.text(d => {
if (d.text) {
const {value, percentage} = countPointsInRegion(d.points);
return d.text(value, percentage);
}
return "";
})
.attr("text-anchor", "middle")
.attr("dominant-baseline", "middle")
.transition()
.style("opacity", null);
}
updateStanfordElements(duration = 0): void {
this.updateStanfordLines(duration);
this.updateStanfordRegions(duration);
}
xvCustom(d, xyValue): number {
const $$ = this;
const {axis, config} = $$;
let value = xyValue ? d[xyValue] : $$.getBaseValue(d);
if (axis.isTimeSeries()) {
value = parseDate.call($$, value);
} else if (axis.isCategorized() && isString(value))
|
return Math.ceil($$.scale.x(value));
}
yvCustom(d, xyValue): number {
const $$ = this;
const yScale = d.axis && d.axis === "y2" ? $$.scale.y2 : $$.scale.y;
const value = xyValue ? d[xyValue] : $$.getBaseValue(d);
return Math.ceil(yScale(value));
}
}
|
{
value = config.axis_x_categories.indexOf(d.value);
}
|
conditional_block
|
Elements.ts
|
/**
* Copyright (c) 2017 ~ present NAVER Corp.
* billboard.js project is licensed under the MIT license
*/
// @ts-nocheck
import CLASS from "./classes";
import {getCentroid, isString, parseDate} from "./util";
/**
* Stanford diagram plugin element class
* @class ColorScale
* @param {Stanford} owner Stanford instance
* @private
*/
export default class
|
{
private owner;
constructor(owner) {
this.owner = owner;
// MEMO: Avoid blocking eventRect
const elements = owner.$$.$el.main.select(".bb-chart")
.append("g")
.attr("class", CLASS.stanfordElements);
elements.append("g").attr("class", CLASS.stanfordLines);
elements.append("g").attr("class", CLASS.stanfordRegions);
}
updateStanfordLines(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
// Stanford-Lines
const stanfordLine = main.select(`.${CLASS.stanfordLines}`)
.style("shape-rendering", "geometricprecision")
.selectAll(`.${CLASS.stanfordLine}`)
.data(this.owner.config.lines);
// exit
stanfordLine.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordLineEnter = stanfordLine.enter().append("g");
stanfordLineEnter.append("line")
.style("opacity", "0");
stanfordLineEnter
.merge(stanfordLine)
.attr("class", d => CLASS.stanfordLine + (d.class ? ` ${d.class}` : ""))
.select("line")
.transition()
.duration(duration)
.attr("x1", d => (isRotated ? yvCustom(d, "y1") : xvCustom(d, "x1")))
.attr("x2", d => (isRotated ? yvCustom(d, "y2") : xvCustom(d, "x2")))
.attr("y1", d => (isRotated ? xvCustom(d, "x1") : yvCustom(d, "y1")))
.attr("y2", d => (isRotated ? xvCustom(d, "x2") : yvCustom(d, "y2")))
.transition()
.style("opacity", null);
}
updateStanfordRegions(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
const countPointsInRegion = this.owner.countEpochsInRegion.bind($$);
// Stanford-Regions
let stanfordRegion = main.select(`.${CLASS.stanfordRegions}`)
.selectAll(`.${CLASS.stanfordRegion}`)
.data(this.owner.config.regions);
// exit
stanfordRegion.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordRegionEnter = stanfordRegion.enter().append("g");
stanfordRegionEnter.append("polygon")
.style("opacity", "0");
stanfordRegionEnter.append("text")
.attr("transform", isRotated ? "rotate(-90)" : "")
.style("opacity", "0");
stanfordRegion = stanfordRegionEnter.merge(stanfordRegion);
// update
stanfordRegion
.attr("class", d => CLASS.stanfordRegion + (d.class ? ` ${d.class}` : ""))
.select("polygon")
.transition()
.duration(duration)
.attr("points", d => d.points.map(value => [
isRotated ? yvCustom(value, "y") : xvCustom(value, "x"),
isRotated ? xvCustom(value, "x") : yvCustom(value, "y")
].join(",")).join(" "))
.transition()
.style("opacity", d => String(d.opacity ? d.opacity : 0.2));
stanfordRegion.select("text")
.transition()
.duration(duration)
.attr("x", d => (isRotated ? yvCustom(getCentroid(d.points), "y") : xvCustom(getCentroid(d.points), "x")))
.attr("y", d => (isRotated ? xvCustom(getCentroid(d.points), "x") : yvCustom(getCentroid(d.points), "y")))
.text(d => {
if (d.text) {
const {value, percentage} = countPointsInRegion(d.points);
return d.text(value, percentage);
}
return "";
})
.attr("text-anchor", "middle")
.attr("dominant-baseline", "middle")
.transition()
.style("opacity", null);
}
updateStanfordElements(duration = 0): void {
this.updateStanfordLines(duration);
this.updateStanfordRegions(duration);
}
xvCustom(d, xyValue): number {
const $$ = this;
const {axis, config} = $$;
let value = xyValue ? d[xyValue] : $$.getBaseValue(d);
if (axis.isTimeSeries()) {
value = parseDate.call($$, value);
} else if (axis.isCategorized() && isString(value)) {
value = config.axis_x_categories.indexOf(d.value);
}
return Math.ceil($$.scale.x(value));
}
yvCustom(d, xyValue): number {
const $$ = this;
const yScale = d.axis && d.axis === "y2" ? $$.scale.y2 : $$.scale.y;
const value = xyValue ? d[xyValue] : $$.getBaseValue(d);
return Math.ceil(yScale(value));
}
}
|
Elements
|
identifier_name
|
Elements.ts
|
/**
* Copyright (c) 2017 ~ present NAVER Corp.
* billboard.js project is licensed under the MIT license
*/
// @ts-nocheck
import CLASS from "./classes";
import {getCentroid, isString, parseDate} from "./util";
/**
* Stanford diagram plugin element class
* @class ColorScale
* @param {Stanford} owner Stanford instance
* @private
*/
export default class Elements {
private owner;
constructor(owner) {
this.owner = owner;
// MEMO: Avoid blocking eventRect
const elements = owner.$$.$el.main.select(".bb-chart")
.append("g")
.attr("class", CLASS.stanfordElements);
elements.append("g").attr("class", CLASS.stanfordLines);
elements.append("g").attr("class", CLASS.stanfordRegions);
}
updateStanfordLines(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
// Stanford-Lines
const stanfordLine = main.select(`.${CLASS.stanfordLines}`)
.style("shape-rendering", "geometricprecision")
.selectAll(`.${CLASS.stanfordLine}`)
.data(this.owner.config.lines);
// exit
stanfordLine.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordLineEnter = stanfordLine.enter().append("g");
stanfordLineEnter.append("line")
.style("opacity", "0");
stanfordLineEnter
.merge(stanfordLine)
.attr("class", d => CLASS.stanfordLine + (d.class ? ` ${d.class}` : ""))
.select("line")
.transition()
.duration(duration)
.attr("x1", d => (isRotated ? yvCustom(d, "y1") : xvCustom(d, "x1")))
.attr("x2", d => (isRotated ? yvCustom(d, "y2") : xvCustom(d, "x2")))
.attr("y1", d => (isRotated ? xvCustom(d, "x1") : yvCustom(d, "y1")))
.attr("y2", d => (isRotated ? xvCustom(d, "x2") : yvCustom(d, "y2")))
.transition()
.style("opacity", null);
}
updateStanfordRegions(duration: number): void {
const {$$} = this.owner;
const {config, $el: {main}} = $$;
const isRotated = config.axis_rotated;
const xvCustom = this.xvCustom.bind($$);
const yvCustom = this.yvCustom.bind($$);
const countPointsInRegion = this.owner.countEpochsInRegion.bind($$);
// Stanford-Regions
let stanfordRegion = main.select(`.${CLASS.stanfordRegions}`)
.selectAll(`.${CLASS.stanfordRegion}`)
.data(this.owner.config.regions);
// exit
stanfordRegion.exit().transition()
.duration(duration)
.style("opacity", "0")
.remove();
// enter
const stanfordRegionEnter = stanfordRegion.enter().append("g");
stanfordRegionEnter.append("polygon")
.style("opacity", "0");
stanfordRegionEnter.append("text")
.attr("transform", isRotated ? "rotate(-90)" : "")
.style("opacity", "0");
stanfordRegion = stanfordRegionEnter.merge(stanfordRegion);
// update
|
.select("polygon")
.transition()
.duration(duration)
.attr("points", d => d.points.map(value => [
isRotated ? yvCustom(value, "y") : xvCustom(value, "x"),
isRotated ? xvCustom(value, "x") : yvCustom(value, "y")
].join(",")).join(" "))
.transition()
.style("opacity", d => String(d.opacity ? d.opacity : 0.2));
stanfordRegion.select("text")
.transition()
.duration(duration)
.attr("x", d => (isRotated ? yvCustom(getCentroid(d.points), "y") : xvCustom(getCentroid(d.points), "x")))
.attr("y", d => (isRotated ? xvCustom(getCentroid(d.points), "x") : yvCustom(getCentroid(d.points), "y")))
.text(d => {
if (d.text) {
const {value, percentage} = countPointsInRegion(d.points);
return d.text(value, percentage);
}
return "";
})
.attr("text-anchor", "middle")
.attr("dominant-baseline", "middle")
.transition()
.style("opacity", null);
}
updateStanfordElements(duration = 0): void {
this.updateStanfordLines(duration);
this.updateStanfordRegions(duration);
}
xvCustom(d, xyValue): number {
const $$ = this;
const {axis, config} = $$;
let value = xyValue ? d[xyValue] : $$.getBaseValue(d);
if (axis.isTimeSeries()) {
value = parseDate.call($$, value);
} else if (axis.isCategorized() && isString(value)) {
value = config.axis_x_categories.indexOf(d.value);
}
return Math.ceil($$.scale.x(value));
}
yvCustom(d, xyValue): number {
const $$ = this;
const yScale = d.axis && d.axis === "y2" ? $$.scale.y2 : $$.scale.y;
const value = xyValue ? d[xyValue] : $$.getBaseValue(d);
return Math.ceil(yScale(value));
}
}
|
stanfordRegion
.attr("class", d => CLASS.stanfordRegion + (d.class ? ` ${d.class}` : ""))
|
random_line_split
|
snooze-rounded.js
|
d: "M10 11h2.63l-3.72 4.35C8.36 16 8.82 17 9.67 17H14c.55 0 1-.45 1-1s-.45-1-1-1h-2.63l3.72-4.35c.55-.65.09-1.65-.76-1.65H10c-.55 0-1 .45-1 1s.45 1 1 1zm11.3-4.58c-.35.42-.98.48-1.41.13l-3.07-2.56c-.42-.36-.48-.99-.12-1.41.35-.42.98-.48 1.41-.13l3.07 2.56c.42.36.48.99.12 1.41zm-18.6 0c.35.43.98.48 1.4.13l3.07-2.56c.43-.36.49-.99.13-1.41-.35-.43-.98-.48-1.4-.13L2.82 5.01c-.42.36-.48.99-.12 1.41zM12 6c3.86 0 7 3.14 7 7s-3.14 7-7 7-7-3.14-7-7 3.14-7 7-7m0-2c-4.97 0-9 4.03-9 9s4.03 9 9 9 9-4.03 9-9-4.03-9-9-9z"
}), 'SnoozeRounded');
|
import { h } from 'omi';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(h("path", {
|
random_line_split
|
|
_cuda_types.py
|
import numpy
from cupy._core._scalar import get_typename
# Base class for cuda types.
class TypeBase:
def __str__(self):
raise NotImplementedError
def declvar(self, x):
return f'{self} {x}'
class Void(TypeBase):
def __init__(self):
pass
def __str__(self):
return 'void'
class Scalar(TypeBase):
def __init__(self, dtype):
self.dtype = numpy.dtype(dtype)
def __str__(self):
dtype = self.dtype
if dtype == numpy.float16:
# For the performance
dtype = numpy.dtype('float32')
return get_typename(dtype)
def __eq__(self, other):
return isinstance(other, Scalar) and self.dtype == other.dtype
def __hash__(self):
|
class ArrayBase(TypeBase):
def __init__(self, child_type: TypeBase, ndim: int):
assert isinstance(child_type, TypeBase)
self.child_type = child_type
self.ndim = ndim
class CArray(ArrayBase):
def __init__(self, dtype, ndim, is_c_contiguous, index_32_bits):
self.dtype = dtype
self._c_contiguous = is_c_contiguous
self._index_32_bits = index_32_bits
super().__init__(Scalar(dtype), ndim)
@classmethod
def from_ndarray(cls, x):
return CArray(x.dtype, x.ndim, x._c_contiguous, x._index_32_bits)
def __str__(self):
ctype = get_typename(self.dtype)
c_contiguous = get_cuda_code_from_constant(self._c_contiguous, bool_)
index_32_bits = get_cuda_code_from_constant(self._index_32_bits, bool_)
return f'CArray<{ctype}, {self.ndim}, {c_contiguous}, {index_32_bits}>'
def __eq__(self, other):
return (
isinstance(other, CArray) and
self.dtype == other.dtype and
self.ndim == other.ndim and
self._c_contiguous == other._c_contiguous and
self._index_32_bits == other._index_32_bits
)
def __hash__(self):
return hash(
(self.dtype, self.ndim, self._c_contiguous, self._index_32_bits))
class SharedMem(ArrayBase):
def __init__(self, child_type, size):
if not (isinstance(size, int) or size is None):
raise 'size of shared_memory must be integer or `None`'
self._size = size
super().__init__(child_type, 1)
def declvar(self, x):
if self._size is None:
return f'extern __shared__ {self.child_type} {x}[]'
return f'__shared__ {self.child_type} {x}[{self._size}]'
class Ptr(ArrayBase):
def __init__(self, child_type):
super().__init__(child_type, 1)
def __str__(self):
return f'{self.child_type}*'
class Tuple(TypeBase):
def __init__(self, types):
self.types = types
def __str__(self):
types = ', '.join([str(t) for t in self.types])
return f'thrust::tuple<{types}>'
def __eq__(self, other):
return isinstance(other, Tuple) and self.types == other.types
void = Void()
bool_ = Scalar(numpy.bool_)
int32 = Scalar(numpy.int32)
uint32 = Scalar(numpy.uint32)
_suffix_literals_dict = {
'float64': '',
'float32': 'f',
'int64': 'll',
'int32': '',
'uint64': 'ull',
'uint32': 'u',
'bool': '',
}
def get_cuda_code_from_constant(x, ctype):
dtype = ctype.dtype
suffix_literal = _suffix_literals_dict.get(dtype.name)
if suffix_literal is not None:
s = str(x).lower()
return f'{s}{suffix_literal}'
ctype = str(ctype)
if dtype.kind == 'c':
return f'{ctype}({x.real}, {x.imag})'
if ' ' in ctype:
return f'({ctype}){x}'
return f'{ctype}({x})'
|
return hash(self.dtype)
|
identifier_body
|
_cuda_types.py
|
import numpy
from cupy._core._scalar import get_typename
# Base class for cuda types.
class TypeBase:
def __str__(self):
raise NotImplementedError
def declvar(self, x):
return f'{self} {x}'
class Void(TypeBase):
def __init__(self):
pass
def __str__(self):
return 'void'
class Scalar(TypeBase):
def __init__(self, dtype):
self.dtype = numpy.dtype(dtype)
def __str__(self):
dtype = self.dtype
if dtype == numpy.float16:
# For the performance
dtype = numpy.dtype('float32')
return get_typename(dtype)
def __eq__(self, other):
return isinstance(other, Scalar) and self.dtype == other.dtype
def __hash__(self):
return hash(self.dtype)
class ArrayBase(TypeBase):
def __init__(self, child_type: TypeBase, ndim: int):
assert isinstance(child_type, TypeBase)
self.child_type = child_type
self.ndim = ndim
class CArray(ArrayBase):
def __init__(self, dtype, ndim, is_c_contiguous, index_32_bits):
self.dtype = dtype
self._c_contiguous = is_c_contiguous
self._index_32_bits = index_32_bits
super().__init__(Scalar(dtype), ndim)
@classmethod
def from_ndarray(cls, x):
return CArray(x.dtype, x.ndim, x._c_contiguous, x._index_32_bits)
def
|
(self):
ctype = get_typename(self.dtype)
c_contiguous = get_cuda_code_from_constant(self._c_contiguous, bool_)
index_32_bits = get_cuda_code_from_constant(self._index_32_bits, bool_)
return f'CArray<{ctype}, {self.ndim}, {c_contiguous}, {index_32_bits}>'
def __eq__(self, other):
return (
isinstance(other, CArray) and
self.dtype == other.dtype and
self.ndim == other.ndim and
self._c_contiguous == other._c_contiguous and
self._index_32_bits == other._index_32_bits
)
def __hash__(self):
return hash(
(self.dtype, self.ndim, self._c_contiguous, self._index_32_bits))
class SharedMem(ArrayBase):
def __init__(self, child_type, size):
if not (isinstance(size, int) or size is None):
raise 'size of shared_memory must be integer or `None`'
self._size = size
super().__init__(child_type, 1)
def declvar(self, x):
if self._size is None:
return f'extern __shared__ {self.child_type} {x}[]'
return f'__shared__ {self.child_type} {x}[{self._size}]'
class Ptr(ArrayBase):
def __init__(self, child_type):
super().__init__(child_type, 1)
def __str__(self):
return f'{self.child_type}*'
class Tuple(TypeBase):
def __init__(self, types):
self.types = types
def __str__(self):
types = ', '.join([str(t) for t in self.types])
return f'thrust::tuple<{types}>'
def __eq__(self, other):
return isinstance(other, Tuple) and self.types == other.types
void = Void()
bool_ = Scalar(numpy.bool_)
int32 = Scalar(numpy.int32)
uint32 = Scalar(numpy.uint32)
_suffix_literals_dict = {
'float64': '',
'float32': 'f',
'int64': 'll',
'int32': '',
'uint64': 'ull',
'uint32': 'u',
'bool': '',
}
def get_cuda_code_from_constant(x, ctype):
dtype = ctype.dtype
suffix_literal = _suffix_literals_dict.get(dtype.name)
if suffix_literal is not None:
s = str(x).lower()
return f'{s}{suffix_literal}'
ctype = str(ctype)
if dtype.kind == 'c':
return f'{ctype}({x.real}, {x.imag})'
if ' ' in ctype:
return f'({ctype}){x}'
return f'{ctype}({x})'
|
__str__
|
identifier_name
|
_cuda_types.py
|
import numpy
from cupy._core._scalar import get_typename
# Base class for cuda types.
class TypeBase:
def __str__(self):
raise NotImplementedError
def declvar(self, x):
return f'{self} {x}'
class Void(TypeBase):
def __init__(self):
pass
def __str__(self):
return 'void'
class Scalar(TypeBase):
def __init__(self, dtype):
self.dtype = numpy.dtype(dtype)
def __str__(self):
dtype = self.dtype
if dtype == numpy.float16:
# For the performance
dtype = numpy.dtype('float32')
return get_typename(dtype)
def __eq__(self, other):
return isinstance(other, Scalar) and self.dtype == other.dtype
def __hash__(self):
return hash(self.dtype)
class ArrayBase(TypeBase):
def __init__(self, child_type: TypeBase, ndim: int):
assert isinstance(child_type, TypeBase)
self.child_type = child_type
self.ndim = ndim
class CArray(ArrayBase):
def __init__(self, dtype, ndim, is_c_contiguous, index_32_bits):
self.dtype = dtype
self._c_contiguous = is_c_contiguous
self._index_32_bits = index_32_bits
super().__init__(Scalar(dtype), ndim)
@classmethod
def from_ndarray(cls, x):
return CArray(x.dtype, x.ndim, x._c_contiguous, x._index_32_bits)
def __str__(self):
ctype = get_typename(self.dtype)
c_contiguous = get_cuda_code_from_constant(self._c_contiguous, bool_)
index_32_bits = get_cuda_code_from_constant(self._index_32_bits, bool_)
return f'CArray<{ctype}, {self.ndim}, {c_contiguous}, {index_32_bits}>'
def __eq__(self, other):
return (
isinstance(other, CArray) and
self.dtype == other.dtype and
self.ndim == other.ndim and
self._c_contiguous == other._c_contiguous and
self._index_32_bits == other._index_32_bits
)
def __hash__(self):
return hash(
(self.dtype, self.ndim, self._c_contiguous, self._index_32_bits))
class SharedMem(ArrayBase):
def __init__(self, child_type, size):
if not (isinstance(size, int) or size is None):
raise 'size of shared_memory must be integer or `None`'
self._size = size
super().__init__(child_type, 1)
def declvar(self, x):
if self._size is None:
return f'extern __shared__ {self.child_type} {x}[]'
return f'__shared__ {self.child_type} {x}[{self._size}]'
class Ptr(ArrayBase):
def __init__(self, child_type):
super().__init__(child_type, 1)
def __str__(self):
return f'{self.child_type}*'
class Tuple(TypeBase):
def __init__(self, types):
self.types = types
def __str__(self):
types = ', '.join([str(t) for t in self.types])
return f'thrust::tuple<{types}>'
def __eq__(self, other):
return isinstance(other, Tuple) and self.types == other.types
void = Void()
bool_ = Scalar(numpy.bool_)
int32 = Scalar(numpy.int32)
uint32 = Scalar(numpy.uint32)
_suffix_literals_dict = {
'float64': '',
'float32': 'f',
'int64': 'll',
'int32': '',
'uint64': 'ull',
'uint32': 'u',
'bool': '',
}
def get_cuda_code_from_constant(x, ctype):
dtype = ctype.dtype
suffix_literal = _suffix_literals_dict.get(dtype.name)
if suffix_literal is not None:
s = str(x).lower()
return f'{s}{suffix_literal}'
ctype = str(ctype)
if dtype.kind == 'c':
return f'{ctype}({x.real}, {x.imag})'
if ' ' in ctype:
|
return f'{ctype}({x})'
|
return f'({ctype}){x}'
|
conditional_block
|
_cuda_types.py
|
import numpy
from cupy._core._scalar import get_typename
# Base class for cuda types.
class TypeBase:
def __str__(self):
raise NotImplementedError
def declvar(self, x):
return f'{self} {x}'
class Void(TypeBase):
def __init__(self):
pass
def __str__(self):
return 'void'
class Scalar(TypeBase):
def __init__(self, dtype):
self.dtype = numpy.dtype(dtype)
def __str__(self):
dtype = self.dtype
if dtype == numpy.float16:
# For the performance
dtype = numpy.dtype('float32')
return get_typename(dtype)
def __eq__(self, other):
return isinstance(other, Scalar) and self.dtype == other.dtype
def __hash__(self):
return hash(self.dtype)
class ArrayBase(TypeBase):
def __init__(self, child_type: TypeBase, ndim: int):
assert isinstance(child_type, TypeBase)
self.child_type = child_type
self.ndim = ndim
class CArray(ArrayBase):
def __init__(self, dtype, ndim, is_c_contiguous, index_32_bits):
|
super().__init__(Scalar(dtype), ndim)
@classmethod
def from_ndarray(cls, x):
return CArray(x.dtype, x.ndim, x._c_contiguous, x._index_32_bits)
def __str__(self):
ctype = get_typename(self.dtype)
c_contiguous = get_cuda_code_from_constant(self._c_contiguous, bool_)
index_32_bits = get_cuda_code_from_constant(self._index_32_bits, bool_)
return f'CArray<{ctype}, {self.ndim}, {c_contiguous}, {index_32_bits}>'
def __eq__(self, other):
return (
isinstance(other, CArray) and
self.dtype == other.dtype and
self.ndim == other.ndim and
self._c_contiguous == other._c_contiguous and
self._index_32_bits == other._index_32_bits
)
def __hash__(self):
return hash(
(self.dtype, self.ndim, self._c_contiguous, self._index_32_bits))
class SharedMem(ArrayBase):
def __init__(self, child_type, size):
if not (isinstance(size, int) or size is None):
raise 'size of shared_memory must be integer or `None`'
self._size = size
super().__init__(child_type, 1)
def declvar(self, x):
if self._size is None:
return f'extern __shared__ {self.child_type} {x}[]'
return f'__shared__ {self.child_type} {x}[{self._size}]'
class Ptr(ArrayBase):
def __init__(self, child_type):
super().__init__(child_type, 1)
def __str__(self):
return f'{self.child_type}*'
class Tuple(TypeBase):
def __init__(self, types):
self.types = types
def __str__(self):
types = ', '.join([str(t) for t in self.types])
return f'thrust::tuple<{types}>'
def __eq__(self, other):
return isinstance(other, Tuple) and self.types == other.types
void = Void()
bool_ = Scalar(numpy.bool_)
int32 = Scalar(numpy.int32)
uint32 = Scalar(numpy.uint32)
_suffix_literals_dict = {
'float64': '',
'float32': 'f',
'int64': 'll',
'int32': '',
'uint64': 'ull',
'uint32': 'u',
'bool': '',
}
def get_cuda_code_from_constant(x, ctype):
dtype = ctype.dtype
suffix_literal = _suffix_literals_dict.get(dtype.name)
if suffix_literal is not None:
s = str(x).lower()
return f'{s}{suffix_literal}'
ctype = str(ctype)
if dtype.kind == 'c':
return f'{ctype}({x.real}, {x.imag})'
if ' ' in ctype:
return f'({ctype}){x}'
return f'{ctype}({x})'
|
self.dtype = dtype
self._c_contiguous = is_c_contiguous
self._index_32_bits = index_32_bits
|
random_line_split
|
wc.dom.messageBox.test.js
|
define(["intern!object", "intern/chai!assert", "wc/dom/messageBox", "wc/dom/classList", "wc/ui/icon", "wc/i18n/i18n", "./resources/test.utils!"],
function (registerSuite, assert, controller, classList, icon, i18n, testutils) {
"use strict";
/*
* Unit tests for wc/dom/messageBox
*/
var testHolder,
testBoxId = "messageboxtest-box1",
testMessageBoxHTML = "<section id='" + testBoxId + "' class='wc-messagebox'><h1></h1><div class='messages'></div></section>",
testContent;
function
|
(type) {
var box = document.getElementById(testBoxId),
iconName, title, boxHeading;
if (!box) {
testHolder.insertAdjacentHTML("beforeend", testMessageBoxHTML);
box = document.getElementById(testBoxId);
}
boxHeading = box.firstElementChild;
if (type && !icon.get(boxHeading)) {
classList.add(box, type);
switch (type) {
case "wc-messagebox-type-error" :
iconName = "fa-minus-circle";
title = i18n.get("messagetitle_error");
break;
case "wc-messagebox-type-warn" :
iconName = "fa-exclamation-triangle";
title = i18n.get("messagetitle_warn");
break;
case "wc-messagebox-type-info" :
iconName = "fa-info-circle";
title = i18n.get("messagetitle_info");
break;
case "wc-messagebox-type-success" :
iconName = "fa-check-circle";
title = i18n.get("messagetitle_success");
break;
}
if (iconName) {
icon.add(boxHeading, iconName);
icon.add(boxHeading, "fa-fw");
boxHeading.insertAdjacentHTML("beforeend", "<span>" + title + "</span>");
}
}
return box;
}
registerSuite({
name: "wc/dom/messageBox",
setup: function() {
testHolder = testutils.getTestHolder();
},
beforeEach: function() {
testHolder.innerHTML = testContent;
},
afterEach: function() {
testHolder.innerHTML = "";
},
testGetWidget: function() {
var widget = controller.getWidget();
assert.isOk(widget);
if (widget.constructor && widget.constructor.name) {
assert.strictEqual(widget.constructor.name, "Widget");
} else {
// rough but (barely) adequate test
// once we stop supporting IE we should be able to remove this
assert.isTrue(!!widget.isOneOfMe && !!widget.constructor.isOneOfMe);
}
assert.isTrue(widget.isOneOfMe(getTestBox()));
},
testIsOneOfMe_noArgs: function() {
assert.isFalse(controller.isOneOfMe());
},
testIsOneOfMe_notElementArgs: function() {
assert.isFalse(controller.isOneOfMe({}));
},
testIsOneOfMe_elementNotMessageBox: function() {
var element = document.createElement("span");
testHolder.appendChild(element);
assert.isFalse(controller.isOneOfMe(element));
},
testIsOneOfMe_generic: function() {
assert.isTrue(controller.isOneOfMe(getTestBox()));
},
testGet_noContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(), box);
},
testGet_inContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(testHolder), box);
},
testGet_noContainerAll: function() {
var box = getTestBox(),
found = controller.get(null, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGet_inContainerAll: function() {
var box = getTestBox(),
found = controller.get(testHolder, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGetErrorBoxWidget: function() {
var widget = controller.getErrorBoxWidget(),
box = getTestBox("wc-messagebox-type-error");
assert.isOk(widget);
assert.isTrue(widget.isOneOfMe(box));
}
});
}
);
|
getTestBox
|
identifier_name
|
wc.dom.messageBox.test.js
|
define(["intern!object", "intern/chai!assert", "wc/dom/messageBox", "wc/dom/classList", "wc/ui/icon", "wc/i18n/i18n", "./resources/test.utils!"],
function (registerSuite, assert, controller, classList, icon, i18n, testutils) {
"use strict";
/*
* Unit tests for wc/dom/messageBox
*/
var testHolder,
testBoxId = "messageboxtest-box1",
testMessageBoxHTML = "<section id='" + testBoxId + "' class='wc-messagebox'><h1></h1><div class='messages'></div></section>",
testContent;
function getTestBox(type) {
var box = document.getElementById(testBoxId),
iconName, title, boxHeading;
if (!box) {
testHolder.insertAdjacentHTML("beforeend", testMessageBoxHTML);
box = document.getElementById(testBoxId);
}
boxHeading = box.firstElementChild;
if (type && !icon.get(boxHeading)) {
classList.add(box, type);
switch (type) {
case "wc-messagebox-type-error" :
iconName = "fa-minus-circle";
title = i18n.get("messagetitle_error");
break;
case "wc-messagebox-type-warn" :
iconName = "fa-exclamation-triangle";
title = i18n.get("messagetitle_warn");
break;
case "wc-messagebox-type-info" :
iconName = "fa-info-circle";
title = i18n.get("messagetitle_info");
break;
case "wc-messagebox-type-success" :
iconName = "fa-check-circle";
title = i18n.get("messagetitle_success");
break;
}
if (iconName)
|
}
return box;
}
registerSuite({
name: "wc/dom/messageBox",
setup: function() {
testHolder = testutils.getTestHolder();
},
beforeEach: function() {
testHolder.innerHTML = testContent;
},
afterEach: function() {
testHolder.innerHTML = "";
},
testGetWidget: function() {
var widget = controller.getWidget();
assert.isOk(widget);
if (widget.constructor && widget.constructor.name) {
assert.strictEqual(widget.constructor.name, "Widget");
} else {
// rough but (barely) adequate test
// once we stop supporting IE we should be able to remove this
assert.isTrue(!!widget.isOneOfMe && !!widget.constructor.isOneOfMe);
}
assert.isTrue(widget.isOneOfMe(getTestBox()));
},
testIsOneOfMe_noArgs: function() {
assert.isFalse(controller.isOneOfMe());
},
testIsOneOfMe_notElementArgs: function() {
assert.isFalse(controller.isOneOfMe({}));
},
testIsOneOfMe_elementNotMessageBox: function() {
var element = document.createElement("span");
testHolder.appendChild(element);
assert.isFalse(controller.isOneOfMe(element));
},
testIsOneOfMe_generic: function() {
assert.isTrue(controller.isOneOfMe(getTestBox()));
},
testGet_noContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(), box);
},
testGet_inContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(testHolder), box);
},
testGet_noContainerAll: function() {
var box = getTestBox(),
found = controller.get(null, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGet_inContainerAll: function() {
var box = getTestBox(),
found = controller.get(testHolder, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGetErrorBoxWidget: function() {
var widget = controller.getErrorBoxWidget(),
box = getTestBox("wc-messagebox-type-error");
assert.isOk(widget);
assert.isTrue(widget.isOneOfMe(box));
}
});
}
);
|
{
icon.add(boxHeading, iconName);
icon.add(boxHeading, "fa-fw");
boxHeading.insertAdjacentHTML("beforeend", "<span>" + title + "</span>");
}
|
conditional_block
|
wc.dom.messageBox.test.js
|
define(["intern!object", "intern/chai!assert", "wc/dom/messageBox", "wc/dom/classList", "wc/ui/icon", "wc/i18n/i18n", "./resources/test.utils!"],
function (registerSuite, assert, controller, classList, icon, i18n, testutils) {
"use strict";
/*
* Unit tests for wc/dom/messageBox
*/
var testHolder,
testBoxId = "messageboxtest-box1",
testMessageBoxHTML = "<section id='" + testBoxId + "' class='wc-messagebox'><h1></h1><div class='messages'></div></section>",
testContent;
function getTestBox(type) {
var box = document.getElementById(testBoxId),
iconName, title, boxHeading;
if (!box) {
testHolder.insertAdjacentHTML("beforeend", testMessageBoxHTML);
box = document.getElementById(testBoxId);
}
boxHeading = box.firstElementChild;
if (type && !icon.get(boxHeading)) {
classList.add(box, type);
switch (type) {
case "wc-messagebox-type-error" :
iconName = "fa-minus-circle";
title = i18n.get("messagetitle_error");
break;
case "wc-messagebox-type-warn" :
iconName = "fa-exclamation-triangle";
title = i18n.get("messagetitle_warn");
break;
case "wc-messagebox-type-info" :
iconName = "fa-info-circle";
title = i18n.get("messagetitle_info");
break;
case "wc-messagebox-type-success" :
iconName = "fa-check-circle";
title = i18n.get("messagetitle_success");
break;
}
if (iconName) {
icon.add(boxHeading, iconName);
icon.add(boxHeading, "fa-fw");
boxHeading.insertAdjacentHTML("beforeend", "<span>" + title + "</span>");
}
}
return box;
}
registerSuite({
name: "wc/dom/messageBox",
setup: function() {
testHolder = testutils.getTestHolder();
},
beforeEach: function() {
testHolder.innerHTML = testContent;
},
afterEach: function() {
testHolder.innerHTML = "";
},
testGetWidget: function() {
var widget = controller.getWidget();
assert.isOk(widget);
if (widget.constructor && widget.constructor.name) {
assert.strictEqual(widget.constructor.name, "Widget");
} else {
// rough but (barely) adequate test
// once we stop supporting IE we should be able to remove this
assert.isTrue(!!widget.isOneOfMe && !!widget.constructor.isOneOfMe);
}
assert.isTrue(widget.isOneOfMe(getTestBox()));
},
testIsOneOfMe_noArgs: function() {
assert.isFalse(controller.isOneOfMe());
},
testIsOneOfMe_notElementArgs: function() {
assert.isFalse(controller.isOneOfMe({}));
},
testIsOneOfMe_elementNotMessageBox: function() {
var element = document.createElement("span");
testHolder.appendChild(element);
assert.isFalse(controller.isOneOfMe(element));
},
testIsOneOfMe_generic: function() {
assert.isTrue(controller.isOneOfMe(getTestBox()));
},
testGet_noContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(), box);
|
testGet_inContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(testHolder), box);
},
testGet_noContainerAll: function() {
var box = getTestBox(),
found = controller.get(null, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGet_inContainerAll: function() {
var box = getTestBox(),
found = controller.get(testHolder, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGetErrorBoxWidget: function() {
var widget = controller.getErrorBoxWidget(),
box = getTestBox("wc-messagebox-type-error");
assert.isOk(widget);
assert.isTrue(widget.isOneOfMe(box));
}
});
}
);
|
},
|
random_line_split
|
wc.dom.messageBox.test.js
|
define(["intern!object", "intern/chai!assert", "wc/dom/messageBox", "wc/dom/classList", "wc/ui/icon", "wc/i18n/i18n", "./resources/test.utils!"],
function (registerSuite, assert, controller, classList, icon, i18n, testutils) {
"use strict";
/*
* Unit tests for wc/dom/messageBox
*/
var testHolder,
testBoxId = "messageboxtest-box1",
testMessageBoxHTML = "<section id='" + testBoxId + "' class='wc-messagebox'><h1></h1><div class='messages'></div></section>",
testContent;
function getTestBox(type)
|
registerSuite({
name: "wc/dom/messageBox",
setup: function() {
testHolder = testutils.getTestHolder();
},
beforeEach: function() {
testHolder.innerHTML = testContent;
},
afterEach: function() {
testHolder.innerHTML = "";
},
testGetWidget: function() {
var widget = controller.getWidget();
assert.isOk(widget);
if (widget.constructor && widget.constructor.name) {
assert.strictEqual(widget.constructor.name, "Widget");
} else {
// rough but (barely) adequate test
// once we stop supporting IE we should be able to remove this
assert.isTrue(!!widget.isOneOfMe && !!widget.constructor.isOneOfMe);
}
assert.isTrue(widget.isOneOfMe(getTestBox()));
},
testIsOneOfMe_noArgs: function() {
assert.isFalse(controller.isOneOfMe());
},
testIsOneOfMe_notElementArgs: function() {
assert.isFalse(controller.isOneOfMe({}));
},
testIsOneOfMe_elementNotMessageBox: function() {
var element = document.createElement("span");
testHolder.appendChild(element);
assert.isFalse(controller.isOneOfMe(element));
},
testIsOneOfMe_generic: function() {
assert.isTrue(controller.isOneOfMe(getTestBox()));
},
testGet_noContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(), box);
},
testGet_inContainer: function() {
var box = getTestBox(); // set up the box to find
assert.equal(controller.get(testHolder), box);
},
testGet_noContainerAll: function() {
var box = getTestBox(),
found = controller.get(null, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGet_inContainerAll: function() {
var box = getTestBox(),
found = controller.get(testHolder, true);
assert.equal(found.length, 1);
assert.equal(found[0], box);
},
testGetErrorBoxWidget: function() {
var widget = controller.getErrorBoxWidget(),
box = getTestBox("wc-messagebox-type-error");
assert.isOk(widget);
assert.isTrue(widget.isOneOfMe(box));
}
});
}
);
|
{
var box = document.getElementById(testBoxId),
iconName, title, boxHeading;
if (!box) {
testHolder.insertAdjacentHTML("beforeend", testMessageBoxHTML);
box = document.getElementById(testBoxId);
}
boxHeading = box.firstElementChild;
if (type && !icon.get(boxHeading)) {
classList.add(box, type);
switch (type) {
case "wc-messagebox-type-error" :
iconName = "fa-minus-circle";
title = i18n.get("messagetitle_error");
break;
case "wc-messagebox-type-warn" :
iconName = "fa-exclamation-triangle";
title = i18n.get("messagetitle_warn");
break;
case "wc-messagebox-type-info" :
iconName = "fa-info-circle";
title = i18n.get("messagetitle_info");
break;
case "wc-messagebox-type-success" :
iconName = "fa-check-circle";
title = i18n.get("messagetitle_success");
break;
}
if (iconName) {
icon.add(boxHeading, iconName);
icon.add(boxHeading, "fa-fw");
boxHeading.insertAdjacentHTML("beforeend", "<span>" + title + "</span>");
}
}
return box;
}
|
identifier_body
|
ProxyStore.js
|
/**
* ProxyStore is a superclass of {@link Ext.data.Store} and {@link Ext.data.BufferedStore}. It's never used directly,
* but offers a set of methods used by both of those subclasses.
*
* We've left it here in the docs for reference purposes, but unless you need to make a whole new type of Store, what
* you're probably looking for is {@link Ext.data.Store}. If you're still interested, here's a brief description of what
* ProxyStore is and is not.
*
* ProxyStore provides the basic configuration for anything that can be considered a Store. It expects to be
* given a {@link Ext.data.Model Model} that represents the type of data in the Store. It also expects to be given a
* {@link Ext.data.proxy.Proxy Proxy} that handles the loading of data into the Store.
*
* ProxyStore provides a few helpful methods such as {@link #method-load} and {@link #sync}, which load and save data
* respectively, passing the requests through the configured {@link #proxy}.
*
* Built-in Store subclasses add extra behavior to each of these functions. Note also that each ProxyStore subclass
* has its own way of storing data - in {@link Ext.data.Store} the data is saved as a flat {@link Ext.util.Collection Collection},
* whereas in {@link Ext.data.BufferedStore BufferedStore} we use a {@link Ext.data.PageMap} to maintain a client side cache of pages of records.
*
* The store provides filtering and sorting support. This sorting/filtering can happen on the client side
* or can be completed on the server. This is controlled by the {@link Ext.data.Store#remoteSort remoteSort} and
* {@link Ext.data.Store#remoteFilter remoteFilter} config options. For more information see the {@link #method-sort} and
* {@link Ext.data.Store#filter filter} methods.
*/
Ext.define('Ext.data.ProxyStore', {
extend: 'Ext.data.AbstractStore',
requires: [
'Ext.data.Model',
'Ext.data.proxy.Proxy',
'Ext.data.proxy.Memory',
'Ext.data.operation.*'
],
config: {
// @cmd-auto-dependency {aliasPrefix: "model.", mvc: true, blame: "all"}
/**
* @cfg {String/Ext.data.Model} model
* Name of the {@link Ext.data.Model Model} associated with this store. See
* {@link Ext.data.Model#entityName}.
*
* May also be the actual Model subclass.
*
* This config is required for the store to be able to read data unless you have defined
* the {@link #fields} config which will create an anonymous `Ext.data.Model`.
*/
model: undefined,
// @cmd-auto-dependency {aliasPrefix: "data.field."}
/**
* @cfg {Object[]} fields
* This may be used in place of specifying a {@link #model} configuration. The fields should be a
* set of {@link Ext.data.Field} configuration objects. The store will automatically create a {@link Ext.data.Model}
* with these fields. In general this configuration option should only be used for simple stores like
* a two-field store of ComboBox. For anything more complicated, such as specifying a particular id property or
* associations, a {@link Ext.data.Model} should be defined and specified for the {@link #model}
* config.
* @since 2.3.0
*/
fields: null,
// @cmd-auto-dependency {aliasPrefix : "proxy."}
/**
* @cfg {String/Ext.data.proxy.Proxy/Object} proxy
* The Proxy to use for this Store. This can be either a string, a config object or a Proxy instance -
* see {@link #setProxy} for details.
* @since 1.1.0
*/
proxy: undefined,
/**
* @cfg {Boolean/Object} autoLoad
* If data is not specified, and if autoLoad is true or an Object, this store's load method is automatically called
* after creation. If the value of autoLoad is an Object, this Object will be passed to the store's load method.
*
* It's important to note that {@link Ext.data.TreeStore Tree Stores} will
* load regardless of autoLoad's value if expand is set to true on the
* {@link Ext.data.TreeStore#root root node}.
*
* @since 2.3.0
*/
autoLoad: undefined,
/**
* @cfg {Boolean} autoSync
* True to automatically sync the Store with its Proxy after every edit to one of its Records. Defaults to false.
*/
autoSync: false,
/**
* @cfg {String} batchUpdateMode
* Sets the updating behavior based on batch synchronization. 'operation' (the default) will update the Store's
* internal representation of the data after each operation of the batch has completed, 'complete' will wait until
* the entire batch has been completed before updating the Store's data. 'complete' is a good choice for local
* storage proxies, 'operation' is better for remote proxies, where there is a comparatively high latency.
*/
batchUpdateMode: 'operation',
/**
* @cfg {Boolean} sortOnLoad
* If true, any sorters attached to this Store will be run after loading data, before the datachanged event is fired.
* Defaults to true, ignored if {@link Ext.data.Store#remoteSort remoteSort} is true
*/
sortOnLoad: true,
/**
* @cfg {Boolean} [trackRemoved=true]
* This config controls whether removed records are remembered by this store for
* later saving to the server.
*/
trackRemoved: true,
/**
 * @private
 * The delay time to kick off the initial autoLoad task
 */
autoLoadDelay: 1
},
// Class-creation hook: when a subclass names its model with a string, defer
// the subclass's creation until that model class has been loaded.
onClassExtended: function(cls, data, hooks) {
    var model = data.model,
        onBeforeClassCreated;
    if (typeof model === 'string') {
        // Wrap the framework's onBeforeCreated callback so it only runs once
        // the named model class has been required and is available.
        onBeforeClassCreated = hooks.onBeforeCreated;
        hooks.onBeforeCreated = function() {
            var me = this,
                args = arguments;
            Ext.require(model, function() {
                onBeforeClassCreated.apply(me, args);
            });
        };
    }
},
/**
* @private
* @property {Boolean}
* The class name of the model that this store uses if no explicit {@link #model} is given
*/
implicitModel: 'Ext.data.Model',
blockLoadCounter: 0,
loadsWhileBlocked: 0,
/**
* @property {Object} lastOptions
* Property to hold the last options from a {@link #method-load} method call. This object is used for the {@link #method-reload}
* to reuse the same options. Please see {@link #method-reload} for a simple example on how to use the lastOptions property.
*/
/**
* @property {Number} autoSyncSuspended
* A counter to track suspensions.
* @private
*/
autoSyncSuspended: 0,
//documented above
constructor: function(config) {
var me = this;
// <debug>
var configModel = me.model;
// </debug>
/**
* @event beforeload
* Fires before a request is made for a new data object. If the beforeload handler returns false the load
* action will be canceled.
* @param {Ext.data.Store} store This Store
* @param {Ext.data.operation.Operation} operation The Ext.data.operation.Operation object that will be passed to the Proxy to
* load the Store
* @since 1.1.0
*/
/**
* @event load
* Fires whenever the store reads data from a remote data source.
* @param {Ext.data.Store} this
* @param {Ext.data.Model[]} records An array of records
* @param {Boolean} successful True if the operation was successful.
* @since 1.1.0
*/
/**
* @event write
* Fires whenever a successful write has been made via the configured {@link #proxy Proxy}
* @param {Ext.data.Store} store This Store
* @param {Ext.data.operation.Operation} operation The {@link Ext.data.operation.Operation Operation} object that was used in
* the write
* @since 3.4.0
*/
/**
* @event beforesync
* Fired before a call to {@link #sync} is executed. Return false from any listener to cancel the sync
* @param {Object} options Hash of all records to be synchronized, broken down into create, update and destroy
*/
/**
* @event metachange
* Fires when this store's underlying reader (available via the proxy) provides new metadata.
* Metadata usually consists of new field definitions, but can include any configuration data
* required by an application, and can be processed as needed in the event handler.
* This event is currently only fired for JsonReaders.
* @param {Ext.data.Store} this
* @param {Object} meta The JSON metadata
* @since 1.1.0
*/
/**
* Temporary cache in which removed model instances are kept until successfully
* synchronised with a Proxy, at which point this is cleared.
*
* This cache is maintained unless you set `trackRemoved` to `false`.
*
* @protected
* @property {Ext.data.Model[]} removed
*/
me.removed = [];
me.blockLoad();
me.callParent(arguments);
me.unblockLoad();
// <debug>
if (!me.getModel() && me.useModelWarning !== false && me.getStoreId() !== 'ext-empty-store') {
// There are a number of ways things could have gone wrong, try to give as much information as possible
var logMsg = [
Ext.getClassName(me) || 'Store',
' created with no model.'
];
if (typeof configModel === 'string') {
logMsg.push(" The name '", configModel, "'", ' does not correspond to a valid model.');
}
Ext.log.warn(logMsg.join(''));
}
// </debug>
},
// Config updater for autoLoad. When truthy, schedules the initial load on a
// DelayedTask so it fires after the store (and typically its view) is built.
updateAutoLoad: function(autoLoad) {
    var me = this,
        task;
    // Ensure the data collection is set up
    me.getData();
    if (autoLoad) {
        // Lazily create a single reusable DelayedTask for the deferred load.
        task = me.loadTask || (me.loadTask = new Ext.util.DelayedTask(null, null, null, null, false));
        // Defer the load until the store (and probably the view) is fully constructed.
        // If autoLoad is an object it is forwarded as the load() options.
        task.delay(me.autoLoadDelay, me.attemptLoad, me, Ext.isObject(autoLoad) ? [autoLoad] : undefined);
    }
},
/**
* Returns the total number of {@link Ext.data.Model Model} instances that the {@link Ext.data.proxy.Proxy Proxy}
* indicates exist. This will usually differ from {@link #getCount} when using paging - getCount returns the
* number of records loaded into the Store at the moment, getTotalCount returns the number of records that
* could be loaded into the Store if the Store contained all data
* @return {Number} The total number of Model instances available via the Proxy. 0 returned if
* no value has been set via the reader.
*/
getTotalCount: function() {
return this.totalCount || 0;
},
// Config applier for fields: when a fields array is given, build an anonymous
// implicit model from it (see createImplicitModel). Note it returns
// undefined, so the raw fields array itself is not retained as config state.
applyFields: function(fields) {
    if (fields) {
        this.createImplicitModel(fields);
    }
},
// Config applier for model: resolve a string/entity reference through the
// schema, or fall back to the model created from the fields config.
applyModel: function(model) {
    if (model) {
        model = Ext.data.schema.Schema.lookupEntity(model);
    }
    // If no model, ensure that the fields config is converted to a model.
    else {
        this.getFields();
        // NOTE(review): createImplicitModel() has no return value (it calls
        // setModel internally), so this fallback relies on getFields()/
        // getModel() having already resolved the model — confirm.
        model = this.getModel() || this.createImplicitModel();
    }
    return model;
},
// Config applier for proxy. Accepts a Proxy instance, an alias string, or a
// config object; falls back to the model's proxy, then a memory proxy.
// Proxies created here are flagged autoCreated so onDestroy can destroy them.
applyProxy: function(proxy) {
    var model = this.getModel();
    // A null proxy is passed through untouched (used by onDestroy).
    if (proxy !== null) {
        if (proxy) {
            if (proxy.isProxy) {
                // Already an instance: just bind our model to it.
                proxy.setModel(model);
            } else {
                // Normalize a string alias into a config object.
                if (Ext.isString(proxy)) {
                    proxy = {
                        type: proxy,
                        model: model
                    };
                } else if (!proxy.model) {
                    proxy = Ext.apply({
                        model: model
                    }, proxy);
                }
                proxy = Ext.createByAlias('proxy.' + proxy.type, proxy);
                proxy.autoCreated = true;
            }
        } else if (model) {
            // No proxy given: inherit the model's proxy.
            proxy = model.getProxy();
        }
        if (!proxy) {
            // Last resort: an in-memory proxy so the store is always usable.
            proxy = Ext.createByAlias('proxy.memory');
            proxy.autoCreated = true;
        }
    }
    return proxy;
},
applyState: function (state) {
|
me.unblockLoad(doLoad);
},
updateProxy: function(proxy, oldProxy) {
this.proxyListeners = Ext.destroy(this.proxyListeners);
},
updateTrackRemoved: function (track) {
this.cleanRemoved();
this.removed = track ? [] : null;
},
/**
* @private
*/
onMetaChange: function(proxy, meta) {
this.fireEvent('metachange', this, meta);
},
//saves any phantom records
// Saves a single phantom record built from the given data by issuing a
// 'create' operation through the proxy. Returns the executing operation.
create: function(data, options) {
    var me = this,
        Model = me.getModel(),
        instance = new Model(data),
        operation;
    // Copy the options so the caller's object is not mutated.
    options = Ext.apply({}, options);
    if (!options.records) {
        options.records = [instance];
    }
    // Route the proxy's callback through onProxyWrite to update store state.
    options.internalScope = me;
    options.internalCallback = me.onProxyWrite;
    operation = me.createOperation('create', options);
    return operation.execute();
},
read: function() {
return this.load.apply(this, arguments);
},
// Saves dirty (updated, non-phantom) records by issuing an 'update'
// operation through the proxy. Returns the executing operation.
update: function(options) {
    var me = this,
        operation;
    // Copy the options so the caller's object is not mutated.
    options = Ext.apply({}, options);
    if (!options.records) {
        options.records = me.getUpdatedRecords();
    }
    // Route the proxy's callback through onProxyWrite to update store state.
    options.internalScope = me;
    options.internalCallback = me.onProxyWrite;
    operation = me.createOperation('update', options);
    return operation.execute();
},
/**
* @private
* Callback for any write Operation over the Proxy. Updates the Store's MixedCollection to reflect
* the updates provided by the Proxy
*/
onProxyWrite: function(operation) {
    var me = this,
        success = operation.wasSuccessful(),
        records = operation.getRecords();
    // Dispatch to the per-action handler; subclasses implement these
    // (onCreateRecords/onUpdateRecords are Ext.emptyFn here).
    switch (operation.getAction()) {
        case 'create':
            me.onCreateRecords(records, operation, success);
            break;
        case 'update':
            me.onUpdateRecords(records, operation, success);
            break;
        case 'destroy':
            me.onDestroyRecords(records, operation, success);
            break;
    }
    // Only announce the write once the proxy reports success.
    if (success) {
        me.fireEvent('write', me, operation);
        me.fireEvent('datachanged', me);
    }
},
// may be implemented by store subclasses
onCreateRecords: Ext.emptyFn,
// may be implemented by store subclasses
onUpdateRecords: Ext.emptyFn,
/**
* Removes any records when a write is returned from the server.
* @private
* @param {Ext.data.Model[]} records The array of removed records
* @param {Ext.data.operation.Operation} operation The operation that just completed
* @param {Boolean} success True if the operation was successful
*/
onDestroyRecords: function(records, operation, success) {
if (success) {
this.cleanRemoved();
}
},
// tells the attached proxy to destroy the given records
// @since 3.4.0
// Asks the proxy to destroy records (defaulting to the tracked removed
// records) via a 'destroy' operation. Returns the executing operation.
erase: function(options) {
    var me = this,
        operation;
    // Copy the options so the caller's object is not mutated.
    options = Ext.apply({}, options);
    if (!options.records) {
        options.records = me.getRemovedRecords();
    }
    // Route the proxy's callback through onProxyWrite to update store state.
    options.internalScope = me;
    options.internalCallback = me.onProxyWrite;
    operation = me.createOperation('destroy', options);
    return operation.execute();
},
/**
* @private
* Attached as the 'operationcomplete' event listener to a proxy's Batch object. By default just calls through
* to onProxyWrite.
*/
onBatchOperationComplete: function(batch, operation) {
return this.onProxyWrite(operation);
},
/**
* @private
* Attached as the 'complete' event listener to a proxy's Batch object. Iterates over the batch operations
* and updates the Store's internal data MixedCollection.
*/
onBatchComplete: function(batch, operation) {
    var me = this,
        operations = batch.operations,
        length = operations.length,
        i;
    // In 'complete' mode the per-operation updates were skipped, so apply
    // them all now with events suspended to avoid a storm of updates.
    // NOTE(review): reads me.batchUpdateMode directly although it is declared
    // as a config (getBatchUpdateMode) — confirm the config system publishes
    // this property on the instance.
    if (me.batchUpdateMode !== 'operation') {
        me.suspendEvents();
        for (i = 0; i < length; i++) {
            me.onProxyWrite(operations[i]);
        }
        me.resumeEvents();
    }
    me.isSyncing = false;
    me.fireEvent('datachanged', me);
},
/**
* @private
*/
// Intentionally a no-op: subclasses/applications decide how to recover from
// a failed batch operation. The commented examples show the two options.
onBatchException: function(batch, operation) {
    // //decide what to do... could continue with the next operation
    // batch.start();
    //
    // //or retry the last operation
    // batch.retry();
},
/**
* @private
* Filter function for new records.
*/
filterNew: function(item) {
// only want phantom records that are valid
return item.phantom === true && item.isValid();
},
/**
* Returns all Model instances that are either currently a phantom (e.g. have no id), or have an ID but have not
* yet been saved on this Store (this happens when adding a non-phantom record from another Store into this one)
* @return {Ext.data.Model[]} The Model instances
*/
getNewRecords: function() {
return [];
},
/**
* Returns all valid, non-phantom Model instances that have been updated in the Store but not yet synchronized with the Proxy.
* @return {Ext.data.Model[]} The updated Model instances
*/
getUpdatedRecords: function() {
return [];
},
/**
* Gets all {@link Ext.data.Model records} added or updated since the last commit. Note that the order of records
* returned is not deterministic and does not indicate the order in which records were modified. Note also that
* removed records are not included (use {@link #getRemovedRecords} for that).
* @return {Ext.data.Model[]} The added and updated Model instances
*/
getModifiedRecords : function(){
return [].concat(this.getNewRecords(), this.getUpdatedRecords());
},
/**
* @private
* Filter function for updated records.
*/
filterUpdated: function(item) {
// only want dirty records, not phantoms that are valid
return item.dirty === true && item.phantom !== true && item.isValid();
},
/**
* Returns any records that have been removed from the store but not yet destroyed on the proxy.
* @return {Ext.data.Model[]} The removed Model instances
*/
getRemovedRecords: function() {
return this.removed;
},
/**
* Synchronizes the store with its {@link #proxy}. This asks the proxy to batch together any new, updated
* and deleted records in the store, updating the store's internal representation of the records
* as each operation completes.
*
* @param {Object} [options] Object containing one or more properties supported by the sync method (these get
* passed along to the underlying proxy's {@link Ext.data.Proxy#batch batch} method):
*
* @param {Ext.data.Batch/Object} [options.batch] A {@link Ext.data.Batch} object (or batch config to apply
* to the created batch). If unspecified a default batch will be auto-created as needed.
*
* @param {Function} [options.callback] The function to be called upon completion of the sync.
* The callback is called regardless of success or failure and is passed the following parameters:
* @param {Ext.data.Batch} options.callback.batch The {@link Ext.data.Batch batch} that was processed,
* containing all operations in their current state after processing
* @param {Object} options.callback.options The options argument that was originally passed into sync
*
* @param {Function} [options.success] The function to be called upon successful completion of the sync. The
* success function is called only if no exceptions were reported in any operations. If one or more exceptions
* occurred then the failure function will be called instead. The success function is called
* with the following parameters:
* @param {Ext.data.Batch} options.success.batch The {@link Ext.data.Batch batch} that was processed,
* containing all operations in their current state after processing
* @param {Object} options.success.options The options argument that was originally passed into sync
*
* @param {Function} [options.failure] The function to be called upon unsuccessful completion of the sync. The
* failure function is called when one or more operations returns an exception during processing (even if some
* operations were also successful). In this case you can check the batch's {@link Ext.data.Batch#exceptions
* exceptions} array to see exactly which operations had exceptions. The failure function is called with the
* following parameters:
* @param {Ext.data.Batch} options.failure.batch The {@link Ext.data.Batch} that was processed, containing all
* operations in their current state after processing
* @param {Object} options.failure.options The options argument that was originally passed into sync
*
* @param {Object} [options.params] Additional params to send during the sync Operation(s).
*
* @param {Object} [options.scope] The scope in which to execute any callbacks (i.e. the `this` object inside
* the callback, success and/or failure functions). Defaults to the store's proxy.
*
* @return {Ext.data.Store} this
*/
sync: function(options) {
var me = this,
operations = {},
toCreate = me.getNewRecords(),
toUpdate = me.getUpdatedRecords(),
toDestroy = me.getRemovedRecords(),
needsSync = false;
//<debug>
if (me.isSyncing) {
Ext.log.warn('Sync called while a sync operation is in progress. Consider configuring autoSync as false.');
}
//</debug>
me.needsSync = false;
if (toCreate.length > 0) {
operations.create = toCreate;
needsSync = true;
}
if (toUpdate.length > 0) {
operations.update = toUpdate;
needsSync = true;
}
if (toDestroy.length > 0) {
operations.destroy = toDestroy;
needsSync = true;
}
if (needsSync && me.fireEvent('beforesync', operations) !== false) {
me.isSyncing = true;
options = options || {};
me.proxy.batch(Ext.apply(options, {
operations: operations,
listeners: me.getBatchListeners()
}));
}
return me;
},
/**
* @private
* Returns an object which is passed in as the listeners argument to proxy.batch inside this.sync.
* This is broken out into a separate function to allow for customisation of the listeners
* @return {Object} The listeners object
*/
getBatchListeners: function() {
    var me = this,
        listeners = {
            scope: me,
            exception: me.onBatchException,
            complete: me.onBatchComplete
        };
    // In 'operation' mode, also update store state after each operation.
    // NOTE(review): reads me.batchUpdateMode directly although it is declared
    // as a config (getBatchUpdateMode) — confirm this property is published.
    if (me.batchUpdateMode === 'operation') {
        listeners.operationcomplete = me.onBatchOperationComplete;
    }
    return listeners;
},
/**
* Saves all pending changes via the configured {@link #proxy}. Use {@link #sync} instead.
* @deprecated 4.0.0 Will be removed in the next major version
*/
save: function() {
return this.sync.apply(this, arguments);
},
/**
* Loads the Store using its configured {@link #proxy}.
* @param {Object} [options] This is passed into the {@link Ext.data.operation.Operation Operation}
* object that is created and then sent to the proxy's {@link Ext.data.proxy.Proxy#read} function
*
* @return {Ext.data.Store} this
* @since 1.1.0
*/
load: function(options) {
// Prevent loads from being triggered while applying initial configs
if (this.isLoadBlocked()) {
return;
}
var me = this,
operation;
me.setLoadOptions(options);
if (me.getRemoteSort() && options.sorters) {
me.fireEvent('beforesort', me, options.sorters);
}
operation = Ext.apply({
internalScope: me,
internalCallback: me.onProxyLoad,
scope: me
}, options);
me.lastOptions = operation;
operation = me.createOperation('read', operation);
if (me.fireEvent('beforeload', me, operation) !== false) {
me.onBeforeLoad(operation);
me.loading = true;
me.clearLoadTask();
operation.execute();
}
return me;
},
/**
* Reloads the store using the last options passed to the {@link #method-load} method. You can use the reload method to reload the
* store using the parameters from the last load() call. For example:
*
* store.load({
* params : {
* userid : 22216
* }
* });
*
* //...
*
* store.reload();
*
* The initial {@link #method-load} execution will pass the `userid` parameter in the request. The {@link #reload} execution
* will also send the same `userid` parameter in its request as it will reuse the `params` object from the last {@link #method-load} call.
*
* You can override a param by passing in the config object with the `params` object:
*
* store.load({
* params : {
* userid : 22216,
* foo : 'bar'
* }
* });
*
* //...
*
* store.reload({
* params : {
* userid : 1234
* }
* });
*
* The initial {@link #method-load} execution sends the `userid` and `foo` parameters but in the {@link #reload} it only sends
* the `userid` paramter because you are overriding the `params` config not just overriding the one param. To only change a single param
* but keep other params, you will have to get the last params from the {@link #lastOptions} property:
*
* var lastOptions = store.lastOptions,
* lastParams = Ext.clone(lastOptions.params); // make a copy of the last params so we don't affect future reload() calls
*
* lastParams.userid = 1234;
*
* store.reload({
* params : lastParams
* });
*
* This will now send the `userid` parameter as `1234` and the `foo` param as `'bar'`.
*
* @param {Object} [options] A config object which contains options which may override the options passed to the previous load call. See the
* {@link #method-load} method for valid configs.
*/
reload: function(options) {
var o = Ext.apply({}, options, this.lastOptions);
return this.load(o);
},
// Called when a batched update ends: flush pending changes to the proxy if
// autoSync is enabled and auto-syncing is not currently suspended.
// NOTE(review): reads me.autoSync directly although autoSync is declared as
// a config (getAutoSync) — confirm this property is published.
onEndUpdate: function() {
    var me = this;
    if (me.needsSync && me.autoSync && !me.autoSyncSuspended) {
        me.sync();
    }
},
/**
* @private
* A model instance should call this method on the Store it has been {@link Ext.data.Model#join joined} to..
* @param {Ext.data.Model} record The model instance that was edited
* @since 3.4.0
*/
// Notifies this store that a joined record's changes were rejected; relays
// an 'update' with Ext.data.Model.REJECT if the record belongs to this store.
afterReject: function(record) {
    var me = this;
    // Must pass the 5th param (modifiedFieldNames) as null, otherwise the
    // event firing machinery appends the listeners "options" object to the arg list
    // which may get used as the modified fields array by a handler.
    // This array is used for selective grid cell updating by Grid View.
    // Null will be treated as though all cells need updating.
    if (me.contains(record)) {
        me.onUpdate(record, Ext.data.Model.REJECT, null);
        me.fireEvent('update', me, record, Ext.data.Model.REJECT, null);
    }
},
/**
* @private
* A model instance should call this method on the Store it has been {@link Ext.data.Model#join joined} to.
* @param {Ext.data.Model} record The model instance that was edited
* @since 3.4.0
*/
// Notifies this store that a joined record was committed; relays an 'update'
// with Ext.data.Model.COMMIT if the record belongs to this store.
afterCommit: function(record, modifiedFieldNames) {
    var me = this;
    // Normalize a missing field list to an explicit null (same reasoning as
    // afterReject: null means "all cells need updating" to listeners).
    if (!modifiedFieldNames) {
        modifiedFieldNames = null;
    }
    if (me.contains(record)) {
        me.onUpdate(record, Ext.data.Model.COMMIT, modifiedFieldNames);
        me.fireEvent('update', me, record, Ext.data.Model.COMMIT, modifiedFieldNames);
    }
},
afterErase: function(record) {
this.onErase(record);
},
onErase: Ext.emptyFn,
onUpdate: Ext.emptyFn,
/**
* @private
*/
onDestroy: function() {
    var me = this,
        proxy = me.getProxy();
    // Stop any further loads, drop the data, and detach the proxy.
    me.blockLoad();
    me.clearData();
    me.setProxy(null);
    // Only destroy proxies this store created itself (see applyProxy);
    // externally supplied proxies may be shared.
    if (proxy.autoCreated) {
        proxy.destroy();
    }
    me.setModel(null);
},
/**
* Returns true if the store has a pending load task.
* @return {Boolean} `true` if the store has a pending load task.
* @private
*/
hasPendingLoad: function() {
return !!this.loadTask || this.isLoading();
},
/**
* Returns true if the Store is currently performing a load operation
* @return {Boolean} `true` if the Store is currently loading
*/
isLoading: function() {
return !!this.loading;
},
/**
* Returns `true` if the Store has been loaded.
* @return {Boolean} `true` if the Store has been loaded.
*/
isLoaded: function() {
return this.loadCount > 0;
},
/**
* Suspends automatically syncing the Store with its Proxy. Only applicable if {@link #autoSync} is `true`
*/
suspendAutoSync: function() {
++this.autoSyncSuspended;
},
/**
* Resumes automatically syncing the Store with its Proxy. Only applicable if {@link #autoSync} is `true`
* @param {Boolean} syncNow Pass `true` to synchronize now. Only synchronizes with the Proxy if the suspension
* count has gone to zero (We are not under a higher level of suspension)
*
*/
resumeAutoSync: function(syncNow) {
    var me = this;
    //<debug>
    if (!me.autoSyncSuspended) {
        Ext.log.warn('Mismatched call to resumeAutoSync - auto synchronization is currently not suspended.');
    }
    //</debug>
    // Decrement the counter; only sync once the outermost suspension ends
    // (counter reaches zero) and the caller requested an immediate sync.
    if (me.autoSyncSuspended && ! --me.autoSyncSuspended) {
        if (syncNow) {
            me.sync();
        }
    }
},
/**
* Removes all records from the store. This method does a "fast remove",
* individual remove events are not called. The {@link #clear} event is
* fired upon completion.
* @method
* @since 1.1.0
*/
removeAll: Ext.emptyFn,
// individual store subclasses should implement a "fast" remove
// and fire a clear event afterwards
// to be implemented by subclasses
clearData: Ext.emptyFn,
privates: {
    // Template hook; subclasses react to proxy extraParams changes.
    onExtraParamsChanged: function() {
    },
    // Run a (possibly deferred) load unless loading is blocked, in which
    // case count the request so unblockLoad can replay it later.
    attemptLoad: function(options) {
        if (this.isLoadBlocked()) {
            ++this.loadsWhileBlocked;
            return;
        }
        this.load(options);
    },
    // Increase the block counter; each blockLoad() must be paired with an
    // unblockLoad().
    blockLoad: function (value) {
        ++this.blockLoadCounter;
    },
    // Cancel and discard any pending delayed autoLoad task.
    clearLoadTask: function() {
        var loadTask = this.loadTask;
        if (loadTask) {
            loadTask.cancel();
            this.loadTask = null;
        }
    },
    // Unjoin every tracked removed record from this store and empty the
    // list in place (the array instance is kept for reuse).
    cleanRemoved: function() {
        var removed = this.removed,
            len, i;
        if (removed) {
            for (i = 0, len = removed.length; i < len; ++i) {
                removed[i].unjoin(this);
            }
            removed.length = 0;
        }
    },
    // Create a proxy Operation of the given type, lazily attaching the
    // store's proxy listeners (response processing plus optional metachange)
    // the first time an operation is created.
    createOperation: function(type, options) {
        var me = this,
            proxy = me.getProxy(),
            listeners;
        if (!me.proxyListeners) {
            listeners = {
                scope: me,
                destroyable: true,
                beginprocessresponse: me.beginUpdate,
                endprocessresponse: me.endUpdate
            };
            if (!me.disableMetaChangeEvent) {
                listeners.metachange = me.onMetaChange;
            }
            me.proxyListeners = proxy.on(listeners);
        }
        return proxy.createOperation(type, options);
    },
    // Define an anonymous Model subclass (optionally with the given fields),
    // install it as this store's model, and reconcile the proxy between the
    // store and the new model.
    createImplicitModel: function(fields) {
        var me = this,
            modelCfg = {
                extend: me.implicitModel,
                statics: {
                    defaultProxy: 'memory'
                }
            },
            proxy, model;
        if (fields) {
            modelCfg.fields = fields;
        }
        model = Ext.define(null, modelCfg);
        me.setModel(model);
        proxy = me.getProxy();
        if (proxy) {
            model.setProxy(proxy);
        } else {
            me.setProxy(model.getProxy());
        }
    },
    // True while blockLoad() calls outnumber unblockLoad() calls.
    isLoadBlocked: function () {
        return !!this.blockLoadCounter;
    },
    // True when the proxy completes operations synchronously (e.g. memory).
    loadsSynchronously: function() {
        return this.getProxy().isSynchronous;
    },
    onBeforeLoad: Ext.privateFn,
    // Forget a record that was pending destruction, unjoining it from this
    // store.
    removeFromRemoved: function(record) {
        var removed = this.removed;
        if (removed) {
            Ext.Array.remove(removed, record);
            record.unjoin(this);
        }
    },
    // Attach the current filter/sorter state to the load options when
    // filtering/sorting is delegated to the server. Mutates 'options'.
    setLoadOptions: function(options) {
        var me = this,
            filters, sorters;
        if (me.getRemoteFilter()) {
            filters = me.getFilters(false);
            if (filters && filters.getCount()) {
                options.filters = filters.getRange();
            }
        }
        if (me.getRemoteSort()) {
            sorters = me.getSorters(false);
            if (sorters && sorters.getCount()) {
                options.sorters = sorters.getRange();
            }
        }
    },
    // Decrease the block counter; when it reaches zero, optionally replay a
    // load that was requested while blocked (see attemptLoad).
    unblockLoad: function (doLoad) {
        var me = this,
            loadsWhileBlocked = me.loadsWhileBlocked;
        --me.blockLoadCounter;
        if (!me.blockLoadCounter) {
            me.loadsWhileBlocked = 0;
            if (doLoad && loadsWhileBlocked) {
                me.load();
            }
        }
    }
}
});
|
var me = this,
doLoad = me.getAutoLoad() || me.isLoaded();
me.blockLoad();
me.callParent([state]);
|
random_line_split
|
ProxyStore.js
|
/**
* ProxyStore is a superclass of {@link Ext.data.Store} and {@link Ext.data.BufferedStore}. It's never used directly,
* but offers a set of methods used by both of those subclasses.
*
* We've left it here in the docs for reference purposes, but unless you need to make a whole new type of Store, what
* you're probably looking for is {@link Ext.data.Store}. If you're still interested, here's a brief description of what
* ProxyStore is and is not.
*
* ProxyStore provides the basic configuration for anything that can be considered a Store. It expects to be
* given a {@link Ext.data.Model Model} that represents the type of data in the Store. It also expects to be given a
* {@link Ext.data.proxy.Proxy Proxy} that handles the loading of data into the Store.
*
* ProxyStore provides a few helpful methods such as {@link #method-load} and {@link #sync}, which load and save data
* respectively, passing the requests through the configured {@link #proxy}.
*
* Built-in Store subclasses add extra behavior to each of these functions. Note also that each ProxyStore subclass
* has its own way of storing data - in {@link Ext.data.Store} the data is saved as a flat {@link Ext.util.Collection Collection},
* whereas in {@link Ext.data.BufferedStore BufferedStore} we use a {@link Ext.data.PageMap} to maintain a client side cache of pages of records.
*
* The store provides filtering and sorting support. This sorting/filtering can happen on the client side
* or can be completed on the server. This is controlled by the {@link Ext.data.Store#remoteSort remoteSort} and
* {@link Ext.data.Store#remoteFilter remoteFilter} config options. For more information see the {@link #method-sort} and
* {@link Ext.data.Store#filter filter} methods.
*/
Ext.define('Ext.data.ProxyStore', {
extend: 'Ext.data.AbstractStore',
requires: [
'Ext.data.Model',
'Ext.data.proxy.Proxy',
'Ext.data.proxy.Memory',
'Ext.data.operation.*'
],
config: {
// @cmd-auto-dependency {aliasPrefix: "model.", mvc: true, blame: "all"}
/**
* @cfg {String/Ext.data.Model} model
* Name of the {@link Ext.data.Model Model} associated with this store. See
* {@link Ext.data.Model#entityName}.
*
* May also be the actual Model subclass.
*
* This config is required for the store to be able to read data unless you have defined
* the {@link #fields} config which will create an anonymous `Ext.data.Model`.
*/
model: undefined,
// @cmd-auto-dependency {aliasPrefix: "data.field."}
/**
* @cfg {Object[]} fields
* This may be used in place of specifying a {@link #model} configuration. The fields should be a
* set of {@link Ext.data.Field} configuration objects. The store will automatically create a {@link Ext.data.Model}
* with these fields. In general this configuration option should only be used for simple stores like
* a two-field store of ComboBox. For anything more complicated, such as specifying a particular id property or
* associations, a {@link Ext.data.Model} should be defined and specified for the {@link #model}
* config.
* @since 2.3.0
*/
fields: null,
// @cmd-auto-dependency {aliasPrefix : "proxy."}
/**
* @cfg {String/Ext.data.proxy.Proxy/Object} proxy
* The Proxy to use for this Store. This can be either a string, a config object or a Proxy instance -
* see {@link #setProxy} for details.
* @since 1.1.0
*/
proxy: undefined,
/**
* @cfg {Boolean/Object} autoLoad
* If data is not specified, and if autoLoad is true or an Object, this store's load method is automatically called
* after creation. If the value of autoLoad is an Object, this Object will be passed to the store's load method.
*
* It's important to note that {@link Ext.data.TreeStore Tree Stores} will
* load regardless of autoLoad's value if expand is set to true on the
* {@link Ext.data.TreeStore#root root node}.
*
* @since 2.3.0
*/
autoLoad: undefined,
/**
* @cfg {Boolean} autoSync
* True to automatically sync the Store with its Proxy after every edit to one of its Records. Defaults to false.
*/
autoSync: false,
/**
* @cfg {String} batchUpdateMode
* Sets the updating behavior based on batch synchronization. 'operation' (the default) will update the Store's
* internal representation of the data after each operation of the batch has completed, 'complete' will wait until
* the entire batch has been completed before updating the Store's data. 'complete' is a good choice for local
* storage proxies, 'operation' is better for remote proxies, where there is a comparatively high latency.
*/
batchUpdateMode: 'operation',
/**
* @cfg {Boolean} sortOnLoad
* If true, any sorters attached to this Store will be run after loading data, before the datachanged event is fired.
* Defaults to true, ignored if {@link Ext.data.Store#remoteSort remoteSort} is true
*/
sortOnLoad: true,
/**
* @cfg {Boolean} [trackRemoved=true]
* This config controls whether removed records are remembered by this store for
* later saving to the server.
*/
trackRemoved: true,
/**
 * @private
 * The delay time to kick off the initial autoLoad task
 */
autoLoadDelay: 1
},
// Class-creation hook: when a subclass names its model with a string, defer
// the subclass's creation until that model class has been loaded.
onClassExtended: function(cls, data, hooks) {
    var model = data.model,
        onBeforeClassCreated;
    if (typeof model === 'string') {
        // Wrap the framework's onBeforeCreated callback so it only runs once
        // the named model class has been required and is available.
        onBeforeClassCreated = hooks.onBeforeCreated;
        hooks.onBeforeCreated = function() {
            var me = this,
                args = arguments;
            Ext.require(model, function() {
                onBeforeClassCreated.apply(me, args);
            });
        };
    }
},
/**
* @private
* @property {Boolean}
* The class name of the model that this store uses if no explicit {@link #model} is given
*/
implicitModel: 'Ext.data.Model',
blockLoadCounter: 0,
loadsWhileBlocked: 0,
/**
* @property {Object} lastOptions
* Property to hold the last options from a {@link #method-load} method call. This object is used for the {@link #method-reload}
* to reuse the same options. Please see {@link #method-reload} for a simple example on how to use the lastOptions property.
*/
/**
* @property {Number} autoSyncSuspended
* A counter to track suspensions.
* @private
*/
autoSyncSuspended: 0,
//documented above
constructor: function(config) {
var me = this;
// <debug>
var configModel = me.model;
// </debug>
/**
* @event beforeload
* Fires before a request is made for a new data object. If the beforeload handler returns false the load
* action will be canceled.
* @param {Ext.data.Store} store This Store
* @param {Ext.data.operation.Operation} operation The Ext.data.operation.Operation object that will be passed to the Proxy to
* load the Store
* @since 1.1.0
*/
/**
* @event load
* Fires whenever the store reads data from a remote data source.
* @param {Ext.data.Store} this
* @param {Ext.data.Model[]} records An array of records
* @param {Boolean} successful True if the operation was successful.
* @since 1.1.0
*/
/**
* @event write
* Fires whenever a successful write has been made via the configured {@link #proxy Proxy}
* @param {Ext.data.Store} store This Store
* @param {Ext.data.operation.Operation} operation The {@link Ext.data.operation.Operation Operation} object that was used in
* the write
* @since 3.4.0
*/
/**
* @event beforesync
* Fired before a call to {@link #sync} is executed. Return false from any listener to cancel the sync
* @param {Object} options Hash of all records to be synchronized, broken down into create, update and destroy
*/
/**
* @event metachange
* Fires when this store's underlying reader (available via the proxy) provides new metadata.
* Metadata usually consists of new field definitions, but can include any configuration data
* required by an application, and can be processed as needed in the event handler.
* This event is currently only fired for JsonReaders.
* @param {Ext.data.Store} this
* @param {Object} meta The JSON metadata
* @since 1.1.0
*/
/**
* Temporary cache in which removed model instances are kept until successfully
* synchronised with a Proxy, at which point this is cleared.
*
* This cache is maintained unless you set `trackRemoved` to `false`.
*
* @protected
* @property {Ext.data.Model[]} removed
*/
me.removed = [];
me.blockLoad();
me.callParent(arguments);
me.unblockLoad();
// <debug>
if (!me.getModel() && me.useModelWarning !== false && me.getStoreId() !== 'ext-empty-store') {
// There are a number of ways things could have gone wrong, try to give as much information as possible
var logMsg = [
Ext.getClassName(me) || 'Store',
' created with no model.'
];
if (typeof configModel === 'string') {
logMsg.push(" The name '", configModel, "'", ' does not correspond to a valid model.');
}
Ext.log.warn(logMsg.join(''));
}
// </debug>
},
updateAutoLoad: function(autoLoad) {
var me = this,
task;
// Ensure the data collection is set up
me.getData();
if (autoLoad) {
task = me.loadTask || (me.loadTask = new Ext.util.DelayedTask(null, null, null, null, false));
// Defer the load until the store (and probably the view) is fully constructed
task.delay(me.autoLoadDelay, me.attemptLoad, me, Ext.isObject(autoLoad) ? [autoLoad] : undefined);
}
},
/**
* Returns the total number of {@link Ext.data.Model Model} instances that the {@link Ext.data.proxy.Proxy Proxy}
* indicates exist. This will usually differ from {@link #getCount} when using paging - getCount returns the
* number of records loaded into the Store at the moment, getTotalCount returns the number of records that
* could be loaded into the Store if the Store contained all data
* @return {Number} The total number of Model instances available via the Proxy. 0 returned if
* no value has been set via the reader.
*/
getTotalCount: function() {
return this.totalCount || 0;
},
applyFields: function(fields) {
if (fields) {
this.createImplicitModel(fields);
}
},
applyModel: function(model) {
if (model)
|
// If no model, ensure that the fields config is converted to a model.
else {
this.getFields();
model = this.getModel() || this.createImplicitModel();
}
return model;
},
applyProxy: function(proxy) {
var model = this.getModel();
if (proxy !== null) {
if (proxy) {
if (proxy.isProxy) {
proxy.setModel(model);
} else {
if (Ext.isString(proxy)) {
proxy = {
type: proxy,
model: model
};
} else if (!proxy.model) {
proxy = Ext.apply({
model: model
}, proxy);
}
proxy = Ext.createByAlias('proxy.' + proxy.type, proxy);
proxy.autoCreated = true;
}
} else if (model) {
proxy = model.getProxy();
}
if (!proxy) {
proxy = Ext.createByAlias('proxy.memory');
proxy.autoCreated = true;
}
}
return proxy;
},
applyState: function (state) {
var me = this,
doLoad = me.getAutoLoad() || me.isLoaded();
me.blockLoad();
me.callParent([state]);
me.unblockLoad(doLoad);
},
updateProxy: function(proxy, oldProxy) {
this.proxyListeners = Ext.destroy(this.proxyListeners);
},
updateTrackRemoved: function (track) {
this.cleanRemoved();
this.removed = track ? [] : null;
},
/**
* @private
*/
onMetaChange: function(proxy, meta) {
this.fireEvent('metachange', this, meta);
},
//saves any phantom records
create: function(data, options) {
var me = this,
Model = me.getModel(),
instance = new Model(data),
operation;
options = Ext.apply({}, options);
if (!options.records) {
options.records = [instance];
}
options.internalScope = me;
options.internalCallback = me.onProxyWrite;
operation = me.createOperation('create', options);
return operation.execute();
},
read: function() {
return this.load.apply(this, arguments);
},
update: function(options) {
var me = this,
operation;
options = Ext.apply({}, options);
if (!options.records) {
options.records = me.getUpdatedRecords();
}
options.internalScope = me;
options.internalCallback = me.onProxyWrite;
operation = me.createOperation('update', options);
return operation.execute();
},
/**
* @private
* Callback for any write Operation over the Proxy. Updates the Store's MixedCollection to reflect
* the updates provided by the Proxy
*/
onProxyWrite: function(operation) {
var me = this,
success = operation.wasSuccessful(),
records = operation.getRecords();
switch (operation.getAction()) {
case 'create':
me.onCreateRecords(records, operation, success);
break;
case 'update':
me.onUpdateRecords(records, operation, success);
break;
case 'destroy':
me.onDestroyRecords(records, operation, success);
break;
}
if (success) {
me.fireEvent('write', me, operation);
me.fireEvent('datachanged', me);
}
},
// may be implemented by store subclasses
onCreateRecords: Ext.emptyFn,
// may be implemented by store subclasses
onUpdateRecords: Ext.emptyFn,
/**
* Removes any records when a write is returned from the server.
* @private
* @param {Ext.data.Model[]} records The array of removed records
* @param {Ext.data.operation.Operation} operation The operation that just completed
* @param {Boolean} success True if the operation was successful
*/
onDestroyRecords: function(records, operation, success) {
if (success) {
this.cleanRemoved();
}
},
// tells the attached proxy to destroy the given records
// @since 3.4.0
erase: function(options) {
var me = this,
operation;
options = Ext.apply({}, options);
if (!options.records) {
options.records = me.getRemovedRecords();
}
options.internalScope = me;
options.internalCallback = me.onProxyWrite;
operation = me.createOperation('destroy', options);
return operation.execute();
},
/**
* @private
* Attached as the 'operationcomplete' event listener to a proxy's Batch object. By default just calls through
* to onProxyWrite.
*/
onBatchOperationComplete: function(batch, operation) {
return this.onProxyWrite(operation);
},
/**
* @private
* Attached as the 'complete' event listener to a proxy's Batch object. Iterates over the batch operations
* and updates the Store's internal data MixedCollection.
*/
onBatchComplete: function(batch, operation) {
var me = this,
operations = batch.operations,
length = operations.length,
i;
if (me.batchUpdateMode !== 'operation') {
me.suspendEvents();
for (i = 0; i < length; i++) {
me.onProxyWrite(operations[i]);
}
me.resumeEvents();
}
me.isSyncing = false;
me.fireEvent('datachanged', me);
},
/**
* @private
*/
onBatchException: function(batch, operation) {
// //decide what to do... could continue with the next operation
// batch.start();
//
// //or retry the last operation
// batch.retry();
},
/**
* @private
* Filter function for new records.
*/
filterNew: function(item) {
// only want phantom records that are valid
return item.phantom === true && item.isValid();
},
/**
* Returns all Model instances that are either currently a phantom (e.g. have no id), or have an ID but have not
* yet been saved on this Store (this happens when adding a non-phantom record from another Store into this one)
* @return {Ext.data.Model[]} The Model instances
*/
getNewRecords: function() {
return [];
},
/**
* Returns all valid, non-phantom Model instances that have been updated in the Store but not yet synchronized with the Proxy.
* @return {Ext.data.Model[]} The updated Model instances
*/
getUpdatedRecords: function() {
return [];
},
/**
* Gets all {@link Ext.data.Model records} added or updated since the last commit. Note that the order of records
* returned is not deterministic and does not indicate the order in which records were modified. Note also that
* removed records are not included (use {@link #getRemovedRecords} for that).
* @return {Ext.data.Model[]} The added and updated Model instances
*/
getModifiedRecords : function(){
return [].concat(this.getNewRecords(), this.getUpdatedRecords());
},
/**
* @private
* Filter function for updated records.
*/
filterUpdated: function(item) {
// only want dirty records, not phantoms that are valid
return item.dirty === true && item.phantom !== true && item.isValid();
},
/**
* Returns any records that have been removed from the store but not yet destroyed on the proxy.
* @return {Ext.data.Model[]} The removed Model instances
*/
getRemovedRecords: function() {
return this.removed;
},
/**
* Synchronizes the store with its {@link #proxy}. This asks the proxy to batch together any new, updated
* and deleted records in the store, updating the store's internal representation of the records
* as each operation completes.
*
* @param {Object} [options] Object containing one or more properties supported by the sync method (these get
* passed along to the underlying proxy's {@link Ext.data.Proxy#batch batch} method):
*
* @param {Ext.data.Batch/Object} [options.batch] A {@link Ext.data.Batch} object (or batch config to apply
* to the created batch). If unspecified a default batch will be auto-created as needed.
*
* @param {Function} [options.callback] The function to be called upon completion of the sync.
* The callback is called regardless of success or failure and is passed the following parameters:
* @param {Ext.data.Batch} options.callback.batch The {@link Ext.data.Batch batch} that was processed,
* containing all operations in their current state after processing
* @param {Object} options.callback.options The options argument that was originally passed into sync
*
* @param {Function} [options.success] The function to be called upon successful completion of the sync. The
* success function is called only if no exceptions were reported in any operations. If one or more exceptions
* occurred then the failure function will be called instead. The success function is called
* with the following parameters:
* @param {Ext.data.Batch} options.success.batch The {@link Ext.data.Batch batch} that was processed,
* containing all operations in their current state after processing
* @param {Object} options.success.options The options argument that was originally passed into sync
*
* @param {Function} [options.failure] The function to be called upon unsuccessful completion of the sync. The
* failure function is called when one or more operations returns an exception during processing (even if some
* operations were also successful). In this case you can check the batch's {@link Ext.data.Batch#exceptions
* exceptions} array to see exactly which operations had exceptions. The failure function is called with the
* following parameters:
* @param {Ext.data.Batch} options.failure.batch The {@link Ext.data.Batch} that was processed, containing all
* operations in their current state after processing
* @param {Object} options.failure.options The options argument that was originally passed into sync
*
* @param {Object} [options.params] Additional params to send during the sync Operation(s).
*
* @param {Object} [options.scope] The scope in which to execute any callbacks (i.e. the `this` object inside
* the callback, success and/or failure functions). Defaults to the store's proxy.
*
* @return {Ext.data.Store} this
*/
sync: function(options) {
var me = this,
operations = {},
toCreate = me.getNewRecords(),
toUpdate = me.getUpdatedRecords(),
toDestroy = me.getRemovedRecords(),
needsSync = false;
//<debug>
if (me.isSyncing) {
Ext.log.warn('Sync called while a sync operation is in progress. Consider configuring autoSync as false.');
}
//</debug>
me.needsSync = false;
if (toCreate.length > 0) {
operations.create = toCreate;
needsSync = true;
}
if (toUpdate.length > 0) {
operations.update = toUpdate;
needsSync = true;
}
if (toDestroy.length > 0) {
operations.destroy = toDestroy;
needsSync = true;
}
if (needsSync && me.fireEvent('beforesync', operations) !== false) {
me.isSyncing = true;
options = options || {};
me.proxy.batch(Ext.apply(options, {
operations: operations,
listeners: me.getBatchListeners()
}));
}
return me;
},
/**
* @private
* Returns an object which is passed in as the listeners argument to proxy.batch inside this.sync.
* This is broken out into a separate function to allow for customisation of the listeners
* @return {Object} The listeners object
*/
getBatchListeners: function() {
var me = this,
listeners = {
scope: me,
exception: me.onBatchException,
complete: me.onBatchComplete
};
if (me.batchUpdateMode === 'operation') {
listeners.operationcomplete = me.onBatchOperationComplete;
}
return listeners;
},
/**
* Saves all pending changes via the configured {@link #proxy}. Use {@link #sync} instead.
* @deprecated 4.0.0 Will be removed in the next major version
*/
save: function() {
return this.sync.apply(this, arguments);
},
/**
* Loads the Store using its configured {@link #proxy}.
* @param {Object} [options] This is passed into the {@link Ext.data.operation.Operation Operation}
* object that is created and then sent to the proxy's {@link Ext.data.proxy.Proxy#read} function
*
* @return {Ext.data.Store} this
* @since 1.1.0
*/
load: function(options) {
// Prevent loads from being triggered while applying initial configs
if (this.isLoadBlocked()) {
return;
}
var me = this,
operation;
me.setLoadOptions(options);
if (me.getRemoteSort() && options.sorters) {
me.fireEvent('beforesort', me, options.sorters);
}
operation = Ext.apply({
internalScope: me,
internalCallback: me.onProxyLoad,
scope: me
}, options);
me.lastOptions = operation;
operation = me.createOperation('read', operation);
if (me.fireEvent('beforeload', me, operation) !== false) {
me.onBeforeLoad(operation);
me.loading = true;
me.clearLoadTask();
operation.execute();
}
return me;
},
/**
* Reloads the store using the last options passed to the {@link #method-load} method. You can use the reload method to reload the
* store using the parameters from the last load() call. For example:
*
* store.load({
* params : {
* userid : 22216
* }
* });
*
* //...
*
* store.reload();
*
* The initial {@link #method-load} execution will pass the `userid` parameter in the request. The {@link #reload} execution
* will also send the same `userid` parameter in its request as it will reuse the `params` object from the last {@link #method-load} call.
*
* You can override a param by passing in the config object with the `params` object:
*
* store.load({
* params : {
* userid : 22216,
* foo : 'bar'
* }
* });
*
* //...
*
* store.reload({
* params : {
* userid : 1234
* }
* });
*
* The initial {@link #method-load} execution sends the `userid` and `foo` parameters but in the {@link #reload} it only sends
* the `userid` paramter because you are overriding the `params` config not just overriding the one param. To only change a single param
* but keep other params, you will have to get the last params from the {@link #lastOptions} property:
*
* var lastOptions = store.lastOptions,
* lastParams = Ext.clone(lastOptions.params); // make a copy of the last params so we don't affect future reload() calls
*
* lastParams.userid = 1234;
*
* store.reload({
* params : lastParams
* });
*
* This will now send the `userid` parameter as `1234` and the `foo` param as `'bar'`.
*
* @param {Object} [options] A config object which contains options which may override the options passed to the previous load call. See the
* {@link #method-load} method for valid configs.
*/
reload: function(options) {
var o = Ext.apply({}, options, this.lastOptions);
return this.load(o);
},
onEndUpdate: function() {
var me = this;
if (me.needsSync && me.autoSync && !me.autoSyncSuspended) {
me.sync();
}
},
/**
* @private
* A model instance should call this method on the Store it has been {@link Ext.data.Model#join joined} to..
* @param {Ext.data.Model} record The model instance that was edited
* @since 3.4.0
*/
afterReject: function(record) {
var me = this;
// Must pass the 5th param (modifiedFieldNames) as null, otherwise the
// event firing machinery appends the listeners "options" object to the arg list
// which may get used as the modified fields array by a handler.
// This array is used for selective grid cell updating by Grid View.
// Null will be treated as though all cells need updating.
if (me.contains(record)) {
me.onUpdate(record, Ext.data.Model.REJECT, null);
me.fireEvent('update', me, record, Ext.data.Model.REJECT, null);
}
},
/**
* @private
* A model instance should call this method on the Store it has been {@link Ext.data.Model#join joined} to.
* @param {Ext.data.Model} record The model instance that was edited
* @since 3.4.0
*/
afterCommit: function(record, modifiedFieldNames) {
var me = this;
if (!modifiedFieldNames) {
modifiedFieldNames = null;
}
if (me.contains(record)) {
me.onUpdate(record, Ext.data.Model.COMMIT, modifiedFieldNames);
me.fireEvent('update', me, record, Ext.data.Model.COMMIT, modifiedFieldNames);
}
},
afterErase: function(record) {
this.onErase(record);
},
onErase: Ext.emptyFn,
onUpdate: Ext.emptyFn,
/**
* @private
*/
onDestroy: function() {
var me = this,
proxy = me.getProxy();
me.blockLoad();
me.clearData();
me.setProxy(null);
if (proxy.autoCreated) {
proxy.destroy();
}
me.setModel(null);
},
/**
* Returns true if the store has a pending load task.
* @return {Boolean} `true` if the store has a pending load task.
* @private
*/
hasPendingLoad: function() {
return !!this.loadTask || this.isLoading();
},
/**
* Returns true if the Store is currently performing a load operation
* @return {Boolean} `true` if the Store is currently loading
*/
isLoading: function() {
return !!this.loading;
},
/**
* Returns `true` if the Store has been loaded.
* @return {Boolean} `true` if the Store has been loaded.
*/
isLoaded: function() {
return this.loadCount > 0;
},
/**
* Suspends automatically syncing the Store with its Proxy. Only applicable if {@link #autoSync} is `true`
*/
suspendAutoSync: function() {
++this.autoSyncSuspended;
},
/**
* Resumes automatically syncing the Store with its Proxy. Only applicable if {@link #autoSync} is `true`
* @param {Boolean} syncNow Pass `true` to synchronize now. Only synchronizes with the Proxy if the suspension
* count has gone to zero (We are not under a higher level of suspension)
*
*/
resumeAutoSync: function(syncNow) {
var me = this;
//<debug>
if (!me.autoSyncSuspended) {
Ext.log.warn('Mismatched call to resumeAutoSync - auto synchronization is currently not suspended.');
}
//</debug>
if (me.autoSyncSuspended && ! --me.autoSyncSuspended) {
if (syncNow) {
me.sync();
}
}
},
/**
* Removes all records from the store. This method does a "fast remove",
* individual remove events are not called. The {@link #clear} event is
* fired upon completion.
* @method
* @since 1.1.0
*/
removeAll: Ext.emptyFn,
// individual store subclasses should implement a "fast" remove
// and fire a clear event afterwards
// to be implemented by subclasses
clearData: Ext.emptyFn,
privates: {
onExtraParamsChanged: function() {
},
attemptLoad: function(options) {
if (this.isLoadBlocked()) {
++this.loadsWhileBlocked;
return;
}
this.load(options);
},
blockLoad: function (value) {
++this.blockLoadCounter;
},
clearLoadTask: function() {
var loadTask = this.loadTask;
if (loadTask) {
loadTask.cancel();
this.loadTask = null;
}
},
cleanRemoved: function() {
var removed = this.removed,
len, i;
if (removed) {
for (i = 0, len = removed.length; i < len; ++i) {
removed[i].unjoin(this);
}
removed.length = 0;
}
},
createOperation: function(type, options) {
var me = this,
proxy = me.getProxy(),
listeners;
if (!me.proxyListeners) {
listeners = {
scope: me,
destroyable: true,
beginprocessresponse: me.beginUpdate,
endprocessresponse: me.endUpdate
};
if (!me.disableMetaChangeEvent) {
listeners.metachange = me.onMetaChange;
}
me.proxyListeners = proxy.on(listeners);
}
return proxy.createOperation(type, options);
},
createImplicitModel: function(fields) {
var me = this,
modelCfg = {
extend: me.implicitModel,
statics: {
defaultProxy: 'memory'
}
},
proxy, model;
if (fields) {
modelCfg.fields = fields;
}
model = Ext.define(null, modelCfg);
me.setModel(model);
proxy = me.getProxy();
if (proxy) {
model.setProxy(proxy);
} else {
me.setProxy(model.getProxy());
}
},
isLoadBlocked: function () {
return !!this.blockLoadCounter;
},
loadsSynchronously: function() {
return this.getProxy().isSynchronous;
},
onBeforeLoad: Ext.privateFn,
removeFromRemoved: function(record) {
var removed = this.removed;
if (removed) {
Ext.Array.remove(removed, record);
record.unjoin(this);
}
},
setLoadOptions: function(options) {
var me = this,
filters, sorters;
if (me.getRemoteFilter()) {
filters = me.getFilters(false);
if (filters && filters.getCount()) {
options.filters = filters.getRange();
}
}
if (me.getRemoteSort()) {
sorters = me.getSorters(false);
if (sorters && sorters.getCount()) {
options.sorters = sorters.getRange();
}
}
},
unblockLoad: function (doLoad) {
var me = this,
loadsWhileBlocked = me.loadsWhileBlocked;
--me.blockLoadCounter;
if (!me.blockLoadCounter) {
me.loadsWhileBlocked = 0;
if (doLoad && loadsWhileBlocked) {
me.load();
}
}
}
}
});
|
{
model = Ext.data.schema.Schema.lookupEntity(model);
}
|
conditional_block
|
parser.rs
|
extern crate dirs;
use regex::Regex;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use {Creds, Result};
pub fn get_credentials(conf: String) -> Result<Creds> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
// Build path to config file
path.push(conf);
let content = read_config_file(path.as_path())?;
let user = extract_info(r"set imap_user=(\w*)", &content)?;
let pass = extract_info(r"set imap_pass=(\w*)", &content)?;
let host = extract_info(r"set folder=imaps?://(.+):\d+", &content)?;
let port = extract_info(r"set folder=imaps?://.+:(\d+)", &content)?;
let port = port.parse()?;
Ok(Creds {
user: user,
pass: pass,
host: host,
port: port,
})
}
pub fn
|
(pattern: &str, text: &str) -> Result<String> {
let re = Regex::new(pattern)?;
let cap = re.captures(text).ok_or("Couldn't match")?;
let xtr = cap.get(1).ok_or("No captures")?;
Ok(xtr.as_str().to_string())
}
fn read_config_file(path: &Path) -> Result<String> {
let mut content = String::new();
let mut file = File::open(&path)?;
file.read_to_string(&mut content)?;
Ok(content)
}
pub fn get_db_path() -> Result<String> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
path.push(::DB);
let path_str = path.to_str()
.ok_or("Can't convert path into string")?;
Ok(path_str.to_string())
}
|
extract_info
|
identifier_name
|
parser.rs
|
extern crate dirs;
use regex::Regex;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use {Creds, Result};
pub fn get_credentials(conf: String) -> Result<Creds> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
// Build path to config file
path.push(conf);
let content = read_config_file(path.as_path())?;
let user = extract_info(r"set imap_user=(\w*)", &content)?;
let pass = extract_info(r"set imap_pass=(\w*)", &content)?;
let host = extract_info(r"set folder=imaps?://(.+):\d+", &content)?;
let port = extract_info(r"set folder=imaps?://.+:(\d+)", &content)?;
let port = port.parse()?;
Ok(Creds {
user: user,
pass: pass,
host: host,
port: port,
})
}
pub fn extract_info(pattern: &str, text: &str) -> Result<String> {
let re = Regex::new(pattern)?;
let cap = re.captures(text).ok_or("Couldn't match")?;
let xtr = cap.get(1).ok_or("No captures")?;
Ok(xtr.as_str().to_string())
}
fn read_config_file(path: &Path) -> Result<String> {
let mut content = String::new();
let mut file = File::open(&path)?;
file.read_to_string(&mut content)?;
Ok(content)
}
pub fn get_db_path() -> Result<String> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
|
path.push(::DB);
let path_str = path.to_str()
.ok_or("Can't convert path into string")?;
Ok(path_str.to_string())
}
|
random_line_split
|
|
parser.rs
|
extern crate dirs;
use regex::Regex;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use {Creds, Result};
pub fn get_credentials(conf: String) -> Result<Creds> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
// Build path to config file
path.push(conf);
let content = read_config_file(path.as_path())?;
let user = extract_info(r"set imap_user=(\w*)", &content)?;
let pass = extract_info(r"set imap_pass=(\w*)", &content)?;
let host = extract_info(r"set folder=imaps?://(.+):\d+", &content)?;
let port = extract_info(r"set folder=imaps?://.+:(\d+)", &content)?;
let port = port.parse()?;
Ok(Creds {
user: user,
pass: pass,
host: host,
port: port,
})
}
pub fn extract_info(pattern: &str, text: &str) -> Result<String>
|
fn read_config_file(path: &Path) -> Result<String> {
let mut content = String::new();
let mut file = File::open(&path)?;
file.read_to_string(&mut content)?;
Ok(content)
}
pub fn get_db_path() -> Result<String> {
let mut path = dirs::home_dir().ok_or("Can't get home dir")?;
path.push(::DB);
let path_str = path.to_str()
.ok_or("Can't convert path into string")?;
Ok(path_str.to_string())
}
|
{
let re = Regex::new(pattern)?;
let cap = re.captures(text).ok_or("Couldn't match")?;
let xtr = cap.get(1).ok_or("No captures")?;
Ok(xtr.as_str().to_string())
}
|
identifier_body
|
startSandbox.js
|
import express from 'express';
import unpackByOutpoint from './unpackByOutpoint';
// Polyfills and `lbry-redux`
global.fetch = require('node-fetch');
global.window = global;
if (typeof global.fetch === 'object') {
global.fetch = global.fetch.default;
}
const { Lbry } = require('lbry-redux');
delete global.window;
export default async function startSandbox() {
const port = 5278;
const sandbox = express();
sandbox.get('/set/:outpoint', async (req, res) => {
const { outpoint } = req.params;
const resolvedPath = await unpackByOutpoint(Lbry, outpoint);
sandbox.use(`/sandbox/${outpoint}/`, express.static(resolvedPath));
res.send(`/sandbox/${outpoint}/`);
});
sandbox
.listen(port, 'localhost', () => console.log(`Sandbox listening on port ${port}.`))
.on('error', err => {
if (err.code === 'EADDRINUSE')
|
});
}
|
{
console.log(
`Server already listening at localhost:${port}. This is probably another LBRY app running. If not, games in the app will not work.`
);
}
|
conditional_block
|
startSandbox.js
|
import express from 'express';
import unpackByOutpoint from './unpackByOutpoint';
// Polyfills and `lbry-redux`
global.fetch = require('node-fetch');
global.window = global;
if (typeof global.fetch === 'object') {
global.fetch = global.fetch.default;
}
const { Lbry } = require('lbry-redux');
|
const sandbox = express();
sandbox.get('/set/:outpoint', async (req, res) => {
const { outpoint } = req.params;
const resolvedPath = await unpackByOutpoint(Lbry, outpoint);
sandbox.use(`/sandbox/${outpoint}/`, express.static(resolvedPath));
res.send(`/sandbox/${outpoint}/`);
});
sandbox
.listen(port, 'localhost', () => console.log(`Sandbox listening on port ${port}.`))
.on('error', err => {
if (err.code === 'EADDRINUSE') {
console.log(
`Server already listening at localhost:${port}. This is probably another LBRY app running. If not, games in the app will not work.`
);
}
});
}
|
delete global.window;
export default async function startSandbox() {
const port = 5278;
|
random_line_split
|
startSandbox.js
|
import express from 'express';
import unpackByOutpoint from './unpackByOutpoint';
// Polyfills and `lbry-redux`
global.fetch = require('node-fetch');
global.window = global;
if (typeof global.fetch === 'object') {
global.fetch = global.fetch.default;
}
const { Lbry } = require('lbry-redux');
delete global.window;
export default async function
|
() {
const port = 5278;
const sandbox = express();
sandbox.get('/set/:outpoint', async (req, res) => {
const { outpoint } = req.params;
const resolvedPath = await unpackByOutpoint(Lbry, outpoint);
sandbox.use(`/sandbox/${outpoint}/`, express.static(resolvedPath));
res.send(`/sandbox/${outpoint}/`);
});
sandbox
.listen(port, 'localhost', () => console.log(`Sandbox listening on port ${port}.`))
.on('error', err => {
if (err.code === 'EADDRINUSE') {
console.log(
`Server already listening at localhost:${port}. This is probably another LBRY app running. If not, games in the app will not work.`
);
}
});
}
|
startSandbox
|
identifier_name
|
startSandbox.js
|
import express from 'express';
import unpackByOutpoint from './unpackByOutpoint';
// Polyfills and `lbry-redux`
global.fetch = require('node-fetch');
global.window = global;
if (typeof global.fetch === 'object') {
global.fetch = global.fetch.default;
}
const { Lbry } = require('lbry-redux');
delete global.window;
export default async function startSandbox()
|
{
const port = 5278;
const sandbox = express();
sandbox.get('/set/:outpoint', async (req, res) => {
const { outpoint } = req.params;
const resolvedPath = await unpackByOutpoint(Lbry, outpoint);
sandbox.use(`/sandbox/${outpoint}/`, express.static(resolvedPath));
res.send(`/sandbox/${outpoint}/`);
});
sandbox
.listen(port, 'localhost', () => console.log(`Sandbox listening on port ${port}.`))
.on('error', err => {
if (err.code === 'EADDRINUSE') {
console.log(
`Server already listening at localhost:${port}. This is probably another LBRY app running. If not, games in the app will not work.`
);
}
});
}
|
identifier_body
|
|
Ping.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _os = require('os');
var _os2 = _interopRequireDefault(_os);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var PingMessage = function () {
function PingMessage() {
_classCallCheck(this, PingMessage);
}
_createClass(PingMessage, null, [{
key: 'construct',
value: function construct(id) {
return {
id: id,
msg: 'I am still alive!',
os: {
|
load: _os2.default.loadavg(),
uptime: _os2.default.uptime()
}
};
}
}]);
return PingMessage;
}();
exports.default = PingMessage;
|
mem: {
total: _os2.default.totalmem(),
free: _os2.default.freemem()
},
|
random_line_split
|
Ping.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _os = require('os');
var _os2 = _interopRequireDefault(_os);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var PingMessage = function () {
function PingMessage()
|
_createClass(PingMessage, null, [{
key: 'construct',
value: function construct(id) {
return {
id: id,
msg: 'I am still alive!',
os: {
mem: {
total: _os2.default.totalmem(),
free: _os2.default.freemem()
},
load: _os2.default.loadavg(),
uptime: _os2.default.uptime()
}
};
}
}]);
return PingMessage;
}();
exports.default = PingMessage;
|
{
_classCallCheck(this, PingMessage);
}
|
identifier_body
|
Ping.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _os = require('os');
var _os2 = _interopRequireDefault(_os);
function
|
(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var PingMessage = function () {
function PingMessage() {
_classCallCheck(this, PingMessage);
}
_createClass(PingMessage, null, [{
key: 'construct',
value: function construct(id) {
return {
id: id,
msg: 'I am still alive!',
os: {
mem: {
total: _os2.default.totalmem(),
free: _os2.default.freemem()
},
load: _os2.default.loadavg(),
uptime: _os2.default.uptime()
}
};
}
}]);
return PingMessage;
}();
exports.default = PingMessage;
|
_interopRequireDefault
|
identifier_name
|
index.js
|
'use strict';
const { messages, ruleName } = require('..');
testRule({
ruleName,
config: [
{
border: 2,
'/^margin/': 1,
},
],
accept: [
{
code: 'a { margin: 0; }',
},
{
code: 'a { margin: 1px; }',
},
{
code: 'a { margin: var(--foo); }',
description: 'deals with CSS variables',
},
{
code: 'a { margin: 1px /* 3px */; }',
description: 'ignore values in comments',
},
{
code: 'a { margin-inline: 1px; }',
},
{
code: 'a { margin: ; }',
},
{
code: 'a { border: 1px; }',
},
{
code: 'a { border: 1px solid; }',
},
{
code: 'a { transition: margin-right 2s ease-in-out; }',
description: 'irrelevant shorthand',
},
],
reject: [
{
code: 'a { margin: 1px 2px; }',
message: messages.rejected('margin', 1),
line: 1,
column: 5,
},
{
code: 'a { margin-inline: 1px 2px; }',
message: messages.rejected('margin-inline', 1),
line: 1,
column: 5,
},
{
code: 'a { margin: var(--foo) var(--bar); }',
message: messages.rejected('margin', 1),
line: 1,
column: 5,
description: 'deals with CSS variables',
},
{
code: 'a { margin: 1px 2px 3px 4px; }',
message: messages.rejected('margin', 1),
line: 1,
column: 5,
},
{
code: 'a { margin: 0 0 0 0; }',
|
line: 1,
column: 5,
},
{
code: 'a { border: 1px solid blue; }',
message: messages.rejected('border', 2),
line: 1,
column: 5,
},
],
});
|
message: messages.rejected('margin', 1),
|
random_line_split
|
ctypes.rs
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::Ident;
use crate::conversion::api::ApiName;
use crate::types::Namespace;
use crate::{conversion::api::Api, known_types::known_types, types::QualifiedName};
use super::fun::FnPhase;
/// Spot any variable-length C types (e.g. unsigned long)
/// used in the [Api]s and append those as extra APIs.
pub(crate) fn append_ctype_information(apis: &mut Vec<Api<FnPhase>>) {
let ctypes: HashMap<Ident, QualifiedName> = apis
.iter()
.flat_map(|api| api.deps())
.filter(|ty| known_types().is_ctype(ty))
.map(|ty| (ty.get_final_ident(), ty))
.collect();
for (id, typename) in ctypes {
apis.push(Api::CType {
name: ApiName::new(&Namespace::new(), id),
typename,
});
}
|
}
|
random_line_split
|
|
ctypes.rs
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::Ident;
use crate::conversion::api::ApiName;
use crate::types::Namespace;
use crate::{conversion::api::Api, known_types::known_types, types::QualifiedName};
use super::fun::FnPhase;
/// Spot any variable-length C types (e.g. unsigned long)
/// used in the [Api]s and append those as extra APIs.
pub(crate) fn
|
(apis: &mut Vec<Api<FnPhase>>) {
let ctypes: HashMap<Ident, QualifiedName> = apis
.iter()
.flat_map(|api| api.deps())
.filter(|ty| known_types().is_ctype(ty))
.map(|ty| (ty.get_final_ident(), ty))
.collect();
for (id, typename) in ctypes {
apis.push(Api::CType {
name: ApiName::new(&Namespace::new(), id),
typename,
});
}
}
|
append_ctype_information
|
identifier_name
|
ctypes.rs
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::Ident;
use crate::conversion::api::ApiName;
use crate::types::Namespace;
use crate::{conversion::api::Api, known_types::known_types, types::QualifiedName};
use super::fun::FnPhase;
/// Spot any variable-length C types (e.g. unsigned long)
/// used in the [Api]s and append those as extra APIs.
pub(crate) fn append_ctype_information(apis: &mut Vec<Api<FnPhase>>)
|
{
let ctypes: HashMap<Ident, QualifiedName> = apis
.iter()
.flat_map(|api| api.deps())
.filter(|ty| known_types().is_ctype(ty))
.map(|ty| (ty.get_final_ident(), ty))
.collect();
for (id, typename) in ctypes {
apis.push(Api::CType {
name: ApiName::new(&Namespace::new(), id),
typename,
});
}
}
|
identifier_body
|
|
test_runSimulation.py
|
from .. import runSimulation
import pytest, os
def
|
():
infoFile = open(os.path.dirname(__file__) + '/../EngFrJapGerm.txt')
runSim1 = runSimulation.runSimulation(infoFile.readlines(), 611)
infoFile.close()
# These four ids are present in EngFrJapGerm: French=584, English=611, German=2253, Japanese=3856
# Check that makeSelectedSentenceList returns the proper number of sentences for each id
runSim1.makeSelectedSentenceList()
assert runSim1.selectedSentences
assert len(runSim1.selectedSentences) == 540
runSim1.targetGrammar = 584
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 756
runSim1.targetGrammar = 2253
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1134
runSim1.targetGrammar = 3856
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1092
# Check that makeSelectedSentencesList returns empty lists
# when given ids that don't exist in the orignal txt file
runSim1.targetGrammar = 612
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
runSim1.targetGrammar = None
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
|
test_makeSelectedSentences
|
identifier_name
|
test_runSimulation.py
|
from .. import runSimulation
import pytest, os
def test_makeSelectedSentences():
|
infoFile = open(os.path.dirname(__file__) + '/../EngFrJapGerm.txt')
runSim1 = runSimulation.runSimulation(infoFile.readlines(), 611)
infoFile.close()
# These four ids are present in EngFrJapGerm: French=584, English=611, German=2253, Japanese=3856
# Check that makeSelectedSentenceList returns the proper number of sentences for each id
runSim1.makeSelectedSentenceList()
assert runSim1.selectedSentences
assert len(runSim1.selectedSentences) == 540
runSim1.targetGrammar = 584
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 756
runSim1.targetGrammar = 2253
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1134
runSim1.targetGrammar = 3856
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1092
# Check that makeSelectedSentencesList returns empty lists
# when given ids that don't exist in the orignal txt file
runSim1.targetGrammar = 612
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
runSim1.targetGrammar = None
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
|
identifier_body
|
|
test_runSimulation.py
|
from .. import runSimulation
import pytest, os
def test_makeSelectedSentences():
infoFile = open(os.path.dirname(__file__) + '/../EngFrJapGerm.txt')
runSim1 = runSimulation.runSimulation(infoFile.readlines(), 611)
infoFile.close()
# These four ids are present in EngFrJapGerm: French=584, English=611, German=2253, Japanese=3856
# Check that makeSelectedSentenceList returns the proper number of sentences for each id
runSim1.makeSelectedSentenceList()
assert runSim1.selectedSentences
assert len(runSim1.selectedSentences) == 540
runSim1.targetGrammar = 584
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 756
runSim1.targetGrammar = 2253
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1134
runSim1.targetGrammar = 3856
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert len(runSim1.selectedSentences) == 1092
# Check that makeSelectedSentencesList returns empty lists
|
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
runSim1.targetGrammar = None
runSim1.selectedSentences = []
runSim1.makeSelectedSentenceList()
assert not runSim1.selectedSentences
|
# when given ids that don't exist in the orignal txt file
runSim1.targetGrammar = 612
|
random_line_split
|
lexer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{errors::*, parser::syntax::make_loc, FileCommentMap, MatchedFileCommentMap};
use codespan::{ByteIndex, Span};
use move_ir_types::location::Loc;
use std::{collections::BTreeMap, fmt};
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Tok {
EOF,
AddressValue,
NumValue,
U8Value,
U64Value,
U128Value,
ByteStringValue,
IdentifierValue,
Exclaim,
ExclaimEqual,
Percent,
Amp,
AmpAmp,
AmpMut,
LParen,
RParen,
LBracket,
RBracket,
Star,
Plus,
Comma,
Minus,
Period,
PeriodPeriod,
Slash,
Colon,
ColonColon,
Semicolon,
Less,
LessEqual,
LessLess,
Equal,
EqualEqual,
EqualEqualGreater,
Greater,
GreaterEqual,
GreaterGreater,
Caret,
Abort,
Acquires,
As,
Break,
Continue,
Copy,
Copyable,
Define,
Else,
False,
If,
Invariant,
Let,
Loop,
Module,
Move,
Native,
Public,
Resource,
Return,
Spec,
Struct,
True,
Use,
While,
LBrace,
Pipe,
PipePipe,
RBrace,
Fun,
Script,
Const,
Friend,
}
impl fmt::Display for Tok {
fn fmt<'f>(&self, formatter: &mut fmt::Formatter<'f>) -> Result<(), fmt::Error> {
use Tok::*;
let s = match *self {
EOF => "[end-of-file]",
AddressValue => "[Address]",
NumValue => "[Num]",
U8Value => "[U8]",
U64Value => "[U64]",
U128Value => "[U128]",
ByteStringValue => "[ByteString]",
IdentifierValue => "[Identifier]",
Exclaim => "!",
ExclaimEqual => "!=",
Percent => "%",
Amp => "&",
AmpAmp => "&&",
AmpMut => "&mut",
LParen => "(",
RParen => ")",
LBracket => "[",
RBracket => "]",
Star => "*",
Plus => "+",
Comma => ",",
Minus => "-",
Period => ".",
PeriodPeriod => "..",
Slash => "/",
Colon => ":",
ColonColon => "::",
Semicolon => ";",
Less => "<",
LessEqual => "<=",
LessLess => "<<",
Equal => "=",
EqualEqual => "==",
EqualEqualGreater => "==>",
Greater => ">",
GreaterEqual => ">=",
GreaterGreater => ">>",
Caret => "^",
Abort => "abort",
Acquires => "acquires",
As => "as",
Break => "break",
Continue => "continue",
Copy => "copy",
Copyable => "copyable",
Define => "define",
Else => "else",
False => "false",
If => "if",
Invariant => "invariant",
Let => "let",
Loop => "loop",
Module => "module",
Move => "move",
Native => "native",
Public => "public",
Resource => "resource",
Return => "return",
Spec => "spec",
Struct => "struct",
True => "true",
Use => "use",
While => "while",
LBrace => "{",
Pipe => "|",
PipePipe => "||",
RBrace => "}",
Fun => "fun",
Script => "script",
Const => "const",
Friend => "friend",
};
fmt::Display::fmt(s, formatter)
}
}
pub struct Lexer<'input> {
text: &'input str,
file: &'static str,
doc_comments: FileCommentMap,
matched_doc_comments: MatchedFileCommentMap,
prev_end: usize,
cur_start: usize,
cur_end: usize,
token: Tok,
}
impl<'input> Lexer<'input> {
pub fn new(
text: &'input str,
file: &'static str,
doc_comments: BTreeMap<Span, String>,
) -> Lexer<'input> {
Lexer {
text,
file,
doc_comments,
matched_doc_comments: BTreeMap::new(),
prev_end: 0,
cur_start: 0,
cur_end: 0,
token: Tok::EOF,
}
}
pub fn peek(&self) -> Tok {
self.token
}
pub fn content(&self) -> &str {
&self.text[self.cur_start..self.cur_end]
}
pub fn file_name(&self) -> &'static str {
self.file
}
pub fn start_loc(&self) -> usize {
self.cur_start
}
pub fn previous_end_loc(&self) -> usize {
self.prev_end
}
// Look ahead to the next token after the current one and return it without advancing
// the state of the lexer.
pub fn lookahead(&self) -> Result<Tok, Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (tok, _) = find_token(self.file, text, offset)?;
Ok(tok)
}
// Look ahead to the next two tokens after the current one and return them without advancing
// the state of the lexer.
pub fn lookahead2(&self) -> Result<(Tok, Tok), Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (first, length) = find_token(self.file, text, offset)?;
let text2 = self.text[offset + length..].trim_start();
let offset2 = self.text.len() - text2.len();
let (second, _) = find_token(self.file, text2, offset2)?;
Ok((first, second))
}
// Matches the doc comments after the last token (or the beginning of the file) to the position
// of the current token. This moves the comments out of `doc_comments` and
// into `matched_doc_comments`. At the end of parsing, if `doc_comments` is not empty, errors
// for stale doc comments will be produced.
//
// Calling this function during parsing effectively marks a valid point for documentation
// comments. The documentation comments are not stored in the AST, but can be retrieved by
// using the start position of an item as an index into `matched_doc_comments`.
pub fn match_doc_comments(&mut self) {
let start = self.previous_end_loc() as u32;
let end = self.cur_start as u32;
let mut matched = vec![];
let merged = self
.doc_comments
.range(Span::new(start, start)..Span::new(end, end))
.map(|(span, s)| {
matched.push(*span);
s.clone()
})
.collect::<Vec<String>>()
.join("\n");
for span in matched {
self.doc_comments.remove(&span);
}
self.matched_doc_comments.insert(ByteIndex(end), merged);
}
// At the end of parsing, checks whether there are any unmatched documentation comments,
// producing errors if so. Otherwise returns a map from file position to associated
// documentation.
pub fn check_and_get_doc_comments(&mut self) -> Result<MatchedFileCommentMap, Errors>
|
pub fn advance(&mut self) -> Result<(), Error> {
self.prev_end = self.cur_end;
let text = self.text[self.cur_end..].trim_start();
self.cur_start = self.text.len() - text.len();
let (token, len) = find_token(self.file, text, self.cur_start)?;
self.cur_end = self.cur_start + len;
self.token = token;
Ok(())
}
// Replace the current token. The lexer will always match the longest token,
// but sometimes the parser will prefer to replace it with a shorter one,
// e.g., ">" instead of ">>".
pub fn replace_token(&mut self, token: Tok, len: usize) {
self.token = token;
self.cur_end = self.cur_start + len
}
}
// Find the next token and its length without changing the state of the lexer.
fn find_token(file: &'static str, text: &str, start_offset: usize) -> Result<(Tok, usize), Error> {
let c: char = match text.chars().next() {
Some(next_char) => next_char,
None => {
return Ok((Tok::EOF, 0));
}
};
let (tok, len) = match c {
'0'..='9' => {
if text.starts_with("0x") && text.len() > 2 {
let hex_len = get_hex_digits_len(&text[2..]);
if hex_len == 0 {
// Fall back to treating this as a "0" token.
(Tok::NumValue, 1)
} else {
(Tok::AddressValue, 2 + hex_len)
}
} else {
get_decimal_number(&text)
}
}
'A'..='Z' | 'a'..='z' | '_' => {
if text.starts_with("x\"") || text.starts_with("b\"") {
let line = &text.lines().next().unwrap()[2..];
match get_string_len(line) {
Some(last_quote) => (Tok::ByteStringValue, 2 + last_quote + 1),
None => {
return Err(vec![(
make_loc(file, start_offset, start_offset + line.len() + 2),
"Missing closing quote (\") after byte string".to_string(),
)])
}
}
} else {
let len = get_name_len(&text);
(get_name_token(&text[..len]), len)
}
}
'&' => {
if text.starts_with("&mut ") {
(Tok::AmpMut, 5)
} else if text.starts_with("&&") {
(Tok::AmpAmp, 2)
} else {
(Tok::Amp, 1)
}
}
'|' => {
if text.starts_with("||") {
(Tok::PipePipe, 2)
} else {
(Tok::Pipe, 1)
}
}
'=' => {
if text.starts_with("==>") {
(Tok::EqualEqualGreater, 3)
} else if text.starts_with("==") {
(Tok::EqualEqual, 2)
} else {
(Tok::Equal, 1)
}
}
'!' => {
if text.starts_with("!=") {
(Tok::ExclaimEqual, 2)
} else {
(Tok::Exclaim, 1)
}
}
'<' => {
if text.starts_with("<=") {
(Tok::LessEqual, 2)
} else if text.starts_with("<<") {
(Tok::LessLess, 2)
} else {
(Tok::Less, 1)
}
}
'>' => {
if text.starts_with(">=") {
(Tok::GreaterEqual, 2)
} else if text.starts_with(">>") {
(Tok::GreaterGreater, 2)
} else {
(Tok::Greater, 1)
}
}
':' => {
if text.starts_with("::") {
(Tok::ColonColon, 2)
} else {
(Tok::Colon, 1)
}
}
'%' => (Tok::Percent, 1),
'(' => (Tok::LParen, 1),
')' => (Tok::RParen, 1),
'[' => (Tok::LBracket, 1),
']' => (Tok::RBracket, 1),
'*' => (Tok::Star, 1),
'+' => (Tok::Plus, 1),
',' => (Tok::Comma, 1),
'-' => (Tok::Minus, 1),
'.' => {
if text.starts_with("..") {
(Tok::PeriodPeriod, 2)
} else {
(Tok::Period, 1)
}
}
'/' => (Tok::Slash, 1),
';' => (Tok::Semicolon, 1),
'^' => (Tok::Caret, 1),
'{' => (Tok::LBrace, 1),
'}' => (Tok::RBrace, 1),
_ => {
let loc = make_loc(file, start_offset, start_offset);
return Err(vec![(loc, format!("Invalid character: '{}'", c))]);
}
};
Ok((tok, len))
}
// Return the length of the substring matching [a-zA-Z0-9_]. Note that
// this does not do any special check for whether the first character
// starts with a number, so the caller is responsible for any additional
// checks on the first character.
fn get_name_len(text: &str) -> usize {
text.chars()
.position(|c| !matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
fn get_decimal_number(text: &str) -> (Tok, usize) {
let len = text
.chars()
.position(|c| !matches!(c, '0'..='9'))
.unwrap_or_else(|| text.len());
let rest = &text[len..];
if rest.starts_with("u8") {
(Tok::U8Value, len + 2)
} else if rest.starts_with("u64") {
(Tok::U64Value, len + 3)
} else if rest.starts_with("u128") {
(Tok::U128Value, len + 4)
} else {
(Tok::NumValue, len)
}
}
// Return the length of the substring containing characters in [0-9a-fA-F].
fn get_hex_digits_len(text: &str) -> usize {
text.find(|c| !matches!(c, 'a'..='f' | 'A'..='F' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
// Return the length of the quoted string, or None if there is no closing quote.
fn get_string_len(text: &str) -> Option<usize> {
let mut pos = 0;
let mut iter = text.chars();
while let Some(chr) = iter.next() {
if chr == '\\' {
// Skip over the escaped character (e.g., a quote or another backslash)
if iter.next().is_some() {
pos += 1;
}
} else if chr == '"' {
return Some(pos);
}
pos += 1;
}
None
}
fn get_name_token(name: &str) -> Tok {
match name {
"abort" => Tok::Abort,
"acquires" => Tok::Acquires,
"as" => Tok::As,
"break" => Tok::Break,
"const" => Tok::Const,
"continue" => Tok::Continue,
"copy" => Tok::Copy,
"copyable" => Tok::Copyable,
"define" => Tok::Define,
"else" => Tok::Else,
"false" => Tok::False,
"fun" => Tok::Fun,
"friend" => Tok::Friend,
"if" => Tok::If,
"invariant" => Tok::Invariant,
"let" => Tok::Let,
"loop" => Tok::Loop,
"module" => Tok::Module,
"move" => Tok::Move,
"native" => Tok::Native,
"public" => Tok::Public,
"resource" => Tok::Resource,
"return" => Tok::Return,
"script" => Tok::Script,
"spec" => Tok::Spec,
"struct" => Tok::Struct,
"true" => Tok::True,
"use" => Tok::Use,
"while" => Tok::While,
_ => Tok::IdentifierValue,
}
}
|
{
let errors = self
.doc_comments
.iter()
.map(|(span, _)| {
vec![(
Loc::new(self.file, *span),
"documentation comment cannot be matched to a language item".to_string(),
)]
})
.collect::<Errors>();
if errors.is_empty() {
Ok(std::mem::take(&mut self.matched_doc_comments))
} else {
Err(errors)
}
}
|
identifier_body
|
lexer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{errors::*, parser::syntax::make_loc, FileCommentMap, MatchedFileCommentMap};
use codespan::{ByteIndex, Span};
use move_ir_types::location::Loc;
use std::{collections::BTreeMap, fmt};
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Tok {
EOF,
AddressValue,
NumValue,
U8Value,
U64Value,
U128Value,
ByteStringValue,
IdentifierValue,
Exclaim,
ExclaimEqual,
Percent,
Amp,
AmpAmp,
AmpMut,
LParen,
RParen,
LBracket,
RBracket,
Star,
Plus,
Comma,
Minus,
Period,
PeriodPeriod,
Slash,
Colon,
ColonColon,
Semicolon,
Less,
LessEqual,
LessLess,
Equal,
EqualEqual,
EqualEqualGreater,
Greater,
GreaterEqual,
GreaterGreater,
Caret,
Abort,
Acquires,
As,
Break,
Continue,
Copy,
Copyable,
Define,
Else,
False,
If,
Invariant,
Let,
Loop,
Module,
Move,
Native,
Public,
Resource,
Return,
Spec,
Struct,
True,
Use,
While,
LBrace,
Pipe,
PipePipe,
RBrace,
Fun,
Script,
Const,
Friend,
}
impl fmt::Display for Tok {
fn fmt<'f>(&self, formatter: &mut fmt::Formatter<'f>) -> Result<(), fmt::Error> {
use Tok::*;
let s = match *self {
EOF => "[end-of-file]",
AddressValue => "[Address]",
NumValue => "[Num]",
U8Value => "[U8]",
U64Value => "[U64]",
U128Value => "[U128]",
ByteStringValue => "[ByteString]",
IdentifierValue => "[Identifier]",
Exclaim => "!",
ExclaimEqual => "!=",
Percent => "%",
Amp => "&",
AmpAmp => "&&",
AmpMut => "&mut",
LParen => "(",
RParen => ")",
LBracket => "[",
RBracket => "]",
Star => "*",
Plus => "+",
Comma => ",",
Minus => "-",
Period => ".",
PeriodPeriod => "..",
Slash => "/",
Colon => ":",
ColonColon => "::",
Semicolon => ";",
Less => "<",
LessEqual => "<=",
LessLess => "<<",
Equal => "=",
EqualEqual => "==",
EqualEqualGreater => "==>",
Greater => ">",
GreaterEqual => ">=",
GreaterGreater => ">>",
Caret => "^",
Abort => "abort",
Acquires => "acquires",
As => "as",
Break => "break",
Continue => "continue",
Copy => "copy",
Copyable => "copyable",
Define => "define",
Else => "else",
False => "false",
If => "if",
Invariant => "invariant",
Let => "let",
Loop => "loop",
Module => "module",
Move => "move",
Native => "native",
Public => "public",
Resource => "resource",
Return => "return",
|
True => "true",
Use => "use",
While => "while",
LBrace => "{",
Pipe => "|",
PipePipe => "||",
RBrace => "}",
Fun => "fun",
Script => "script",
Const => "const",
Friend => "friend",
};
fmt::Display::fmt(s, formatter)
}
}
pub struct Lexer<'input> {
text: &'input str,
file: &'static str,
doc_comments: FileCommentMap,
matched_doc_comments: MatchedFileCommentMap,
prev_end: usize,
cur_start: usize,
cur_end: usize,
token: Tok,
}
impl<'input> Lexer<'input> {
pub fn new(
text: &'input str,
file: &'static str,
doc_comments: BTreeMap<Span, String>,
) -> Lexer<'input> {
Lexer {
text,
file,
doc_comments,
matched_doc_comments: BTreeMap::new(),
prev_end: 0,
cur_start: 0,
cur_end: 0,
token: Tok::EOF,
}
}
pub fn peek(&self) -> Tok {
self.token
}
pub fn content(&self) -> &str {
&self.text[self.cur_start..self.cur_end]
}
pub fn file_name(&self) -> &'static str {
self.file
}
pub fn start_loc(&self) -> usize {
self.cur_start
}
pub fn previous_end_loc(&self) -> usize {
self.prev_end
}
// Look ahead to the next token after the current one and return it without advancing
// the state of the lexer.
pub fn lookahead(&self) -> Result<Tok, Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (tok, _) = find_token(self.file, text, offset)?;
Ok(tok)
}
// Look ahead to the next two tokens after the current one and return them without advancing
// the state of the lexer.
pub fn lookahead2(&self) -> Result<(Tok, Tok), Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (first, length) = find_token(self.file, text, offset)?;
let text2 = self.text[offset + length..].trim_start();
let offset2 = self.text.len() - text2.len();
let (second, _) = find_token(self.file, text2, offset2)?;
Ok((first, second))
}
// Matches the doc comments after the last token (or the beginning of the file) to the position
// of the current token. This moves the comments out of `doc_comments` and
// into `matched_doc_comments`. At the end of parsing, if `doc_comments` is not empty, errors
// for stale doc comments will be produced.
//
// Calling this function during parsing effectively marks a valid point for documentation
// comments. The documentation comments are not stored in the AST, but can be retrieved by
// using the start position of an item as an index into `matched_doc_comments`.
pub fn match_doc_comments(&mut self) {
let start = self.previous_end_loc() as u32;
let end = self.cur_start as u32;
let mut matched = vec![];
let merged = self
.doc_comments
.range(Span::new(start, start)..Span::new(end, end))
.map(|(span, s)| {
matched.push(*span);
s.clone()
})
.collect::<Vec<String>>()
.join("\n");
for span in matched {
self.doc_comments.remove(&span);
}
self.matched_doc_comments.insert(ByteIndex(end), merged);
}
// At the end of parsing, checks whether there are any unmatched documentation comments,
// producing errors if so. Otherwise returns a map from file position to associated
// documentation.
pub fn check_and_get_doc_comments(&mut self) -> Result<MatchedFileCommentMap, Errors> {
let errors = self
.doc_comments
.iter()
.map(|(span, _)| {
vec![(
Loc::new(self.file, *span),
"documentation comment cannot be matched to a language item".to_string(),
)]
})
.collect::<Errors>();
if errors.is_empty() {
Ok(std::mem::take(&mut self.matched_doc_comments))
} else {
Err(errors)
}
}
pub fn advance(&mut self) -> Result<(), Error> {
self.prev_end = self.cur_end;
let text = self.text[self.cur_end..].trim_start();
self.cur_start = self.text.len() - text.len();
let (token, len) = find_token(self.file, text, self.cur_start)?;
self.cur_end = self.cur_start + len;
self.token = token;
Ok(())
}
// Replace the current token. The lexer will always match the longest token,
// but sometimes the parser will prefer to replace it with a shorter one,
// e.g., ">" instead of ">>".
pub fn replace_token(&mut self, token: Tok, len: usize) {
self.token = token;
self.cur_end = self.cur_start + len
}
}
// Find the next token and its length without changing the state of the lexer.
fn find_token(file: &'static str, text: &str, start_offset: usize) -> Result<(Tok, usize), Error> {
let c: char = match text.chars().next() {
Some(next_char) => next_char,
None => {
return Ok((Tok::EOF, 0));
}
};
let (tok, len) = match c {
'0'..='9' => {
if text.starts_with("0x") && text.len() > 2 {
let hex_len = get_hex_digits_len(&text[2..]);
if hex_len == 0 {
// Fall back to treating this as a "0" token.
(Tok::NumValue, 1)
} else {
(Tok::AddressValue, 2 + hex_len)
}
} else {
get_decimal_number(&text)
}
}
'A'..='Z' | 'a'..='z' | '_' => {
if text.starts_with("x\"") || text.starts_with("b\"") {
let line = &text.lines().next().unwrap()[2..];
match get_string_len(line) {
Some(last_quote) => (Tok::ByteStringValue, 2 + last_quote + 1),
None => {
return Err(vec![(
make_loc(file, start_offset, start_offset + line.len() + 2),
"Missing closing quote (\") after byte string".to_string(),
)])
}
}
} else {
let len = get_name_len(&text);
(get_name_token(&text[..len]), len)
}
}
'&' => {
if text.starts_with("&mut ") {
(Tok::AmpMut, 5)
} else if text.starts_with("&&") {
(Tok::AmpAmp, 2)
} else {
(Tok::Amp, 1)
}
}
'|' => {
if text.starts_with("||") {
(Tok::PipePipe, 2)
} else {
(Tok::Pipe, 1)
}
}
'=' => {
if text.starts_with("==>") {
(Tok::EqualEqualGreater, 3)
} else if text.starts_with("==") {
(Tok::EqualEqual, 2)
} else {
(Tok::Equal, 1)
}
}
'!' => {
if text.starts_with("!=") {
(Tok::ExclaimEqual, 2)
} else {
(Tok::Exclaim, 1)
}
}
'<' => {
if text.starts_with("<=") {
(Tok::LessEqual, 2)
} else if text.starts_with("<<") {
(Tok::LessLess, 2)
} else {
(Tok::Less, 1)
}
}
'>' => {
if text.starts_with(">=") {
(Tok::GreaterEqual, 2)
} else if text.starts_with(">>") {
(Tok::GreaterGreater, 2)
} else {
(Tok::Greater, 1)
}
}
':' => {
if text.starts_with("::") {
(Tok::ColonColon, 2)
} else {
(Tok::Colon, 1)
}
}
'%' => (Tok::Percent, 1),
'(' => (Tok::LParen, 1),
')' => (Tok::RParen, 1),
'[' => (Tok::LBracket, 1),
']' => (Tok::RBracket, 1),
'*' => (Tok::Star, 1),
'+' => (Tok::Plus, 1),
',' => (Tok::Comma, 1),
'-' => (Tok::Minus, 1),
'.' => {
if text.starts_with("..") {
(Tok::PeriodPeriod, 2)
} else {
(Tok::Period, 1)
}
}
'/' => (Tok::Slash, 1),
';' => (Tok::Semicolon, 1),
'^' => (Tok::Caret, 1),
'{' => (Tok::LBrace, 1),
'}' => (Tok::RBrace, 1),
_ => {
let loc = make_loc(file, start_offset, start_offset);
return Err(vec![(loc, format!("Invalid character: '{}'", c))]);
}
};
Ok((tok, len))
}
// Return the length of the substring matching [a-zA-Z0-9_]. Note that
// this does not do any special check for whether the first character
// starts with a number, so the caller is responsible for any additional
// checks on the first character.
fn get_name_len(text: &str) -> usize {
text.chars()
.position(|c| !matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
fn get_decimal_number(text: &str) -> (Tok, usize) {
let len = text
.chars()
.position(|c| !matches!(c, '0'..='9'))
.unwrap_or_else(|| text.len());
let rest = &text[len..];
if rest.starts_with("u8") {
(Tok::U8Value, len + 2)
} else if rest.starts_with("u64") {
(Tok::U64Value, len + 3)
} else if rest.starts_with("u128") {
(Tok::U128Value, len + 4)
} else {
(Tok::NumValue, len)
}
}
// Return the length of the substring containing characters in [0-9a-fA-F].
fn get_hex_digits_len(text: &str) -> usize {
text.find(|c| !matches!(c, 'a'..='f' | 'A'..='F' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
// Return the length of the quoted string, or None if there is no closing quote.
fn get_string_len(text: &str) -> Option<usize> {
let mut pos = 0;
let mut iter = text.chars();
while let Some(chr) = iter.next() {
if chr == '\\' {
// Skip over the escaped character (e.g., a quote or another backslash)
if iter.next().is_some() {
pos += 1;
}
} else if chr == '"' {
return Some(pos);
}
pos += 1;
}
None
}
fn get_name_token(name: &str) -> Tok {
match name {
"abort" => Tok::Abort,
"acquires" => Tok::Acquires,
"as" => Tok::As,
"break" => Tok::Break,
"const" => Tok::Const,
"continue" => Tok::Continue,
"copy" => Tok::Copy,
"copyable" => Tok::Copyable,
"define" => Tok::Define,
"else" => Tok::Else,
"false" => Tok::False,
"fun" => Tok::Fun,
"friend" => Tok::Friend,
"if" => Tok::If,
"invariant" => Tok::Invariant,
"let" => Tok::Let,
"loop" => Tok::Loop,
"module" => Tok::Module,
"move" => Tok::Move,
"native" => Tok::Native,
"public" => Tok::Public,
"resource" => Tok::Resource,
"return" => Tok::Return,
"script" => Tok::Script,
"spec" => Tok::Spec,
"struct" => Tok::Struct,
"true" => Tok::True,
"use" => Tok::Use,
"while" => Tok::While,
_ => Tok::IdentifierValue,
}
}
|
Spec => "spec",
Struct => "struct",
|
random_line_split
|
lexer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{errors::*, parser::syntax::make_loc, FileCommentMap, MatchedFileCommentMap};
use codespan::{ByteIndex, Span};
use move_ir_types::location::Loc;
use std::{collections::BTreeMap, fmt};
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Tok {
EOF,
AddressValue,
NumValue,
U8Value,
U64Value,
U128Value,
ByteStringValue,
IdentifierValue,
Exclaim,
ExclaimEqual,
Percent,
Amp,
AmpAmp,
AmpMut,
LParen,
RParen,
LBracket,
RBracket,
Star,
Plus,
Comma,
Minus,
Period,
PeriodPeriod,
Slash,
Colon,
ColonColon,
Semicolon,
Less,
LessEqual,
LessLess,
Equal,
EqualEqual,
EqualEqualGreater,
Greater,
GreaterEqual,
GreaterGreater,
Caret,
Abort,
Acquires,
As,
Break,
Continue,
Copy,
Copyable,
Define,
Else,
False,
If,
Invariant,
Let,
Loop,
Module,
Move,
Native,
Public,
Resource,
Return,
Spec,
Struct,
True,
Use,
While,
LBrace,
Pipe,
PipePipe,
RBrace,
Fun,
Script,
Const,
Friend,
}
impl fmt::Display for Tok {
fn fmt<'f>(&self, formatter: &mut fmt::Formatter<'f>) -> Result<(), fmt::Error> {
use Tok::*;
let s = match *self {
EOF => "[end-of-file]",
AddressValue => "[Address]",
NumValue => "[Num]",
U8Value => "[U8]",
U64Value => "[U64]",
U128Value => "[U128]",
ByteStringValue => "[ByteString]",
IdentifierValue => "[Identifier]",
Exclaim => "!",
ExclaimEqual => "!=",
Percent => "%",
Amp => "&",
AmpAmp => "&&",
AmpMut => "&mut",
LParen => "(",
RParen => ")",
LBracket => "[",
RBracket => "]",
Star => "*",
Plus => "+",
Comma => ",",
Minus => "-",
Period => ".",
PeriodPeriod => "..",
Slash => "/",
Colon => ":",
ColonColon => "::",
Semicolon => ";",
Less => "<",
LessEqual => "<=",
LessLess => "<<",
Equal => "=",
EqualEqual => "==",
EqualEqualGreater => "==>",
Greater => ">",
GreaterEqual => ">=",
GreaterGreater => ">>",
Caret => "^",
Abort => "abort",
Acquires => "acquires",
As => "as",
Break => "break",
Continue => "continue",
Copy => "copy",
Copyable => "copyable",
Define => "define",
Else => "else",
False => "false",
If => "if",
Invariant => "invariant",
Let => "let",
Loop => "loop",
Module => "module",
Move => "move",
Native => "native",
Public => "public",
Resource => "resource",
Return => "return",
Spec => "spec",
Struct => "struct",
True => "true",
Use => "use",
While => "while",
LBrace => "{",
Pipe => "|",
PipePipe => "||",
RBrace => "}",
Fun => "fun",
Script => "script",
Const => "const",
Friend => "friend",
};
fmt::Display::fmt(s, formatter)
}
}
pub struct Lexer<'input> {
text: &'input str,
file: &'static str,
doc_comments: FileCommentMap,
matched_doc_comments: MatchedFileCommentMap,
prev_end: usize,
cur_start: usize,
cur_end: usize,
token: Tok,
}
impl<'input> Lexer<'input> {
pub fn new(
text: &'input str,
file: &'static str,
doc_comments: BTreeMap<Span, String>,
) -> Lexer<'input> {
Lexer {
text,
file,
doc_comments,
matched_doc_comments: BTreeMap::new(),
prev_end: 0,
cur_start: 0,
cur_end: 0,
token: Tok::EOF,
}
}
pub fn peek(&self) -> Tok {
self.token
}
pub fn content(&self) -> &str {
&self.text[self.cur_start..self.cur_end]
}
pub fn file_name(&self) -> &'static str {
self.file
}
pub fn start_loc(&self) -> usize {
self.cur_start
}
pub fn previous_end_loc(&self) -> usize {
self.prev_end
}
// Look ahead to the next token after the current one and return it without advancing
// the state of the lexer.
pub fn lookahead(&self) -> Result<Tok, Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (tok, _) = find_token(self.file, text, offset)?;
Ok(tok)
}
// Look ahead to the next two tokens after the current one and return them without advancing
// the state of the lexer.
pub fn lookahead2(&self) -> Result<(Tok, Tok), Error> {
let text = self.text[self.cur_end..].trim_start();
let offset = self.text.len() - text.len();
let (first, length) = find_token(self.file, text, offset)?;
let text2 = self.text[offset + length..].trim_start();
let offset2 = self.text.len() - text2.len();
let (second, _) = find_token(self.file, text2, offset2)?;
Ok((first, second))
}
// Matches the doc comments after the last token (or the beginning of the file) to the position
// of the current token. This moves the comments out of `doc_comments` and
// into `matched_doc_comments`. At the end of parsing, if `doc_comments` is not empty, errors
// for stale doc comments will be produced.
//
// Calling this function during parsing effectively marks a valid point for documentation
// comments. The documentation comments are not stored in the AST, but can be retrieved by
// using the start position of an item as an index into `matched_doc_comments`.
pub fn
|
(&mut self) {
let start = self.previous_end_loc() as u32;
let end = self.cur_start as u32;
let mut matched = vec![];
let merged = self
.doc_comments
.range(Span::new(start, start)..Span::new(end, end))
.map(|(span, s)| {
matched.push(*span);
s.clone()
})
.collect::<Vec<String>>()
.join("\n");
for span in matched {
self.doc_comments.remove(&span);
}
self.matched_doc_comments.insert(ByteIndex(end), merged);
}
// At the end of parsing, checks whether there are any unmatched documentation comments,
// producing errors if so. Otherwise returns a map from file position to associated
// documentation.
pub fn check_and_get_doc_comments(&mut self) -> Result<MatchedFileCommentMap, Errors> {
let errors = self
.doc_comments
.iter()
.map(|(span, _)| {
vec![(
Loc::new(self.file, *span),
"documentation comment cannot be matched to a language item".to_string(),
)]
})
.collect::<Errors>();
if errors.is_empty() {
Ok(std::mem::take(&mut self.matched_doc_comments))
} else {
Err(errors)
}
}
pub fn advance(&mut self) -> Result<(), Error> {
self.prev_end = self.cur_end;
let text = self.text[self.cur_end..].trim_start();
self.cur_start = self.text.len() - text.len();
let (token, len) = find_token(self.file, text, self.cur_start)?;
self.cur_end = self.cur_start + len;
self.token = token;
Ok(())
}
// Replace the current token. The lexer will always match the longest token,
// but sometimes the parser will prefer to replace it with a shorter one,
// e.g., ">" instead of ">>".
pub fn replace_token(&mut self, token: Tok, len: usize) {
self.token = token;
self.cur_end = self.cur_start + len
}
}
// Find the next token and its length without changing the state of the lexer.
fn find_token(file: &'static str, text: &str, start_offset: usize) -> Result<(Tok, usize), Error> {
let c: char = match text.chars().next() {
Some(next_char) => next_char,
None => {
return Ok((Tok::EOF, 0));
}
};
let (tok, len) = match c {
'0'..='9' => {
if text.starts_with("0x") && text.len() > 2 {
let hex_len = get_hex_digits_len(&text[2..]);
if hex_len == 0 {
// Fall back to treating this as a "0" token.
(Tok::NumValue, 1)
} else {
(Tok::AddressValue, 2 + hex_len)
}
} else {
get_decimal_number(&text)
}
}
'A'..='Z' | 'a'..='z' | '_' => {
if text.starts_with("x\"") || text.starts_with("b\"") {
let line = &text.lines().next().unwrap()[2..];
match get_string_len(line) {
Some(last_quote) => (Tok::ByteStringValue, 2 + last_quote + 1),
None => {
return Err(vec![(
make_loc(file, start_offset, start_offset + line.len() + 2),
"Missing closing quote (\") after byte string".to_string(),
)])
}
}
} else {
let len = get_name_len(&text);
(get_name_token(&text[..len]), len)
}
}
'&' => {
if text.starts_with("&mut ") {
(Tok::AmpMut, 5)
} else if text.starts_with("&&") {
(Tok::AmpAmp, 2)
} else {
(Tok::Amp, 1)
}
}
'|' => {
if text.starts_with("||") {
(Tok::PipePipe, 2)
} else {
(Tok::Pipe, 1)
}
}
'=' => {
if text.starts_with("==>") {
(Tok::EqualEqualGreater, 3)
} else if text.starts_with("==") {
(Tok::EqualEqual, 2)
} else {
(Tok::Equal, 1)
}
}
'!' => {
if text.starts_with("!=") {
(Tok::ExclaimEqual, 2)
} else {
(Tok::Exclaim, 1)
}
}
'<' => {
if text.starts_with("<=") {
(Tok::LessEqual, 2)
} else if text.starts_with("<<") {
(Tok::LessLess, 2)
} else {
(Tok::Less, 1)
}
}
'>' => {
if text.starts_with(">=") {
(Tok::GreaterEqual, 2)
} else if text.starts_with(">>") {
(Tok::GreaterGreater, 2)
} else {
(Tok::Greater, 1)
}
}
':' => {
if text.starts_with("::") {
(Tok::ColonColon, 2)
} else {
(Tok::Colon, 1)
}
}
'%' => (Tok::Percent, 1),
'(' => (Tok::LParen, 1),
')' => (Tok::RParen, 1),
'[' => (Tok::LBracket, 1),
']' => (Tok::RBracket, 1),
'*' => (Tok::Star, 1),
'+' => (Tok::Plus, 1),
',' => (Tok::Comma, 1),
'-' => (Tok::Minus, 1),
'.' => {
if text.starts_with("..") {
(Tok::PeriodPeriod, 2)
} else {
(Tok::Period, 1)
}
}
'/' => (Tok::Slash, 1),
';' => (Tok::Semicolon, 1),
'^' => (Tok::Caret, 1),
'{' => (Tok::LBrace, 1),
'}' => (Tok::RBrace, 1),
_ => {
let loc = make_loc(file, start_offset, start_offset);
return Err(vec![(loc, format!("Invalid character: '{}'", c))]);
}
};
Ok((tok, len))
}
// Return the length of the substring matching [a-zA-Z0-9_]. Note that
// this does not do any special check for whether the first character
// starts with a number, so the caller is responsible for any additional
// checks on the first character.
fn get_name_len(text: &str) -> usize {
text.chars()
.position(|c| !matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
fn get_decimal_number(text: &str) -> (Tok, usize) {
let len = text
.chars()
.position(|c| !matches!(c, '0'..='9'))
.unwrap_or_else(|| text.len());
let rest = &text[len..];
if rest.starts_with("u8") {
(Tok::U8Value, len + 2)
} else if rest.starts_with("u64") {
(Tok::U64Value, len + 3)
} else if rest.starts_with("u128") {
(Tok::U128Value, len + 4)
} else {
(Tok::NumValue, len)
}
}
// Return the length of the substring containing characters in [0-9a-fA-F].
fn get_hex_digits_len(text: &str) -> usize {
text.find(|c| !matches!(c, 'a'..='f' | 'A'..='F' | '0'..='9'))
.unwrap_or_else(|| text.len())
}
// Return the length of the quoted string, or None if there is no closing quote.
fn get_string_len(text: &str) -> Option<usize> {
let mut pos = 0;
let mut iter = text.chars();
while let Some(chr) = iter.next() {
if chr == '\\' {
// Skip over the escaped character (e.g., a quote or another backslash)
if iter.next().is_some() {
pos += 1;
}
} else if chr == '"' {
return Some(pos);
}
pos += 1;
}
None
}
fn get_name_token(name: &str) -> Tok {
match name {
"abort" => Tok::Abort,
"acquires" => Tok::Acquires,
"as" => Tok::As,
"break" => Tok::Break,
"const" => Tok::Const,
"continue" => Tok::Continue,
"copy" => Tok::Copy,
"copyable" => Tok::Copyable,
"define" => Tok::Define,
"else" => Tok::Else,
"false" => Tok::False,
"fun" => Tok::Fun,
"friend" => Tok::Friend,
"if" => Tok::If,
"invariant" => Tok::Invariant,
"let" => Tok::Let,
"loop" => Tok::Loop,
"module" => Tok::Module,
"move" => Tok::Move,
"native" => Tok::Native,
"public" => Tok::Public,
"resource" => Tok::Resource,
"return" => Tok::Return,
"script" => Tok::Script,
"spec" => Tok::Spec,
"struct" => Tok::Struct,
"true" => Tok::True,
"use" => Tok::Use,
"while" => Tok::While,
_ => Tok::IdentifierValue,
}
}
|
match_doc_comments
|
identifier_name
|
tnzcore.ts
|
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="zh_CN">
<context>
<name>QObject</name>
<message>
<source>colors</source>
<translation>颜色</translation>
</message>
<message>
<source>Unidentified Action</source>
<translation>未标识的动作</translation>
</message>
<message>
<source>Skipping frame.</source>
<translation>跳帧。</translation>
</message>
<message>
<source>Malformed frame name</source>
<translation>不合格的帧名</translation>
</message>
</context>
<context>
<name>TCenterLineStrokeStyle</name>
<message>
<source>Constant</source>
<translation>恒定</translation>
</message>
|
<source>Thickness</source>
<translation>粗细</translation>
</message>
</context>
<context>
<name>TRasterImagePatternStrokeStyle</name>
<message>
<source>Distance</source>
<translation>距离</translation>
</message>
<message>
<source>Rotation</source>
<translation>旋转</translation>
</message>
</context>
<context>
<name>TVectorImagePatternStrokeStyle</name>
<message>
<source>Distance</source>
<translation>距离</translation>
</message>
<message>
<source>Rotation</source>
<translation>旋转</translation>
</message>
</context>
</TS>
|
<message>
|
random_line_split
|
sf_tuto.py
|
import time
import numpy
from golib.config.golib_conf import gsize, B, W, E
import camkifu.stone
from camkifu.core import imgutil
class StonesFinderTuto(camkifu.stone.StonesFinder):
""" This class has been used to write a tutorial on how to create a new StonesFinder.
Run Camkifu with this class as the default StonesFinder in order to replay one step of the tuto.
In order to select the step, rename the desired method below to '_find(...)' .
"""
def __init__(self, vmanager):
super().__init__(vmanager)
self.canvas = None
def _learn(self):
pass
# ------------------------------------------------------
#
# TUTORIAL STEPS
#
# ------------------------------------------------------
def _find_minimal(self, goban_img):
""" Implementation 1 of _find() from the tutorial.
"""
imgutil.draw_str(goban_img, "Hello stones finding tutorial !")
self._show(goban_img)
def _find_suggest(self, _):
""" Implementation 2 of _find() from the tutorial.
"""
# check emptiness to avoid complaints since this method will be called in a loop
if self.is_empty(2, 12):
# using "numpy" coordinates frame for x and y
self.suggest(B, 2, 12)
def _find_bulk(self, _):
""" Implementation 3 of _find() from the tutorial.
"""
# using "numpy" coordinates frame for x and y
black = ((W, 8, 8), (W, 8, 10), (W, 10, 8), (W, 10, 10))
white = ((B, 7, 7), (B, 7, 11), (B, 11, 7), (B, 11, 11), (B, 9, 9))
add = black if self.total_f_processed % 2 else white
rem = white if self.total_f_processed % 2 else black
moves = []
for color, r, c in add:
moves.append((color, r, c))
for _, r, c in rem:
|
self.bulk_update(moves)
def _find_getrect(self, goban_img):
""" Implementation 4 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r in range(gsize): # row index
for c in range(gsize): # column index
if r == c or r == gsize - c - 1:
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_border(self, goban_img):
""" Implementation 5 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_border(2): # 2 is the line height as in go vocabulary (0-based)
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_spiral(self, goban_img):
""" Implementation 6 of _find() from the tutorial.
"""
count = 0
if self.canvas is None:
self.canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_spiral():
if count == self.total_f_processed % gsize ** 2:
x0, y0, x1, y1 = self.getrect(r, c)
self.canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
break
count += 1
self.last_shown = 0 # force display of all images
self._show(self.canvas)
|
if not self.is_empty(r, c):
moves.append((E, r, c))
time.sleep(0.7)
|
random_line_split
|
sf_tuto.py
|
import time
import numpy
from golib.config.golib_conf import gsize, B, W, E
import camkifu.stone
from camkifu.core import imgutil
class StonesFinderTuto(camkifu.stone.StonesFinder):
""" This class has been used to write a tutorial on how to create a new StonesFinder.
Run Camkifu with this class as the default StonesFinder in order to replay one step of the tuto.
In order to select the step, rename the desired method below to '_find(...)' .
"""
def __init__(self, vmanager):
super().__init__(vmanager)
self.canvas = None
def _learn(self):
pass
# ------------------------------------------------------
#
# TUTORIAL STEPS
#
# ------------------------------------------------------
def _find_minimal(self, goban_img):
""" Implementation 1 of _find() from the tutorial.
"""
imgutil.draw_str(goban_img, "Hello stones finding tutorial !")
self._show(goban_img)
def
|
(self, _):
""" Implementation 2 of _find() from the tutorial.
"""
# check emptiness to avoid complaints since this method will be called in a loop
if self.is_empty(2, 12):
# using "numpy" coordinates frame for x and y
self.suggest(B, 2, 12)
def _find_bulk(self, _):
""" Implementation 3 of _find() from the tutorial.
"""
# using "numpy" coordinates frame for x and y
black = ((W, 8, 8), (W, 8, 10), (W, 10, 8), (W, 10, 10))
white = ((B, 7, 7), (B, 7, 11), (B, 11, 7), (B, 11, 11), (B, 9, 9))
add = black if self.total_f_processed % 2 else white
rem = white if self.total_f_processed % 2 else black
moves = []
for color, r, c in add:
moves.append((color, r, c))
for _, r, c in rem:
if not self.is_empty(r, c):
moves.append((E, r, c))
time.sleep(0.7)
self.bulk_update(moves)
def _find_getrect(self, goban_img):
""" Implementation 4 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r in range(gsize): # row index
for c in range(gsize): # column index
if r == c or r == gsize - c - 1:
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_border(self, goban_img):
""" Implementation 5 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_border(2): # 2 is the line height as in go vocabulary (0-based)
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_spiral(self, goban_img):
""" Implementation 6 of _find() from the tutorial.
"""
count = 0
if self.canvas is None:
self.canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_spiral():
if count == self.total_f_processed % gsize ** 2:
x0, y0, x1, y1 = self.getrect(r, c)
self.canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
break
count += 1
self.last_shown = 0 # force display of all images
self._show(self.canvas)
|
_find_suggest
|
identifier_name
|
sf_tuto.py
|
import time
import numpy
from golib.config.golib_conf import gsize, B, W, E
import camkifu.stone
from camkifu.core import imgutil
class StonesFinderTuto(camkifu.stone.StonesFinder):
""" This class has been used to write a tutorial on how to create a new StonesFinder.
Run Camkifu with this class as the default StonesFinder in order to replay one step of the tuto.
In order to select the step, rename the desired method below to '_find(...)' .
"""
def __init__(self, vmanager):
super().__init__(vmanager)
self.canvas = None
def _learn(self):
pass
# ------------------------------------------------------
#
# TUTORIAL STEPS
#
# ------------------------------------------------------
def _find_minimal(self, goban_img):
""" Implementation 1 of _find() from the tutorial.
"""
imgutil.draw_str(goban_img, "Hello stones finding tutorial !")
self._show(goban_img)
def _find_suggest(self, _):
""" Implementation 2 of _find() from the tutorial.
"""
# check emptiness to avoid complaints since this method will be called in a loop
if self.is_empty(2, 12):
# using "numpy" coordinates frame for x and y
self.suggest(B, 2, 12)
def _find_bulk(self, _):
""" Implementation 3 of _find() from the tutorial.
"""
# using "numpy" coordinates frame for x and y
black = ((W, 8, 8), (W, 8, 10), (W, 10, 8), (W, 10, 10))
white = ((B, 7, 7), (B, 7, 11), (B, 11, 7), (B, 11, 11), (B, 9, 9))
add = black if self.total_f_processed % 2 else white
rem = white if self.total_f_processed % 2 else black
moves = []
for color, r, c in add:
moves.append((color, r, c))
for _, r, c in rem:
if not self.is_empty(r, c):
moves.append((E, r, c))
time.sleep(0.7)
self.bulk_update(moves)
def _find_getrect(self, goban_img):
""" Implementation 4 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r in range(gsize): # row index
for c in range(gsize): # column index
|
self._show(canvas)
def _find_border(self, goban_img):
""" Implementation 5 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_border(2): # 2 is the line height as in go vocabulary (0-based)
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_spiral(self, goban_img):
""" Implementation 6 of _find() from the tutorial.
"""
count = 0
if self.canvas is None:
self.canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_spiral():
if count == self.total_f_processed % gsize ** 2:
x0, y0, x1, y1 = self.getrect(r, c)
self.canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
break
count += 1
self.last_shown = 0 # force display of all images
self._show(self.canvas)
|
if r == c or r == gsize - c - 1:
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
|
conditional_block
|
sf_tuto.py
|
import time
import numpy
from golib.config.golib_conf import gsize, B, W, E
import camkifu.stone
from camkifu.core import imgutil
class StonesFinderTuto(camkifu.stone.StonesFinder):
""" This class has been used to write a tutorial on how to create a new StonesFinder.
Run Camkifu with this class as the default StonesFinder in order to replay one step of the tuto.
In order to select the step, rename the desired method below to '_find(...)' .
"""
def __init__(self, vmanager):
super().__init__(vmanager)
self.canvas = None
def _learn(self):
|
# ------------------------------------------------------
#
# TUTORIAL STEPS
#
# ------------------------------------------------------
def _find_minimal(self, goban_img):
""" Implementation 1 of _find() from the tutorial.
"""
imgutil.draw_str(goban_img, "Hello stones finding tutorial !")
self._show(goban_img)
def _find_suggest(self, _):
""" Implementation 2 of _find() from the tutorial.
"""
# check emptiness to avoid complaints since this method will be called in a loop
if self.is_empty(2, 12):
# using "numpy" coordinates frame for x and y
self.suggest(B, 2, 12)
def _find_bulk(self, _):
""" Implementation 3 of _find() from the tutorial.
"""
# using "numpy" coordinates frame for x and y
black = ((W, 8, 8), (W, 8, 10), (W, 10, 8), (W, 10, 10))
white = ((B, 7, 7), (B, 7, 11), (B, 11, 7), (B, 11, 11), (B, 9, 9))
add = black if self.total_f_processed % 2 else white
rem = white if self.total_f_processed % 2 else black
moves = []
for color, r, c in add:
moves.append((color, r, c))
for _, r, c in rem:
if not self.is_empty(r, c):
moves.append((E, r, c))
time.sleep(0.7)
self.bulk_update(moves)
def _find_getrect(self, goban_img):
""" Implementation 4 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r in range(gsize): # row index
for c in range(gsize): # column index
if r == c or r == gsize - c - 1:
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_border(self, goban_img):
""" Implementation 5 of _find() from the tutorial.
"""
canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_border(2): # 2 is the line height as in go vocabulary (0-based)
x0, y0, x1, y1 = self.getrect(r, c)
canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
self._show(canvas)
def _find_spiral(self, goban_img):
""" Implementation 6 of _find() from the tutorial.
"""
count = 0
if self.canvas is None:
self.canvas = numpy.zeros_like(goban_img)
for r, c in self._empties_spiral():
if count == self.total_f_processed % gsize ** 2:
x0, y0, x1, y1 = self.getrect(r, c)
self.canvas[x0:x1, y0:y1] = goban_img[x0:x1, y0:y1]
break
count += 1
self.last_shown = 0 # force display of all images
self._show(self.canvas)
|
pass
|
identifier_body
|
dhcp.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use applayer;
use core;
use core::{ALPROTO_UNKNOWN, AppProto, Flow};
use dhcp::parser::*;
use libc;
use log::*;
use nom;
use parser::*;
use std;
use std::ffi::{CStr,CString};
use std::mem::transmute;
static mut ALPROTO_DHCP: AppProto = ALPROTO_UNKNOWN;
static DHCP_MIN_FRAME_LEN: u32 = 232;
pub const BOOTP_REQUEST: u8 = 1;
pub const BOOTP_REPLY: u8 = 2;
// DHCP option types. Names based on IANA naming:
// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.xhtml
pub const DHCP_OPT_SUBNET_MASK: u8 = 1;
pub const DHCP_OPT_ROUTERS: u8 = 3;
pub const DHCP_OPT_DNS_SERVER: u8 = 6;
pub const DHCP_OPT_HOSTNAME: u8 = 12;
pub const DHCP_OPT_REQUESTED_IP: u8 = 50;
pub const DHCP_OPT_ADDRESS_TIME: u8 = 51;
pub const DHCP_OPT_TYPE: u8 = 53;
pub const DHCP_OPT_SERVER_ID: u8 = 54;
pub const DHCP_OPT_PARAMETER_LIST: u8 = 55;
pub const DHCP_OPT_RENEWAL_TIME: u8 = 58;
pub const DHCP_OPT_REBINDING_TIME: u8 = 59;
pub const DHCP_OPT_CLIENT_ID: u8 = 61;
pub const DHCP_OPT_END: u8 = 255;
/// DHCP message types.
pub const DHCP_TYPE_DISCOVER: u8 = 1;
pub const DHCP_TYPE_OFFER: u8 = 2;
pub const DHCP_TYPE_REQUEST: u8 = 3;
pub const DHCP_TYPE_DECLINE: u8 = 4;
pub const DHCP_TYPE_ACK: u8 = 5;
pub const DHCP_TYPE_NAK: u8 = 6;
pub const DHCP_TYPE_RELEASE: u8 = 7;
pub const DHCP_TYPE_INFORM: u8 = 8;
/// DHCP parameter types.
/// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.txt
pub const DHCP_PARAM_SUBNET_MASK: u8 = 1;
pub const DHCP_PARAM_ROUTER: u8 = 3;
pub const DHCP_PARAM_DNS_SERVER: u8 = 6;
pub const DHCP_PARAM_DOMAIN: u8 = 15;
pub const DHCP_PARAM_ARP_TIMEOUT: u8 = 35;
pub const DHCP_PARAM_NTP_SERVER: u8 = 42;
pub const DHCP_PARAM_TFTP_SERVER_NAME: u8 = 66;
pub const DHCP_PARAM_TFTP_SERVER_IP: u8 = 150;
#[repr(u32)]
pub enum DHCPEvent {
TruncatedOptions = 0,
MalformedOptions,
}
/// The concept of a transaction is more to satisfy the Suricata
/// app-layer. This DHCP parser is actually stateless where each
/// message is its own transaction.
pub struct DHCPTransaction {
tx_id: u64,
pub message: DHCPMessage,
logged: applayer::LoggerFlags,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
}
impl DHCPTransaction {
pub fn new(id: u64, message: DHCPMessage) -> DHCPTransaction {
DHCPTransaction {
tx_id: id,
message: message,
logged: applayer::LoggerFlags::new(),
de_state: None,
events: std::ptr::null_mut(),
}
}
}
export_tx_get_detect_state!(rs_dhcp_tx_get_detect_state, DHCPTransaction);
export_tx_set_detect_state!(rs_dhcp_tx_set_detect_state, DHCPTransaction);
pub struct DHCPState {
// Internal transaction ID.
tx_id: u64,
// List of transactions.
transactions: Vec<DHCPTransaction>,
events: u16,
}
impl DHCPState {
pub fn new() -> DHCPState {
return DHCPState {
tx_id: 0,
transactions: Vec::new(),
events: 0,
};
}
pub fn parse(&mut self, input: &[u8]) -> bool {
match dhcp_parse(input) {
nom::IResult::Done(_, message) => {
let malformed_options = message.malformed_options;
let truncated_options = message.truncated_options;
self.tx_id += 1;
let transaction = DHCPTransaction::new(self.tx_id, message);
self.transactions.push(transaction);
if malformed_options {
self.set_event(DHCPEvent::MalformedOptions);
}
if truncated_options {
self.set_event(DHCPEvent::TruncatedOptions);
}
return true;
}
_ => {
return false;
}
}
}
pub fn get_tx(&mut self, tx_id: u64) -> Option<&DHCPTransaction> {
for tx in &mut self.transactions {
if tx.tx_id == tx_id + 1 {
return Some(tx);
}
}
return None;
}
fn free_tx(&mut self, tx_id: u64) {
let len = self.transactions.len();
let mut found = false;
let mut index = 0;
for i in 0..len {
let tx = &self.transactions[i];
if tx.tx_id == tx_id + 1 {
found = true;
index = i;
break;
}
}
if found
|
}
fn set_event(&mut self, event: DHCPEvent) {
if let Some(tx) = self.transactions.last_mut() {
core::sc_app_layer_decoder_events_set_event_raw(
&mut tx.events, event as u8);
self.events += 1;
}
}
fn get_tx_iterator(&mut self, min_tx_id: u64, state: &mut u64) ->
Option<(&DHCPTransaction, u64, bool)>
{
let mut index = *state as usize;
let len = self.transactions.len();
while index < len {
let tx = &self.transactions[index];
if tx.tx_id < min_tx_id + 1 {
index += 1;
continue;
}
*state = index as u64 + 1;
return Some((tx, tx.tx_id - 1, (len - index) > 1));
}
return None;
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_probing_parser(_flow: *const Flow,
input: *const libc::uint8_t,
input_len: u32,
_offset: *const u32) -> AppProto {
if input_len < DHCP_MIN_FRAME_LEN {
return ALPROTO_UNKNOWN;
}
let slice = build_slice!(input, input_len as usize);
match parse_header(slice) {
nom::IResult::Done(_, _) => {
return unsafe { ALPROTO_DHCP };
}
_ => {
return ALPROTO_UNKNOWN;
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_alstate_progress(_tx: *mut libc::c_void,
_direction: libc::uint8_t) -> libc::c_int {
// As this is a stateless parser, simply use 1.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_progress_completion_status(
_direction: libc::uint8_t) -> libc::c_int {
// The presence of a transaction means we are complete.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx(state: *mut libc::c_void,
tx_id: libc::uint64_t) -> *mut libc::c_void {
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => {
return unsafe { transmute(tx) };
}
None => {
return std::ptr::null_mut();
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_count(state: *mut libc::c_void) -> libc::uint64_t {
let state = cast_pointer!(state, DHCPState);
return state.tx_id;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_parse(_flow: *const core::Flow,
state: *mut libc::c_void,
_pstate: *mut libc::c_void,
input: *const libc::uint8_t,
input_len: u32,
_data: *const libc::c_void,
_flags: u8) -> i8 {
let state = cast_pointer!(state, DHCPState);
let buf = build_slice!(input, input_len as usize);
if state.parse(buf) {
return 1;
}
return -1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_tx_free(
state: *mut libc::c_void,
tx_id: libc::uint64_t)
{
let state = cast_pointer!(state, DHCPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_new() -> *mut libc::c_void {
let state = DHCPState::new();
let boxed = Box::new(state);
return unsafe {
transmute(boxed)
};
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_free(state: *mut libc::c_void) {
// Just unbox...
let _drop: Box<DHCPState> = unsafe { transmute(state) };
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_logged(_state: *mut libc::c_void, tx: *mut libc::c_void) -> u32 {
let tx = cast_pointer!(tx, DHCPTransaction);
return tx.logged.get();
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_set_logged(_state: *mut libc::c_void,
tx: *mut libc::c_void,
logged: libc::uint32_t) {
let tx = cast_pointer!(tx, DHCPTransaction);
tx.logged.set(logged);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_events(state: *mut libc::c_void,
tx_id: libc::uint64_t)
-> *mut core::AppLayerDecoderEvents
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => tx.events,
_ => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_event_info(
event_name: *const libc::c_char,
event_id: *mut libc::c_int,
event_type: *mut core::AppLayerEventType)
-> libc::c_int
{
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"malformed_options" => DHCPEvent::MalformedOptions as i32,
"truncated_options" => DHCPEvent::TruncatedOptions as i32,
_ => -1, // unknown event
}
},
Err(_) => -1, // UTF-8 conversion failed
};
unsafe{
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as libc::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_iterator(
_ipproto: libc::uint8_t,
_alproto: AppProto,
state: *mut libc::c_void,
min_tx_id: libc::uint64_t,
_max_tx_id: libc::uint64_t,
istate: &mut libc::uint64_t)
-> applayer::AppLayerGetTxIterTuple
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx_iterator(min_tx_id, istate) {
Some((tx, out_tx_id, has_next)) => {
let c_tx = unsafe { transmute(tx) };
let ires = applayer::AppLayerGetTxIterTuple::with_values(
c_tx, out_tx_id, has_next);
return ires;
}
None => {
return applayer::AppLayerGetTxIterTuple::not_found();
}
}
}
const PARSER_NAME: &'static [u8] = b"dhcp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_dhcp_register_parser() {
SCLogDebug!("Registering DHCP parser.");
let ports = CString::new("[67,68]").unwrap();
let parser = RustParser {
name: PARSER_NAME.as_ptr() as *const libc::c_char,
default_port: ports.as_ptr(),
ipproto: libc::IPPROTO_UDP,
probe_ts: rs_dhcp_probing_parser,
probe_tc: rs_dhcp_probing_parser,
min_depth: 0,
max_depth: 16,
state_new: rs_dhcp_state_new,
state_free: rs_dhcp_state_free,
tx_free: rs_dhcp_state_tx_free,
parse_ts: rs_dhcp_parse,
parse_tc: rs_dhcp_parse,
get_tx_count: rs_dhcp_state_get_tx_count,
get_tx: rs_dhcp_state_get_tx,
tx_get_comp_st: rs_dhcp_state_progress_completion_status,
tx_get_progress: rs_dhcp_tx_get_alstate_progress,
get_tx_logged: Some(rs_dhcp_tx_get_logged),
set_tx_logged: Some(rs_dhcp_tx_set_logged),
get_de_state: rs_dhcp_tx_get_detect_state,
set_de_state: rs_dhcp_tx_set_detect_state,
get_events: Some(rs_dhcp_state_get_events),
get_eventinfo: Some(rs_dhcp_state_get_event_info),
localstorage_new: None,
localstorage_free: None,
get_tx_mpm_id: None,
set_tx_mpm_id: None,
get_files: None,
get_tx_iterator: Some(rs_dhcp_state_get_tx_iterator),
};
let ip_proto_str = CString::new("udp").unwrap();
if AppLayerProtoDetectConfProtoDetectionEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let alproto = AppLayerRegisterProtocolDetection(&parser, 1);
ALPROTO_DHCP = alproto;
if AppLayerParserConfParserEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let _ = AppLayerRegisterParser(&parser, alproto);
}
} else {
SCLogDebug!("Protocol detector and parser disabled for DHCP.");
}
}
|
{
self.transactions.remove(index);
}
|
conditional_block
|
dhcp.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use applayer;
use core;
use core::{ALPROTO_UNKNOWN, AppProto, Flow};
use dhcp::parser::*;
use libc;
use log::*;
use nom;
use parser::*;
use std;
use std::ffi::{CStr,CString};
use std::mem::transmute;
static mut ALPROTO_DHCP: AppProto = ALPROTO_UNKNOWN;
static DHCP_MIN_FRAME_LEN: u32 = 232;
pub const BOOTP_REQUEST: u8 = 1;
pub const BOOTP_REPLY: u8 = 2;
// DHCP option types. Names based on IANA naming:
// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.xhtml
pub const DHCP_OPT_SUBNET_MASK: u8 = 1;
pub const DHCP_OPT_ROUTERS: u8 = 3;
pub const DHCP_OPT_DNS_SERVER: u8 = 6;
pub const DHCP_OPT_HOSTNAME: u8 = 12;
pub const DHCP_OPT_REQUESTED_IP: u8 = 50;
pub const DHCP_OPT_ADDRESS_TIME: u8 = 51;
pub const DHCP_OPT_TYPE: u8 = 53;
pub const DHCP_OPT_SERVER_ID: u8 = 54;
pub const DHCP_OPT_PARAMETER_LIST: u8 = 55;
pub const DHCP_OPT_RENEWAL_TIME: u8 = 58;
pub const DHCP_OPT_REBINDING_TIME: u8 = 59;
pub const DHCP_OPT_CLIENT_ID: u8 = 61;
pub const DHCP_OPT_END: u8 = 255;
/// DHCP message types.
pub const DHCP_TYPE_DISCOVER: u8 = 1;
pub const DHCP_TYPE_OFFER: u8 = 2;
pub const DHCP_TYPE_REQUEST: u8 = 3;
pub const DHCP_TYPE_DECLINE: u8 = 4;
pub const DHCP_TYPE_ACK: u8 = 5;
pub const DHCP_TYPE_NAK: u8 = 6;
pub const DHCP_TYPE_RELEASE: u8 = 7;
pub const DHCP_TYPE_INFORM: u8 = 8;
/// DHCP parameter types.
/// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.txt
pub const DHCP_PARAM_SUBNET_MASK: u8 = 1;
pub const DHCP_PARAM_ROUTER: u8 = 3;
pub const DHCP_PARAM_DNS_SERVER: u8 = 6;
pub const DHCP_PARAM_DOMAIN: u8 = 15;
pub const DHCP_PARAM_ARP_TIMEOUT: u8 = 35;
pub const DHCP_PARAM_NTP_SERVER: u8 = 42;
pub const DHCP_PARAM_TFTP_SERVER_NAME: u8 = 66;
pub const DHCP_PARAM_TFTP_SERVER_IP: u8 = 150;
#[repr(u32)]
pub enum DHCPEvent {
TruncatedOptions = 0,
MalformedOptions,
}
/// The concept of a transaction is more to satisfy the Suricata
/// app-layer. This DHCP parser is actually stateless where each
/// message is its own transaction.
pub struct DHCPTransaction {
tx_id: u64,
pub message: DHCPMessage,
logged: applayer::LoggerFlags,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
}
impl DHCPTransaction {
pub fn new(id: u64, message: DHCPMessage) -> DHCPTransaction {
DHCPTransaction {
tx_id: id,
message: message,
logged: applayer::LoggerFlags::new(),
de_state: None,
events: std::ptr::null_mut(),
}
}
}
export_tx_get_detect_state!(rs_dhcp_tx_get_detect_state, DHCPTransaction);
export_tx_set_detect_state!(rs_dhcp_tx_set_detect_state, DHCPTransaction);
pub struct DHCPState {
// Internal transaction ID.
tx_id: u64,
// List of transactions.
transactions: Vec<DHCPTransaction>,
events: u16,
}
impl DHCPState {
pub fn
|
() -> DHCPState {
return DHCPState {
tx_id: 0,
transactions: Vec::new(),
events: 0,
};
}
pub fn parse(&mut self, input: &[u8]) -> bool {
match dhcp_parse(input) {
nom::IResult::Done(_, message) => {
let malformed_options = message.malformed_options;
let truncated_options = message.truncated_options;
self.tx_id += 1;
let transaction = DHCPTransaction::new(self.tx_id, message);
self.transactions.push(transaction);
if malformed_options {
self.set_event(DHCPEvent::MalformedOptions);
}
if truncated_options {
self.set_event(DHCPEvent::TruncatedOptions);
}
return true;
}
_ => {
return false;
}
}
}
pub fn get_tx(&mut self, tx_id: u64) -> Option<&DHCPTransaction> {
for tx in &mut self.transactions {
if tx.tx_id == tx_id + 1 {
return Some(tx);
}
}
return None;
}
fn free_tx(&mut self, tx_id: u64) {
let len = self.transactions.len();
let mut found = false;
let mut index = 0;
for i in 0..len {
let tx = &self.transactions[i];
if tx.tx_id == tx_id + 1 {
found = true;
index = i;
break;
}
}
if found {
self.transactions.remove(index);
}
}
fn set_event(&mut self, event: DHCPEvent) {
if let Some(tx) = self.transactions.last_mut() {
core::sc_app_layer_decoder_events_set_event_raw(
&mut tx.events, event as u8);
self.events += 1;
}
}
fn get_tx_iterator(&mut self, min_tx_id: u64, state: &mut u64) ->
Option<(&DHCPTransaction, u64, bool)>
{
let mut index = *state as usize;
let len = self.transactions.len();
while index < len {
let tx = &self.transactions[index];
if tx.tx_id < min_tx_id + 1 {
index += 1;
continue;
}
*state = index as u64 + 1;
return Some((tx, tx.tx_id - 1, (len - index) > 1));
}
return None;
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_probing_parser(_flow: *const Flow,
input: *const libc::uint8_t,
input_len: u32,
_offset: *const u32) -> AppProto {
if input_len < DHCP_MIN_FRAME_LEN {
return ALPROTO_UNKNOWN;
}
let slice = build_slice!(input, input_len as usize);
match parse_header(slice) {
nom::IResult::Done(_, _) => {
return unsafe { ALPROTO_DHCP };
}
_ => {
return ALPROTO_UNKNOWN;
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_alstate_progress(_tx: *mut libc::c_void,
_direction: libc::uint8_t) -> libc::c_int {
// As this is a stateless parser, simply use 1.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_progress_completion_status(
_direction: libc::uint8_t) -> libc::c_int {
// The presence of a transaction means we are complete.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx(state: *mut libc::c_void,
tx_id: libc::uint64_t) -> *mut libc::c_void {
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => {
return unsafe { transmute(tx) };
}
None => {
return std::ptr::null_mut();
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_count(state: *mut libc::c_void) -> libc::uint64_t {
let state = cast_pointer!(state, DHCPState);
return state.tx_id;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_parse(_flow: *const core::Flow,
state: *mut libc::c_void,
_pstate: *mut libc::c_void,
input: *const libc::uint8_t,
input_len: u32,
_data: *const libc::c_void,
_flags: u8) -> i8 {
let state = cast_pointer!(state, DHCPState);
let buf = build_slice!(input, input_len as usize);
if state.parse(buf) {
return 1;
}
return -1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_tx_free(
state: *mut libc::c_void,
tx_id: libc::uint64_t)
{
let state = cast_pointer!(state, DHCPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_new() -> *mut libc::c_void {
let state = DHCPState::new();
let boxed = Box::new(state);
return unsafe {
transmute(boxed)
};
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_free(state: *mut libc::c_void) {
// Just unbox...
let _drop: Box<DHCPState> = unsafe { transmute(state) };
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_logged(_state: *mut libc::c_void, tx: *mut libc::c_void) -> u32 {
let tx = cast_pointer!(tx, DHCPTransaction);
return tx.logged.get();
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_set_logged(_state: *mut libc::c_void,
tx: *mut libc::c_void,
logged: libc::uint32_t) {
let tx = cast_pointer!(tx, DHCPTransaction);
tx.logged.set(logged);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_events(state: *mut libc::c_void,
tx_id: libc::uint64_t)
-> *mut core::AppLayerDecoderEvents
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => tx.events,
_ => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_event_info(
event_name: *const libc::c_char,
event_id: *mut libc::c_int,
event_type: *mut core::AppLayerEventType)
-> libc::c_int
{
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"malformed_options" => DHCPEvent::MalformedOptions as i32,
"truncated_options" => DHCPEvent::TruncatedOptions as i32,
_ => -1, // unknown event
}
},
Err(_) => -1, // UTF-8 conversion failed
};
unsafe{
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as libc::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_iterator(
_ipproto: libc::uint8_t,
_alproto: AppProto,
state: *mut libc::c_void,
min_tx_id: libc::uint64_t,
_max_tx_id: libc::uint64_t,
istate: &mut libc::uint64_t)
-> applayer::AppLayerGetTxIterTuple
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx_iterator(min_tx_id, istate) {
Some((tx, out_tx_id, has_next)) => {
let c_tx = unsafe { transmute(tx) };
let ires = applayer::AppLayerGetTxIterTuple::with_values(
c_tx, out_tx_id, has_next);
return ires;
}
None => {
return applayer::AppLayerGetTxIterTuple::not_found();
}
}
}
const PARSER_NAME: &'static [u8] = b"dhcp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_dhcp_register_parser() {
SCLogDebug!("Registering DHCP parser.");
let ports = CString::new("[67,68]").unwrap();
let parser = RustParser {
name: PARSER_NAME.as_ptr() as *const libc::c_char,
default_port: ports.as_ptr(),
ipproto: libc::IPPROTO_UDP,
probe_ts: rs_dhcp_probing_parser,
probe_tc: rs_dhcp_probing_parser,
min_depth: 0,
max_depth: 16,
state_new: rs_dhcp_state_new,
state_free: rs_dhcp_state_free,
tx_free: rs_dhcp_state_tx_free,
parse_ts: rs_dhcp_parse,
parse_tc: rs_dhcp_parse,
get_tx_count: rs_dhcp_state_get_tx_count,
get_tx: rs_dhcp_state_get_tx,
tx_get_comp_st: rs_dhcp_state_progress_completion_status,
tx_get_progress: rs_dhcp_tx_get_alstate_progress,
get_tx_logged: Some(rs_dhcp_tx_get_logged),
set_tx_logged: Some(rs_dhcp_tx_set_logged),
get_de_state: rs_dhcp_tx_get_detect_state,
set_de_state: rs_dhcp_tx_set_detect_state,
get_events: Some(rs_dhcp_state_get_events),
get_eventinfo: Some(rs_dhcp_state_get_event_info),
localstorage_new: None,
localstorage_free: None,
get_tx_mpm_id: None,
set_tx_mpm_id: None,
get_files: None,
get_tx_iterator: Some(rs_dhcp_state_get_tx_iterator),
};
let ip_proto_str = CString::new("udp").unwrap();
if AppLayerProtoDetectConfProtoDetectionEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let alproto = AppLayerRegisterProtocolDetection(&parser, 1);
ALPROTO_DHCP = alproto;
if AppLayerParserConfParserEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let _ = AppLayerRegisterParser(&parser, alproto);
}
} else {
SCLogDebug!("Protocol detector and parser disabled for DHCP.");
}
}
|
new
|
identifier_name
|
dhcp.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use applayer;
use core;
use core::{ALPROTO_UNKNOWN, AppProto, Flow};
use dhcp::parser::*;
use libc;
use log::*;
use nom;
use parser::*;
use std;
use std::ffi::{CStr,CString};
use std::mem::transmute;
static mut ALPROTO_DHCP: AppProto = ALPROTO_UNKNOWN;
static DHCP_MIN_FRAME_LEN: u32 = 232;
pub const BOOTP_REQUEST: u8 = 1;
pub const BOOTP_REPLY: u8 = 2;
// DHCP option types. Names based on IANA naming:
// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.xhtml
pub const DHCP_OPT_SUBNET_MASK: u8 = 1;
pub const DHCP_OPT_ROUTERS: u8 = 3;
pub const DHCP_OPT_DNS_SERVER: u8 = 6;
pub const DHCP_OPT_HOSTNAME: u8 = 12;
pub const DHCP_OPT_REQUESTED_IP: u8 = 50;
pub const DHCP_OPT_ADDRESS_TIME: u8 = 51;
pub const DHCP_OPT_TYPE: u8 = 53;
pub const DHCP_OPT_SERVER_ID: u8 = 54;
pub const DHCP_OPT_PARAMETER_LIST: u8 = 55;
pub const DHCP_OPT_RENEWAL_TIME: u8 = 58;
pub const DHCP_OPT_REBINDING_TIME: u8 = 59;
pub const DHCP_OPT_CLIENT_ID: u8 = 61;
pub const DHCP_OPT_END: u8 = 255;
/// DHCP message types.
pub const DHCP_TYPE_DISCOVER: u8 = 1;
pub const DHCP_TYPE_OFFER: u8 = 2;
pub const DHCP_TYPE_REQUEST: u8 = 3;
pub const DHCP_TYPE_DECLINE: u8 = 4;
pub const DHCP_TYPE_ACK: u8 = 5;
pub const DHCP_TYPE_NAK: u8 = 6;
pub const DHCP_TYPE_RELEASE: u8 = 7;
pub const DHCP_TYPE_INFORM: u8 = 8;
/// DHCP parameter types.
/// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.txt
pub const DHCP_PARAM_SUBNET_MASK: u8 = 1;
pub const DHCP_PARAM_ROUTER: u8 = 3;
pub const DHCP_PARAM_DNS_SERVER: u8 = 6;
pub const DHCP_PARAM_DOMAIN: u8 = 15;
pub const DHCP_PARAM_ARP_TIMEOUT: u8 = 35;
pub const DHCP_PARAM_NTP_SERVER: u8 = 42;
pub const DHCP_PARAM_TFTP_SERVER_NAME: u8 = 66;
pub const DHCP_PARAM_TFTP_SERVER_IP: u8 = 150;
#[repr(u32)]
pub enum DHCPEvent {
TruncatedOptions = 0,
MalformedOptions,
}
/// The concept of a transaction is more to satisfy the Suricata
/// app-layer. This DHCP parser is actually stateless where each
/// message is its own transaction.
pub struct DHCPTransaction {
tx_id: u64,
pub message: DHCPMessage,
logged: applayer::LoggerFlags,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
}
impl DHCPTransaction {
pub fn new(id: u64, message: DHCPMessage) -> DHCPTransaction {
DHCPTransaction {
tx_id: id,
message: message,
logged: applayer::LoggerFlags::new(),
de_state: None,
events: std::ptr::null_mut(),
}
}
}
export_tx_get_detect_state!(rs_dhcp_tx_get_detect_state, DHCPTransaction);
export_tx_set_detect_state!(rs_dhcp_tx_set_detect_state, DHCPTransaction);
pub struct DHCPState {
// Internal transaction ID.
tx_id: u64,
// List of transactions.
transactions: Vec<DHCPTransaction>,
events: u16,
}
impl DHCPState {
pub fn new() -> DHCPState {
return DHCPState {
tx_id: 0,
transactions: Vec::new(),
events: 0,
};
}
pub fn parse(&mut self, input: &[u8]) -> bool {
match dhcp_parse(input) {
nom::IResult::Done(_, message) => {
let malformed_options = message.malformed_options;
let truncated_options = message.truncated_options;
self.tx_id += 1;
let transaction = DHCPTransaction::new(self.tx_id, message);
self.transactions.push(transaction);
if malformed_options {
self.set_event(DHCPEvent::MalformedOptions);
}
if truncated_options {
self.set_event(DHCPEvent::TruncatedOptions);
}
return true;
}
_ => {
return false;
}
}
}
pub fn get_tx(&mut self, tx_id: u64) -> Option<&DHCPTransaction> {
for tx in &mut self.transactions {
if tx.tx_id == tx_id + 1 {
return Some(tx);
}
}
return None;
}
fn free_tx(&mut self, tx_id: u64) {
let len = self.transactions.len();
let mut found = false;
let mut index = 0;
for i in 0..len {
let tx = &self.transactions[i];
if tx.tx_id == tx_id + 1 {
found = true;
index = i;
break;
}
}
if found {
self.transactions.remove(index);
}
}
fn set_event(&mut self, event: DHCPEvent) {
if let Some(tx) = self.transactions.last_mut() {
core::sc_app_layer_decoder_events_set_event_raw(
&mut tx.events, event as u8);
self.events += 1;
}
}
fn get_tx_iterator(&mut self, min_tx_id: u64, state: &mut u64) ->
Option<(&DHCPTransaction, u64, bool)>
{
let mut index = *state as usize;
let len = self.transactions.len();
while index < len {
let tx = &self.transactions[index];
if tx.tx_id < min_tx_id + 1 {
index += 1;
continue;
}
*state = index as u64 + 1;
return Some((tx, tx.tx_id - 1, (len - index) > 1));
}
return None;
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_probing_parser(_flow: *const Flow,
input: *const libc::uint8_t,
input_len: u32,
_offset: *const u32) -> AppProto {
if input_len < DHCP_MIN_FRAME_LEN {
return ALPROTO_UNKNOWN;
}
let slice = build_slice!(input, input_len as usize);
match parse_header(slice) {
nom::IResult::Done(_, _) => {
return unsafe { ALPROTO_DHCP };
}
_ => {
return ALPROTO_UNKNOWN;
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_alstate_progress(_tx: *mut libc::c_void,
_direction: libc::uint8_t) -> libc::c_int {
// As this is a stateless parser, simply use 1.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_progress_completion_status(
_direction: libc::uint8_t) -> libc::c_int {
// The presence of a transaction means we are complete.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx(state: *mut libc::c_void,
tx_id: libc::uint64_t) -> *mut libc::c_void {
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => {
return unsafe { transmute(tx) };
}
None => {
return std::ptr::null_mut();
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_count(state: *mut libc::c_void) -> libc::uint64_t {
let state = cast_pointer!(state, DHCPState);
return state.tx_id;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_parse(_flow: *const core::Flow,
state: *mut libc::c_void,
_pstate: *mut libc::c_void,
input: *const libc::uint8_t,
input_len: u32,
_data: *const libc::c_void,
_flags: u8) -> i8 {
let state = cast_pointer!(state, DHCPState);
let buf = build_slice!(input, input_len as usize);
if state.parse(buf) {
return 1;
}
return -1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_tx_free(
state: *mut libc::c_void,
tx_id: libc::uint64_t)
{
let state = cast_pointer!(state, DHCPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_new() -> *mut libc::c_void {
let state = DHCPState::new();
let boxed = Box::new(state);
return unsafe {
transmute(boxed)
};
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_free(state: *mut libc::c_void)
|
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_logged(_state: *mut libc::c_void, tx: *mut libc::c_void) -> u32 {
let tx = cast_pointer!(tx, DHCPTransaction);
return tx.logged.get();
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_set_logged(_state: *mut libc::c_void,
tx: *mut libc::c_void,
logged: libc::uint32_t) {
let tx = cast_pointer!(tx, DHCPTransaction);
tx.logged.set(logged);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_events(state: *mut libc::c_void,
tx_id: libc::uint64_t)
-> *mut core::AppLayerDecoderEvents
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => tx.events,
_ => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_event_info(
event_name: *const libc::c_char,
event_id: *mut libc::c_int,
event_type: *mut core::AppLayerEventType)
-> libc::c_int
{
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"malformed_options" => DHCPEvent::MalformedOptions as i32,
"truncated_options" => DHCPEvent::TruncatedOptions as i32,
_ => -1, // unknown event
}
},
Err(_) => -1, // UTF-8 conversion failed
};
unsafe{
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as libc::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_iterator(
_ipproto: libc::uint8_t,
_alproto: AppProto,
state: *mut libc::c_void,
min_tx_id: libc::uint64_t,
_max_tx_id: libc::uint64_t,
istate: &mut libc::uint64_t)
-> applayer::AppLayerGetTxIterTuple
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx_iterator(min_tx_id, istate) {
Some((tx, out_tx_id, has_next)) => {
let c_tx = unsafe { transmute(tx) };
let ires = applayer::AppLayerGetTxIterTuple::with_values(
c_tx, out_tx_id, has_next);
return ires;
}
None => {
return applayer::AppLayerGetTxIterTuple::not_found();
}
}
}
const PARSER_NAME: &'static [u8] = b"dhcp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_dhcp_register_parser() {
SCLogDebug!("Registering DHCP parser.");
let ports = CString::new("[67,68]").unwrap();
let parser = RustParser {
name: PARSER_NAME.as_ptr() as *const libc::c_char,
default_port: ports.as_ptr(),
ipproto: libc::IPPROTO_UDP,
probe_ts: rs_dhcp_probing_parser,
probe_tc: rs_dhcp_probing_parser,
min_depth: 0,
max_depth: 16,
state_new: rs_dhcp_state_new,
state_free: rs_dhcp_state_free,
tx_free: rs_dhcp_state_tx_free,
parse_ts: rs_dhcp_parse,
parse_tc: rs_dhcp_parse,
get_tx_count: rs_dhcp_state_get_tx_count,
get_tx: rs_dhcp_state_get_tx,
tx_get_comp_st: rs_dhcp_state_progress_completion_status,
tx_get_progress: rs_dhcp_tx_get_alstate_progress,
get_tx_logged: Some(rs_dhcp_tx_get_logged),
set_tx_logged: Some(rs_dhcp_tx_set_logged),
get_de_state: rs_dhcp_tx_get_detect_state,
set_de_state: rs_dhcp_tx_set_detect_state,
get_events: Some(rs_dhcp_state_get_events),
get_eventinfo: Some(rs_dhcp_state_get_event_info),
localstorage_new: None,
localstorage_free: None,
get_tx_mpm_id: None,
set_tx_mpm_id: None,
get_files: None,
get_tx_iterator: Some(rs_dhcp_state_get_tx_iterator),
};
let ip_proto_str = CString::new("udp").unwrap();
if AppLayerProtoDetectConfProtoDetectionEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let alproto = AppLayerRegisterProtocolDetection(&parser, 1);
ALPROTO_DHCP = alproto;
if AppLayerParserConfParserEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let _ = AppLayerRegisterParser(&parser, alproto);
}
} else {
SCLogDebug!("Protocol detector and parser disabled for DHCP.");
}
}
|
{
// Just unbox...
let _drop: Box<DHCPState> = unsafe { transmute(state) };
}
|
identifier_body
|
dhcp.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use applayer;
use core;
use core::{ALPROTO_UNKNOWN, AppProto, Flow};
use dhcp::parser::*;
use libc;
use log::*;
use nom;
use parser::*;
use std;
use std::ffi::{CStr,CString};
use std::mem::transmute;
static mut ALPROTO_DHCP: AppProto = ALPROTO_UNKNOWN;
static DHCP_MIN_FRAME_LEN: u32 = 232;
pub const BOOTP_REQUEST: u8 = 1;
pub const BOOTP_REPLY: u8 = 2;
// DHCP option types. Names based on IANA naming:
// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.xhtml
pub const DHCP_OPT_SUBNET_MASK: u8 = 1;
pub const DHCP_OPT_ROUTERS: u8 = 3;
pub const DHCP_OPT_DNS_SERVER: u8 = 6;
pub const DHCP_OPT_HOSTNAME: u8 = 12;
pub const DHCP_OPT_REQUESTED_IP: u8 = 50;
pub const DHCP_OPT_ADDRESS_TIME: u8 = 51;
pub const DHCP_OPT_TYPE: u8 = 53;
pub const DHCP_OPT_SERVER_ID: u8 = 54;
pub const DHCP_OPT_PARAMETER_LIST: u8 = 55;
pub const DHCP_OPT_RENEWAL_TIME: u8 = 58;
pub const DHCP_OPT_REBINDING_TIME: u8 = 59;
pub const DHCP_OPT_CLIENT_ID: u8 = 61;
pub const DHCP_OPT_END: u8 = 255;
/// DHCP message types.
pub const DHCP_TYPE_DISCOVER: u8 = 1;
pub const DHCP_TYPE_OFFER: u8 = 2;
pub const DHCP_TYPE_REQUEST: u8 = 3;
pub const DHCP_TYPE_DECLINE: u8 = 4;
pub const DHCP_TYPE_ACK: u8 = 5;
pub const DHCP_TYPE_NAK: u8 = 6;
pub const DHCP_TYPE_RELEASE: u8 = 7;
pub const DHCP_TYPE_INFORM: u8 = 8;
/// DHCP parameter types.
/// https://www.iana.org/assignments/bootp-dhcp-parameters/bootp-dhcp-parameters.txt
pub const DHCP_PARAM_SUBNET_MASK: u8 = 1;
pub const DHCP_PARAM_ROUTER: u8 = 3;
pub const DHCP_PARAM_DNS_SERVER: u8 = 6;
pub const DHCP_PARAM_DOMAIN: u8 = 15;
pub const DHCP_PARAM_ARP_TIMEOUT: u8 = 35;
pub const DHCP_PARAM_NTP_SERVER: u8 = 42;
pub const DHCP_PARAM_TFTP_SERVER_NAME: u8 = 66;
pub const DHCP_PARAM_TFTP_SERVER_IP: u8 = 150;
#[repr(u32)]
pub enum DHCPEvent {
TruncatedOptions = 0,
MalformedOptions,
}
/// The concept of a transaction is more to satisfy the Suricata
/// app-layer. This DHCP parser is actually stateless where each
/// message is its own transaction.
pub struct DHCPTransaction {
tx_id: u64,
pub message: DHCPMessage,
logged: applayer::LoggerFlags,
de_state: Option<*mut core::DetectEngineState>,
events: *mut core::AppLayerDecoderEvents,
}
impl DHCPTransaction {
pub fn new(id: u64, message: DHCPMessage) -> DHCPTransaction {
DHCPTransaction {
tx_id: id,
message: message,
logged: applayer::LoggerFlags::new(),
de_state: None,
events: std::ptr::null_mut(),
}
}
}
export_tx_get_detect_state!(rs_dhcp_tx_get_detect_state, DHCPTransaction);
export_tx_set_detect_state!(rs_dhcp_tx_set_detect_state, DHCPTransaction);
pub struct DHCPState {
// Internal transaction ID.
tx_id: u64,
// List of transactions.
transactions: Vec<DHCPTransaction>,
events: u16,
}
impl DHCPState {
pub fn new() -> DHCPState {
return DHCPState {
tx_id: 0,
transactions: Vec::new(),
events: 0,
};
}
pub fn parse(&mut self, input: &[u8]) -> bool {
match dhcp_parse(input) {
nom::IResult::Done(_, message) => {
let malformed_options = message.malformed_options;
let truncated_options = message.truncated_options;
self.tx_id += 1;
let transaction = DHCPTransaction::new(self.tx_id, message);
self.transactions.push(transaction);
if malformed_options {
self.set_event(DHCPEvent::MalformedOptions);
}
if truncated_options {
self.set_event(DHCPEvent::TruncatedOptions);
}
return true;
}
_ => {
return false;
}
}
}
pub fn get_tx(&mut self, tx_id: u64) -> Option<&DHCPTransaction> {
for tx in &mut self.transactions {
if tx.tx_id == tx_id + 1 {
return Some(tx);
}
}
return None;
}
fn free_tx(&mut self, tx_id: u64) {
let len = self.transactions.len();
let mut found = false;
let mut index = 0;
for i in 0..len {
let tx = &self.transactions[i];
if tx.tx_id == tx_id + 1 {
found = true;
index = i;
break;
}
}
if found {
self.transactions.remove(index);
}
}
fn set_event(&mut self, event: DHCPEvent) {
if let Some(tx) = self.transactions.last_mut() {
core::sc_app_layer_decoder_events_set_event_raw(
&mut tx.events, event as u8);
self.events += 1;
}
}
fn get_tx_iterator(&mut self, min_tx_id: u64, state: &mut u64) ->
Option<(&DHCPTransaction, u64, bool)>
{
let mut index = *state as usize;
let len = self.transactions.len();
while index < len {
let tx = &self.transactions[index];
if tx.tx_id < min_tx_id + 1 {
index += 1;
continue;
}
*state = index as u64 + 1;
return Some((tx, tx.tx_id - 1, (len - index) > 1));
}
return None;
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_probing_parser(_flow: *const Flow,
input: *const libc::uint8_t,
input_len: u32,
_offset: *const u32) -> AppProto {
if input_len < DHCP_MIN_FRAME_LEN {
return ALPROTO_UNKNOWN;
}
let slice = build_slice!(input, input_len as usize);
match parse_header(slice) {
nom::IResult::Done(_, _) => {
return unsafe { ALPROTO_DHCP };
}
_ => {
return ALPROTO_UNKNOWN;
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_alstate_progress(_tx: *mut libc::c_void,
_direction: libc::uint8_t) -> libc::c_int {
// As this is a stateless parser, simply use 1.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_progress_completion_status(
_direction: libc::uint8_t) -> libc::c_int {
// The presence of a transaction means we are complete.
return 1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx(state: *mut libc::c_void,
tx_id: libc::uint64_t) -> *mut libc::c_void {
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => {
return unsafe { transmute(tx) };
}
None => {
return std::ptr::null_mut();
}
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_count(state: *mut libc::c_void) -> libc::uint64_t {
let state = cast_pointer!(state, DHCPState);
return state.tx_id;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_parse(_flow: *const core::Flow,
state: *mut libc::c_void,
_pstate: *mut libc::c_void,
input: *const libc::uint8_t,
input_len: u32,
_data: *const libc::c_void,
_flags: u8) -> i8 {
let state = cast_pointer!(state, DHCPState);
let buf = build_slice!(input, input_len as usize);
if state.parse(buf) {
return 1;
}
return -1;
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_tx_free(
state: *mut libc::c_void,
tx_id: libc::uint64_t)
{
let state = cast_pointer!(state, DHCPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_new() -> *mut libc::c_void {
let state = DHCPState::new();
let boxed = Box::new(state);
return unsafe {
transmute(boxed)
};
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_free(state: *mut libc::c_void) {
// Just unbox...
let _drop: Box<DHCPState> = unsafe { transmute(state) };
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_get_logged(_state: *mut libc::c_void, tx: *mut libc::c_void) -> u32 {
let tx = cast_pointer!(tx, DHCPTransaction);
return tx.logged.get();
}
#[no_mangle]
pub extern "C" fn rs_dhcp_tx_set_logged(_state: *mut libc::c_void,
|
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_events(state: *mut libc::c_void,
tx_id: libc::uint64_t)
-> *mut core::AppLayerDecoderEvents
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx(tx_id) {
Some(tx) => tx.events,
_ => std::ptr::null_mut(),
}
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_event_info(
event_name: *const libc::c_char,
event_id: *mut libc::c_int,
event_type: *mut core::AppLayerEventType)
-> libc::c_int
{
if event_name == std::ptr::null() {
return -1;
}
let c_event_name: &CStr = unsafe { CStr::from_ptr(event_name) };
let event = match c_event_name.to_str() {
Ok(s) => {
match s {
"malformed_options" => DHCPEvent::MalformedOptions as i32,
"truncated_options" => DHCPEvent::TruncatedOptions as i32,
_ => -1, // unknown event
}
},
Err(_) => -1, // UTF-8 conversion failed
};
unsafe{
*event_type = core::APP_LAYER_EVENT_TYPE_TRANSACTION;
*event_id = event as libc::c_int;
};
0
}
#[no_mangle]
pub extern "C" fn rs_dhcp_state_get_tx_iterator(
_ipproto: libc::uint8_t,
_alproto: AppProto,
state: *mut libc::c_void,
min_tx_id: libc::uint64_t,
_max_tx_id: libc::uint64_t,
istate: &mut libc::uint64_t)
-> applayer::AppLayerGetTxIterTuple
{
let state = cast_pointer!(state, DHCPState);
match state.get_tx_iterator(min_tx_id, istate) {
Some((tx, out_tx_id, has_next)) => {
let c_tx = unsafe { transmute(tx) };
let ires = applayer::AppLayerGetTxIterTuple::with_values(
c_tx, out_tx_id, has_next);
return ires;
}
None => {
return applayer::AppLayerGetTxIterTuple::not_found();
}
}
}
const PARSER_NAME: &'static [u8] = b"dhcp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_dhcp_register_parser() {
SCLogDebug!("Registering DHCP parser.");
let ports = CString::new("[67,68]").unwrap();
let parser = RustParser {
name: PARSER_NAME.as_ptr() as *const libc::c_char,
default_port: ports.as_ptr(),
ipproto: libc::IPPROTO_UDP,
probe_ts: rs_dhcp_probing_parser,
probe_tc: rs_dhcp_probing_parser,
min_depth: 0,
max_depth: 16,
state_new: rs_dhcp_state_new,
state_free: rs_dhcp_state_free,
tx_free: rs_dhcp_state_tx_free,
parse_ts: rs_dhcp_parse,
parse_tc: rs_dhcp_parse,
get_tx_count: rs_dhcp_state_get_tx_count,
get_tx: rs_dhcp_state_get_tx,
tx_get_comp_st: rs_dhcp_state_progress_completion_status,
tx_get_progress: rs_dhcp_tx_get_alstate_progress,
get_tx_logged: Some(rs_dhcp_tx_get_logged),
set_tx_logged: Some(rs_dhcp_tx_set_logged),
get_de_state: rs_dhcp_tx_get_detect_state,
set_de_state: rs_dhcp_tx_set_detect_state,
get_events: Some(rs_dhcp_state_get_events),
get_eventinfo: Some(rs_dhcp_state_get_event_info),
localstorage_new: None,
localstorage_free: None,
get_tx_mpm_id: None,
set_tx_mpm_id: None,
get_files: None,
get_tx_iterator: Some(rs_dhcp_state_get_tx_iterator),
};
let ip_proto_str = CString::new("udp").unwrap();
if AppLayerProtoDetectConfProtoDetectionEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let alproto = AppLayerRegisterProtocolDetection(&parser, 1);
ALPROTO_DHCP = alproto;
if AppLayerParserConfParserEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let _ = AppLayerRegisterParser(&parser, alproto);
}
} else {
SCLogDebug!("Protocol detector and parser disabled for DHCP.");
}
}
|
tx: *mut libc::c_void,
logged: libc::uint32_t) {
let tx = cast_pointer!(tx, DHCPTransaction);
tx.logged.set(logged);
}
|
random_line_split
|
serializers.py
|
from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
app = SplitField(relations.PrimaryKeyRelatedField(required=True),
AppSerializer())
description = TranslationSerializerField(required=False)
preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
PreviewSerializer())
pullquote_attribution = TranslationSerializerField(required=False)
pullquote_rating = serializers.IntegerField(required=False)
pullquote_text = TranslationSerializerField(required=False)
class Meta:
fields = ('app', 'description', 'id', 'preview',
'pullquote_attribution', 'pullquote_rating', 'pullquote_text',
'url')
model = FeedApp
url_basename = 'feedapp'
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
carrier = SlugChoiceField(required=False,
choices_dict=mkt.carriers.CARRIER_MAP)
region = SlugChoiceField(required=False,
choices_dict=mkt.regions.REGION_LOOKUP)
category = SlugModelChoiceField(required=False,
queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
item_type = serializers.SerializerMethodField('get_item_type')
# Types of objects that are allowed to be a feed item.
collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
CollectionSerializer())
class Meta:
fields = ('carrier', 'category', 'collection', 'id', 'item_type',
'region', 'url')
item_types = ('collection',)
model = FeedItem
url_basename = 'feeditem'
def validate(self, attrs):
"""
Ensure that at least one object type is specified.
"""
item_changed = any(k for k in self.Meta.item_types if k in attrs.keys())
num_defined = sum(1 for item in self.Meta.item_types if attrs.get(item))
if item_changed and num_defined != 1:
message = ('A valid value for exactly one of the following '
'parameters must be defined: %s' % ','.join(
self.Meta.item_types))
raise serializers.ValidationError(message)
return attrs
def get_item_type(self, obj):
for item_type in self.Meta.item_types:
|
return
|
if getattr(obj, item_type):
return item_type
|
conditional_block
|
serializers.py
|
from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
app = SplitField(relations.PrimaryKeyRelatedField(required=True),
AppSerializer())
description = TranslationSerializerField(required=False)
preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
PreviewSerializer())
pullquote_attribution = TranslationSerializerField(required=False)
pullquote_rating = serializers.IntegerField(required=False)
pullquote_text = TranslationSerializerField(required=False)
class Meta:
fields = ('app', 'description', 'id', 'preview',
'pullquote_attribution', 'pullquote_rating', 'pullquote_text',
'url')
model = FeedApp
url_basename = 'feedapp'
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
carrier = SlugChoiceField(required=False,
choices_dict=mkt.carriers.CARRIER_MAP)
region = SlugChoiceField(required=False,
choices_dict=mkt.regions.REGION_LOOKUP)
category = SlugModelChoiceField(required=False,
queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
item_type = serializers.SerializerMethodField('get_item_type')
# Types of objects that are allowed to be a feed item.
collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
CollectionSerializer())
class Meta:
fields = ('carrier', 'category', 'collection', 'id', 'item_type',
'region', 'url')
item_types = ('collection',)
model = FeedItem
url_basename = 'feeditem'
def
|
(self, attrs):
"""
Ensure that at least one object type is specified.
"""
item_changed = any(k for k in self.Meta.item_types if k in attrs.keys())
num_defined = sum(1 for item in self.Meta.item_types if attrs.get(item))
if item_changed and num_defined != 1:
message = ('A valid value for exactly one of the following '
'parameters must be defined: %s' % ','.join(
self.Meta.item_types))
raise serializers.ValidationError(message)
return attrs
def get_item_type(self, obj):
for item_type in self.Meta.item_types:
if getattr(obj, item_type):
return item_type
return
|
validate
|
identifier_name
|
serializers.py
|
from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
|
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
app = SplitField(relations.PrimaryKeyRelatedField(required=True),
AppSerializer())
description = TranslationSerializerField(required=False)
preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
PreviewSerializer())
pullquote_attribution = TranslationSerializerField(required=False)
pullquote_rating = serializers.IntegerField(required=False)
pullquote_text = TranslationSerializerField(required=False)
class Meta:
fields = ('app', 'description', 'id', 'preview',
'pullquote_attribution', 'pullquote_rating', 'pullquote_text',
'url')
model = FeedApp
url_basename = 'feedapp'
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
carrier = SlugChoiceField(required=False,
choices_dict=mkt.carriers.CARRIER_MAP)
region = SlugChoiceField(required=False,
choices_dict=mkt.regions.REGION_LOOKUP)
category = SlugModelChoiceField(required=False,
queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
item_type = serializers.SerializerMethodField('get_item_type')
# Types of objects that are allowed to be a feed item.
collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
CollectionSerializer())
class Meta:
fields = ('carrier', 'category', 'collection', 'id', 'item_type',
'region', 'url')
item_types = ('collection',)
model = FeedItem
url_basename = 'feeditem'
def validate(self, attrs):
"""
Ensure that at least one object type is specified.
"""
item_changed = any(k for k in self.Meta.item_types if k in attrs.keys())
num_defined = sum(1 for item in self.Meta.item_types if attrs.get(item))
if item_changed and num_defined != 1:
message = ('A valid value for exactly one of the following '
'parameters must be defined: %s' % ','.join(
self.Meta.item_types))
raise serializers.ValidationError(message)
return attrs
def get_item_type(self, obj):
for item_type in self.Meta.item_types:
if getattr(obj, item_type):
return item_type
return
|
random_line_split
|
|
serializers.py
|
from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
|
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
carrier = SlugChoiceField(required=False,
choices_dict=mkt.carriers.CARRIER_MAP)
region = SlugChoiceField(required=False,
choices_dict=mkt.regions.REGION_LOOKUP)
category = SlugModelChoiceField(required=False,
queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
item_type = serializers.SerializerMethodField('get_item_type')
# Types of objects that are allowed to be a feed item.
collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
CollectionSerializer())
class Meta:
fields = ('carrier', 'category', 'collection', 'id', 'item_type',
'region', 'url')
item_types = ('collection',)
model = FeedItem
url_basename = 'feeditem'
def validate(self, attrs):
"""
Ensure that at least one object type is specified.
"""
item_changed = any(k for k in self.Meta.item_types if k in attrs.keys())
num_defined = sum(1 for item in self.Meta.item_types if attrs.get(item))
if item_changed and num_defined != 1:
message = ('A valid value for exactly one of the following '
'parameters must be defined: %s' % ','.join(
self.Meta.item_types))
raise serializers.ValidationError(message)
return attrs
def get_item_type(self, obj):
for item_type in self.Meta.item_types:
if getattr(obj, item_type):
return item_type
return
|
app = SplitField(relations.PrimaryKeyRelatedField(required=True),
AppSerializer())
description = TranslationSerializerField(required=False)
preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
PreviewSerializer())
pullquote_attribution = TranslationSerializerField(required=False)
pullquote_rating = serializers.IntegerField(required=False)
pullquote_text = TranslationSerializerField(required=False)
class Meta:
fields = ('app', 'description', 'id', 'preview',
'pullquote_attribution', 'pullquote_rating', 'pullquote_text',
'url')
model = FeedApp
url_basename = 'feedapp'
|
identifier_body
|
extern-call.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc;
mod rustrt {
use std::libc;
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
fact(data - 1u) * data
}
}
fn fact(n: uint) -> uint {
unsafe {
debug!("n = %?", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main()
|
{
let result = fact(10u);
debug!("result = %?", result);
assert_eq!(result, 3628800u);
}
|
identifier_body
|
|
extern-call.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc;
mod rustrt {
use std::libc;
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u
|
else {
fact(data - 1u) * data
}
}
fn fact(n: uint) -> uint {
unsafe {
debug!("n = %?", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
let result = fact(10u);
debug!("result = %?", result);
assert_eq!(result, 3628800u);
}
|
{
data
}
|
conditional_block
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.