| file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-39k) | suffix (large_string, lengths 0-36.1k) | middle (large_string, lengths 0-29.4k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
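Each row below is one fill-in-the-middle (FIM) sample: concatenating `prefix + middle + suffix` reassembles the source file, and `fim_type` records how the masked span was chosen; the four classes that appear in the rows are `random_line_split`, `identifier_name`, `identifier_body`, and `conditional_block`. A minimal loading sketch follows; the `datasets` library, the dataset path, and the split name are assumptions, not stated by this table:

```python
# Hypothetical loading sketch: the dataset path and split name are placeholders.
from datasets import load_dataset

ds = load_dataset("user/fim-code-samples", split="train")  # assumed location

for row in ds.select(range(3)):
    # By FIM construction, prefix + middle + suffix is the original file text.
    original = row["prefix"] + row["middle"] + row["suffix"]
    print(row["file_name"], row["fim_type"], len(original))
```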
HelpOutlineTwoTone.js | "use strict";
| var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M11 16h2v2h-2zm1-14C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zm0-14c-2.21 0-4 1.79-4 4h2c0-1.1.9-2 2-2s2 .9 2 2c0 2-3 1.75-3 5h2c0-2.25 3-2.5 3-5 0-2.21-1.79-4-4-4z"
}), 'HelpOutlineTwoTone');
exports.default = _default; | random_line_split |
|
empty.rs | use crate::Stream;
use core::marker::PhantomData;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`empty`](fn@empty) function.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Empty<T>(PhantomData<T>);
impl<T> Unpin for Empty<T> {}
unsafe impl<T> Send for Empty<T> {}
unsafe impl<T> Sync for Empty<T> {}
/// Creates a stream that yields nothing.
///
/// The returned stream is immediately ready and returns `None`. Use
/// [`stream::pending()`](super::pending()) to obtain a stream that is never
/// ready.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// #[tokio::main]
/// async fn main() {
/// let mut none = stream::empty::<i32>();
///
/// assert_eq!(None, none.next().await);
/// }
/// ``` | }
impl<T> Stream for Empty<T> {
type Item = T;
fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<T>> {
Poll::Ready(None)
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(0))
}
} | pub const fn empty<T>() -> Empty<T> {
Empty(PhantomData) | random_line_split |
empty.rs | use crate::Stream;
use core::marker::PhantomData;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`empty`](fn@empty) function.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Empty<T>(PhantomData<T>);
impl<T> Unpin for Empty<T> {}
unsafe impl<T> Send for Empty<T> {}
unsafe impl<T> Sync for Empty<T> {}
/// Creates a stream that yields nothing.
///
/// The returned stream is immediately ready and returns `None`. Use
/// [`stream::pending()`](super::pending()) to obtain a stream that is never
/// ready.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// #[tokio::main]
/// async fn main() {
/// let mut none = stream::empty::<i32>();
///
/// assert_eq!(None, none.next().await);
/// }
/// ```
pub const fn empty<T>() -> Empty<T> {
Empty(PhantomData)
}
impl<T> Stream for Empty<T> {
type Item = T;
fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<T>> {
Poll::Ready(None)
}
fn | (&self) -> (usize, Option<usize>) {
(0, Some(0))
}
}
| size_hint | identifier_name |
cpu.py | # (void)walker hardware platform support
# Copyright (C) 2012-2013 David Holm <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import abc
from ..utils import OrderedDict
from ..utils import enum
Architecture = enum('Test', 'X86', 'X8664', 'Mips', 'Arm', 'Generic',
enum_type='Architecture')
class Register(object):
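# Maps a register's size in bytes to a zero-padded hex format string (two hex digits per byte).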
_register_fmt = {16: '0x%032lX',
10: '0x%020lX',
8: '0x%016lX',
4: '0x%08lX',
2: '0x%04lX',
1: '0x%02lX'}
def __init__(self, name):
self._name = name
def name(self):
return self._name
def | (self):
raise NotImplementedError
def value(self):
raise NotImplementedError
def str(self):
if self.value() is not None:
return self._register_fmt[self.size()] % self.value()
chars_per_byte = 2
return ''.join(['-' * (self.size() * chars_per_byte)])
def create_static_register(register):
class StaticRegister(type(register), object):
def __init__(self, name):
super(StaticRegister, self).__init__(name)
self._size = register.size()
self._value = register.value()
def size(self):
return self._size
def value(self):
return self._value
return StaticRegister(register.name())
class Cpu(object):
__metaclass__ = abc.ABCMeta
def __init__(self, cpu_factory, registers):
self._registers = OrderedDict()
for group, register_list in registers.iteritems():
registers = OrderedDict([(x.name(),
cpu_factory.create_register(self, x))
for x in register_list])
self._registers[group] = registers
@classmethod
@abc.abstractmethod
def architecture(cls):
raise NotImplementedError
def register(self, name):
for register_dict in self._registers.itervalues():
if name in register_dict:
return register_dict[name]
return None
def registers(self):
return self._registers.iteritems()
@abc.abstractmethod
def stack_pointer(self):
raise NotImplementedError
@abc.abstractmethod
def program_counter(self):
raise NotImplementedError
class CpuFactory(object):
__metaclass__ = abc.ABCMeta
def create_cpu(self, architecture):
assert architecture in _cpu_map
return _cpu_map.get(architecture,
None)(self)
@abc.abstractmethod
def create_register(self, cpu, register):
raise NotImplementedError
class CpuRepository(object):
def __init__(self, cpu_factory):
self._cpu_factory = cpu_factory
self._cpus = {}
def get_cpu(self, architecture):
if architecture in self._cpus:
return self._cpus[architecture]
cpu = self._cpu_factory.create_cpu(architecture)
self._cpus[architecture] = cpu
return cpu
def register_cpu(cls):
_cpu_map[cls.architecture()] = cls
return cls
_cpu_map = {}
| size | identifier_name |
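A minimal sketch of how the `Register` base class above is meant to be specialized; the register name and value here are made up for illustration:

```python
# Hypothetical concrete register built on the Register base class above.
class EaxRegister(Register):
    def size(self):
        return 4  # 4 bytes, so str() formats with '0x%08lX'

    def value(self):
        return 0xDEADBEEF

reg = EaxRegister("eax")
print(reg.name(), reg.str())  # -> eax 0xDEADBEEF
```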
cpu.py | # (void)walker hardware platform support
# Copyright (C) 2012-2013 David Holm <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import abc
from ..utils import OrderedDict
from ..utils import enum
Architecture = enum('Test', 'X86', 'X8664', 'Mips', 'Arm', 'Generic',
enum_type='Architecture')
class Register(object):
_register_fmt = {16: '0x%032lX',
10: '0x%020lX',
8: '0x%016lX',
4: '0x%08lX',
2: '0x%04lX',
1: '0x%02lX'}
def __init__(self, name):
self._name = name
def name(self):
return self._name
def size(self):
raise NotImplementedError
def value(self):
raise NotImplementedError
def str(self):
if self.value() is not None:
return self._register_fmt[self.size()] % self.value()
chars_per_byte = 2
return ''.join(['-' * (self.size() * chars_per_byte)])
def create_static_register(register):
class StaticRegister(type(register), object):
def __init__(self, name):
super(StaticRegister, self).__init__(name)
self._size = register.size()
self._value = register.value()
def size(self):
return self._size
def value(self):
return self._value
return StaticRegister(register.name())
class Cpu(object):
__metaclass__ = abc.ABCMeta
def __init__(self, cpu_factory, registers):
self._registers = OrderedDict()
for group, register_list in registers.iteritems():
registers = OrderedDict([(x.name(),
cpu_factory.create_register(self, x))
for x in register_list])
self._registers[group] = registers
@classmethod
@abc.abstractmethod
def architecture(cls):
raise NotImplementedError
def register(self, name):
for register_dict in self._registers.itervalues():
if name in register_dict:
return register_dict[name]
return None
| def registers(self):
return self._registers.iteritems()
@abc.abstractmethod
def stack_pointer(self):
raise NotImplementedError
@abc.abstractmethod
def program_counter(self):
raise NotImplementedError
class CpuFactory(object):
__metaclass__ = abc.ABCMeta
def create_cpu(self, architecture):
assert architecture in _cpu_map
return _cpu_map.get(architecture,
None)(self)
@abc.abstractmethod
def create_register(self, cpu, register):
raise NotImplementedError
class CpuRepository(object):
def __init__(self, cpu_factory):
self._cpu_factory = cpu_factory
self._cpus = {}
def get_cpu(self, architecture):
if architecture in self._cpus:
return self._cpus[architecture]
cpu = self._cpu_factory.create_cpu(architecture)
self._cpus[architecture] = cpu
return cpu
def register_cpu(cls):
_cpu_map[cls.architecture()] = cls
return cls
_cpu_map = {} | random_line_split |
|
cpu.py | # (void)walker hardware platform support
# Copyright (C) 2012-2013 David Holm <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import abc
from ..utils import OrderedDict
from ..utils import enum
Architecture = enum('Test', 'X86', 'X8664', 'Mips', 'Arm', 'Generic',
enum_type='Architecture')
class Register(object):
|
def create_static_register(register):
class StaticRegister(type(register), object):
def __init__(self, name):
super(StaticRegister, self).__init__(name)
self._size = register.size()
self._value = register.value()
def size(self):
return self._size
def value(self):
return self._value
return StaticRegister(register.name())
class Cpu(object):
__metaclass__ = abc.ABCMeta
def __init__(self, cpu_factory, registers):
self._registers = OrderedDict()
for group, register_list in registers.iteritems():
registers = OrderedDict([(x.name(),
cpu_factory.create_register(self, x))
for x in register_list])
self._registers[group] = registers
@classmethod
@abc.abstractmethod
def architecture(cls):
raise NotImplementedError
def register(self, name):
for register_dict in self._registers.itervalues():
if name in register_dict:
return register_dict[name]
return None
def registers(self):
return self._registers.iteritems()
@abc.abstractmethod
def stack_pointer(self):
raise NotImplementedError
@abc.abstractmethod
def program_counter(self):
raise NotImplementedError
class CpuFactory(object):
__metaclass__ = abc.ABCMeta
def create_cpu(self, architecture):
assert architecture in _cpu_map
return _cpu_map.get(architecture,
None)(self)
@abc.abstractmethod
def create_register(self, cpu, register):
raise NotImplementedError
class CpuRepository(object):
def __init__(self, cpu_factory):
self._cpu_factory = cpu_factory
self._cpus = {}
def get_cpu(self, architecture):
if architecture in self._cpus:
return self._cpus[architecture]
cpu = self._cpu_factory.create_cpu(architecture)
self._cpus[architecture] = cpu
return cpu
def register_cpu(cls):
_cpu_map[cls.architecture()] = cls
return cls
_cpu_map = {}
| _register_fmt = {16: '0x%032lX',
10: '0x%020lX',
8: '0x%016lX',
4: '0x%08lX',
2: '0x%04lX',
1: '0x%02lX'}
def __init__(self, name):
self._name = name
def name(self):
return self._name
def size(self):
raise NotImplementedError
def value(self):
raise NotImplementedError
def str(self):
if self.value() is not None:
return self._register_fmt[self.size()] % self.value()
chars_per_byte = 2
return ''.join(['-' * (self.size() * chars_per_byte)]) | identifier_body |
cpu.py | # (void)walker hardware platform support
# Copyright (C) 2012-2013 David Holm <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import abc
from ..utils import OrderedDict
from ..utils import enum
Architecture = enum('Test', 'X86', 'X8664', 'Mips', 'Arm', 'Generic',
enum_type='Architecture')
class Register(object):
_register_fmt = {16: '0x%032lX',
10: '0x%020lX',
8: '0x%016lX',
4: '0x%08lX',
2: '0x%04lX',
1: '0x%02lX'}
def __init__(self, name):
self._name = name
def name(self):
return self._name
def size(self):
raise NotImplementedError
def value(self):
raise NotImplementedError
def str(self):
if self.value() is not None:
|
chars_per_byte = 2
return ''.join(['-' * (self.size() * chars_per_byte)])
def create_static_register(register):
class StaticRegister(type(register), object):
def __init__(self, name):
super(StaticRegister, self).__init__(name)
self._size = register.size()
self._value = register.value()
def size(self):
return self._size
def value(self):
return self._value
return StaticRegister(register.name())
class Cpu(object):
__metaclass__ = abc.ABCMeta
def __init__(self, cpu_factory, registers):
self._registers = OrderedDict()
for group, register_list in registers.iteritems():
registers = OrderedDict([(x.name(),
cpu_factory.create_register(self, x))
for x in register_list])
self._registers[group] = registers
@classmethod
@abc.abstractmethod
def architecture(cls):
raise NotImplementedError
def register(self, name):
for register_dict in self._registers.itervalues():
if name in register_dict:
return register_dict[name]
return None
def registers(self):
return self._registers.iteritems()
@abc.abstractmethod
def stack_pointer(self):
raise NotImplementedError
@abc.abstractmethod
def program_counter(self):
raise NotImplementedError
class CpuFactory(object):
__metaclass__ = abc.ABCMeta
def create_cpu(self, architecture):
assert architecture in _cpu_map
return _cpu_map.get(architecture,
None)(self)
@abc.abstractmethod
def create_register(self, cpu, register):
raise NotImplementedError
class CpuRepository(object):
def __init__(self, cpu_factory):
self._cpu_factory = cpu_factory
self._cpus = {}
def get_cpu(self, architecture):
if architecture in self._cpus:
return self._cpus[architecture]
cpu = self._cpu_factory.create_cpu(architecture)
self._cpus[architecture] = cpu
return cpu
def register_cpu(cls):
_cpu_map[cls.architecture()] = cls
return cls
_cpu_map = {}
| return self._register_fmt[self.size()] % self.value() | conditional_block |
mod.rs | //! Platform-specific extensions to `std` for Windows.
//!
//! Provides access to platform-level information for Windows, and exposes
//! Windows-specific idioms that would otherwise be inappropriate as part
//! the core `std` library. These extensions allow developers to use
//! `std` types and idioms with Windows in a way that the normal
//! platform-agnostic idioms would not normally support.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(cfg(windows))]
pub mod ffi;
pub mod fs;
pub mod io;
pub mod process;
pub mod raw;
pub mod thread;
/// A prelude for conveniently writing platform-specific code.
///
/// Includes all extension traits, and some important type definitions.
#[stable(feature = "rust1", since = "1.0.0")] | #[doc(no_inline)]
#[stable(feature = "file_offset", since = "1.15.0")]
pub use super::fs::FileExt;
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::fs::{MetadataExt, OpenOptionsExt};
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::io::{
AsHandle, AsSocket, BorrowedHandle, BorrowedSocket, FromRawHandle, FromRawSocket,
HandleOrInvalid, IntoRawHandle, IntoRawSocket, OwnedHandle, OwnedSocket,
};
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::io::{AsRawHandle, AsRawSocket, RawHandle, RawSocket};
} | pub mod prelude {
#[doc(no_inline)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use super::ffi::{OsStrExt, OsStringExt}; | random_line_split |
validate.py | from matplotlib import pyplot
from .algo import _bs_fit
def axes_object(ax):
""" Checks if a value if an Axes. If None, a new one is created.
Both the figure and axes are returned (in that order).
"""
if ax is None:
ax = pyplot.gca()
fig = ax.figure
elif isinstance(ax, pyplot.Axes):
fig = ax.figure
else:
msg = "`ax` must be a matplotlib Axes instance or None"
raise ValueError(msg)
return fig, ax
def axis_name(axis, axname):
"""
Checks that an axis name is in ``{'x', 'y'}``. Raises an error on
an invalid value. Returns the lower case version of valid values.
"""
valid_args = ["x", "y"]
if axis.lower() not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(axname, axis, valid_args))
return axis.lower()
def fit_argument(arg, argname):
"""
Checks that an axis option is in ``{'x', 'y', 'both', None}``.
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
valid_args = ["x", "y", "both", None]
if arg not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(argname, arg, valid_args))
elif arg is not None:
arg = arg.lower()
return arg
def axis_type(axtype):
"""
Checks that a valid axis type is requested.
- *pp* - percentile axis
- *qq* - quantile axis
- *prob* - probability axis
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
if axtype.lower() not in ["pp", "qq", "prob"]:
raise ValueError("invalid axtype: {}".format(axtype))
return axtype.lower()
def axis_label(label):
"""
Replaces None with an empty string for axis labels.
"""
return "" if label is None else label
def | (options):
"""
Replaces None with an empty dict for plotting options.
"""
return dict() if options is None else options.copy()
def estimator(value):
if value.lower() in ["res", "resid", "resids", "residual", "residuals"]:
msg = "Bootstrapping the residuals is not ready yet"
raise NotImplementedError(msg)
elif value.lower() in ["fit", "values"]:
est = _bs_fit
else:
raise ValueError('estimator must be either "resid" or "fit".')
return est
| other_options | identifier_name |
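A usage sketch for the validators above, assuming they are imported from this module; each asserted result follows directly from the code shown:

```python
# Hypothetical usage of the validators defined in this module.
fig, ax = axes_object(None)            # no Axes given, falls back to pyplot.gca()
assert axis_name("X", "axis") == "x"   # lowercased after validation
assert fit_argument("both", "fit") == "both"
assert axis_type("PP") == "pp"
assert axis_label(None) == ""
assert other_options(None) == {}
```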
validate.py | from matplotlib import pyplot
from .algo import _bs_fit
def axes_object(ax):
""" Checks if a value if an Axes. If None, a new one is created.
Both the figure and axes are returned (in that order).
"""
if ax is None:
ax = pyplot.gca()
fig = ax.figure
elif isinstance(ax, pyplot.Axes):
fig = ax.figure
else:
msg = "`ax` must be a matplotlib Axes instance or None"
raise ValueError(msg)
return fig, ax
def axis_name(axis, axname):
"""
Checks that an axis name is in ``{'x', 'y'}``. Raises an error on
an invalid value. Returns the lower case version of valid values.
"""
valid_args = ["x", "y"]
if axis.lower() not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(axname, axis, valid_args))
return axis.lower()
def fit_argument(arg, argname):
"""
Checks that an axis option is in ``{'x', 'y', 'both', None}``.
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
valid_args = ["x", "y", "both", None]
if arg not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(argname, arg, valid_args))
elif arg is not None:
arg = arg.lower()
return arg
def axis_type(axtype):
"""
Checks that a valid axis type is requested.
- *pp* - percentile axis
- *qq* - quantile axis
- *prob* - probability axis
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
if axtype.lower() not in ["pp", "qq", "prob"]:
raise ValueError("invalid axtype: {}".format(axtype))
return axtype.lower()
def axis_label(label):
"""
Replaces None with an empty string for axis labels.
"""
return "" if label is None else label
def other_options(options):
"""
Replaces None with an empty dict for plotting options.
"""
return dict() if options is None else options.copy()
def estimator(value):
if value.lower() in ["res", "resid", "resids", "residual", "residuals"]:
msg = "Bootstrapping the residuals is not ready yet"
raise NotImplementedError(msg) | raise ValueError('estimator must be either "resid" or "fit".')
return est | elif value.lower() in ["fit", "values"]:
est = _bs_fit
else: | random_line_split |
validate.py | from matplotlib import pyplot
from .algo import _bs_fit
def axes_object(ax):
""" Checks if a value if an Axes. If None, a new one is created.
Both the figure and axes are returned (in that order).
"""
if ax is None:
ax = pyplot.gca()
fig = ax.figure
elif isinstance(ax, pyplot.Axes):
fig = ax.figure
else:
msg = "`ax` must be a matplotlib Axes instance or None"
raise ValueError(msg)
return fig, ax
def axis_name(axis, axname):
"""
Checks that an axis name is in ``{'x', 'y'}``. Raises an error on
an invalid value. Returns the lower case version of valid values.
"""
valid_args = ["x", "y"]
if axis.lower() not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(axname, axis, valid_args))
return axis.lower()
def fit_argument(arg, argname):
"""
Checks that an axis option is in ``{'x', 'y', 'both', None}``.
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
valid_args = ["x", "y", "both", None]
if arg not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(argname, arg, valid_args))
elif arg is not None:
arg = arg.lower()
return arg
def axis_type(axtype):
"""
Checks that a valid axis type is requested.
- *pp* - percentile axis
- *qq* - quantile axis
- *prob* - probability axis
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
if axtype.lower() not in ["pp", "qq", "prob"]:
raise ValueError("invalid axtype: {}".format(axtype))
return axtype.lower()
def axis_label(label):
"""
Replaces None with an empty string for axis labels.
"""
return "" if label is None else label
def other_options(options):
"""
Replaces None with an empty dict for plotting options.
"""
return dict() if options is None else options.copy()
def estimator(value):
if value.lower() in ["res", "resid", "resids", "residual", "residuals"]:
msg = "Bootstrapping the residuals is not ready yet"
raise NotImplementedError(msg)
elif value.lower() in ["fit", "values"]:
est = _bs_fit
else:
|
return est
| raise ValueError('estimator must be either "resid" or "fit".') | conditional_block |
validate.py | from matplotlib import pyplot
from .algo import _bs_fit
def axes_object(ax):
""" Checks if a value if an Axes. If None, a new one is created.
Both the figure and axes are returned (in that order).
"""
if ax is None:
ax = pyplot.gca()
fig = ax.figure
elif isinstance(ax, pyplot.Axes):
fig = ax.figure
else:
msg = "`ax` must be a matplotlib Axes instance or None"
raise ValueError(msg)
return fig, ax
def axis_name(axis, axname):
"""
Checks that an axis name is in ``{'x', 'y'}``. Raises an error on
an invalid value. Returns the lower case version of valid values.
"""
valid_args = ["x", "y"]
if axis.lower() not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(axname, axis, valid_args))
return axis.lower()
def fit_argument(arg, argname):
"""
Checks that an axis option is in ``{'x', 'y', 'both', None}``.
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
valid_args = ["x", "y", "both", None]
if arg not in valid_args:
msg = "Invalid value for {} ({}). Must be on of {}."
raise ValueError(msg.format(argname, arg, valid_args))
elif arg is not None:
arg = arg.lower()
return arg
def axis_type(axtype):
"""
Checks that a valid axis type is requested.
- *pp* - percentile axis
- *qq* - quantile axis
- *prob* - probability axis
Raises an error on an invalid value. Returns the lower case version
of valid values.
"""
if axtype.lower() not in ["pp", "qq", "prob"]:
raise ValueError("invalid axtype: {}".format(axtype))
return axtype.lower()
def axis_label(label):
|
def other_options(options):
"""
Replaces None with an empty dict for plotting options.
"""
return dict() if options is None else options.copy()
def estimator(value):
if value.lower() in ["res", "resid", "resids", "residual", "residuals"]:
msg = "Bootstrapping the residuals is not ready yet"
raise NotImplementedError(msg)
elif value.lower() in ["fit", "values"]:
est = _bs_fit
else:
raise ValueError('estimator must be either "resid" or "fit".')
return est
| """
Replaces None with an empty string for axis labels.
"""
return "" if label is None else label | identifier_body |
main.rs | extern crate itertools;
extern crate clap;
extern crate rand;
use itertools::Itertools;
use clap::{App, Arg};
use std::fs::File;
use rand::Rng;
use std::io::{Read, Write};
const DEFAULT_CROSSOVER_POINTS: usize = 3;
const DEFAULT_MUTATION_RATE: f64 = 0.001;
const DEFAULT_UNIT: usize = 1;
const DEFAULT_STRIDE: usize = 1;
fn main() {
let matches = App::new("matef")
.version("1.0")
.author("Geordon Worley <[email protected]>")
.about("Mates two files")
.arg(Arg::with_name("output")
.help("The output location")
.required(true)
.index(1))
.arg(Arg::with_name("file1")
.help("Input file 1")
.required(true)
.index(2))
.arg(Arg::with_name("file2")
.help("Input file 2")
.required(true)
.index(3))
.arg(Arg::with_name("mutation-rate")
.short("m")
.multiple(false)
.long("mutation-rate")
.value_name("RATE")
.help("Takes a RATE of mutation that randomizes UNIT bytes at a time")
.takes_value(true))
.arg(Arg::with_name("crossover-points")
.short("c")
.multiple(false)
.long("crossover-points")
.value_name("NUMBER")
.help("Takes a NUMBER of crossover points of 1 or greater")
.takes_value(true))
.arg(Arg::with_name("unit")
.short("u")
.multiple(false)
.long("unit")
.value_name("BYTES")
.help("Takes an amount of BYTES that are always mutated as a group")
.takes_value(true))
.arg(Arg::with_name("stride")
.short("s")
.multiple(false)
.long("stride")
.value_name("BYTES")
.help("Takes an amount of BYTES that define the alignment of mutated units")
.takes_value(true))
.get_matches();
let crossover_points = match matches.value_of("crossover-points") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 crossover-points.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse crossover-points: {}", e);
return;
},
}
},
None => DEFAULT_CROSSOVER_POINTS,
};
let mutation_rate = match matches.value_of("mutation-rate") {
Some(c) => {
match c.parse::<f64>() {
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse mutation-rate: {}", e);
return;
},
}
},
None => DEFAULT_MUTATION_RATE,
};
let mutation_size = match matches.value_of("unit") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the unit.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse unit: {}", e);
return;
},
}
},
None => DEFAULT_UNIT,
};
let stride = match matches.value_of("stride") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the stride.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse stride: {}", e);
return;
},
}
},
None => DEFAULT_STRIDE,
};
let output = matches.value_of("output").unwrap();
let filenames = (matches.value_of("file1").unwrap(), matches.value_of("file2").unwrap());
let files = (
match File::open(filenames.0) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.0, e);
return;
}, | }
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.0, e);
return;
},
}, match File::open(filenames.1) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.1, e);
return;
},
}
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.1, e);
return;
},
},
);
let len = std::cmp::min(files.0.len(), files.1.len());
let mut rng = rand::os::OsRng::new().ok().unwrap();
//Generate crossover file
let mut result =
(0..crossover_points)
//Map these to random crossover points
.map(|_| rng.gen_range(0, len))
//Add total_instructions at the end so we can generate a range with it
.chain(Some(len))
//Sort them by value into BTree, which removes duplicates and orders them
.fold(std::collections::BTreeSet::new(), |mut set, i| {set.insert(i); set})
//Iterate over the sorted values
.iter()
//Turn every copy of two, prepending a 0, into a range
.scan(0, |prev, x| {let out = Some(*prev..*x); *prev = *x; out})
//Enumerate by index to differentiate odd and even values
.enumerate()
//Map even pairs to ranges in parent 0 and odd ones to ranges in parent 1 and expand the ranges
.flat_map(|(index, range)| {
{if index % 2 == 0 {files.0[range].iter()} else {files.1[range].iter()}}.cloned()
})
//Collect all the instruction ranges from each parent
.collect_vec();
//Mutate result file
let strides =
//We can only stride the beginning of a mutation group up to this actual len
(result.len() - (mutation_size - 1))
//Divide by stride
/ stride;
for i in 0..strides {
if rng.next_f64() < mutation_rate {
for v in &mut result[(i * stride)..(i * stride + mutation_size)] {
*v = rng.gen();
}
}
}
let mut outfile = match File::create(output) {
Ok(f) => f,
Err(e) => {
println!("Could not create file \"{}\": {}", output, e);
return;
},
};
match outfile.write_all(&result[..]) {
Ok(_) => {},
Err(e) => {
println!("Could not write to \"{}\": {}", output, e);
return;
},
}
} | random_line_split |
|
main.rs | extern crate itertools;
extern crate clap;
extern crate rand;
use itertools::Itertools;
use clap::{App, Arg};
use std::fs::File;
use rand::Rng;
use std::io::{Read, Write};
const DEFAULT_CROSSOVER_POINTS: usize = 3;
const DEFAULT_MUTATION_RATE: f64 = 0.001;
const DEFAULT_UNIT: usize = 1;
const DEFAULT_STRIDE: usize = 1;
fn main() | {
let matches = App::new("matef")
.version("1.0")
.author("Geordon Worley <[email protected]>")
.about("Mates two files")
.arg(Arg::with_name("output")
.help("The output location")
.required(true)
.index(1))
.arg(Arg::with_name("file1")
.help("Input file 1")
.required(true)
.index(2))
.arg(Arg::with_name("file2")
.help("Input file 2")
.required(true)
.index(3))
.arg(Arg::with_name("mutation-rate")
.short("m")
.multiple(false)
.long("mutation-rate")
.value_name("RATE")
.help("Takes a RATE of mutation that randomizes UNIT bytes at a time")
.takes_value(true))
.arg(Arg::with_name("crossover-points")
.short("c")
.multiple(false)
.long("crossover-points")
.value_name("NUMBER")
.help("Takes a NUMBER of crossover points of 1 or greater")
.takes_value(true))
.arg(Arg::with_name("unit")
.short("u")
.multiple(false)
.long("unit")
.value_name("BYTES")
.help("Takes an amount of BYTES that are always mutated as a group")
.takes_value(true))
.arg(Arg::with_name("stride")
.short("s")
.multiple(false)
.long("stride")
.value_name("BYTES")
.help("Takes an amount of BYTES that define the alignment of mutated units")
.takes_value(true))
.get_matches();
let crossover_points = match matches.value_of("crossover-points") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 crossover-points.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse crossover-points: {}", e);
return;
},
}
},
None => DEFAULT_CROSSOVER_POINTS,
};
let mutation_rate = match matches.value_of("mutation-rate") {
Some(c) => {
match c.parse::<f64>() {
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse mutation-rate: {}", e);
return;
},
}
},
None => DEFAULT_MUTATION_RATE,
};
let mutation_size = match matches.value_of("unit") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the unit.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse unit: {}", e);
return;
},
}
},
None => DEFAULT_UNIT,
};
let stride = match matches.value_of("stride") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the stride.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse stride: {}", e);
return;
},
}
},
None => DEFAULT_STRIDE,
};
let output = matches.value_of("output").unwrap();
let filenames = (matches.value_of("file1").unwrap(), matches.value_of("file2").unwrap());
let files = (
match File::open(filenames.0) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.0, e);
return;
},
}
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.0, e);
return;
},
}, match File::open(filenames.1) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.1, e);
return;
},
}
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.1, e);
return;
},
},
);
let len = std::cmp::min(files.0.len(), files.1.len());
let mut rng = rand::os::OsRng::new().ok().unwrap();
//Generate crossover file
let mut result =
(0..crossover_points)
//Map these to random crossover points
.map(|_| rng.gen_range(0, len))
//Add total_instructions at the end so we can generate a range with it
.chain(Some(len))
//Sort them by value into BTree, which removes duplicates and orders them
.fold(std::collections::BTreeSet::new(), |mut set, i| {set.insert(i); set})
//Iterate over the sorted values
.iter()
//Turn every copy of two, prepending a 0, into a range
.scan(0, |prev, x| {let out = Some(*prev..*x); *prev = *x; out})
//Enumerate by index to differentiate odd and even values
.enumerate()
//Map even pairs to ranges in parent 0 and odd ones to ranges in parent 1 and expand the ranges
.flat_map(|(index, range)| {
{if index % 2 == 0 {files.0[range].iter()} else {files.1[range].iter()}}.cloned()
})
//Collect all the instruction ranges from each parent
.collect_vec();
//Mutate result file
let strides =
//We can only stride the beginning of a mutation group up to this actual len
(result.len() - (mutation_size - 1))
//Divide by stride
/ stride;
for i in 0..strides {
if rng.next_f64() < mutation_rate {
for v in &mut result[(i * stride)..(i * stride + mutation_size)] {
*v = rng.gen();
}
}
}
let mut outfile = match File::create(output) {
Ok(f) => f,
Err(e) => {
println!("Could not create file \"{}\": {}", output, e);
return;
},
};
match outfile.write_all(&result[..]) {
Ok(_) => {},
Err(e) => {
println!("Could not write to \"{}\": {}", output, e);
return;
},
}
} | identifier_body |
|
main.rs | extern crate itertools;
extern crate clap;
extern crate rand;
use itertools::Itertools;
use clap::{App, Arg};
use std::fs::File;
use rand::Rng;
use std::io::{Read, Write};
const DEFAULT_CROSSOVER_POINTS: usize = 3;
const DEFAULT_MUTATION_RATE: f64 = 0.001;
const DEFAULT_UNIT: usize = 1;
const DEFAULT_STRIDE: usize = 1;
fn | () {
let matches = App::new("matef")
.version("1.0")
.author("Geordon Worley <[email protected]>")
.about("Mates two files")
.arg(Arg::with_name("output")
.help("The output location")
.required(true)
.index(1))
.arg(Arg::with_name("file1")
.help("Input file 1")
.required(true)
.index(2))
.arg(Arg::with_name("file2")
.help("Input file 2")
.required(true)
.index(3))
.arg(Arg::with_name("mutation-rate")
.short("m")
.multiple(false)
.long("mutation-rate")
.value_name("RATE")
.help("Takes a RATE of mutation that randomizes UNIT bytes at a time")
.takes_value(true))
.arg(Arg::with_name("crossover-points")
.short("c")
.multiple(false)
.long("crossover-points")
.value_name("NUMBER")
.help("Takes a NUMBER of crossover points of 1 or greater")
.takes_value(true))
.arg(Arg::with_name("unit")
.short("u")
.multiple(false)
.long("unit")
.value_name("BYTES")
.help("Takes an amount of BYTES that are always mutated as a group")
.takes_value(true))
.arg(Arg::with_name("stride")
.short("s")
.multiple(false)
.long("stride")
.value_name("BYTES")
.help("Takes an amount of BYTES that define the alignment of mutated units")
.takes_value(true))
.get_matches();
let crossover_points = match matches.value_of("crossover-points") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 crossover-points.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse crossover-points: {}", e);
return;
},
}
},
None => DEFAULT_CROSSOVER_POINTS,
};
let mutation_rate = match matches.value_of("mutation-rate") {
Some(c) => {
match c.parse::<f64>() {
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse mutation-rate: {}", e);
return;
},
}
},
None => DEFAULT_MUTATION_RATE,
};
let mutation_size = match matches.value_of("unit") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the unit.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse unit: {}", e);
return;
},
}
},
None => DEFAULT_UNIT,
};
let stride = match matches.value_of("stride") {
Some(c) => {
match c.parse::<usize>() {
Ok(0) => {
println!("Error: Cannot accept 0 bytes as the stride.");
return;
},
Ok(n) => n,
Err(e) => {
println!("Error: Failed to parse stride: {}", e);
return;
},
}
},
None => DEFAULT_STRIDE,
};
let output = matches.value_of("output").unwrap();
let filenames = (matches.value_of("file1").unwrap(), matches.value_of("file2").unwrap());
let files = (
match File::open(filenames.0) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.0, e);
return;
},
}
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.0, e);
return;
},
}, match File::open(filenames.1) {
Ok(mut f) => {
let mut v = Vec::new();
match f.read_to_end(&mut v) {
Ok(_) => {},
Err(e) => {
println!("Could not read file \"{}\": {}", filenames.1, e);
return;
},
}
v
},
Err(e) => {
println!("Could not open file \"{}\": {}", filenames.1, e);
return;
},
},
);
let len = std::cmp::min(files.0.len(), files.1.len());
let mut rng = rand::os::OsRng::new().ok().unwrap();
//Generate crossover file
let mut result =
(0..crossover_points)
//Map these to random crossover points
.map(|_| rng.gen_range(0, len))
//Add total_instructions at the end so we can generate a range with it
.chain(Some(len))
//Sort them by value into BTree, which removes duplicates and orders them
.fold(std::collections::BTreeSet::new(), |mut set, i| {set.insert(i); set})
//Iterate over the sorted values
.iter()
//Turn every copy of two, prepending a 0, into a range
.scan(0, |prev, x| {let out = Some(*prev..*x); *prev = *x; out})
//Enumerate by index to differentiate odd and even values
.enumerate()
//Map even pairs to ranges in parent 0 and odd ones to ranges in parent 1 and expand the ranges
.flat_map(|(index, range)| {
{if index % 2 == 0 {files.0[range].iter()} else {files.1[range].iter()}}.cloned()
})
//Collect all the instruction ranges from each parent
.collect_vec();
//Mutate result file
let strides =
//We can only stride the beginning of a mutation group up to this actual len
(result.len() - (mutation_size - 1))
//Divide by stride
/ stride;
for i in 0..strides {
if rng.next_f64() < mutation_rate {
for v in &mut result[(i * stride)..(i * stride + mutation_size)] {
*v = rng.gen();
}
}
}
let mut outfile = match File::create(output) {
Ok(f) => f,
Err(e) => {
println!("Could not create file \"{}\": {}", output, e);
return;
},
};
match outfile.write_all(&result[..]) {
Ok(_) => {},
Err(e) => {
println!("Could not write to \"{}\": {}", output, e);
return;
},
}
}
| main | identifier_name |
regress-900055.js | // Copyright 2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var alias = eval;
function e(s) |
assertEquals(42, e("42"));
assertEquals(Object, e("Object"));
assertEquals(e, e("e"));
var caught = false;
try {
e('s'); // should throw exception since aliased eval is global
} catch (e) {
caught = true;
assertTrue(e instanceof ReferenceError);
}
assertTrue(caught);
| { return alias(s); } | identifier_body |
regress-900055.js | // Copyright 2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var alias = eval;
function e(s) { return alias(s); }
assertEquals(42, e("42"));
assertEquals(Object, e("Object"));
assertEquals(e, e("e")); | e('s'); // should throw exception since aliased eval is global
} catch (e) {
caught = true;
assertTrue(e instanceof ReferenceError);
}
assertTrue(caught); |
var caught = false;
try { | random_line_split |
regress-900055.js | // Copyright 2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var alias = eval;
function | (s) { return alias(s); }
assertEquals(42, e("42"));
assertEquals(Object, e("Object"));
assertEquals(e, e("e"));
var caught = false;
try {
e('s'); // should throw exception since aliased eval is global
} catch (e) {
caught = true;
assertTrue(e instanceof ReferenceError);
}
assertTrue(caught);
| e | identifier_name |
files.py | import hashlib
import os
import random
import time
from django.utils.deconstruct import deconstructible
from django.utils.text import slugify
@deconstructible
class UploadToDir(object):
"""Generates a function to give to ``upload_to`` parameter in
models.Fields, that generates a name for uploaded files based on ``populate_from``
attribute.
"""
def __init__(self, path, populate_from=None, prefix=None, random_name=False):
self.path = path
self.populate_from = populate_from
self.random_name = random_name
self.prefix = prefix
def __call__(self, instance, filename):
"""Generates an name for an uploaded file."""
if self.populate_from is not None and not hasattr(instance, self.populate_from):
raise AttributeError(
"Instance hasn't {} attribute".format(self.populate_from)
)
ext = filename.split(".")[-1]
readable_name = slugify(filename.split(".")[0])
if self.populate_from:
readable_name = slugify(getattr(instance, self.populate_from))
if self.random_name:
random_name = hashlib.sha256( | elif self.prefix is not None:
readable_name = f"{self.prefix}{readable_name}"
file_name = "{}.{}".format(readable_name, ext)
return os.path.join(self.path, file_name) | "{}--{}".format(time.time(), random.random()).encode("utf-8")
)
readable_name = random_name.hexdigest() | random_line_split |
files.py | import hashlib
import os
import random
import time
from django.utils.deconstruct import deconstructible
from django.utils.text import slugify
@deconstructible
class UploadToDir(object):
"""Generates a function to give to ``upload_to`` parameter in
models.Fields, that generates a name for uploaded files based on ``populate_from``
attribute.
"""
def | (self, path, populate_from=None, prefix=None, random_name=False):
self.path = path
self.populate_from = populate_from
self.random_name = random_name
self.prefix = prefix
def __call__(self, instance, filename):
"""Generates an name for an uploaded file."""
if self.populate_from is not None and not hasattr(instance, self.populate_from):
raise AttributeError(
"Instance hasn't {} attribute".format(self.populate_from)
)
ext = filename.split(".")[-1]
readable_name = slugify(filename.split(".")[0])
if self.populate_from:
readable_name = slugify(getattr(instance, self.populate_from))
if self.random_name:
random_name = hashlib.sha256(
"{}--{}".format(time.time(), random.random()).encode("utf-8")
)
readable_name = random_name.hexdigest()
elif self.prefix is not None:
readable_name = f"{self.prefix}{readable_name}"
file_name = "{}.{}".format(readable_name, ext)
return os.path.join(self.path, file_name)
| __init__ | identifier_name |
files.py | import hashlib
import os
import random
import time
from django.utils.deconstruct import deconstructible
from django.utils.text import slugify
@deconstructible
class UploadToDir(object):
"""Generates a function to give to ``upload_to`` parameter in
models.Fields, that generates a name for uploaded files based on ``populate_from``
attribute.
"""
def __init__(self, path, populate_from=None, prefix=None, random_name=False):
self.path = path
self.populate_from = populate_from
self.random_name = random_name
self.prefix = prefix
def __call__(self, instance, filename):
"""Generates an name for an uploaded file."""
if self.populate_from is not None and not hasattr(instance, self.populate_from):
raise AttributeError(
"Instance hasn't {} attribute".format(self.populate_from)
)
ext = filename.split(".")[-1]
readable_name = slugify(filename.split(".")[0])
if self.populate_from:
|
if self.random_name:
random_name = hashlib.sha256(
"{}--{}".format(time.time(), random.random()).encode("utf-8")
)
readable_name = random_name.hexdigest()
elif self.prefix is not None:
readable_name = f"{self.prefix}{readable_name}"
file_name = "{}.{}".format(readable_name, ext)
return os.path.join(self.path, file_name)
| readable_name = slugify(getattr(instance, self.populate_from)) | conditional_block |
files.py | import hashlib
import os
import random
import time
from django.utils.deconstruct import deconstructible
from django.utils.text import slugify
@deconstructible
class UploadToDir(object):
"""Generates a function to give to ``upload_to`` parameter in
models.Fields, that generates a name for uploaded files based on ``populate_from``
attribute.
"""
def __init__(self, path, populate_from=None, prefix=None, random_name=False):
self.path = path
self.populate_from = populate_from
self.random_name = random_name
self.prefix = prefix
def __call__(self, instance, filename):
| """Generates an name for an uploaded file."""
if self.populate_from is not None and not hasattr(instance, self.populate_from):
raise AttributeError(
"Instance hasn't {} attribute".format(self.populate_from)
)
ext = filename.split(".")[-1]
readable_name = slugify(filename.split(".")[0])
if self.populate_from:
readable_name = slugify(getattr(instance, self.populate_from))
if self.random_name:
random_name = hashlib.sha256(
"{}--{}".format(time.time(), random.random()).encode("utf-8")
)
readable_name = random_name.hexdigest()
elif self.prefix is not None:
readable_name = f"{self.prefix}{readable_name}"
file_name = "{}.{}".format(readable_name, ext)
return os.path.join(self.path, file_name) | identifier_body |
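A usage sketch for `UploadToDir` as a Django `upload_to` callable; the model and field names are hypothetical:

```python
# Hypothetical Django model using UploadToDir (model/field names are made up).
from django.db import models

class Profile(models.Model):
    name = models.CharField(max_length=100)
    # Uploads land under "avatars/" named from the slugified `name` attribute.
    avatar = models.ImageField(upload_to=UploadToDir("avatars", populate_from="name"))
```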
|
build.js | 'use strict';
var path = require('path');
var gulp = require('gulp');
var conf = require('./conf');
var $ = require('gulp-load-plugins')({
pattern: ['gulp-*', 'main-bower-files', 'uglify-save-license', 'del']
});
gulp.task('partials', ['clean'], function () {
return gulp.src([
path.join(conf.paths.src, '/app/**/*.html'),
path.join(conf.paths.tmp, '/serve/app/**/*.html')
])
.pipe($.minifyHtml({
empty: true,
spare: true,
quotes: true
}))
.pipe($.angularTemplatecache('templateCacheHtml.js', {
module: 'ngDatetimeRangePicker',
root: 'app'
}))
.pipe(gulp.dest(conf.paths.tmp + '/partials/'));
});
gulp.task('html', ['inject', 'partials'], function () {
var partialsInjectFile = gulp.src(path.join(conf.paths.tmp, '/partials/templateCacheHtml.js'), {read: false});
var partialsInjectOptions = {
starttag: '<!-- inject:partials -->',
ignorePath: path.join(conf.paths.tmp, '/partials'),
addRootSlash: false
};
var htmlFilter = $.filter('*.html', {restore: true});
var jsFilter = $.filter('**/*.js', {restore: true});
| return gulp.src(path.join(conf.paths.tmp, '/serve/*.html'))
.pipe($.inject(partialsInjectFile, partialsInjectOptions))
.pipe(assets = $.useref.assets())
//.pipe($.rev())
.pipe(jsFilter)
.pipe($.sourcemaps.init())
//.pipe($.uglify({ preserveComments: $.uglifySaveLicense })).on('error', conf.errorHandler('Uglify'))
.pipe($.sourcemaps.write('maps'))
.pipe(jsFilter.restore)
.pipe(cssFilter)
.pipe($.sourcemaps.init())
.pipe($.minifyCss({processImport: false}))
.pipe($.sourcemaps.write('maps'))
.pipe(cssFilter.restore)
.pipe(assets.restore())
.pipe($.useref())
.pipe($.revReplace())
.pipe(htmlFilter)
.pipe($.minifyHtml({
empty: true,
spare: true,
quotes: true,
conditionals: true
}))
.pipe(htmlFilter.restore)
.pipe(gulp.dest(path.join(conf.paths.dist, '/')))
.pipe($.size({title: path.join(conf.paths.dist, '/'), showFiles: true}));
});
// Only applies for fonts from bower dependencies
// Custom fonts are handled by the "other" task
gulp.task('fonts', function () {
return gulp.src($.mainBowerFiles())
.pipe($.filter('**/*.{eot,svg,ttf,woff,woff2}'))
.pipe($.flatten())
.pipe(gulp.dest(path.join(conf.paths.dist, '/fonts/')));
});
gulp.task('other', function () {
var fileFilter = $.filter(function (file) {
return file.stat.isFile();
});
return gulp.src([
path.join(conf.paths.src, '/**/*'),
path.join('!' + conf.paths.src, '/**/*.{html,css,js,scss}')
])
.pipe(fileFilter)
.pipe(gulp.dest(path.join(conf.paths.dist, '/')));
});
gulp.task('clean', function () {
return $.del([path.join(conf.paths.dist, '/'), path.join(conf.paths.tmp, '/**')]);
});
gulp.task('build', ['html', 'fonts', 'other']);
gulp.task('release', ['build'], function () {
return gulp.src([
path.join(conf.paths.dist, '/scripts/**/*.js')
])
.pipe($.sourcemaps.init())
.pipe($.uglify()).on('error', conf.errorHandler('Uglify'))
.pipe($.sourcemaps.write('maps'))
.pipe(gulp.dest(path.join(conf.paths.dist, '/scripts/min/')));
}); | var cssFilter = $.filter('**/*.css', {restore: true});
var assets;
| random_line_split |
typeck-unsafe-always-share.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verify that UnsafeCell is *always* sync regardless of whether `T` is sync.
// ignore-tidy-linelength
use std::cell::UnsafeCell;
use std::kinds::marker;
struct MySync<T> {
u: UnsafeCell<T>
}
struct NoSync {
m: marker::NoSync
}
fn test<T: Sync>(s: T){
}
| test(uns);
let ms = MySync{u: uns};
test(ms);
let ns = NoSync{m: marker::NoSync};
test(ns);
//~^ ERROR `core::kinds::Sync` is not implemented
} | fn main() {
let us = UnsafeCell::new(MySync{u: UnsafeCell::new(0i)});
test(us);
let uns = UnsafeCell::new(NoSync{m: marker::NoSync}); | random_line_split |
typeck-unsafe-always-share.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verify that UnsafeCell is *always* sync regardless of whether `T` is sync.
// ignore-tidy-linelength
use std::cell::UnsafeCell;
use std::kinds::marker;
struct MySync<T> {
u: UnsafeCell<T>
}
struct NoSync {
m: marker::NoSync
}
fn test<T: Sync>(s: T) |
fn main() {
let us = UnsafeCell::new(MySync{u: UnsafeCell::new(0i)});
test(us);
let uns = UnsafeCell::new(NoSync{m: marker::NoSync});
test(uns);
let ms = MySync{u: uns};
test(ms);
let ns = NoSync{m: marker::NoSync};
test(ns);
//~^ ERROR `core::kinds::Sync` is not implemented
}
| {
} | identifier_body |
typeck-unsafe-always-share.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verify that UnsafeCell is *always* sync regardless if `T` is sync.
// ignore-tidy-linelength
use std::cell::UnsafeCell;
use std::kinds::marker;
struct | <T> {
u: UnsafeCell<T>
}
struct NoSync {
m: marker::NoSync
}
fn test<T: Sync>(s: T){
}
fn main() {
let us = UnsafeCell::new(MySync{u: UnsafeCell::new(0i)});
test(us);
let uns = UnsafeCell::new(NoSync{m: marker::NoSync});
test(uns);
let ms = MySync{u: uns};
test(ms);
let ns = NoSync{m: marker::NoSync};
test(ns);
//~^ ERROR `core::kinds::Sync` is not implemented
}
| MySync | identifier_name |
playground.js

const ScratchRender = require('../RenderWebGL');
const getMousePosition = require('./getMousePosition');

const canvas = document.getElementById('scratch-stage');
let fudge = 90;
const renderer = new ScratchRender(canvas);
renderer.setLayerGroupOrdering(['group1']);

const drawableID = renderer.createDrawable('group1');
renderer.updateDrawableProperties(drawableID, {
    position: [0, 0],
    scale: [100, 100],
    direction: 90
});

const WantedSkinType = {
    bitmap: 'bitmap',
    vector: 'vector',
    pen: 'pen'
};

const drawableID2 = renderer.createDrawable('group1');
const wantedSkin = WantedSkinType.vector;

// Bitmap (squirrel)
const image = new Image();
image.addEventListener('load', () => {
    const bitmapSkinId = renderer.createBitmapSkin(image);
    if (wantedSkin === WantedSkinType.bitmap) {
        renderer.updateDrawableProperties(drawableID2, {
            skinId: bitmapSkinId
        });
    }
});
image.crossOrigin = 'anonymous';
image.src = 'https://cdn.assets.scratch.mit.edu/internalapi/asset/7e24c99c1b853e52f8e7f9004416fa34.png/get/';

// SVG (cat 1-a)
const xhr = new XMLHttpRequest();
xhr.addEventListener('load', () => {
    const skinId = renderer.createSVGSkin(xhr.responseText);
    if (wantedSkin === WantedSkinType.vector) {
        renderer.updateDrawableProperties(drawableID2, {
            skinId: skinId
        });
    }
});
xhr.open('GET', 'https://cdn.assets.scratch.mit.edu/internalapi/asset/b7853f557e4426412e64bb3da6531a99.svg/get/');
xhr.send();

if (wantedSkin === WantedSkinType.pen) {
    const penSkinID = renderer.createPenSkin();
    renderer.updateDrawableProperties(drawableID2, {
        skinId: penSkinID
    });

    canvas.addEventListener('click', event => {
        const rect = canvas.getBoundingClientRect();
        const x = event.clientX - rect.left;
        const y = event.clientY - rect.top;
        renderer.penLine(penSkinID, {
            color4f: [Math.random(), Math.random(), Math.random(), 1],
            diameter: 8
        },
        x - 240, 180 - y, (Math.random() * 480) - 240, (Math.random() * 360) - 180);
    });
}

let posX = 0;
let posY = 0;
let scaleX = 100;
let scaleY = 100;
let fudgeProperty = 'posx';

const fudgeInput = document.getElementById('fudge');
const fudgePropertyInput = document.getElementById('fudgeproperty');
const fudgeMinInput = document.getElementById('fudgeMin');
const fudgeMaxInput = document.getElementById('fudgeMax');

/* eslint require-jsdoc: 0 */
const updateFudgeProperty = event => {
    fudgeProperty = event.target.value;
};

const updateFudgeMin = event => {
    fudgeInput.min = event.target.valueAsNumber;
};

const updateFudgeMax = event => {
    fudgeInput.max = event.target.valueAsNumber;
};

fudgePropertyInput.addEventListener('change', updateFudgeProperty);
fudgePropertyInput.addEventListener('init', updateFudgeProperty);

fudgeMinInput.addEventListener('change', updateFudgeMin);
fudgeMinInput.addEventListener('init', updateFudgeMin);

fudgeMaxInput.addEventListener('change', updateFudgeMax);
fudgeMaxInput.addEventListener('init', updateFudgeMax);

// Ugly hack to properly set the values of the inputs on page load,
// since they persist across reloads, at least in Firefox.
// The best ugly hacks are the ones that reduce code duplication!
fudgePropertyInput.dispatchEvent(new CustomEvent('init'));
fudgeMinInput.dispatchEvent(new CustomEvent('init'));
fudgeMaxInput.dispatchEvent(new CustomEvent('init'));
fudgeInput.dispatchEvent(new CustomEvent('init'));

const handleFudgeChanged = function (event) {
    fudge = event.target.valueAsNumber;
    const props = {};
    switch (fudgeProperty) {
    case 'posx':
        props.position = [fudge, posY];
        posX = fudge;
        break;
    case 'posy':
        props.position = [posX, fudge];
        posY = fudge;
        break;
    case 'direction':
        props.direction = fudge;
        break;
    case 'scalex':
        props.scale = [fudge, scaleY];
        scaleX = fudge;
        break;
    case 'scaley':
        props.scale = [scaleX, fudge];
        scaleY = fudge;
        break;
    case 'scaleboth':
        props.scale = [fudge, fudge];
        scaleX = fudge;
        scaleY = fudge;
        break;
    case 'color':
        props.color = fudge;
        break;
    case 'whirl':
        props.whirl = fudge;
        break;
    case 'fisheye':
        props.fisheye = fudge;
        break;
    case 'pixelate':
        props.pixelate = fudge;
        break;
    case 'mosaic':
        props.mosaic = fudge;
        break;
    case 'brightness':
        props.brightness = fudge;
        break;
    case 'ghost':
        props.ghost = fudge;
        break;
    }
    renderer.updateDrawableProperties(drawableID2, props);
};

fudgeInput.addEventListener('input', handleFudgeChanged);
fudgeInput.addEventListener('change', handleFudgeChanged);
fudgeInput.addEventListener('init', handleFudgeChanged);

const updateStageScale = event => {
    renderer.resize(480 * event.target.valueAsNumber, 360 * event.target.valueAsNumber);
};

const stageScaleInput = document.getElementById('stage-scale');
stageScaleInput.addEventListener('input', updateStageScale);
stageScaleInput.addEventListener('change', updateStageScale);

canvas.addEventListener('mousemove', event => {
    const mousePos = getMousePosition(event, canvas);
    renderer.extractColor(mousePos.x, mousePos.y, 30);
});

canvas.addEventListener('click', event => {
    const mousePos = getMousePosition(event, canvas);
    const pickID = renderer.pick(mousePos.x, mousePos.y);
    console.log(`You clicked on ${(pickID < 0 ? 'nothing' : `ID# ${pickID}`)}`);
    if (pickID >= 0) {
        console.dir(renderer.extractDrawableScreenSpace(pickID, mousePos.x, mousePos.y));
    }
});

const drawStep = function () {
    renderer.draw();
    // renderer.getBounds(drawableID2);
    // renderer.isTouchingColor(drawableID2, [255,255,255]);
    requestAnimationFrame(drawStep);
};
drawStep();

const debugCanvas = /** @type {canvas} */ document.getElementById('debug-canvas');
renderer.setDebugCanvas(debugCanvas);
run_tests.py

#!/usr/bin/env python
"""Execute the tests for the samcat program.

The golden test outputs are generated by the script generate_outputs.sh.

You have to give the root paths to the source and the binaries as arguments to
the program.  These are the paths to the directory that contains the 'projects'
directory.

Usage:  run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH
"""
import logging
import os.path
import sys

# Automagically add util/py_lib to PYTHONPATH environment variable.
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
                                    '..', '..', 'util', 'py_lib'))
sys.path.insert(0, path)

import seqan.app_tests as app_tests


def main(source_base, binary_base):
    """Main entry point of the script."""
    print 'Executing test for samcat'
    print '========================='
    print

    ph = app_tests.TestPathHelper(
        source_base, binary_base,
        'apps/samcat/tests')  # tests dir

    # ============================================================
    # Auto-detect the binary path.
    # ============================================================

    path_to_program = app_tests.autolocateBinary(
        binary_base, 'apps/samcat', 'samcat')

    # ============================================================
    # Build TestConf list.
    # ============================================================

    # Build list with TestConf objects, analogously to how the output
    # was generated in generate_outputs.sh.
    conf_list = []

    # ============================================================
    # Run on DNA (Adenoviruses).
    # ============================================================

    conf = app_tests.TestConf(
        program=path_to_program,
        args=[ph.inFile('ex1_a1.sam'),
              ph.inFile('ex1_a2.sam'),
              ph.inFile('ex1_a3.sam'),
              '-o', ph.outFile('ex1_merged.sam')],
        to_diff=[(ph.inFile('ex1_merged.sam'),
                  ph.outFile('ex1_merged.sam'))])
    conf_list.append(conf)

    conf = app_tests.TestConf(
        program=path_to_program,
        args=[ph.inFile('ex1_a1.sam'),
              ph.inFile('ex1_a2.sam'),
              ph.inFile('ex1_a3.sam'),
              '-o', ph.outFile('ex1_merged.bam')],
        to_diff=[(ph.inFile('ex1_merged.bam'),
                  ph.outFile('ex1_merged.bam'), "gunzip")])
    conf_list.append(conf)

    # Execute the tests.
    failures = 0
    for conf in conf_list:
        res = app_tests.runTest(conf)
        # Output to the user.
        print ' '.join(conf.commandLineArgs())
        if res:
            print 'OK'
        else:
            failures += 1
            print 'FAILED'

    # Cleanup.
    ph.deleteTempDir()

    print '=============================='
    print '     total tests: %d' % len(conf_list)
    print '    failed tests: %d' % failures
    print 'successful tests: %d' % (len(conf_list) - failures)
    print '=============================='

    # Compute and return return code.
    return failures != 0


if __name__ == '__main__':
    sys.exit(app_tests.main(main))
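
As the docstring notes, the harness takes the source and build roots on the command line. An illustrative invocation follows; both paths are placeholders, not values from the repository:

#   python run_tests.py ~/seqan/sources ~/seqan/build
#
# app_tests.main is assumed (per the entry point above) to parse these two
# positional arguments and forward them to main(source_base, binary_base).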
mock_webapp.py

#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Mocks for classes defined in webapp module.

Use these classes to test functionality depending on the webapp framework.
"""

import StringIO
import urlparse


class MockHeaders(dict):
  """Mocks out headers in webapp.Request and webapp.Response."""

  def add_header(self, key, value):
    self[key] = value


class MockRequest(object):
  """Mocks out webapp.Request.

  Use get()/set() to configure the query parameters for the request.

  Public Members:
    method: A string representing the request type.  Defaults to 'GET'.
    uri: A string representing the requested URI.  Defaults to '/start'.
  """

  uri = property(lambda self: self.url)

  def __init__(self):
    """Initializer."""
    self.method = 'GET'
    self.scheme = 'http'
    self.host = 'foo.com'
    self._path = '/start'
    self.params = {}
    self.params_list = []
    self.headers = MockHeaders()
    self.body = ''
    self.url = ''
    self.path_qs = ''
    self.update_properties()
    self.environ = {}

  def get_path(self):
    return self._path

  def set_path(self, value):
    self._path = value
    self.update_properties()

  path = property(get_path, set_path)

  def set_url(self, url):
    """Set full URL for the request.

    Parses the URL and sets path, scheme, host and parameters correctly.
    """
    o = urlparse.urlparse(url)
    self.path = o.path
    self.scheme = o.scheme or self.scheme
    self.host = o.netloc or self.host
    for (name, value) in urlparse.parse_qs(o.query).items():
      assert len(value) == 1
      self.set(name, value[0])

  def get(self, argument_name, default_value='', allow_multiple=False):
    """Looks up the value of a query parameter.

    Args:
      argument_name: The query parameter key as a string.
      default_value: The default query parameter value as a string if it was
        not supplied.
      allow_multiple: return a list of values with the given name

    Returns:
      If allow_multiple is False (which it is by default), we return the first
      value with the given name given in the request. If it is True, we always
      return a list.
    """
    if argument_name not in self.params:
      if allow_multiple:
        return []
      return default_value

    if allow_multiple:
      return list(self.params[argument_name])

    if isinstance(self.params[argument_name], list):
      return self.params[argument_name][0]
    return self.params[argument_name]

  def get_all(self, argument_name):
    """Returns a list of query parameters with the given name.

    Args:
      argument_name: the name of the query argument.

    Returns:
      A (possibly empty) list of values.
    """
    if argument_name in self.params:
      if isinstance(self.params[argument_name], list):
        return self.params[argument_name]
      else:
        return [self.params[argument_name]]
    return []

  def get_range(self, name, min_value=None, max_value=None, default=0):
    """Parses the given int argument, limiting it to the given range.

    Args:
      name: the name of the argument
      min_value: the minimum int value of the argument (if any)
      max_value: the maximum int value of the argument (if any)
      default: the default value of the argument if it is not given

    Returns:
      An int within the given range for the argument
    """
    value = self.get(name, default)
    if value is None:
      return value
    try:
      value = int(value)
    except ValueError:
      value = default
    if value is not None:
      if max_value is not None:
        value = min(value, max_value)
      if min_value is not None:
        value = max(value, min_value)
    return value

  def set(self, argument_name, value):
    """Sets the value of a query parameter.

    Args:
      argument_name: The string name of the query parameter.
      value: The string value of the query parameter. Pass None to remove
        query parameter.
    """
    self.params_list = filter(lambda p: p[0] != argument_name, self.params_list)
    if value is not None:
      self.params[argument_name] = value
      if type(value) == list:
        for v in value:
          self.params_list.append((argument_name, v))
      else:
        self.params_list.append((argument_name, value))
    else:
      del self.params[argument_name]
    self.update_properties()

  def relative_url(self, other_url, to_application=False):
    """Return an absolute (!) URL by combining self.path with other_url."""
    url = '%s://%s/' % (self.scheme, self.host)
    return urlparse.urljoin(url, other_url)

  def update_properties(self):
    """Update url, path_qs property to be in sync with path and params."""
    self.path_qs = self._path

    params_qs = ''
    for param_value_pair in self.params_list:
      if params_qs:
        params_qs += '&'
      params_qs += param_value_pair[0] + "=" + param_value_pair[1]

    if params_qs:
      self.path_qs += '?' + params_qs
    self.url = self.scheme + '://' + self.host + self.path_qs

  def arguments(self):
    """Gets the set of argument names used in this request."""
    return list(set(p[0] for p in self.params_list))


class MockResponse(object):
  """Mocks out webapp.Response.

  Public Members:
    out: A StringIO instance.
    status: HTTP status code.
    message: HTTP status message.
    headers: A dict of HTTP response headers.
  """

  def __init__(self):
    self.out = StringIO.StringIO()
    self.headers = MockHeaders()
    self.status = 200
    self.status_message = 'OK'

  def set_status(self, status, message=None):
    """Sets the value of status.

    Args:
      status: HTTP status code.
      message: HTTP status message.
    """
    self.status = status
    if message:
      self.status_message = message

  def has_error(self):
    """Indicates whether the response was an error response."""
    return self.status >= 400

  def clear(self):
    """Clears all data written to self.out."""
    self.out.seek(0)
    self.out.truncate(0)
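
A minimal sketch of wiring these mocks into a handler test follows; MyHandler stands for a hypothetical webapp.RequestHandler subclass and is not part of this module:

# Sketch only: MyHandler and its get() behavior are assumed for illustration.
import unittest

from mock_webapp import MockRequest, MockResponse


class MyHandlerTest(unittest.TestCase):

  def test_get_echoes_name_parameter(self):
    request = MockRequest()
    request.set_url('http://foo.com/greet?name=world')

    response = MockResponse()

    handler = MyHandler()  # hypothetical handler under test
    handler.initialize(request, response)
    handler.get()

    self.assertFalse(response.has_error())
    self.assertTrue('world' in response.out.getvalue())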
index.d.ts

// Type definitions for react-router-redux 3.x
// Project: https://github.com/rackt/react-router-redux
// Definitions by: Isman Usoh <http://github.com/isman-usoh>, Noah Shipley <https://github.com/noah79>, Dimitri Rosenberg <https://github.com/rosendi>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.1

import * as Redux from "redux";
import * as History from "history";

/// <reference types="react-router"/>

declare namespace ReactRouterRedux {
    const TRANSITION: string;
    const UPDATE_LOCATION: string;

    const push: PushAction;
    const replace: ReplaceAction;
    const go: GoAction;
    const goBack: GoBackAction;
    const goForward: GoForwardAction;
    const routeActions: RouteActions;

    type LocationDescriptor = History.LocationDescriptor;

    type PushAction = (nextLocation: LocationDescriptor) => void;
    type ReplaceAction = (nextLocation: LocationDescriptor) => void;
    type GoAction = (n: number) => void;
    type GoForwardAction = () => void;
    type GoBackAction = () => void;

    interface RouteActions {
        push: PushAction;
        replace: ReplaceAction;
        go: GoAction;
        goForward: GoForwardAction;
        goBack: GoBackAction;
    }

    interface HistoryMiddleware extends Redux.Middleware {
        listenForReplays(store: Redux.Store<any>, selectLocationState?: Function): void;
        unsubscribe(): void;
    }

    function routeReducer(state?: any, options?: any): Redux.Reducer<any>;
    function syncHistory(history: History.History): HistoryMiddleware;
}

export = ReactRouterRedux;
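
A rough usage sketch under these typings; the reducer layout and the choice of browserHistory are assumptions, not part of the definitions:

// Sketch only: store shape and history choice are illustrative.
import { applyMiddleware, combineReducers, createStore } from "redux";
import { browserHistory } from "react-router";
import ReactRouterRedux = require("react-router-redux");

const { syncHistory, routeReducer } = ReactRouterRedux;

const reducer = combineReducers({ routing: routeReducer });
const historyMiddleware = syncHistory(browserHistory);
const store = createStore(reducer, applyMiddleware(historyMiddleware));

// Replay location changes recorded by devtools, and detach when done.
// Navigation actions come from routeActions (push, replace, go, ...).
historyMiddleware.listenForReplays(store);
historyMiddleware.unsubscribe();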
npm.d.ts

import { BowerJson, DistTag, IShellRunOptions, IonicEnvironment, PackageJson } from '../../definitions';

export declare const ERROR_INVALID_PACKAGE_JSON = "INVALID_PACKAGE_JSON";
export declare const ERROR_INVALID_BOWER_JSON = "INVALID_BOWER_JSON";

/**
 * Lightweight version of https://github.com/npm/validate-npm-package-name
 */
export declare function isValidPackageName(name: string): boolean;
export declare function readPackageJsonFile(path: string): Promise<PackageJson>;
export declare function readBowerJsonFile(path: string): Promise<BowerJson>;

export interface PkgManagerOptions extends IShellRunOptions {
    command?: 'dedupe' | 'install' | 'uninstall';
    pkg?: string;
    global?: boolean;
    link?: boolean;
    save?: boolean;
    saveDev?: boolean;
    saveExact?: boolean;
}

/**
 * Resolves pkg manager intent with command args.
 *
 * @return Promise<args> If the args is an empty array, it means the pkg manager doesn't have that command.
 */
export declare function pkgManagerArgs(env: IonicEnvironment, options?: PkgManagerOptions): Promise<string[]>;
export declare function pkgLatestVersion(env: IonicEnvironment, pkg: string, distTag?: DistTag): Promise<string | undefined>;
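
A sketch of the intended call pattern for pkgManagerArgs; the IonicEnvironment value is provided by the CLI at runtime, and the wrapper function is illustrative:

// Sketch only: `env` is supplied by the CLI; this wrapper is an assumption.
import { IonicEnvironment } from '../../definitions';
import { pkgManagerArgs } from './npm';

async function printInstallCommand(env: IonicEnvironment, pkg: string): Promise<void> {
    const args = await pkgManagerArgs(env, { command: 'install', pkg, saveDev: true });

    if (args.length === 0) {
        // Per the doc comment above: the active package manager
        // has no equivalent of this command.
        return;
    }

    console.log(`would run: ${args.join(' ')}`);
}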
exec.rs

#![feature(test)]

extern crate rasen;
extern crate rasen_dsl;
extern crate test;

use rasen_dsl::prelude::*;
use std::f32::consts::PI;
use test::Bencher;

include!("../../tests/dsl.rs");

#[bench]
fn bench_run_basic_frag(b: &mut Bencher) {
    b.iter(|| {
        basic_frag(
            vec3(0.0f32, 1.0f32, 0.0f32),
            vec2(0.0f32, 0.0f32),
            Value::of(Sampler(Vec4([0.25f32, 0.625f32, 1.0f32, 1.0f32]))),
        );
    });
}

#[bench]
fn bench_run_basic_vert(b: &mut Bencher) {
    b.iter(|| {
        let a_pos = vec3(1.0f32, 2.0f32, 3.0f32);
        let a_normal = vec3(0.0f32, 1.0f32, 0.0f32);
        let a_uv = vec2(0.5f32, 0.5f32);

        #[rustfmt::skip]
        let projection = Mat4([
            1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        ]);
        #[rustfmt::skip]
        let view = Mat4([
            1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        ]);
        #[rustfmt::skip]
        let model = Mat4([
            1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        ]);

        basic_vert(
            a_pos,
            a_normal,
            a_uv,
            Value::of(projection),
            Value::of(view),
            Value::of(model),
        )
    });
}

#[bench]
fn bench_run_functions(b: &mut Bencher) {
    b.iter(|| {
        functions(Value::of(PI));
    });
}
init.rs

use disk::ahci::Ahci;
use disk::ide::Ide;

use env::Environment;

use schemes::file::FileScheme;

use super::config::PciConfig;
use super::common::class::*;
use super::common::subclass::*;
use super::common::programming_interface::*;
/*
use super::common::vendorid::*;
use super::common::deviceid::*;

use audio::ac97::Ac97;
use audio::intelhda::IntelHda;

use network::rtl8139::Rtl8139;
use network::intel8254x::Intel8254x;

use usb::uhci::Uhci;
use usb::ohci::Ohci;
use usb::ehci::Ehci;
use usb::xhci::Xhci;
*/

/// PCI device
pub unsafe fn pci_device(env: &mut Environment,
                         pci: PciConfig,
                         class_id: u8,
                         subclass_id: u8,
                         interface_id: u8,
                         vendor_code: u16,
                         device_code: u16) {
    match (class_id, subclass_id, interface_id) {
        (MASS_STORAGE, IDE, _) => {
            if let Some(module) = FileScheme::new(Ide::disks(pci)) {
                env.schemes.lock().push(module);
            }
        }
        (MASS_STORAGE, SATA, AHCI) => {
            if let Some(module) = FileScheme::new(Ahci::disks(pci)) {
                env.schemes.lock().push(module);
            }
        }
        /*
        (SERIAL_BUS, USB, UHCI) => env.schemes.lock().push(Uhci::new(pci)),
        (SERIAL_BUS, USB, OHCI) => env.schemes.lock().push(Ohci::new(pci)),
        (SERIAL_BUS, USB, EHCI) => env.schemes.lock().push(Ehci::new(pci)),
        (SERIAL_BUS, USB, XHCI) => env.schemes.lock().push(Xhci::new(pci)),
        */
        _ => match (vendor_code, device_code) {
            //(REALTEK, RTL8139) => env.schemes.lock().push(Rtl8139::new(pci)),
            //(INTEL, GBE_82540EM) => env.schemes.lock().push(Intel8254x::new(pci)),
            //(INTEL, AC97_82801AA) => env.schemes.lock().push(Ac97::new(pci)),
            //(INTEL, AC97_ICH4) => env.schemes.lock().push(Ac97::new(pci)),
            //(INTEL, INTELHDA_ICH6) => env.schemes.lock().push(IntelHda::new(pci)),
            _ => (),
        }
    }
}

/// Initialize PCI session
pub unsafe fn pci_init(env: &mut Environment) {
    for bus in 0..256 {
        for slot in 0..32 {
            for func in 0..8 {
                let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);
                let id = pci.read(0);

                if (id & 0xFFFF) != 0xFFFF {
                    let class_id = pci.read(8);

                    /*
                    debug!(" * PCI {}, {}, {}: ID {:X} CL {:X}",
                           bus,
                           slot,
                           func,
                           id,
                           class_id);

                    for i in 0..6 {
                        let bar = pci.read(i * 4 + 0x10);
                        if bar > 0 {
                            debug!(" BAR{}: {:X}", i, bar);

                            pci.write(i * 4 + 0x10, 0xFFFFFFFF);
                            let size = (0xFFFFFFFF - (pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;
                            pci.write(i * 4 + 0x10, bar);

                            if size > 0 {
                                debug!(" {}", size);
                            }
                        }
                    }

                    debugln!("");
                    */

                    pci_device(env,
                               pci,
                               ((class_id >> 24) & 0xFF) as u8,
                               ((class_id >> 16) & 0xFF) as u8,
                               ((class_id >> 8) & 0xFF) as u8,
                               (id & 0xFFFF) as u16,
                               ((id >> 16) & 0xFFFF) as u16);
                }
            }
        }
    }
}
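
The commented-out probe in pci_init uses the standard BAR-sizing trick: write all ones to the register, read it back, mask off the low flag bits, and the two's complement of the result is the size of the decoded region. A standalone sketch of that arithmetic follows; a plain u32 stands in for the config-space read, and real code must restore the original BAR value afterwards:

// Sketch only: `readback` is the BAR value read after writing 0xFFFF_FFFF.
fn memory_bar_size(readback: u32) -> u32 {
    // Clear the four low flag bits of a 32-bit memory BAR, then negate.
    (!(readback & 0xFFFF_FFF0)).wrapping_add(1)
}

fn main() {
    // A device hardwiring the top 20 address bits decodes a 4 KiB region.
    assert_eq!(memory_bar_size(0xFFFF_F000), 0x1000);
}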
(SERIAL_BUS, USB, OHCI) => env.schemes.lock().push(Ohci::new(pci)),
(SERIAL_BUS, USB, EHCI) => env.schemes.lock().push(Ehci::new(pci)),
(SERIAL_BUS, USB, XHCI) => env.schemes.lock().push(Xhci::new(pci)),
*/
_ => match (vendor_code, device_code) {
//(REALTEK, RTL8139) => env.schemes.lock().push(Rtl8139::new(pci)),
//(INTEL, GBE_82540EM) => env.schemes.lock().push(Intel8254x::new(pci)),
//(INTEL, AC97_82801AA) => env.schemes.lock().push(Ac97::new(pci)),
//(INTEL, AC97_ICH4) => env.schemes.lock().push(Ac97::new(pci)),
//(INTEL, INTELHDA_ICH6) => env.schemes.lock().push(IntelHda::new(pci)),
_ => (),
}
}
}
/// Initialize PCI session
pub unsafe fn pci_init(env: &mut Environment) | {
for bus in 0..256 {
for slot in 0..32 {
for func in 0..8 {
let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);
let id = pci.read(0);
if (id & 0xFFFF) != 0xFFFF {
let class_id = pci.read(8);
/*
debug!(" * PCI {}, {}, {}: ID {:X} CL {:X}",
bus,
slot,
func,
id,
class_id);
for i in 0..6 {
let bar = pci.read(i * 4 + 0x10);
if bar > 0 {
debug!(" BAR{}: {:X}", i, bar);
pci.write(i * 4 + 0x10, 0xFFFFFFFF);
let size = (0xFFFFFFFF - (pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;
pci.write(i * 4 + 0x10, bar);
if size > 0 {
debug!(" {}", size);
}
}
}
debugln!("");
*/
pci_device(env,
pci,
((class_id >> 24) & 0xFF) as u8,
((class_id >> 16) & 0xFF) as u8,
((class_id >> 8) & 0xFF) as u8,
(id & 0xFFFF) as u16,
((id >> 16) & 0xFFFF) as u16);
}
}
}
}
} | identifier_body |
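The class/subclass/interface arguments that `pci_device` receives are just the top three bytes of the PCI class register read at config offset 8. An illustrative, self-contained sketch of that decomposition (not part of the source above; the register value is a made-up AHCI example):

```rust
/// Split a raw PCI class register (config offset 8) into the
/// (class, subclass, programming interface) triple used by `pci_device`.
fn decompose_class(class_reg: u32) -> (u8, u8, u8) {
    let class = ((class_reg >> 24) & 0xFF) as u8;
    let subclass = ((class_reg >> 16) & 0xFF) as u8;
    let prog_if = ((class_reg >> 8) & 0xFF) as u8;
    (class, subclass, prog_if)
}

fn main() {
    // 0x01 = mass storage, 0x06 = SATA, 0x01 = AHCI -- the combination
    // that the match above routes to the Ahci::disks() scheme.
    let (class, subclass, prog_if) = decompose_class(0x0106_0100);
    assert_eq!((class, subclass, prog_if), (0x01, 0x06, 0x01));
    println!("class {:#04x}, subclass {:#04x}, prog-if {:#04x}", class, subclass, prog_if);
}
```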
|
index.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics';
import {relative} from 'path';
import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths';
import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host';
import {migrateFile} from './util';
export default function(): Rule {
return async (tree: Tree) => {
const {buildPaths, testPaths} = await getProjectTsConfigPaths(tree);
const basePath = process.cwd();
const allPaths = [...buildPaths, ...testPaths];
if (!allPaths.length) {
throw new SchematicsException(
'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
}
for (const tsconfigPath of allPaths) {
runTypedFormsMigration(tree, tsconfigPath, basePath);
}
};
}
function runTypedFormsMigration(tree: Tree, tsconfigPath: string, basePath: string) {
const {program} = createMigrationProgram(tree, tsconfigPath, basePath);
const typeChecker = program.getTypeChecker();
const sourceFiles =
program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program));
for (const sourceFile of sourceFiles) {
let update: UpdateRecorder|null = null;
const rewriter = (startPos: number, origLength: number, text: string) => {
if (update === null) {
// Lazily initialize update, because most files will not require migration.
update = tree.beginUpdate(relative(basePath, sourceFile.fileName)); | update.insertLeft(startPos, text);
};
migrateFile(sourceFile, typeChecker, rewriter);
if (update !== null) {
tree.commitUpdate(update);
}
}
} | }
update.remove(startPos, origLength); | random_line_split |
index.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics';
import {relative} from 'path';
import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths';
import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host';
import {migrateFile} from './util';
export default function(): Rule {
return async (tree: Tree) => {
const {buildPaths, testPaths} = await getProjectTsConfigPaths(tree);
const basePath = process.cwd();
const allPaths = [...buildPaths, ...testPaths];
if (!allPaths.length) {
throw new SchematicsException(
'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
}
for (const tsconfigPath of allPaths) {
runTypedFormsMigration(tree, tsconfigPath, basePath);
}
};
}
function runTypedFormsMigration(tree: Tree, tsconfigPath: string, basePath: string) | {
const {program} = createMigrationProgram(tree, tsconfigPath, basePath);
const typeChecker = program.getTypeChecker();
const sourceFiles =
program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program));
for (const sourceFile of sourceFiles) {
let update: UpdateRecorder|null = null;
const rewriter = (startPos: number, origLength: number, text: string) => {
if (update === null) {
// Lazily initialize update, because most files will not require migration.
update = tree.beginUpdate(relative(basePath, sourceFile.fileName));
}
update.remove(startPos, origLength);
update.insertLeft(startPos, text);
};
migrateFile(sourceFile, typeChecker, rewriter);
if (update !== null) {
tree.commitUpdate(update);
}
}
} | identifier_body |
|
index.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics';
import {relative} from 'path';
import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths';
import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host';
import {migrateFile} from './util';
export default function(): Rule {
return async (tree: Tree) => {
const {buildPaths, testPaths} = await getProjectTsConfigPaths(tree);
const basePath = process.cwd();
const allPaths = [...buildPaths, ...testPaths];
if (!allPaths.length) {
throw new SchematicsException(
'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
}
for (const tsconfigPath of allPaths) {
runTypedFormsMigration(tree, tsconfigPath, basePath);
}
};
}
function | (tree: Tree, tsconfigPath: string, basePath: string) {
const {program} = createMigrationProgram(tree, tsconfigPath, basePath);
const typeChecker = program.getTypeChecker();
const sourceFiles =
program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program));
for (const sourceFile of sourceFiles) {
let update: UpdateRecorder|null = null;
const rewriter = (startPos: number, origLength: number, text: string) => {
if (update === null) {
// Lazily initialize update, because most files will not require migration.
update = tree.beginUpdate(relative(basePath, sourceFile.fileName));
}
update.remove(startPos, origLength);
update.insertLeft(startPos, text);
};
migrateFile(sourceFile, typeChecker, rewriter);
if (update !== null) {
tree.commitUpdate(update);
}
}
}
| runTypedFormsMigration | identifier_name |
index.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics';
import {relative} from 'path';
import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths';
import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host';
import {migrateFile} from './util';
export default function(): Rule {
return async (tree: Tree) => {
const {buildPaths, testPaths} = await getProjectTsConfigPaths(tree);
const basePath = process.cwd();
const allPaths = [...buildPaths, ...testPaths];
if (!allPaths.length) |
for (const tsconfigPath of allPaths) {
runTypedFormsMigration(tree, tsconfigPath, basePath);
}
};
}
function runTypedFormsMigration(tree: Tree, tsconfigPath: string, basePath: string) {
const {program} = createMigrationProgram(tree, tsconfigPath, basePath);
const typeChecker = program.getTypeChecker();
const sourceFiles =
program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program));
for (const sourceFile of sourceFiles) {
let update: UpdateRecorder|null = null;
const rewriter = (startPos: number, origLength: number, text: string) => {
if (update === null) {
// Lazily initialize update, because most files will not require migration.
update = tree.beginUpdate(relative(basePath, sourceFile.fileName));
}
update.remove(startPos, origLength);
update.insertLeft(startPos, text);
};
migrateFile(sourceFile, typeChecker, rewriter);
if (update !== null) {
tree.commitUpdate(update);
}
}
}
| {
throw new SchematicsException(
'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
} | conditional_block |
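The `rewriter` closure above relies on a simple contract: `migrateFile` reports `(startPos, origLength, text)` spans measured against the original source, and the recorder removes the old span and inserts the replacement without shifting other offsets. A standalone sketch of that contract against a plain string — the helper names and the `UntypedFormControl` replacement are illustrative, not Angular's actual `UpdateRecorder` API:

```typescript
type Rewriter = (startPos: number, origLength: number, text: string) => void;

interface Edit { start: number; length: number; text: string; }

// Apply collected edits against the original text; offsets always refer
// to the unmodified source, mirroring remove() + insertLeft() above.
function applyEdits(source: string, edits: Edit[]): string {
  let out = '';
  let cursor = 0;
  for (const e of [...edits].sort((a, b) => a.start - b.start)) {
    out += source.slice(cursor, e.start) + e.text;
    cursor = e.start + e.length;
  }
  return out + source.slice(cursor);
}

const edits: Edit[] = [];
const rewriter: Rewriter = (startPos, origLength, text) =>
  edits.push({ start: startPos, length: origLength, text });

rewriter(4, 11, 'UntypedFormControl');
console.log(applyEdits('new FormControl();', edits)); // new UntypedFormControl();
```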
add_to_google_spreadsheet_menu.js | odoo.define('board.AddToGoogleSpreadsheetMenu', function (require) {
"use strict";
var ActionManager = require('web.ActionManager');
var core = require('web.core');
var data = require('web.data');
var Domain = require('web.Domain');
var favorites_submenus_registry = require('web.favorites_submenus_registry');
var pyUtils = require('web.py_utils');
var Widget = require('web.Widget');
var QWeb = core.qweb;
var AddToGoogleSpreadsheetMenu = Widget.extend({
events: _.extend({}, Widget.prototype.events, {
'click .add_to_spreadsheet': '_onAddToSpreadsheetClick',
}),
/**
* @override
* @param {Object} params
* @param {Object} params.action an ir.actions description
*/
init: function (parent, params) {
this._super(parent);
this.action = params.action;
},
/**
* @override
*/
start: function () {
if (this.action.type === 'ir.actions.act_window') {
this._render();
}
return this._super.apply(this, arguments);
},
//--------------------------------------------------------------------------
// Private
//--------------------------------------------------------------------------
/**
* @private
*/
_addToSpreadsheet: function () {
// AAB: trigger_up an event that will be intercepted by the controller,
// as soon as the controller is the parent of the control panel
var actionManager = this.findAncestor(function (ancestor) {
return ancestor instanceof ActionManager;
});
var controller = actionManager.getCurrentController();
var searchQuery;
// TO DO: for now the domains in query are evaluated.
// This should be changed I think.
this.trigger_up('get_search_query', {
callback: function (query) {
searchQuery = query;
}
});
var modelName = this.action.res_model;
var list_view = _.findWhere(controller.widget.actionViews, {type: 'list'});
var list_view_id = list_view ? list_view.viewID : false;
var domain = searchQuery.domain;
var groupBys = pyUtils.eval('groupbys', searchQuery.groupBys).join(" ");
var ds = new data.DataSet(this, 'google.drive.config');
ds.call('set_spreadsheet', [modelName, Domain.prototype.arrayToString(domain), groupBys, list_view_id])
.then(function (res) {
if (res.url) |
});
},
/**
* Renders the `SearchView.addtogooglespreadsheet` template.
*
* @private
*/
_render: function () {
var $el = QWeb.render('SearchView.addtogooglespreadsheet', {widget: this});
this._replaceElement($el);
},
//--------------------------------------------------------------------------
// Handlers
//--------------------------------------------------------------------------
/**
* @private
* @param {jQueryEvent} event
*/
_onAddToSpreadsheetClick: function (event) {
event.preventDefault();
event.stopPropagation();
this._addToSpreadsheet();
},
});
favorites_submenus_registry.add('add_to_google_spreadsheet_menu', AddToGoogleSpreadsheetMenu, 20);
return AddToGoogleSpreadsheetMenu;
});
| {
window.open(res.url, '_blank');
} | conditional_block |
add_to_google_spreadsheet_menu.js | odoo.define('board.AddToGoogleSpreadsheetMenu', function (require) {
"use strict";
var ActionManager = require('web.ActionManager');
var core = require('web.core');
var data = require('web.data');
var Domain = require('web.Domain');
var favorites_submenus_registry = require('web.favorites_submenus_registry');
var pyUtils = require('web.py_utils');
var Widget = require('web.Widget');
var QWeb = core.qweb;
var AddToGoogleSpreadsheetMenu = Widget.extend({
events: _.extend({}, Widget.prototype.events, {
'click .add_to_spreadsheet': '_onAddToSpreadsheetClick',
}),
/**
* @override
* @param {Object} params
* @param {Object} params.action an ir.actions description
*/
init: function (parent, params) {
this._super(parent);
this.action = params.action;
},
/**
* @override
*/
start: function () {
if (this.action.type === 'ir.actions.act_window') {
this._render();
}
return this._super.apply(this, arguments);
},
//--------------------------------------------------------------------------
// Private
//--------------------------------------------------------------------------
/**
* @private
*/
_addToSpreadsheet: function () {
// AAB: trigger_up an event that will be intercepted by the controller,
// as soon as the controller is the parent of the control panel
var actionManager = this.findAncestor(function (ancestor) {
return ancestor instanceof ActionManager;
});
var controller = actionManager.getCurrentController();
var searchQuery;
// TO DO: for now the domains in query are evaluated.
// This should be changed I think.
this.trigger_up('get_search_query', {
callback: function (query) {
searchQuery = query;
}
});
var modelName = this.action.res_model;
var list_view = _.findWhere(controller.widget.actionViews, {type: 'list'});
var list_view_id = list_view ? list_view.viewID : false;
var domain = searchQuery.domain;
var groupBys = pyUtils.eval('groupbys', searchQuery.groupBys).join(" ");
var ds = new data.DataSet(this, 'google.drive.config');
ds.call('set_spreadsheet', [modelName, Domain.prototype.arrayToString(domain), groupBys, list_view_id])
.then(function (res) {
if (res.url) {
window.open(res.url, '_blank');
}
});
},
/**
* Renders the `SearchView.addtogooglespreadsheet` template.
*
* @private
*/
_render: function () {
var $el = QWeb.render('SearchView.addtogooglespreadsheet', {widget: this});
this._replaceElement($el);
},
//--------------------------------------------------------------------------
// Handlers | //--------------------------------------------------------------------------
/**
* @private
* @param {jQueryEvent} event
*/
_onAddToSpreadsheetClick: function (event) {
event.preventDefault();
event.stopPropagation();
this._addToSpreadsheet();
},
});
favorites_submenus_registry.add('add_to_google_spreadsheet_menu', AddToGoogleSpreadsheetMenu, 20);
return AddToGoogleSpreadsheetMenu;
}); | random_line_split |
|
show.js | Slipmat.Views.LabelShow = Backbone.ModularView.extend({
tagName: "main",
className: "group",
template: JST["labels/show"],
initialize: function (options) {
this.router = options.router;
this.listenTo(this.model, "sync change", this.render);
},
events: {
"submit": "addComment"
},
render: function () {
var content = this.template({ label: this.model });
this.$el.html(content);
if (Slipmat.currentUser.isSignedIn()) |
this.listContributors();
this.renderComments();
this.renderRecords();
return this;
},
renderRecords: function () {
var records = this.model.records(),
template = JST["records/_record"],
header = JST["layouts/_paginationHeader"]({ collection: records }),
footer = JST["layouts/_paginationFooter"]({ collection: records }),
$el = this.$(".content-records");
this.$(".pagination-header").html(header);
this.$(".pagination-footer").html(footer);
records.forEach(record => {
var subview = template({ model: record });
$el.append(subview);
});
}
});
| {
var $textarea = $('<textarea class="form comment-form">');
this.$("#new-comment").prepend($textarea);
} | conditional_block |
show.js | Slipmat.Views.LabelShow = Backbone.ModularView.extend({
tagName: "main",
className: "group",
template: JST["labels/show"],
initialize: function (options) {
this.router = options.router;
this.listenTo(this.model, "sync change", this.render);
},
events: {
"submit": "addComment"
},
render: function () {
var content = this.template({ label: this.model });
this.$el.html(content);
if (Slipmat.currentUser.isSignedIn()) {
var $textarea = $('<textarea class="form comment-form">');
this.$("#new-comment").prepend($textarea);
}
this.listContributors();
this.renderComments();
this.renderRecords();
return this; | renderRecords: function () {
var records = this.model.records(),
template = JST["records/_record"],
header = JST["layouts/_paginationHeader"]({ collection: records }),
footer = JST["layouts/_paginationFooter"]({ collection: records }),
$el = this.$(".content-records");
this.$(".pagination-header").html(header);
this.$(".pagination-footer").html(footer);
records.forEach(record => {
var subview = template({ model: record });
$el.append(subview);
});
}
}); | },
| random_line_split |
GroupQueryTreeRequest.py | # -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class GroupQueryTreeRequest(Request):
r'''A /g_queryTree request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.GroupQueryTreeRequest(
... node_id=0,
... include_controls=True,
... )
>>> request
GroupQueryTreeRequest(
include_controls=True,
node_id=0
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(57, 0, 1)
::
>>> message.address == requesttools.RequestId.GROUP_QUERY_TREE
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_include_controls',
'_node_id',
)
### INITIALIZER ###
def __init__(
self,
include_controls=False,
node_id=None,
):
Request.__init__(self)
self._node_id = node_id
self._include_controls = bool(include_controls)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
node_id = int(self.node_id)
include_controls = int(self.include_controls)
message = osctools.OscMessage(
request_id,
node_id,
include_controls,
)
return message
### PUBLIC PROPERTIES ###
@property
def include_controls(self):
return self._include_controls
@property
def node_id(self):
return self._node_id
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.QueryTreeResponse: None,
}
@property
def | (self):
from supriya.tools import requesttools
return requesttools.RequestId.GROUP_QUERY_TREE | request_id | identifier_name |
GroupQueryTreeRequest.py | # -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class GroupQueryTreeRequest(Request):
| r'''A /g_queryTree request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.GroupQueryTreeRequest(
... node_id=0,
... include_controls=True,
... )
>>> request
GroupQueryTreeRequest(
include_controls=True,
node_id=0
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(57, 0, 1)
::
>>> message.address == requesttools.RequestId.GROUP_QUERY_TREE
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_include_controls',
'_node_id',
)
### INITIALIZER ###
def __init__(
self,
include_controls=False,
node_id=None,
):
Request.__init__(self)
self._node_id = node_id
self._include_controls = bool(include_controls)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
node_id = int(self.node_id)
include_controls = int(self.include_controls)
message = osctools.OscMessage(
request_id,
node_id,
include_controls,
)
return message
### PUBLIC PROPERTIES ###
@property
def include_controls(self):
return self._include_controls
@property
def node_id(self):
return self._node_id
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.QueryTreeResponse: None,
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.GROUP_QUERY_TREE | identifier_body |
|
GroupQueryTreeRequest.py | # -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class GroupQueryTreeRequest(Request):
r'''A /g_queryTree request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.GroupQueryTreeRequest(
... node_id=0,
... include_controls=True,
... )
>>> request
GroupQueryTreeRequest(
include_controls=True,
node_id=0
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(57, 0, 1)
::
>>> message.address == requesttools.RequestId.GROUP_QUERY_TREE
True
'''
### CLASS VARIABLES ###
__slots__ = ( | )
### INITIALIZER ###
def __init__(
self,
include_controls=False,
node_id=None,
):
Request.__init__(self)
self._node_id = node_id
self._include_controls = bool(include_controls)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
node_id = int(self.node_id)
include_controls = int(self.include_controls)
message = osctools.OscMessage(
request_id,
node_id,
include_controls,
)
return message
### PUBLIC PROPERTIES ###
@property
def include_controls(self):
return self._include_controls
@property
def node_id(self):
return self._node_id
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.QueryTreeResponse: None,
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.GROUP_QUERY_TREE | '_include_controls',
'_node_id', | random_line_split |
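The doctest in the class docstring already fixes the wire format; the same round trip as a plain script, assuming the `supriya` package these modules belong to is importable:

```python
from supriya.tools import requesttools

request = requesttools.GroupQueryTreeRequest(node_id=0, include_controls=True)
message = request.to_osc_message()

# Address 57 is RequestId.GROUP_QUERY_TREE; the remaining arguments are
# the node id and the include-controls flag encoded as an integer.
assert message.address == requesttools.RequestId.GROUP_QUERY_TREE
print(message)  # OscMessage(57, 0, 1)
```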
main.rs | // Copyright (c) 2015 Sergey "SnakE" Gromov
//
// See the file license.txt for copying permission.
//! # Radix Conversion Utility
extern crate num;
mod table;
mod convtable;
use std::{env, path};
use num::BigInt;
use convtable::ConvTable;
use std::error::Error;
use num::traits::Num;
fn main() {
let mut table = ConvTable::new();
let args: Vec<String> = env::args().collect();
if args.len() == 1 {
usage(path::Path::new(&args[0]).file_name().unwrap().to_str().unwrap());
return;
}
for arg in args.into_iter().skip(1) {
let arg = arg.trim();
let (v, radix) = if let Some(s) = strip_prefix(&arg, "0x") {
(s, 16)
} else if let Some(s) = strip_prefix(&arg, "0b") {
(s, 2)
} else if let Some(s) = strip_prefix(&arg, "0o") {
(s, 8)
} else {
(&*arg, 10)
};
match BigInt::from_str_radix(&v, radix) {
Ok(x) => table.push_result(&arg, &x),
Err(e) => table.push_error(&arg, e.description()),
};
}
table.print();
}
/// Return input string without prefix if prefix matches.
fn | <'a>(s: &'a str, prefix: &str) -> Option<&'a str> {
if s.starts_with(prefix) {
Some(&s[prefix.len()..])
} else {
None
}
}
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
fn usage(tool: &str) {
println!("\
Display numbers in multiple radices
(c) 2015 Sergey \"SnakE\" Gromov
Version {}
Usage: {} num [num ...]
num decimal, hex, octal, or binary number
decimal start with a digit
hex start with `0x`
octal start with `0o`
binary start with `0b`", VERSION, tool);
}
| strip_prefix | identifier_name |
main.rs | // Copyright (c) 2015 Sergey "SnakE" Gromov
//
// See the file license.txt for copying permission.
//! # Radix Conversion Utility
extern crate num;
mod table;
mod convtable;
use std::{env, path};
use num::BigInt;
use convtable::ConvTable;
use std::error::Error;
use num::traits::Num;
fn main() {
let mut table = ConvTable::new();
let args: Vec<String> = env::args().collect();
if args.len() == 1 {
usage(path::Path::new(&args[0]).file_name().unwrap().to_str().unwrap());
return;
}
for arg in args.into_iter().skip(1) {
let arg = arg.trim();
let (v, radix) = if let Some(s) = strip_prefix(&arg, "0x") {
(s, 16)
} else if let Some(s) = strip_prefix(&arg, "0b") {
(s, 2)
} else if let Some(s) = strip_prefix(&arg, "0o") {
(s, 8)
} else {
(&*arg, 10)
};
match BigInt::from_str_radix(&v, radix) {
Ok(x) => table.push_result(&arg, &x),
Err(e) => table.push_error(&arg, e.description()),
};
}
table.print();
}
/// Return input string without prefix if prefix matches.
fn strip_prefix<'a>(s: &'a str, prefix: &str) -> Option<&'a str> |
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
fn usage(tool: &str) {
println!("\
Display numbers in multiple radices
(c) 2015 Sergey \"SnakE\" Gromov
Version {}
Usage: {} num [num ...]
num decimal, hex, octal, or binary number
decimal start with a digit
hex start with `0x`
octal start with `0o`
binary start with `0b`", VERSION, tool);
}
| {
if s.starts_with(prefix) {
Some(&s[prefix.len()..])
} else {
None
}
} | identifier_body |
main.rs | // Copyright (c) 2015 Sergey "SnakE" Gromov
//
// See the file license.txt for copying permission.
//! # Radix Conversion Utility
extern crate num;
mod table;
mod convtable;
use std::{env, path};
use num::BigInt;
use convtable::ConvTable;
use std::error::Error;
use num::traits::Num;
fn main() {
let mut table = ConvTable::new();
let args: Vec<String> = env::args().collect();
if args.len() == 1 {
usage(path::Path::new(&args[0]).file_name().unwrap().to_str().unwrap());
return;
}
for arg in args.into_iter().skip(1) {
let arg = arg.trim();
let (v, radix) = if let Some(s) = strip_prefix(&arg, "0x") {
(s, 16)
} else if let Some(s) = strip_prefix(&arg, "0b") {
(s, 2)
} else if let Some(s) = strip_prefix(&arg, "0o") {
(s, 8) | match BigInt::from_str_radix(&v, radix) {
Ok(x) => table.push_result(&arg, &x),
Err(e) => table.push_error(&arg, e.description()),
};
}
table.print();
}
/// Return input string without prefix if prefix matches.
fn strip_prefix<'a>(s: &'a str, prefix: &str) -> Option<&'a str> {
if s.starts_with(prefix) {
Some(&s[prefix.len()..])
} else {
None
}
}
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
fn usage(tool: &str) {
println!("\
Display numbers in multiple radices
(c) 2015 Sergey \"SnakE\" Gromov
Version {}
Usage: {} num [num ...]
num decimal, hex, octal, or binary number
decimal start with a digit
hex start with `0x`
octal start with `0o`
binary start with `0b`", VERSION, tool);
} | } else {
(&*arg, 10)
};
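The prefix chain above is the whole radix-detection story: strip a known prefix, remember its base, default to decimal. The same logic as a compact standalone sketch (not the program's full `ConvTable` pipeline), using std's `str::strip_prefix`, which since Rust 1.45 does what the hand-rolled helper does:

```rust
fn detect_radix(arg: &str) -> (&str, u32) {
    if let Some(s) = arg.strip_prefix("0x") {
        (s, 16)
    } else if let Some(s) = arg.strip_prefix("0b") {
        (s, 2)
    } else if let Some(s) = arg.strip_prefix("0o") {
        (s, 8)
    } else {
        (arg, 10)
    }
}

fn main() {
    assert_eq!(detect_radix("0x1f"), ("1f", 16));
    assert_eq!(detect_radix("0b101"), ("101", 2));
    assert_eq!(detect_radix("0o17"), ("17", 8));
    assert_eq!(detect_radix("42"), ("42", 10));
    println!("all radix prefixes detected");
}
```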
| random_line_split |
view.rs | /*!
PE view.
*/
use std::prelude::v1::*;
use std::{cmp, slice};
use crate::Result;
use super::image::*;
use super::pe::validate_headers;
use super::{Align, Pe, PeObject};
/// View into a mapped PE image.
#[derive(Copy, Clone)]
pub struct PeView<'a> {
image: &'a [u8],
}
current_target! {
impl PeView<'static> {
/// Constructs a view of the module this code is executing in.
#[inline]
pub unsafe fn new() -> PeView<'static> {
Self::module(image_base() as *const _ as *const u8)
}
}
}
impl<'a> PeView<'a> {
/// Constructs a view from a byte slice.
///
/// # Errors
///
/// * [`Bounds`](../enum.Error.html#variant.Bounds):
/// The byte slice is too small to fit the PE headers.
///
/// * [`Misaligned`](../enum.Error.html#variant.Misaligned):
/// The minimum alignment of 4 is not satisfied.
///
/// * [`BadMagic`](../enum.Error.html#variant.BadMagic):
/// This is not a PE file.
///
/// * [`PeMagic`](../enum.Error.html#variant.PeMagic):
/// Trying to parse a PE32 file with the PE32+ parser and vice versa.
///
/// * [`Insanity`](../enum.Error.html#variant.Insanity):
/// Reasonable limits on `e_lfanew`, `SizeOfHeaders` or `NumberOfSections` are exceeded.
pub fn from_bytes<T: AsRef<[u8]> + ?Sized>(image: &'a T) -> Result<PeView<'a>> {
let image = image.as_ref();
let _ = validate_headers(image)?;
Ok(PeView { image })
}
/// Constructs a new view from module handle.
///
/// # Safety
///
/// The underlying memory is borrowed and an unbounded lifetime is returned.
/// Ensure the lifetime outlives this view instance!
///
/// No sanity or safety checks are done to make sure this is really PE32(+) image.
/// When using this with a `HMODULE` from the system the caller must be sure this is a PE32(+) image.
#[inline]
pub unsafe fn module(base: *const u8) -> PeView<'a> {
let dos = &*(base as *const IMAGE_DOS_HEADER);
let nt = &*(base.offset(dos.e_lfanew as isize) as *const IMAGE_NT_HEADERS);
PeView {
image: slice::from_raw_parts(base, nt.OptionalHeader.SizeOfImage as usize),
}
}
/// Converts the view to file alignment.
pub fn to_file(self) -> Vec<u8> {
let (sizeof_headers, sizeof_image) = {
let optional_header = self.optional_header();
(optional_header.SizeOfHeaders, optional_header.SizeOfImage)
};
// Figure out the size of the file image | }
// Clamp to the actual image size...
file_size = cmp::min(file_size, sizeof_image);
// Zero fill the underlying file
let mut vec = vec![0u8; file_size as usize];
// Start by copying the headers
let image = self.image();
unsafe {
// Validated by constructor
let dest_headers = vec.get_unchecked_mut(..sizeof_headers as usize);
let src_headers = image.get_unchecked(..sizeof_headers as usize);
dest_headers.copy_from_slice(src_headers);
}
// Copy the section image data
for section in self.section_headers() {
let dest = vec.get_mut(section.PointerToRawData as usize..u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData) as usize);
let src = image.get(section.VirtualAddress as usize..u32::wrapping_add(section.VirtualAddress, section.VirtualSize) as usize);
// Skip invalid sections...
if let (Some(dest), Some(src)) = (dest, src) {
dest.copy_from_slice(src);
}
}
vec
}
}
//----------------------------------------------------------------
unsafe impl<'a> Pe<'a> for PeView<'a> {}
unsafe impl<'a> PeObject<'a> for PeView<'a> {
fn image(&self) -> &'a [u8] {
self.image
}
fn align(&self) -> Align {
Align::Section
}
#[cfg(feature = "serde")]
fn serde_name(&self) -> &'static str {
"PeView"
}
}
//----------------------------------------------------------------
#[cfg(feature = "serde")]
impl<'a> serde::Serialize for PeView<'a> {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {
super::pe::serialize_pe(*self, serializer)
}
}
//----------------------------------------------------------------
#[cfg(test)]
mod tests {
use crate::Error;
use super::PeView;
#[test]
fn from_byte_slice() {
assert!(match PeView::from_bytes(&[]) { Err(Error::Bounds) => true, _ => false });
}
} | let mut file_size = sizeof_headers;
for section in self.section_headers() {
file_size = cmp::max(file_size, u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData)); | random_line_split |
view.rs | /*!
PE view.
*/
use std::prelude::v1::*;
use std::{cmp, slice};
use crate::Result;
use super::image::*;
use super::pe::validate_headers;
use super::{Align, Pe, PeObject};
/// View into a mapped PE image.
#[derive(Copy, Clone)]
pub struct PeView<'a> {
image: &'a [u8],
}
current_target! {
impl PeView<'static> {
/// Constructs a view of the module this code is executing in.
#[inline]
pub unsafe fn new() -> PeView<'static> {
Self::module(image_base() as *const _ as *const u8)
}
}
}
impl<'a> PeView<'a> {
/// Constructs a view from a byte slice.
///
/// # Errors
///
/// * [`Bounds`](../enum.Error.html#variant.Bounds):
/// The byte slice is too small to fit the PE headers.
///
/// * [`Misaligned`](../enum.Error.html#variant.Misaligned):
/// The minimum alignment of 4 is not satisfied.
///
/// * [`BadMagic`](../enum.Error.html#variant.BadMagic):
/// This is not a PE file.
///
/// * [`PeMagic`](../enum.Error.html#variant.PeMagic):
/// Trying to parse a PE32 file with the PE32+ parser and vice versa.
///
/// * [`Insanity`](../enum.Error.html#variant.Insanity):
/// Reasonable limits on `e_lfanew`, `SizeOfHeaders` or `NumberOfSections` are exceeded.
pub fn from_bytes<T: AsRef<[u8]> + ?Sized>(image: &'a T) -> Result<PeView<'a>> {
let image = image.as_ref();
let _ = validate_headers(image)?;
Ok(PeView { image })
}
/// Constructs a new view from module handle.
///
/// # Safety
///
/// The underlying memory is borrowed and an unbounded lifetime is returned.
/// Ensure the lifetime outlives this view instance!
///
/// No sanity or safety checks are done to make sure this is really PE32(+) image.
/// When using this with a `HMODULE` from the system the caller must be sure this is a PE32(+) image.
#[inline]
pub unsafe fn module(base: *const u8) -> PeView<'a> {
let dos = &*(base as *const IMAGE_DOS_HEADER);
let nt = &*(base.offset(dos.e_lfanew as isize) as *const IMAGE_NT_HEADERS);
PeView {
image: slice::from_raw_parts(base, nt.OptionalHeader.SizeOfImage as usize),
}
}
/// Converts the view to file alignment.
pub fn to_file(self) -> Vec<u8> {
let (sizeof_headers, sizeof_image) = {
let optional_header = self.optional_header();
(optional_header.SizeOfHeaders, optional_header.SizeOfImage)
};
// Figure out the size of the file image
let mut file_size = sizeof_headers;
for section in self.section_headers() {
file_size = cmp::max(file_size, u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData));
}
// Clamp to the actual image size...
file_size = cmp::min(file_size, sizeof_image);
// Zero fill the underlying file
let mut vec = vec![0u8; file_size as usize];
// Start by copying the headers
let image = self.image();
unsafe {
// Validated by constructor
let dest_headers = vec.get_unchecked_mut(..sizeof_headers as usize);
let src_headers = image.get_unchecked(..sizeof_headers as usize);
dest_headers.copy_from_slice(src_headers);
}
// Copy the section image data
for section in self.section_headers() {
let dest = vec.get_mut(section.PointerToRawData as usize..u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData) as usize);
let src = image.get(section.VirtualAddress as usize..u32::wrapping_add(section.VirtualAddress, section.VirtualSize) as usize);
// Skip invalid sections...
if let (Some(dest), Some(src)) = (dest, src) |
}
vec
}
}
//----------------------------------------------------------------
unsafe impl<'a> Pe<'a> for PeView<'a> {}
unsafe impl<'a> PeObject<'a> for PeView<'a> {
fn image(&self) -> &'a [u8] {
self.image
}
fn align(&self) -> Align {
Align::Section
}
#[cfg(feature = "serde")]
fn serde_name(&self) -> &'static str {
"PeView"
}
}
//----------------------------------------------------------------
#[cfg(feature = "serde")]
impl<'a> serde::Serialize for PeView<'a> {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {
super::pe::serialize_pe(*self, serializer)
}
}
//----------------------------------------------------------------
#[cfg(test)]
mod tests {
use crate::Error;
use super::PeView;
#[test]
fn from_byte_slice() {
assert!(match PeView::from_bytes(&[]) { Err(Error::Bounds) => true, _ => false });
}
}
| {
dest.copy_from_slice(src);
} | conditional_block |
view.rs | /*!
PE view.
*/
use std::prelude::v1::*;
use std::{cmp, slice};
use crate::Result;
use super::image::*;
use super::pe::validate_headers;
use super::{Align, Pe, PeObject};
/// View into a mapped PE image.
#[derive(Copy, Clone)]
pub struct PeView<'a> {
image: &'a [u8],
}
current_target! {
impl PeView<'static> {
/// Constructs a view of the module this code is executing in.
#[inline]
pub unsafe fn new() -> PeView<'static> {
Self::module(image_base() as *const _ as *const u8)
}
}
}
impl<'a> PeView<'a> {
/// Constructs a view from a byte slice.
///
/// # Errors
///
/// * [`Bounds`](../enum.Error.html#variant.Bounds):
/// The byte slice is too small to fit the PE headers.
///
/// * [`Misaligned`](../enum.Error.html#variant.Misaligned):
/// The minimum alignment of 4 is not satisfied.
///
/// * [`BadMagic`](../enum.Error.html#variant.BadMagic):
/// This is not a PE file.
///
/// * [`PeMagic`](../enum.Error.html#variant.PeMagic):
/// Trying to parse a PE32 file with the PE32+ parser and vice versa.
///
/// * [`Insanity`](../enum.Error.html#variant.Insanity):
/// Reasonable limits on `e_lfanew`, `SizeOfHeaders` or `NumberOfSections` are exceeded.
pub fn from_bytes<T: AsRef<[u8]> + ?Sized>(image: &'a T) -> Result<PeView<'a>> {
let image = image.as_ref();
let _ = validate_headers(image)?;
Ok(PeView { image })
}
/// Constructs a new view from module handle.
///
/// # Safety
///
/// The underlying memory is borrowed and an unbounded lifetime is returned.
/// Ensure the lifetime outlives this view instance!
///
/// No sanity or safety checks are done to make sure this is really PE32(+) image.
/// When using this with a `HMODULE` from the system the caller must be sure this is a PE32(+) image.
#[inline]
pub unsafe fn module(base: *const u8) -> PeView<'a> {
let dos = &*(base as *const IMAGE_DOS_HEADER);
let nt = &*(base.offset(dos.e_lfanew as isize) as *const IMAGE_NT_HEADERS);
PeView {
image: slice::from_raw_parts(base, nt.OptionalHeader.SizeOfImage as usize),
}
}
/// Converts the view to file alignment.
pub fn to_file(self) -> Vec<u8> {
let (sizeof_headers, sizeof_image) = {
let optional_header = self.optional_header();
(optional_header.SizeOfHeaders, optional_header.SizeOfImage)
};
// Figure out the size of the file image
let mut file_size = sizeof_headers;
for section in self.section_headers() {
file_size = cmp::max(file_size, u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData));
}
// Clamp to the actual image size...
file_size = cmp::min(file_size, sizeof_image);
// Zero fill the underlying file
let mut vec = vec![0u8; file_size as usize];
// Start by copying the headers
let image = self.image();
unsafe {
// Validated by constructor
let dest_headers = vec.get_unchecked_mut(..sizeof_headers as usize);
let src_headers = image.get_unchecked(..sizeof_headers as usize);
dest_headers.copy_from_slice(src_headers);
}
// Copy the section image data
for section in self.section_headers() {
let dest = vec.get_mut(section.PointerToRawData as usize..u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData) as usize);
let src = image.get(section.VirtualAddress as usize..u32::wrapping_add(section.VirtualAddress, section.VirtualSize) as usize);
// Skip invalid sections...
if let (Some(dest), Some(src)) = (dest, src) {
dest.copy_from_slice(src);
}
}
vec
}
}
//----------------------------------------------------------------
unsafe impl<'a> Pe<'a> for PeView<'a> {}
unsafe impl<'a> PeObject<'a> for PeView<'a> {
fn image(&self) -> &'a [u8] {
self.image
}
fn align(&self) -> Align {
Align::Section
}
#[cfg(feature = "serde")]
fn serde_name(&self) -> &'static str |
}
//----------------------------------------------------------------
#[cfg(feature = "serde")]
impl<'a> serde::Serialize for PeView<'a> {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {
super::pe::serialize_pe(*self, serializer)
}
}
//----------------------------------------------------------------
#[cfg(test)]
mod tests {
use crate::Error;
use super::PeView;
#[test]
fn from_byte_slice() {
assert!(match PeView::from_bytes(&[]) { Err(Error::Bounds) => true, _ => false });
}
}
| {
"PeView"
} | identifier_body |
view.rs | /*!
PE view.
*/
use std::prelude::v1::*;
use std::{cmp, slice};
use crate::Result;
use super::image::*;
use super::pe::validate_headers;
use super::{Align, Pe, PeObject};
/// View into a mapped PE image.
#[derive(Copy, Clone)]
pub struct PeView<'a> {
image: &'a [u8],
}
current_target! {
impl PeView<'static> {
/// Constructs a view of the module this code is executing in.
#[inline]
pub unsafe fn new() -> PeView<'static> {
Self::module(image_base() as *const _ as *const u8)
}
}
}
impl<'a> PeView<'a> {
/// Constructs a view from a byte slice.
///
/// # Errors
///
/// * [`Bounds`](../enum.Error.html#variant.Bounds):
/// The byte slice is too small to fit the PE headers.
///
/// * [`Misaligned`](../enum.Error.html#variant.Misaligned):
/// The minimum alignment of 4 is not satisfied.
///
/// * [`BadMagic`](../enum.Error.html#variant.BadMagic):
/// This is not a PE file.
///
/// * [`PeMagic`](../enum.Error.html#variant.PeMagic):
/// Trying to parse a PE32 file with the PE32+ parser and vice versa.
///
/// * [`Insanity`](../enum.Error.html#variant.Insanity):
/// Reasonable limits on `e_lfanew`, `SizeOfHeaders` or `NumberOfSections` are exceeded.
pub fn from_bytes<T: AsRef<[u8]> + ?Sized>(image: &'a T) -> Result<PeView<'a>> {
let image = image.as_ref();
let _ = validate_headers(image)?;
Ok(PeView { image })
}
/// Constructs a new view from module handle.
///
/// # Safety
///
/// The underlying memory is borrowed and an unbounded lifetime is returned.
/// Ensure the lifetime outlives this view instance!
///
/// No sanity or safety checks are done to make sure this is really PE32(+) image.
/// When using this with a `HMODULE` from the system the caller must be sure this is a PE32(+) image.
#[inline]
pub unsafe fn module(base: *const u8) -> PeView<'a> {
let dos = &*(base as *const IMAGE_DOS_HEADER);
let nt = &*(base.offset(dos.e_lfanew as isize) as *const IMAGE_NT_HEADERS);
PeView {
image: slice::from_raw_parts(base, nt.OptionalHeader.SizeOfImage as usize),
}
}
/// Converts the view to file alignment.
pub fn to_file(self) -> Vec<u8> {
let (sizeof_headers, sizeof_image) = {
let optional_header = self.optional_header();
(optional_header.SizeOfHeaders, optional_header.SizeOfImage)
};
// Figure out the size of the file image
let mut file_size = sizeof_headers;
for section in self.section_headers() {
file_size = cmp::max(file_size, u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData));
}
// Clamp to the actual image size...
file_size = cmp::min(file_size, sizeof_image);
// Zero fill the underlying file
let mut vec = vec![0u8; file_size as usize];
// Start by copying the headers
let image = self.image();
unsafe {
// Validated by constructor
let dest_headers = vec.get_unchecked_mut(..sizeof_headers as usize);
let src_headers = image.get_unchecked(..sizeof_headers as usize);
dest_headers.copy_from_slice(src_headers);
}
// Copy the section image data
for section in self.section_headers() {
let dest = vec.get_mut(section.PointerToRawData as usize..u32::wrapping_add(section.PointerToRawData, section.SizeOfRawData) as usize);
let src = image.get(section.VirtualAddress as usize..u32::wrapping_add(section.VirtualAddress, section.VirtualSize) as usize);
// Skip invalid sections...
if let (Some(dest), Some(src)) = (dest, src) {
dest.copy_from_slice(src);
}
}
vec
}
}
//----------------------------------------------------------------
unsafe impl<'a> Pe<'a> for PeView<'a> {}
unsafe impl<'a> PeObject<'a> for PeView<'a> {
fn image(&self) -> &'a [u8] {
self.image
}
fn align(&self) -> Align {
Align::Section
}
#[cfg(feature = "serde")]
fn serde_name(&self) -> &'static str {
"PeView"
}
}
//----------------------------------------------------------------
#[cfg(feature = "serde")]
impl<'a> serde::Serialize for PeView<'a> {
fn | <S: serde::Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {
super::pe::serialize_pe(*self, serializer)
}
}
//----------------------------------------------------------------
#[cfg(test)]
mod tests {
use crate::Error;
use super::PeView;
#[test]
fn from_byte_slice() {
assert!(match PeView::from_bytes(&[]) { Err(Error::Bounds) => true, _ => false });
}
}
| serialize | identifier_name |
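Typical entry points for the view above, sketched on the assumption that this file is the `pelite` crate's `pe64` module (the import path and file names are placeholders; errors are just bubbled up):

```rust
use std::fs;
use pelite::pe64::PeView; // assumed module path for the file above

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // from_bytes() re-runs the header validation the unit test exercises;
    // an empty slice fails with Error::Bounds, exactly as asserted above.
    let bytes = fs::read("sample-mapped.bin")?; // placeholder input
    let view = PeView::from_bytes(&bytes)?;

    // to_file() copies each section back from virtual to file alignment,
    // so the result can be written out as an on-disk PE image again.
    fs::write("sample-roundtrip.exe", view.to_file())?;
    Ok(())
}
```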
test_removeSign.py | #!/usr/bin/env python2
##
# autosign
# https://github.com/leosartaj/autosign.git
#
# copyright (c) 2014 sartaj singh
# licensed under the mit license.
##
import unittest
import os, shutil
import helper
from autosign.main import removeSign, isSign
from autosign.exce import UnsignedError
class TestremoveSign(unittest.TestCase):
"""
tests the removeSign function in main module
"""
def | (self):
self.dire = os.path.dirname(__file__)
self.signedfile = os.path.join(self.dire, 'testData/toBeSigned.py')
self.signed = os.path.join(self.dire, 'testData/test_signedfile.py')
shutil.copyfile(self.signedfile, self.signed)
self.unsigned = os.path.join(self.dire, 'testData/test_unsignedfile.py')
helper.newFile(self.unsigned)
helper.readrc(self)
def test_remove_from_unsigned_file(self):
self.assertRaises(UnsignedError, removeSign, self.unsigned, self.options_py)
def test_remove_from_signed_file(self):
self.assertTrue(isSign(self.signed, self.options_py))
removeSign(self.signed, self.options_py)
self.assertFalse(isSign(self.signed, self.options_py))
def tearDown(self):
os.remove(self.unsigned)
| setUp | identifier_name |
test_removeSign.py | #!/usr/bin/env python2
##
# autosign
# https://github.com/leosartaj/autosign.git
#
# copyright (c) 2014 sartaj singh
# licensed under the mit license.
##
import unittest
import os, shutil
import helper
from autosign.main import removeSign, isSign
from autosign.exce import UnsignedError
class TestremoveSign(unittest.TestCase):
"""
tests the removeSign function in main module
"""
def setUp(self):
self.dire = os.path.dirname(__file__)
self.signedfile = os.path.join(self.dire, 'testData/toBeSigned.py')
self.signed = os.path.join(self.dire, 'testData/test_signedfile.py')
shutil.copyfile(self.signedfile, self.signed)
self.unsigned = os.path.join(self.dire, 'testData/test_unsignedfile.py')
helper.newFile(self.unsigned)
helper.readrc(self)
def test_remove_from_unsigned_file(self):
self.assertRaises(UnsignedError, removeSign, self.unsigned, self.options_py)
def test_remove_from_signed_file(self):
|
def tearDown(self):
os.remove(self.unsigned)
| self.assertTrue(isSign(self.signed, self.options_py))
removeSign(self.signed, self.options_py)
self.assertFalse(isSign(self.signed, self.options_py)) | identifier_body |
test_removeSign.py | #!/usr/bin/env python2
##
# autosign
# https://github.com/leosartaj/autosign.git
#
# copyright (c) 2014 sartaj singh
# licensed under the mit license.
##
| import unittest
import os, shutil
import helper
from autosign.main import removeSign, isSign
from autosign.exce import UnsignedError
class TestremoveSign(unittest.TestCase):
"""
tests the removeSign function in main module
"""
def setUp(self):
self.dire = os.path.dirname(__file__)
self.signedfile = os.path.join(self.dire, 'testData/toBeSigned.py')
self.signed = os.path.join(self.dire, 'testData/test_signedfile.py')
shutil.copyfile(self.signedfile, self.signed)
self.unsigned = os.path.join(self.dire, 'testData/test_unsignedfile.py')
helper.newFile(self.unsigned)
helper.readrc(self)
def test_remove_from_unsigned_file(self):
self.assertRaises(UnsignedError, removeSign, self.unsigned, self.options_py)
def test_remove_from_signed_file(self):
self.assertTrue(isSign(self.signed, self.options_py))
removeSign(self.signed, self.options_py)
self.assertFalse(isSign(self.signed, self.options_py))
def tearDown(self):
os.remove(self.unsigned) | random_line_split |
|
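The behaviour these tests pin down, condensed into a usage sketch (the path is hypothetical and `options` stands for the parsed signature options that the `readrc` helper loads in `setUp`):

```python
from autosign.main import isSign, removeSign
from autosign.exce import UnsignedError

def strip_signature(path, options):
    """Remove a signature header if present; report what happened."""
    try:
        removeSign(path, options)   # raises UnsignedError when no signature
    except UnsignedError:
        return "file was not signed"
    assert not isSign(path, options)  # mirrors test_remove_from_signed_file
    return "signature removed"
```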
xrwebglsubimage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::XRWebGLSubImageBinding::XRWebGLSubImageBinding::XRWebGLSubImageMethods;
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::webgltexture::WebGLTexture;
use crate::dom::xrsubimage::XRSubImage;
use dom_struct::dom_struct;
#[dom_struct]
pub struct XRWebGLSubImage {
xr_sub_image: XRSubImage,
color_texture: Dom<WebGLTexture>,
depth_stencil_texture: Option<Dom<WebGLTexture>>,
image_index: Option<u32>,
}
impl XRWebGLSubImageMethods for XRWebGLSubImage {
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-colortexture
fn ColorTexture(&self) -> DomRoot<WebGLTexture> {
DomRoot::from_ref(&self.color_texture)
}
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-depthstenciltexture
fn GetDepthStencilTexture(&self) -> Option<DomRoot<WebGLTexture>> {
self.depth_stencil_texture.as_deref().map(DomRoot::from_ref)
}
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-imageindex | fn GetImageIndex(&self) -> Option<u32> {
self.image_index
}
} | random_line_split |
|
xrwebglsubimage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::XRWebGLSubImageBinding::XRWebGLSubImageBinding::XRWebGLSubImageMethods;
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::webgltexture::WebGLTexture;
use crate::dom::xrsubimage::XRSubImage;
use dom_struct::dom_struct;
#[dom_struct]
pub struct XRWebGLSubImage {
xr_sub_image: XRSubImage,
color_texture: Dom<WebGLTexture>,
depth_stencil_texture: Option<Dom<WebGLTexture>>,
image_index: Option<u32>,
}
impl XRWebGLSubImageMethods for XRWebGLSubImage {
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-colortexture
fn ColorTexture(&self) -> DomRoot<WebGLTexture> {
DomRoot::from_ref(&self.color_texture)
}
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-depthstenciltexture
fn GetDepthStencilTexture(&self) -> Option<DomRoot<WebGLTexture>> {
self.depth_stencil_texture.as_deref().map(DomRoot::from_ref)
}
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-imageindex
fn | (&self) -> Option<u32> {
self.image_index
}
}
| GetImageIndex | identifier_name |
xrwebglsubimage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::XRWebGLSubImageBinding::XRWebGLSubImageBinding::XRWebGLSubImageMethods;
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::webgltexture::WebGLTexture;
use crate::dom::xrsubimage::XRSubImage;
use dom_struct::dom_struct;
#[dom_struct]
pub struct XRWebGLSubImage {
xr_sub_image: XRSubImage,
color_texture: Dom<WebGLTexture>,
depth_stencil_texture: Option<Dom<WebGLTexture>>,
image_index: Option<u32>,
}
impl XRWebGLSubImageMethods for XRWebGLSubImage {
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-colortexture
fn ColorTexture(&self) -> DomRoot<WebGLTexture> {
DomRoot::from_ref(&self.color_texture)
}
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-depthstenciltexture
fn GetDepthStencilTexture(&self) -> Option<DomRoot<WebGLTexture>> |
/// https://immersive-web.github.io/layers/#dom-xrwebglsubimage-imageindex
fn GetImageIndex(&self) -> Option<u32> {
self.image_index
}
}
| {
self.depth_stencil_texture.as_deref().map(DomRoot::from_ref)
} | identifier_body |
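`GetDepthStencilTexture` leans on a small std idiom: `Option::as_deref()` borrows through the smart pointer so the mapping closure can take a plain reference. The same shape with ordinary std types — an illustration of the idiom only, not of Servo's `Dom`/`DomRoot` machinery:

```rust
fn main() {
    let owned: Option<String> = Some(String::from("texture"));

    // as_deref(): Option<String> -> Option<&str>, borrowing through the
    // owner, just as Option<Dom<WebGLTexture>> is borrowed before
    // DomRoot::from_ref is applied above.
    let length: Option<usize> = owned.as_deref().map(str::len);

    assert_eq!(length, Some(7));
    println!("{:?}", length);
}
```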
test_breakpoint-07.js | /* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Check that setting a breakpoint in a line without code in the second child
* script will skip forward.
*/
var gDebuggee;
var gClient;
var gThreadClient;
function run_test()
{
initTestDebuggerServer();
gDebuggee = addTestGlobal("test-stack");
gClient = new DebuggerClient(DebuggerServer.connectPipe());
gClient.connect(function () {
attachTestGlobalClientAndResume(gClient,
"test-stack",
function (aResponse, aThreadClient) {
gThreadClient = aThreadClient;
test_second_child_skip_breakpoint();
});
});
do_test_pending();
}
function test_second_child_skip_breakpoint()
| {
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
let path = getFilePath('test_breakpoint-07.js');
let location = { url: path, line: gDebuggee.line0 + 6};
gThreadClient.setBreakpoint(location, function (aResponse, bpClient) {
// Check that the breakpoint has properly skipped forward one line.
do_check_eq(aResponse.actualLocation.url, location.url);
do_check_eq(aResponse.actualLocation.line, location.line + 1);
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
// Check the return value.
do_check_eq(aPacket.type, "paused");
do_check_eq(aPacket.frame.where.url, path);
do_check_eq(aPacket.frame.where.line, location.line + 1);
do_check_eq(aPacket.why.type, "breakpoint");
do_check_eq(aPacket.why.actors[0], bpClient.actor);
// Check that the breakpoint worked.
do_check_eq(gDebuggee.a, 1);
do_check_eq(gDebuggee.b, undefined);
// Remove the breakpoint.
bpClient.remove(function (aResponse) {
gThreadClient.resume(function () {
finishClient(gClient);
});
});
});
// Continue until the breakpoint is hit.
gThreadClient.resume();
});
});
gDebuggee.eval("var line0 = Error().lineNumber;\n" +
"function foo() {\n" + // line0 + 1
" bar();\n" + // line0 + 2
"}\n" + // line0 + 3
"function bar() {\n" + // line0 + 4
" this.a = 1;\n" + // line0 + 5
" // A comment.\n" + // line0 + 6
" this.b = 2;\n" + // line0 + 7
"}\n" + // line0 + 8
"debugger;\n" + // line0 + 9
"foo();\n"); // line0 + 10
} | identifier_body |
|
test_breakpoint-07.js | /* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Check that setting a breakpoint in a line without code in the second child
* script will skip forward.
*/
var gDebuggee;
var gClient;
var gThreadClient;
function | ()
{
initTestDebuggerServer();
gDebuggee = addTestGlobal("test-stack");
gClient = new DebuggerClient(DebuggerServer.connectPipe());
gClient.connect(function () {
attachTestGlobalClientAndResume(gClient,
"test-stack",
function (aResponse, aThreadClient) {
gThreadClient = aThreadClient;
test_second_child_skip_breakpoint();
});
});
do_test_pending();
}
function test_second_child_skip_breakpoint()
{
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
let path = getFilePath('test_breakpoint-07.js');
let location = { url: path, line: gDebuggee.line0 + 6};
gThreadClient.setBreakpoint(location, function (aResponse, bpClient) {
// Check that the breakpoint has properly skipped forward one line.
do_check_eq(aResponse.actualLocation.url, location.url);
do_check_eq(aResponse.actualLocation.line, location.line + 1);
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
// Check the return value.
do_check_eq(aPacket.type, "paused");
do_check_eq(aPacket.frame.where.url, path);
do_check_eq(aPacket.frame.where.line, location.line + 1);
do_check_eq(aPacket.why.type, "breakpoint");
do_check_eq(aPacket.why.actors[0], bpClient.actor);
// Check that the breakpoint worked.
do_check_eq(gDebuggee.a, 1);
do_check_eq(gDebuggee.b, undefined);
// Remove the breakpoint.
bpClient.remove(function (aResponse) {
gThreadClient.resume(function () {
finishClient(gClient);
});
});
});
// Continue until the breakpoint is hit.
gThreadClient.resume();
});
});
gDebuggee.eval("var line0 = Error().lineNumber;\n" +
"function foo() {\n" + // line0 + 1
" bar();\n" + // line0 + 2
"}\n" + // line0 + 3
"function bar() {\n" + // line0 + 4
" this.a = 1;\n" + // line0 + 5
" // A comment.\n" + // line0 + 6
" this.b = 2;\n" + // line0 + 7
"}\n" + // line0 + 8
"debugger;\n" + // line0 + 9
"foo();\n"); // line0 + 10
}
| run_test | identifier_name |
test_breakpoint-07.js | /* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Check that setting a breakpoint in a line without code in the second child | var gDebuggee;
var gClient;
var gThreadClient;
function run_test()
{
initTestDebuggerServer();
gDebuggee = addTestGlobal("test-stack");
gClient = new DebuggerClient(DebuggerServer.connectPipe());
gClient.connect(function () {
attachTestGlobalClientAndResume(gClient,
"test-stack",
function (aResponse, aThreadClient) {
gThreadClient = aThreadClient;
test_second_child_skip_breakpoint();
});
});
do_test_pending();
}
function test_second_child_skip_breakpoint()
{
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
let path = getFilePath('test_breakpoint-07.js');
let location = { url: path, line: gDebuggee.line0 + 6};
gThreadClient.setBreakpoint(location, function (aResponse, bpClient) {
// Check that the breakpoint has properly skipped forward one line.
do_check_eq(aResponse.actualLocation.url, location.url);
do_check_eq(aResponse.actualLocation.line, location.line + 1);
gThreadClient.addOneTimeListener("paused", function (aEvent, aPacket) {
// Check the return value.
do_check_eq(aPacket.type, "paused");
do_check_eq(aPacket.frame.where.url, path);
do_check_eq(aPacket.frame.where.line, location.line + 1);
do_check_eq(aPacket.why.type, "breakpoint");
do_check_eq(aPacket.why.actors[0], bpClient.actor);
// Check that the breakpoint worked.
do_check_eq(gDebuggee.a, 1);
do_check_eq(gDebuggee.b, undefined);
// Remove the breakpoint.
bpClient.remove(function (aResponse) {
gThreadClient.resume(function () {
finishClient(gClient);
});
});
});
// Continue until the breakpoint is hit.
gThreadClient.resume();
});
});
gDebuggee.eval("var line0 = Error().lineNumber;\n" +
"function foo() {\n" + // line0 + 1
" bar();\n" + // line0 + 2
"}\n" + // line0 + 3
"function bar() {\n" + // line0 + 4
" this.a = 1;\n" + // line0 + 5
" // A comment.\n" + // line0 + 6
" this.b = 2;\n" + // line0 + 7
"}\n" + // line0 + 8
"debugger;\n" + // line0 + 9
"foo();\n"); // line0 + 10
} | * script will skip forward.
*/
| random_line_split |
DraftOffsetKey.js | /**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule DraftOffsetKey
* @flow
*/
'use strict';
import type {DraftOffsetKeyPath} from 'DraftOffsetKeyPath';
var KEY_DELIMITER = '-';
var DraftOffsetKey = {
encode: function(
blockKey: string,
decoratorKey: number,
leafKey: number
): string {
return blockKey + KEY_DELIMITER + decoratorKey + KEY_DELIMITER + leafKey;
},
decode: function(offsetKey: string): DraftOffsetKeyPath {
var [blockKey, decoratorKey, leafKey] = offsetKey.split(KEY_DELIMITER);
return { | decoratorKey: parseInt(decoratorKey, 10),
leafKey: parseInt(leafKey, 10),
};
},
};
module.exports = DraftOffsetKey; | blockKey, | random_line_split |
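// Illustrative round trip (sample values, not from the original source):
//   DraftOffsetKey.encode('b1', 0, 2)  // -> 'b1-0-2'
//   DraftOffsetKey.decode('b1-0-2')    // -> { blockKey: 'b1', decoratorKey: 0, leafKey: 2 }
// This assumes block keys themselves never contain the '-' delimiter.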
Register_sale.graphql.ts | /* tslint:disable */
import { ReaderFragment } from "relay-runtime";
import { FragmentRefs } from "relay-runtime";
export type Register_sale = {
readonly slug: string;
readonly internalID: string;
readonly status: string | null;
readonly requireIdentityVerification: boolean | null;
readonly " $refType": "Register_sale";
};
export type Register_sale$data = Register_sale;
export type Register_sale$key = {
readonly " $data"?: Register_sale$data;
readonly " $fragmentRefs": FragmentRefs<"Register_sale">;
};
const node: ReaderFragment = {
"kind": "Fragment",
"name": "Register_sale",
"type": "Sale",
"metadata": null,
"argumentDefinitions": [],
"selections": [
{
"kind": "ScalarField",
"alias": null,
"name": "slug",
"args": null,
"storageKey": null
},
{
"kind": "ScalarField",
"alias": null, | },
{
"kind": "ScalarField",
"alias": null,
"name": "status",
"args": null,
"storageKey": null
},
{
"kind": "ScalarField",
"alias": null,
"name": "requireIdentityVerification",
"args": null,
"storageKey": null
}
]
};
(node as any).hash = '555438fc585fbee0be6171fe3a713deb';
export default node; | "name": "internalID",
"args": null,
"storageKey": null | random_line_split |
buildUaBlockRegex.test.ts | import { expect } from '@hapi/code'
import * as Lab from '@hapi/lab'
import { buildUaBlockRegex } from '../../src/modules'
export const lab = Lab.script()
const describe = lab.describe
const it = lab.it | before(() => {
result = buildUaBlockRegex(['A', 'B', 'C'])
})
it('expect a RegEx to be returned', () => {
expect(result).to.exist()
expect(result).to.equal(new RegExp(`^.*(a|b|c).*$`))
})
})
describe('when passing in an empty Array', () => {
let result: RegExp
before(() => {
result = buildUaBlockRegex([])
})
it('expect no RegExp to be returned', () => {
expect(result).to.not.exist()
})
})
}) | const before = lab.before
describe('buildUaBlockRegex', () => {
describe('when passing in a valid Array', () => {
let result: RegExp | random_line_split |
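// Minimal sketch of the function under test, inferred only from the
// assertions above; the real implementation in ../../src/modules may differ:
//
//   export const buildUaBlockRegex = (names: string[]): RegExp | undefined =>
//     names.length > 0
//       ? new RegExp(`^.*(${names.map(n => n.toLowerCase()).join('|')}).*$`)
//       : undefined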
thread.rs | use alloc::boxed::Box;
use core::mem;
use system::syscall::{sys_clone, sys_exit, sys_yield, sys_nanosleep, sys_waitpid, CLONE_VM, CLONE_FS, CLONE_FILES,
TimeSpec};
use time::Duration;
/// An owned permission to join on a thread (block on its termination).
///
/// A `JoinHandle` *detaches* the child thread when it is dropped.
///
/// Due to platform restrictions, it is not possible to `Clone` this
/// handle: the ability to join a child thread is a uniquely-owned
/// permission.
// TODO: Mutex the result
pub struct JoinHandle<T> {
pid: usize,
result_ptr: *mut Option<T>,
}
impl<T> JoinHandle<T> {
/// Waits for the associated thread to finish.
pub fn join(self) -> Option<T> where T: ::core::fmt::Debug |
}
/// Sleep for a duration
pub fn sleep(duration: Duration) {
let req = TimeSpec {
tv_sec: duration.as_secs() as i64,
tv_nsec: duration.subsec_nanos() as i32,
};
let mut rem = TimeSpec {
tv_sec: 0,
tv_nsec: 0,
};
let _ = sys_nanosleep(&req, &mut rem);
}
/// Sleep for a number of milliseconds
pub fn sleep_ms(ms: u32) {
let secs = ms as u64 / 1000;
let nanos = (ms % 1000) * 1000000;
sleep(Duration::new(secs, nanos))
}
/// Spawns a new thread, returning a `JoinHandle` for it.
///
/// The join handle will implicitly *detach* the child thread upon being
/// dropped. In this case, the child thread may outlive the parent (unless
/// the parent thread is the main thread; the whole process is terminated when
/// the main thread finishes.) Additionally, the join handle provides a `join`
/// method that can be used to join the child thread. If the child thread
/// panics, `join` will return an `Err` containing the argument given to
/// `panic`.
///
/// # Panics
///
/// Panics if the OS fails to create a thread; use `Builder::spawn`
/// to recover from such errors.
// TODO: Catch panic
pub fn spawn<F, T>(f: F) -> JoinHandle<T>
where F: FnOnce() -> T,
F: Send + 'static,
T: Send + 'static
{
let result_ptr: *mut Option<T> = Box::into_raw(Box::new(None));
//This must only be used by the child
let boxed_f = Box::new(f);
match unsafe { sys_clone(CLONE_VM | CLONE_FS | CLONE_FILES).unwrap() } {
0 => {
unsafe { *result_ptr = Some(boxed_f()) };
loop {
let _ = sys_exit(0);
}
},
pid => {
//Forget so that the parent will not drop while the child is using
mem::forget(boxed_f);
JoinHandle {
pid: pid,
result_ptr: result_ptr
}
}
}
}
pub fn yield_now() {
let _ = sys_yield();
}
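// Illustrative caller (hypothetical, not part of this module): the closure
// runs on the cloned thread, and `join` blocks until the child exits.
//
//     let handle = spawn(|| 40 + 2);
//     assert_eq!(handle.join(), Some(42));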
| {
let mut status = 0;
match sys_waitpid(self.pid, &mut status, 0) {
Ok(_) => unsafe { *Box::from_raw(self.result_ptr) },
Err(_) => None
}
} | identifier_body |
thread.rs | use alloc::boxed::Box;
use core::mem;
use system::syscall::{sys_clone, sys_exit, sys_yield, sys_nanosleep, sys_waitpid, CLONE_VM, CLONE_FS, CLONE_FILES,
TimeSpec};
use time::Duration;
/// An owned permission to join on a thread (block on its termination).
///
/// A `JoinHandle` *detaches* the child thread when it is dropped.
///
/// Due to platform restrictions, it is not possible to `Clone` this
/// handle: the ability to join a child thread is a uniquely-owned
/// permission.
// TODO: Mutex the result
pub struct JoinHandle<T> {
pid: usize,
result_ptr: *mut Option<T>,
}
impl<T> JoinHandle<T> {
/// Waits for the associated thread to finish.
pub fn join(self) -> Option<T> where T: ::core::fmt::Debug {
let mut status = 0;
match sys_waitpid(self.pid, &mut status, 0) {
Ok(_) => unsafe { *Box::from_raw(self.result_ptr) },
Err(_) => None
}
}
}
/// Sleep for a duration
pub fn sleep(duration: Duration) {
let req = TimeSpec {
tv_sec: duration.as_secs() as i64,
tv_nsec: duration.subsec_nanos() as i32,
};
let mut rem = TimeSpec {
tv_sec: 0,
tv_nsec: 0,
};
let _ = sys_nanosleep(&req, &mut rem);
}
/// Sleep for a number of milliseconds
pub fn sleep_ms(ms: u32) {
let secs = ms as u64 / 1000;
let nanos = (ms % 1000) * 1000000;
sleep(Duration::new(secs, nanos))
}
/// Spawns a new thread, returning a `JoinHandle` for it.
///
/// The join handle will implicitly *detach* the child thread upon being
/// dropped. In this case, the child thread may outlive the parent (unless
/// the parent thread is the main thread; the whole process is terminated when
/// the main thread finishes.) Additionally, the join handle provides a `join`
/// method that can be used to join the child thread. If the child thread
/// panics, `join` will return an `Err` containing the argument given to
/// `panic`.
///
/// # Panics
///
/// Panics if the OS fails to create a thread; use `Builder::spawn`
/// to recover from such errors.
// TODO: Catch panic
pub fn | <F, T>(f: F) -> JoinHandle<T>
where F: FnOnce() -> T,
F: Send + 'static,
T: Send + 'static
{
let result_ptr: *mut Option<T> = Box::into_raw(Box::new(None));
//This must only be used by the child
let boxed_f = Box::new(f);
match unsafe { sys_clone(CLONE_VM | CLONE_FS | CLONE_FILES).unwrap() } {
0 => {
unsafe { *result_ptr = Some(boxed_f()) };
loop {
let _ = sys_exit(0);
}
},
pid => {
//Forget so that the parent will not drop while the child is using
mem::forget(boxed_f);
JoinHandle {
pid: pid,
result_ptr: result_ptr
}
}
}
}
pub fn yield_now() {
let _ = sys_yield();
}
| spawn | identifier_name |
thread.rs | use alloc::boxed::Box;
use core::mem;
use system::syscall::{sys_clone, sys_exit, sys_yield, sys_nanosleep, sys_waitpid, CLONE_VM, CLONE_FS, CLONE_FILES,
TimeSpec};
use time::Duration;
/// An owned permission to join on a thread (block on its termination).
///
/// A `JoinHandle` *detaches* the child thread when it is dropped.
///
/// Due to platform restrictions, it is not possible to `Clone` this
/// handle: the ability to join a child thread is a uniquely-owned
/// permission.
// TODO: Mutex the result
pub struct JoinHandle<T> {
pid: usize,
result_ptr: *mut Option<T>,
}
impl<T> JoinHandle<T> {
/// Waits for the associated thread to finish.
pub fn join(self) -> Option<T> where T: ::core::fmt::Debug {
let mut status = 0;
match sys_waitpid(self.pid, &mut status, 0) {
Ok(_) => unsafe { *Box::from_raw(self.result_ptr) },
Err(_) => None
}
}
}
/// Sleep for a duration
pub fn sleep(duration: Duration) {
let req = TimeSpec {
tv_sec: duration.as_secs() as i64,
tv_nsec: duration.subsec_nanos() as i32,
};
| let _ = sys_nanosleep(&req, &mut rem);
}
/// Sleep for a number of milliseconds
pub fn sleep_ms(ms: u32) {
let secs = ms as u64 / 1000;
let nanos = (ms % 1000) * 1000000;
sleep(Duration::new(secs, nanos))
}
/// Spawns a new thread, returning a `JoinHandle` for it.
///
/// The join handle will implicitly *detach* the child thread upon being
/// dropped. In this case, the child thread may outlive the parent (unless
/// the parent thread is the main thread; the whole process is terminated when
/// the main thread finishes.) Additionally, the join handle provides a `join`
/// method that can be used to join the child thread. If the child thread
/// panics, `join` will return an `Err` containing the argument given to
/// `panic`.
///
/// # Panics
///
/// Panics if the OS fails to create a thread; use `Builder::spawn`
/// to recover from such errors.
// TODO: Catch panic
pub fn spawn<F, T>(f: F) -> JoinHandle<T>
where F: FnOnce() -> T,
F: Send + 'static,
T: Send + 'static
{
let result_ptr: *mut Option<T> = Box::into_raw(Box::new(None));
//This must only be used by the child
let boxed_f = Box::new(f);
match unsafe { sys_clone(CLONE_VM | CLONE_FS | CLONE_FILES).unwrap() } {
0 => {
unsafe { *result_ptr = Some(boxed_f()) };
loop {
let _ = sys_exit(0);
}
},
pid => {
//Forget so that the parent will not drop while the child is using
mem::forget(boxed_f);
JoinHandle {
pid: pid,
result_ptr: result_ptr
}
}
}
}
pub fn yield_now() {
let _ = sys_yield();
} | let mut rem = TimeSpec {
tv_sec: 0,
tv_nsec: 0,
};
| random_line_split |
App.js | import React, { Component } from 'react';
import logo from './logo.svg';
import './App.css';
import update from 'immutability-helper';
class ListItemBullet extends Component {
render() {
let cssClasses = `listbullet`;
const bulletStyle = {
border: `5px solid ${this.props.color}`,
backgroundColor: 'white'
};
return (
<div className={cssClasses} style={bulletStyle}></div>
);
}
};
class MyRouteMap extends Component {
render() {
const renderList = (list) => {
const lis = list.map((data, index) => {
return (<li key={index}><ListItemBullet color={this.props.color} />
{
data.url?
(<a href={data.url}><span className='description'>{data.name}</span><div className='details'>This is a test.<br/>This is the second line</div></a>)
:
(<span className='description'>{data.name}</span>)
}
</li>);
})
return lis;
};
const cssClasses = `App-routemap ${this.props.color}`;
const ulStyle = {
//color: this.props.color,
marginTop: '30px'
};
const ulBeforeStyle = {
content: " ",
position: 'absolute',
marginLeft: '8px',
left: '0px',
top: '20px',
bottom: '40px',
width: '12px',
zIndex: -5,
backgroundColor: this.props.color
}; | <div className="App-routemap-div">
<h2>{this.props.title}</h2>
<ul className={cssClasses} style={ulStyle}>
<div style={ulBeforeStyle}></div>
{ renderList(this.props.datalist) }
</ul>
</div>
);
}
}
class App extends Component {
constructor() {
super();
this.state = {
list_unix: [
{
name: "Git 使用與教學",
url: "https://se101.mtsa.me/Slide/Git/#/"
}
],
'list_system': [
"作業系統概述",
"分散式系統架構",
"Scaling Up"
],
'list_data': [
"大數據分析簡介",
"TensorFlow 簡介",
],
'list_algo': [
"Python3 語法介紹",
"演算法簡介",
"基礎資料結構",
"字串處理",
"動態規劃",
"圖論演算法"
]
};
}
handleClick(e) {
var p = this.state.list_data;
p.push("Test Item");
var newState = update(this.state, {'list_data': {$set: p}});
this.setState(newState);
}
render() {
return (
<div className="App">
<div className="App-header">
<h1>SE101: 我想成為軟體工程師!</h1>
<span className="App-intro">UMich Taiwanese Software Engineers Reading Group - Fall 2017</span>
</div>
<div className="App-contents">
<MyRouteMap title='黑框框與開發者工具'datalist={this.state.list_unix} color='red' />
<MyRouteMap title='系統架設與維運' datalist={this.state.list_system} color='darkgreen' />
<MyRouteMap title='資料科學與技術' datalist={this.state.list_data} color='darkblue' />
<MyRouteMap title='編程面試與程式語言' datalist={this.state.list_algo} color='orange' />
</div>
</div>
);
//<button onClick={() => this.handleClick()}>Add Item</button>
}
}
export default App; |
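// Usage sketch (hypothetical props): datalist entries may be plain strings or
// { name, url } objects; entries with a url render as links.
//   <MyRouteMap title='Demo' color='teal'
//               datalist={['Plain item', { name: 'Linked item', url: '/slides' }]} />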
return ( | random_line_split |
App.js | import React, { Component } from 'react';
import logo from './logo.svg';
import './App.css';
import update from 'immutability-helper';
class ListItemBullet extends Component {
render() {
let cssClasses = `listbullet`;
const bulletStyle = {
border: `5px solid ${this.props.color}`,
backgroundColor: 'white'
};
return (
<div className={cssClasses} style={bulletStyle}></div>
);
}
};
class MyRouteMap extends Component {
render() {
const renderList = (list) => {
const lis = list.map((data, index) => {
return (<li key={index}><ListItemBullet color={this.props.color} />
{
data.url?
(<a href={data.url}><span className='description'>{data.name}</span><div className='details'>This is a test.<br/>This is the second line</div></a>)
:
(<span className='description'>{data.name}</span>)
}
</li>);
})
return lis;
};
const cssClasses = `App-routemap ${this.props.color}`;
const ulStyle = {
//color: this.props.color,
marginTop: '30px'
};
const ulBeforeStyle = {
content: " ",
position: 'absolute',
marginLeft: '8px',
left: '0px',
top: '20px',
bottom: '40px',
width: '12px',
zIndex: -5,
backgroundColor: this.props.color
};
return (
<div className="App-routemap-div">
<h2>{this.props.title}</h2>
<ul className={cssClasses} style={ulStyle}>
<div style={ulBeforeStyle}></div>
{ renderList(this.props.datalist) }
</ul>
</div>
);
}
}
class App extends Component {
| () {
super();
this.state = {
list_unix: [
{
name: "Git 使用與教學",
url: "https://se101.mtsa.me/Slide/Git/#/"
}
],
'list_system': [
"作業系統概述",
"分散式系統架構",
"Scaling Up"
],
'list_data': [
"大數據分析簡介",
"TensorFlow 簡介",
],
'list_algo': [
"Python3 語法介紹",
"演算法簡介",
"基礎資料結構",
"字串處理",
"動態規劃",
"圖論演算法"
]
};
}
handleClick(e) {
var p = this.state.list_data;
p.push("Test Item");
var newState = update(this.state, {'list_data': {$set: p}});
this.setState(newState);
}
render() {
return (
<div className="App">
<div className="App-header">
<h1>SE101: 我想成為軟體工程師!</h1>
<span className="App-intro">UMich Taiwanese Software Engineers Reading Group - Fall 2017</span>
</div>
<div className="App-contents">
<MyRouteMap title='黑框框與開發者工具'datalist={this.state.list_unix} color='red' />
<MyRouteMap title='系統架設與維運' datalist={this.state.list_system} color='darkgreen' />
<MyRouteMap title='資料科學與技術' datalist={this.state.list_data} color='darkblue' />
<MyRouteMap title='編程面試與程式語言' datalist={this.state.list_algo} color='orange' />
</div>
</div>
);
//<button onClick={() => this.handleClick()}>Add Item</button>
}
}
export default App;
| constructor | identifier_name |
create_health_report.py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides the web interface for adding and editing sheriff rotations."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import json
from google.appengine.api import users
from google.appengine.ext import ndb
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.common import xsrf
from dashboard.models import table_config
class CreateHealthReportHandler(request_handler.RequestHandler):
def get(self):
"""Renders the UI with the form fields."""
self.RenderStaticHtml('create_health_report.html')
def post(self):
"""POSTS the data to the datastore."""
user = users.get_current_user()
if not user:
self.response.out.write(json.dumps({'error': 'User not logged in.'}))
return
if not utils.IsInternalUser():
self.response.out.write(json.dumps(
{'error':
'Unauthorized access, please use chromium account to login.'}))
return
get_token = self.request.get('getToken')
get_table_config_list = self.request.get('getTableConfigList')
get_table_config_details = self.request.get('getTableConfigDetails')
if get_token == 'true':
values = {}
self.GetDynamicVariables(values)
self.response.out.write(json.dumps({
'xsrf_token': values['xsrf_token'],
}))
elif get_table_config_list:
self._GetTableConfigList()
elif get_table_config_details:
self._GetTableConfigDetails(get_table_config_details)
else:
self._CreateTableConfig()
def _GetTableConfigList(self):
query = table_config.TableConfig.query()
table_config_list = query.fetch(keys_only=True)
return_list = []
for config in table_config_list:
return_list.append(config.id())
self.response.out.write(json.dumps({
'table_config_list': return_list,
}))
def _GetTableConfigDetails(self, config_name):
config_entity = ndb.Key('TableConfig', config_name).get()
if config_entity:
master_bot_list = []
for bot in config_entity.bots:
master_bot_list.append(bot.parent().string_id() + '/' + bot.string_id())
self.response.out.write(json.dumps({
'table_name': config_name,
'table_bots': master_bot_list,
'table_tests': config_entity.tests,
'table_layout': config_entity.table_layout
}))
else:
self.response.out.write(json.dumps({
'error': 'Invalid config name.'
}))
def _CreateTableConfig(self):
"""Creates a table config. Writes a valid name or an error message."""
self._ValidateToken() | table_layout = self.request.get('tableLayout')
override = int(self.request.get('override'))
user = users.get_current_user()
if not name or not master_bot or not tests or not table_layout or not user:
self.response.out.write(json.dumps({
'error': 'Please fill out the form entirely.'
}))
return
try:
created_table = table_config.CreateTableConfig(
name=name, bots=master_bot, tests=tests, layout=table_layout,
username=user.email(), override=override)
except table_config.BadRequestError as error:
self.response.out.write(json.dumps({
'error': error.message,
}))
logging.error('BadRequestError: %r', error.message)
return
if created_table:
self.response.out.write(json.dumps({
'name': name,
}))
else:
self.response.out.write(json.dumps({
'error': 'Could not create table.',
}))
logging.error('Could not create table.')
def _ValidateToken(self):
user = users.get_current_user()
token = str(self.request.get('xsrf_token'))
if not user or not xsrf._ValidateToken(token, user):
self.abort(403) | name = self.request.get('tableName')
master_bot = self.request.get('tableBots').splitlines()
tests = self.request.get('tableTests').splitlines() | random_line_split |
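# Hypothetical client sketch (the URL path is an assumption; the field names
# match the handler above). Creating a config POSTs the form fields plus the
# XSRF token previously fetched with getToken=true:
#
#   POST /create_health_report
#     tableName=Milestone Health
#     tableBots=ChromiumPerf/linux-perf      (one Master/bot per line)
#     tableTests=speedometer/Total           (one test path per line)
#     tableLayout={...}
#     override=0
#     xsrf_token=<token>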
create_health_report.py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides the web interface for adding and editing sheriff rotations."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import json
from google.appengine.api import users
from google.appengine.ext import ndb
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.common import xsrf
from dashboard.models import table_config
class | (request_handler.RequestHandler):
def get(self):
"""Renders the UI with the form fields."""
self.RenderStaticHtml('create_health_report.html')
def post(self):
"""POSTS the data to the datastore."""
user = users.get_current_user()
if not user:
self.response.out.write(json.dumps({'error': 'User not logged in.'}))
return
if not utils.IsInternalUser():
self.response.out.write(json.dumps(
{'error':
'Unauthorized access, please use chromium account to login.'}))
return
get_token = self.request.get('getToken')
get_table_config_list = self.request.get('getTableConfigList')
get_table_config_details = self.request.get('getTableConfigDetails')
if get_token == 'true':
values = {}
self.GetDynamicVariables(values)
self.response.out.write(json.dumps({
'xsrf_token': values['xsrf_token'],
}))
elif get_table_config_list:
self._GetTableConfigList()
elif get_table_config_details:
self._GetTableConfigDetails(get_table_config_details)
else:
self._CreateTableConfig()
def _GetTableConfigList(self):
query = table_config.TableConfig.query()
table_config_list = query.fetch(keys_only=True)
return_list = []
for config in table_config_list:
return_list.append(config.id())
self.response.out.write(json.dumps({
'table_config_list': return_list,
}))
def _GetTableConfigDetails(self, config_name):
config_entity = ndb.Key('TableConfig', config_name).get()
if config_entity:
master_bot_list = []
for bot in config_entity.bots:
master_bot_list.append(bot.parent().string_id() + '/' + bot.string_id())
self.response.out.write(json.dumps({
'table_name': config_name,
'table_bots': master_bot_list,
'table_tests': config_entity.tests,
'table_layout': config_entity.table_layout
}))
else:
self.response.out.write(json.dumps({
'error': 'Invalid config name.'
}))
def _CreateTableConfig(self):
"""Creates a table config. Writes a valid name or an error message."""
self._ValidateToken()
name = self.request.get('tableName')
master_bot = self.request.get('tableBots').splitlines()
tests = self.request.get('tableTests').splitlines()
table_layout = self.request.get('tableLayout')
override = int(self.request.get('override'))
user = users.get_current_user()
if not name or not master_bot or not tests or not table_layout or not user:
self.response.out.write(json.dumps({
'error': 'Please fill out the form entirely.'
}))
return
try:
created_table = table_config.CreateTableConfig(
name=name, bots=master_bot, tests=tests, layout=table_layout,
username=user.email(), override=override)
except table_config.BadRequestError as error:
self.response.out.write(json.dumps({
'error': error.message,
}))
logging.error('BadRequestError: %r', error.message)
return
if created_table:
self.response.out.write(json.dumps({
'name': name,
}))
else:
self.response.out.write(json.dumps({
'error': 'Could not create table.',
}))
logging.error('Could not create table.')
def _ValidateToken(self):
user = users.get_current_user()
token = str(self.request.get('xsrf_token'))
if not user or not xsrf._ValidateToken(token, user):
self.abort(403)
| CreateHealthReportHandler | identifier_name |
create_health_report.py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides the web interface for adding and editing sheriff rotations."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import json
from google.appengine.api import users
from google.appengine.ext import ndb
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.common import xsrf
from dashboard.models import table_config
class CreateHealthReportHandler(request_handler.RequestHandler):
| def get(self):
"""Renders the UI with the form fields."""
self.RenderStaticHtml('create_health_report.html')
def post(self):
"""POSTS the data to the datastore."""
user = users.get_current_user()
if not user:
self.response.out.write(json.dumps({'error': 'User not logged in.'}))
return
if not utils.IsInternalUser():
self.response.out.write(json.dumps(
{'error':
'Unauthorized access, please use chromium account to login.'}))
return
get_token = self.request.get('getToken')
get_table_config_list = self.request.get('getTableConfigList')
get_table_config_details = self.request.get('getTableConfigDetails')
if get_token == 'true':
values = {}
self.GetDynamicVariables(values)
self.response.out.write(json.dumps({
'xsrf_token': values['xsrf_token'],
}))
elif get_table_config_list:
self._GetTableConfigList()
elif get_table_config_details:
self._GetTableConfigDetails(get_table_config_details)
else:
self._CreateTableConfig()
def _GetTableConfigList(self):
query = table_config.TableConfig.query()
table_config_list = query.fetch(keys_only=True)
return_list = []
for config in table_config_list:
return_list.append(config.id())
self.response.out.write(json.dumps({
'table_config_list': return_list,
}))
def _GetTableConfigDetails(self, config_name):
config_entity = ndb.Key('TableConfig', config_name).get()
if config_entity:
master_bot_list = []
for bot in config_entity.bots:
master_bot_list.append(bot.parent().string_id() + '/' + bot.string_id())
self.response.out.write(json.dumps({
'table_name': config_name,
'table_bots': master_bot_list,
'table_tests': config_entity.tests,
'table_layout': config_entity.table_layout
}))
else:
self.response.out.write(json.dumps({
'error': 'Invalid config name.'
}))
def _CreateTableConfig(self):
"""Creates a table config. Writes a valid name or an error message."""
self._ValidateToken()
name = self.request.get('tableName')
master_bot = self.request.get('tableBots').splitlines()
tests = self.request.get('tableTests').splitlines()
table_layout = self.request.get('tableLayout')
override = int(self.request.get('override'))
user = users.get_current_user()
if not name or not master_bot or not tests or not table_layout or not user:
self.response.out.write(json.dumps({
'error': 'Please fill out the form entirely.'
}))
return
try:
created_table = table_config.CreateTableConfig(
name=name, bots=master_bot, tests=tests, layout=table_layout,
username=user.email(), override=override)
except table_config.BadRequestError as error:
self.response.out.write(json.dumps({
'error': error.message,
}))
logging.error('BadRequestError: %r', error.message)
return
if created_table:
self.response.out.write(json.dumps({
'name': name,
}))
else:
self.response.out.write(json.dumps({
'error': 'Could not create table.',
}))
logging.error('Could not create table.')
def _ValidateToken(self):
user = users.get_current_user()
token = str(self.request.get('xsrf_token'))
if not user or not xsrf._ValidateToken(token, user):
self.abort(403) | identifier_body |
|
create_health_report.py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides the web interface for adding and editing sheriff rotations."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import json
from google.appengine.api import users
from google.appengine.ext import ndb
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.common import xsrf
from dashboard.models import table_config
class CreateHealthReportHandler(request_handler.RequestHandler):
def get(self):
"""Renders the UI with the form fields."""
self.RenderStaticHtml('create_health_report.html')
def post(self):
"""POSTS the data to the datastore."""
user = users.get_current_user()
if not user:
self.response.out.write(json.dumps({'error': 'User not logged in.'}))
return
if not utils.IsInternalUser():
self.response.out.write(json.dumps(
{'error':
'Unauthorized access, please use chromium account to login.'}))
return
get_token = self.request.get('getToken')
get_table_config_list = self.request.get('getTableConfigList')
get_table_config_details = self.request.get('getTableConfigDetails')
if get_token == 'true':
values = {}
self.GetDynamicVariables(values)
self.response.out.write(json.dumps({
'xsrf_token': values['xsrf_token'],
}))
elif get_table_config_list:
self._GetTableConfigList()
elif get_table_config_details:
self._GetTableConfigDetails(get_table_config_details)
else:
self._CreateTableConfig()
def _GetTableConfigList(self):
query = table_config.TableConfig.query()
table_config_list = query.fetch(keys_only=True)
return_list = []
for config in table_config_list:
return_list.append(config.id())
self.response.out.write(json.dumps({
'table_config_list': return_list,
}))
def _GetTableConfigDetails(self, config_name):
config_entity = ndb.Key('TableConfig', config_name).get()
if config_entity:
master_bot_list = []
for bot in config_entity.bots:
master_bot_list.append(bot.parent().string_id() + '/' + bot.string_id())
self.response.out.write(json.dumps({
'table_name': config_name,
'table_bots': master_bot_list,
'table_tests': config_entity.tests,
'table_layout': config_entity.table_layout
}))
else:
self.response.out.write(json.dumps({
'error': 'Invalid config name.'
}))
def _CreateTableConfig(self):
"""Creates a table config. Writes a valid name or an error message."""
self._ValidateToken()
name = self.request.get('tableName')
master_bot = self.request.get('tableBots').splitlines()
tests = self.request.get('tableTests').splitlines()
table_layout = self.request.get('tableLayout')
override = int(self.request.get('override'))
user = users.get_current_user()
if not name or not master_bot or not tests or not table_layout or not user:
|
try:
created_table = table_config.CreateTableConfig(
name=name, bots=master_bot, tests=tests, layout=table_layout,
username=user.email(), override=override)
except table_config.BadRequestError as error:
self.response.out.write(json.dumps({
'error': error.message,
}))
logging.error('BadRequestError: %r', error.message)
return
if created_table:
self.response.out.write(json.dumps({
'name': name,
}))
else:
self.response.out.write(json.dumps({
'error': 'Could not create table.',
}))
logging.error('Could not create table.')
def _ValidateToken(self):
user = users.get_current_user()
token = str(self.request.get('xsrf_token'))
if not user or not xsrf._ValidateToken(token, user):
self.abort(403)
| self.response.out.write(json.dumps({
'error': 'Please fill out the form entirely.'
}))
return | conditional_block |
Session.ts | import {SessionDao} from "../database/daos/SessionDao"
import {getClient} from "../database/Connection"
import {ISessionRow} from "../models/Session"
import {IUserRow} from "../models/User"
import {Option, some, none} from "../Option"
import * as _ from "lodash"
import * as Promise from "bluebird"
import * as crypto from "crypto"
import * as moment from "moment"
const randomBytes = Promise.promisify(crypto.randomBytes)
const dao = new SessionDao();
async function create(user: IUserRow) {
// 16 * 8 = 128 bits of entropy
const token = await randomBytes(16);
const session : ISessionRow = {
user_id: user.id,
created: moment().toISOString(),
valid_until: moment().add(2, "weeks").toISOString(),
// TODO?: Some modified form of base64 would be more efficient | return session;
}
async function refresh(session: ISessionRow) {
const update = {
valid_until: moment().add(2, "weeks").toISOString()
}
await dao.update(update, "user_id", session.user_id);
return <ISessionRow> _.assign(_.clone(session), update);
}
function isValid(session: ISessionRow) {
return moment(session.valid_until).isAfter(moment());
}
export async function getOrCreate(user: IUserRow) : Promise<ISessionRow> {
const session = await dao.getById(user.id);
if (session == null) {
return create(user);
}
if (isValid(session)) {
return refresh(session);
}
await dao.delete(session.user_id);
return create(user);
}
export async function tryGet(sessionToken: string) : Promise<Option<ISessionRow>> {
const session = await dao.getOneByColumn("token", sessionToken);
if (session != null) {
if (moment(session.valid_until).isAfter(moment())) {
return some(await refresh(session));
} else {
return none<ISessionRow>();
}
} else {
return none<ISessionRow>();
}
}
export async function deleteIfExists(userId: number) {
return dao.delete(userId);
} | token: token.toString("hex")
}
await dao.insert(session);
| random_line_split |
Session.ts |
import {SessionDao} from "../database/daos/SessionDao"
import {getClient} from "../database/Connection"
import {ISessionRow} from "../models/Session"
import {IUserRow} from "../models/User"
import {Option, some, none} from "../Option"
import * as _ from "lodash"
import * as Promise from "bluebird"
import * as crypto from "crypto"
import * as moment from "moment"
const randomBytes = Promise.promisify(crypto.randomBytes)
const dao = new SessionDao();
async function create(user: IUserRow) {
// 16 * 8 = 128 bits of entropy
const token = await randomBytes(16);
const session : ISessionRow = {
user_id: user.id,
created: moment().toISOString(),
valid_until: moment().add(2, "weeks").toISOString(),
// TODO?: Some modified form of base64 would be more efficient
token: token.toString("hex")
}
await dao.insert(session);
return session;
}
async function refresh(session: ISessionRow) {
const update = {
valid_until: moment().add(2, "weeks").toISOString()
}
await dao.update(update, "user_id", session.user_id);
return <ISessionRow> _.assign(_.clone(session), update);
}
function isValid(session: ISessionRow) {
return moment(session.valid_until).isAfter(moment());
}
export async function getOrCreate(user: IUserRow) : Promise<ISessionRow> {
const session = await dao.getById(user.id);
if (session == null) {
return create(user);
}
if (isValid(session)) {
return refresh(session);
}
await dao.delete(session.user_id);
return create(user);
}
export async function tryGet(sessionToken: string) : Promise<Option<ISessionRow>> {
const session = await dao.getOneByColumn("token", sessionToken);
if (session != null) {
if (moment(session.valid_until).isAfter(moment())) {
return some(await refresh(session));
} else {
return none<ISessionRow>();
}
} else |
}
export async function deleteIfExists(userId: number) {
return dao.delete(userId);
} | {
return none<ISessionRow>();
} | conditional_block |
Session.ts |
import {SessionDao} from "../database/daos/SessionDao"
import {getClient} from "../database/Connection"
import {ISessionRow} from "../models/Session"
import {IUserRow} from "../models/User"
import {Option, some, none} from "../Option"
import * as _ from "lodash"
import * as Promise from "bluebird"
import * as crypto from "crypto"
import * as moment from "moment"
const randomBytes = Promise.promisify(crypto.randomBytes)
const dao = new SessionDao();
async function create(user: IUserRow) {
// 16 * 8 = 128 bits of entropy
const token = await randomBytes(16);
const session : ISessionRow = {
user_id: user.id,
created: moment().toISOString(),
valid_until: moment().add(2, "weeks").toISOString(),
// TODO?: Some modified form of base64 would be more efficient
token: token.toString("hex")
}
await dao.insert(session);
return session;
}
async function refresh(session: ISessionRow) {
const update = {
valid_until: moment().add(2, "weeks").toISOString()
}
await dao.update(update, "user_id", session.user_id);
return <ISessionRow> _.assign(_.clone(session), update);
}
function | (session: ISessionRow) {
return moment(session.valid_until).isAfter(moment());
}
export async function getOrCreate(user: IUserRow) : Promise<ISessionRow> {
const session = await dao.getById(user.id);
if (session == null) {
return create(user);
}
if (isValid(session)) {
return refresh(session);
}
await dao.delete(session.user_id);
return create(user);
}
export async function tryGet(sessionToken: string) : Promise<Option<ISessionRow>> {
const session = await dao.getOneByColumn("token", sessionToken);
if (session != null) {
if (moment(session.valid_until).isAfter(moment())) {
return some(await refresh(session));
} else {
return none<ISessionRow>();
}
} else {
return none<ISessionRow>();
}
}
export async function deleteIfExists(userId: number) {
return dao.delete(userId);
} | isValid | identifier_name |
Session.ts |
import {SessionDao} from "../database/daos/SessionDao"
import {getClient} from "../database/Connection"
import {ISessionRow} from "../models/Session"
import {IUserRow} from "../models/User"
import {Option, some, none} from "../Option"
import * as _ from "lodash"
import * as Promise from "bluebird"
import * as crypto from "crypto"
import * as moment from "moment"
const randomBytes = Promise.promisify(crypto.randomBytes)
const dao = new SessionDao();
async function create(user: IUserRow) {
// 16 * 8 = 128 bits of entropy
const token = await randomBytes(16);
const session : ISessionRow = {
user_id: user.id,
created: moment().toISOString(),
valid_until: moment().add(2, "weeks").toISOString(),
// TODO?: Some modified form of base64 would be more efficient
token: token.toString("hex")
}
await dao.insert(session);
return session;
}
async function refresh(session: ISessionRow) {
const update = {
valid_until: moment().add(2, "weeks").toISOString()
}
await dao.update(update, "user_id", session.user_id);
return <ISessionRow> _.assign(_.clone(session), update);
}
function isValid(session: ISessionRow) {
return moment(session.valid_until).isAfter(moment());
}
export async function getOrCreate(user: IUserRow) : Promise<ISessionRow> {
const session = await dao.getById(user.id);
if (session == null) {
return create(user);
}
if (isValid(session)) {
return refresh(session);
}
await dao.delete(session.user_id);
return create(user);
}
export async function tryGet(sessionToken: string) : Promise<Option<ISessionRow>> {
const session = await dao.getOneByColumn("token", sessionToken);
if (session != null) {
if (moment(session.valid_until).isAfter(moment())) {
return some(await refresh(session));
} else {
return none<ISessionRow>();
}
} else {
return none<ISessionRow>();
}
}
export async function deleteIfExists(userId: number) | {
return dao.delete(userId);
} | identifier_body |
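// Hypothetical request-handling flow (caller names are assumptions; only the
// exported functions come from this module):
//
//   const session = await getOrCreate(user);              // login: issue or refresh a token
//   const maybe   = await tryGet(req.cookies["session"]); // later requests: validate + sliding renewal
//   await deleteIfExists(user.id);                        // logout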
|
sequence.rs | use io::fai::FaiRecord;
use sequence::*;
use std::fmt;
use std::fs::File;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::path::Path;
use std::path::PathBuf;
#[derive(Clone, Debug)]
pub struct FaiSequence {
record: FaiRecord,
filename: PathBuf,
}
impl FaiSequence {
pub fn new<P: AsRef<Path>>(record: FaiRecord, filename: &P) -> FaiSequence {
FaiSequence {
record: record,
filename: filename.as_ref().to_path_buf(),
} |
fn length(&self) -> usize {
self.record.length()
}
fn vec(&self) -> Vec<DnaNucleotide> {
self.subsequence(0, self.length()).vec()
}
fn subsequence(&self, offset: usize, length: usize) -> DnaSequence {
let n_lines = offset / self.record.linebases();
let n_bases = offset - (n_lines * self.record.linebases());
let file_offset = self.record.offset() + n_lines * self.record.linewidth() + n_bases;
let mut fh = match File::open(&self.filename) {
Err(_) => return DnaSequence::default(),
Ok(fh) => fh,
};
if fh.seek(SeekFrom::Start(file_offset as u64)).is_err() {
return DnaSequence::default();
}
let sequence: Vec<DnaNucleotide> = fh.bytes()
.map(|b| b.unwrap() as char)
.take_while(|c| *c != '>') // Break at new record
.filter(|c| ! c.is_whitespace() ) // drop whitespaces
.take(length)
.map(|c| DnaNucleotide::from(c))
.collect();
DnaSequence::from(sequence)
}
}
impl fmt::Display for FaiSequence {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"FaiSequence[{}:{}bp]",
self.record.name(),
self.record.length()
)
}
}
#[cfg(test)]
mod tests {
use io::fai::*;
use sequence::*;
#[test]
fn test_a(){
let index_result = FaiIndex::read_fai(&"testdata/toy.fasta.fai");
assert!(index_result.is_ok());
let index = index_result.unwrap();
let record = index.find_record(&"ref").expect(&"Expected to find a record with name 'ref'");
let chrom = FaiSequence::new(record, &"testdata/toy.fasta");
assert_eq!(chrom.subsequence(0,4).to_string(), "AGCA");
assert_eq!(chrom.subsequence(1,3).to_string(), "GCA");
}
} | }
}
impl Sequence<DnaNucleotide> for FaiSequence {
type SubsequenceType = DnaSequence; | random_line_split |
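// Worked example of the seek arithmetic in `subsequence` above: for a FASTA
// record with linebases = 60 and linewidth = 61 (60 bases plus one newline),
// offset 130 gives n_lines = 130 / 60 = 2 and n_bases = 130 - 2 * 60 = 10,
// so reading starts at record.offset() + 2 * 61 + 10 bytes into the file.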
sequence.rs |
use io::fai::FaiRecord;
use sequence::*;
use std::fmt;
use std::fs::File;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::path::Path;
use std::path::PathBuf;
#[derive(Clone, Debug)]
pub struct FaiSequence {
record: FaiRecord,
filename: PathBuf,
}
impl FaiSequence {
pub fn new<P: AsRef<Path>>(record: FaiRecord, filename: &P) -> FaiSequence {
FaiSequence {
record: record,
filename: filename.as_ref().to_path_buf(),
}
}
}
impl Sequence<DnaNucleotide> for FaiSequence {
type SubsequenceType = DnaSequence;
fn length(&self) -> usize |
fn vec(&self) -> Vec<DnaNucleotide> {
self.subsequence(0, self.length()).vec()
}
fn subsequence(&self, offset: usize, length: usize) -> DnaSequence {
let n_lines = offset / self.record.linebases();
let n_bases = offset - (n_lines * self.record.linebases());
let file_offset = self.record.offset() + n_lines * self.record.linewidth() + n_bases;
let mut fh = match File::open(&self.filename) {
Err(_) => return DnaSequence::default(),
Ok(fh) => fh,
};
if fh.seek(SeekFrom::Start(file_offset as u64)).is_err() {
return DnaSequence::default();
}
let sequence: Vec<DnaNucleotide> = fh.bytes()
.map(|b| b.unwrap() as char)
.take_while(|c| *c != '>') // Break at new record
.filter(|c| ! c.is_whitespace() ) // drop whitespaces
.take(length)
.map(|c| DnaNucleotide::from(c))
.collect();
DnaSequence::from(sequence)
}
}
impl fmt::Display for FaiSequence {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"FaiSequence[{}:{}bp]",
self.record.name(),
self.record.length()
)
}
}
#[cfg(test)]
mod tests {
use io::fai::*;
use sequence::*;
#[test]
fn test_a(){
let index_result = FaiIndex::read_fai(&"testdata/toy.fasta.fai");
assert!(index_result.is_ok());
let index = index_result.unwrap();
let record = index.find_record(&"ref").expect(&"Expected to find a record with name 'ref'");
let chrom = FaiSequence::new(record, &"testdata/toy.fasta");
assert_eq!(chrom.subsequence(0,4).to_string(), "AGCA");
assert_eq!(chrom.subsequence(1,3).to_string(), "GCA");
}
} | {
self.record.length()
} | identifier_body |
sequence.rs |
use io::fai::FaiRecord;
use sequence::*;
use std::fmt;
use std::fs::File;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::path::Path;
use std::path::PathBuf;
#[derive(Clone, Debug)]
pub struct | {
record: FaiRecord,
filename: PathBuf,
}
impl FaiSequence {
pub fn new<P: AsRef<Path>>(record: FaiRecord, filename: &P) -> FaiSequence {
FaiSequence {
record: record,
filename: filename.as_ref().to_path_buf(),
}
}
}
impl Sequence<DnaNucleotide> for FaiSequence {
type SubsequenceType = DnaSequence;
fn length(&self) -> usize {
self.record.length()
}
fn vec(&self) -> Vec<DnaNucleotide> {
self.subsequence(0, self.length()).vec()
}
fn subsequence(&self, offset: usize, length: usize) -> DnaSequence {
let n_lines = offset / self.record.linebases();
let n_bases = offset - (n_lines * self.record.linebases());
let file_offset = self.record.offset() + n_lines * self.record.linewidth() + n_bases;
let mut fh = match File::open(&self.filename) {
Err(_) => return DnaSequence::default(),
Ok(fh) => fh,
};
if fh.seek(SeekFrom::Start(file_offset as u64)).is_err() {
return DnaSequence::default();
}
let sequence: Vec<DnaNucleotide> = fh.bytes()
.map(|b| b.unwrap() as char)
.take_while(|c| *c != '>') // Break at new record
.filter(|c| ! c.is_whitespace() ) // drop whitespaces
.take(length)
.map(|c| DnaNucleotide::from(c))
.collect();
DnaSequence::from(sequence)
}
}
impl fmt::Display for FaiSequence {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"FaiSequence[{}:{}bp]",
self.record.name(),
self.record.length()
)
}
}
#[cfg(test)]
mod tests {
use io::fai::*;
use sequence::*;
#[test]
fn test_a(){
let index_result = FaiIndex::read_fai(&"testdata/toy.fasta.fai");
assert!(index_result.is_ok());
let index = index_result.unwrap();
let record = index.find_record(&"ref").expect(&"Expected to find a record with name 'ref'");
let chrom = FaiSequence::new(record, &"testdata/toy.fasta");
assert_eq!(chrom.subsequence(0,4).to_string(), "AGCA");
assert_eq!(chrom.subsequence(1,3).to_string(), "GCA");
}
} | FaiSequence | identifier_name |
Battery20Rounded.js | import React from 'react'; |
export default createSvgIcon(
<React.Fragment><path fill="none" d="M0 0h24v24H0V0z" /><g><path d="M7 17v3.67C7 21.4 7.6 22 8.33 22h7.33c.74 0 1.34-.6 1.34-1.33V17H7z" /><path fillOpacity=".3" d="M17 5.33C17 4.6 16.4 4 15.67 4H14V3c0-.55-.45-1-1-1h-2c-.55 0-1 .45-1 1v1H8.33C7.6 4 7 4.6 7 5.33V17h10V5.33z" /></g></React.Fragment>
, 'Battery20Rounded'); | import createSvgIcon from './utils/createSvgIcon'; | random_line_split |
MapScreen.js | import React, { Component } from 'react';
import ReactDOM from 'react-dom';
// import {Map, InfoWindow, Marker, Polygon, GoogleApiWrapper} from 'google-maps-react';
import history from '../history';
import { poiClusters } from '../Config/sampleMapClusters';
import appConfig from '../Config/params';
import MapComponent from '../Components/MapComponent';
const map_key = appConfig.googlemaps.key;
const POIClustersData = poiClusters;
const DEFAULT_PADDING = { top: 40, right: 40, bottom: 40, left: 40 };
export default class MapScreen extends React.Component {
constructor(props) {
super(props);
// const window_google_instance = this.props.google;
this.state = { width: '0', height: '0' };
this.updateWindowDimensions = this.updateWindowDimensions.bind(this);
this.mapMarkers = this.parseMarkers();
this.state.mapDataLoaded = false;
this.state.mapMarkersData = [];
this.state.mapClustersData = [];
this.state.selectedCluster = 0;
this.polygons = poiClusters;
this.initialRegion = {
lat: 39.135452,
//lng: -94.577164
lng: -94.577350
};
}
componentWillMount() {
fetch(appConfig.app.API_BASE_URL+'clusters')
.then(response => response.json())
.then((responseData) => {
// console.log(JSON.parse(responseData.body));
// venue.showInMap = "Yes"
let apiResponseData = JSON.parse(responseData.body);
let allMarkersData = apiResponseData.markers;
let allClustersData = apiResponseData.clusters;
let tempMarkersData= [];
let tempClustersData = [];
for (var i = 0; i < allMarkersData.length; i++) {
if(allMarkersData[i].showInMap == "Yes")
{
allMarkersData[i].latlng = {
lat: allMarkersData[i].latlng.latitude,
lng: allMarkersData[i].latlng.longitude
};
tempMarkersData.push(allMarkersData[i]);
}
}
for (var i = 0; i < allClustersData.length; i++) {
let tmpData = allClustersData[i];
tmpData.coordinates = this.transformClusterCoordinates(allClustersData[i].coordinates);
// tmpData.coordinates = allClustersData[i].coordinates;
tempClustersData.push(tmpData);
}
console.log("allMarkersData - ");
console.log(allMarkersData.length);
console.log("markers to show on map Data - ");
console.log(tempMarkersData);
console.log("all clusters Data - ");
console.log(tempClustersData);
this.setState({ mapMarkersData: tempMarkersData });
this.setState({ mapClustersData: tempClustersData });
this.setState({ mapDataLoaded: true });
});
}
transformClusterCoordinates(inputCoordinates)
{
let tmpData = [];
for (let j = 0; j < inputCoordinates.length; j++) {
tmpData.push({
lat: inputCoordinates[j].latitude,
lng: inputCoordinates[j].longitude
});
}
return tmpData;
}
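// e.g. transformClusterCoordinates([{ latitude: 39.13, longitude: -94.57 }])
// yields [{ lat: 39.13, lng: -94.57 }], the LatLng literal shape the map
// component expects (sample values only).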
val2key(val,array){
for (var key in array) {
let this_val = array[key];
if(this_val.key == val){
return key;
}
}
}
parseMarkers()
{
let markers = [];
for (var i = POIClustersData.length - 1; i >= 0; i--) {
for (var j = POIClustersData[i].pois.length - 1; j >= 0; j--) {
markers.push(POIClustersData[i].pois[j]);
}
}
return markers;
}
/**
* check if input latlong is inside any of the polygons
* if yes return that polygon
* else return false
*/
pointInPolygons(point)
{
var tmpFlag = false;
for (var i = POIClustersData.length - 1; i >= 0; i--) {
tmpFlag = this.pointInPoly(point, POIClustersData[i].polygonOverlay.coordinates);
if(tmpFlag)
{
break; | {
return POIClustersData[i];
}
else
{
return tmpFlag;
}
}
/**
* Check if point(latlong object) is inside polygon
* Returns boolean true or false
*/
pointInPoly(point, polygon) {
// ray-casting algorithm based on
// http://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html
var x = point.latitude, y = point.longitude;
var inside = false;
for (var i = 0, j = polygon.length - 1; i < polygon.length; j = i++) {
var xi = polygon[i].latitude, yi = polygon[i].longitude;
var xj = polygon[j].latitude, yj = polygon[j].longitude;
var intersect = ((yi > y) != (yj > y))
&& (x < (xj - xi) * (y - yi) / (yj - yi) + xi);
if (intersect) inside = !inside;
}
return inside;
};
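// Illustrative call (hypothetical coordinates): given a ring of
// {latitude, longitude} points,
//   this.pointInPoly({ latitude: 39.1354, longitude: -94.5773 }, ring)
// returns true when the point lies inside the ring; points exactly on an
// edge may resolve either way, which is typical of ray casting.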
componentDidMount() {
// this.state.markerPoint = new window.google.maps.Point(32,32);
// this.state.markerSize = new window.google.maps.Size(64,64);
this.updateWindowDimensions();
window.addEventListener('resize', this.updateWindowDimensions);
}
componentWillUnmount() {
window.removeEventListener('resize', this.updateWindowDimensions);
}
updateWindowDimensions() {
this.setState({ width: window.innerWidth, height: window.innerHeight });
}
onPolygonClick(e, polygon_key)
{
// alert(polygon_key);
console.log("Polygon key - ");
console.log(polygon_key);
let selected_polygon = null;
for (var i = POIClustersData.length - 1; i >= 0; i--) {
if(POIClustersData[i].polygon.key==polygon_key)
{
selected_polygon = POIClustersData[i];
}
}
if(selected_polygon)
{
this.fitPolygonToScreen(selected_polygon);
}
else
{
console.log("No selected polygon found.");
}
}
onMarkerClick(e, venue_key)
{
// alert(venue_key);
console.log(this.props);
// this.props.params = {currentVenueId: venue_key};
history.replace('/venue',{venue_key:venue_key});
}
/**
* Fit map to polygon coordinates
*/
fitPolygonToScreen(polygon)
{
this.map.fitToCoordinates(polygon.polygonOverlay.coordinates, {
edgePadding: DEFAULT_PADDING,
animated: true,
});
}
onMapClicked(e)
{
console.log(e);
// alert("Map clicked");
}
render() {
// markerPoint = new this.props.google.maps.Point(32,32)
// markerSize = new this.props.google.maps.Size(64,64)
//<MapComponent initialRegion={this.initialRegion} mapMarkers={this.state.mapMarkersData} mapClusters={this.state.mapClustersData} />
return (
<div>
{
this.state.mapDataLoaded ?
<span>
<MapComponent initialRegion={this.initialRegion} mapMarkers={this.state.mapMarkersData} mapClusters={this.state.mapClustersData} />
</span>
:
<span> Loading maps data, please wait.</span>
}
</div>
);
}
}
const style = {
width: '100%',
height: '100%'
} | }
}
if(tmpFlag) | random_line_split |
MapScreen.js | import React, { Component } from 'react';
import ReactDOM from 'react-dom';
// import {Map, InfoWindow, Marker, Polygon, GoogleApiWrapper} from 'google-maps-react';
import history from '../history';
import { poiClusters } from '../Config/sampleMapClusters';
import appConfig from '../Config/params';
import MapComponent from '../Components/MapComponent';
const map_key = appConfig.googlemaps.key;
const POIClustersData = poiClusters;
const DEFAULT_PADDING = { top: 40, right: 40, bottom: 40, left: 40 };
export default class MapScreen extends React.Component {
constructor(props) {
super(props);
// const window_google_instance = this.props.google;
this.state = { width: '0', height: '0' };
this.updateWindowDimensions = this.updateWindowDimensions.bind(this);
this.mapMarkers = this.parseMarkers();
this.state.mapDataLoaded = false;
this.state.mapMarkersData = [];
this.state.mapClustersData = [];
this.state.selectedCluster = 0;
this.polygons = poiClusters;
this.initialRegion = {
lat: 39.135452,
//lng: -94.577164
lng: -94.577350
};
}
componentWillMount() {
fetch(appConfig.app.API_BASE_URL+'clusters')
.then(response => response.json())
.then((responseData) => {
// console.log(JSON.parse(responseData.body));
// venue.showInMap = "Yes"
let apiResponseData = JSON.parse(responseData.body);
let allMarkersData = apiResponseData.markers;
let allClustersData = apiResponseData.clusters;
let tempMarkersData= [];
let tempClustersData = [];
for (var i = 0; i < allMarkersData.length; i++) {
if(allMarkersData[i].showInMap == "Yes")
{
allMarkersData[i].latlng = {
lat: allMarkersData[i].latlng.latitude,
lng: allMarkersData[i].latlng.longitude
};
tempMarkersData.push(allMarkersData[i]);
}
}
for (var i = 0; i < allClustersData.length; i++) {
let tmpData = allClustersData[i];
tmpData.coordinates = this.transformClusterCoordinates(allClustersData[i].coordinates);
// tmpData.coordinates = allClustersData[i].coordinates;
tempClustersData.push(tmpData);
}
console.log("allMarkersData - ");
console.log(allMarkersData.length);
console.log("markers to show on map Data - ");
console.log(tempMarkersData);
console.log("all clusters Data - ");
console.log(tempClustersData);
this.setState({ mapMarkersData: tempMarkersData });
this.setState({ mapClustersData: tempClustersData });
this.setState({ mapDataLoaded: true });
});
}
transformClusterCoordinates(inputCoordinates)
{
let tmpData = [];
for (let j = 0; j < inputCoordinates.length; j++) {
tmpData.push({
lat: inputCoordinates[j].latitude,
lng: inputCoordinates[j].longitude
});
}
return tmpData;
}
val2key(val,array){
for (var key in array) {
let this_val = array[key];
if(this_val.key == val){
return key;
}
}
}
parseMarkers()
{
let markers = [];
for (var i = POIClustersData.length - 1; i >= 0; i--) {
for (var j = POIClustersData[i].pois.length - 1; j >= 0; j--) {
markers.push(POIClustersData[i].pois[j]);
}
}
return markers;
}
/**
* check if input latlong is inside any of the polygons
* if yes return that polygon
* else return false
*/
pointInPolygons(point)
{
var tmpFlag = false;
for (var i = POIClustersData.length - 1; i >= 0; i--) |
if(tmpFlag)
{
return POIClustersData[i];
}
else
{
return tmpFlag;
}
}
/**
* Check if point(latlong object) is inside polygon
* Returns boolean true or false
*/
pointInPoly(point, polygon) {
// ray-casting algorithm based on
// http://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html
var x = point.latitude, y = point.longitude;
var inside = false;
for (var i = 0, j = polygon.length - 1; i < polygon.length; j = i++) {
var xi = polygon[i].latitude, yi = polygon[i].longitude;
var xj = polygon[j].latitude, yj = polygon[j].longitude;
var intersect = ((yi > y) != (yj > y))
&& (x < (xj - xi) * (y - yi) / (yj - yi) + xi);
if (intersect) inside = !inside;
}
return inside;
};
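/*
 * Worked example for the ray-casting test above, treating latitude/longitude as
 * plain x/y (values are illustrative):
 *
 *   const square = [
 *     { latitude: 0, longitude: 0 }, { latitude: 1, longitude: 0 },
 *     { latitude: 1, longitude: 1 }, { latitude: 0, longitude: 1 }
 *   ];
 *   this.pointInPoly({ latitude: 0.5, longitude: 0.5 }, square); // true: the +x ray crosses one edge
 *   this.pointInPoly({ latitude: 1.5, longitude: 0.5 }, square); // false: the +x ray crosses no edge
 */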
componentDidMount() {
// this.state.markerPoint = new window.google.maps.Point(32,32);
// this.state.markerSize = new window.google.maps.Size(64,64);
this.updateWindowDimensions();
window.addEventListener('resize', this.updateWindowDimensions);
}
componentWillUnmount() {
window.removeEventListener('resize', this.updateWindowDimensions);
}
updateWindowDimensions() {
this.setState({ width: window.innerWidth, height: window.innerHeight });
}
onPolygonClick(e, polygon_key)
{
// alert(polygon_key);
console.log("Polygon key - ");
console.log(polygon_key);
let selected_polygon = null;
for (var i = POIClustersData.length - 1; i >= 0; i--) {
if(POIClustersData[i].polygon.key==polygon_key)
{
selected_polygon = POIClustersData[i];
}
}
if(selected_polygon)
{
this.fitPolygonToScreen(selected_polygon);
}
else
{
console.log("No selected polygon found.");
}
}
onMarkerClick(e, venue_key)
{
// alert(venue_key);
console.log(this.props);
// this.props.params = {currentVenueId: venue_key};
history.replace('/venue',{venue_key:venue_key});
}
/**
* Fit the map to the polygon's coordinates.
* Note: this assumes a map ref exposing fitToCoordinates (a react-native-maps
* style API); `this.map` is never assigned in this file, so the ref wiring is
* still missing.
*/
fitPolygonToScreen(polygon)
{
this.map.fitToCoordinates(polygon.polygonOverlay.coordinates, {
edgePadding: DEFAULT_PADDING,
animated: true,
});
}
onMapClicked(e)
{
console.log(e);
// alert("Map clicked");
}
render() {
// markerPoint = new this.props.google.maps.Point(32,32)
// markerSize = new this.props.google.maps.Size(64,64)
//<MapComponent initialRegion={this.initialRegion} mapMarkers={this.state.mapMarkersData} mapClusters={this.state.mapClustersData} />
return (
<div>
{
this.state.mapDataLoaded ?
<span>
<MapComponent initialRegion={this.initialRegion} mapMarkers={this.state.mapMarkersData} mapClusters={this.state.mapClustersData} />
</span>
:
<span> Loading maps data, please wait.</span>
}
</div>
);
}
}
const style = {
width: '100%',
height: '100%'
}
index.d.ts

// Type definitions for bell 9.3
// Project: https://github.com/hapijs/bell
// Definitions by: Simon Schick <https://github.com/SimonSchick>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.8
import { Server, Request, Plugin, AuthCredentials } from 'hapi';
declare module 'hapi' {
interface ServerAuth {
strategy(name: string, scheme: 'bell', options: BellOptions): void;
strategy(
name: string,
scheme: 'bell',
mode: boolean | 'required' | 'optional' | 'try',
options: BellOptions,
): void;
}
}
export interface StringLikeMap {
[key: string]: string | number;
}

export type Provider =
'arcgisonline' |
'auth0' |
'azuread' |
'bitbucket' |
'digitalocean' |
'discord' |
'dropbox' |
'dropboxV2' |
'facebook' |
'fitbit' |
'foursquare' |
'github' |
'gitlab' |
'google' |
'googleplus' |
'instagram' |
'linkedin' |
'live' |
'medium' |
'meetup' |
'mixer' |
'nest' |
'office365' |
'okta' |
'phabricator' |
'pingfed' |
'pinterest' |
'reddit' |
'salesforce' |
'slack' |
'spotify' |
'stripe' |
'trakt' |
'tumblr' |
'twitch' |
'twitter' |
'vk' |
'wordpress' |
'yahoo';
export type RequestPassThrough = (request: Request) => PromiseLike<AuthCredentials> | AuthCredentials;
export interface OptionalOptions {
/**
* the name of the cookie used to manage the temporary state.
* Defaults to 'bell-provider' where 'provider' is the provider name (or 'custom' for custom providers).
* For example, the Twitter cookie name defaults to 'bell-twitter'.
*/
cookie?: string;
/**
* sets the cookie secure flag.
* Defaults to true.
*/
isSecure?: boolean;
/**
* sets the cookie HTTP only flag.
* Defaults to true.
*/
isHttpOnly?: boolean;
/**
* cookie time-to-live in milliseconds.
* Defaults to null (session lifetime - cookies are deleted when the browser is closed).
*/
ttl?: number;
/**
* the domain scope.
* Defaults to null (no domain).
*/
domain?: string;
/**
* provider-specific query parameters for the authentication endpoint.
* It may be passed either as an object to merge into the query string,
* or a function which takes the client's request and returns an object.
* Each provider supports its own set of parameters which customize the user's login experience.
* For example:
* * Facebook supports `display` ('page', 'popup', or 'touch'), `auth_type`, `auth_nonce`.
* * Google supports `access_type`, `approval_prompt`, `prompt`, `login_hint`, `user_id`, `hd`.
* * Twitter supports `force_login`, `screen_name`.
* * Linkedin supports `fields`.
*/
providerParams?: StringLikeMap | ((request: Request) => StringLikeMap);
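/*
 * e.g. (illustrative) either a static object or a per-request function:
 *
 *   providerParams: { display: 'popup' }
 *   providerParams: (request) => ({ screen_name: request.query.handle })
 */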
/**
* allows passing query parameters from a bell protected endpoint to the auth request.
* It will merge the query params you pass along with the providerParams and any other predefined ones.
* Be aware that this will override predefined query parameters!
* Default to false.
*/
allowRuntimeProviderParams?: StringLikeMap | boolean;
/**
* Each built-in vendor comes with the required scope for basic profile information.
* Use scope to specify a different scope as required by your application.
* It may be passed either as an object to merge into the query string,
* or a function which takes the client's request and returns an object.
* Consult the provider for their specific supported scopes.
*/
scope?: string[] | ((request: Request) => string[]);
/**
* skips obtaining a user profile from the provider.
* Useful if you need specific scopes,
* but not the user profile.
* Defaults to false.
*/
skipProfile?: boolean;
/**
* a configuration object used to customize the provider settings.
* The built-in 'twitter' provider accepts the `extendedProfile` & `getMethod` options;
* `extendedProfile` allows disabling the extra profile request when the provider
* returns the user information in the callback (defaults to true).
* The built-in 'github' and 'phabricator' providers accept the uri
* option which allows pointing to a private enterprise installation (e.g. 'https://vpn.example.com').
* See Providers documentation for more information.
*/
config?: { extendedProfile?: boolean; getMethod?: string } | { uri?: string };
/**
* an object of key-value pairs that specify additional
* URL query parameters to send with the profile request to the provider.
* The built-in facebook provider,
* for example, could have fields specified to determine the fields returned from the user's graph,
* which would then be available to you in the auth.credentials.profile.raw object.
*/
profileParams?: StringLikeMap;
/**
* allows passing additional OAuth state from initial request.
* This must be a function returning a string,
* which will be appended to the bell internal state parameter for OAuth code flow.
*/
runtimeStateCallback?(req: Request): string;
// THESE ARE IN THE *REQUIRED* section but are actually not...
/**
* A boolean indicating whether or not you want the redirect_uri to be forced to https.
* Useful if your hapi application runs as http, but is accessed through https.
*/
forceHttps?: boolean;
/**
* Set the base redirect_uri manually if it cannot be inferred properly from server settings.
* Useful to override port, protocol, and host if proxied or forwarded.
*/
location?: string | ((req: Request) => string);
}
export interface RequiredProviderOptions {
/**
* the cookie encryption password.
* Used to encrypt the temporary state cookie used by the module in
* between the authorization protocol steps.
*/
password: string;
/**
* the OAuth client identifier (consumer key).
*/
clientId: string;
/**
* the OAuth client secret (consumer secret)
*/
clientSecret: string;
}
export interface KnownProviderOptions extends RequiredProviderOptions, OptionalOptions {
provider: Provider;
}
/**
* @param uri the requested resource URI (bell will add the token or authentication header as needed).
* @param params any URI query parameters (cannot include them in the URI due to signature requirements).
*/
export type AuthedRequest = (uri: string, params?: { [key: string]: string }) => Promise<object>;
export interface Credentials {
provider: Provider | 'custom';
token: string;
query: StringLikeMap;
/**
* Varying data depending on provider.
*/
profile?: object;
}
export interface Credentials1 extends Credentials {
secret: string;
}
export interface Credentials2 extends Credentials {
refreshToken?: string;
expiresIn?: number;
}
export interface CustomProtocol {
/**
* The name of the protocol.
* @default custom
*/
name?: string;
/**
* the authorization endpoint URI.
*/
auth: string;
/**
* the access token endpoint URI.
*/
token: string;
/**
* a headers object with additional headers required by the provider
* (e.g. GitHub required the 'User-Agent' header which is set by default).
*/
headers?: {
[key: string]: string;
};
}
/**
* a function used to obtain user profile information and normalize it.
* @param credentials the credentials object.
* Change the object directly within the function (profile information is typically stored under credentials.profile).
* @param params the parsed information received from the provider (e.g. token, secret, and other custom fields).
* @param get an OAuth helper function to make authenticated requests using the credentials received.
*/
export type ProfileGetter<C extends Credentials> = (this: CustomProviderOptions, credentials: C, params: { [key: string]: string }, get: AuthedRequest) => Promise<void>;
export interface CustomProtocol1 extends CustomProtocol {
/**
* the authorization protocol used.
*/
protocol: 'oauth';
/**
* the OAuth signature method. Must be one of:
* * 'HMAC-SHA1' - default
* * 'RSA-SHA1' - in that case, the clientSecret is your RSA private key
*/
signatureMethod?: 'HMAC-SHA1' | 'RSA-SHA1';
/**
* the temporary credentials (request token) endpoint).
*/
temporary?: string;
profile: ProfileGetter<Credentials1>;
}
export interface CustomProtocol2 extends CustomProtocol {
/**
* the authorization protocol used.
*/
protocol: 'oauth2';
/**
* an array of scope strings.
*/
scope?: string[] | ((query: StringLikeMap) => string[]);
/**
* boolean that determines if OAuth client id and client secret will be sent
* as parameters as opposed to an Authorization header.
* Defaults to false.
*/
useParamsAuth?: boolean;
/**
* the scope separator character. Only required when a provider has a broken OAuth 2.0 implementation. Defaults to space (Facebook and GitHub default to comma).
*/
scopeSeparator?: string;
profile: ProfileGetter<Credentials2>;
}
export interface CustomProviderOptions extends RequiredProviderOptions, OptionalOptions {
provider: CustomProtocol1 | CustomProtocol2;
}
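/*
 * Illustrative custom provider sketch (endpoints and scope are placeholders):
 *
 *   const customProvider: CustomProtocol2 = {
 *     protocol: 'oauth2',
 *     useParamsAuth: true,
 *     auth: 'https://id.example.com/oauth/authorize',
 *     token: 'https://id.example.com/oauth/token',
 *     scope: ['profile'],
 *     profile: async function (credentials, params, get) {
 *       credentials.profile = await get('https://id.example.com/userinfo');
 *     }
 *   };
 */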
export type BellOptions = CustomProviderOptions | KnownProviderOptions;
export const plugin: Plugin<BellOptions>;
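/*
 * Illustrative registration sketch (not part of these typings; credentials are
 * placeholders):
 *
 *   import * as Bell from 'bell';
 *
 *   await server.register(Bell.plugin);
 *   server.auth.strategy('twitter', 'bell', {
 *     provider: 'twitter',
 *     password: 'cookie-encryption-password-of-32-chars-min', // placeholder
 *     clientId: 'TWITTER_CONSUMER_KEY',                       // placeholder
 *     clientSecret: 'TWITTER_CONSUMER_SECRET'                 // placeholder
 *   });
 */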
/**
* Enables simulation mode.
*/
export function simulate(credentialsFunc: RequestPassThrough): void;
/**
* [See docs](https://github.com/hapijs/bell/blob/master/API.md#simulated-authentication)
* Disables simulation mode
*/
export function simulate(state: false): void;
blocked.js

var blockedModule = {
find: function (UserBlocked, user, blocked) {
return UserBlocked.findOne({
or: [{
user: user.id,
blocked: blocked.id
}, {
blocked: user.id,
user: blocked.id
}]
});
},
isUserBlocked: function (UserBlocked, user, blocked) {
return blockedModule.find(UserBlocked, user, blocked).then(function (found) {
return found;
});
},
create: function (UserBlocked, user, blocked) {
return blockedModule.find(UserBlocked, user, blocked).then(function (found) {
if (found) {
return found;
}
return UserBlocked.create({
user: user.id,
blocked: blocked.id
}).then(function (created) {
return created;
});
});
},
remove: function (UserBlocked, user, blocked) {
return UserBlocked.destroy({
user: user.id,
blocked: blocked.id
});
}
};
module.exports = blockedModule;
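// Illustrative usage sketch (assumes `UserBlocked` is a Waterline-style model
// and `alice`/`bob` are user records with `id` fields; the names are hypothetical):
//
//   blockedModule.create(UserBlocked, alice, bob)
//     .then(function (record) { /* bob is now blocked by alice */ });
//
//   blockedModule.isUserBlocked(UserBlocked, alice, bob)
//     .then(function (found) { /* truthy when either user has blocked the other */ });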
HypothermicPresence.tsx

import React from "react";
import Analyzer, { Options } from "parser/core/Analyzer";
import SPELLS from "common/SPELLS";
import Statistic from "parser/ui/Statistic";
import { STATISTIC_ORDER } from "parser/ui/StatisticBox";
import BoringSpellValue from "parser/ui/BoringSpellValue";
import RunicPowerTracker from "../runicpower/RunicPowerTracker";
/** reduces the Runic Power cost of your abilities by 35% for 8 sec */
class HypothermicPresence extends Analyzer {
static dependencies = {
runicPowerTracker: RunicPowerTracker,
}
protected runicPowerTracker!: RunicPowerTracker;
constructor(options: Options) {
super(options);
this.active = this.selectedCombatant.hasTalent(SPELLS.HYPOTHERMIC_PRESENCE_TALENT.id);
if (!this.active) {
return;
}
}
statistic() {
return (
<Statistic
position={STATISTIC_ORDER.OPTIONAL(50)}
size="flexible"
>
<BoringSpellValue
spell={SPELLS.HYPOTHERMIC_PRESENCE_TALENT}
value={`${this.runicPowerTracker.totalHypothermicPresenceReduction}`}
label="Runic Power saved"
/>
</Statistic>
)
}
}
export default HypothermicPresence;
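// Illustrative arithmetic for the 35% reduction above (exact in-game rounding may
// differ): a 45 RP ability cast during the buff costs 45 * (1 - 0.35) = 29.25 RP,
// so roughly 16 RP would count toward the "Runic Power saved" total.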