file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
ExceptionDecoService.js | 'use strict';
angular
.module('app.module')
.factory('exception', exception);
exception.$inject = ['logger'];
function exception(logger) |
/*
angular //TODO: get back to this
.module('blocks.exception')
.config(exceptionConfig);
exceptionConfig.$inject = ['$provide'];
function exceptionConfig($provide) {
$provide.decorator('$exceptionHandler', extendExceptionHandler);
}
extendExceptionHandler.$inject = ['$delegate'];
function extendExceptionHandler($delegate) {
return function(exception, cause) {
$delegate(exception, cause);
var errorData = {
exception: exception,
cause: cause
};
//toastr.error(exception.msg, errorData);
console.error("ExceptionDecoService: " +exception.msg +" " +errorData)
};
}
*/ | {
var service = {
catcher: catcher
};
return service;
function catcher(message) {
return function(reason) {
logger.error(message, reason);
};
}
} | identifier_body |
ExceptionDecoService.js | 'use strict';
angular
.module('app.module')
.factory('exception', exception);
exception.$inject = ['logger'];
function exception(logger) {
var service = {
catcher: catcher
};
return service;
function catcher(message) {
return function(reason) {
logger.error(message, reason);
};
}
}
/*
angular //TODO: get back to this
.module('blocks.exception')
.config(exceptionConfig);
exceptionConfig.$inject = ['$provide'];
function exceptionConfig($provide) {
$provide.decorator('$exceptionHandler', extendExceptionHandler); | extendExceptionHandler.$inject = ['$delegate'];
function extendExceptionHandler($delegate) {
return function(exception, cause) {
$delegate(exception, cause);
var errorData = {
exception: exception,
cause: cause
};
//toastr.error(exception.msg, errorData);
console.error("ExceptionDecoService: " +exception.msg +" " +errorData)
};
}
*/ | }
| random_line_split |
ExceptionDecoService.js | 'use strict';
angular
.module('app.module')
.factory('exception', exception);
exception.$inject = ['logger'];
function exception(logger) {
var service = {
catcher: catcher
};
return service;
function | (message) {
return function(reason) {
logger.error(message, reason);
};
}
}
/*
angular //TODO: get back to this
.module('blocks.exception')
.config(exceptionConfig);
exceptionConfig.$inject = ['$provide'];
function exceptionConfig($provide) {
$provide.decorator('$exceptionHandler', extendExceptionHandler);
}
extendExceptionHandler.$inject = ['$delegate'];
function extendExceptionHandler($delegate) {
return function(exception, cause) {
$delegate(exception, cause);
var errorData = {
exception: exception,
cause: cause
};
//toastr.error(exception.msg, errorData);
console.error("ExceptionDecoService: " +exception.msg +" " +errorData)
};
}
*/ | catcher | identifier_name |
systick.rs | //! # SysTick for the Cortex-M4F
//!
//! Each Cortex-M4 has a timer peripheral typically used for OS scheduling tick.
//! Here we configure it as a countdown timer that overflows every 2**24 ticks
//! (so about once a second at 16MHz), and maintain a separate atomic overflow
//! count to accurately track time since power-up.
// ****************************************************************************
//
// Imports
//
// ****************************************************************************
use core::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use cortex_m::peripheral as cm_periph;
// ****************************************************************************
//
// Public Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Public Data
//
// ****************************************************************************
/// SysTick is a 24-bit timer
pub const SYSTICK_MAX: usize = (1 << 24) - 1;
lazy_static! {
/// total number of times SysTick has wrapped
pub static ref SYSTICK_WRAP_COUNT:AtomicUsize = ATOMIC_USIZE_INIT;
}
// ****************************************************************************
//
// Private Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Private Data
//
// ****************************************************************************
// *****************************************************************************
//
// The following are defines for the bit fields in the NVIC_ST_CTRL register.
//
// *****************************************************************************
const NVIC_ST_CTRL_INTEN: usize = 0x00000002; // Interrupt Enable
const NVIC_ST_CTRL_ENABLE: usize = 0x00000001; // Enable
// ****************************************************************************
//
// Public Functions
//
// ****************************************************************************
/// Initialises the SysTick system.
///
/// We configure SysTick to run at PIOSC / 4, with the full 24 bit range.
pub fn init() {
unsafe {
let syst = &*cm_periph::SYST::ptr();
syst.rvr.write(SYSTICK_MAX as u32);
// A write to current resets the timer
syst.cvr.write(0);
// Set to multi-shot mode, with interrupts on and on the PIOSC / 4
syst.csr
.write((NVIC_ST_CTRL_ENABLE | NVIC_ST_CTRL_INTEN) as u32);
}
}
/// Should be attached to the SysTick vector in the interrupt vector table.
/// Called when SysTick hits zero. Increments an overflow counter atomically.
pub fn isr() {
SYSTICK_WRAP_COUNT.fetch_add(1, Ordering::Relaxed);
}
/// Returns how many times SysTick has overflowed.
pub fn get_overflows() -> usize {
SYSTICK_WRAP_COUNT.load(Ordering::Relaxed)
}
/// Gets the current SysTick value
pub fn get_ticks() -> usize {
unsafe { (*cm_periph::SYST::ptr()).cvr.read() as usize }
}
/// Returns (overflows, ticks), correctly handling the case that it overflowed
/// between the two separate reads that are required.
pub fn get_overflows_ticks() -> (usize, usize) {
let overflow1 = get_overflows();
let ticks = get_ticks();
let overflow2 = get_overflows();
if overflow1 != overflow2 | else {
// No overflow, good to go
(overflow1, ticks)
}
}
/// Calculates the elapsed period in SysTicks between `start` and the current value.
pub fn get_since(start: usize) -> usize {
let now = get_ticks();
// SysTick counts down! This subtraction is opposite to what you expect.
let delta = start.wrapping_sub(now) & SYSTICK_MAX;
delta
}
/// How long since the system booted in ticks.
/// The u64 is good for 584,000 years.
pub fn run_time_ticks() -> u64 {
let (overflows, ticks) = get_overflows_ticks();
let mut result: u64;
result = overflows as u64;
result *= (SYSTICK_MAX + 1) as u64;
result += (SYSTICK_MAX - ticks) as u64;
result
}
// ****************************************************************************
//
// Private Functions
//
// ****************************************************************************
// None
// ****************************************************************************
//
// End Of File
//
// ****************************************************************************
| {
// A overflow occurred while we were reading the tick register
// Should be safe to try again
(overflow2, get_ticks())
} | conditional_block |
systick.rs | //! # SysTick for the Cortex-M4F
//!
//! Each Cortex-M4 has a timer peripheral typically used for OS scheduling tick.
//! Here we configure it as a countdown timer that overflows every 2**24 ticks
//! (so about once a second at 16MHz), and maintain a separate atomic overflow
//! count to accurately track time since power-up.
// ****************************************************************************
//
// Imports
//
// ****************************************************************************
use core::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use cortex_m::peripheral as cm_periph;
// ****************************************************************************
//
// Public Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Public Data |
lazy_static! {
/// total number of times SysTick has wrapped
pub static ref SYSTICK_WRAP_COUNT:AtomicUsize = ATOMIC_USIZE_INIT;
}
// ****************************************************************************
//
// Private Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Private Data
//
// ****************************************************************************
// *****************************************************************************
//
// The following are defines for the bit fields in the NVIC_ST_CTRL register.
//
// *****************************************************************************
const NVIC_ST_CTRL_INTEN: usize = 0x00000002; // Interrupt Enable
const NVIC_ST_CTRL_ENABLE: usize = 0x00000001; // Enable
// ****************************************************************************
//
// Public Functions
//
// ****************************************************************************
/// Initialises the SysTick system.
///
/// We configure SysTick to run at PIOSC / 4, with the full 24 bit range.
pub fn init() {
unsafe {
let syst = &*cm_periph::SYST::ptr();
syst.rvr.write(SYSTICK_MAX as u32);
// A write to current resets the timer
syst.cvr.write(0);
// Set to multi-shot mode, with interrupts on and on the PIOSC / 4
syst.csr
.write((NVIC_ST_CTRL_ENABLE | NVIC_ST_CTRL_INTEN) as u32);
}
}
/// Should be attached to the SysTick vector in the interrupt vector table.
/// Called when SysTick hits zero. Increments an overflow counter atomically.
pub fn isr() {
SYSTICK_WRAP_COUNT.fetch_add(1, Ordering::Relaxed);
}
/// Returns how many times SysTick has overflowed.
pub fn get_overflows() -> usize {
SYSTICK_WRAP_COUNT.load(Ordering::Relaxed)
}
/// Gets the current SysTick value
pub fn get_ticks() -> usize {
unsafe { (*cm_periph::SYST::ptr()).cvr.read() as usize }
}
/// Returns (overflows, ticks), correctly handling the case that it overflowed
/// between the two separate reads that are required.
pub fn get_overflows_ticks() -> (usize, usize) {
let overflow1 = get_overflows();
let ticks = get_ticks();
let overflow2 = get_overflows();
if overflow1 != overflow2 {
// A overflow occurred while we were reading the tick register
// Should be safe to try again
(overflow2, get_ticks())
} else {
// No overflow, good to go
(overflow1, ticks)
}
}
/// Calculates the elapsed period in SysTicks between `start` and the current value.
pub fn get_since(start: usize) -> usize {
let now = get_ticks();
// SysTick counts down! This subtraction is opposite to what you expect.
let delta = start.wrapping_sub(now) & SYSTICK_MAX;
delta
}
/// How long since the system booted in ticks.
/// The u64 is good for 584,000 years.
pub fn run_time_ticks() -> u64 {
let (overflows, ticks) = get_overflows_ticks();
let mut result: u64;
result = overflows as u64;
result *= (SYSTICK_MAX + 1) as u64;
result += (SYSTICK_MAX - ticks) as u64;
result
}
// ****************************************************************************
//
// Private Functions
//
// ****************************************************************************
// None
// ****************************************************************************
//
// End Of File
//
// **************************************************************************** | //
// ****************************************************************************
/// SysTick is a 24-bit timer
pub const SYSTICK_MAX: usize = (1 << 24) - 1; | random_line_split |
systick.rs | //! # SysTick for the Cortex-M4F
//!
//! Each Cortex-M4 has a timer peripheral typically used for OS scheduling tick.
//! Here we configure it as a countdown timer that overflows every 2**24 ticks
//! (so about once a second at 16MHz), and maintain a separate atomic overflow
//! count to accurately track time since power-up.
// ****************************************************************************
//
// Imports
//
// ****************************************************************************
use core::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use cortex_m::peripheral as cm_periph;
// ****************************************************************************
//
// Public Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Public Data
//
// ****************************************************************************
/// SysTick is a 24-bit timer
pub const SYSTICK_MAX: usize = (1 << 24) - 1;
lazy_static! {
/// total number of times SysTick has wrapped
pub static ref SYSTICK_WRAP_COUNT:AtomicUsize = ATOMIC_USIZE_INIT;
}
// ****************************************************************************
//
// Private Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Private Data
//
// ****************************************************************************
// *****************************************************************************
//
// The following are defines for the bit fields in the NVIC_ST_CTRL register.
//
// *****************************************************************************
const NVIC_ST_CTRL_INTEN: usize = 0x00000002; // Interrupt Enable
const NVIC_ST_CTRL_ENABLE: usize = 0x00000001; // Enable
// ****************************************************************************
//
// Public Functions
//
// ****************************************************************************
/// Initialises the SysTick system.
///
/// We configure SysTick to run at PIOSC / 4, with the full 24 bit range.
pub fn init() {
unsafe {
let syst = &*cm_periph::SYST::ptr();
syst.rvr.write(SYSTICK_MAX as u32);
// A write to current resets the timer
syst.cvr.write(0);
// Set to multi-shot mode, with interrupts on and on the PIOSC / 4
syst.csr
.write((NVIC_ST_CTRL_ENABLE | NVIC_ST_CTRL_INTEN) as u32);
}
}
/// Should be attached to the SysTick vector in the interrupt vector table.
/// Called when SysTick hits zero. Increments an overflow counter atomically.
pub fn | () {
SYSTICK_WRAP_COUNT.fetch_add(1, Ordering::Relaxed);
}
/// Returns how many times SysTick has overflowed.
pub fn get_overflows() -> usize {
SYSTICK_WRAP_COUNT.load(Ordering::Relaxed)
}
/// Gets the current SysTick value
pub fn get_ticks() -> usize {
unsafe { (*cm_periph::SYST::ptr()).cvr.read() as usize }
}
/// Returns (overflows, ticks), correctly handling the case that it overflowed
/// between the two separate reads that are required.
pub fn get_overflows_ticks() -> (usize, usize) {
let overflow1 = get_overflows();
let ticks = get_ticks();
let overflow2 = get_overflows();
if overflow1 != overflow2 {
// A overflow occurred while we were reading the tick register
// Should be safe to try again
(overflow2, get_ticks())
} else {
// No overflow, good to go
(overflow1, ticks)
}
}
/// Calculates the elapsed period in SysTicks between `start` and the current value.
pub fn get_since(start: usize) -> usize {
let now = get_ticks();
// SysTick counts down! This subtraction is opposite to what you expect.
let delta = start.wrapping_sub(now) & SYSTICK_MAX;
delta
}
/// How long since the system booted in ticks.
/// The u64 is good for 584,000 years.
pub fn run_time_ticks() -> u64 {
let (overflows, ticks) = get_overflows_ticks();
let mut result: u64;
result = overflows as u64;
result *= (SYSTICK_MAX + 1) as u64;
result += (SYSTICK_MAX - ticks) as u64;
result
}
// ****************************************************************************
//
// Private Functions
//
// ****************************************************************************
// None
// ****************************************************************************
//
// End Of File
//
// ****************************************************************************
| isr | identifier_name |
systick.rs | //! # SysTick for the Cortex-M4F
//!
//! Each Cortex-M4 has a timer peripheral typically used for OS scheduling tick.
//! Here we configure it as a countdown timer that overflows every 2**24 ticks
//! (so about once a second at 16MHz), and maintain a separate atomic overflow
//! count to accurately track time since power-up.
// ****************************************************************************
//
// Imports
//
// ****************************************************************************
use core::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use cortex_m::peripheral as cm_periph;
// ****************************************************************************
//
// Public Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Public Data
//
// ****************************************************************************
/// SysTick is a 24-bit timer
pub const SYSTICK_MAX: usize = (1 << 24) - 1;
lazy_static! {
/// total number of times SysTick has wrapped
pub static ref SYSTICK_WRAP_COUNT:AtomicUsize = ATOMIC_USIZE_INIT;
}
// ****************************************************************************
//
// Private Types
//
// ****************************************************************************
// None
// ****************************************************************************
//
// Private Data
//
// ****************************************************************************
// *****************************************************************************
//
// The following are defines for the bit fields in the NVIC_ST_CTRL register.
//
// *****************************************************************************
const NVIC_ST_CTRL_INTEN: usize = 0x00000002; // Interrupt Enable
const NVIC_ST_CTRL_ENABLE: usize = 0x00000001; // Enable
// ****************************************************************************
//
// Public Functions
//
// ****************************************************************************
/// Initialises the SysTick system.
///
/// We configure SysTick to run at PIOSC / 4, with the full 24 bit range.
pub fn init() {
unsafe {
let syst = &*cm_periph::SYST::ptr();
syst.rvr.write(SYSTICK_MAX as u32);
// A write to current resets the timer
syst.cvr.write(0);
// Set to multi-shot mode, with interrupts on and on the PIOSC / 4
syst.csr
.write((NVIC_ST_CTRL_ENABLE | NVIC_ST_CTRL_INTEN) as u32);
}
}
/// Should be attached to the SysTick vector in the interrupt vector table.
/// Called when SysTick hits zero. Increments an overflow counter atomically.
pub fn isr() {
SYSTICK_WRAP_COUNT.fetch_add(1, Ordering::Relaxed);
}
/// Returns how many times SysTick has overflowed.
pub fn get_overflows() -> usize {
SYSTICK_WRAP_COUNT.load(Ordering::Relaxed)
}
/// Gets the current SysTick value
pub fn get_ticks() -> usize {
unsafe { (*cm_periph::SYST::ptr()).cvr.read() as usize }
}
/// Returns (overflows, ticks), correctly handling the case that it overflowed
/// between the two separate reads that are required.
pub fn get_overflows_ticks() -> (usize, usize) {
let overflow1 = get_overflows();
let ticks = get_ticks();
let overflow2 = get_overflows();
if overflow1 != overflow2 {
// A overflow occurred while we were reading the tick register
// Should be safe to try again
(overflow2, get_ticks())
} else {
// No overflow, good to go
(overflow1, ticks)
}
}
/// Calculates the elapsed period in SysTicks between `start` and the current value.
pub fn get_since(start: usize) -> usize |
/// How long since the system booted in ticks.
/// The u64 is good for 584,000 years.
pub fn run_time_ticks() -> u64 {
let (overflows, ticks) = get_overflows_ticks();
let mut result: u64;
result = overflows as u64;
result *= (SYSTICK_MAX + 1) as u64;
result += (SYSTICK_MAX - ticks) as u64;
result
}
// ****************************************************************************
//
// Private Functions
//
// ****************************************************************************
// None
// ****************************************************************************
//
// End Of File
//
// ****************************************************************************
| {
let now = get_ticks();
// SysTick counts down! This subtraction is opposite to what you expect.
let delta = start.wrapping_sub(now) & SYSTICK_MAX;
delta
} | identifier_body |
groupbox.py | from collections import namedtuple
from cairo import LINE_JOIN_ROUND
from zorro.di import di, dependency, has_dependencies
from tilenol.groups import GroupManager
from tilenol.commands import CommandDispatcher
from .base import Widget
from tilenol.theme import Theme
from tilenol.window import Window
GroupState = namedtuple(
'GroupState',
('name', 'empty', 'active', 'visible', 'urgent')
)
@has_dependencies
class State(object):
commander = dependency(CommandDispatcher, 'commander')
gman = dependency(GroupManager, 'group-manager')
def __init__(self):
self._state = None
def dirty(self):
return self._state != self._read()
def update(self):
nval = self._read()
if nval != self._state:
self._state = nval
return True
def _read(self):
cur = self.commander.get('group')
visgr = self.gman.current_groups.values()
return tuple(GroupState(g.name, g.empty, g is cur, g in visgr,
g.has_urgent_windows)
for g in self.gman.groups)
@property
def groups(self):
return self._state
@has_dependencies
class Groupbox(Widget):
theme = dependency(Theme, 'theme')
def | (self, *, filled=False, first_letter=False, right=False):
super().__init__(right=right)
self.filled = filled
self.first_letter = first_letter
def __zorro_di_done__(self):
self.state = di(self).inject(State())
bar = self.theme.bar
self.font = bar.font
self.inactive_color = bar.dim_color_pat
self.urgent_color = bar.bright_color_pat
self.active_color = bar.text_color_pat
self.selected_color = bar.active_border_pat
self.subactive_color = bar.subactive_border_pat
self.padding = bar.text_padding
self.border_width = bar.border_width
self.state.gman.group_changed.listen(self.bar.redraw.emit)
Window.any_window_changed.listen(self.check_state)
def check_state(self):
if self.state.dirty:
self.bar.redraw.emit()
def draw(self, canvas, l, r):
self.state.update()
assert not self.right, "Sorry, right not implemented"
self.font.apply(canvas)
canvas.set_line_join(LINE_JOIN_ROUND)
canvas.set_line_width(self.border_width)
x = l
between = self.padding.right + self.padding.left
for gs in self.state.groups:
gname = gs.name
if self.first_letter:
gname = gname[0]
sx, sy, w, h, ax, ay = canvas.text_extents(gname)
if gs.active:
canvas.set_source(self.selected_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
elif gs.visible:
canvas.set_source(self.subactive_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
if gs.urgent:
canvas.set_source(self.urgent_color)
elif gs.empty:
canvas.set_source(self.inactive_color)
else:
canvas.set_source(self.active_color)
canvas.move_to(x + self.padding.left,
self.height - self.padding.bottom)
canvas.show_text(gname)
x += ax + between
return x, r
| __init__ | identifier_name |
groupbox.py | from collections import namedtuple
from cairo import LINE_JOIN_ROUND
from zorro.di import di, dependency, has_dependencies
from tilenol.groups import GroupManager
from tilenol.commands import CommandDispatcher
from .base import Widget
from tilenol.theme import Theme
from tilenol.window import Window
GroupState = namedtuple(
'GroupState',
('name', 'empty', 'active', 'visible', 'urgent')
)
@has_dependencies
class State(object):
| def __init__(self):
self._state = None
def dirty(self):
return self._state != self._read()
def update(self):
nval = self._read()
if nval != self._state:
self._state = nval
return True
def _read(self):
cur = self.commander.get('group')
visgr = self.gman.current_groups.values()
return tuple(GroupState(g.name, g.empty, g is cur, g in visgr,
g.has_urgent_windows)
for g in self.gman.groups)
@property
def groups(self):
return self._state
@has_dependencies
class Groupbox(Widget):
theme = dependency(Theme, 'theme')
def __init__(self, *, filled=False, first_letter=False, right=False):
super().__init__(right=right)
self.filled = filled
self.first_letter = first_letter
def __zorro_di_done__(self):
self.state = di(self).inject(State())
bar = self.theme.bar
self.font = bar.font
self.inactive_color = bar.dim_color_pat
self.urgent_color = bar.bright_color_pat
self.active_color = bar.text_color_pat
self.selected_color = bar.active_border_pat
self.subactive_color = bar.subactive_border_pat
self.padding = bar.text_padding
self.border_width = bar.border_width
self.state.gman.group_changed.listen(self.bar.redraw.emit)
Window.any_window_changed.listen(self.check_state)
def check_state(self):
if self.state.dirty:
self.bar.redraw.emit()
def draw(self, canvas, l, r):
self.state.update()
assert not self.right, "Sorry, right not implemented"
self.font.apply(canvas)
canvas.set_line_join(LINE_JOIN_ROUND)
canvas.set_line_width(self.border_width)
x = l
between = self.padding.right + self.padding.left
for gs in self.state.groups:
gname = gs.name
if self.first_letter:
gname = gname[0]
sx, sy, w, h, ax, ay = canvas.text_extents(gname)
if gs.active:
canvas.set_source(self.selected_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
elif gs.visible:
canvas.set_source(self.subactive_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
if gs.urgent:
canvas.set_source(self.urgent_color)
elif gs.empty:
canvas.set_source(self.inactive_color)
else:
canvas.set_source(self.active_color)
canvas.move_to(x + self.padding.left,
self.height - self.padding.bottom)
canvas.show_text(gname)
x += ax + between
return x, r | commander = dependency(CommandDispatcher, 'commander')
gman = dependency(GroupManager, 'group-manager')
| random_line_split |
groupbox.py | from collections import namedtuple
from cairo import LINE_JOIN_ROUND
from zorro.di import di, dependency, has_dependencies
from tilenol.groups import GroupManager
from tilenol.commands import CommandDispatcher
from .base import Widget
from tilenol.theme import Theme
from tilenol.window import Window
GroupState = namedtuple(
'GroupState',
('name', 'empty', 'active', 'visible', 'urgent')
)
@has_dependencies
class State(object):
commander = dependency(CommandDispatcher, 'commander')
gman = dependency(GroupManager, 'group-manager')
def __init__(self):
self._state = None
def dirty(self):
return self._state != self._read()
def update(self):
nval = self._read()
if nval != self._state:
self._state = nval
return True
def _read(self):
cur = self.commander.get('group')
visgr = self.gman.current_groups.values()
return tuple(GroupState(g.name, g.empty, g is cur, g in visgr,
g.has_urgent_windows)
for g in self.gman.groups)
@property
def groups(self):
return self._state
@has_dependencies
class Groupbox(Widget):
theme = dependency(Theme, 'theme')
def __init__(self, *, filled=False, first_letter=False, right=False):
super().__init__(right=right)
self.filled = filled
self.first_letter = first_letter
def __zorro_di_done__(self):
self.state = di(self).inject(State())
bar = self.theme.bar
self.font = bar.font
self.inactive_color = bar.dim_color_pat
self.urgent_color = bar.bright_color_pat
self.active_color = bar.text_color_pat
self.selected_color = bar.active_border_pat
self.subactive_color = bar.subactive_border_pat
self.padding = bar.text_padding
self.border_width = bar.border_width
self.state.gman.group_changed.listen(self.bar.redraw.emit)
Window.any_window_changed.listen(self.check_state)
def check_state(self):
if self.state.dirty:
self.bar.redraw.emit()
def draw(self, canvas, l, r):
self.state.update()
assert not self.right, "Sorry, right not implemented"
self.font.apply(canvas)
canvas.set_line_join(LINE_JOIN_ROUND)
canvas.set_line_width(self.border_width)
x = l
between = self.padding.right + self.padding.left
for gs in self.state.groups:
gname = gs.name
if self.first_letter:
|
sx, sy, w, h, ax, ay = canvas.text_extents(gname)
if gs.active:
canvas.set_source(self.selected_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
elif gs.visible:
canvas.set_source(self.subactive_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
if gs.urgent:
canvas.set_source(self.urgent_color)
elif gs.empty:
canvas.set_source(self.inactive_color)
else:
canvas.set_source(self.active_color)
canvas.move_to(x + self.padding.left,
self.height - self.padding.bottom)
canvas.show_text(gname)
x += ax + between
return x, r
| gname = gname[0] | conditional_block |
groupbox.py | from collections import namedtuple
from cairo import LINE_JOIN_ROUND
from zorro.di import di, dependency, has_dependencies
from tilenol.groups import GroupManager
from tilenol.commands import CommandDispatcher
from .base import Widget
from tilenol.theme import Theme
from tilenol.window import Window
GroupState = namedtuple(
'GroupState',
('name', 'empty', 'active', 'visible', 'urgent')
)
@has_dependencies
class State(object):
commander = dependency(CommandDispatcher, 'commander')
gman = dependency(GroupManager, 'group-manager')
def __init__(self):
self._state = None
def dirty(self):
return self._state != self._read()
def update(self):
nval = self._read()
if nval != self._state:
self._state = nval
return True
def _read(self):
|
@property
def groups(self):
return self._state
@has_dependencies
class Groupbox(Widget):
theme = dependency(Theme, 'theme')
def __init__(self, *, filled=False, first_letter=False, right=False):
super().__init__(right=right)
self.filled = filled
self.first_letter = first_letter
def __zorro_di_done__(self):
self.state = di(self).inject(State())
bar = self.theme.bar
self.font = bar.font
self.inactive_color = bar.dim_color_pat
self.urgent_color = bar.bright_color_pat
self.active_color = bar.text_color_pat
self.selected_color = bar.active_border_pat
self.subactive_color = bar.subactive_border_pat
self.padding = bar.text_padding
self.border_width = bar.border_width
self.state.gman.group_changed.listen(self.bar.redraw.emit)
Window.any_window_changed.listen(self.check_state)
def check_state(self):
if self.state.dirty:
self.bar.redraw.emit()
def draw(self, canvas, l, r):
self.state.update()
assert not self.right, "Sorry, right not implemented"
self.font.apply(canvas)
canvas.set_line_join(LINE_JOIN_ROUND)
canvas.set_line_width(self.border_width)
x = l
between = self.padding.right + self.padding.left
for gs in self.state.groups:
gname = gs.name
if self.first_letter:
gname = gname[0]
sx, sy, w, h, ax, ay = canvas.text_extents(gname)
if gs.active:
canvas.set_source(self.selected_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
elif gs.visible:
canvas.set_source(self.subactive_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
if gs.urgent:
canvas.set_source(self.urgent_color)
elif gs.empty:
canvas.set_source(self.inactive_color)
else:
canvas.set_source(self.active_color)
canvas.move_to(x + self.padding.left,
self.height - self.padding.bottom)
canvas.show_text(gname)
x += ax + between
return x, r
| cur = self.commander.get('group')
visgr = self.gman.current_groups.values()
return tuple(GroupState(g.name, g.empty, g is cur, g in visgr,
g.has_urgent_windows)
for g in self.gman.groups) | identifier_body |
dropdowndemo.ts | import {Component} from '@angular/core';
import {SelectItem} from '../../../components/common/api';
@Component({
templateUrl: 'showcase/demo/dropdown/dropdown.html',
})
export class DropdownDemo {
cities: SelectItem[];
selectedCity: any;
cars: SelectItem[];
selectedCar: string;
selectedCar2: string = 'BMW';
| () {
this.cities = [];
this.cities.push({label:'Select City', value:null});
this.cities.push({label:'New York', value:{id:1, name: 'New York', code: 'NY'}});
this.cities.push({label:'Rome', value:{id:2, name: 'Rome', code: 'RM'}});
this.cities.push({label:'London', value:{id:3, name: 'London', code: 'LDN'}});
this.cities.push({label:'Istanbul', value:{id:4, name: 'Istanbul', code: 'IST'}});
this.cities.push({label:'Paris', value:{id:5, name: 'Paris', code: 'PRS'}});
this.cars = [];
this.cars.push({label: 'Audi', value: 'Audi'});
this.cars.push({label: 'BMW', value: 'BMW'});
this.cars.push({label: 'Fiat', value: 'Fiat'});
this.cars.push({label: 'Ford', value: 'Ford'});
this.cars.push({label: 'Honda', value: 'Honda'});
this.cars.push({label: 'Jaguar', value: 'Jaguar'});
this.cars.push({label: 'Mercedes', value: 'Mercedes'});
this.cars.push({label: 'Renault', value: 'Renault'});
this.cars.push({label: 'VW', value: 'VW'});
this.cars.push({label: 'Volvo', value: 'Volvo'});
}
} | constructor | identifier_name |
dropdowndemo.ts | import {Component} from '@angular/core';
import {SelectItem} from '../../../components/common/api';
@Component({
templateUrl: 'showcase/demo/dropdown/dropdown.html',
})
export class DropdownDemo {
cities: SelectItem[];
selectedCity: any;
cars: SelectItem[];
selectedCar: string;
selectedCar2: string = 'BMW';
constructor() |
} | {
this.cities = [];
this.cities.push({label:'Select City', value:null});
this.cities.push({label:'New York', value:{id:1, name: 'New York', code: 'NY'}});
this.cities.push({label:'Rome', value:{id:2, name: 'Rome', code: 'RM'}});
this.cities.push({label:'London', value:{id:3, name: 'London', code: 'LDN'}});
this.cities.push({label:'Istanbul', value:{id:4, name: 'Istanbul', code: 'IST'}});
this.cities.push({label:'Paris', value:{id:5, name: 'Paris', code: 'PRS'}});
this.cars = [];
this.cars.push({label: 'Audi', value: 'Audi'});
this.cars.push({label: 'BMW', value: 'BMW'});
this.cars.push({label: 'Fiat', value: 'Fiat'});
this.cars.push({label: 'Ford', value: 'Ford'});
this.cars.push({label: 'Honda', value: 'Honda'});
this.cars.push({label: 'Jaguar', value: 'Jaguar'});
this.cars.push({label: 'Mercedes', value: 'Mercedes'});
this.cars.push({label: 'Renault', value: 'Renault'});
this.cars.push({label: 'VW', value: 'VW'});
this.cars.push({label: 'Volvo', value: 'Volvo'});
} | identifier_body |
dropdowndemo.ts | import {Component} from '@angular/core';
import {SelectItem} from '../../../components/common/api';
@Component({
templateUrl: 'showcase/demo/dropdown/dropdown.html',
})
export class DropdownDemo {
cities: SelectItem[];
selectedCity: any;
cars: SelectItem[];
selectedCar: string;
selectedCar2: string = 'BMW';
constructor() { | this.cities.push({label:'Select City', value:null});
this.cities.push({label:'New York', value:{id:1, name: 'New York', code: 'NY'}});
this.cities.push({label:'Rome', value:{id:2, name: 'Rome', code: 'RM'}});
this.cities.push({label:'London', value:{id:3, name: 'London', code: 'LDN'}});
this.cities.push({label:'Istanbul', value:{id:4, name: 'Istanbul', code: 'IST'}});
this.cities.push({label:'Paris', value:{id:5, name: 'Paris', code: 'PRS'}});
this.cars = [];
this.cars.push({label: 'Audi', value: 'Audi'});
this.cars.push({label: 'BMW', value: 'BMW'});
this.cars.push({label: 'Fiat', value: 'Fiat'});
this.cars.push({label: 'Ford', value: 'Ford'});
this.cars.push({label: 'Honda', value: 'Honda'});
this.cars.push({label: 'Jaguar', value: 'Jaguar'});
this.cars.push({label: 'Mercedes', value: 'Mercedes'});
this.cars.push({label: 'Renault', value: 'Renault'});
this.cars.push({label: 'VW', value: 'VW'});
this.cars.push({label: 'Volvo', value: 'Volvo'});
}
} | this.cities = []; | random_line_split |
main.rs | #![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![feature(iterator_for_each)]
#[macro_use]
extern crate clap;
extern crate iota_kerl;
extern crate iota_sign;
extern crate iota_trytes;
extern crate log4rs;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mysql;
extern crate zmq;
#[macro_use]
mod macros;
mod app;
mod args;
mod worker;
mod message;
mod mapper;
mod solid;
mod event;
mod utils;
use args::Args;
use mapper::{AddressMapper, BundleMapper, Mapper, TransactionMapper};
use std::process::exit;
use std::sync::{mpsc, Arc};
use utils::MysqlConnUtils;
use worker::{ApproveThread, CalculateThreads, InsertThread, SolidateThread,
UpdateThread, ZmqLoop};
fn | () {
let matches = app::build().get_matches();
let args = Args::parse(&matches).unwrap_or_else(|err| {
eprintln!("Invalid arguments: {}", err);
exit(1);
});
let Args {
zmq_uri,
mysql_uri,
retry_interval,
update_interval,
calculation_threads,
calculation_limit,
generation_limit,
milestone_address,
milestone_start_index,
log_config,
} = args;
log4rs::init_file(log_config, Default::default()).unwrap_or_else(|err| {
eprintln!("Error while processing logger configuration file: {}", err);
exit(1);
});
let (insert_tx, insert_rx) = mpsc::channel();
let (approve_tx, approve_rx) = mpsc::channel();
let (solidate_tx, solidate_rx) = mpsc::channel();
let (calculate_tx, calculate_rx) = mpsc::channel();
let ctx = zmq::Context::new();
let socket = ctx.socket(zmq::SUB).expect("ZMQ socket create failure");
socket.connect(zmq_uri).expect("ZMQ socket connect failure");
socket.set_subscribe(b"tx ").expect("ZMQ subscribe failure");
let mut conn = mysql::Conn::new_retry(mysql_uri, retry_interval);
let transaction_mapper = Arc::new(
TransactionMapper::new(&mut conn, retry_interval)
.expect("Transaction mapper failure"),
);
let address_mapper = Arc::new(
AddressMapper::new(&mut conn, retry_interval)
.expect("Address mapper failure"),
);
let bundle_mapper = Arc::new(
BundleMapper::new(&mut conn, retry_interval)
.expect("Bundle mapper failure"),
);
info!("Milestone address: {}", milestone_address);
info!("Milestone start index string: {}", milestone_start_index);
info!("Initial `id_tx`: {}", transaction_mapper.current_id());
info!("Initial `id_address`: {}", address_mapper.current_id());
info!("Initial `id_bundle`: {}", bundle_mapper.current_id());
let insert_thread = InsertThread {
insert_rx,
approve_tx,
solidate_tx,
calculate_tx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
milestone_address,
milestone_start_index,
};
let update_thread = UpdateThread {
mysql_uri,
retry_interval,
update_interval,
generation_limit,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let approve_thread = ApproveThread {
approve_rx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let solidate_thread = SolidateThread {
solidate_rx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
};
let calculate_threads = CalculateThreads {
calculate_rx,
mysql_uri,
retry_interval,
calculation_threads,
calculation_limit,
transaction_mapper: transaction_mapper.clone(),
};
let zmq_loop = ZmqLoop { socket, insert_tx };
insert_thread.spawn();
update_thread.spawn();
approve_thread.spawn();
solidate_thread.spawn();
calculate_threads.spawn();
zmq_loop.run();
}
| main | identifier_name |
main.rs | #![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![feature(iterator_for_each)]
#[macro_use]
extern crate clap;
extern crate iota_kerl;
extern crate iota_sign;
extern crate iota_trytes;
extern crate log4rs;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mysql;
extern crate zmq;
#[macro_use]
mod macros;
mod app;
mod args;
mod worker;
mod message;
mod mapper;
mod solid;
mod event;
mod utils;
use args::Args;
use mapper::{AddressMapper, BundleMapper, Mapper, TransactionMapper};
use std::process::exit;
use std::sync::{mpsc, Arc};
use utils::MysqlConnUtils;
use worker::{ApproveThread, CalculateThreads, InsertThread, SolidateThread,
UpdateThread, ZmqLoop};
fn main() | {
let matches = app::build().get_matches();
let args = Args::parse(&matches).unwrap_or_else(|err| {
eprintln!("Invalid arguments: {}", err);
exit(1);
});
let Args {
zmq_uri,
mysql_uri,
retry_interval,
update_interval,
calculation_threads,
calculation_limit,
generation_limit,
milestone_address,
milestone_start_index,
log_config,
} = args;
log4rs::init_file(log_config, Default::default()).unwrap_or_else(|err| {
eprintln!("Error while processing logger configuration file: {}", err);
exit(1);
});
let (insert_tx, insert_rx) = mpsc::channel();
let (approve_tx, approve_rx) = mpsc::channel();
let (solidate_tx, solidate_rx) = mpsc::channel();
let (calculate_tx, calculate_rx) = mpsc::channel();
let ctx = zmq::Context::new();
let socket = ctx.socket(zmq::SUB).expect("ZMQ socket create failure");
socket.connect(zmq_uri).expect("ZMQ socket connect failure");
socket.set_subscribe(b"tx ").expect("ZMQ subscribe failure");
let mut conn = mysql::Conn::new_retry(mysql_uri, retry_interval);
let transaction_mapper = Arc::new(
TransactionMapper::new(&mut conn, retry_interval)
.expect("Transaction mapper failure"),
);
let address_mapper = Arc::new(
AddressMapper::new(&mut conn, retry_interval)
.expect("Address mapper failure"),
);
let bundle_mapper = Arc::new(
BundleMapper::new(&mut conn, retry_interval)
.expect("Bundle mapper failure"),
);
info!("Milestone address: {}", milestone_address);
info!("Milestone start index string: {}", milestone_start_index);
info!("Initial `id_tx`: {}", transaction_mapper.current_id());
info!("Initial `id_address`: {}", address_mapper.current_id());
info!("Initial `id_bundle`: {}", bundle_mapper.current_id());
let insert_thread = InsertThread {
insert_rx,
approve_tx,
solidate_tx,
calculate_tx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
milestone_address,
milestone_start_index,
};
let update_thread = UpdateThread {
mysql_uri,
retry_interval,
update_interval,
generation_limit,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let approve_thread = ApproveThread {
approve_rx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let solidate_thread = SolidateThread {
solidate_rx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
};
let calculate_threads = CalculateThreads {
calculate_rx,
mysql_uri,
retry_interval,
calculation_threads,
calculation_limit,
transaction_mapper: transaction_mapper.clone(),
};
let zmq_loop = ZmqLoop { socket, insert_tx };
insert_thread.spawn();
update_thread.spawn();
approve_thread.spawn();
solidate_thread.spawn();
calculate_threads.spawn();
zmq_loop.run();
} | identifier_body |
|
main.rs | #![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![feature(iterator_for_each)]
#[macro_use]
extern crate clap;
extern crate iota_kerl;
extern crate iota_sign;
extern crate iota_trytes;
extern crate log4rs;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mysql;
extern crate zmq;
#[macro_use]
mod macros;
mod app;
mod args;
mod worker;
mod message;
mod mapper;
mod solid;
mod event;
mod utils;
use args::Args;
use mapper::{AddressMapper, BundleMapper, Mapper, TransactionMapper};
use std::process::exit;
use std::sync::{mpsc, Arc};
use utils::MysqlConnUtils;
use worker::{ApproveThread, CalculateThreads, InsertThread, SolidateThread,
UpdateThread, ZmqLoop};
fn main() {
let matches = app::build().get_matches();
let args = Args::parse(&matches).unwrap_or_else(|err| {
eprintln!("Invalid arguments: {}", err);
exit(1);
});
let Args {
zmq_uri,
mysql_uri,
retry_interval,
update_interval,
calculation_threads,
calculation_limit,
generation_limit,
milestone_address,
milestone_start_index,
log_config,
} = args;
log4rs::init_file(log_config, Default::default()).unwrap_or_else(|err| {
eprintln!("Error while processing logger configuration file: {}", err);
exit(1);
});
let (insert_tx, insert_rx) = mpsc::channel();
let (approve_tx, approve_rx) = mpsc::channel();
let (solidate_tx, solidate_rx) = mpsc::channel();
let (calculate_tx, calculate_rx) = mpsc::channel();
let ctx = zmq::Context::new();
let socket = ctx.socket(zmq::SUB).expect("ZMQ socket create failure");
socket.connect(zmq_uri).expect("ZMQ socket connect failure");
socket.set_subscribe(b"tx ").expect("ZMQ subscribe failure");
let mut conn = mysql::Conn::new_retry(mysql_uri, retry_interval);
let transaction_mapper = Arc::new(
TransactionMapper::new(&mut conn, retry_interval)
.expect("Transaction mapper failure"),
);
let address_mapper = Arc::new(
AddressMapper::new(&mut conn, retry_interval)
.expect("Address mapper failure"),
);
let bundle_mapper = Arc::new(
BundleMapper::new(&mut conn, retry_interval)
.expect("Bundle mapper failure"),
);
info!("Milestone address: {}", milestone_address);
info!("Milestone start index string: {}", milestone_start_index);
info!("Initial `id_tx`: {}", transaction_mapper.current_id());
info!("Initial `id_address`: {}", address_mapper.current_id());
info!("Initial `id_bundle`: {}", bundle_mapper.current_id());
let insert_thread = InsertThread {
insert_rx,
approve_tx,
solidate_tx,
calculate_tx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
milestone_address,
milestone_start_index,
};
let update_thread = UpdateThread {
mysql_uri,
retry_interval,
update_interval,
generation_limit,
transaction_mapper: transaction_mapper.clone(),
address_mapper: address_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let approve_thread = ApproveThread {
approve_rx,
mysql_uri,
retry_interval, | solidate_rx,
mysql_uri,
retry_interval,
transaction_mapper: transaction_mapper.clone(),
};
let calculate_threads = CalculateThreads {
calculate_rx,
mysql_uri,
retry_interval,
calculation_threads,
calculation_limit,
transaction_mapper: transaction_mapper.clone(),
};
let zmq_loop = ZmqLoop { socket, insert_tx };
insert_thread.spawn();
update_thread.spawn();
approve_thread.spawn();
solidate_thread.spawn();
calculate_threads.spawn();
zmq_loop.run();
} | transaction_mapper: transaction_mapper.clone(),
bundle_mapper: bundle_mapper.clone(),
};
let solidate_thread = SolidateThread { | random_line_split |
discriminant_value-wrapper.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem; | Second(u64)
}
pub fn main() {
assert!(mem::discriminant(&ADT::First(0,0)) == mem::discriminant(&ADT::First(1,1)));
assert!(mem::discriminant(&ADT::Second(5)) == mem::discriminant(&ADT::Second(6)));
assert!(mem::discriminant(&ADT::First(2,2)) != mem::discriminant(&ADT::Second(2)));
let _ = mem::discriminant(&10);
let _ = mem::discriminant(&"test");
} |
enum ADT {
First(u32, u32), | random_line_split |
discriminant_value-wrapper.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;
enum | {
First(u32, u32),
Second(u64)
}
pub fn main() {
assert!(mem::discriminant(&ADT::First(0,0)) == mem::discriminant(&ADT::First(1,1)));
assert!(mem::discriminant(&ADT::Second(5)) == mem::discriminant(&ADT::Second(6)));
assert!(mem::discriminant(&ADT::First(2,2)) != mem::discriminant(&ADT::Second(2)));
let _ = mem::discriminant(&10);
let _ = mem::discriminant(&"test");
}
| ADT | identifier_name |
notes.module.ts | // (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { AddonNotesProvider } from './providers/notes';
import { AddonNotesOfflineProvider } from './providers/notes-offline';
import { AddonNotesSyncProvider } from './providers/notes-sync';
import { AddonNotesCourseOptionHandler } from './providers/course-option-handler';
import { AddonNotesSyncCronHandler } from './providers/sync-cron-handler';
import { AddonNotesUserHandler } from './providers/user-handler';
import { AddonNotesComponentsModule } from './components/components.module';
import { CoreCourseOptionsDelegate } from '@core/course/providers/options-delegate';
import { CoreCronDelegate } from '@providers/cron';
import { CoreUserDelegate } from '@core/user/providers/user-delegate';
// List of providers (without handlers).
export const ADDON_NOTES_PROVIDERS: any[] = [
AddonNotesProvider,
AddonNotesOfflineProvider,
AddonNotesSyncProvider
];
@NgModule({
declarations: [
],
imports: [
AddonNotesComponentsModule
],
providers: [
AddonNotesProvider,
AddonNotesOfflineProvider,
AddonNotesSyncProvider,
AddonNotesCourseOptionHandler,
AddonNotesSyncCronHandler,
AddonNotesUserHandler ]
})
export class AddonNotesModule {
| (courseOptionsDelegate: CoreCourseOptionsDelegate, courseOptionHandler: AddonNotesCourseOptionHandler,
userDelegate: CoreUserDelegate, userHandler: AddonNotesUserHandler,
cronDelegate: CoreCronDelegate, syncHandler: AddonNotesSyncCronHandler) {
// Register handlers.
courseOptionsDelegate.registerHandler(courseOptionHandler);
userDelegate.registerHandler(userHandler);
cronDelegate.register(syncHandler);
}
}
| constructor | identifier_name |
notes.module.ts | // (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { AddonNotesProvider } from './providers/notes';
import { AddonNotesOfflineProvider } from './providers/notes-offline';
import { AddonNotesSyncProvider } from './providers/notes-sync';
import { AddonNotesCourseOptionHandler } from './providers/course-option-handler';
import { AddonNotesSyncCronHandler } from './providers/sync-cron-handler';
import { AddonNotesUserHandler } from './providers/user-handler';
import { AddonNotesComponentsModule } from './components/components.module';
import { CoreCourseOptionsDelegate } from '@core/course/providers/options-delegate';
import { CoreCronDelegate } from '@providers/cron';
import { CoreUserDelegate } from '@core/user/providers/user-delegate';
// List of providers (without handlers).
export const ADDON_NOTES_PROVIDERS: any[] = [
AddonNotesProvider,
AddonNotesOfflineProvider,
AddonNotesSyncProvider
];
@NgModule({
declarations: [
],
imports: [
AddonNotesComponentsModule
],
providers: [
AddonNotesProvider,
AddonNotesOfflineProvider,
AddonNotesSyncProvider,
AddonNotesCourseOptionHandler,
AddonNotesSyncCronHandler,
AddonNotesUserHandler ]
})
export class AddonNotesModule {
constructor(courseOptionsDelegate: CoreCourseOptionsDelegate, courseOptionHandler: AddonNotesCourseOptionHandler,
userDelegate: CoreUserDelegate, userHandler: AddonNotesUserHandler,
cronDelegate: CoreCronDelegate, syncHandler: AddonNotesSyncCronHandler) {
// Register handlers.
courseOptionsDelegate.registerHandler(courseOptionHandler);
userDelegate.registerHandler(userHandler);
cronDelegate.register(syncHandler); | }
} | random_line_split |
|
mpatch.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::os::raw::c_char;
use std::os::raw::c_void;
use std::ptr;
use libc::ssize_t;
use mpatch_sys::*;
unsafe extern "C" fn get_next_link(deltas: *mut c_void, index: ssize_t) -> *mut mpatch_flist {
let deltas = (deltas as *const Vec<&[u8]>).as_ref().unwrap();
if index < 0 || index as usize >= deltas.len() {
return ptr::null_mut();
}
let delta: &[u8] = deltas[index as usize];
let mut res: *mut mpatch_flist = ptr::null_mut();
if mpatch_decode(
delta.as_ptr() as *const c_char,
delta.len() as isize,
&mut res,
) < 0
{
return ptr::null_mut();
}
return res;
}
pub fn get_full_text(base_text: &[u8], deltas: &Vec<&[u8]>) -> Result<Vec<u8>, &'static str> {
// If there are no deltas, just return the full text portion
if deltas.len() == 0 {
return Ok(base_text.to_vec());
}
unsafe {
let patch: *mut mpatch_flist = mpatch_fold(
deltas as *const Vec<&[u8]> as *mut c_void,
Some(get_next_link),
0,
deltas.len() as isize,
);
if patch.is_null() {
return Err("mpatch failed to process the deltas");
}
let outlen = mpatch_calcsize(base_text.len() as isize, patch);
if outlen < 0 {
mpatch_lfree(patch);
return Err("mpatch failed to calculate size");
}
let outlen = outlen as usize;
let mut result: Vec<u8> = Vec::with_capacity(outlen);
result.set_len(outlen);
if mpatch_apply(
result.as_mut_ptr() as *mut c_char,
base_text.as_ptr() as *const c_char,
base_text.len() as ssize_t,
patch,
) < 0
{
mpatch_lfree(patch);
return Err("mpatch failed to apply patches");
}
mpatch_lfree(patch);
return Ok(result);
}
}
#[cfg(test)]
mod tests {
use super::get_full_text;
#[test]
fn no_deltas() {
let base_text = b"hello";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn no_deltas_empty_base() {
let base_text = b"";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn test_apply_delta() {
let base_text = b"My data";
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_deltas() {
let base_text = b"My data";
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x0D\x00\x00\x00\x10still deltafied ",
];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My still deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_invalid_deltas() {
let base_text = b"My data";
// Short delta
let deltas: Vec<&[u8]> = vec![b"\x00\x03"];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Short data
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ", | assert!(full_text.is_err());
// Delta doesn't match base_text
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\xFF\x00\x00\x01\x00\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
}
} | b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adelta",
];
let full_text = get_full_text(&base_text[..], &deltas); | random_line_split |
mpatch.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::os::raw::c_char;
use std::os::raw::c_void;
use std::ptr;
use libc::ssize_t;
use mpatch_sys::*;
unsafe extern "C" fn get_next_link(deltas: *mut c_void, index: ssize_t) -> *mut mpatch_flist {
let deltas = (deltas as *const Vec<&[u8]>).as_ref().unwrap();
if index < 0 || index as usize >= deltas.len() {
return ptr::null_mut();
}
let delta: &[u8] = deltas[index as usize];
let mut res: *mut mpatch_flist = ptr::null_mut();
if mpatch_decode(
delta.as_ptr() as *const c_char,
delta.len() as isize,
&mut res,
) < 0
{
return ptr::null_mut();
}
return res;
}
pub fn get_full_text(base_text: &[u8], deltas: &Vec<&[u8]>) -> Result<Vec<u8>, &'static str> {
// If there are no deltas, just return the full text portion
if deltas.len() == 0 {
return Ok(base_text.to_vec());
}
unsafe {
let patch: *mut mpatch_flist = mpatch_fold(
deltas as *const Vec<&[u8]> as *mut c_void,
Some(get_next_link),
0,
deltas.len() as isize,
);
if patch.is_null() {
return Err("mpatch failed to process the deltas");
}
let outlen = mpatch_calcsize(base_text.len() as isize, patch);
if outlen < 0 {
mpatch_lfree(patch);
return Err("mpatch failed to calculate size");
}
let outlen = outlen as usize;
let mut result: Vec<u8> = Vec::with_capacity(outlen);
result.set_len(outlen);
if mpatch_apply(
result.as_mut_ptr() as *mut c_char,
base_text.as_ptr() as *const c_char,
base_text.len() as ssize_t,
patch,
) < 0
{
mpatch_lfree(patch);
return Err("mpatch failed to apply patches");
}
mpatch_lfree(patch);
return Ok(result);
}
}
#[cfg(test)]
mod tests {
use super::get_full_text;
#[test]
fn no_deltas() {
let base_text = b"hello";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn no_deltas_empty_base() |
#[test]
fn test_apply_delta() {
let base_text = b"My data";
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_deltas() {
let base_text = b"My data";
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x0D\x00\x00\x00\x10still deltafied ",
];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My still deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_invalid_deltas() {
let base_text = b"My data";
// Short delta
let deltas: Vec<&[u8]> = vec![b"\x00\x03"];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Short data
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adelta",
];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Delta doesn't match base_text
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\xFF\x00\x00\x01\x00\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
}
}
| {
let base_text = b"";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
} | identifier_body |
mpatch.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::os::raw::c_char;
use std::os::raw::c_void;
use std::ptr;
use libc::ssize_t;
use mpatch_sys::*;
unsafe extern "C" fn get_next_link(deltas: *mut c_void, index: ssize_t) -> *mut mpatch_flist {
let deltas = (deltas as *const Vec<&[u8]>).as_ref().unwrap();
if index < 0 || index as usize >= deltas.len() {
return ptr::null_mut();
}
let delta: &[u8] = deltas[index as usize];
let mut res: *mut mpatch_flist = ptr::null_mut();
if mpatch_decode(
delta.as_ptr() as *const c_char,
delta.len() as isize,
&mut res,
) < 0
{
return ptr::null_mut();
}
return res;
}
pub fn get_full_text(base_text: &[u8], deltas: &Vec<&[u8]>) -> Result<Vec<u8>, &'static str> {
// If there are no deltas, just return the full text portion
if deltas.len() == 0 |
unsafe {
let patch: *mut mpatch_flist = mpatch_fold(
deltas as *const Vec<&[u8]> as *mut c_void,
Some(get_next_link),
0,
deltas.len() as isize,
);
if patch.is_null() {
return Err("mpatch failed to process the deltas");
}
let outlen = mpatch_calcsize(base_text.len() as isize, patch);
if outlen < 0 {
mpatch_lfree(patch);
return Err("mpatch failed to calculate size");
}
let outlen = outlen as usize;
let mut result: Vec<u8> = Vec::with_capacity(outlen);
result.set_len(outlen);
if mpatch_apply(
result.as_mut_ptr() as *mut c_char,
base_text.as_ptr() as *const c_char,
base_text.len() as ssize_t,
patch,
) < 0
{
mpatch_lfree(patch);
return Err("mpatch failed to apply patches");
}
mpatch_lfree(patch);
return Ok(result);
}
}
#[cfg(test)]
mod tests {
use super::get_full_text;
#[test]
fn no_deltas() {
let base_text = b"hello";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn no_deltas_empty_base() {
let base_text = b"";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn test_apply_delta() {
let base_text = b"My data";
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_deltas() {
let base_text = b"My data";
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x0D\x00\x00\x00\x10still deltafied ",
];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My still deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_invalid_deltas() {
let base_text = b"My data";
// Short delta
let deltas: Vec<&[u8]> = vec![b"\x00\x03"];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Short data
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adelta",
];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Delta doesn't match base_text
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\xFF\x00\x00\x01\x00\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
}
}
| {
return Ok(base_text.to_vec());
} | conditional_block |
mpatch.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::os::raw::c_char;
use std::os::raw::c_void;
use std::ptr;
use libc::ssize_t;
use mpatch_sys::*;
unsafe extern "C" fn get_next_link(deltas: *mut c_void, index: ssize_t) -> *mut mpatch_flist {
let deltas = (deltas as *const Vec<&[u8]>).as_ref().unwrap();
if index < 0 || index as usize >= deltas.len() {
return ptr::null_mut();
}
let delta: &[u8] = deltas[index as usize];
let mut res: *mut mpatch_flist = ptr::null_mut();
if mpatch_decode(
delta.as_ptr() as *const c_char,
delta.len() as isize,
&mut res,
) < 0
{
return ptr::null_mut();
}
return res;
}
pub fn get_full_text(base_text: &[u8], deltas: &Vec<&[u8]>) -> Result<Vec<u8>, &'static str> {
// If there are no deltas, just return the full text portion
if deltas.len() == 0 {
return Ok(base_text.to_vec());
}
unsafe {
let patch: *mut mpatch_flist = mpatch_fold(
deltas as *const Vec<&[u8]> as *mut c_void,
Some(get_next_link),
0,
deltas.len() as isize,
);
if patch.is_null() {
return Err("mpatch failed to process the deltas");
}
let outlen = mpatch_calcsize(base_text.len() as isize, patch);
if outlen < 0 {
mpatch_lfree(patch);
return Err("mpatch failed to calculate size");
}
let outlen = outlen as usize;
let mut result: Vec<u8> = Vec::with_capacity(outlen);
result.set_len(outlen);
if mpatch_apply(
result.as_mut_ptr() as *mut c_char,
base_text.as_ptr() as *const c_char,
base_text.len() as ssize_t,
patch,
) < 0
{
mpatch_lfree(patch);
return Err("mpatch failed to apply patches");
}
mpatch_lfree(patch);
return Ok(result);
}
}
#[cfg(test)]
mod tests {
use super::get_full_text;
#[test]
fn no_deltas() {
let base_text = b"hello";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn | () {
let base_text = b"";
let full_text = get_full_text(&base_text[..], &vec![]).unwrap();
assert_eq!(base_text, full_text.as_slice());
}
#[test]
fn test_apply_delta() {
let base_text = b"My data";
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_deltas() {
let base_text = b"My data";
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x0D\x00\x00\x00\x10still deltafied ",
];
let full_text = get_full_text(&base_text[..], &deltas).unwrap();
assert_eq!(b"My still deltafied data", full_text[..].as_ref());
}
#[test]
fn test_apply_invalid_deltas() {
let base_text = b"My data";
// Short delta
let deltas: Vec<&[u8]> = vec![b"\x00\x03"];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Short data
let deltas: Vec<&[u8]> = vec![
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adeltafied ",
b"\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x0Adelta",
];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
// Delta doesn't match base_text
let deltas: Vec<&[u8]> =
vec![b"\x00\x00\x00\xFF\x00\x00\x01\x00\x00\x00\x00\x0Adeltafied "];
let full_text = get_full_text(&base_text[..], &deltas);
assert!(full_text.is_err());
}
}
| no_deltas_empty_base | identifier_name |
feedback.js | $(function() {
$.getJSON('api', updateFeedback);
$('.feedback-form').submit(function(e) {
e.preventDefault();
$.post('api', {
name: $('#feedback-form-name').val(),
title: $('#feedback-form-title').val(),
message: $('#feedback-form-message').val()
}, updateFeedback);
});
$('.feedback-messages').on('click', function(e) {
if (e.target.className == 'glyphicon glyphicon-remove') {
$.ajax({
url: 'api/' + e.target.id,
type: 'DELETE',
success: updateFeedback
}); //ajax
} // the target is a delete button
}); //feedback messages
function updateFeedback(data) |
});
| {
var output = '';
$.each(data,function(key, item) {
output += ' <div class="feedback-item item-list media-list">';
output += ' <div class="feedback-item media">';
output += ' <div class="media-left"><button class="feedback-delete btn btn-xs btn-danger"><span id="' + key + '" class="glyphicon glyphicon-remove"></span></button></div>';
output += ' <div class="feedback-info media-body">';
output += ' <div class="feedback-head">';
output += ' <div class="feedback-title">' + item.title + ' <small class="feedback-name label label-info">' + item.name + '</small></div>';
output += ' </div>';
output += ' <div class="feedback-message">' + item.message + '</div>';
output += ' </div>';
output += ' </div>';
output += ' </div>';
});
$('.feedback-messages').html(output);
} | identifier_body |
feedback.js | $(function() {
$.getJSON('api', updateFeedback);
$('.feedback-form').submit(function(e) {
e.preventDefault();
$.post('api', {
name: $('#feedback-form-name').val(),
title: $('#feedback-form-title').val(),
message: $('#feedback-form-message').val()
}, updateFeedback);
});
$('.feedback-messages').on('click', function(e) {
if (e.target.className == 'glyphicon glyphicon-remove') {
$.ajax({
url: 'api/' + e.target.id,
type: 'DELETE',
success: updateFeedback | var output = '';
$.each(data,function(key, item) {
output += ' <div class="feedback-item item-list media-list">';
output += ' <div class="feedback-item media">';
output += ' <div class="media-left"><button class="feedback-delete btn btn-xs btn-danger"><span id="' + key + '" class="glyphicon glyphicon-remove"></span></button></div>';
output += ' <div class="feedback-info media-body">';
output += ' <div class="feedback-head">';
output += ' <div class="feedback-title">' + item.title + ' <small class="feedback-name label label-info">' + item.name + '</small></div>';
output += ' </div>';
output += ' <div class="feedback-message">' + item.message + '</div>';
output += ' </div>';
output += ' </div>';
output += ' </div>';
});
$('.feedback-messages').html(output);
}
}); | }); //ajax
} // the target is a delete button
}); //feedback messages
function updateFeedback(data) { | random_line_split |
feedback.js | $(function() {
$.getJSON('api', updateFeedback);
$('.feedback-form').submit(function(e) {
e.preventDefault();
$.post('api', {
name: $('#feedback-form-name').val(),
title: $('#feedback-form-title').val(),
message: $('#feedback-form-message').val()
}, updateFeedback);
});
$('.feedback-messages').on('click', function(e) {
if (e.target.className == 'glyphicon glyphicon-remove') {
$.ajax({
url: 'api/' + e.target.id,
type: 'DELETE',
success: updateFeedback
}); //ajax
} // the target is a delete button
}); //feedback messages
function | (data) {
var output = '';
$.each(data,function(key, item) {
output += ' <div class="feedback-item item-list media-list">';
output += ' <div class="feedback-item media">';
output += ' <div class="media-left"><button class="feedback-delete btn btn-xs btn-danger"><span id="' + key + '" class="glyphicon glyphicon-remove"></span></button></div>';
output += ' <div class="feedback-info media-body">';
output += ' <div class="feedback-head">';
output += ' <div class="feedback-title">' + item.title + ' <small class="feedback-name label label-info">' + item.name + '</small></div>';
output += ' </div>';
output += ' <div class="feedback-message">' + item.message + '</div>';
output += ' </div>';
output += ' </div>';
output += ' </div>';
});
$('.feedback-messages').html(output);
}
});
| updateFeedback | identifier_name |
feedback.js | $(function() {
$.getJSON('api', updateFeedback);
$('.feedback-form').submit(function(e) {
e.preventDefault();
$.post('api', {
name: $('#feedback-form-name').val(),
title: $('#feedback-form-title').val(),
message: $('#feedback-form-message').val()
}, updateFeedback);
});
$('.feedback-messages').on('click', function(e) {
if (e.target.className == 'glyphicon glyphicon-remove') |
}); //feedback messages
function updateFeedback(data) {
var output = '';
$.each(data,function(key, item) {
output += ' <div class="feedback-item item-list media-list">';
output += ' <div class="feedback-item media">';
output += ' <div class="media-left"><button class="feedback-delete btn btn-xs btn-danger"><span id="' + key + '" class="glyphicon glyphicon-remove"></span></button></div>';
output += ' <div class="feedback-info media-body">';
output += ' <div class="feedback-head">';
output += ' <div class="feedback-title">' + item.title + ' <small class="feedback-name label label-info">' + item.name + '</small></div>';
output += ' </div>';
output += ' <div class="feedback-message">' + item.message + '</div>';
output += ' </div>';
output += ' </div>';
output += ' </div>';
});
$('.feedback-messages').html(output);
}
});
| {
$.ajax({
url: 'api/' + e.target.id,
type: 'DELETE',
success: updateFeedback
}); //ajax
} // the target is a delete button | conditional_block |
web_reserver-bak.py | import web
import json
import datetime
import time
import uuid
#from mimerender import mimerender
#import mimerender
from onsa_jeroen import *
render_xml = lambda result: "<result>%s</result>"%result
render_json = lambda **result: json.dumps(result,sort_keys=True,indent=4)
render_html = lambda result: "<html><body>%s</body></html>"%result
render_txt = lambda result: result
def syncmyCall(func):
global result
result=None
def sync_func(*args, **kwargs):
global result
d=defer.maybeDeferred(func, *args, **kwargs)
while 1:
reactor.doSelect(1)
print result
time.sleep(1)
#return result
return sync_func
@syncmyCall
@defer.inlineCallbacks
def query (nsa):
|
print query("uva4k")
#if __name__ == "__main__":
| global result
client,client_nsa = createClient()
nsa = getNSA(nsa)
qr = yield client.query(client_nsa, nsa, None, "Summary", connection_ids = [] )
#result = qr
result = "blaaa" | identifier_body |
web_reserver-bak.py | import web
import json
import datetime
import time
import uuid
#from mimerender import mimerender
#import mimerender
from onsa_jeroen import *
render_xml = lambda result: "<result>%s</result>"%result
render_json = lambda **result: json.dumps(result,sort_keys=True,indent=4)
render_html = lambda result: "<html><body>%s</body></html>"%result
render_txt = lambda result: result
def syncmyCall(func):
global result
result=None
def | (*args, **kwargs):
global result
d=defer.maybeDeferred(func, *args, **kwargs)
while 1:
reactor.doSelect(1)
print result
time.sleep(1)
#return result
return sync_func
@syncmyCall
@defer.inlineCallbacks
def query (nsa):
global result
client,client_nsa = createClient()
nsa = getNSA(nsa)
qr = yield client.query(client_nsa, nsa, None, "Summary", connection_ids = [] )
#result = qr
result = "blaaa"
print query("uva4k")
#if __name__ == "__main__":
| sync_func | identifier_name |
web_reserver-bak.py | import web
import json
import datetime
import time
import uuid
#from mimerender import mimerender
#import mimerender
from onsa_jeroen import *
render_xml = lambda result: "<result>%s</result>"%result
render_json = lambda **result: json.dumps(result,sort_keys=True,indent=4)
render_html = lambda result: "<html><body>%s</body></html>"%result
render_txt = lambda result: result
def syncmyCall(func):
global result
result=None
def sync_func(*args, **kwargs):
global result
d=defer.maybeDeferred(func, *args, **kwargs)
while 1:
reactor.doSelect(1)
print result
time.sleep(1)
#return result
return sync_func | client,client_nsa = createClient()
nsa = getNSA(nsa)
qr = yield client.query(client_nsa, nsa, None, "Summary", connection_ids = [] )
#result = qr
result = "blaaa"
print query("uva4k")
#if __name__ == "__main__": |
@syncmyCall
@defer.inlineCallbacks
def query (nsa):
global result | random_line_split |
web_reserver-bak.py | import web
import json
import datetime
import time
import uuid
#from mimerender import mimerender
#import mimerender
from onsa_jeroen import *
render_xml = lambda result: "<result>%s</result>"%result
render_json = lambda **result: json.dumps(result,sort_keys=True,indent=4)
render_html = lambda result: "<html><body>%s</body></html>"%result
render_txt = lambda result: result
def syncmyCall(func):
global result
result=None
def sync_func(*args, **kwargs):
global result
d=defer.maybeDeferred(func, *args, **kwargs)
while 1:
|
return sync_func
@syncmyCall
@defer.inlineCallbacks
def query (nsa):
global result
client,client_nsa = createClient()
nsa = getNSA(nsa)
qr = yield client.query(client_nsa, nsa, None, "Summary", connection_ids = [] )
#result = qr
result = "blaaa"
print query("uva4k")
#if __name__ == "__main__":
| reactor.doSelect(1)
print result
time.sleep(1)
#return result | conditional_block |
histogram_dataselect_page.py | import sys
#from functools import partial
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
from ome_globals import *
import ui_histogram_dataselect_page
class HistogramDataSelectPage(QWizardPage, ui_histogram_dataselect_page.Ui_WizardPage):
def __init__(self, model, prev_hist_var=None, parent=None):
super(HistogramDataSelectPage, self).__init__(parent)
self.setupUi(self)
self.model = model
self.prev_hist_var = prev_hist_var
self._populate_combo_box()
def _populate_combo_box(self):
''' populates combo box with numerical variables '''
variables= self.model.get_variables()
variables.sort(key=lambda var: var.get_label())
default_index = 0
for var in variables:
# store column of var in user data
col = self.model.get_column_assigned_to_variable(var)
self.comboBox.addItem(var.get_label(), userData=QVariant(col))
index_of_item = self.comboBox.count()-1
if self.prev_hist_var == var:
default_index = index_of_item
# set default selection if given
self.comboBox.setCurrentIndex(default_index)
self.completeChanged.emit()
def | (self):
return True
def get_selected_var(self):
idx = self.comboBox.currentIndex()
data = self.comboBox.itemData(idx)
col = data.toInt()[0]
return self.model.get_variable_assigned_to_column(col)
| isComplete | identifier_name |
histogram_dataselect_page.py | import sys
#from functools import partial
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
from ome_globals import *
import ui_histogram_dataselect_page
class HistogramDataSelectPage(QWizardPage, ui_histogram_dataselect_page.Ui_WizardPage):
def __init__(self, model, prev_hist_var=None, parent=None):
super(HistogramDataSelectPage, self).__init__(parent)
self.setupUi(self)
self.model = model
self.prev_hist_var = prev_hist_var
self._populate_combo_box()
def _populate_combo_box(self):
''' populates combo box with numerical variables '''
variables= self.model.get_variables()
variables.sort(key=lambda var: var.get_label())
default_index = 0
for var in variables:
# store column of var in user data
col = self.model.get_column_assigned_to_variable(var)
self.comboBox.addItem(var.get_label(), userData=QVariant(col))
index_of_item = self.comboBox.count()-1
if self.prev_hist_var == var:
|
# set default selection if given
self.comboBox.setCurrentIndex(default_index)
self.completeChanged.emit()
def isComplete(self):
return True
def get_selected_var(self):
idx = self.comboBox.currentIndex()
data = self.comboBox.itemData(idx)
col = data.toInt()[0]
return self.model.get_variable_assigned_to_column(col)
| default_index = index_of_item | conditional_block |
histogram_dataselect_page.py | import sys
#from functools import partial
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
from ome_globals import *
import ui_histogram_dataselect_page
class HistogramDataSelectPage(QWizardPage, ui_histogram_dataselect_page.Ui_WizardPage):
def __init__(self, model, prev_hist_var=None, parent=None):
super(HistogramDataSelectPage, self).__init__(parent)
self.setupUi(self)
self.model = model
self.prev_hist_var = prev_hist_var
self._populate_combo_box()
def _populate_combo_box(self):
''' populates combo box with numerical variables '''
variables= self.model.get_variables()
variables.sort(key=lambda var: var.get_label())
default_index = 0
for var in variables:
# store column of var in user data
col = self.model.get_column_assigned_to_variable(var)
self.comboBox.addItem(var.get_label(), userData=QVariant(col))
index_of_item = self.comboBox.count()-1
if self.prev_hist_var == var:
default_index = index_of_item
# set default selection if given
self.comboBox.setCurrentIndex(default_index)
self.completeChanged.emit()
| return True
def get_selected_var(self):
idx = self.comboBox.currentIndex()
data = self.comboBox.itemData(idx)
col = data.toInt()[0]
return self.model.get_variable_assigned_to_column(col) | def isComplete(self): | random_line_split |
histogram_dataselect_page.py | import sys
#from functools import partial
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
from ome_globals import *
import ui_histogram_dataselect_page
class HistogramDataSelectPage(QWizardPage, ui_histogram_dataselect_page.Ui_WizardPage):
| def __init__(self, model, prev_hist_var=None, parent=None):
super(HistogramDataSelectPage, self).__init__(parent)
self.setupUi(self)
self.model = model
self.prev_hist_var = prev_hist_var
self._populate_combo_box()
def _populate_combo_box(self):
''' populates combo box with numerical variables '''
variables= self.model.get_variables()
variables.sort(key=lambda var: var.get_label())
default_index = 0
for var in variables:
# store column of var in user data
col = self.model.get_column_assigned_to_variable(var)
self.comboBox.addItem(var.get_label(), userData=QVariant(col))
index_of_item = self.comboBox.count()-1
if self.prev_hist_var == var:
default_index = index_of_item
# set default selection if given
self.comboBox.setCurrentIndex(default_index)
self.completeChanged.emit()
def isComplete(self):
return True
def get_selected_var(self):
idx = self.comboBox.currentIndex()
data = self.comboBox.itemData(idx)
col = data.toInt()[0]
return self.model.get_variable_assigned_to_column(col) | identifier_body |
|
units.py | order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def toSize(self, value, input='', output='K'):
"""
Convert value in other measurement
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
devider = len(str(int(self._BASE))) - 1
output = (len(str(value)) -2) / devider
output += order.index(input)
if output > len(order):
output = len(order) - 1
elif output < 0:
|
output = order[output]
return self.toSize(value, input, output), output
class Bytes(Sizes):
_BASE = 1024.
| output = 0 | conditional_block |
units.py | order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def toSize(self, value, input='', output='K'):
"""
Convert value in other measurement
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
devider = len(str(int(self._BASE))) - 1
output = (len(str(value)) -2) / devider
output += order.index(input) | output = len(order) - 1
elif output < 0:
output = 0
output = order[output]
return self.toSize(value, input, output), output
class Bytes(Sizes):
_BASE = 1024. | if output > len(order): | random_line_split |
units.py | order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def | (self, value, input='', output='K'):
"""
Convert value in other measurement
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
devider = len(str(int(self._BASE))) - 1
output = (len(str(value)) -2) / devider
output += order.index(input)
if output > len(order):
output = len(order) - 1
elif output < 0:
output = 0
output = order[output]
return self.toSize(value, input, output), output
class Bytes(Sizes):
_BASE = 1024.
| toSize | identifier_name |
units.py | order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def toSize(self, value, input='', output='K'):
"""
Convert value in other measurement
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
devider = len(str(int(self._BASE))) - 1
output = (len(str(value)) -2) / devider
output += order.index(input)
if output > len(order):
output = len(order) - 1
elif output < 0:
output = 0
output = order[output]
return self.toSize(value, input, output), output
class Bytes(Sizes):
| _BASE = 1024. | identifier_body |
|
scope.rs | use std::fmt;
use std::cmp::Eq;
use std::hash::Hash;
use std::collections::HashMap;
use util::*;
use error::*;
use traits::*;
use super::*;
#[derive(Clone)]
pub struct ProcessingScope<T: Hash + Eq> {
scope_id: String,
parent_id: Option<String>,
environment: ProcessingScopeEnvironment,
scoped_idents: HashMap<String, CommonBindings<T>>,
shaped_idents: HashMap<String, BindingShape<T>>,
element_bindings: HashMap<String, CommonBindings<T>>,
}
impl<T: Hash + Eq> Default for ProcessingScope<T> {
fn default() -> Self {
ProcessingScope::new(None as Option<String>, Default::default())
}
}
impl<T: Hash + Eq> ScopeParentId for ProcessingScope<T> {
fn parent_id(&self) -> Option<&str> {
self.parent_id.as_ref().map(|s| s.as_str())
}
}
impl<T: Hash + Eq> fmt::Debug for ProcessingScope<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} ({:?})", self.id(), self.environment())
}
}
impl<T: Hash + Eq> ProcessingScope<T> {
#[allow(dead_code)]
pub fn new<P: Into<String>>(
parent_id: Option<P>,
environment: ProcessingScopeEnvironment,
) -> Self {
let scope_id = allocate_element_key();
let parent_id = parent_id.map(|s| s.into());
ProcessingScope {
scope_id: scope_id,
parent_id: parent_id,
environment: environment,
| }
pub fn id(&self) -> &str {
&self.scope_id
}
pub fn add_ident(
&mut self,
key: String,
binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.scoped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.scoped_idents.get(key).map(|v| v.to_owned())
}
pub fn add_ident_shape(
&mut self,
key: String,
binding: BindingShape<T>,
) -> DocumentProcessingResult<()> {
self.shaped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident_shape(&mut self, key: &str) -> Option<BindingShape<T>>
where
T: Clone,
{
self.shaped_idents.get(key).map(|v| v.to_owned())
}
pub fn add_element_binding(
&mut self,
key: String,
common_binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.element_bindings.insert(key, common_binding);
Ok(())
}
pub fn get_element_binding(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.element_bindings.get(key).map(|v| v.to_owned())
}
pub fn environment(&self) -> &ProcessingScopeEnvironment {
&self.environment
}
} | scoped_idents: Default::default(),
shaped_idents: Default::default(),
element_bindings: Default::default(),
} | random_line_split |
scope.rs | use std::fmt;
use std::cmp::Eq;
use std::hash::Hash;
use std::collections::HashMap;
use util::*;
use error::*;
use traits::*;
use super::*;
#[derive(Clone)]
pub struct ProcessingScope<T: Hash + Eq> {
scope_id: String,
parent_id: Option<String>,
environment: ProcessingScopeEnvironment,
scoped_idents: HashMap<String, CommonBindings<T>>,
shaped_idents: HashMap<String, BindingShape<T>>,
element_bindings: HashMap<String, CommonBindings<T>>,
}
impl<T: Hash + Eq> Default for ProcessingScope<T> {
fn default() -> Self {
ProcessingScope::new(None as Option<String>, Default::default())
}
}
impl<T: Hash + Eq> ScopeParentId for ProcessingScope<T> {
fn parent_id(&self) -> Option<&str> {
self.parent_id.as_ref().map(|s| s.as_str())
}
}
impl<T: Hash + Eq> fmt::Debug for ProcessingScope<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} ({:?})", self.id(), self.environment())
}
}
impl<T: Hash + Eq> ProcessingScope<T> {
#[allow(dead_code)]
pub fn new<P: Into<String>>(
parent_id: Option<P>,
environment: ProcessingScopeEnvironment,
) -> Self {
let scope_id = allocate_element_key();
let parent_id = parent_id.map(|s| s.into());
ProcessingScope {
scope_id: scope_id,
parent_id: parent_id,
environment: environment,
scoped_idents: Default::default(),
shaped_idents: Default::default(),
element_bindings: Default::default(),
}
}
pub fn id(&self) -> &str {
&self.scope_id
}
pub fn add_ident(
&mut self,
key: String,
binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.scoped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.scoped_idents.get(key).map(|v| v.to_owned())
}
pub fn add_ident_shape(
&mut self,
key: String,
binding: BindingShape<T>,
) -> DocumentProcessingResult<()> {
self.shaped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident_shape(&mut self, key: &str) -> Option<BindingShape<T>>
where
T: Clone,
|
pub fn add_element_binding(
&mut self,
key: String,
common_binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.element_bindings.insert(key, common_binding);
Ok(())
}
pub fn get_element_binding(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.element_bindings.get(key).map(|v| v.to_owned())
}
pub fn environment(&self) -> &ProcessingScopeEnvironment {
&self.environment
}
}
| {
self.shaped_idents.get(key).map(|v| v.to_owned())
} | identifier_body |
scope.rs | use std::fmt;
use std::cmp::Eq;
use std::hash::Hash;
use std::collections::HashMap;
use util::*;
use error::*;
use traits::*;
use super::*;
#[derive(Clone)]
pub struct ProcessingScope<T: Hash + Eq> {
scope_id: String,
parent_id: Option<String>,
environment: ProcessingScopeEnvironment,
scoped_idents: HashMap<String, CommonBindings<T>>,
shaped_idents: HashMap<String, BindingShape<T>>,
element_bindings: HashMap<String, CommonBindings<T>>,
}
impl<T: Hash + Eq> Default for ProcessingScope<T> {
fn default() -> Self {
ProcessingScope::new(None as Option<String>, Default::default())
}
}
impl<T: Hash + Eq> ScopeParentId for ProcessingScope<T> {
fn parent_id(&self) -> Option<&str> {
self.parent_id.as_ref().map(|s| s.as_str())
}
}
impl<T: Hash + Eq> fmt::Debug for ProcessingScope<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} ({:?})", self.id(), self.environment())
}
}
impl<T: Hash + Eq> ProcessingScope<T> {
#[allow(dead_code)]
pub fn new<P: Into<String>>(
parent_id: Option<P>,
environment: ProcessingScopeEnvironment,
) -> Self {
let scope_id = allocate_element_key();
let parent_id = parent_id.map(|s| s.into());
ProcessingScope {
scope_id: scope_id,
parent_id: parent_id,
environment: environment,
scoped_idents: Default::default(),
shaped_idents: Default::default(),
element_bindings: Default::default(),
}
}
pub fn | (&self) -> &str {
&self.scope_id
}
pub fn add_ident(
&mut self,
key: String,
binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.scoped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.scoped_idents.get(key).map(|v| v.to_owned())
}
pub fn add_ident_shape(
&mut self,
key: String,
binding: BindingShape<T>,
) -> DocumentProcessingResult<()> {
self.shaped_idents.insert(key, binding);
Ok(())
}
pub fn get_ident_shape(&mut self, key: &str) -> Option<BindingShape<T>>
where
T: Clone,
{
self.shaped_idents.get(key).map(|v| v.to_owned())
}
pub fn add_element_binding(
&mut self,
key: String,
common_binding: CommonBindings<T>,
) -> DocumentProcessingResult<()> {
self.element_bindings.insert(key, common_binding);
Ok(())
}
pub fn get_element_binding(&mut self, key: &str) -> Option<CommonBindings<T>>
where
T: Clone,
{
self.element_bindings.get(key).map(|v| v.to_owned())
}
pub fn environment(&self) -> &ProcessingScopeEnvironment {
&self.environment
}
}
| id | identifier_name |
console.py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
__all__ = [
'isatty', 'color_print', 'human_time', 'human_file_size',
'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
'terminal_size']
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
"""Singleton class given access to IPython streams, etc."""
@classproperty
def get_ipython(cls):
try:
from IPython import get_ipython
except ImportError:
pass
return get_ipython
@classproperty
def OutStream(cls):
if not hasattr(cls, '_OutStream'):
cls._OutStream = None
try:
cls.get_ipython()
except NameError:
return None
try:
from ipykernel.iostream import OutStream
except ImportError:
try:
from IPython.zmq.iostream import OutStream
except ImportError:
from IPython import version_info
if version_info[0] >= 4:
return None
try:
from IPython.kernel.zmq.iostream import OutStream
except ImportError:
return None
cls._OutStream = OutStream
return cls._OutStream
@classproperty
def ipyio(cls):
if not hasattr(cls, '_ipyio'):
try:
from IPython.utils import io
except ImportError:
cls._ipyio = None
else:
cls._ipyio = io
return cls._ipyio
@classproperty
def IOStream(cls):
if cls.ipyio is None:
return None
else:
return cls.ipyio.IOStream
@classmethod
def get_stream(cls, stream):
return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
"""
This utility function contains the logic to determine what streams to use
by default for standard out/err.
Typically this will just return `sys.stdout`, but it contains additional
logic for use in IPython on Windows to determine the correct stream to use
(usually ``IPython.util.io.stdout`` but only if sys.stdout is a TTY).
"""
if stderr:
stream = 'stderr'
else:
stream = 'stdout'
sys_stream = getattr(sys, stream)
if not isatty(sys_stream) or _IPython.OutStream is None:
return sys_stream
# Our system stream is an atty and we're in ipython.
ipyio_stream = _IPython.get_stream(stream)
if ipyio_stream is not None and isatty(ipyio_stream):
# Use the IPython console output stream
return ipyio_stream
else:
# sys.stdout was set to some other non-TTY stream (a file perhaps)
# so just use it directly
return sys_stream
def isatty(file):
"""
Returns `True` if ``file`` is a tty.
Most built-in Python file-like objects have an `isatty` member,
but some user-defined types may not, so this assumes those are not
ttys.
"""
if (multiprocessing.current_process().name != 'MainProcess' or
threading.current_thread().getName() != 'MainThread'):
return False
if hasattr(file, 'isatty'):
return file.isatty()
# Use two isinstance calls to only evaluate IOStream when necessary.
if (_IPython.OutStream is None or
(not isinstance(file, _IPython.OutStream) and
not isinstance(file, _IPython.IOStream))):
return False
# File is an IPython OutStream or IOStream. Check whether:
# - File name is 'stdout'; or
# - File wraps a Console
if getattr(file, 'name', None) == 'stdout':
return True
if hasattr(file, 'stream'):
# On Windows, in IPython 2 the standard I/O streams will wrap
# pyreadline.Console objects if pyreadline is available; this should
# be considered a TTY.
try:
from pyreadyline.console import Console as PyreadlineConsole
except ImportError:
return False
return isinstance(file.stream, PyreadlineConsole)
return False
def terminal_size(file=None):
"""
Returns a tuple (height, width) containing the height and width of
the terminal.
This function will look for the width in height in multiple areas
before falling back on the width and height in astropy's
configuration.
"""
if file is None:
file = _get_stdout()
try:
s = struct.pack(str("HHHH"), 0, 0, 0, 0)
x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
(lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
if lines > 12:
lines -= 6
if width > 10:
width -= 1
if lines <= 0 or width <= 0:
raise Exception('unable to get terminal size')
return (lines, width)
except Exception:
try:
# see if POSIX standard variables will work
return (int(os.environ.get('LINES')),
int(os.environ.get('COLUMNS')))
except TypeError:
# fall back on configuration variables, or if not
# set, (25, 80)
lines = conf.max_lines
width = conf.max_width
if lines is None:
lines = 25
if width is None:
width = 80
return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
This won't actually effect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
    """Write the supplied string with the given write function like
    ``write(s)``, but use a writer for the locale's preferred encoding in case
    of a UnicodeEncodeError.  Failing that attempt to write with 'utf-8' or
    'latin-1'.

    Returns the ``write`` callable that succeeded, so the caller can keep
    using it for subsequent writes without repeating the fallback search.
    """
    if (_IPython.IOStream is not None and
            isinstance(fileobj, _IPython.IOStream)):
        # If the output stream is an IPython.utils.io.IOStream object that's
        # not going to be very helpful to us since it doesn't raise any
        # exceptions when an error occurs writing to its underlying stream.
        # There's no advantage to us using IOStream.write directly though;
        # instead just write directly to its underlying stream:
        write = fileobj.stream.write
    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Let's try the next approach...
        pass
    # Wrap the raw file object in an encoding writer for the locale's
    # preferred encoding (or utf-8 if that codec name is unknown).
    enc = locale.getpreferredencoding()
    try:
        Writer = codecs.getwriter(enc)
    except LookupError:
        Writer = codecs.getwriter(_DEFAULT_ENCODING)
    f = Writer(fileobj)
    write = f.write
    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Last-ditch attempt: a latin-1 writer.
        Writer = codecs.getwriter('latin-1')
        f = Writer(fileobj)
        write = f.write
        # If this doesn't work let the exception bubble up; I'm out of ideas
        write(s)
        return write
def color_print(*args, **kwargs):
    """
    Prints colors and styles to the terminal using ANSI escape
    sequences.
    ::

        color_print('This is the color ', 'default', 'GREEN', 'green')

    Parameters
    ----------
    positional args : str
        The positional arguments come in pairs (*msg*, *color*), where
        *msg* is the string to display and *color* is the color to
        display it in.  A trailing unpaired *msg* is printed uncolored.
        *color* is an ANSI terminal color name. Must be one of:
        black, red, green, brown, blue, magenta, cyan, lightgrey,
        default, darkgrey, lightred, lightgreen, yellow, lightblue,
        lightmagenta, lightcyan, white, or '' (the empty string).
    file : writeable file-like object, optional
        Where to write to.  Defaults to `sys.stdout`.  If file is not
        a tty (as determined by calling its `isatty` member, if one
        exists), no coloring will be included.
    end : str, optional
        The ending of the message.  Defaults to ``\\n``.  The end will
        be printed after resetting any color or font state.
    """
    file = kwargs.get('file', _get_stdout())
    end = kwargs.get('end', '\n')
    write = file.write
    if isatty(file) and conf.use_color:
        # Colorized path: wrap each message in ANSI codes.
        for i in range(0, len(args), 2):
            msg = args[i]
            if i + 1 == len(args):
                # Odd number of arguments: final message gets no color.
                color = ''
            else:
                color = args[i + 1]
            if color:
                msg = _color_text(msg, color)
            # Some file objects support writing unicode sensibly on some Python
            # versions; if this fails try creating a writer using the locale's
            # preferred encoding. If that fails too give up.
            if six.PY2 and isinstance(msg, bytes):
                msg = _decode_preferred_encoding(msg)
            # Keep whichever write function succeeded for later messages.
            write = _write_with_fallback(msg, write, file)
        write(end)
    else:
        # Plain path: never emit escape codes to non-tty output.
        for i in range(0, len(args), 2):
            msg = args[i]
            if six.PY2 and isinstance(msg, bytes):
                # Support decoding bytes to unicode on Python 2; use the
                # preferred encoding for the locale (which is *sometimes*
                # sensible)
                msg = _decode_preferred_encoding(msg)
            write(msg)
        write(end)
def strip_ansi_codes(s):
    """
    Return a copy of ``s`` with every ANSI color escape sequence removed.
    """
    ansi_escape = re.compile('\033\\[([0-9]+)(;[0-9]+)*m')
    return ansi_escape.sub('', s)
def human_time(seconds):
    """
    Returns a human-friendly time string that is always exactly 6
    characters long.

    Depending on the number of seconds given, can be one of::

        1w 3d
        2d 4h
        1h 5m
        1m 4s
          15s

    Will be in color if console coloring is turned on.

    Parameters
    ----------
    seconds : int
        The number of seconds to represent

    Returns
    -------
    time : str
        A human-friendly representation of the given number of seconds
        that is always exactly 6 characters.
    """
    unit_table = (
        ('y', 60 * 60 * 24 * 7 * 52),
        ('w', 60 * 60 * 24 * 7),
        ('d', 60 * 60 * 24),
        ('h', 60 * 60),
        ('m', 60),
        ('s', 1),
    )
    seconds = int(seconds)
    if seconds < 60:
        return ' {0:2d}s'.format(seconds)
    # Walk adjacent unit pairs from largest to smallest and render the
    # first pair whose larger unit fits into ``seconds``.
    for (big_unit, big_size), (small_unit, small_size) in \
            zip(unit_table, unit_table[1:]):
        if seconds >= big_size:
            n_big = seconds // big_size
            n_small = (seconds % big_size) // small_size
            return '{0:2d}{1}{2:2d}{3}'.format(
                n_big, big_unit, n_small, small_unit)
    return ' ~inf'
def human_file_size(size):
    """
    Returns a human-friendly string representing a file size
    that is 2-4 characters long.

    For example, depending on the number of bytes given, can be one
    of::

        256b
        64k
        1.1G

    Parameters
    ----------
    size : int
        The size of the file (in bytes)

    Returns
    -------
    size : str
        A human-friendly representation of the size of the file
    """
    if hasattr(size, 'unit'):
        # Import units only if necessary because the import takes a
        # significant time [#4649]
        from .. import units as u
        size = size.to(u.byte).value
    prefixes = ' kMGTPEZY'
    if size == 0:
        exponent = 0
    else:
        # Which power of 1000 the size falls into.
        exponent = int(math.floor(math.log(size) / math.log(1000)))
    prefix = '?' if exponent > 7 else prefixes[exponent]
    mantissa = size / int(math.pow(1000, exponent))
    text = str(mantissa)
    if prefix == ' ':
        # Plain byte counts are shown without a decimal point.
        text = text[:text.index('.')]
    elif text[2] == '.':
        text = text[:2]
    else:
        text = text[:3]
    return "{0:>3s}{1}".format(text, prefix)
class ProgressBar(six.Iterator):
    """
    A class to display a progress bar in the terminal.

    It is designed to be used either with the ``with`` statement::

        with ProgressBar(len(items)) as bar:
            for item in enumerate(items):
                bar.update()

    or as a generator::

        for item in ProgressBar(items):
            item.process()
    """
    def __init__(self, total_or_items, ipython_widget=False, file=None):
        """
        Parameters
        ----------
        total_or_items : int or sequence
            If an int, the number of increments in the process being
            tracked.  If a sequence, the items to iterate over.
        ipython_widget : bool, optional
            If `True`, the progress bar will display as an IPython
            notebook widget.
        file : writable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the progress bar will be
            completely silent.
        """
        if file is None:
            file = _get_stdout()
        if not ipython_widget and not isatty(file):
            # Non-interactive output: make update() a no-op so the bar
            # stays completely silent.
            self.update = self._silent_update
            self._silent = True
        else:
            self._silent = False
        if isiterable(total_or_items):
            # NOTE(review): assumes the iterable also supports len();
            # a pure generator would raise TypeError here -- confirm.
            self._items = iter(total_or_items)
            self._total = len(total_or_items)
        else:
            try:
                self._total = int(total_or_items)
            except TypeError:
                raise TypeError("First argument must be int or sequence")
            else:
                self._items = iter(range(self._total))
        self._file = file
        self._start_time = time.time()
        self._human_total = human_file_size(self._total)
        self._ipython_widget = ipython_widget
        self._signal_set = False
        if not ipython_widget:
            self._should_handle_resize = (
                _CAN_RESIZE_TERMINAL and self._file.isatty())
            self._handle_resize()
            if self._should_handle_resize:
                # Recompute the bar width whenever the terminal resizes.
                signal.signal(signal.SIGWINCH, self._handle_resize)
                self._signal_set = True
        self.update(0)

    def _handle_resize(self, signum=None, frame=None):
        # Reserve 37 columns for the counters, percentage and ETA text;
        # the rest of the terminal width is used for the bar itself.
        terminal_width = terminal_size(self._file)[1]
        self._bar_length = terminal_width - 37

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if not self._silent:
            if exc_type is None:
                # Clean exit: snap the bar to 100%.
                self.update(self._total)
            self._file.write('\n')
            self._file.flush()
        if self._signal_set:
            # Restore the default SIGWINCH handler.
            signal.signal(signal.SIGWINCH, signal.SIG_DFL)

    def __iter__(self):
        return self

    def __next__(self):
        # Advance the wrapped iterator, updating the bar per item and
        # finishing the display when the items are exhausted.
        try:
            rv = next(self._items)
        except StopIteration:
            self.__exit__(None, None, None)
            raise
        else:
            self.update()
            return rv

    def update(self, value=None):
        """
        Update progress bar via the console or notebook accordingly.
        """
        # Update self.value
        if value is None:
            value = self._current_value + 1
        self._current_value = value
        # Choose the appropriate environment
        if self._ipython_widget:
            self._update_ipython_widget(value)
        else:
            self._update_console(value)

    def _update_console(self, value=None):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor).
        """
        if self._total == 0:
            frac = 1.0
        else:
            frac = float(value) / float(self._total)
        file = self._file
        write = file.write
        if frac > 1:
            bar_fill = int(self._bar_length)
        else:
            bar_fill = int(float(self._bar_length) * frac)
        # Redraw in place: carriage return, then bar, counters, ETA.
        write('\r|')
        color_print('=' * bar_fill, 'blue', file=file, end='')
        if bar_fill < self._bar_length:
            color_print('>', 'green', file=file, end='')
            write('-' * (self._bar_length - bar_fill - 1))
        write('|')
        if value >= self._total:
            t = time.time() - self._start_time
            prefix = ' '
        elif value <= 0:
            t = None
            prefix = ''
        else:
            # Estimate time remaining by extrapolating the elapsed time.
            t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
            prefix = ' ETA '
        write(' {0:>4s}/{1:>4s}'.format(
            human_file_size(value),
            self._human_total))
        write(' ({:>6.2%})'.format(frac))
        write(prefix)
        if t is not None:
            write(human_time(t))
        self._file.flush()

    def _update_ipython_widget(self, value=None):
        """
        Update the progress bar to the given value (out of a total
        given to the constructor).

        This method is for use in the IPython notebook 2+.
        """
        # Create and display an empty progress bar widget,
        # if none exists.
        if not hasattr(self, '_widget'):
            # Import only if an IPython widget, i.e., widget in iPython NB
            from IPython import version_info
            if version_info[0] < 4:
                from IPython.html import widgets
                self._widget = widgets.FloatProgressWidget()
            else:
                _IPython.get_ipython()
                from ipywidgets import widgets
                self._widget = widgets.FloatProgress()
            from IPython.display import display
            display(self._widget)
            self._widget.value = 0
        # Calculate percent completion, and update progress bar
        frac = (value/self._total)
        self._widget.value = frac * 100
        self._widget.description =' ({:>6.2%})'.format(frac)

    def _silent_update(self, value=None):
        # No-op replacement for update() used when output is not a tty.
        pass

    @classmethod
    def map(cls, function, items, multiprocess=False, file=None, step=100):
        """
        Does a `map` operation while displaying a progress bar with
        percentage complete.
        ::

            def work(i):
                print(i)

            ProgressBar.map(work, range(50))

        Parameters
        ----------
        function : function
            Function to call for each step
        items : sequence
            Sequence where each element is a tuple of arguments to pass to
            *function*.
        multiprocess : bool, optional
            If `True`, use the `multiprocessing` module to distribute each
            task to a different processor core.
        file : writeable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any), the scrollbar will
            be completely silent.
        step : int, optional
            Update the progress bar at least every *step* steps (default: 100).
            If ``multiprocess`` is `True`, this will affect the size
            of the chunks of ``items`` that are submitted as separate tasks
            to the process pool.  A large step size may make the job
            complete faster if ``items`` is very long.
        """
        results = []
        if file is None:
            file = _get_stdout()
        with cls(len(items), file=file) as bar:
            # Redraw at most roughly once per bar column to limit overhead.
            default_step = max(int(float(len(items)) / bar._bar_length), 1)
            chunksize = min(default_step, step)
            if not multiprocess:
                for i, item in enumerate(items):
                    results.append(function(item))
                    if (i % chunksize) == 0:
                        bar.update(i)
            else:
                # NOTE: imap_unordered means ``results`` is in completion
                # order, not input order.
                p = multiprocessing.Pool()
                for i, result in enumerate(
                        p.imap_unordered(function, items, chunksize=chunksize)):
                    bar.update(i)
                    results.append(result)
                p.close()
                p.join()
        return results
class Spinner(object):
    """
    A class to display a spinner in the terminal.

    It is designed to be used with the ``with`` statement::

        with Spinner("Reticulating splines", "green") as s:
            for item in enumerate(items):
                s.next()
    """
    # Spinner frames; the unicode set is used when conf.unicode_output
    # is enabled, the ASCII set otherwise (and as a runtime fallback).
    _default_unicode_chars = "◓◑◒◐"
    _default_ascii_chars = "-/|\\"

    def __init__(self, msg, color='default', file=None, step=1,
                 chars=None):
        """
        Parameters
        ----------
        msg : str
            The message to print
        color : str, optional
            An ANSI terminal color name.  Must be one of: black, red,
            green, brown, blue, magenta, cyan, lightgrey, default,
            darkgrey, lightred, lightgreen, yellow, lightblue,
            lightmagenta, lightcyan, white.
        file : writeable file-like object, optional
            The file to write the spinner to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the spinner will be
            completely silent.
        step : int, optional
            Only update the spinner every *step* steps
        chars : str, optional
            The character sequence to use for the spinner
        """
        if file is None:
            file = _get_stdout()
        self._msg = msg
        self._color = color
        self._file = file
        self._step = step
        if chars is None:
            if conf.unicode_output:
                chars = self._default_unicode_chars
            else:
                chars = self._default_ascii_chars
        self._chars = chars
        self._silent = not isatty(file)

    def _iterator(self):
        # Generator driving the animation: each next() redraws the
        # message and the current spinner frame in place.
        chars = self._chars
        index = 0
        file = self._file
        write = file.write
        flush = file.flush
        try_fallback = True
        while True:
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
            write(' ')
            try:
                if try_fallback:
                    write = _write_with_fallback(chars[index], write, file)
                else:
                    write(chars[index])
            except UnicodeError:
                # If even _write_with_fallback failed for any reason just give
                # up on trying to use the unicode characters
                chars = self._default_ascii_chars
                write(chars[index])
                try_fallback = False  # No good will come of using this again
            flush()
            yield
            # Swallow ``step`` extra ticks between visible frame changes.
            for i in range(self._step):
                yield
            index = (index + 1) % len(chars)

    def __enter__(self):
        if self._silent:
            return self._silent_iterator()
        else:
            return self._iterator()

    def __exit__(self, exc_type, exc_value, traceback):
        file = self._file
        write = file.write
        flush = file.flush
        if not self._silent:
            # Redraw the message once more with a final status tag.
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
            if exc_type is None:
                color_print(' [Done]', 'green', file=file)
            else:
                color_print(' [Failed]', 'red', file=file)
        flush()

    def _silent_iterator(self):
        # Non-tty variant: print the message once, then ignore ticks.
        color_print(self._msg, self._color, file=self._file, end='')
        self._file.flush()
        while True:
            yield
class ProgressBarOrSpinner(object):
    """
    A class that displays either a `ProgressBar` or `Spinner`
    depending on whether the total size of the operation is
    known or not.

    It is designed to be used with the ``with`` statement::

        if file.has_length():
            length = file.get_length()
        else:
            length = None
        bytes_read = 0
        with ProgressBarOrSpinner(length) as bar:
            while file.read(blocksize):
                bytes_read += blocksize
                bar.update(bytes_read)
    """
    def __init__(self, total, msg, color='default', file=None):
        """
        Parameters
        ----------
        total : int or None
            If an int, the number of increments in the process being
            tracked and a `ProgressBar` is displayed.  If `None`, a
            `Spinner` is displayed.
        msg : str
            The message to display above the `ProgressBar` or
            alongside the `Spinner`.
        color : str, optional
            The color of ``msg``, if any.  Must be an ANSI terminal
            color name.  Must be one of: black, red, green, brown,
            blue, magenta, cyan, lightgrey, default, darkgrey,
            lightred, lightgreen, yellow, lightblue, lightmagenta,
            lightcyan, white.
        file : writable file-like object, optional
            The file to write to.  Defaults to `sys.stdout`.  If
            ``file`` is not a tty (as determined by calling its `isatty`
            member, if any), only ``msg`` will be displayed: the
            `ProgressBar` or `Spinner` will be silent.
        """
        if file is None:
            file = _get_stdout()
        if total is None or not isatty(file):
            # Unknown length (or non-interactive output): a spinner
            # needs no total.
            self._is_spinner = True
            self._obj = Spinner(msg, color=color, file=file)
        else:
            self._is_spinner = False
            color_print(msg, color, file=file)
            self._obj = ProgressBar(total, file=file)

    def __enter__(self):
        # Keep the iterator a Spinner's __enter__ returns so update()
        # can advance the animation.
        self._iter = self._obj.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return self._obj.__exit__(exc_type, exc_value, traceback)

    def update(self, value):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor), or advance the spinner one tick
        (``value`` is ignored in that case).
        """
        if self._is_spinner:
            next(self._iter)
        else:
            self._obj.update(value)
def print_code_line(line, col=None, file=None, tabwidth=8, width=70):
    """
    Prints a line of source code, highlighting a particular character
    position in the line.  Useful for displaying the context of error
    messages.

    If the line is more than ``width`` characters, the line is truncated
    accordingly and '…' characters are inserted at the front and/or
    end.

    It looks like this::

        there_is_a_syntax_error_here :
                                     ^

    Parameters
    ----------
    line : unicode
        The line of code to display
    col : int, optional
        The character in the line to highlight.  ``col`` must be less
        than ``len(line)``.
    file : writeable file-like object, optional
        Where to write to.  Defaults to `sys.stdout`.
    tabwidth : int, optional
        The number of spaces per tab (``'\\t'``) character.  Default
        is 8.  All tabs will be converted to spaces to ensure that the
        caret lines up with the correct column.
    width : int, optional
        The width of the display, beyond which the line will be
        truncated.  Defaults to 70 (this matches the default in the
        standard library's `textwrap` module).
    """
    if file is None:
        file = _get_stdout()
    if conf.unicode_output:
        ellipsis = '…'
    else:
        ellipsis = '...'
    write = file.write
    if col is not None:
        assert col < len(line)
        # Shift the caret right by the extra columns tab expansion adds
        # to its left, so it stays aligned with the expanded text.
        ntabs = line[:col].count('\t')
        col += ntabs * (tabwidth - 1)
    line = line.rstrip('\n')
    line = line.replace('\t', ' ' * tabwidth)
    if col is not None and col > width:
        # The caret falls beyond the display width: drop enough of the
        # front of the line (replaced by a leading ellipsis) to bring
        # the caret back into view.
        # (A dead ``new_col = col`` assignment was removed here; its
        # value was never read.)
        new_col = min(width // 2, len(line) - col)
        offset = col - new_col
        line = line[offset + len(ellipsis):]
        width -= len(ellipsis)
        col -= offset
        color_print(ellipsis, 'darkgrey', file=file, end='')
    if len(line) > width:
        # Truncate the tail and mark it with a trailing ellipsis.
        write(line[:width - len(ellipsis)])
        color_print(ellipsis, 'darkgrey', file=file)
    else:
        write(line)
        write('\n')
    if col is not None:
        write(' ' * col)
        color_print('^', 'red', file=file)
# The following four Getch* classes implement unbuffered character reading from
# stdin on Windows, linux, MacOSX. This is taken directly from ActiveState
# Code Recipes:
# http://code.activestate.com/recipes/134892-getch-like-unbuffered-character-reading-from-stdin/
#
class Getch(object):
    """Get a single character from standard input without screen echo.

    Returns
    -------
    char : str (one character)
    """
    def __init__(self):
        # Probe platform backends in order: Windows (msvcrt), then
        # classic MacOS (Carbon), then POSIX termios as the fallback.
        # Each backend's constructor raises ImportError (or
        # AttributeError for Carbon) on an unsupported platform.
        try:
            self.impl = _GetchWindows()
        except ImportError:
            try:
                self.impl = _GetchMacCarbon()
            except (ImportError, AttributeError):
                self.impl = _GetchUnix()
    def __call__(self):
        # Delegate to the selected platform implementation.
        return self.impl()
class _GetchUnix(object):
def __init__ | tchWindows(object):
def __init__(self):
import msvcrt # pylint: disable=W0611
def __call__(self):
import msvcrt
return msvcrt.getch()
class _GetchMacCarbon(object):
"""
A function which returns the current ASCII key that is down;
if no ASCII key is down, the null string is returned. The
page http://www.mactech.com/macintosh-c/chap02-1.html was
very helpful in figuring out how to do this.
"""
def __init__(self):
import Carbon
Carbon.Evt # see if it has this (in Unix, it doesn't)
def __call__(self):
import Carbon
if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask
return ''
else:
#
# The event contains the following info:
# (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
#
# The message (msg) contains the ASCII char which is
# extracted with the 0x000000FF charCodeMask; this
# number is converted to an ASCII character with chr() and
# returned
#
(what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
| (self):
import tty # pylint: disable=W0611
import sys # pylint: disable=W0611
# import termios now or else you'll get the Unix
# version on the Mac
import termios # pylint: disable=W0611
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _Ge | identifier_body |
console.py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
__all__ = [
'isatty', 'color_print', 'human_time', 'human_file_size',
'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
'terminal_size']
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
    """Singleton class given access to IPython streams, etc.

    Each classproperty probes the relevant IPython module lazily and
    caches the result on the class, so repeated access is cheap and the
    module works whether or not IPython is installed.
    """
    @classproperty
    def get_ipython(cls):
        # Return IPython's ``get_ipython`` function.  If IPython is not
        # installed the local name stays unbound and the ``return``
        # raises NameError/UnboundLocalError, which callers (e.g.
        # ``OutStream`` below) catch.
        try:
            from IPython import get_ipython
        except ImportError:
            # BUGFIX: this except clause was missing (garbled line),
            # leaving the ``try`` block syntactically incomplete.
            pass
        return get_ipython

    @classproperty
    def OutStream(cls):
        # The IPython kernel's OutStream class, or None when IPython is
        # absent or too old/new to provide one.  Cached in cls._OutStream.
        if not hasattr(cls, '_OutStream'):
            cls._OutStream = None
            try:
                cls.get_ipython()
            except NameError:
                return None
            try:
                from ipykernel.iostream import OutStream
            except ImportError:
                try:
                    # Older IPython kept it under IPython.zmq.
                    from IPython.zmq.iostream import OutStream
                except ImportError:
                    from IPython import version_info
                    if version_info[0] >= 4:
                        return None
                    try:
                        from IPython.kernel.zmq.iostream import OutStream
                    except ImportError:
                        return None
            cls._OutStream = OutStream
        return cls._OutStream

    @classproperty
    def ipyio(cls):
        # The IPython.utils.io module, or None when unavailable.
        if not hasattr(cls, '_ipyio'):
            try:
                from IPython.utils import io
            except ImportError:
                cls._ipyio = None
            else:
                cls._ipyio = io
        return cls._ipyio

    @classproperty
    def IOStream(cls):
        if cls.ipyio is None:
            return None
        else:
            return cls.ipyio.IOStream

    @classmethod
    def get_stream(cls, stream):
        # Fetch e.g. the 'stdout'/'stderr' attribute of IPython.utils.io.
        return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
    """
    This utility function contains the logic to determine what streams to use
    by default for standard out/err.

    Typically this will just return `sys.stdout`, but it contains additional
    logic for use in IPython on Windows to determine the correct stream to use
    (usually ``IPython.util.io.stdout`` but only if sys.stdout is a TTY).

    Parameters
    ----------
    stderr : bool, optional
        If `True`, resolve the standard-error stream instead of
        standard output.
    """
    if stderr:
        stream = 'stderr'
    else:
        stream = 'stdout'
    sys_stream = getattr(sys, stream)
    if not isatty(sys_stream) or _IPython.OutStream is None:
        return sys_stream
    # Our system stream is an atty and we're in ipython.
    ipyio_stream = _IPython.get_stream(stream)
    if ipyio_stream is not None and isatty(ipyio_stream):
        # Use the IPython console output stream
        return ipyio_stream
    else:
        # sys.stdout was set to some other non-TTY stream (a file perhaps)
        # so just use it directly
        return sys_stream
def isatty(file):
    """
    Returns `True` if ``file`` is a tty.

    Most built-in Python file-like objects have an `isatty` member,
    but some user-defined types may not, so this assumes those are not
    ttys.
    """
    # Only the main thread of the main process can own an interactive
    # terminal; progress displays from workers must stay silent.
    if (multiprocessing.current_process().name != 'MainProcess' or
            threading.current_thread().getName() != 'MainThread'):
        return False
    if hasattr(file, 'isatty'):
        return file.isatty()
    # Use two isinstance calls to only evaluate IOStream when necessary.
    if (_IPython.OutStream is None or
            (not isinstance(file, _IPython.OutStream) and
             not isinstance(file, _IPython.IOStream))):
        return False
    # File is an IPython OutStream or IOStream.  Check whether:
    # - File name is 'stdout'; or
    # - File wraps a Console
    if getattr(file, 'name', None) == 'stdout':
        return True
    if hasattr(file, 'stream'):
        # On Windows, in IPython 2 the standard I/O streams will wrap
        # pyreadline.Console objects if pyreadline is available; this should
        # be considered a TTY.
        try:
            # BUGFIX: the module is ``pyreadline``, not ``pyreadyline``;
            # the typo made this branch always return False.
            from pyreadline.console import Console as PyreadlineConsole
        except ImportError:
            return False
        return isinstance(file.stream, PyreadlineConsole)
    return False
def terminal_size(file=None):
"""
Returns a tuple (height, width) containing the height and width of
the terminal.
This function will look for the width in height in multiple areas
before falling back on the width and height in astropy's
configuration.
"""
if file is None:
file = _get_stdout()
try:
s = struct.pack(str("HHHH"), 0, 0, 0, 0)
x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
(lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
if lines > 12:
lines -= 6
if width > 10:
width -= 1
if lines <= 0 or width <= 0:
raise Exception('unable to get terminal size')
return (lines, width)
except Exception:
try:
# see if POSIX standard variables will work
return (int(os.environ.get('LINES')),
int(os.environ.get('COLUMNS')))
except TypeError:
# fall back on configuration variables, or if not
# set, (25, 80)
lines = conf.max_lines
width = conf.max_width
if lines is None:
lines = 25
if width is None:
width = 80
return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
This won't actually effect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
"""Write the supplied string with the given write function like
``write(s)``, but use a writer for the locale's preferred encoding in case
of a UnicodeEncodeError. Failing that attempt to write with 'utf-8' or
'latin-1'.
"""
if (_IPython.IOStream is not None and
isinstance(fileobj, _IPython.IOStream)):
# If the output stream is an IPython.utils.io.IOStream object that's
# not going to be very helpful to us since it doesn't raise any
# exceptions when an error occurs writing to its underlying stream.
# There's no advantage to us using IOStream.write directly though;
# instead just write directly to its underlying stream:
write = fileobj.stream.write
try:
write(s)
return write
except UnicodeEncodeError:
# Let's try the next approach...
pass
enc = locale.getpreferredencoding()
try:
Writer = codecs.getwriter(enc)
except LookupError:
Writer = codecs.getwriter(_DEFAULT_ENCODING)
f = Writer(fileobj)
write = f.write
try:
write(s)
return write
except UnicodeEncodeError:
Writer = codecs.getwriter('latin-1')
f = Writer(fileobj)
write = f.write
# If this doesn't work let the exception bubble up; I'm out of ideas
write(s)
return write
def color_print(*args, **kwargs):
"""
Prints colors and styles to the terminal uses ANSI escape
sequences.
::
color_print('This is the color ', 'default', 'GREEN', 'green')
Parameters
----------
positional args : str
The positional arguments come in pairs (*msg*, *color*), where
*msg* is the string to display and *color* is the color to
display it in.
*color* is an ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
file : writeable file-like object, optional
Where to write to. Defaults to `sys.stdout`. If file is not
a tty (as determined by calling its `isatty` member, if one
exists), no coloring will be included.
end : str, optional
The ending of the message. Defaults to ``\\n``. The end will
be printed after resetting any color or font state.
"""
file = kwargs.get('file', _get_stdout())
end = kwargs.get('end', '\n')
write = file.write
if isatty(file) and conf.use_color:
for i in range(0, len(args), 2):
msg = args[i]
if i + 1 == len(args):
color = ''
else:
color = args[i + 1]
if color:
msg = _color_text(msg, color)
# Some file objects support writing unicode sensibly on some Python
# versions; if this fails try creating a writer using the locale's
# preferred encoding. If that fails too give up.
if six.PY2 and isinstance(msg, bytes):
msg = _decode_preferred_encoding(msg)
write = _write_with_fallback(msg, write, file)
write(end)
else:
for i in range(0, len(args), 2):
msg = args[i]
if six.PY2 and isinstance(msg, bytes):
# Support decoding bytes to unicode on Python 2; use the
# preferred encoding for the locale (which is *sometimes*
# sensible)
msg = _decode_preferred_encoding(msg)
write(msg)
write(end)
def strip_ansi_codes(s):
"""
Remove ANSI color codes from the string.
"""
return re.sub('\033\\[([0-9]+)(;[0-9]+)*m', '', s)
def human_time(seconds):
"""
Returns a human-friendly time string that is always exactly 6
characters long.
Depending on the number of seconds given, can be one of::
1w 3d
2d 4h
1h 5m
1m 4s
15s
Will be in color if console coloring is turned on.
Parameters
----------
seconds : int
The number of seconds to represent
Returns
-------
time : str
A human-friendly representation of the given number of seconds
that is always exactly 6 characters.
"""
units = [
('y', 60 * 60 * 24 * 7 * 52),
('w', 60 * 60 * 24 * 7),
('d', 60 * 60 * 24),
('h', 60 * 60),
('m', 60),
('s', 1),
]
seconds = int(seconds)
if seconds < 60:
return ' {0:2d}s'.format(seconds)
for i in range(len(units) - 1):
unit1, limit1 = units[i]
unit2, limit2 = units[i + 1]
if seconds >= limit1:
return '{0:2d}{1}{2:2d}{3}'.format(
seconds // limit1, unit1,
(seconds % limit1) // limit2, unit2)
return ' ~inf'
def human_file_size(size):
"""
Returns a human-friendly string representing a file size
that is 2-4 characters long.
For example, depending on the number of bytes given, can be one
of::
256b
64k
1.1G
Parameters
----------
size : int
The size of the file (in bytes)
Returns
-------
size : str
A human-friendly representation of the size of the file
"""
if hasattr(size, 'unit'):
# Import units only if necessary because the import takes a
# significant time [#4649]
from .. import units as u
size = size.to(u.byte).value
suffixes = ' kMGTPEZY'
if size == 0:
num_scale = 0
else:
num_scale = int(math.floor(math.log(size) / math.log(1000)))
if num_scale > 7:
suffix = '?'
else:
suffix = suffixes[num_scale]
num_scale = int(math.pow(1000, num_scale))
value = size / num_scale
str_value = str(value)
if suffix == ' ':
str_value = str_value[:str_value.index('.')]
elif str_value[2] == '.':
str_value = str_value[:2]
else:
str_value = str_value[:3]
return "{0:>3s}{1}".format(str_value, suffix)
class ProgressBar(six.Iterator):
    """
    A class to display a progress bar in the terminal.

    It is designed to be used either with the ``with`` statement::

        with ProgressBar(len(items)) as bar:
            for item in enumerate(items):
                bar.update()

    or as a generator::

        for item in ProgressBar(items):
            item.process()
    """

    def __init__(self, total_or_items, ipython_widget=False, file=None):
        """
        Parameters
        ----------
        total_or_items : int or sequence
            If an int, the number of increments in the process being
            tracked.  If a sequence, the items to iterate over.

        ipython_widget : bool, optional
            If `True`, the progress bar will display as an IPython
            notebook widget.

        file : writable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the progress bar will be
            completely silent.
        """
        if file is None:
            file = _get_stdout()

        # On non-interactive output, replace `update` with a no-op so
        # callers don't have to special-case redirected streams.
        if not ipython_widget and not isatty(file):
            self.update = self._silent_update
            self._silent = True
        else:
            self._silent = False

        if isiterable(total_or_items):
            # NOTE(review): assumes the iterable also supports len();
            # a pure generator would raise TypeError here.
            self._items = iter(total_or_items)
            self._total = len(total_or_items)
        else:
            try:
                self._total = int(total_or_items)
            except TypeError:
                raise TypeError("First argument must be int or sequence")
            else:
                self._items = iter(range(self._total))

        self._file = file
        self._start_time = time.time()
        self._human_total = human_file_size(self._total)
        self._ipython_widget = ipython_widget

        # Track terminal resizes (SIGWINCH) so the bar re-fits the
        # window; only possible on platforms providing fcntl/termios.
        self._signal_set = False
        if not ipython_widget:
            self._should_handle_resize = (
                _CAN_RESIZE_TERMINAL and self._file.isatty())
            self._handle_resize()
            if self._should_handle_resize:
                signal.signal(signal.SIGWINCH, self._handle_resize)
                self._signal_set = True

        self.update(0)

    def _handle_resize(self, signum=None, frame=None):
        # Recompute the bar width; 37 columns are reserved for the
        # size/percentage/ETA readout to the right of the bar.
        terminal_width = terminal_size(self._file)[1]
        self._bar_length = terminal_width - 37

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if not self._silent:
            if exc_type is None:
                # Snap the bar to 100% on clean exit.
                self.update(self._total)
            self._file.write('\n')
            self._file.flush()
            if self._signal_set:
                # Restore the default SIGWINCH behavior.
                signal.signal(signal.SIGWINCH, signal.SIG_DFL)

    def __iter__(self):
        return self

    def __next__(self):
        try:
            rv = next(self._items)
        except StopIteration:
            # Finalize the display before propagating the end of
            # iteration to the caller.
            self.__exit__(None, None, None)
            raise
        else:
            self.update()
            return rv

    def update(self, value=None):
        """
        Update progress bar via the console or notebook accordingly.
        """
        # Update self.value; with no argument, advance by one step.
        if value is None:
            value = self._current_value + 1
        self._current_value = value

        # Choose the appropriate environment
        if self._ipython_widget:
            self._update_ipython_widget(value)
        else:
            self._update_console(value)

    def _update_console(self, value=None):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor).
        """
        if self._total == 0:
            frac = 1.0
        else:
            frac = float(value) / float(self._total)

        file = self._file
        write = file.write

        # Clamp the fill so over-counting never overflows the bar.
        if frac > 1:
            bar_fill = int(self._bar_length)
        else:
            bar_fill = int(float(self._bar_length) * frac)
        write('\r|')
        color_print('=' * bar_fill, 'blue', file=file, end='')
        if bar_fill < self._bar_length:
            color_print('>', 'green', file=file, end='')
            write('-' * (self._bar_length - bar_fill - 1))
        write('|')

        # Elapsed time when done, no time for an empty bar, otherwise a
        # simple linear extrapolation of the remaining time.
        if value >= self._total:
            t = time.time() - self._start_time
            prefix = ' '
        elif value <= 0:
            t = None
            prefix = ''
        else:
            t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
            prefix = ' ETA '
        write(' {0:>4s}/{1:>4s}'.format(
            human_file_size(value),
            self._human_total))
        write(' ({:>6.2%})'.format(frac))
        write(prefix)
        if t is not None:
            write(human_time(t))
        self._file.flush()

    def _update_ipython_widget(self, value=None):
        """
        Update the progress bar to the given value (out of a total
        given to the constructor).

        This method is for use in the IPython notebook 2+.
        """
        # Create and display an empty progress bar widget,
        # if none exists.
        if not hasattr(self, '_widget'):
            # Import only if an IPython widget, i.e., widget in iPython NB
            from IPython import version_info
            if version_info[0] < 4:
                from IPython.html import widgets
                self._widget = widgets.FloatProgressWidget()
            else:
                _IPython.get_ipython()
                from ipywidgets import widgets
                self._widget = widgets.FloatProgress()
            from IPython.display import display

            display(self._widget)
            self._widget.value = 0

        # Calculate percent completion, and update progress bar
        frac = (value/self._total)
        self._widget.value = frac * 100
        self._widget.description = ' ({:>6.2%})'.format(frac)

    def _silent_update(self, value=None):
        # Stand-in for `update` when output is not a tty.
        pass

    @classmethod
    def map(cls, function, items, multiprocess=False, file=None, step=100):
        """
        Does a `map` operation while displaying a progress bar with
        percentage complete.

        ::

            def work(i):
                print(i)

            ProgressBar.map(work, range(50))

        Parameters
        ----------
        function : function
            Function to call for each step

        items : sequence
            Sequence where each element is a tuple of arguments to pass to
            *function*.

        multiprocess : bool, optional
            If `True`, use the `multiprocessing` module to distribute each
            task to a different processor core.

        file : writeable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any), the scrollbar will
            be completely silent.

        step : int, optional
            Update the progress bar at least every *step* steps (default: 100).
            If ``multiprocess`` is `True`, this will affect the size
            of the chunks of ``items`` that are submitted as separate tasks
            to the process pool.  A large step size may make the job
            complete faster if ``items`` is very long.
        """
        results = []

        if file is None:
            file = _get_stdout()

        with cls(len(items), file=file) as bar:
            # Update roughly once per bar cell, but never less often
            # than every `step` items.
            default_step = max(int(float(len(items)) / bar._bar_length), 1)
            chunksize = min(default_step, step)
            if not multiprocess:
                for i, item in enumerate(items):
                    results.append(function(item))
                    if (i % chunksize) == 0:
                        bar.update(i)
            else:
                # NOTE: imap_unordered yields results as they complete,
                # so `results` is NOT in input order here.
                p = multiprocessing.Pool()
                for i, result in enumerate(
                        p.imap_unordered(function, items, chunksize=chunksize)):
                    bar.update(i)
                    results.append(result)
                p.close()
                p.join()

        return results
class Spinner(object):
    """
    A class to display a spinner in the terminal.

    It is designed to be used with the ``with`` statement::

        with Spinner("Reticulating splines", "green") as s:
            for item in enumerate(items):
                s.next()
    """
    # Character cycles for the animation; the Unicode set is used only
    # when conf.unicode_output is enabled.
    _default_unicode_chars = "◓◑◒◐"
    _default_ascii_chars = "-/|\\"

    def __init__(self, msg, color='default', file=None, step=1,
                 chars=None):
        """
        Parameters
        ----------
        msg : str
            The message to print

        color : str, optional
            An ANSI terminal color name.  Must be one of: black, red,
            green, brown, blue, magenta, cyan, lightgrey, default,
            darkgrey, lightred, lightgreen, yellow, lightblue,
            lightmagenta, lightcyan, white.

        file : writeable file-like object, optional
            The file to write the spinner to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the spinner will be
            completely silent.

        step : int, optional
            Only update the spinner every *step* steps

        chars : str, optional
            The character sequence to use for the spinner
        """
        if file is None:
            file = _get_stdout()

        self._msg = msg
        self._color = color
        self._file = file
        self._step = step
        if chars is None:
            if conf.unicode_output:
                chars = self._default_unicode_chars
            else:
                chars = self._default_ascii_chars
        self._chars = chars

        self._silent = not isatty(file)

    def _iterator(self):
        # Generator driving the animation: each `next()` redraws the
        # message and advances the spinner character (every `step` ticks).
        chars = self._chars
        index = 0
        file = self._file
        write = file.write
        flush = file.flush
        try_fallback = True

        while True:
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
            write(' ')
            try:
                if try_fallback:
                    # May rebind `write` to an encoding-aware writer.
                    write = _write_with_fallback(chars[index], write, file)
                else:
                    write(chars[index])
            except UnicodeError:
                # If even _write_with_fallback failed for any reason just give
                # up on trying to use the unicode characters
                chars = self._default_ascii_chars
                write(chars[index])
                try_fallback = False  # No good will come of using this again
            flush()
            yield

            # Swallow `step - 1` ticks before advancing the character.
            for i in range(self._step):
                yield

            index = (index + 1) % len(chars)

    def __enter__(self):
        # Hand the caller the appropriate generator; a silent spinner
        # prints the message once and then does nothing per tick.
        if self._silent:
            return self._silent_iterator()
        else:
            return self._iterator()

    def __exit__(self, exc_type, exc_value, traceback):
        file = self._file
        write = file.write
        flush = file.flush

        if not self._silent:
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
        # Final status is printed even in silent mode, completing the
        # message line written by _silent_iterator.
        if exc_type is None:
            color_print(' [Done]', 'green', file=file)
        else:
            color_print(' [Failed]', 'red', file=file)
        flush()

    def _silent_iterator(self):
        # Non-tty variant: emit the message once, then ignore ticks.
        color_print(self._msg, self._color, file=self._file, end='')
        self._file.flush()

        while True:
            yield
class ProgressBarOrSpinner(object):
    """
    Displays a `ProgressBar` when the total size of an operation is
    known ahead of time, and a `Spinner` otherwise.

    It is designed to be used with the ``with`` statement::

        if file.has_length():
            length = file.get_length()
        else:
            length = None
        bytes_read = 0
        with ProgressBarOrSpinner(length) as bar:
            while file.read(blocksize):
                bytes_read += blocksize
                bar.update(bytes_read)
    """

    def __init__(self, total, msg, color='default', file=None):
        """
        Parameters
        ----------
        total : int or None
            If an int, the number of increments in the process being
            tracked and a `ProgressBar` is displayed.  If `None`, a
            `Spinner` is displayed.

        msg : str
            The message to display above the `ProgressBar` or
            alongside the `Spinner`.

        color : str, optional
            The color of ``msg``, if any.  Must be an ANSI terminal
            color name.  Must be one of: black, red, green, brown,
            blue, magenta, cyan, lightgrey, default, darkgrey,
            lightred, lightgreen, yellow, lightblue, lightmagenta,
            lightcyan, white.

        file : writable file-like object, optional
            The file to write to.  Defaults to `sys.stdout`.  If
            ``file`` is not a tty (as determined by calling its `isatty`
            member, if any), only ``msg`` will be displayed: the
            `ProgressBar` or `Spinner` will be silent.
        """
        if file is None:
            file = _get_stdout()

        # Fall back to a spinner whenever a determinate bar cannot be
        # shown: unknown total, or non-interactive output.
        use_spinner = total is None or not isatty(file)
        self._is_spinner = use_spinner
        if use_spinner:
            self._obj = Spinner(msg, color=color, file=file)
        else:
            color_print(msg, color, file=file)
            self._obj = ProgressBar(total, file=file)

    def __enter__(self):
        self._iter = self._obj.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return self._obj.__exit__(exc_type, exc_value, traceback)

    def update(self, value):
        """
        Advance the display: forward ``value`` to the progress bar, or
        tick the spinner (in which case ``value`` is ignored).
        """
        if self._is_spinner:
            next(self._iter)
        else:
            self._obj.update(value)
def print_code_line(line, col=None, file=None, tabwidth=8, width=70):
    """
    Prints a line of source code, highlighting a particular character
    position in the line.  Useful for displaying the context of error
    messages.

    If the line is more than ``width`` characters, the line is truncated
    accordingly and '…' characters are inserted at the front and/or
    end.

    It looks like this::

        there_is_a_syntax_error_here :
                                     ^

    Parameters
    ----------
    line : unicode
        The line of code to display

    col : int, optional
        The character in the line to highlight.  ``col`` must be less
        than ``len(line)``.

    file : writeable file-like object, optional
        Where to write to.  Defaults to `sys.stdout`.

    tabwidth : int, optional
        The number of spaces per tab (``'\\t'``) character.  Default
        is 8.  All tabs will be converted to spaces to ensure that the
        caret lines up with the correct column.

    width : int, optional
        The width of the display, beyond which the line will be
        truncated.  Defaults to 70 (this matches the default in the
        standard library's `textwrap` module).
    """
    if file is None:
        file = _get_stdout()

    if conf.unicode_output:
        ellipsis = '…'
    else:
        ellipsis = '...'

    write = file.write

    if col is not None:
        # Shift the caret column to account for tab expansion below.
        assert col < len(line)
        ntabs = line[:col].count('\t')
        col += ntabs * (tabwidth - 1)

    line = line.rstrip('\n')
    line = line.replace('\t', ' ' * tabwidth)

    if col is not None and col > width:
        # The highlighted column lies beyond the display width: chop
        # text off the front and mark it with a leading ellipsis.
        # (A dead reassignment of ``new_col`` was removed here; the
        # value was never read afterwards.)
        new_col = min(width // 2, len(line) - col)
        offset = col - new_col
        line = line[offset + len(ellipsis):]
        width -= len(ellipsis)
        col -= offset
        color_print(ellipsis, 'darkgrey', file=file, end='')

    if len(line) > width:
        # Truncate on the right and mark it with a trailing ellipsis.
        write(line[:width - len(ellipsis)])
        color_print(ellipsis, 'darkgrey', file=file)
    else:
        write(line)
        write('\n')

    if col is not None:
        write(' ' * col)
        color_print('^', 'red', file=file)
# The following four Getch* classes implement unbuffered character reading from
# stdin on Windows, linux, MacOSX. This is taken directly from ActiveState
# Code Recipes:
# http://code.activestate.com/recipes/134892-getch-like-unbuffered-character-reading-from-stdin/
#
class Getch(object):
    """Get a single character from standard input without screen echo.

    Returns
    -------
    char : str (one character)
    """

    def __init__(self):
        # Probe the platform backends from most to least specific; each
        # constructor raises when its platform support is unavailable
        # (ImportError, plus AttributeError for the Carbon probe).
        impl = None
        try:
            impl = _GetchWindows()
        except ImportError:
            pass
        if impl is None:
            try:
                impl = _GetchMacCarbon()
            except (ImportError, AttributeError):
                impl = _GetchUnix()
        self.impl = impl

    def __call__(self):
        return self.impl()
class _GetchUnix(object):
def __init__(self):
import tty # pylint: disable=W0611
import sys # pylint: disable=W0611
# import termios now or else you'll get the Unix
# version on the Mac
import termios # pylint: disable=W0611
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows(object):
    """Read a single character via the Windows console (msvcrt)."""

    def __init__(self):
        # Importing here proves msvcrt exists; on non-Windows platforms
        # the ImportError makes `Getch` fall back to another backend.
        import msvcrt  # pylint: disable=W0611

    def __call__(self):
        import msvcrt
        return msvcrt.getch()
class _GetchMacCarbon(object):
    """
    A function which returns the current ASCII key that is down;
    if no ASCII key is down, the null string is returned.  The
    page http://www.mactech.com/macintosh-c/chap02-1.html was
    very helpful in figuring out how to do this.
    """

    def __init__(self):
        # The Carbon bindings only exist on (old) Mac Python builds; the
        # AttributeError probe below distinguishes a Unix `Carbon` stub.
        import Carbon
        Carbon.Evt  # see if it has this (in Unix, it doesn't)

    def __call__(self):
        import Carbon
        if Carbon.Evt.EventAvail(0x0008)[0] == 0:  # 0x0008 is the keyDownMask
            return ''
        else:
            #
            # The event contains the following info:
            # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
            #
            # The message (msg) contains the ASCII char which is
            # extracted with the 0x000000FF charCodeMask; this
            # number is converted to an ASCII character with chr() and
            # returned
            #
            (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
            return chr(msg & 0x000000FF)
console.py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
# Public API of this module.
__all__ = [
    'isatty', 'color_print', 'human_time', 'human_file_size',
    'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
    'terminal_size']

# Fallback encoding used when the locale's preferred encoding is
# unknown to Python.
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
    """Singleton class given access to IPython streams, etc."""

    @classproperty
    def get_ipython(cls):
        try:
            from IPython import get_ipython
        except ImportError:
            pass
        # NOTE: if IPython is not installed, the name `get_ipython` is
        # unbound here and this raises NameError -- callers (see
        # OutStream below) rely on catching that NameError.
        return get_ipython

    @classproperty
    def OutStream(cls):
        # Lazily resolve and cache IPython's OutStream class; the probe
        # runs only once and the result is memoized on the class.
        if not hasattr(cls, '_OutStream'):
            cls._OutStream = None
            try:
                cls.get_ipython()
            except NameError:
                # IPython is not installed at all.
                return None

            # The OutStream class moved between IPython versions; try
            # the locations from newest to oldest.
            try:
                from ipykernel.iostream import OutStream
            except ImportError:
                try:
                    from IPython.zmq.iostream import OutStream
                except ImportError:
                    from IPython import version_info
                    if version_info[0] >= 4:
                        return None

                    try:
                        from IPython.kernel.zmq.iostream import OutStream
                    except ImportError:
                        return None

            cls._OutStream = OutStream

        return cls._OutStream

    @classproperty
    def ipyio(cls):
        # Lazily import and cache IPython.utils.io (None if unavailable).
        if not hasattr(cls, '_ipyio'):
            try:
                from IPython.utils import io
            except ImportError:
                cls._ipyio = None
            else:
                cls._ipyio = io
        return cls._ipyio

    @classproperty
    def IOStream(cls):
        if cls.ipyio is None:
            return None
        else:
            return cls.ipyio.IOStream

    @classmethod
    def get_stream(cls, stream):
        # Fetch e.g. ipyio.stdout / ipyio.stderr by attribute name.
        return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
    """
    Pick the stream to use for standard output (or standard error, when
    ``stderr`` is `True`).

    Typically this just returns `sys.stdout`, but it contains additional
    logic for use in IPython on Windows to determine the correct stream
    to use (usually ``IPython.util.io.stdout`` but only if sys.stdout
    is a TTY).
    """
    name = 'stderr' if stderr else 'stdout'
    sys_stream = getattr(sys, name)

    if not isatty(sys_stream) or _IPython.OutStream is None:
        return sys_stream

    # The system stream is a tty and we're running under IPython;
    # prefer IPython's wrapper stream when it is also a tty.
    ipy_stream = _IPython.get_stream(name)
    if ipy_stream is not None and isatty(ipy_stream):
        return ipy_stream

    # sys.stdout/stderr was redirected to some other non-TTY object
    # (a file, perhaps), so use it directly.
    return sys_stream
def isatty(file):
    """
    Returns `True` if ``file`` is a tty.

    Most built-in Python file-like objects have an `isatty` member,
    but some user-defined types may not, so this assumes those are not
    ttys.
    """
    # Never report a tty from worker processes/threads, so background
    # tasks don't draw progress bars over each other.
    if (multiprocessing.current_process().name != 'MainProcess' or
            threading.current_thread().getName() != 'MainThread'):
        return False

    if hasattr(file, 'isatty'):
        return file.isatty()

    # Use two isinstance calls to only evaluate IOStream when necessary.
    if (_IPython.OutStream is None or
            (not isinstance(file, _IPython.OutStream) and
             not isinstance(file, _IPython.IOStream))):
        return False

    # File is an IPython OutStream or IOStream.  Check whether:
    # - File name is 'stdout'; or
    # - File wraps a Console
    if getattr(file, 'name', None) == 'stdout':
        return True

    if hasattr(file, 'stream'):
        # On Windows, in IPython 2 the standard I/O streams will wrap
        # pyreadline.Console objects if pyreadline is available; this should
        # be considered a TTY.
        try:
            # BUGFIX: the module is named ``pyreadline``; this previously
            # imported the nonexistent ``pyreadyline``, so the Console
            # check below could never succeed.
            from pyreadline.console import Console as PyreadlineConsole
        except ImportError:
            return False

        return isinstance(file.stream, PyreadlineConsole)

    return False
def terminal_size(file=None):
    """
    Returns a tuple (height, width) containing the height and width of
    the terminal.

    This function will look for the width in height in multiple areas
    before falling back on the width and height in astropy's
    configuration.

    Parameters
    ----------
    file : file-like object, optional
        The stream whose controlling terminal is queried.  Defaults to
        the standard output stream.
    """
    if file is None:
        file = _get_stdout()

    try:
        # Ask the kernel for the window size of the tty behind `file`.
        s = struct.pack(str("HHHH"), 0, 0, 0, 0)
        x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
        (lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
        # Leave a little margin so output isn't crammed to the edges.
        if lines > 12:
            lines -= 6
        if width > 10:
            width -= 1
        if lines <= 0 or width <= 0:
            raise Exception('unable to get terminal size')
        return (lines, width)
    except Exception:
        try:
            # see if POSIX standard variables will work
            return (int(os.environ.get('LINES')),
                    int(os.environ.get('COLUMNS')))
        except (TypeError, ValueError):
            # BUGFIX: also catch ValueError -- a LINES/COLUMNS variable
            # that is set but non-numeric previously raised out of this
            # function.  TypeError covers the unset case (int(None)).
            # Fall back on configuration variables, or if not
            # set, (25, 80)
            lines = conf.max_lines
            width = conf.max_width
            if lines is None:
                lines = 25
            if width is None:
                width = 80
            return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
This won't actually effect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
    """Write the supplied string with the given write function like
    ``write(s)``, but use a writer for the locale's preferred encoding in case
    of a UnicodeEncodeError.  Failing that attempt to write with 'utf-8' or
    'latin-1'.

    Returns the write function that succeeded, so callers can reuse it
    for subsequent writes without repeating the fallback dance.
    """
    if (_IPython.IOStream is not None and
            isinstance(fileobj, _IPython.IOStream)):
        # If the output stream is an IPython.utils.io.IOStream object that's
        # not going to be very helpful to us since it doesn't raise any
        # exceptions when an error occurs writing to its underlying stream.
        # There's no advantage to us using IOStream.write directly though;
        # instead just write directly to its underlying stream:
        write = fileobj.stream.write

    # Stage 1: try the caller-supplied write function as-is.
    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Let's try the next approach...
        pass

    # Stage 2: wrap the file in a codec writer for the locale's
    # preferred encoding (or the module default if that is unknown).
    enc = locale.getpreferredencoding()
    try:
        Writer = codecs.getwriter(enc)
    except LookupError:
        Writer = codecs.getwriter(_DEFAULT_ENCODING)

    f = Writer(fileobj)
    write = f.write

    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Stage 3: latin-1 as a last resort.
        Writer = codecs.getwriter('latin-1')
        f = Writer(fileobj)
        write = f.write

        # If this doesn't work let the exception bubble up; I'm out of ideas
        write(s)
        return write
def color_print(*args, **kwargs):
    """
    Prints colors and styles to the terminal uses ANSI escape
    sequences.

    ::

        color_print('This is the color ', 'default', 'GREEN', 'green')

    Parameters
    ----------
    positional args : str
        The positional arguments come in pairs (*msg*, *color*), where
        *msg* is the string to display and *color* is the color to
        display it in.

        *color* is an ANSI terminal color name.  Must be one of:
        black, red, green, brown, blue, magenta, cyan, lightgrey,
        default, darkgrey, lightred, lightgreen, yellow, lightblue,
        lightmagenta, lightcyan, white, or '' (the empty string).

    file : writeable file-like object, optional
        Where to write to.  Defaults to `sys.stdout`.  If file is not
        a tty (as determined by calling its `isatty` member, if one
        exists), no coloring will be included.

    end : str, optional
        The ending of the message.  Defaults to ``\\n``.  The end will
        be printed after resetting any color or font state.
    """
    file = kwargs.get('file', _get_stdout())

    end = kwargs.get('end', '\n')

    write = file.write
    if isatty(file) and conf.use_color:
        # Interactive path: colorize each (msg, color) pair.
        for i in range(0, len(args), 2):
            msg = args[i]
            # An odd trailing message has no color argument.
            if i + 1 == len(args):
                color = ''
            else:
                color = args[i + 1]

            if color:
                msg = _color_text(msg, color)

            # Some file objects support writing unicode sensibly on some Python
            # versions; if this fails try creating a writer using the locale's
            # preferred encoding. If that fails too give up.
            if six.PY2 and isinstance(msg, bytes):
                msg = _decode_preferred_encoding(msg)

            # _write_with_fallback returns the write function that
            # worked, so later pairs reuse it directly.
            write = _write_with_fallback(msg, write, file)

        write(end)
    else:
        # Non-tty (or color disabled): emit the messages uncolored.
        for i in range(0, len(args), 2):
            msg = args[i]
            if six.PY2 and isinstance(msg, bytes):
                # Support decoding bytes to unicode on Python 2; use the
                # preferred encoding for the locale (which is *sometimes*
                # sensible)
                msg = _decode_preferred_encoding(msg)
            write(msg)
        write(end)
def strip_ansi_codes(s):
    """
    Remove ANSI color codes from the string.
    """
    # Matches sequences such as '\033[0;31m' produced by _color_text.
    return re.sub('\033\\[([0-9]+)(;[0-9]+)*m', '', s)
def human_time(seconds):
    """
    Returns a human-friendly time string that is always exactly 6
    characters long.

    Depending on the number of seconds given, can be one of::

        1w 3d
        2d 4h
        1h 5m
        1m 4s
          15s

    Will be in color if console coloring is turned on.

    Parameters
    ----------
    seconds : int
        The number of seconds to represent

    Returns
    -------
    time : str
        A human-friendly representation of the given number of seconds
        that is always exactly 6 characters.
    """
    # Units from largest to smallest; each entry is (suffix, seconds).
    units = [
        ('y', 60 * 60 * 24 * 7 * 52),
        ('w', 60 * 60 * 24 * 7),
        ('d', 60 * 60 * 24),
        ('h', 60 * 60),
        ('m', 60),
        ('s', 1),
    ]

    seconds = int(seconds)

    if seconds < 60:
        return ' {0:2d}s'.format(seconds)
    # Scan adjacent unit pairs; report the first unit that fits plus
    # the remainder expressed in the next-smaller unit.
    for i in range(len(units) - 1):
        unit1, limit1 = units[i]
        unit2, limit2 = units[i + 1]
        if seconds >= limit1:
            return '{0:2d}{1}{2:2d}{3}'.format(
                seconds // limit1, unit1,
                (seconds % limit1) // limit2, unit2)
    return ' ~inf'
def human_file_size(size):
    """
    Returns a human-friendly string representing a file size
    that is 2-4 characters long.

    For example, depending on the number of bytes given, can be one
    of::

        256b
        64k
        1.1G

    Parameters
    ----------
    size : int
        The size of the file (in bytes)

    Returns
    -------
    size : str
        A human-friendly representation of the size of the file
    """
    if hasattr(size, 'unit'):
        # Import units only if necessary because the import takes a
        # significant time [#4649]
        from .. import units as u
        size = size.to(u.byte).value

    suffixes = ' kMGTPEZY'
    # Determine which power-of-1000 bucket the size falls in.
    if size == 0:
        num_scale = 0
    else:
        num_scale = int(math.floor(math.log(size) / math.log(1000)))
    if num_scale > 7:
        suffix = '?'
    else:
        suffix = suffixes[num_scale]
    num_scale = int(math.pow(1000, num_scale))
    value = size / num_scale
    str_value = str(value)
    if suffix == ' ':
        # Plain bytes: drop the fractional part entirely.
        str_value = str_value[:str_value.index('.')]
    elif str_value[2] == '.':
        # Two integer digits: drop the '.' and everything after it.
        str_value = str_value[:2]
    else:
        # Keep at most three leading characters (e.g. '1.0', '123').
        str_value = str_value[:3]
    return "{0:>3s}{1}".format(str_value, suffix)
class ProgressBar(six.Iterator):
    """
    A class to display a progress bar in the terminal.

    It is designed to be used either with the ``with`` statement::

        with ProgressBar(len(items)) as bar:
            for item in enumerate(items):
                bar.update()

    or as a generator::

        for item in ProgressBar(items):
            item.process()
    """

    def __init__(self, total_or_items, ipython_widget=False, file=None):
        """
        Parameters
        ----------
        total_or_items : int or sequence
            If an int, the number of increments in the process being
            tracked.  If a sequence, the items to iterate over.

        ipython_widget : bool, optional
            If `True`, the progress bar will display as an IPython
            notebook widget.

        file : writable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the progress bar will be
            completely silent.
        """
        if file is None:
            file = _get_stdout()

        # On non-interactive output, replace `update` with a no-op so
        # callers don't have to special-case redirected streams.
        if not ipython_widget and not isatty(file):
            self.update = self._silent_update
            self._silent = True
        else:
            self._silent = False

        if isiterable(total_or_items):
            # NOTE(review): assumes the iterable also supports len();
            # a pure generator would raise TypeError here.
            self._items = iter(total_or_items)
            self._total = len(total_or_items)
        else:
            try:
                self._total = int(total_or_items)
            except TypeError:
                raise TypeError("First argument must be int or sequence")
            else:
                self._items = iter(range(self._total))

        self._file = file
        self._start_time = time.time()
        self._human_total = human_file_size(self._total)
        self._ipython_widget = ipython_widget

        # Track terminal resizes (SIGWINCH) so the bar re-fits the
        # window; only possible where fcntl/termios are available.
        self._signal_set = False
        if not ipython_widget:
            self._should_handle_resize = (
                _CAN_RESIZE_TERMINAL and self._file.isatty())
            self._handle_resize()
            if self._should_handle_resize:
                signal.signal(signal.SIGWINCH, self._handle_resize)
                self._signal_set = True

        self.update(0)

    def _handle_resize(self, signum=None, frame=None):
        # Recompute the bar width; 37 columns are reserved for the
        # size/percentage/ETA readout to the right of the bar.
        terminal_width = terminal_size(self._file)[1]
        self._bar_length = terminal_width - 37

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if not self._silent:
            if exc_type is None:
                # Snap the bar to 100% on clean exit.
                self.update(self._total)
            self._file.write('\n')
            self._file.flush()
            if self._signal_set:
                # Restore the default SIGWINCH behavior.
                signal.signal(signal.SIGWINCH, signal.SIG_DFL)

    def __iter__(self):
        return self

    def __next__(self):
        try:
            rv = next(self._items)
        except StopIteration:
            # Finalize the display before propagating the end of
            # iteration to the caller.
            self.__exit__(None, None, None)
            raise
        else:
            self.update()
            return rv

    def update(self, value=None):
        """
        Update progress bar via the console or notebook accordingly.
        """
        # Update self.value; with no argument, advance by one step.
        if value is None:
            value = self._current_value + 1
        self._current_value = value

        # Choose the appropriate environment
        if self._ipython_widget:
            self._update_ipython_widget(value)
        else:
            self._update_console(value)

    def _update_console(self, value=None):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor).
        """
        if self._total == 0:
            frac = 1.0
        else:
            frac = float(value) / float(self._total)

        file = self._file
        write = file.write

        # Clamp the fill so over-counting never overflows the bar.
        if frac > 1:
            bar_fill = int(self._bar_length)
        else:
            bar_fill = int(float(self._bar_length) * frac)
        write('\r|')
        color_print('=' * bar_fill, 'blue', file=file, end='')
        if bar_fill < self._bar_length:
            color_print('>', 'green', file=file, end='')
            write('-' * (self._bar_length - bar_fill - 1))
        write('|')

        # Elapsed time when done, no time for an empty bar, otherwise a
        # simple linear extrapolation of the remaining time.
        if value >= self._total:
            t = time.time() - self._start_time
            prefix = ' '
        elif value <= 0:
            t = None
            prefix = ''
        else:
            t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
            prefix = ' ETA '
        write(' {0:>4s}/{1:>4s}'.format(
            human_file_size(value),
            self._human_total))
        write(' ({:>6.2%})'.format(frac))
        write(prefix)
        if t is not None:
            write(human_time(t))
        self._file.flush()

    def _update_ipython_widget(self, value=None):
        """
        Update the progress bar to the given value (out of a total
        given to the constructor).

        This method is for use in the IPython notebook 2+.
        """
        # Create and display an empty progress bar widget,
        # if none exists.
        if not hasattr(self, '_widget'):
            # Import only if an IPython widget, i.e., widget in iPython NB
            from IPython import version_info
            if version_info[0] < 4:
                from IPython.html import widgets
                self._widget = widgets.FloatProgressWidget()
            else:
                _IPython.get_ipython()
                from ipywidgets import widgets
                self._widget = widgets.FloatProgress()
            from IPython.display import display

            display(self._widget)
            self._widget.value = 0

        # Calculate percent completion, and update progress bar
        frac = (value/self._total)
        self._widget.value = frac * 100
        self._widget.description = ' ({:>6.2%})'.format(frac)

    def _silent_update(self, value=None):
        # Stand-in for `update` when output is not a tty.
        pass

    @classmethod
    def map(cls, function, items, multiprocess=False, file=None, step=100):
        """
        Does a `map` operation while displaying a progress bar with
        percentage complete.

        ::

            def work(i):
                print(i)

            ProgressBar.map(work, range(50))

        Parameters
        ----------
        function : function
            Function to call for each step

        items : sequence
            Sequence where each element is a tuple of arguments to pass to
            *function*.

        multiprocess : bool, optional
            If `True`, use the `multiprocessing` module to distribute each
            task to a different processor core.

        file : writeable file-like object, optional
            The file to write the progress bar to.  Defaults to
            `sys.stdout`.  If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any), the scrollbar will
            be completely silent.

        step : int, optional
            Update the progress bar at least every *step* steps (default: 100).
            If ``multiprocess`` is `True`, this will affect the size
            of the chunks of ``items`` that are submitted as separate tasks
            to the process pool.  A large step size may make the job
            complete faster if ``items`` is very long.
        """
        results = []

        if file is None:
            file = _get_stdout()

        with cls(len(items), file=file) as bar:
            # Update roughly once per bar cell, but never less often
            # than every `step` items.
            default_step = max(int(float(len(items)) / bar._bar_length), 1)
            chunksize = min(default_step, step)
            if not multiprocess:
                for i, item in enumerate(items):
                    results.append(function(item))
                    if (i % chunksize) == 0:
                        bar.update(i)
            else:
                # BUGFIX: the body of this branch was missing entirely,
                # leaving an empty ``else:`` suite (a syntax error).
                # Restored the multiprocessing implementation.  Note
                # that imap_unordered yields results as they complete,
                # so `results` is NOT in input order here.
                p = multiprocessing.Pool()
                for i, result in enumerate(
                        p.imap_unordered(function, items, chunksize=chunksize)):
                    bar.update(i)
                    results.append(result)
                p.close()
                p.join()

        return results
class Spinner(object):
"""
A class to display a spinner in the terminal.
It is designed to be used with the ``with`` statement::
with Spinner("Reticulating splines", "green") as s:
for item in enumerate(items):
s.next()
"""
_default_unicode_chars = "◓◑◒◐"
_default_ascii_chars = "-/|\\"
def __init__(self, msg, color='default', file=None, step=1,
chars=None):
"""
Parameters
----------
msg : str
The message to print
color : str, optional
An ANSI terminal color name. Must be one of: black, red,
green, brown, blue, magenta, cyan, lightgrey, default,
darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white.
file : writeable file-like object, optional
The file to write the spinner to. Defaults to
`sys.stdout`. If ``file`` is not a tty (as determined by
calling its `isatty` member, if any, or special case hacks
to detect the IPython console), the spinner will be
completely silent.
step : int, optional
Only update the spinner every *step* steps
chars : str, optional
The character sequence to use for the spinner
"""
if file is None:
file = _get_stdout()
self._msg = msg
self._color = color
self._file = file
self._step = step
if chars is None:
if conf.unicode_output:
chars = self._default_unicode_chars
else:
chars = self._default_ascii_chars
self._chars = chars
self._silent = not isatty(file)
def _iterator(self):
chars = self._chars
index = 0
file = self._file
write = file.write
flush = file.flush
try_fallback = True
while True:
write('\r')
color_print(self._msg, self._color, file=file, end='')
write(' ')
try:
if try_fallback:
write = _write_with_fallback(chars[index], write, file)
else:
write(chars[index])
except UnicodeError:
# If even _write_with_fallback failed for any reason just give
# up on trying to use the unicode characters
chars = self._default_ascii_chars
write(chars[index])
try_fallback = False # No good will come of using this again
flush()
yield
for i in range(self._step):
yield
index = (index + 1) % len(chars)
def __enter__(self):
if self._silent:
return self._silent_iterator()
else:
return self._iterator()
def __exit__(self, exc_type, exc_value, traceback):
file = self._file
write = file.write
flush = file.flush
if not self._silent:
write('\r')
color_print(self._msg, self._color, file=file, end='')
if exc_type is None:
color_print(' [Done]', 'green', file=file)
else:
color_print(' [Failed]', 'red', file=file)
flush()
def _silent_iterator(self):
color_print(self._msg, self._color, file=self._file, end='')
self._file.flush()
while True:
yield
class ProgressBarOrSpinner(object):
"""
A class that displays either a `ProgressBar` or `Spinner`
depending on whether the total size of the operation is
known or not.
It is designed to be used with the ``with`` statement::
if file.has_length():
length = file.get_length()
else:
length = None
bytes_read = 0
with ProgressBarOrSpinner(length) as bar:
while file.read(blocksize):
bytes_read += blocksize
bar.update(bytes_read)
"""
def __init__(self, total, msg, color='default', file=None):
"""
Parameters
----------
total : int or None
If an int, the number of increments in the process being
tracked and a `ProgressBar` is displayed. If `None`, a
`Spinner` is displayed.
msg : str
The message to display above the `ProgressBar` or
alongside the `Spinner`.
color : str, optional
The color of ``msg``, if any. Must be an ANSI terminal
color name. Must be one of: black, red, green, brown,
blue, magenta, cyan, lightgrey, default, darkgrey,
lightred, lightgreen, yellow, lightblue, lightmagenta,
lightcyan, white.
file : writable file-like object, optional
The file to write the to. Defaults to `sys.stdout`. If
``file`` is not a tty (as determined by calling its `isatty`
member, if any), only ``msg`` will be displayed: the
`ProgressBar` or `Spinner` will be silent.
"""
if file is None:
file = _get_stdout()
if total is None or not isatty(file):
self._is_spinner = True
self._obj = Spinner(msg, color=color, file=file)
else:
self._is_spinner = False
color_print(msg, color, file=file)
self._obj = ProgressBar(total, file=file)
def __enter__(self):
self._iter = self._obj.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
return self._obj.__exit__(exc_type, exc_value, traceback)
def update(self, value):
"""
Update the progress bar to the given value (out of the total
given to the constructor.
"""
if self._is_spinner:
next(self._iter)
else:
self._obj.update(value)
def print_code_line(line, col=None, file=None, tabwidth=8, width=70):
"""
Prints a line of source code, highlighting a particular character
position in the line. Useful for displaying the context of error
messages.
If the line is more than ``width`` characters, the line is truncated
accordingly and '…' characters are inserted at the front and/or
end.
It looks like this::
there_is_a_syntax_error_here :
^
Parameters
----------
line : unicode
The line of code to display
col : int, optional
The character in the line to highlight. ``col`` must be less
than ``len(line)``.
file : writeable file-like object, optional
Where to write to. Defaults to `sys.stdout`.
tabwidth : int, optional
The number of spaces per tab (``'\\t'``) character. Default
is 8. All tabs will be converted to spaces to ensure that the
caret lines up with the correct column.
width : int, optional
The width of the display, beyond which the line will be
truncated. Defaults to 70 (this matches the default in the
standard library's `textwrap` module).
"""
if file is None:
file = _get_stdout()
if conf.unicode_output:
ellipsis = '…'
else:
ellipsis = '...'
write = file.write
if col is not None:
assert col < len(line)
ntabs = line[:col].count('\t')
col += ntabs * (tabwidth - 1)
line = line.rstrip('\n')
line = line.replace('\t', ' ' * tabwidth)
if col is not None and col > width:
new_col = min(width // 2, len(line) - col)
offset = col - new_col
line = line[offset + len(ellipsis):]
width -= len(ellipsis)
new_col = col
col -= offset
color_print(ellipsis, 'darkgrey', file=file, end='')
if len(line) > width:
write(line[:width - len(ellipsis)])
color_print(ellipsis, 'darkgrey', file=file)
else:
write(line)
write('\n')
if col is not None:
write(' ' * col)
color_print('^', 'red', file=file)
# The following four Getch* classes implement unbuffered character reading from
# stdin on Windows, linux, MacOSX. This is taken directly from ActiveState
# Code Recipes:
# http://code.activestate.com/recipes/134892-getch-like-unbuffered-character-reading-from-stdin/
#
class Getch(object):
"""Get a single character from standard input without screen echo.
Returns
-------
char : str (one character)
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
try:
self.impl = _GetchMacCarbon()
except (ImportError, AttributeError):
self.impl = _GetchUnix()
def __call__(self):
return self.impl()
class _GetchUnix(object):
def __init__(self):
import tty # pylint: disable=W0611
import sys # pylint: disable=W0611
# import termios now or else you'll get the Unix
# version on the Mac
import termios # pylint: disable=W0611
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows(object):
def __init__(self):
import msvcrt # pylint: disable=W0611
def __call__(self):
import msvcrt
return msvcrt.getch()
class _GetchMacCarbon(object):
"""
A function which returns the current ASCII key that is down;
if no ASCII key is down, the null string is returned. The
page http://www.mactech.com/macintosh-c/chap02-1.html was
very helpful in figuring out how to do this.
"""
def __init__(self):
import Carbon
Carbon.Evt # see if it has this (in Unix, it doesn't)
def __call__(self):
import Carbon
if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask
return ''
else:
#
# The event contains the following info:
# (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
#
# The message (msg) contains the ASCII char which is
# extracted with the 0x000000FF charCodeMask; this
# number is converted to an ASCII character with chr() and
# returned
#
(what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
| p = multiprocessing.Pool()
for i, result in enumerate(
p.imap_unordered(function, items, chunksize=chunksize)):
bar.update(i)
results.append(result)
p.close()
p.join() | conditional_block |
console.py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
__all__ = [
'isatty', 'color_print', 'human_time', 'human_file_size',
'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
'terminal_size']
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
"""Singleton class given access to IPython streams, etc."""
@classproperty
def get_ipython(cls):
try:
from IPython import get_ipython
except ImportError:
pass
return get_ipython
@classproperty
def OutStream(cls):
if not hasattr(cls, '_OutStream'):
cls._OutStream = None
try:
cls.get_ipython()
except NameError:
return None
try:
from ipykernel.iostream import OutStream
except ImportError:
try:
from IPython.zmq.iostream import OutStream
except ImportError:
from IPython import version_info
if version_info[0] >= 4:
return None
try:
from IPython.kernel.zmq.iostream import OutStream
except ImportError:
return None
cls._OutStream = OutStream
return cls._OutStream
@classproperty
def ipyio(cls):
if not hasattr(cls, '_ipyio'):
try:
from IPython.utils import io
except ImportError:
cls._ipyio = None
else:
cls._ipyio = io
return cls._ipyio
@classproperty
def IOStream(cls):
if cls.ipyio is None:
return None
else:
return cls.ipyio.IOStream
@classmethod
def get_stream(cls, stream):
return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
"""
This utility function contains the logic to determine what streams to use
by default for standard out/err.
Typically this will just return `sys.stdout`, but it contains additional
logic for use in IPython on Windows to determine the correct stream to use
(usually ``IPython.util.io.stdout`` but only if sys.stdout is a TTY).
"""
if stderr:
stream = 'stderr'
else:
stream = 'stdout'
sys_stream = getattr(sys, stream)
if not isatty(sys_stream) or _IPython.OutStream is None:
return sys_stream
# Our system stream is an atty and we're in ipython.
ipyio_stream = _IPython.get_stream(stream)
if ipyio_stream is not None and isatty(ipyio_stream):
# Use the IPython console output stream
return ipyio_stream
else:
# sys.stdout was set to some other non-TTY stream (a file perhaps)
# so just use it directly
return sys_stream
def isatty(file):
"""
Returns `True` if ``file`` is a tty.
Most built-in Python file-like objects have an `isatty` member,
but some user-defined types may not, so this assumes those are not
ttys.
"""
if (multiprocessing.current_process().name != 'MainProcess' or
threading.current_thread().getName() != 'MainThread'):
return False
if hasattr(file, 'isatty'):
return file.isatty()
# Use two isinstance calls to only evaluate IOStream when necessary.
if (_IPython.OutStream is None or
(not isinstance(file, _IPython.OutStream) and
not isinstance(file, _IPython.IOStream))):
return False
# File is an IPython OutStream or IOStream. Check whether:
# - File name is 'stdout'; or
# - File wraps a Console
if getattr(file, 'name', None) == 'stdout':
return True
if hasattr(file, 'stream'):
# On Windows, in IPython 2 the standard I/O streams will wrap
# pyreadline.Console objects if pyreadline is available; this should
# be considered a TTY.
try:
from pyreadyline.console import Console as PyreadlineConsole
except ImportError:
return False
return isinstance(file.stream, PyreadlineConsole)
return False
def terminal_size(file=None):
"""
Returns a tuple (height, width) containing the height and width of
the terminal.
This function will look for the width in height in multiple areas
before falling back on the width and height in astropy's
configuration.
"""
if file is None:
file = _get_stdout()
try:
s = struct.pack(str("HHHH"), 0, 0, 0, 0)
x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
(lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
if lines > 12:
lines -= 6
if width > 10:
width -= 1
if lines <= 0 or width <= 0:
raise Exception('unable to get terminal size')
return (lines, width)
except Exception:
try:
# see if POSIX standard variables will work
return (int(os.environ.get('LINES')),
int(os.environ.get('COLUMNS')))
except TypeError:
# fall back on configuration variables, or if not
# set, (25, 80)
lines = conf.max_lines
width = conf.max_width
if lines is None:
lines = 25
if width is None:
width = 80
return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
This won't actually effect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
"""Write the supplied string with the given write function like
``write(s)``, but use a writer for the locale's preferred encoding in case
of a UnicodeEncodeError. Failing that attempt to write with 'utf-8' or
'latin-1'.
"""
if (_IPython.IOStream is not None and
isinstance(fileobj, _IPython.IOStream)):
# If the output stream is an IPython.utils.io.IOStream object that's
# not going to be very helpful to us since it doesn't raise any
# exceptions when an error occurs writing to its underlying stream.
# There's no advantage to us using IOStream.write directly though;
# instead just write directly to its underlying stream:
write = fileobj.stream.write
try:
write(s)
return write
except UnicodeEncodeError:
# Let's try the next approach...
pass
enc = locale.getpreferredencoding()
try:
Writer = codecs.getwriter(enc)
except LookupError:
Writer = codecs.getwriter(_DEFAULT_ENCODING)
f = Writer(fileobj)
write = f.write
try:
write(s)
return write
except UnicodeEncodeError:
Writer = codecs.getwriter('latin-1')
f = Writer(fileobj)
write = f.write
# If this doesn't work let the exception bubble up; I'm out of ideas
write(s)
return write
def color_print(*args, **kwargs):
"""
Prints colors and styles to the terminal uses ANSI escape
sequences.
::
color_print('This is the color ', 'default', 'GREEN', 'green')
Parameters
----------
positional args : str
The positional arguments come in pairs (*msg*, *color*), where
*msg* is the string to display and *color* is the color to
display it in.
*color* is an ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
file : writeable file-like object, optional
Where to write to. Defaults to `sys.stdout`. If file is not
a tty (as determined by calling its `isatty` member, if one
exists), no coloring will be included.
end : str, optional
The ending of the message. Defaults to ``\\n``. The end will
be printed after resetting any color or font state.
"""
file = kwargs.get('file', _get_stdout())
end = kwargs.get('end', '\n')
write = file.write
if isatty(file) and conf.use_color:
for i in range(0, len(args), 2):
msg = args[i]
if i + 1 == len(args):
color = ''
else:
color = args[i + 1]
if color:
msg = _color_text(msg, color)
# Some file objects support writing unicode sensibly on some Python
# versions; if this fails try creating a writer using the locale's
# preferred encoding. If that fails too give up.
if six.PY2 and isinstance(msg, bytes):
msg = _decode_preferred_encoding(msg)
write = _write_with_fallback(msg, write, file)
write(end)
else:
for i in range(0, len(args), 2):
msg = args[i]
if six.PY2 and isinstance(msg, bytes):
# Support decoding bytes to unicode on Python 2; use the
# preferred encoding for the locale (which is *sometimes*
# sensible)
msg = _decode_preferred_encoding(msg)
write(msg)
write(end)
def strip_ansi_codes(s):
"""
Remove ANSI color codes from the string.
"""
return re.sub('\033\\[([0-9]+)(;[0-9]+)*m', '', s)
def human_time(seconds):
"""
Returns a human-friendly time string that is always exactly 6
characters long.
Depending on the number of seconds given, can be one of::
1w 3d
2d 4h
1h 5m
1m 4s
15s
Will be in color if console coloring is turned on.
Parameters
----------
seconds : int
The number of seconds to represent
Returns
-------
time : str
A human-friendly representation of the given number of seconds
that is always exactly 6 characters.
"""
units = [
('y', 60 * 60 * 24 * 7 * 52),
('w', 60 * 60 * 24 * 7),
('d', 60 * 60 * 24),
('h', 60 * 60),
('m', 60),
('s', 1),
]
seconds = int(seconds)
if seconds < 60:
return ' {0:2d}s'.format(seconds)
for i in range(len(units) - 1):
unit1, limit1 = units[i]
unit2, limit2 = units[i + 1]
if seconds >= limit1:
return '{0:2d}{1}{2:2d}{3}'.format(
seconds // limit1, unit1,
(seconds % limit1) // limit2, unit2)
return ' ~inf'
def human_file_size(size):
"""
Returns a human-friendly string representing a file size
that is 2-4 characters long.
For example, depending on the number of bytes given, can be one
of::
256b
64k
1.1G
Parameters
----------
size : int
The size of the file (in bytes)
Returns
-------
size : str
A human-friendly representation of the size of the file
"""
if hasattr(size, 'unit'):
# Import units only if necessary because the import takes a
# significant time [#4649]
from .. import units as u
size = size.to(u.byte).value
suffixes = ' kMGTPEZY'
if size == 0:
num_scale = 0
else:
num_scale = int(math.floor(math.log(size) / math.log(1000)))
if num_scale > 7:
suffix = '?'
else:
suffix = suffixes[num_scale]
num_scale = int(math.pow(1000, num_scale))
value = size / num_scale
str_value = str(value)
if suffix == ' ':
str_value = str_value[:str_value.index('.')]
elif str_value[2] == '.':
str_value = str_value[:2]
else:
str_value = str_value[:3]
return "{0:>3s}{1}".format(str_value, suffix)
class ProgressBar(six.Iterator):
"""
A class to display a progress bar in the terminal.
It is designed to be used either with the ``with`` statement::
with ProgressBar(len(items)) as bar:
for item in enumerate(items):
bar.update()
or as a generator::
for item in ProgressBar(items):
item.process()
"""
def __init__(self, total_or_items, ipython_widget=False, file=None):
"""
Parameters
----------
total_or_items : int or sequence
If an int, the number of increments in the process being
tracked. If a sequence, the items to iterate over.
ipython_widget : bool, optional
If `True`, the progress bar will display as an IPython
notebook widget.
file : writable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If ``file`` is not a tty (as determined by
calling its `isatty` member, if any, or special case hacks
to detect the IPython console), the progress bar will be
completely silent.
"""
if file is None:
file = _get_stdout()
if not ipython_widget and not isatty(file):
self.update = self._silent_update
self._silent = True
else:
self._silent = False
if isiterable(total_or_items):
self._items = iter(total_or_items)
self._total = len(total_or_items)
else:
try:
self._total = int(total_or_items)
except TypeError:
raise TypeError("First argument must be int or sequence")
else:
self._items = iter(range(self._total))
self._file = file
self._start_time = time.time()
self._human_total = human_file_size(self._total)
self._ipython_widget = ipython_widget
self._signal_set = False
if not ipython_widget:
self._should_handle_resize = (
_CAN_RESIZE_TERMINAL and self._file.isatty())
self._handle_resize()
if self._should_handle_resize:
signal.signal(signal.SIGWINCH, self._handle_resize)
self._signal_set = True
self.update(0)
def _handle_resize(self, signum=None, frame=None):
terminal_width = terminal_size(self._file)[1]
self._bar_length = terminal_width - 37
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self._silent:
if exc_type is None:
self.update(self._total)
self._file.write('\n')
self._file.flush()
if self._signal_set:
signal.signal(signal.SIGWINCH, signal.SIG_DFL)
def __iter__(self):
return self
def __next__(self):
try:
rv = next(self._items)
except StopIteration:
self.__exit__(None, None, None)
raise
else:
self.update()
return rv
def update(self, value=None):
"""
Update progress bar via the console or notebook accordingly.
"""
# Update self.value
if value is None:
value = self._current_value + 1
self._current_value = value
# Choose the appropriate environment
if self._ipython_widget:
self._update_ipython_widget(value)
else:
self._update_console(value)
def _update_console(self, value=None):
"""
Update the progress bar to the given value (out of the total
given to the constructor).
"""
if self._total == 0:
frac = 1.0
else:
frac = float(value) / float(self._total)
file = self._file
write = file.write
if frac > 1:
bar_fill = int(self._bar_length)
else:
bar_fill = int(float(self._bar_length) * frac)
write('\r|')
color_print('=' * bar_fill, 'blue', file=file, end='')
if bar_fill < self._bar_length:
color_print('>', 'green', file=file, end='')
write('-' * (self._bar_length - bar_fill - 1))
write('|')
if value >= self._total:
t = time.time() - self._start_time
prefix = ' '
elif value <= 0:
t = None
prefix = ''
else:
t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
prefix = ' ETA '
write(' {0:>4s}/{1:>4s}'.format(
human_file_size(value),
self._human_total))
write(' ({:>6.2%})'.format(frac))
write(prefix)
if t is not None:
write(human_time(t))
self._file.flush()
def _update_ipython_widget(self, value=None):
"""
Update the progress bar to the given value (out of a total
given to the constructor).
This method is for use in the IPython notebook 2+.
"""
# Create and display an empty progress bar widget,
# if none exists.
if not hasattr(self, '_widget'):
# Import only if an IPython widget, i.e., widget in iPython NB
from IPython import version_info
if version_info[0] < 4:
from IPython.html import widgets
self._widget = widgets.FloatProgressWidget()
else:
_IPython.get_ipython()
from ipywidgets import widgets
self._widget = widgets.FloatProgress()
from IPython.display import display
display(self._widget)
self._widget.value = 0
# Calculate percent completion, and update progress bar
frac = (value/self._total)
self._widget.value = frac * 100
self._widget.description =' ({:>6.2%})'.format(frac)
def _silent_update(self, value=None):
pass
@classmethod
def map(cls, function, items, multiprocess=False, file=None, step=100):
"""
Does a `map` operation while displaying a progress bar with
percentage complete.
::
def work(i):
print(i)
ProgressBar.map(work, range(50))
Parameters
----------
function : function
Function to call for each step
items : sequence
Sequence where each element is a tuple of arguments to pass to
*function*.
multiprocess : bool, optional
If `True`, use the `multiprocessing` module to distribute each
task to a different processor core.
file : writeable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If ``file`` is not a tty (as determined by
calling its `isatty` member, if any), the scrollbar will
be completely silent.
step : int, optional
Update the progress bar at least every *step* steps (default: 100).
If ``multiprocess`` is `True`, this will affect the size
of the chunks of ``items`` that are submitted as separate tasks
to the process pool. A large step size may make the job
complete faster if ``items`` is very long.
"""
results = []
if file is None:
file = _get_stdout()
with cls(len(items), file=file) as bar:
default_step = max(int(float(len(items)) / bar._bar_length), 1)
chunksize = min(default_step, step)
if not multiprocess:
for i, item in enumerate(items):
results.append(function(item))
if (i % chunksize) == 0:
bar.update(i)
else:
p = multiprocessing.Pool()
for i, result in enumerate(
p.imap_unordered(function, items, chunksize=chunksize)):
bar.update(i)
results.append(result)
p.close()
p.join()
return results
class Spinner(object):
"""
A class to display a spinner in the terminal.
It is designed to be used with the ``with`` statement::
with Spinner("Reticulating splines", "green") as s:
for item in enumerate(items):
s.next()
"""
_default_unicode_chars = "◓◑◒◐"
_default_ascii_chars = "-/|\\"
def __init__(self, msg, color='default', file=None, step=1,
chars=None):
"""
Parameters
----------
msg : str
The message to print
color : str, optional
An ANSI terminal color name. Must be one of: black, red,
green, brown, blue, magenta, cyan, lightgrey, default,
darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white.
file : writeable file-like object, optional
The file to write the spinner to. Defaults to
`sys.stdout`. If ``file`` is not a tty (as determined by
calling its `isatty` member, if any, or special case hacks
to detect the IPython console), the spinner will be
completely silent.
step : int, optional
Only update the spinner every *step* steps
chars : str, optional
The character sequence to use for the spinner
"""
if file is None:
file = _get_stdout()
self._msg = msg
self._color = color
self._file = file
self._step = step
if chars is None:
if conf.unicode_output:
chars = self._default_unicode_chars
else:
chars = self._default_ascii_chars
self._chars = chars
self._silent = not isatty(file)
def _iterator(self):
chars = self._chars
index = 0
file = self._file
write = file.write
flush = file.flush
try_fallback = True
while True:
write('\r')
color_print(self._msg, self._color, file=file, end='')
write(' ')
try:
if try_fallback:
write = _write_with_fallback(chars[index], write, file)
else:
write(chars[index])
except UnicodeError:
# If even _write_with_fallback failed for any reason just give
# up on trying to use the unicode characters
chars = self._default_ascii_chars
write(chars[index])
try_fallback = False # No good will come of using this again
flush()
yield
for i in range(self._step):
yield
index = (index + 1) % len(chars)
def __enter__(self):
if self._silent:
return self._silent_iterator()
else:
return self._iterator()
def __exit__(self, exc_type, exc_value, traceback):
file = self._file
write = file.write
flush = file.flush
if not self._silent:
write('\r')
color_print(self._msg, self._color, file=file, end='')
if exc_type is None:
color_print(' [Done]', 'green', file=file)
else:
color_print(' [Failed]', 'red', file=file)
flush()
def _silent_iterator(self):
color_print(self._msg, self._color, file=self._file, end='')
self._file.flush()
while True:
yield
class ProgressBarOrSpinner(object):
"""
A class that displays either a `ProgressBar` or `Spinner`
depending on whether the total size of the operation is
known or not.
It is designed to be used with the ``with`` statement::
if file.has_length():
length = file.get_length()
else:
length = None
bytes_read = 0
with ProgressBarOrSpinner(length) as bar:
while file.read(blocksize):
bytes_read += blocksize
bar.update(bytes_read)
"""
def __init__(self, total, msg, color='default', file=None):
"""
Parameters
----------
total : int or None
If an int, the number of increments in the process being
tracked and a `ProgressBar` is displayed. If `None`, a
`Spinner` is displayed.
msg : str
The message to display above the `ProgressBar` or
alongside the `Spinner`.
color : str, optional
The color of ``msg``, if any. Must be an ANSI terminal
color name. Must be one of: black, red, green, brown,
blue, magenta, cyan, lightgrey, default, darkgrey,
lightred, lightgreen, yellow, lightblue, lightmagenta,
lightcyan, white.
file : writable file-like object, optional
The file to write the to. Defaults to `sys.stdout`. If
``file`` is not a tty (as determined by calling its `isatty`
member, if any), only ``msg`` will be displayed: the
`ProgressBar` or `Spinner` will be silent.
"""
if file is None:
file = _get_stdout()
if total is None or not isatty(file):
self._is_spinner = True
self._obj = Spinner(msg, color=color, file=file)
else:
self._is_spinner = False
color_print(msg, color, file=file)
self._obj = ProgressBar(total, file=file)
def __enter__(self):
self._iter = self._obj.__enter__()
return self
def __exit__ | xc_type, exc_value, traceback):
return self._obj.__exit__(exc_type, exc_value, traceback)
def update(self, value):
"""
Update the progress bar to the given value (out of the total
given to the constructor.
"""
if self._is_spinner:
next(self._iter)
else:
self._obj.update(value)
def print_code_line(line, col=None, file=None, tabwidth=8, width=70):
"""
Prints a line of source code, highlighting a particular character
position in the line. Useful for displaying the context of error
messages.
If the line is more than ``width`` characters, the line is truncated
accordingly and '…' characters are inserted at the front and/or
end.
It looks like this::
there_is_a_syntax_error_here :
^
Parameters
----------
line : unicode
The line of code to display
col : int, optional
The character in the line to highlight. ``col`` must be less
than ``len(line)``.
file : writeable file-like object, optional
Where to write to. Defaults to `sys.stdout`.
tabwidth : int, optional
The number of spaces per tab (``'\\t'``) character. Default
is 8. All tabs will be converted to spaces to ensure that the
caret lines up with the correct column.
width : int, optional
The width of the display, beyond which the line will be
truncated. Defaults to 70 (this matches the default in the
standard library's `textwrap` module).
"""
if file is None:
file = _get_stdout()
if conf.unicode_output:
ellipsis = '…'
else:
ellipsis = '...'
write = file.write
if col is not None:
assert col < len(line)
ntabs = line[:col].count('\t')
col += ntabs * (tabwidth - 1)
line = line.rstrip('\n')
line = line.replace('\t', ' ' * tabwidth)
if col is not None and col > width:
new_col = min(width // 2, len(line) - col)
offset = col - new_col
line = line[offset + len(ellipsis):]
width -= len(ellipsis)
new_col = col
col -= offset
color_print(ellipsis, 'darkgrey', file=file, end='')
if len(line) > width:
write(line[:width - len(ellipsis)])
color_print(ellipsis, 'darkgrey', file=file)
else:
write(line)
write('\n')
if col is not None:
write(' ' * col)
color_print('^', 'red', file=file)
# The following four Getch* classes implement unbuffered character reading from
# stdin on Windows, linux, MacOSX. This is taken directly from ActiveState
# Code Recipes:
# http://code.activestate.com/recipes/134892-getch-like-unbuffered-character-reading-from-stdin/
#
class Getch(object):
"""Get a single character from standard input without screen echo.
Returns
-------
char : str (one character)
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
try:
self.impl = _GetchMacCarbon()
except (ImportError, AttributeError):
self.impl = _GetchUnix()
def __call__(self):
return self.impl()
class _GetchUnix(object):
def __init__(self):
import tty # pylint: disable=W0611
import sys # pylint: disable=W0611
# import termios now or else you'll get the Unix
# version on the Mac
import termios # pylint: disable=W0611
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows(object):
def __init__(self):
import msvcrt # pylint: disable=W0611
def __call__(self):
import msvcrt
return msvcrt.getch()
class _GetchMacCarbon(object):
"""
A function which returns the current ASCII key that is down;
if no ASCII key is down, the null string is returned. The
page http://www.mactech.com/macintosh-c/chap02-1.html was
very helpful in figuring out how to do this.
"""
def __init__(self):
import Carbon
Carbon.Evt # see if it has this (in Unix, it doesn't)
def __call__(self):
import Carbon
if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask
return ''
else:
#
# The event contains the following info:
# (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
#
# The message (msg) contains the ASCII char which is
# extracted with the 0x000000FF charCodeMask; this
# number is converted to an ASCII character with chr() and
# returned
#
(what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
| (self, e | identifier_name |
parse_csvn.py | #!/usr/bin/env python
# Copyright (c) 2015, Scott D. Peckham
#------------------------------------------------------
# S.D. Peckham
# July 9, 2015
#
# Tool to break CSDMS Standard Variable Names into
# all of their component parts, then save results in
# various formats. (e.g. Turtle TTL format)
#
# Example of use at a Unix prompt:
#
# % ./parse_csvn.py CSN_VarNames_v0.83.txt
#------------------------------------------------------
#
# Functions:
# parse_names()
#
#------------------------------------------------------
import os.path
import sys
#------------------------------------------------------
def | ( in_file='CSN_VarNames_v0.83.txt' ):
#--------------------------------------------------
# Open input file that contains copied names table
#--------------------------------------------------
try:
in_unit = open( in_file, 'r' )
except:
print 'SORRY: Could not open TXT file named:'
print ' ' + in_file
#-------------------------
# Open new CSV text file
#-------------------------
## pos = in_file.rfind('.')
## prefix = in_file[0:pos]
## out_file = prefix + '.ttl'
out_file = 'CSN_VarNames_v0.83.ttl'
#-------------------------------------------
OUT_EXISTS = os.path.exists( out_file )
if (OUT_EXISTS):
print 'SORRY, A TTL file with the name'
print ' ' + out_file
print ' already exists.'
return
out_unit = open( out_file, 'w' )
#------------------------
# Write TTL file header
#------------------------
out_unit.write( '@prefix dc: <http://purl.org/dc/elements/1.1/> .' + '\n' )
out_unit.write( '@prefix ns: <http://example.org/ns#> .' + '\n' )
out_unit.write( '@prefix vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> .' + '\n')
out_unit.write( '@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .' + '\n' )
out_unit.write( '@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .' + '\n' )
out_unit.write( '@prefix owl: <http://www.w3.org/2002/07/owl#> .' + '\n' )
out_unit.write( '@prefix csn: <http://ecgs.ncsa.illinois.edu/2015/csn#> .' + '\n' )
out_unit.write( '\n' ) # (blank line)
root_quan_list = list() # (list to save all root quantities)
#---------------------------
# Parse all variable names
#---------------------------
indent = ' ' # (four spaces)
n_names = 0
while (True):
#------------------------------
# Read data line from in_file
#------------------------------
line = in_unit.readline()
if (line == ''):
break
#-----------------------------------------
# Write entire variable name to TTL file
#-----------------------------------------
line = line.strip() # (strip leading/trailing white space)
out_unit.write( '<csn:' + line + '>\n' )
#--------------------------------------------------
# Write object and quantity fullnames to TTL file
#--------------------------------------------------
main_parts = line.split('__')
object_part = main_parts[0]
quantity_part = main_parts[1]
out_unit.write( indent + 'a csn:name ;\n' )
out_unit.write( indent + "csn:object_fullname '" + object_part + "' ;\n" )
out_unit.write( indent + "csn:quantity_fullname '" + quantity_part + "' ;\n" )
#---------------------------------------------
# Write parts of object_fullname to TTL file
#---------------------------------------------
object_list = object_part.split('_')
n_objects = len( object_list )
for k in xrange( n_objects ):
object = object_list[k]
obj_string = " '" + object + "' "
obj_prefix = indent + "csn:object" + str(k+1)
out_unit.write( obj_prefix + obj_string + ";\n")
adj_list = object.split('~')
n_adjectives = len(adj_list) - 1 # (first one in list is the object)
for j in xrange( n_adjectives ):
adj_string = " '" + adj_list[j+1] + "' "
adj_prefix = obj_prefix + "_adjective" + str(j+1)
out_unit.write( adj_prefix + adj_string + ";\n" )
#-------------------------------------
# Write root object name to TTL file
#-------------------------------------
root_object = object_list[-1] # (last object in list)
root_obj_string = " '" + root_object + "' "
root_obj_prefix = indent + "csn:root_object"
out_unit.write( root_obj_prefix + root_obj_string + ";\n" )
#--------------------------------------------------------
# Write all operations in quantity_fullname to TTL file
#--------------------------------------------------------
operation_list = quantity_part.split('_of_')
n_operations = len(operation_list) - 1 # (last one in list is the quantity)
for k in xrange( n_operations ):
operation = operation_list[k]
op_string = " '" + operation + "' "
op_prefix = indent + "csn:operation" + str(k+1)
out_unit.write( op_prefix + op_string + ";\n" )
#----------------------------------
# Write quantity name to TTL file
#----------------------------------
quantity = operation_list[-1]
quan_string = " '" + quantity + "' "
quan_prefix = indent + "csn:quantity"
out_unit.write( quan_prefix + quan_string + ";\n" )
#---------------------------------------
# Write root quantity name to TTL file
#---------------------------------------
quantity_parts = quantity.split('_')
root_quantity = quantity_parts[-1]
root_quan_string = " '" + root_quantity + "' "
root_quan_prefix = indent + "csn:root_quantity"
out_unit.write( root_quan_prefix + root_quan_string + ".\n" ) # (Notice "." vs. ";" here.)
out_unit.write( '\n' ) # (blank line)
root_quan_list.append( root_quantity ) # (save in root_quan_list)
n_names += 1
#----------------------
# Close the input file
#----------------------
in_unit.close()
#----------------------------
# Close the TXT output file
#----------------------------
out_unit.close()
print 'Finished writing CSN var names as TTL.'
print 'Number of names =', n_names, '.'
print ' '
#-----------------------------------------
# Write unique root quantities to a file
#-----------------------------------------
uniq_root_quan_list = sorted( set(root_quan_list) )
n_uniq_root_quans = len( uniq_root_quan_list )
root_quan_unit = open( 'Root_Quantities.txt', 'w' )
for k in xrange( n_uniq_root_quans ):
root_quantity = uniq_root_quan_list[k]
root_quan_unit.write( root_quantity + '\n' )
root_quan_unit.close()
print 'Number of root quantities =', n_uniq_root_quans, '.'
print ' '
# parse_names()
#------------------------------------------------------
if (__name__ == "__main__"):
#-----------------------------------------------------
# Note: First arg in sys.argv is the command itself.
#-----------------------------------------------------
n_args = len(sys.argv)
if (n_args < 2):
print 'ERROR: This tool requires an input'
print ' text file argument.'
print 'sys.argv =', sys.argv
print ' '
elif (n_args == 2):
parse_names( sys.argv[1] )
else:
print 'ERROR: Invalid number of arguments.'
#-----------------------------------------------------------------------
| parse_names | identifier_name |
parse_csvn.py | #!/usr/bin/env python
# Copyright (c) 2015, Scott D. Peckham
#------------------------------------------------------
# S.D. Peckham
# July 9, 2015
#
# Tool to break CSDMS Standard Variable Names into
# all of their component parts, then save results in
# various formats. (e.g. Turtle TTL format)
#
# Example of use at a Unix prompt:
#
# % ./parse_csvn.py CSN_VarNames_v0.83.txt
#------------------------------------------------------
#
# Functions:
# parse_names()
#
#------------------------------------------------------
import os.path
import sys
#------------------------------------------------------
def parse_names( in_file='CSN_VarNames_v0.83.txt' ):
#--------------------------------------------------
# Open input file that contains copied names table
#--------------------------------------------------
|
# parse_names()
#------------------------------------------------------
if (__name__ == "__main__"):
#-----------------------------------------------------
# Note: First arg in sys.argv is the command itself.
#-----------------------------------------------------
n_args = len(sys.argv)
if (n_args < 2):
print 'ERROR: This tool requires an input'
print ' text file argument.'
print 'sys.argv =', sys.argv
print ' '
elif (n_args == 2):
parse_names( sys.argv[1] )
else:
print 'ERROR: Invalid number of arguments.'
#-----------------------------------------------------------------------
| try:
in_unit = open( in_file, 'r' )
except:
print 'SORRY: Could not open TXT file named:'
print ' ' + in_file
#-------------------------
# Open new CSV text file
#-------------------------
## pos = in_file.rfind('.')
## prefix = in_file[0:pos]
## out_file = prefix + '.ttl'
out_file = 'CSN_VarNames_v0.83.ttl'
#-------------------------------------------
OUT_EXISTS = os.path.exists( out_file )
if (OUT_EXISTS):
print 'SORRY, A TTL file with the name'
print ' ' + out_file
print ' already exists.'
return
out_unit = open( out_file, 'w' )
#------------------------
# Write TTL file header
#------------------------
out_unit.write( '@prefix dc: <http://purl.org/dc/elements/1.1/> .' + '\n' )
out_unit.write( '@prefix ns: <http://example.org/ns#> .' + '\n' )
out_unit.write( '@prefix vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> .' + '\n')
out_unit.write( '@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .' + '\n' )
out_unit.write( '@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .' + '\n' )
out_unit.write( '@prefix owl: <http://www.w3.org/2002/07/owl#> .' + '\n' )
out_unit.write( '@prefix csn: <http://ecgs.ncsa.illinois.edu/2015/csn#> .' + '\n' )
out_unit.write( '\n' ) # (blank line)
root_quan_list = list() # (list to save all root quantities)
#---------------------------
# Parse all variable names
#---------------------------
indent = ' ' # (four spaces)
n_names = 0
while (True):
#------------------------------
# Read data line from in_file
#------------------------------
line = in_unit.readline()
if (line == ''):
break
#-----------------------------------------
# Write entire variable name to TTL file
#-----------------------------------------
line = line.strip() # (strip leading/trailing white space)
out_unit.write( '<csn:' + line + '>\n' )
#--------------------------------------------------
# Write object and quantity fullnames to TTL file
#--------------------------------------------------
main_parts = line.split('__')
object_part = main_parts[0]
quantity_part = main_parts[1]
out_unit.write( indent + 'a csn:name ;\n' )
out_unit.write( indent + "csn:object_fullname '" + object_part + "' ;\n" )
out_unit.write( indent + "csn:quantity_fullname '" + quantity_part + "' ;\n" )
#---------------------------------------------
# Write parts of object_fullname to TTL file
#---------------------------------------------
object_list = object_part.split('_')
n_objects = len( object_list )
for k in xrange( n_objects ):
object = object_list[k]
obj_string = " '" + object + "' "
obj_prefix = indent + "csn:object" + str(k+1)
out_unit.write( obj_prefix + obj_string + ";\n")
adj_list = object.split('~')
n_adjectives = len(adj_list) - 1 # (first one in list is the object)
for j in xrange( n_adjectives ):
adj_string = " '" + adj_list[j+1] + "' "
adj_prefix = obj_prefix + "_adjective" + str(j+1)
out_unit.write( adj_prefix + adj_string + ";\n" )
#-------------------------------------
# Write root object name to TTL file
#-------------------------------------
root_object = object_list[-1] # (last object in list)
root_obj_string = " '" + root_object + "' "
root_obj_prefix = indent + "csn:root_object"
out_unit.write( root_obj_prefix + root_obj_string + ";\n" )
#--------------------------------------------------------
# Write all operations in quantity_fullname to TTL file
#--------------------------------------------------------
operation_list = quantity_part.split('_of_')
n_operations = len(operation_list) - 1 # (last one in list is the quantity)
for k in xrange( n_operations ):
operation = operation_list[k]
op_string = " '" + operation + "' "
op_prefix = indent + "csn:operation" + str(k+1)
out_unit.write( op_prefix + op_string + ";\n" )
#----------------------------------
# Write quantity name to TTL file
#----------------------------------
quantity = operation_list[-1]
quan_string = " '" + quantity + "' "
quan_prefix = indent + "csn:quantity"
out_unit.write( quan_prefix + quan_string + ";\n" )
#---------------------------------------
# Write root quantity name to TTL file
#---------------------------------------
quantity_parts = quantity.split('_')
root_quantity = quantity_parts[-1]
root_quan_string = " '" + root_quantity + "' "
root_quan_prefix = indent + "csn:root_quantity"
out_unit.write( root_quan_prefix + root_quan_string + ".\n" ) # (Notice "." vs. ";" here.)
out_unit.write( '\n' ) # (blank line)
root_quan_list.append( root_quantity ) # (save in root_quan_list)
n_names += 1
#----------------------
# Close the input file
#----------------------
in_unit.close()
#----------------------------
# Close the TXT output file
#----------------------------
out_unit.close()
print 'Finished writing CSN var names as TTL.'
print 'Number of names =', n_names, '.'
print ' '
#-----------------------------------------
# Write unique root quantities to a file
#-----------------------------------------
uniq_root_quan_list = sorted( set(root_quan_list) )
n_uniq_root_quans = len( uniq_root_quan_list )
root_quan_unit = open( 'Root_Quantities.txt', 'w' )
for k in xrange( n_uniq_root_quans ):
root_quantity = uniq_root_quan_list[k]
root_quan_unit.write( root_quantity + '\n' )
root_quan_unit.close()
print 'Number of root quantities =', n_uniq_root_quans, '.'
print ' ' | identifier_body |
parse_csvn.py | #!/usr/bin/env python
# Copyright (c) 2015, Scott D. Peckham
#------------------------------------------------------
# S.D. Peckham
# July 9, 2015
#
# Tool to break CSDMS Standard Variable Names into
# all of their component parts, then save results in
# various formats. (e.g. Turtle TTL format)
#
# Example of use at a Unix prompt:
#
# % ./parse_csvn.py CSN_VarNames_v0.83.txt
#------------------------------------------------------
#
# Functions:
# parse_names()
#
#------------------------------------------------------
import os.path
import sys
#------------------------------------------------------
def parse_names( in_file='CSN_VarNames_v0.83.txt' ):
#--------------------------------------------------
# Open input file that contains copied names table
#--------------------------------------------------
try:
in_unit = open( in_file, 'r' )
except:
print 'SORRY: Could not open TXT file named:'
print ' ' + in_file
#-------------------------
# Open new CSV text file
#-------------------------
## pos = in_file.rfind('.')
## prefix = in_file[0:pos]
## out_file = prefix + '.ttl'
out_file = 'CSN_VarNames_v0.83.ttl'
#-------------------------------------------
OUT_EXISTS = os.path.exists( out_file )
if (OUT_EXISTS):
print 'SORRY, A TTL file with the name'
print ' ' + out_file
print ' already exists.'
return
out_unit = open( out_file, 'w' )
#------------------------
# Write TTL file header
#------------------------
out_unit.write( '@prefix dc: <http://purl.org/dc/elements/1.1/> .' + '\n' )
out_unit.write( '@prefix ns: <http://example.org/ns#> .' + '\n' )
out_unit.write( '@prefix vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> .' + '\n')
out_unit.write( '@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .' + '\n' )
out_unit.write( '@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .' + '\n' )
out_unit.write( '@prefix owl: <http://www.w3.org/2002/07/owl#> .' + '\n' )
out_unit.write( '@prefix csn: <http://ecgs.ncsa.illinois.edu/2015/csn#> .' + '\n' )
out_unit.write( '\n' ) # (blank line)
root_quan_list = list() # (list to save all root quantities)
#---------------------------
# Parse all variable names
#---------------------------
indent = ' ' # (four spaces)
n_names = 0
while (True):
#------------------------------
# Read data line from in_file
#------------------------------
line = in_unit.readline()
if (line == ''):
break
#-----------------------------------------
# Write entire variable name to TTL file
#-----------------------------------------
line = line.strip() # (strip leading/trailing white space)
out_unit.write( '<csn:' + line + '>\n' )
#--------------------------------------------------
# Write object and quantity fullnames to TTL file
#--------------------------------------------------
main_parts = line.split('__')
object_part = main_parts[0]
quantity_part = main_parts[1]
out_unit.write( indent + 'a csn:name ;\n' )
out_unit.write( indent + "csn:object_fullname '" + object_part + "' ;\n" )
out_unit.write( indent + "csn:quantity_fullname '" + quantity_part + "' ;\n" )
#---------------------------------------------
# Write parts of object_fullname to TTL file
#---------------------------------------------
object_list = object_part.split('_')
n_objects = len( object_list )
for k in xrange( n_objects ):
object = object_list[k]
obj_string = " '" + object + "' "
obj_prefix = indent + "csn:object" + str(k+1)
out_unit.write( obj_prefix + obj_string + ";\n")
adj_list = object.split('~')
n_adjectives = len(adj_list) - 1 # (first one in list is the object)
for j in xrange( n_adjectives ):
adj_string = " '" + adj_list[j+1] + "' "
adj_prefix = obj_prefix + "_adjective" + str(j+1)
out_unit.write( adj_prefix + adj_string + ";\n" )
#-------------------------------------
# Write root object name to TTL file
#-------------------------------------
root_object = object_list[-1] # (last object in list)
root_obj_string = " '" + root_object + "' "
root_obj_prefix = indent + "csn:root_object"
out_unit.write( root_obj_prefix + root_obj_string + ";\n" )
#--------------------------------------------------------
# Write all operations in quantity_fullname to TTL file
#--------------------------------------------------------
operation_list = quantity_part.split('_of_')
n_operations = len(operation_list) - 1 # (last one in list is the quantity)
for k in xrange( n_operations ):
operation = operation_list[k]
op_string = " '" + operation + "' "
op_prefix = indent + "csn:operation" + str(k+1)
out_unit.write( op_prefix + op_string + ";\n" )
#----------------------------------
# Write quantity name to TTL file
#----------------------------------
quantity = operation_list[-1]
quan_string = " '" + quantity + "' "
quan_prefix = indent + "csn:quantity"
out_unit.write( quan_prefix + quan_string + ";\n" )
#---------------------------------------
# Write root quantity name to TTL file
#---------------------------------------
quantity_parts = quantity.split('_')
root_quantity = quantity_parts[-1]
root_quan_string = " '" + root_quantity + "' "
root_quan_prefix = indent + "csn:root_quantity"
out_unit.write( root_quan_prefix + root_quan_string + ".\n" ) # (Notice "." vs. ";" here.)
out_unit.write( '\n' ) # (blank line)
root_quan_list.append( root_quantity ) # (save in root_quan_list)
n_names += 1
#----------------------
# Close the input file
#----------------------
in_unit.close()
#----------------------------
# Close the TXT output file
#----------------------------
out_unit.close()
print 'Finished writing CSN var names as TTL.'
print 'Number of names =', n_names, '.'
print ' '
#-----------------------------------------
# Write unique root quantities to a file
#-----------------------------------------
uniq_root_quan_list = sorted( set(root_quan_list) )
n_uniq_root_quans = len( uniq_root_quan_list )
root_quan_unit = open( 'Root_Quantities.txt', 'w' )
for k in xrange( n_uniq_root_quans ):
root_quantity = uniq_root_quan_list[k]
root_quan_unit.write( root_quantity + '\n' )
root_quan_unit.close()
print 'Number of root quantities =', n_uniq_root_quans, '.'
print ' '
# parse_names()
#------------------------------------------------------
if (__name__ == "__main__"):
#-----------------------------------------------------
# Note: First arg in sys.argv is the command itself.
#-----------------------------------------------------
n_args = len(sys.argv)
if (n_args < 2):
print 'ERROR: This tool requires an input'
print ' text file argument.'
print 'sys.argv =', sys.argv
print ' '
elif (n_args == 2):
|
else:
print 'ERROR: Invalid number of arguments.'
#-----------------------------------------------------------------------
| parse_names( sys.argv[1] ) | conditional_block |
parse_csvn.py | #!/usr/bin/env python
# Copyright (c) 2015, Scott D. Peckham
#------------------------------------------------------
# S.D. Peckham
# July 9, 2015
#
# Tool to break CSDMS Standard Variable Names into
# all of their component parts, then save results in
# various formats. (e.g. Turtle TTL format)
#
# Example of use at a Unix prompt:
#
# % ./parse_csvn.py CSN_VarNames_v0.83.txt
#------------------------------------------------------
#
# Functions:
# parse_names()
#
#------------------------------------------------------
import os.path
import sys
#------------------------------------------------------
def parse_names( in_file='CSN_VarNames_v0.83.txt' ):
#--------------------------------------------------
# Open input file that contains copied names table
#--------------------------------------------------
try:
in_unit = open( in_file, 'r' )
except:
print 'SORRY: Could not open TXT file named:'
print ' ' + in_file
#-------------------------
# Open new CSV text file
#-------------------------
## pos = in_file.rfind('.')
## prefix = in_file[0:pos]
## out_file = prefix + '.ttl'
out_file = 'CSN_VarNames_v0.83.ttl'
#-------------------------------------------
OUT_EXISTS = os.path.exists( out_file )
if (OUT_EXISTS):
print 'SORRY, A TTL file with the name'
print ' ' + out_file
print ' already exists.' | #------------------------
# Write TTL file header
#------------------------
out_unit.write( '@prefix dc: <http://purl.org/dc/elements/1.1/> .' + '\n' )
out_unit.write( '@prefix ns: <http://example.org/ns#> .' + '\n' )
out_unit.write( '@prefix vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> .' + '\n')
out_unit.write( '@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .' + '\n' )
out_unit.write( '@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .' + '\n' )
out_unit.write( '@prefix owl: <http://www.w3.org/2002/07/owl#> .' + '\n' )
out_unit.write( '@prefix csn: <http://ecgs.ncsa.illinois.edu/2015/csn#> .' + '\n' )
out_unit.write( '\n' ) # (blank line)
root_quan_list = list() # (list to save all root quantities)
#---------------------------
# Parse all variable names
#---------------------------
indent = ' ' # (four spaces)
n_names = 0
while (True):
#------------------------------
# Read data line from in_file
#------------------------------
line = in_unit.readline()
if (line == ''):
break
#-----------------------------------------
# Write entire variable name to TTL file
#-----------------------------------------
line = line.strip() # (strip leading/trailing white space)
out_unit.write( '<csn:' + line + '>\n' )
#--------------------------------------------------
# Write object and quantity fullnames to TTL file
#--------------------------------------------------
main_parts = line.split('__')
object_part = main_parts[0]
quantity_part = main_parts[1]
out_unit.write( indent + 'a csn:name ;\n' )
out_unit.write( indent + "csn:object_fullname '" + object_part + "' ;\n" )
out_unit.write( indent + "csn:quantity_fullname '" + quantity_part + "' ;\n" )
#---------------------------------------------
# Write parts of object_fullname to TTL file
#---------------------------------------------
object_list = object_part.split('_')
n_objects = len( object_list )
for k in xrange( n_objects ):
object = object_list[k]
obj_string = " '" + object + "' "
obj_prefix = indent + "csn:object" + str(k+1)
out_unit.write( obj_prefix + obj_string + ";\n")
adj_list = object.split('~')
n_adjectives = len(adj_list) - 1 # (first one in list is the object)
for j in xrange( n_adjectives ):
adj_string = " '" + adj_list[j+1] + "' "
adj_prefix = obj_prefix + "_adjective" + str(j+1)
out_unit.write( adj_prefix + adj_string + ";\n" )
#-------------------------------------
# Write root object name to TTL file
#-------------------------------------
root_object = object_list[-1] # (last object in list)
root_obj_string = " '" + root_object + "' "
root_obj_prefix = indent + "csn:root_object"
out_unit.write( root_obj_prefix + root_obj_string + ";\n" )
#--------------------------------------------------------
# Write all operations in quantity_fullname to TTL file
#--------------------------------------------------------
operation_list = quantity_part.split('_of_')
n_operations = len(operation_list) - 1 # (last one in list is the quantity)
for k in xrange( n_operations ):
operation = operation_list[k]
op_string = " '" + operation + "' "
op_prefix = indent + "csn:operation" + str(k+1)
out_unit.write( op_prefix + op_string + ";\n" )
#----------------------------------
# Write quantity name to TTL file
#----------------------------------
quantity = operation_list[-1]
quan_string = " '" + quantity + "' "
quan_prefix = indent + "csn:quantity"
out_unit.write( quan_prefix + quan_string + ";\n" )
#---------------------------------------
# Write root quantity name to TTL file
#---------------------------------------
quantity_parts = quantity.split('_')
root_quantity = quantity_parts[-1]
root_quan_string = " '" + root_quantity + "' "
root_quan_prefix = indent + "csn:root_quantity"
out_unit.write( root_quan_prefix + root_quan_string + ".\n" ) # (Notice "." vs. ";" here.)
out_unit.write( '\n' ) # (blank line)
root_quan_list.append( root_quantity ) # (save in root_quan_list)
n_names += 1
#----------------------
# Close the input file
#----------------------
in_unit.close()
#----------------------------
# Close the TXT output file
#----------------------------
out_unit.close()
print 'Finished writing CSN var names as TTL.'
print 'Number of names =', n_names, '.'
print ' '
#-----------------------------------------
# Write unique root quantities to a file
#-----------------------------------------
uniq_root_quan_list = sorted( set(root_quan_list) )
n_uniq_root_quans = len( uniq_root_quan_list )
root_quan_unit = open( 'Root_Quantities.txt', 'w' )
for k in xrange( n_uniq_root_quans ):
root_quantity = uniq_root_quan_list[k]
root_quan_unit.write( root_quantity + '\n' )
root_quan_unit.close()
print 'Number of root quantities =', n_uniq_root_quans, '.'
print ' '
# parse_names()
#------------------------------------------------------
if (__name__ == "__main__"):
#-----------------------------------------------------
# Note: First arg in sys.argv is the command itself.
#-----------------------------------------------------
n_args = len(sys.argv)
if (n_args < 2):
print 'ERROR: This tool requires an input'
print ' text file argument.'
print 'sys.argv =', sys.argv
print ' '
elif (n_args == 2):
parse_names( sys.argv[1] )
else:
print 'ERROR: Invalid number of arguments.'
#----------------------------------------------------------------------- | return
out_unit = open( out_file, 'w' )
| random_line_split |
ex6.py | #!/usr/bin/env python
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
from netmiko import ConnectHandler
from getpass import getpass
from routers import pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1
def | ():
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
ip_address = raw_input("Please enter IP: ")
password = getpass()
pynet_rtr1['ip'] = ip_address
pynet_rtr2['ip'] = ip_address
pynet_jnpr_srx1['ip'] = ip_address
pynet_rtr1['password'] = password
pynet_rtr2['password'] = password
pynet_jnpr_srx1['password'] = password
#for each router send show arp command and print result
for router in (pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1):
ssh_conn = ConnectHandler(verbose=False, **router)
output = ssh_conn.send_command('show arp')
print ">>> {}: \n".format(ssh_conn.ip)
print output
print ">>>\n"
if __name__ == '__main__':
main()
| main | identifier_name |
ex6.py | #!/usr/bin/env python
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
from netmiko import ConnectHandler
from getpass import getpass
from routers import pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1
def main():
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
ip_address = raw_input("Please enter IP: ")
password = getpass()
pynet_rtr1['ip'] = ip_address
pynet_rtr2['ip'] = ip_address
pynet_jnpr_srx1['ip'] = ip_address
pynet_rtr1['password'] = password
pynet_rtr2['password'] = password
pynet_jnpr_srx1['password'] = password
#for each router send show arp command and print result
for router in (pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1):
ssh_conn = ConnectHandler(verbose=False, **router)
output = ssh_conn.send_command('show arp')
print ">>> {}: \n".format(ssh_conn.ip)
print output
print ">>>\n"
if __name__ == '__main__':
| main() | conditional_block |
|
ex6.py | #!/usr/bin/env python
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
from netmiko import ConnectHandler
from getpass import getpass
from routers import pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1
def main():
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
ip_address = raw_input("Please enter IP: ")
password = getpass()
pynet_rtr1['ip'] = ip_address
pynet_rtr2['ip'] = ip_address
pynet_jnpr_srx1['ip'] = ip_address
pynet_rtr1['password'] = password
pynet_rtr2['password'] = password
pynet_jnpr_srx1['password'] = password
#for each router send show arp command and print result
for router in (pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1):
ssh_conn = ConnectHandler(verbose=False, **router)
output = ssh_conn.send_command('show arp')
| main() | print ">>> {}: \n".format(ssh_conn.ip)
print output
print ">>>\n"
if __name__ == '__main__': | random_line_split |
ex6.py | #!/usr/bin/env python
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
from netmiko import ConnectHandler
from getpass import getpass
from routers import pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1
def main():
|
if __name__ == '__main__':
main()
| '''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
ip_address = raw_input("Please enter IP: ")
password = getpass()
pynet_rtr1['ip'] = ip_address
pynet_rtr2['ip'] = ip_address
pynet_jnpr_srx1['ip'] = ip_address
pynet_rtr1['password'] = password
pynet_rtr2['password'] = password
pynet_jnpr_srx1['password'] = password
#for each router send show arp command and print result
for router in (pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1):
ssh_conn = ConnectHandler(verbose=False, **router)
output = ssh_conn.send_command('show arp')
print ">>> {}: \n".format(ssh_conn.ip)
print output
print ">>>\n" | identifier_body |
mitele.py | import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text,
"ogn": validate.any(None, validate.text),
}],
},
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
def __init__(self, url):
super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
})
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def _get_streams(self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
|
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele
| log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return | conditional_block |
mitele.py | import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text, | },
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
def __init__(self, url):
super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
})
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def _get_streams(self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele | "ogn": validate.any(None, validate.text),
}], | random_line_split |
mitele.py | import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text,
"ogn": validate.any(None, validate.text),
}],
},
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
def __init__(self, url):
|
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def _get_streams(self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele
| super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
}) | identifier_body |
mitele.py | import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text,
"ogn": validate.any(None, validate.text),
}],
},
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
def __init__(self, url):
super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
})
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def | (self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele
| _get_streams | identifier_name |
invocation.rs | // Copyright (c) 2014 Richard Diamond & contributors.
//
// This file is part of Rust Rocket.
//
// Rust Rocket is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Rust Rocket is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Rust Rocket. If not, see <http://www.gnu.org/licenses/>.
use toolchain::tool::{Tool, Compiler, Cc, Cxx, Ar, Ld};
pub struct State {
}
pub struct Invocation<'a> {
state: Path,
print_invocation: bool,
// are we under a configure script? if so we don't need to resolve addresses.
configure: bool,
tool: Tool, | impl<'a> Invocation<'a> {
pub fn new(state: &str,
print_invocation: bool,
tool: &str,
opts: &'a [String]) -> Invocation<'a> {
Invocation {
state_file: Path::new(state),
print_invocation: print_invocation,
tool: from_str(tool).expect("unknown tool specified; this is more than likely a bug"),
opts: opts,
}
}
pub fn run(&self) {
use std::io::fs::File;
use serialize::ebml::reader::Decoder;
use serialize::ebml::Doc;
// don't try-block this; if we can't read the state file, we really do need to fail!().
let state = {
let state_bytes = try!({try!(File::open(self.state_file))}.read_to_end());
let mut decoder = Decoder::new(Doc::new(state_bytes));
decode(&mut decoder)
};
match self.tool {
Cc => {
}
Cxx => {
}
Ar => {
}
Ld => {
}
}
}
} | opts: &'a [String],
}
| random_line_split |
invocation.rs | // Copyright (c) 2014 Richard Diamond & contributors.
//
// This file is part of Rust Rocket.
//
// Rust Rocket is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Rust Rocket is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Rust Rocket. If not, see <http://www.gnu.org/licenses/>.
use toolchain::tool::{Tool, Compiler, Cc, Cxx, Ar, Ld};
pub struct State {
}
pub struct Invocation<'a> {
state: Path,
print_invocation: bool,
// are we under a configure script? if so we don't need to resolve addresses.
configure: bool,
tool: Tool,
opts: &'a [String],
}
impl<'a> Invocation<'a> {
pub fn new(state: &str,
print_invocation: bool,
tool: &str,
opts: &'a [String]) -> Invocation<'a> |
pub fn run(&self) {
use std::io::fs::File;
use serialize::ebml::reader::Decoder;
use serialize::ebml::Doc;
// don't try-block this; if we can't read the state file, we really do need to fail!().
let state = {
let state_bytes = try!({try!(File::open(self.state_file))}.read_to_end());
let mut decoder = Decoder::new(Doc::new(state_bytes));
decode(&mut decoder)
};
match self.tool {
Cc => {
}
Cxx => {
}
Ar => {
}
Ld => {
}
}
}
}
| {
Invocation {
state_file: Path::new(state),
print_invocation: print_invocation,
tool: from_str(tool).expect("unknown tool specified; this is more than likely a bug"),
opts: opts,
}
} | identifier_body |
invocation.rs | // Copyright (c) 2014 Richard Diamond & contributors.
//
// This file is part of Rust Rocket.
//
// Rust Rocket is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Rust Rocket is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Rust Rocket. If not, see <http://www.gnu.org/licenses/>.
use toolchain::tool::{Tool, Compiler, Cc, Cxx, Ar, Ld};
pub struct State {
}
pub struct Invocation<'a> {
state: Path,
print_invocation: bool,
// are we under a configure script? if so we don't need to resolve addresses.
configure: bool,
tool: Tool,
opts: &'a [String],
}
impl<'a> Invocation<'a> {
pub fn new(state: &str,
print_invocation: bool,
tool: &str,
opts: &'a [String]) -> Invocation<'a> {
Invocation {
state_file: Path::new(state),
print_invocation: print_invocation,
tool: from_str(tool).expect("unknown tool specified; this is more than likely a bug"),
opts: opts,
}
}
pub fn run(&self) {
use std::io::fs::File;
use serialize::ebml::reader::Decoder;
use serialize::ebml::Doc;
// don't try-block this; if we can't read the state file, we really do need to fail!().
let state = {
let state_bytes = try!({try!(File::open(self.state_file))}.read_to_end());
let mut decoder = Decoder::new(Doc::new(state_bytes));
decode(&mut decoder)
};
match self.tool {
Cc => |
Cxx => {
}
Ar => {
}
Ld => {
}
}
}
}
| {
} | conditional_block |
invocation.rs | // Copyright (c) 2014 Richard Diamond & contributors.
//
// This file is part of Rust Rocket.
//
// Rust Rocket is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Rust Rocket is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Rust Rocket. If not, see <http://www.gnu.org/licenses/>.
use toolchain::tool::{Tool, Compiler, Cc, Cxx, Ar, Ld};
pub struct State {
}
pub struct Invocation<'a> {
state: Path,
print_invocation: bool,
// are we under a configure script? if so we don't need to resolve addresses.
configure: bool,
tool: Tool,
opts: &'a [String],
}
impl<'a> Invocation<'a> {
pub fn | (state: &str,
print_invocation: bool,
tool: &str,
opts: &'a [String]) -> Invocation<'a> {
Invocation {
state_file: Path::new(state),
print_invocation: print_invocation,
tool: from_str(tool).expect("unknown tool specified; this is more than likely a bug"),
opts: opts,
}
}
pub fn run(&self) {
use std::io::fs::File;
use serialize::ebml::reader::Decoder;
use serialize::ebml::Doc;
// don't try-block this; if we can't read the state file, we really do need to fail!().
let state = {
let state_bytes = try!({try!(File::open(self.state_file))}.read_to_end());
let mut decoder = Decoder::new(Doc::new(state_bytes));
decode(&mut decoder)
};
match self.tool {
Cc => {
}
Cxx => {
}
Ar => {
}
Ld => {
}
}
}
}
| new | identifier_name |
Maze.py | # Copyright 2010 by Dana Larose
# This file is part of crashRun.
# crashRun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# crashRun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with crashRun. If not, see <http://www.gnu.org/licenses/>.
from random import choice
from .DisjointSet import DSNode
from .DisjointSet import union
from .DisjointSet import find
from .DisjointSet import split_sets
from .Terrain import TerrainFactory
from .Terrain import CYBERSPACE_WALL
from .Terrain import CYBERSPACE_FLOOR
class Maze(object):
def __init__(self, length, width):
self.length = length
self.width = width
if self.width % 2 == 0: |
if self.length % 2 == 0: self.length -= 1
self.map = []
self.__tf = TerrainFactory()
self.__ds_nodes = []
self.__wall = self.__tf.get_terrain_tile(CYBERSPACE_WALL)
self.__floor = self.__tf.get_terrain_tile(CYBERSPACE_FLOOR)
self.__gen_initial_map()
def __gen_initial_map(self):
for r in range(self.length):
if r % 2 == 0:
self.map.append([self.__wall] * self.width)
else:
_row = []
_ds_row = []
for c in range(self.width):
if c % 2 == 0:
_row.append(self.__wall)
else:
_row.append(self.__floor)
_ds_row.append(DSNode((r,c)))
self.__ds_nodes.append(_ds_row)
self.map.append(_row)
def in_bounds(self, row, col):
return row >= 0 and row < self.length and col >= 0 and col < self.width
def __get_candidate(self, node):
_candidates = []
_nr = node.value[0]
_nc = node.value[1]
if self.in_bounds(_nr - 2, _nc) and self.map[_nr-1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2-1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr-1, _nc))
if self.in_bounds(_nr + 2, _nc) and self.map[_nr+1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2+1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr+1, _nc))
if self.in_bounds(_nr, _nc - 2) and self.map[_nr][_nc-1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2-1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc-1))
if self.in_bounds(_nr, _nc + 2) and self.map[_nr][_nc+1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2+1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc+1))
if len(_candidates) > 0:
return choice(_candidates)
else:
return None
def gen_map(self):
for _row in self.__ds_nodes:
for _node in _row:
_merge = self.__get_candidate(_node)
if _merge != None:
union(_node, _merge[0])
self.map[_merge[1]][_merge[2]] = self.__floor
return self.map
def print_map(self):
for r in range(self.length):
row = ''
for c in range(self.width):
ch = self.map[r][c].get_ch()
row += ' ' if ch == '.' else ch
print(row)
| self.width -= 1 | conditional_block |
Maze.py | # Copyright 2010 by Dana Larose
# This file is part of crashRun.
# crashRun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or | # GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with crashRun. If not, see <http://www.gnu.org/licenses/>.
from random import choice
from .DisjointSet import DSNode
from .DisjointSet import union
from .DisjointSet import find
from .DisjointSet import split_sets
from .Terrain import TerrainFactory
from .Terrain import CYBERSPACE_WALL
from .Terrain import CYBERSPACE_FLOOR
class Maze(object):
def __init__(self, length, width):
self.length = length
self.width = width
if self.width % 2 == 0: self.width -= 1
if self.length % 2 == 0: self.length -= 1
self.map = []
self.__tf = TerrainFactory()
self.__ds_nodes = []
self.__wall = self.__tf.get_terrain_tile(CYBERSPACE_WALL)
self.__floor = self.__tf.get_terrain_tile(CYBERSPACE_FLOOR)
self.__gen_initial_map()
def __gen_initial_map(self):
for r in range(self.length):
if r % 2 == 0:
self.map.append([self.__wall] * self.width)
else:
_row = []
_ds_row = []
for c in range(self.width):
if c % 2 == 0:
_row.append(self.__wall)
else:
_row.append(self.__floor)
_ds_row.append(DSNode((r,c)))
self.__ds_nodes.append(_ds_row)
self.map.append(_row)
def in_bounds(self, row, col):
return row >= 0 and row < self.length and col >= 0 and col < self.width
def __get_candidate(self, node):
_candidates = []
_nr = node.value[0]
_nc = node.value[1]
if self.in_bounds(_nr - 2, _nc) and self.map[_nr-1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2-1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr-1, _nc))
if self.in_bounds(_nr + 2, _nc) and self.map[_nr+1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2+1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr+1, _nc))
if self.in_bounds(_nr, _nc - 2) and self.map[_nr][_nc-1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2-1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc-1))
if self.in_bounds(_nr, _nc + 2) and self.map[_nr][_nc+1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2+1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc+1))
if len(_candidates) > 0:
return choice(_candidates)
else:
return None
def gen_map(self):
for _row in self.__ds_nodes:
for _node in _row:
_merge = self.__get_candidate(_node)
if _merge != None:
union(_node, _merge[0])
self.map[_merge[1]][_merge[2]] = self.__floor
return self.map
def print_map(self):
for r in range(self.length):
row = ''
for c in range(self.width):
ch = self.map[r][c].get_ch()
row += ' ' if ch == '.' else ch
print(row) | # (at your option) any later version.
# crashRun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | random_line_split |
Maze.py | # Copyright 2010 by Dana Larose
# This file is part of crashRun.
# crashRun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# crashRun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with crashRun. If not, see <http://www.gnu.org/licenses/>.
from random import choice
from .DisjointSet import DSNode
from .DisjointSet import union
from .DisjointSet import find
from .DisjointSet import split_sets
from .Terrain import TerrainFactory
from .Terrain import CYBERSPACE_WALL
from .Terrain import CYBERSPACE_FLOOR
class Maze(object):
def __init__(self, length, width):
self.length = length
self.width = width
if self.width % 2 == 0: self.width -= 1
if self.length % 2 == 0: self.length -= 1
self.map = []
self.__tf = TerrainFactory()
self.__ds_nodes = []
self.__wall = self.__tf.get_terrain_tile(CYBERSPACE_WALL)
self.__floor = self.__tf.get_terrain_tile(CYBERSPACE_FLOOR)
self.__gen_initial_map()
def __gen_initial_map(self):
for r in range(self.length):
if r % 2 == 0:
self.map.append([self.__wall] * self.width)
else:
_row = []
_ds_row = []
for c in range(self.width):
if c % 2 == 0:
_row.append(self.__wall)
else:
_row.append(self.__floor)
_ds_row.append(DSNode((r,c)))
self.__ds_nodes.append(_ds_row)
self.map.append(_row)
def in_bounds(self, row, col):
|
def __get_candidate(self, node):
_candidates = []
_nr = node.value[0]
_nc = node.value[1]
if self.in_bounds(_nr - 2, _nc) and self.map[_nr-1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2-1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr-1, _nc))
if self.in_bounds(_nr + 2, _nc) and self.map[_nr+1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2+1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr+1, _nc))
if self.in_bounds(_nr, _nc - 2) and self.map[_nr][_nc-1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2-1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc-1))
if self.in_bounds(_nr, _nc + 2) and self.map[_nr][_nc+1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2+1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc+1))
if len(_candidates) > 0:
return choice(_candidates)
else:
return None
def gen_map(self):
for _row in self.__ds_nodes:
for _node in _row:
_merge = self.__get_candidate(_node)
if _merge != None:
union(_node, _merge[0])
self.map[_merge[1]][_merge[2]] = self.__floor
return self.map
def print_map(self):
for r in range(self.length):
row = ''
for c in range(self.width):
ch = self.map[r][c].get_ch()
row += ' ' if ch == '.' else ch
print(row)
| return row >= 0 and row < self.length and col >= 0 and col < self.width | identifier_body |
Maze.py | # Copyright 2010 by Dana Larose
# This file is part of crashRun.
# crashRun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# crashRun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with crashRun. If not, see <http://www.gnu.org/licenses/>.
from random import choice
from .DisjointSet import DSNode
from .DisjointSet import union
from .DisjointSet import find
from .DisjointSet import split_sets
from .Terrain import TerrainFactory
from .Terrain import CYBERSPACE_WALL
from .Terrain import CYBERSPACE_FLOOR
class Maze(object):
def __init__(self, length, width):
self.length = length
self.width = width
if self.width % 2 == 0: self.width -= 1
if self.length % 2 == 0: self.length -= 1
self.map = []
self.__tf = TerrainFactory()
self.__ds_nodes = []
self.__wall = self.__tf.get_terrain_tile(CYBERSPACE_WALL)
self.__floor = self.__tf.get_terrain_tile(CYBERSPACE_FLOOR)
self.__gen_initial_map()
def __gen_initial_map(self):
for r in range(self.length):
if r % 2 == 0:
self.map.append([self.__wall] * self.width)
else:
_row = []
_ds_row = []
for c in range(self.width):
if c % 2 == 0:
_row.append(self.__wall)
else:
_row.append(self.__floor)
_ds_row.append(DSNode((r,c)))
self.__ds_nodes.append(_ds_row)
self.map.append(_row)
def in_bounds(self, row, col):
return row >= 0 and row < self.length and col >= 0 and col < self.width
def | (self, node):
_candidates = []
_nr = node.value[0]
_nc = node.value[1]
if self.in_bounds(_nr - 2, _nc) and self.map[_nr-1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2-1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr-1, _nc))
if self.in_bounds(_nr + 2, _nc) and self.map[_nr+1][_nc].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2+1][_nc//2]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr+1, _nc))
if self.in_bounds(_nr, _nc - 2) and self.map[_nr][_nc-1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2-1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc-1))
if self.in_bounds(_nr, _nc + 2) and self.map[_nr][_nc+1].get_type() == CYBERSPACE_WALL:
_c_node = self.__ds_nodes[_nr//2][_nc//2+1]
if find(_c_node) != find(node):
_candidates.append((_c_node, _nr, _nc+1))
if len(_candidates) > 0:
return choice(_candidates)
else:
return None
def gen_map(self):
for _row in self.__ds_nodes:
for _node in _row:
_merge = self.__get_candidate(_node)
if _merge != None:
union(_node, _merge[0])
self.map[_merge[1]][_merge[2]] = self.__floor
return self.map
def print_map(self):
for r in range(self.length):
row = ''
for c in range(self.width):
ch = self.map[r][c].get_ch()
row += ' ' if ch == '.' else ch
print(row)
| __get_candidate | identifier_name |
views.py | from d51.django.auth.decorators import auth_required
from django.contrib.sites.models import Site
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ImproperlyConfigured
from .services import load_service, SharingServiceInvalidForm
from .models import URL
SHARE_KEY='u'
@auth_required()
def share_url(request, service_name):
# TODO: this view needs testing
response = HttpResponseRedirect(request.GET.get('next', '/'))
url_to_share = request.GET.get(SHARE_KEY, None)
if url_to_share is None:
# TODO change to a 400
raise Http404 | url=full_url_to_share,
)
try:
url.send(service_name, request.user, request.POST)
except SharingServiceInvalidForm:
service = load_service(service_name, url)
input = [] if request.method != 'POST' else [request.POST]
form = service.get_form_class()(*input)
templates, context = [
'sharing/%s/prompt.html'%service_name,
'sharing/prompt.html'
],{
'service_name':service_name,
'form': form,
'url':url_to_share,
'SHARE_KEY':SHARE_KEY,
'next':request.GET.get('next','/')
}
response = render_to_response(templates, context, context_instance=RequestContext(request))
except ImproperlyConfigured:
raise Http404
return response | else:
full_url_to_share = 'http://%s%s' % ((Site.objects.get_current().domain, url_to_share)) if url_to_share.find(':') == -1 else url_to_share
url, created = URL.objects.get_or_create( | random_line_split |
views.py | from d51.django.auth.decorators import auth_required
from django.contrib.sites.models import Site
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ImproperlyConfigured
from .services import load_service, SharingServiceInvalidForm
from .models import URL
SHARE_KEY='u'
@auth_required()
def share_url(request, service_name):
# TODO: this view needs testing
| response = HttpResponseRedirect(request.GET.get('next', '/'))
url_to_share = request.GET.get(SHARE_KEY, None)
if url_to_share is None:
# TODO change to a 400
raise Http404
else:
full_url_to_share = 'http://%s%s' % ((Site.objects.get_current().domain, url_to_share)) if url_to_share.find(':') == -1 else url_to_share
url, created = URL.objects.get_or_create(
url=full_url_to_share,
)
try:
url.send(service_name, request.user, request.POST)
except SharingServiceInvalidForm:
service = load_service(service_name, url)
input = [] if request.method != 'POST' else [request.POST]
form = service.get_form_class()(*input)
templates, context = [
'sharing/%s/prompt.html'%service_name,
'sharing/prompt.html'
],{
'service_name':service_name,
'form': form,
'url':url_to_share,
'SHARE_KEY':SHARE_KEY,
'next':request.GET.get('next','/')
}
response = render_to_response(templates, context, context_instance=RequestContext(request))
except ImproperlyConfigured:
raise Http404
return response | identifier_body |
|
views.py | from d51.django.auth.decorators import auth_required
from django.contrib.sites.models import Site
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ImproperlyConfigured
from .services import load_service, SharingServiceInvalidForm
from .models import URL
SHARE_KEY='u'
@auth_required()
def | (request, service_name):
# TODO: this view needs testing
response = HttpResponseRedirect(request.GET.get('next', '/'))
url_to_share = request.GET.get(SHARE_KEY, None)
if url_to_share is None:
# TODO change to a 400
raise Http404
else:
full_url_to_share = 'http://%s%s' % ((Site.objects.get_current().domain, url_to_share)) if url_to_share.find(':') == -1 else url_to_share
url, created = URL.objects.get_or_create(
url=full_url_to_share,
)
try:
url.send(service_name, request.user, request.POST)
except SharingServiceInvalidForm:
service = load_service(service_name, url)
input = [] if request.method != 'POST' else [request.POST]
form = service.get_form_class()(*input)
templates, context = [
'sharing/%s/prompt.html'%service_name,
'sharing/prompt.html'
],{
'service_name':service_name,
'form': form,
'url':url_to_share,
'SHARE_KEY':SHARE_KEY,
'next':request.GET.get('next','/')
}
response = render_to_response(templates, context, context_instance=RequestContext(request))
except ImproperlyConfigured:
raise Http404
return response
| share_url | identifier_name |
views.py | from d51.django.auth.decorators import auth_required
from django.contrib.sites.models import Site
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ImproperlyConfigured
from .services import load_service, SharingServiceInvalidForm
from .models import URL
SHARE_KEY='u'
@auth_required()
def share_url(request, service_name):
# TODO: this view needs testing
response = HttpResponseRedirect(request.GET.get('next', '/'))
url_to_share = request.GET.get(SHARE_KEY, None)
if url_to_share is None:
# TODO change to a 400
raise Http404
else:
|
return response
| full_url_to_share = 'http://%s%s' % ((Site.objects.get_current().domain, url_to_share)) if url_to_share.find(':') == -1 else url_to_share
url, created = URL.objects.get_or_create(
url=full_url_to_share,
)
try:
url.send(service_name, request.user, request.POST)
except SharingServiceInvalidForm:
service = load_service(service_name, url)
input = [] if request.method != 'POST' else [request.POST]
form = service.get_form_class()(*input)
templates, context = [
'sharing/%s/prompt.html'%service_name,
'sharing/prompt.html'
],{
'service_name':service_name,
'form': form,
'url':url_to_share,
'SHARE_KEY':SHARE_KEY,
'next':request.GET.get('next','/')
}
response = render_to_response(templates, context, context_instance=RequestContext(request))
except ImproperlyConfigured:
raise Http404 | conditional_block |
cursor.py | # coding=utf-8
"""cursor.py - Cursor handler."""
from __future__ import absolute_import
import gobject
import gtk
NORMAL, GRAB, WAIT = range(3)
class CursorHandler(object):
def __init__(self, window):
self._window = window
self._timer_id = None
self._auto_hide = False
self._current_cursor = NORMAL
def set_cursor_type(self, cursor):
"""Set the cursor to type <cursor>. Supported cursor types are
available as constants in this module. If <cursor> is not one of the
cursor constants above, it must be a gtk.gdk.Cursor.
"""
if cursor == NORMAL:
|
elif cursor == GRAB:
mode = gtk.gdk.Cursor(gtk.gdk.FLEUR)
elif cursor == WAIT:
mode = gtk.gdk.Cursor(gtk.gdk.WATCH)
else:
mode = cursor
self._window.set_cursor(mode)
self._current_cursor = cursor
if self._auto_hide:
if cursor == NORMAL:
self._set_hide_timer()
else:
self._kill_timer()
def auto_hide_on(self):
"""Signal that the cursor should auto-hide from now on (e.g. that
we are entering fullscreen).
"""
self._auto_hide = True
if self._current_cursor == NORMAL:
self._set_hide_timer()
def auto_hide_off(self):
"""Signal that the cursor should *not* auto-hide from now on."""
self._auto_hide = False
self._kill_timer()
if self._current_cursor == NORMAL:
self.set_cursor_type(NORMAL)
def refresh(self):
"""Refresh the current cursor (i.e. display it and set a new timer in
fullscreen). Used when we move the cursor.
"""
if self._auto_hide:
self.set_cursor_type(self._current_cursor)
def _set_hide_timer(self):
self._kill_timer()
self._timer_id = gobject.timeout_add(2000, self._window.set_cursor,
self._get_hidden_cursor())
def _kill_timer(self):
if self._timer_id is not None:
gobject.source_remove(self._timer_id)
def _get_hidden_cursor(self):
pixmap = gtk.gdk.Pixmap(None, 1, 1, 1)
color = gtk.gdk.Color()
return gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0)
| mode = None | conditional_block |
cursor.py | # coding=utf-8
"""cursor.py - Cursor handler."""
from __future__ import absolute_import
import gobject
import gtk
NORMAL, GRAB, WAIT = range(3)
class CursorHandler(object):
def __init__(self, window):
self._window = window
self._timer_id = None
self._auto_hide = False
self._current_cursor = NORMAL
def set_cursor_type(self, cursor):
"""Set the cursor to type <cursor>. Supported cursor types are
available as constants in this module. If <cursor> is not one of the
cursor constants above, it must be a gtk.gdk.Cursor.
"""
if cursor == NORMAL:
mode = None
elif cursor == GRAB:
mode = gtk.gdk.Cursor(gtk.gdk.FLEUR)
elif cursor == WAIT:
mode = gtk.gdk.Cursor(gtk.gdk.WATCH)
else:
mode = cursor
self._window.set_cursor(mode)
self._current_cursor = cursor
if self._auto_hide:
if cursor == NORMAL:
self._set_hide_timer()
else:
self._kill_timer()
def auto_hide_on(self):
|
def auto_hide_off(self):
"""Signal that the cursor should *not* auto-hide from now on."""
self._auto_hide = False
self._kill_timer()
if self._current_cursor == NORMAL:
self.set_cursor_type(NORMAL)
def refresh(self):
"""Refresh the current cursor (i.e. display it and set a new timer in
fullscreen). Used when we move the cursor.
"""
if self._auto_hide:
self.set_cursor_type(self._current_cursor)
def _set_hide_timer(self):
self._kill_timer()
self._timer_id = gobject.timeout_add(2000, self._window.set_cursor,
self._get_hidden_cursor())
def _kill_timer(self):
if self._timer_id is not None:
gobject.source_remove(self._timer_id)
def _get_hidden_cursor(self):
pixmap = gtk.gdk.Pixmap(None, 1, 1, 1)
color = gtk.gdk.Color()
return gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0)
| """Signal that the cursor should auto-hide from now on (e.g. that
we are entering fullscreen).
"""
self._auto_hide = True
if self._current_cursor == NORMAL:
self._set_hide_timer() | identifier_body |
cursor.py | # coding=utf-8
"""cursor.py - Cursor handler."""
from __future__ import absolute_import
import gobject
import gtk
NORMAL, GRAB, WAIT = range(3)
class CursorHandler(object):
def __init__(self, window):
self._window = window
self._timer_id = None
self._auto_hide = False
self._current_cursor = NORMAL
def set_cursor_type(self, cursor):
"""Set the cursor to type <cursor>. Supported cursor types are
available as constants in this module. If <cursor> is not one of the
cursor constants above, it must be a gtk.gdk.Cursor.
"""
if cursor == NORMAL:
mode = None
elif cursor == GRAB:
mode = gtk.gdk.Cursor(gtk.gdk.FLEUR)
elif cursor == WAIT:
mode = gtk.gdk.Cursor(gtk.gdk.WATCH)
else:
mode = cursor
self._window.set_cursor(mode)
self._current_cursor = cursor
if self._auto_hide:
if cursor == NORMAL:
self._set_hide_timer()
else:
self._kill_timer()
| we are entering fullscreen).
"""
self._auto_hide = True
if self._current_cursor == NORMAL:
self._set_hide_timer()
def auto_hide_off(self):
"""Signal that the cursor should *not* auto-hide from now on."""
self._auto_hide = False
self._kill_timer()
if self._current_cursor == NORMAL:
self.set_cursor_type(NORMAL)
def refresh(self):
"""Refresh the current cursor (i.e. display it and set a new timer in
fullscreen). Used when we move the cursor.
"""
if self._auto_hide:
self.set_cursor_type(self._current_cursor)
def _set_hide_timer(self):
self._kill_timer()
self._timer_id = gobject.timeout_add(2000, self._window.set_cursor,
self._get_hidden_cursor())
def _kill_timer(self):
if self._timer_id is not None:
gobject.source_remove(self._timer_id)
def _get_hidden_cursor(self):
pixmap = gtk.gdk.Pixmap(None, 1, 1, 1)
color = gtk.gdk.Color()
return gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0) | def auto_hide_on(self):
"""Signal that the cursor should auto-hide from now on (e.g. that | random_line_split |
cursor.py | # coding=utf-8
"""cursor.py - Cursor handler."""
from __future__ import absolute_import
import gobject
import gtk
NORMAL, GRAB, WAIT = range(3)
class CursorHandler(object):
def | (self, window):
self._window = window
self._timer_id = None
self._auto_hide = False
self._current_cursor = NORMAL
def set_cursor_type(self, cursor):
"""Set the cursor to type <cursor>. Supported cursor types are
available as constants in this module. If <cursor> is not one of the
cursor constants above, it must be a gtk.gdk.Cursor.
"""
if cursor == NORMAL:
mode = None
elif cursor == GRAB:
mode = gtk.gdk.Cursor(gtk.gdk.FLEUR)
elif cursor == WAIT:
mode = gtk.gdk.Cursor(gtk.gdk.WATCH)
else:
mode = cursor
self._window.set_cursor(mode)
self._current_cursor = cursor
if self._auto_hide:
if cursor == NORMAL:
self._set_hide_timer()
else:
self._kill_timer()
def auto_hide_on(self):
"""Signal that the cursor should auto-hide from now on (e.g. that
we are entering fullscreen).
"""
self._auto_hide = True
if self._current_cursor == NORMAL:
self._set_hide_timer()
def auto_hide_off(self):
"""Signal that the cursor should *not* auto-hide from now on."""
self._auto_hide = False
self._kill_timer()
if self._current_cursor == NORMAL:
self.set_cursor_type(NORMAL)
def refresh(self):
"""Refresh the current cursor (i.e. display it and set a new timer in
fullscreen). Used when we move the cursor.
"""
if self._auto_hide:
self.set_cursor_type(self._current_cursor)
def _set_hide_timer(self):
self._kill_timer()
self._timer_id = gobject.timeout_add(2000, self._window.set_cursor,
self._get_hidden_cursor())
def _kill_timer(self):
if self._timer_id is not None:
gobject.source_remove(self._timer_id)
def _get_hidden_cursor(self):
pixmap = gtk.gdk.Pixmap(None, 1, 1, 1)
color = gtk.gdk.Color()
return gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0)
| __init__ | identifier_name |
riscv32imc_esp_espidf.rs | use crate::spec::{LinkerFlavor, PanicStrategy, RelocModel};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target | {
Target {
data_layout: "e-m:e-p:32:32-i64:64-n32-S128".to_string(),
llvm_target: "riscv32".to_string(),
pointer_width: 32,
arch: "riscv32".to_string(),
options: TargetOptions {
families: vec!["unix".to_string()],
os: "espidf".to_string(),
env: "newlib".to_string(),
vendor: "espressif".to_string(),
linker_flavor: LinkerFlavor::Gcc,
linker: Some("riscv32-esp-elf-gcc".to_string()),
cpu: "generic-rv32".to_string(),
// While the RiscV32IMC architecture does not natively support atomics, ESP-IDF does support
// the __atomic* and __sync* GCC builtins, so setting `max_atomic_width` to `Some(32)`
// and `atomic_cas` to `true` will cause the compiler to emit libcalls to these builtins.
//
// Support for atomics is necessary for the Rust STD library, which is supported by the ESP-IDF framework.
max_atomic_width: Some(32),
atomic_cas: true,
features: "+m,+c".to_string(),
executables: true,
panic_strategy: PanicStrategy::Abort,
relocation_model: RelocModel::Static,
emit_debug_gdb_scripts: false,
eh_frame_header: false,
..Default::default()
},
}
} | identifier_body |
|
riscv32imc_esp_espidf.rs | use crate::spec::{LinkerFlavor, PanicStrategy, RelocModel};
use crate::spec::{Target, TargetOptions};
pub fn | () -> Target {
Target {
data_layout: "e-m:e-p:32:32-i64:64-n32-S128".to_string(),
llvm_target: "riscv32".to_string(),
pointer_width: 32,
arch: "riscv32".to_string(),
options: TargetOptions {
families: vec!["unix".to_string()],
os: "espidf".to_string(),
env: "newlib".to_string(),
vendor: "espressif".to_string(),
linker_flavor: LinkerFlavor::Gcc,
linker: Some("riscv32-esp-elf-gcc".to_string()),
cpu: "generic-rv32".to_string(),
// While the RiscV32IMC architecture does not natively support atomics, ESP-IDF does support
// the __atomic* and __sync* GCC builtins, so setting `max_atomic_width` to `Some(32)`
// and `atomic_cas` to `true` will cause the compiler to emit libcalls to these builtins.
//
// Support for atomics is necessary for the Rust STD library, which is supported by the ESP-IDF framework.
max_atomic_width: Some(32),
atomic_cas: true,
features: "+m,+c".to_string(),
executables: true,
panic_strategy: PanicStrategy::Abort,
relocation_model: RelocModel::Static,
emit_debug_gdb_scripts: false,
eh_frame_header: false,
..Default::default()
},
}
}
| target | identifier_name |
riscv32imc_esp_espidf.rs | use crate::spec::{LinkerFlavor, PanicStrategy, RelocModel};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target { |
options: TargetOptions {
families: vec!["unix".to_string()],
os: "espidf".to_string(),
env: "newlib".to_string(),
vendor: "espressif".to_string(),
linker_flavor: LinkerFlavor::Gcc,
linker: Some("riscv32-esp-elf-gcc".to_string()),
cpu: "generic-rv32".to_string(),
// While the RiscV32IMC architecture does not natively support atomics, ESP-IDF does support
// the __atomic* and __sync* GCC builtins, so setting `max_atomic_width` to `Some(32)`
// and `atomic_cas` to `true` will cause the compiler to emit libcalls to these builtins.
//
// Support for atomics is necessary for the Rust STD library, which is supported by the ESP-IDF framework.
max_atomic_width: Some(32),
atomic_cas: true,
features: "+m,+c".to_string(),
executables: true,
panic_strategy: PanicStrategy::Abort,
relocation_model: RelocModel::Static,
emit_debug_gdb_scripts: false,
eh_frame_header: false,
..Default::default()
},
}
} | Target {
data_layout: "e-m:e-p:32:32-i64:64-n32-S128".to_string(),
llvm_target: "riscv32".to_string(),
pointer_width: 32,
arch: "riscv32".to_string(), | random_line_split |
build.rs | extern crate rustc_version;
extern crate rusoto_codegen;
extern crate rayon;
use std::env;
use std::path::Path;
use std::io::Write;
use std::fs::File;
use rusoto_codegen::{Service, generate};
use rayon::prelude::*;
/// Parses and generates variables used to construct a User-Agent.
///
/// This is used to create a User-Agent header string resembling
/// `rusoto/x.y.z rust/x.y.z <os>`.
fn generate_user_agent_vars(output_path: &Path) {
let rust_version = rustc_version::version();
let mut f = File::create(&output_path.join("user_agent_vars.rs"))
.expect("Could not create user agent file");
f.write_all(format!("static RUST_VERSION: &'static str = \"{}\";", rust_version).as_bytes())
.expect("Unable to write user agent");
}
/*
gamelift/2015-10-01/service-2.json: "protocol":"json"
support/2013-04-15/service-2.json: "protocol":"json"
*/
// expand to use cfg!() so codegen only gets run for services
// in the features list
macro_rules! services {
( $( [$name:expr, $date:expr] ),* ) => {
{
let mut services = Vec::new();
$(
if cfg!(feature = $name) {
services.push(Service::new($name, $date));
}
)*
services
}
}
}
fn main() {
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not specified");
let out_path = Path::new(&out_dir).to_owned();
let services = services! {
["acm", "2015-12-08"],
["autoscaling", "2011-01-01"],
["cloudformation", "2010-05-15"],
["cloudfront", "2016-11-25"],
["cloudhsm", "2014-05-30"],
["cloudsearch", "2013-01-01"],
["cloudtrail", "2013-11-01"],
["cloudwatch", "2010-08-01"],
["codecommit", "2015-04-13"],
["codedeploy", "2014-10-06"],
["codepipeline", "2015-07-09"],
["cognito-identity", "2014-06-30"],
["config", "2014-11-12"],
["datapipeline", "2012-10-29"],
["devicefarm", "2015-06-23"],
["directconnect", "2012-10-25"],
["ds", "2015-04-16"],
["dynamodb", "2012-08-10"],
["dynamodbstreams", "2012-08-10"],
["ec2", "2016-11-15"],
["ecr", "2015-09-21"],
["ecs", "2014-11-13"],
["elasticache", "2015-02-02"],
["elasticbeanstalk", "2010-12-01"],
["elastictranscoder", "2012-09-25"],
["elb", "2012-06-01"],
["elbv2", "2015-12-01"],
["emr", "2009-03-31"],
["events", "2015-10-07"],
["firehose", "2015-08-04"],
["iam", "2010-05-08"],
["importexport", "2010-06-01"],
["inspector", "2016-02-16"],
["iot", "2015-05-28"],
["kinesis", "2013-12-02"],
["kms", "2014-11-01"],
["lambda", "2015-03-31"],
["logs", "2014-03-28"],
["machinelearning", "2014-12-12"],
["marketplacecommerceanalytics", "2015-07-01"],
["opsworks", "2013-02-18"],
["redshift", "2012-12-01"],
["rds", "2014-10-31"],
["route53", "2013-04-01"],
["route53domains", "2014-05-15"], | ["sqs", "2012-11-05"],
["ssm", "2014-11-06"],
["storagegateway", "2013-06-30"],
["sts", "2011-06-15"],
["swf", "2012-01-25"],
["waf", "2015-08-24"],
["workspaces", "2015-04-08"]
};
let count: usize = services.into_par_iter().map(|service| generate(service, &out_path.clone())).count();
println!("\nGenerated {:?} services.\n", count);
generate_user_agent_vars(&out_path);
let codegen_dir = Path::new("codegen");
// avoid unnecessary recompiles when used as a crates.io dependency
if codegen_dir.exists() {
println!("cargo:rerun-if-changed=codegen");
}
} | ["s3", "2006-03-01"],
["sdb", "2009-04-15"],
["sns", "2010-03-31"], | random_line_split |
build.rs | extern crate rustc_version;
extern crate rusoto_codegen;
extern crate rayon;
use std::env;
use std::path::Path;
use std::io::Write;
use std::fs::File;
use rusoto_codegen::{Service, generate};
use rayon::prelude::*;
/// Parses and generates variables used to construct a User-Agent.
///
/// This is used to create a User-Agent header string resembling
/// `rusoto/x.y.z rust/x.y.z <os>`.
fn generate_user_agent_vars(output_path: &Path) |
/*
gamelift/2015-10-01/service-2.json: "protocol":"json"
support/2013-04-15/service-2.json: "protocol":"json"
*/
// expand to use cfg!() so codegen only gets run for services
// in the features list
macro_rules! services {
( $( [$name:expr, $date:expr] ),* ) => {
{
let mut services = Vec::new();
$(
if cfg!(feature = $name) {
services.push(Service::new($name, $date));
}
)*
services
}
}
}
fn main() {
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not specified");
let out_path = Path::new(&out_dir).to_owned();
let services = services! {
["acm", "2015-12-08"],
["autoscaling", "2011-01-01"],
["cloudformation", "2010-05-15"],
["cloudfront", "2016-11-25"],
["cloudhsm", "2014-05-30"],
["cloudsearch", "2013-01-01"],
["cloudtrail", "2013-11-01"],
["cloudwatch", "2010-08-01"],
["codecommit", "2015-04-13"],
["codedeploy", "2014-10-06"],
["codepipeline", "2015-07-09"],
["cognito-identity", "2014-06-30"],
["config", "2014-11-12"],
["datapipeline", "2012-10-29"],
["devicefarm", "2015-06-23"],
["directconnect", "2012-10-25"],
["ds", "2015-04-16"],
["dynamodb", "2012-08-10"],
["dynamodbstreams", "2012-08-10"],
["ec2", "2016-11-15"],
["ecr", "2015-09-21"],
["ecs", "2014-11-13"],
["elasticache", "2015-02-02"],
["elasticbeanstalk", "2010-12-01"],
["elastictranscoder", "2012-09-25"],
["elb", "2012-06-01"],
["elbv2", "2015-12-01"],
["emr", "2009-03-31"],
["events", "2015-10-07"],
["firehose", "2015-08-04"],
["iam", "2010-05-08"],
["importexport", "2010-06-01"],
["inspector", "2016-02-16"],
["iot", "2015-05-28"],
["kinesis", "2013-12-02"],
["kms", "2014-11-01"],
["lambda", "2015-03-31"],
["logs", "2014-03-28"],
["machinelearning", "2014-12-12"],
["marketplacecommerceanalytics", "2015-07-01"],
["opsworks", "2013-02-18"],
["redshift", "2012-12-01"],
["rds", "2014-10-31"],
["route53", "2013-04-01"],
["route53domains", "2014-05-15"],
["s3", "2006-03-01"],
["sdb", "2009-04-15"],
["sns", "2010-03-31"],
["sqs", "2012-11-05"],
["ssm", "2014-11-06"],
["storagegateway", "2013-06-30"],
["sts", "2011-06-15"],
["swf", "2012-01-25"],
["waf", "2015-08-24"],
["workspaces", "2015-04-08"]
};
let count: usize = services.into_par_iter().map(|service| generate(service, &out_path.clone())).count();
println!("\nGenerated {:?} services.\n", count);
generate_user_agent_vars(&out_path);
let codegen_dir = Path::new("codegen");
// avoid unnecessary recompiles when used as a crates.io dependency
if codegen_dir.exists() {
println!("cargo:rerun-if-changed=codegen");
}
}
| {
let rust_version = rustc_version::version();
let mut f = File::create(&output_path.join("user_agent_vars.rs"))
.expect("Could not create user agent file");
f.write_all(format!("static RUST_VERSION: &'static str = \"{}\";", rust_version).as_bytes())
.expect("Unable to write user agent");
} | identifier_body |
build.rs | extern crate rustc_version;
extern crate rusoto_codegen;
extern crate rayon;
use std::env;
use std::path::Path;
use std::io::Write;
use std::fs::File;
use rusoto_codegen::{Service, generate};
use rayon::prelude::*;
/// Parses and generates variables used to construct a User-Agent.
///
/// This is used to create a User-Agent header string resembling
/// `rusoto/x.y.z rust/x.y.z <os>`.
fn generate_user_agent_vars(output_path: &Path) {
let rust_version = rustc_version::version();
let mut f = File::create(&output_path.join("user_agent_vars.rs"))
.expect("Could not create user agent file");
f.write_all(format!("static RUST_VERSION: &'static str = \"{}\";", rust_version).as_bytes())
.expect("Unable to write user agent");
}
/*
gamelift/2015-10-01/service-2.json: "protocol":"json"
support/2013-04-15/service-2.json: "protocol":"json"
*/
// expand to use cfg!() so codegen only gets run for services
// in the features list
macro_rules! services {
( $( [$name:expr, $date:expr] ),* ) => {
{
let mut services = Vec::new();
$(
if cfg!(feature = $name) {
services.push(Service::new($name, $date));
}
)*
services
}
}
}
fn main() {
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not specified");
let out_path = Path::new(&out_dir).to_owned();
let services = services! {
["acm", "2015-12-08"],
["autoscaling", "2011-01-01"],
["cloudformation", "2010-05-15"],
["cloudfront", "2016-11-25"],
["cloudhsm", "2014-05-30"],
["cloudsearch", "2013-01-01"],
["cloudtrail", "2013-11-01"],
["cloudwatch", "2010-08-01"],
["codecommit", "2015-04-13"],
["codedeploy", "2014-10-06"],
["codepipeline", "2015-07-09"],
["cognito-identity", "2014-06-30"],
["config", "2014-11-12"],
["datapipeline", "2012-10-29"],
["devicefarm", "2015-06-23"],
["directconnect", "2012-10-25"],
["ds", "2015-04-16"],
["dynamodb", "2012-08-10"],
["dynamodbstreams", "2012-08-10"],
["ec2", "2016-11-15"],
["ecr", "2015-09-21"],
["ecs", "2014-11-13"],
["elasticache", "2015-02-02"],
["elasticbeanstalk", "2010-12-01"],
["elastictranscoder", "2012-09-25"],
["elb", "2012-06-01"],
["elbv2", "2015-12-01"],
["emr", "2009-03-31"],
["events", "2015-10-07"],
["firehose", "2015-08-04"],
["iam", "2010-05-08"],
["importexport", "2010-06-01"],
["inspector", "2016-02-16"],
["iot", "2015-05-28"],
["kinesis", "2013-12-02"],
["kms", "2014-11-01"],
["lambda", "2015-03-31"],
["logs", "2014-03-28"],
["machinelearning", "2014-12-12"],
["marketplacecommerceanalytics", "2015-07-01"],
["opsworks", "2013-02-18"],
["redshift", "2012-12-01"],
["rds", "2014-10-31"],
["route53", "2013-04-01"],
["route53domains", "2014-05-15"],
["s3", "2006-03-01"],
["sdb", "2009-04-15"],
["sns", "2010-03-31"],
["sqs", "2012-11-05"],
["ssm", "2014-11-06"],
["storagegateway", "2013-06-30"],
["sts", "2011-06-15"],
["swf", "2012-01-25"],
["waf", "2015-08-24"],
["workspaces", "2015-04-08"]
};
let count: usize = services.into_par_iter().map(|service| generate(service, &out_path.clone())).count();
println!("\nGenerated {:?} services.\n", count);
generate_user_agent_vars(&out_path);
let codegen_dir = Path::new("codegen");
// avoid unnecessary recompiles when used as a crates.io dependency
if codegen_dir.exists() |
}
| {
println!("cargo:rerun-if-changed=codegen");
} | conditional_block |
build.rs | extern crate rustc_version;
extern crate rusoto_codegen;
extern crate rayon;
use std::env;
use std::path::Path;
use std::io::Write;
use std::fs::File;
use rusoto_codegen::{Service, generate};
use rayon::prelude::*;
/// Parses and generates variables used to construct a User-Agent.
///
/// This is used to create a User-Agent header string resembling
/// `rusoto/x.y.z rust/x.y.z <os>`.
fn | (output_path: &Path) {
let rust_version = rustc_version::version();
let mut f = File::create(&output_path.join("user_agent_vars.rs"))
.expect("Could not create user agent file");
f.write_all(format!("static RUST_VERSION: &'static str = \"{}\";", rust_version).as_bytes())
.expect("Unable to write user agent");
}
/*
gamelift/2015-10-01/service-2.json: "protocol":"json"
support/2013-04-15/service-2.json: "protocol":"json"
*/
// expand to use cfg!() so codegen only gets run for services
// in the features list
macro_rules! services {
( $( [$name:expr, $date:expr] ),* ) => {
{
let mut services = Vec::new();
$(
if cfg!(feature = $name) {
services.push(Service::new($name, $date));
}
)*
services
}
}
}
fn main() {
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not specified");
let out_path = Path::new(&out_dir).to_owned();
let services = services! {
["acm", "2015-12-08"],
["autoscaling", "2011-01-01"],
["cloudformation", "2010-05-15"],
["cloudfront", "2016-11-25"],
["cloudhsm", "2014-05-30"],
["cloudsearch", "2013-01-01"],
["cloudtrail", "2013-11-01"],
["cloudwatch", "2010-08-01"],
["codecommit", "2015-04-13"],
["codedeploy", "2014-10-06"],
["codepipeline", "2015-07-09"],
["cognito-identity", "2014-06-30"],
["config", "2014-11-12"],
["datapipeline", "2012-10-29"],
["devicefarm", "2015-06-23"],
["directconnect", "2012-10-25"],
["ds", "2015-04-16"],
["dynamodb", "2012-08-10"],
["dynamodbstreams", "2012-08-10"],
["ec2", "2016-11-15"],
["ecr", "2015-09-21"],
["ecs", "2014-11-13"],
["elasticache", "2015-02-02"],
["elasticbeanstalk", "2010-12-01"],
["elastictranscoder", "2012-09-25"],
["elb", "2012-06-01"],
["elbv2", "2015-12-01"],
["emr", "2009-03-31"],
["events", "2015-10-07"],
["firehose", "2015-08-04"],
["iam", "2010-05-08"],
["importexport", "2010-06-01"],
["inspector", "2016-02-16"],
["iot", "2015-05-28"],
["kinesis", "2013-12-02"],
["kms", "2014-11-01"],
["lambda", "2015-03-31"],
["logs", "2014-03-28"],
["machinelearning", "2014-12-12"],
["marketplacecommerceanalytics", "2015-07-01"],
["opsworks", "2013-02-18"],
["redshift", "2012-12-01"],
["rds", "2014-10-31"],
["route53", "2013-04-01"],
["route53domains", "2014-05-15"],
["s3", "2006-03-01"],
["sdb", "2009-04-15"],
["sns", "2010-03-31"],
["sqs", "2012-11-05"],
["ssm", "2014-11-06"],
["storagegateway", "2013-06-30"],
["sts", "2011-06-15"],
["swf", "2012-01-25"],
["waf", "2015-08-24"],
["workspaces", "2015-04-08"]
};
let count: usize = services.into_par_iter().map(|service| generate(service, &out_path.clone())).count();
println!("\nGenerated {:?} services.\n", count);
generate_user_agent_vars(&out_path);
let codegen_dir = Path::new("codegen");
// avoid unnecessary recompiles when used as a crates.io dependency
if codegen_dir.exists() {
println!("cargo:rerun-if-changed=codegen");
}
}
| generate_user_agent_vars | identifier_name |
jquery.slideshow.js | /*
* Source: http://sixrevisions.com/tutorials/javascript_tutorial/create-a-slick-and-accessible-slideshow-using-jquery/
*/
$(document).ready(function(){
var currentPosition = 0;
var slideWidth = 560;
var slides = $('.slide');
var numberOfSlides = slides.length;
// Remove scrollbar in JS
$('#slidesContainer').css('overflow', 'hidden');
// Wrap all .slides with #slideInner div
slides
.wrapAll('<div id="slideInner"></div>')
// Float left to display horizontally, readjust .slides width
.css({
'float' : 'left',
'width' : slideWidth
});
// Set #slideInner width equal to total width of all slides
$('#slideInner').css('width', slideWidth * numberOfSlides);
// Insert left and right arrow controls in the DOM
$('#slideshow')
.prepend('<span class="control" id="leftControl">Move left</span>')
.append('<span class="control" id="rightControl">Move right</span>');
// Hide left arrow control on first load
manageControls(currentPosition);
// Create event listeners for .controls clicks
$('.control')
.bind('click', function(){
// Determine new position
currentPosition = ($(this).attr('id')=='rightControl')
? currentPosition+1 : currentPosition-1;
// Hide / show controls
manageControls(currentPosition);
// Move slideInner using margin-left
$('#slideInner').animate({
'marginLeft' : slideWidth*(-currentPosition)
});
});
// manageControls: Hides and shows controls depending on currentPosition
function manageControls(position) |
});
| {
// Hide left arrow if position is first slide
if(position==0){ $('#leftControl').hide() }
else{ $('#leftControl').show() }
// Hide right arrow if position is last slide
if(position==numberOfSlides-1){ $('#rightControl').hide() }
else{ $('#rightControl').show() }
} | identifier_body |
jquery.slideshow.js | /*
* Source: http://sixrevisions.com/tutorials/javascript_tutorial/create-a-slick-and-accessible-slideshow-using-jquery/
*/
$(document).ready(function(){
var currentPosition = 0;
var slideWidth = 560;
var slides = $('.slide');
var numberOfSlides = slides.length;
// Remove scrollbar in JS
$('#slidesContainer').css('overflow', 'hidden');
// Wrap all .slides with #slideInner div
slides
.wrapAll('<div id="slideInner"></div>')
// Float left to display horizontally, readjust .slides width
.css({
'float' : 'left',
'width' : slideWidth
});
// Set #slideInner width equal to total width of all slides
$('#slideInner').css('width', slideWidth * numberOfSlides);
// Insert left and right arrow controls in the DOM
$('#slideshow')
.prepend('<span class="control" id="leftControl">Move left</span>')
.append('<span class="control" id="rightControl">Move right</span>');
// Hide left arrow control on first load
manageControls(currentPosition);
// Create event listeners for .controls clicks
$('.control')
.bind('click', function(){
// Determine new position
currentPosition = ($(this).attr('id')=='rightControl')
? currentPosition+1 : currentPosition-1;
// Hide / show controls
manageControls(currentPosition);
// Move slideInner using margin-left
$('#slideInner').animate({
'marginLeft' : slideWidth*(-currentPosition)
});
});
// manageControls: Hides and shows controls depending on currentPosition
function | (position){
// Hide left arrow if position is first slide
if(position==0){ $('#leftControl').hide() }
else{ $('#leftControl').show() }
// Hide right arrow if position is last slide
if(position==numberOfSlides-1){ $('#rightControl').hide() }
else{ $('#rightControl').show() }
}
});
| manageControls | identifier_name |
jquery.slideshow.js | /*
* Source: http://sixrevisions.com/tutorials/javascript_tutorial/create-a-slick-and-accessible-slideshow-using-jquery/
*/
$(document).ready(function(){
var currentPosition = 0;
var slideWidth = 560;
var slides = $('.slide');
var numberOfSlides = slides.length;
// Remove scrollbar in JS
$('#slidesContainer').css('overflow', 'hidden');
// Wrap all .slides with #slideInner div
slides
.wrapAll('<div id="slideInner"></div>')
// Float left to display horizontally, readjust .slides width
.css({
'float' : 'left',
'width' : slideWidth
});
// Set #slideInner width equal to total width of all slides
$('#slideInner').css('width', slideWidth * numberOfSlides);
// Insert left and right arrow controls in the DOM
$('#slideshow')
.prepend('<span class="control" id="leftControl">Move left</span>')
.append('<span class="control" id="rightControl">Move right</span>');
// Hide left arrow control on first load
manageControls(currentPosition);
// Create event listeners for .controls clicks
$('.control')
.bind('click', function(){
// Determine new position
currentPosition = ($(this).attr('id')=='rightControl')
? currentPosition+1 : currentPosition-1;
// Hide / show controls
manageControls(currentPosition);
// Move slideInner using margin-left
$('#slideInner').animate({
'marginLeft' : slideWidth*(-currentPosition)
});
});
// manageControls: Hides and shows controls depending on currentPosition
function manageControls(position){
// Hide left arrow if position is first slide
if(position==0){ $('#leftControl').hide() }
else{ $('#leftControl').show() }
// Hide right arrow if position is last slide
if(position==numberOfSlides-1) |
else{ $('#rightControl').show() }
}
});
| { $('#rightControl').hide() } | conditional_block |
jquery.slideshow.js | /*
* Source: http://sixrevisions.com/tutorials/javascript_tutorial/create-a-slick-and-accessible-slideshow-using-jquery/
*/
$(document).ready(function(){
var currentPosition = 0;
var slideWidth = 560;
var slides = $('.slide');
var numberOfSlides = slides.length;
// Remove scrollbar in JS
$('#slidesContainer').css('overflow', 'hidden');
// Wrap all .slides with #slideInner div
slides
.wrapAll('<div id="slideInner"></div>')
// Float left to display horizontally, readjust .slides width
.css({
'float' : 'left',
'width' : slideWidth
});
| $('#slideInner').css('width', slideWidth * numberOfSlides);
// Insert left and right arrow controls in the DOM
$('#slideshow')
.prepend('<span class="control" id="leftControl">Move left</span>')
.append('<span class="control" id="rightControl">Move right</span>');
// Hide left arrow control on first load
manageControls(currentPosition);
// Create event listeners for .controls clicks
$('.control')
.bind('click', function(){
// Determine new position
currentPosition = ($(this).attr('id')=='rightControl')
? currentPosition+1 : currentPosition-1;
// Hide / show controls
manageControls(currentPosition);
// Move slideInner using margin-left
$('#slideInner').animate({
'marginLeft' : slideWidth*(-currentPosition)
});
});
// manageControls: Hides and shows controls depending on currentPosition
function manageControls(position){
// Hide left arrow if position is first slide
if(position==0){ $('#leftControl').hide() }
else{ $('#leftControl').show() }
// Hide right arrow if position is last slide
if(position==numberOfSlides-1){ $('#rightControl').hide() }
else{ $('#rightControl').show() }
}
}); |
// Set #slideInner width equal to total width of all slides
| random_line_split |
p041.rs | //! [Problem 41](https://projecteuler.net/problem=41) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use integer::Integer;
use iter::Permutations;
use prime::PrimeSet;
// 1 + 2 + ... + 9 = 45 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 1 + 2 + ... + 8 = 36 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 7-pandigimal may be the largest pandigimal prime.
fn compute() -> u64 {
let radix = 10;
let ps = PrimeSet::new();
for (perm, _) in Permutations::new(&[7, 6, 5, 4, 3, 2, 1], 7) {
let n = Integer::from_digits(perm.iter().rev().copied(), radix);
if ps.contains(n) {
return n;
}
}
unreachable!()
}
fn | () -> String {
compute().to_string()
}
common::problem!("7652413", solve);
| solve | identifier_name |
p041.rs | //! [Problem 41](https://projecteuler.net/problem=41) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use integer::Integer;
use iter::Permutations;
use prime::PrimeSet;
// 1 + 2 + ... + 9 = 45 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 1 + 2 + ... + 8 = 36 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 7-pandigimal may be the largest pandigimal prime.
fn compute() -> u64 {
let radix = 10;
let ps = PrimeSet::new();
for (perm, _) in Permutations::new(&[7, 6, 5, 4, 3, 2, 1], 7) {
let n = Integer::from_digits(perm.iter().rev().copied(), radix);
if ps.contains(n) |
}
unreachable!()
}
fn solve() -> String {
compute().to_string()
}
common::problem!("7652413", solve);
| {
return n;
} | conditional_block |
p041.rs | //! [Problem 41](https://projecteuler.net/problem=41) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use integer::Integer;
use iter::Permutations;
use prime::PrimeSet;
// 1 + 2 + ... + 9 = 45 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 1 + 2 + ... + 8 = 36 (dividable by 9 => 9-pandigimal number is dividable by 9)
// 7-pandigimal may be the largest pandigimal prime.
fn compute() -> u64 |
fn solve() -> String {
compute().to_string()
}
common::problem!("7652413", solve);
| {
let radix = 10;
let ps = PrimeSet::new();
for (perm, _) in Permutations::new(&[7, 6, 5, 4, 3, 2, 1], 7) {
let n = Integer::from_digits(perm.iter().rev().copied(), radix);
if ps.contains(n) {
return n;
}
}
unreachable!()
} | identifier_body |
CP.py | #!/usr/bin/env python3
import os
import sys
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
|
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
| AllGood=AllGood and CorrectGrad[i]-GradIn[i]<0.00001 | conditional_block |
CP.py | #!/usr/bin/env python3
import os
import sys
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
|
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
AllGood=AllGood and CorrectGrad[i]-GradIn[i]<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
| return psr.system.apply_single_basis(bslabel,bsname,syst) | identifier_body |
CP.py | #!/usr/bin/env python3
import os
import sys
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
AllGood=AllGood and CorrectGrad[i]-GradIn[i]<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g") | NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize() | wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
| random_line_split |
CP.py | #!/usr/bin/env python3
import os
import sys
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def | (syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
AllGood=AllGood and CorrectGrad[i]-GradIn[i]<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
| ApplyBasis | identifier_name |
gear.controller.js | (function() {
'use strict';
angular.module('character-tracker.charactersheet')
.controller('GearController', GearController);
GearController.$inject =['GearService', 'InventoryService', '$scope'];
function GearController(GearService, InventoryService, $scope) |
}()); | {
var vm = this;
var currentItem = '';
vm.inventory = InventoryService.getItems();
vm.gearSlots = GearService.getGearSlots();
vm.equipItem = function (item, slot){
if (item.equipped) {
vm.unequipItem(item);
}
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].slot === slot) {
vm.gearSlots[i].equipped = true;
vm.gearSlots[i].equippedItem = item;
}
}
InventoryService.equipItem(item);
}
vm.unequipItem = function(item) {
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].equippedItem == item) {
vm.gearSlots[i].equippedItem = {};
}
InventoryService.unequipItem(item);
}
}
vm.getItemAtSlot = function(slot) {
currentItem = InventoryService.getItemAtSlot(slot);
}
init();
function init() {
$.material.init();
}
return vm;
} | identifier_body |
gear.controller.js | (function() {
'use strict';
angular.module('character-tracker.charactersheet')
.controller('GearController', GearController);
GearController.$inject =['GearService', 'InventoryService', '$scope'];
function GearController(GearService, InventoryService, $scope) {
var vm = this;
var currentItem = '';
vm.inventory = InventoryService.getItems();
vm.gearSlots = GearService.getGearSlots();
vm.equipItem = function (item, slot){
if (item.equipped) {
vm.unequipItem(item);
}
for (i = 0; i < vm.gearSlots.length; i++) |
InventoryService.equipItem(item);
}
vm.unequipItem = function(item) {
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].equippedItem == item) {
vm.gearSlots[i].equippedItem = {};
}
InventoryService.unequipItem(item);
}
}
vm.getItemAtSlot = function(slot) {
currentItem = InventoryService.getItemAtSlot(slot);
}
init();
function init() {
$.material.init();
}
return vm;
}
}()); | {
if (vm.gearSlots[i].slot === slot) {
vm.gearSlots[i].equipped = true;
vm.gearSlots[i].equippedItem = item;
}
} | conditional_block |
gear.controller.js | (function() {
'use strict';
angular.module('character-tracker.charactersheet')
.controller('GearController', GearController);
GearController.$inject =['GearService', 'InventoryService', '$scope'];
function | (GearService, InventoryService, $scope) {
var vm = this;
var currentItem = '';
vm.inventory = InventoryService.getItems();
vm.gearSlots = GearService.getGearSlots();
vm.equipItem = function (item, slot){
if (item.equipped) {
vm.unequipItem(item);
}
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].slot === slot) {
vm.gearSlots[i].equipped = true;
vm.gearSlots[i].equippedItem = item;
}
}
InventoryService.equipItem(item);
}
vm.unequipItem = function(item) {
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].equippedItem == item) {
vm.gearSlots[i].equippedItem = {};
}
InventoryService.unequipItem(item);
}
}
vm.getItemAtSlot = function(slot) {
currentItem = InventoryService.getItemAtSlot(slot);
}
init();
function init() {
$.material.init();
}
return vm;
}
}()); | GearController | identifier_name |
gear.controller.js | (function() {
'use strict';
angular.module('character-tracker.charactersheet')
.controller('GearController', GearController);
GearController.$inject =['GearService', 'InventoryService', '$scope'];
function GearController(GearService, InventoryService, $scope) {
var vm = this;
var currentItem = '';
vm.inventory = InventoryService.getItems();
vm.gearSlots = GearService.getGearSlots();
vm.equipItem = function (item, slot){
if (item.equipped) { | if (vm.gearSlots[i].slot === slot) {
vm.gearSlots[i].equipped = true;
vm.gearSlots[i].equippedItem = item;
}
}
InventoryService.equipItem(item);
}
vm.unequipItem = function(item) {
for (i = 0; i < vm.gearSlots.length; i++) {
if (vm.gearSlots[i].equippedItem == item) {
vm.gearSlots[i].equippedItem = {};
}
InventoryService.unequipItem(item);
}
}
vm.getItemAtSlot = function(slot) {
currentItem = InventoryService.getItemAtSlot(slot);
}
init();
function init() {
$.material.init();
}
return vm;
}
}()); | vm.unequipItem(item);
}
for (i = 0; i < vm.gearSlots.length; i++) { | random_line_split |
line.rs | /**
* Flow - Realtime log analyzer
* Copyright (C) 2016 Daniel Mircea
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::cmp::max;
use std::collections::VecDeque;
use std::iter::{Rev, DoubleEndedIterator};
use unicode_width::UnicodeWidthStr;
use core::filter::{Filter, Parser as FilterParser, Constraint, ParserResult as FilterParserResult};
use utils::ansi_decoder::{ComponentCollection, AnsiStr};
#[derive(Clone)]
pub struct Line {
pub content_without_ansi: String,
pub components: Option<ComponentCollection>,
pub width: usize,
}
impl Line {
pub fn new(content: String) -> Line {
let has_ansi = content.has_ansi_escape_sequence();
let (content_without_ansi, components) = if has_ansi {
(content.strip_ansi(), Some(content.to_components()))
} else {
(content, None)
};
Line {
width: content_without_ansi.width(),
content_without_ansi: content_without_ansi,
components: components,
}
}
pub fn guess_height(&self, container_width: usize) -> usize {
max(1,
(self.width as f32 / container_width as f32).ceil() as usize)
}
pub fn matches_for(&self, text: &str) -> Vec<(usize, &str)> {
self.content_without_ansi.match_indices(text).collect()
}
pub fn contains(&self, text: &str) -> bool {
self.content_without_ansi.contains(text)
}
}
pub struct LineCollection {
pub entries: VecDeque<Line>,
capacity: usize,
}
impl LineCollection {
pub fn new(capacity: usize) -> LineCollection {
LineCollection {
entries: VecDeque::new(),
capacity: capacity,
}
}
fn clear_excess(&mut self) {
while self.entries.len() > self.capacity {
self.entries.pop_front();
}
}
pub fn len(&self) -> usize {
self.entries.len()
}
fn add(&mut self, item: String) {
self.entries.push_back(Line::new(item));
}
} |
impl Extend<String> for LineCollection {
fn extend<T: IntoIterator<Item = String>>(&mut self, iter: T) {
for item in iter {
self.add(item);
}
self.clear_excess();
}
}
pub struct ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
iterator: I,
parser: FilterParser,
pending: Vec<&'a Line>,
}
impl<'a, I> ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
fn handle_empty(&mut self) -> Option<I::Item> {
self.iterator.next()
}
fn handle_content(&mut self) -> Option<I::Item> {
let matcher = self.parser.filter.content.as_ref().unwrap();
(&mut self.iterator).filter(|line| matcher.is_match(&line.content_without_ansi)).next()
}
fn handle_boundaries(&mut self) -> Option<I::Item> {
if self.pending.is_empty() {
let mut match_found = false;
for line in &mut self.iterator {
match self.parser.matches(&line.content_without_ansi) {
FilterParserResult::Match => self.pending.push(line),
FilterParserResult::LastMatch(append) => {
match_found = true;
if append {
self.pending.push(line);
}
break;
}
FilterParserResult::Invalid(append) => {
self.pending.clear();
if append {
self.pending.push(line);
}
}
FilterParserResult::NoMatch => {}
}
}
if !(match_found || self.parser.assume_found_matches()) {
return None;
}
self.pending.reverse();
}
self.pending.pop()
}
}
pub trait Parser<'a>: Iterator<Item = &'a Line> {
fn parse(self, filter: Filter) -> ParserState<'a, Rev<Self>>
where Self: DoubleEndedIterator + Sized;
}
impl<'a, I> Parser<'a> for I
where I: Iterator<Item = &'a Line>
{
fn parse(self, filter: Filter) -> ParserState<'a, Rev<Self>>
where Self: DoubleEndedIterator + Sized
{
ParserState {
iterator: self.rev(),
pending: vec![],
parser: FilterParser::new(filter),
}
}
}
impl<'a, I> Iterator for ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
if self.parser.constraints.is_empty() {
self.handle_empty()
} else if self.parser.constraints == vec![Constraint::Content] {
self.handle_content()
} else {
self.handle_boundaries()
}
}
} | random_line_split |
|
line.rs | /**
* Flow - Realtime log analyzer
* Copyright (C) 2016 Daniel Mircea
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::cmp::max;
use std::collections::VecDeque;
use std::iter::{Rev, DoubleEndedIterator};
use unicode_width::UnicodeWidthStr;
use core::filter::{Filter, Parser as FilterParser, Constraint, ParserResult as FilterParserResult};
use utils::ansi_decoder::{ComponentCollection, AnsiStr};
#[derive(Clone)]
pub struct | {
pub content_without_ansi: String,
pub components: Option<ComponentCollection>,
pub width: usize,
}
impl Line {
pub fn new(content: String) -> Line {
let has_ansi = content.has_ansi_escape_sequence();
let (content_without_ansi, components) = if has_ansi {
(content.strip_ansi(), Some(content.to_components()))
} else {
(content, None)
};
Line {
width: content_without_ansi.width(),
content_without_ansi: content_without_ansi,
components: components,
}
}
pub fn guess_height(&self, container_width: usize) -> usize {
max(1,
(self.width as f32 / container_width as f32).ceil() as usize)
}
pub fn matches_for(&self, text: &str) -> Vec<(usize, &str)> {
self.content_without_ansi.match_indices(text).collect()
}
pub fn contains(&self, text: &str) -> bool {
self.content_without_ansi.contains(text)
}
}
pub struct LineCollection {
pub entries: VecDeque<Line>,
capacity: usize,
}
impl LineCollection {
pub fn new(capacity: usize) -> LineCollection {
LineCollection {
entries: VecDeque::new(),
capacity: capacity,
}
}
fn clear_excess(&mut self) {
while self.entries.len() > self.capacity {
self.entries.pop_front();
}
}
pub fn len(&self) -> usize {
self.entries.len()
}
fn add(&mut self, item: String) {
self.entries.push_back(Line::new(item));
}
}
impl Extend<String> for LineCollection {
fn extend<T: IntoIterator<Item = String>>(&mut self, iter: T) {
for item in iter {
self.add(item);
}
self.clear_excess();
}
}
pub struct ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
iterator: I,
parser: FilterParser,
pending: Vec<&'a Line>,
}
impl<'a, I> ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
fn handle_empty(&mut self) -> Option<I::Item> {
self.iterator.next()
}
fn handle_content(&mut self) -> Option<I::Item> {
let matcher = self.parser.filter.content.as_ref().unwrap();
(&mut self.iterator).filter(|line| matcher.is_match(&line.content_without_ansi)).next()
}
fn handle_boundaries(&mut self) -> Option<I::Item> {
if self.pending.is_empty() {
let mut match_found = false;
for line in &mut self.iterator {
match self.parser.matches(&line.content_without_ansi) {
FilterParserResult::Match => self.pending.push(line),
FilterParserResult::LastMatch(append) => {
match_found = true;
if append {
self.pending.push(line);
}
break;
}
FilterParserResult::Invalid(append) => {
self.pending.clear();
if append {
self.pending.push(line);
}
}
FilterParserResult::NoMatch => {}
}
}
if !(match_found || self.parser.assume_found_matches()) {
return None;
}
self.pending.reverse();
}
self.pending.pop()
}
}
pub trait Parser<'a>: Iterator<Item = &'a Line> {
fn parse(self, filter: Filter) -> ParserState<'a, Rev<Self>>
where Self: DoubleEndedIterator + Sized;
}
impl<'a, I> Parser<'a> for I
where I: Iterator<Item = &'a Line>
{
fn parse(self, filter: Filter) -> ParserState<'a, Rev<Self>>
where Self: DoubleEndedIterator + Sized
{
ParserState {
iterator: self.rev(),
pending: vec![],
parser: FilterParser::new(filter),
}
}
}
impl<'a, I> Iterator for ParserState<'a, I>
where I: DoubleEndedIterator<Item = &'a Line>
{
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
if self.parser.constraints.is_empty() {
self.handle_empty()
} else if self.parser.constraints == vec![Constraint::Content] {
self.handle_content()
} else {
self.handle_boundaries()
}
}
}
| Line | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.